method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public void createControl(Composite parent) { Composite container = new Composite(parent, SWT.NULL); setControl(container);
void function(Composite parent) { Composite container = new Composite(parent, SWT.NULL); setControl(container);
/** * Create contents of the wizard. * * @param parent */
Create contents of the wizard
createControl
{ "repo_name": "asposewords/Aspose_Words_Java", "path": "Plugins/Aspose_Words_Java_for_Eclipse_Maven/AsposeWordsEclipsePlugin/src/com/aspose/words/maven/examples/AsposeExampleWizardPage.java", "license": "mit", "size": 11128 }
[ "org.eclipse.swt.widgets.Composite" ]
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
2,606,241
public void setSwitchId(String switchId) { if (switchId == null) { throw new IllegalArgumentException("need to set a switch_id"); } else if (!Utils.validateSwitchId(switchId)) { throw new IllegalArgumentException("need to set valid value for switch_id"); } this.switchId = switchId; }
void function(String switchId) { if (switchId == null) { throw new IllegalArgumentException(STR); } else if (!Utils.validateSwitchId(switchId)) { throw new IllegalArgumentException(STR); } this.switchId = switchId; }
/** * Sets id of the switch. * * @param switchId of the switch */
Sets id of the switch
setSwitchId
{ "repo_name": "nikitamarchenko/open-kilda", "path": "services/src/messaging/src/main/java/org/openkilda/messaging/command/flow/BaseFlow.java", "license": "apache-2.0", "size": 4243 }
[ "org.openkilda.messaging.Utils" ]
import org.openkilda.messaging.Utils;
import org.openkilda.messaging.*;
[ "org.openkilda.messaging" ]
org.openkilda.messaging;
373,589
protected long createRootNode(Txn transaction) throws IOException { BTreeNode root = createBTreeNode(transaction, LEAF, null, true); setRootNode(root); return root.page.getPageNum(); }
long function(Txn transaction) throws IOException { BTreeNode root = createBTreeNode(transaction, LEAF, null, true); setRootNode(root); return root.page.getPageNum(); }
/** * Create the root node. * * @param transaction * @return The root node * @throws IOException */
Create the root node
createRootNode
{ "repo_name": "kingargyle/exist-1.4.x", "path": "src/org/exist/storage/btree/BTree.java", "license": "lgpl-2.1", "size": 95097 }
[ "java.io.IOException", "org.exist.storage.txn.Txn" ]
import java.io.IOException; import org.exist.storage.txn.Txn;
import java.io.*; import org.exist.storage.txn.*;
[ "java.io", "org.exist.storage" ]
java.io; org.exist.storage;
2,486,150
return new ClientBuilderImpl(); }
return new ClientBuilderImpl(); }
/** * Get a new builder instance that can used to configure and build a {@link PulsarClient} instance. * * @return the {@link ClientBuilder} * * @since 2.0.0 */
Get a new builder instance that can used to configure and build a <code>PulsarClient</code> instance
builder
{ "repo_name": "sschepens/pulsar", "path": "pulsar-client/src/main/java/org/apache/pulsar/client/api/PulsarClient.java", "license": "apache-2.0", "size": 14497 }
[ "org.apache.pulsar.client.impl.ClientBuilderImpl" ]
import org.apache.pulsar.client.impl.ClientBuilderImpl;
import org.apache.pulsar.client.impl.*;
[ "org.apache.pulsar" ]
org.apache.pulsar;
318,501
@Test(timeout = 30000, expected=IllegalStateException.class) public void testCreateBrowserOnTopicSession() throws JMSException { topicSession.createBrowser(queue); }
@Test(timeout = 30000, expected=IllegalStateException.class) void function() throws JMSException { topicSession.createBrowser(queue); }
/** * Test that a call to <code>createBrowser()</code> method * on a <code>TopicSession</code> throws a * <code>javax.jms.IllegalStateException</code>. * (see JMS 1.1 specs, table 4-1). * * @since JMS 1.1 */
Test that a call to <code>createBrowser()</code> method on a <code>TopicSession</code> throws a <code>javax.jms.IllegalStateException</code>. (see JMS 1.1 specs, table 4-1)
testCreateBrowserOnTopicSession
{ "repo_name": "avranju/qpid-jms", "path": "qpid-jms-client/src/test/java/org/apache/qpid/jms/session/JmsTopicSessionTest.java", "license": "apache-2.0", "size": 5476 }
[ "javax.jms.IllegalStateException", "javax.jms.JMSException", "org.junit.Test" ]
import javax.jms.IllegalStateException; import javax.jms.JMSException; import org.junit.Test;
import javax.jms.*; import org.junit.*;
[ "javax.jms", "org.junit" ]
javax.jms; org.junit;
1,203,777
public void stop(final BundleContext context) throws Exception { super.stop(context); plugin = null; }
void function(final BundleContext context) throws Exception { super.stop(context); plugin = null; }
/** * This method is called when the plug-in is stopped */
This method is called when the plug-in is stopped
stop
{ "repo_name": "theanuradha/debrief", "path": "org.mwc.debrief.track_shift/src/org/mwc/debrief/track_shift/TrackShiftActivator.java", "license": "epl-1.0", "size": 1927 }
[ "org.osgi.framework.BundleContext" ]
import org.osgi.framework.BundleContext;
import org.osgi.framework.*;
[ "org.osgi.framework" ]
org.osgi.framework;
1,991,929
public static Animation inFromRightAnimation(long duration, Interpolator interpolator) { Animation inFromRight = new TranslateAnimation( Animation.RELATIVE_TO_PARENT, +1.0f, Animation.RELATIVE_TO_PARENT, 0.0f, Animation.RELATIVE_TO_PARENT, 0.0f, Animation.RELATIVE_TO_PARENT, 0.0f ); inFromRight.setDuration(duration); inFromRight.setInterpolator(interpolator==null?new AccelerateInterpolator():interpolator); return inFromRight; }
static Animation function(long duration, Interpolator interpolator) { Animation inFromRight = new TranslateAnimation( Animation.RELATIVE_TO_PARENT, +1.0f, Animation.RELATIVE_TO_PARENT, 0.0f, Animation.RELATIVE_TO_PARENT, 0.0f, Animation.RELATIVE_TO_PARENT, 0.0f ); inFromRight.setDuration(duration); inFromRight.setInterpolator(interpolator==null?new AccelerateInterpolator():interpolator); return inFromRight; }
/** * Slide animations to enter a view from right. * * @param duration the animation duration in milliseconds * @param interpolator the interpolator to use (pass {@code null} to use the {@link AccelerateInterpolator} interpolator) * @return a slide transition animation */
Slide animations to enter a view from right
inFromRightAnimation
{ "repo_name": "adiguzel/wordwise", "path": "src/com/tekle/oss/android/animation/AnimationFactory.java", "license": "apache-2.0", "size": 15184 }
[ "android.view.animation.AccelerateInterpolator", "android.view.animation.Animation", "android.view.animation.Interpolator", "android.view.animation.TranslateAnimation" ]
import android.view.animation.AccelerateInterpolator; import android.view.animation.Animation; import android.view.animation.Interpolator; import android.view.animation.TranslateAnimation;
import android.view.animation.*;
[ "android.view" ]
android.view;
1,620,906
public static Set<ThreadGroup> getGroups() { return getGroups(currentGroup(), null); } /** * Returns a possibly empty set of thread groups that are immediate descendants of * the specified thread group. * * @param parent The group to obtain descendant groups from * @throws NullPointerException If the thread group is {@code null}
static Set<ThreadGroup> function() { return getGroups(currentGroup(), null); } /** * Returns a possibly empty set of thread groups that are immediate descendants of * the specified thread group. * * @param parent The group to obtain descendant groups from * @throws NullPointerException If the thread group is {@code null}
/** * Returns a possibly empty set of thread groups that are immediate descendants * of the current thread's thread group. */
Returns a possibly empty set of thread groups that are immediate descendants of the current thread's thread group
getGroups
{ "repo_name": "cfloersch/Stdlib", "path": "src/main/java/xpertss/threads/Threads.java", "license": "gpl-2.0", "size": 20495 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
1,688,813
public interface DragScrollProfile { float getSpeed(float w, long t); } private class DragScroller implements Runnable, AbsListView.OnScrollListener { private boolean mAbort; private long mPrevTime; private int dy; private float dt; private long tStart; private int scrollDir; public final static int STOP = -1; public final static int UP = 0; public final static int DOWN = 1; private float mScrollSpeed; // pixels per ms private boolean mScrolling = false;
interface DragScrollProfile { float function(float w, long t); } private class DragScroller implements Runnable, AbsListView.OnScrollListener { private boolean mAbort; private long mPrevTime; private int dy; private float dt; private long tStart; private int scrollDir; public final static int STOP = -1; public final static int UP = 0; public final static int DOWN = 1; private float mScrollSpeed; private boolean mScrolling = false;
/** * Return a scroll speed in pixels/millisecond. Always return a positive * number. * * @param w Normalized position in scroll region (i.e. w \in [0,1]). * Small w typically means slow scrolling. * @param t Time (in milliseconds) since start of scroll (handy if you * want scroll acceleration). * @return Scroll speed at position w and time t in pixels/ms. */
Return a scroll speed in pixels/millisecond. Always return a positive number
getSpeed
{ "repo_name": "Chonlakant/frostwire-android", "path": "apollo/src/com/andrew/apollo/dragdrop/DragSortListView.java", "license": "gpl-3.0", "size": 68906 }
[ "android.widget.AbsListView" ]
import android.widget.AbsListView;
import android.widget.*;
[ "android.widget" ]
android.widget;
511,047
public static boolean checkIfDiffIsLegal(SolrDocumentList a, SolrDocumentList b, String aName, String bName, Set<String> bAddFails, Set<String> bDeleteFails) { boolean legal = true; Set<Map> setA = new HashSet<>(); for (SolrDocument sdoc : a) { setA.add(new HashMap(sdoc)); } Set<Map> setB = new HashSet<>(); for (SolrDocument sdoc : b) { setB.add(new HashMap(sdoc)); } Set<Map> onlyInA = new HashSet<>(setA); onlyInA.removeAll(setB); Set<Map> onlyInB = new HashSet<>(setB); onlyInB.removeAll(setA); if (onlyInA.size() == 0 && onlyInB.size() == 0) { throw new IllegalArgumentException("No difference between list a and b"); } System.err.println("###### Only in " + aName + ": " + onlyInA); System.err.println("###### Only in " + bName + ": " + onlyInB); for (Map doc : onlyInA) { if (bAddFails == null || !bAddFails.contains(doc.get("id"))) { legal = false; // System.err.println("###### Only in " + aName + ": " + doc.get("id")); } else { System.err.println("###### Only in " + aName + ": " + doc.get("id") + ", but this is expected because we found an add fail for " + doc.get("id")); } } for (Map doc : onlyInB) { if (bDeleteFails == null || !bDeleteFails.contains(doc.get("id"))) { legal = false; // System.err.println("###### Only in " + bName + ": " + doc.get("id")); } else { System.err.println("###### Only in " + bName + ": " + doc.get("id") + ", but this is expected because we found a delete fail for " + doc.get("id")); } } return legal; }
static boolean function(SolrDocumentList a, SolrDocumentList b, String aName, String bName, Set<String> bAddFails, Set<String> bDeleteFails) { boolean legal = true; Set<Map> setA = new HashSet<>(); for (SolrDocument sdoc : a) { setA.add(new HashMap(sdoc)); } Set<Map> setB = new HashSet<>(); for (SolrDocument sdoc : b) { setB.add(new HashMap(sdoc)); } Set<Map> onlyInA = new HashSet<>(setA); onlyInA.removeAll(setB); Set<Map> onlyInB = new HashSet<>(setB); onlyInB.removeAll(setA); if (onlyInA.size() == 0 && onlyInB.size() == 0) { throw new IllegalArgumentException(STR); } System.err.println(STR + aName + STR + onlyInA); System.err.println(STR + bName + STR + onlyInB); for (Map doc : onlyInA) { if (bAddFails == null !bAddFails.contains(doc.get("id"))) { legal = false; } else { System.err.println(STR + aName + STR + doc.get("id") + STR + doc.get("id")); } } for (Map doc : onlyInB) { if (bDeleteFails == null !bDeleteFails.contains(doc.get("id"))) { legal = false; } else { System.err.println(STR + bName + STR + doc.get("id") + STR + doc.get("id")); } } return legal; }
/** * When a and b are known to be different, this method tells if the difference * is legal given the adds and deletes that failed from b. * * @param a first list of docs * @param b second list of docs * @param aName label for first list of docs * @param bName label for second list of docs * @param bAddFails null or list of the ids of adds that failed for b * @param bDeleteFails null or list of the ids of deletes that failed for b * @return true if the difference in a and b is legal */
When a and b are known to be different, this method tells if the difference is legal given the adds and deletes that failed from b
checkIfDiffIsLegal
{ "repo_name": "williamchengit/TestRepo", "path": "solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java", "license": "apache-2.0", "size": 8346 }
[ "java.util.HashMap", "java.util.HashSet", "java.util.Map", "java.util.Set", "org.apache.solr.common.SolrDocument", "org.apache.solr.common.SolrDocumentList" ]
import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList;
import java.util.*; import org.apache.solr.common.*;
[ "java.util", "org.apache.solr" ]
java.util; org.apache.solr;
2,682,834
@Override public void onAudioFocusChange(final int focusChange) { if (DEBUG) { Log.d(TAG, "StreamHandler.onAudioFocusChange() with " + focusChange); } if (mMediaPlayer != null) { final float duckVolume = 0.2f; switch (focusChange) { case AudioManager.AUDIOFOCUS_GAIN: if (mMediaPlayer.isPlaying()) { if (DEBUG) { Log.d(TAG, "Regaining after ducked transient loss."); } mMediaPlayer.setVolume(1.0f, 1.0f); } else if (!mPreparingStream) { if (DEBUG) { Log.d(TAG, "Coming out of transient loss."); } mMediaPlayer.start(); } break; case AudioManager.AUDIOFOCUS_LOSS: MPDControl.run(MPDroidService.MPD_ASYNC_HELPER.oMPD, MPDControl.ACTION_PAUSE); break; case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: mMediaPlayer.pause(); break; case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: mMediaPlayer.setVolume(duckVolume, duckVolume); break; default: break; } } }
void function(final int focusChange) { if (DEBUG) { Log.d(TAG, STR + focusChange); } if (mMediaPlayer != null) { final float duckVolume = 0.2f; switch (focusChange) { case AudioManager.AUDIOFOCUS_GAIN: if (mMediaPlayer.isPlaying()) { if (DEBUG) { Log.d(TAG, STR); } mMediaPlayer.setVolume(1.0f, 1.0f); } else if (!mPreparingStream) { if (DEBUG) { Log.d(TAG, STR); } mMediaPlayer.start(); } break; case AudioManager.AUDIOFOCUS_LOSS: MPDControl.run(MPDroidService.MPD_ASYNC_HELPER.oMPD, MPDControl.ACTION_PAUSE); break; case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: mMediaPlayer.pause(); break; case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: mMediaPlayer.setVolume(duckVolume, duckVolume); break; default: break; } } }
/** * Handle the change of volume if a notification, or any other kind of * interrupting audio event. * * @param focusChange The type of focus change. */
Handle the change of volume if a notification, or any other kind of interrupting audio event
onAudioFocusChange
{ "repo_name": "jcnoir/dmix", "path": "MPDroid/src/main/java/com/namelessdev/mpdroid/service/StreamHandler.java", "license": "apache-2.0", "size": 25147 }
[ "android.media.AudioManager", "android.util.Log", "com.namelessdev.mpdroid.helpers.MPDControl" ]
import android.media.AudioManager; import android.util.Log; import com.namelessdev.mpdroid.helpers.MPDControl;
import android.media.*; import android.util.*; import com.namelessdev.mpdroid.helpers.*;
[ "android.media", "android.util", "com.namelessdev.mpdroid" ]
android.media; android.util; com.namelessdev.mpdroid;
2,850,982
void access(final Path path, final Set<AccessMode> mode) throws AccessControlException, FileNotFoundException, IOException;
void access(final Path path, final Set<AccessMode> mode) throws AccessControlException, FileNotFoundException, IOException;
/** * Checks if the path designated by {@code path} is accessible for the * given {@code mode}. * * @param path the path to check access * @param mode the access mode * @throws AccessControlException if the current user is not allowed to access the patg. * @throws FileNotFoundException the the path doesn't exist * @throws IOException if an error occurs while checking */
Checks if the path designated by path is accessible for the given mode
access
{ "repo_name": "dremio/dremio-oss", "path": "common/src/main/java/com/dremio/io/file/FileSystem.java", "license": "apache-2.0", "size": 12923 }
[ "java.io.FileNotFoundException", "java.io.IOException", "java.nio.file.AccessMode", "java.security.AccessControlException", "java.util.Set" ]
import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.AccessMode; import java.security.AccessControlException; import java.util.Set;
import java.io.*; import java.nio.file.*; import java.security.*; import java.util.*;
[ "java.io", "java.nio", "java.security", "java.util" ]
java.io; java.nio; java.security; java.util;
744,959
public void setNonARRenderer(OpenGLRenderer customRenderer) { renderer.setNonARRenderer(customRenderer); }
void function(OpenGLRenderer customRenderer) { renderer.setNonARRenderer(customRenderer); }
/** * Set a renderer that draws non AR stuff. Optional, may be set to null or omited. * and setups lighting stuff. * @param customRenderer */
Set a renderer that draws non AR stuff. Optional, may be set to null or omited. and setups lighting stuff
setNonARRenderer
{ "repo_name": "Turupawn/LuxxorAR", "path": "src/edu/dhbw/andar/AndARActivity.java", "license": "gpl-3.0", "size": 10295 }
[ "edu.dhbw.andar.interfaces.OpenGLRenderer" ]
import edu.dhbw.andar.interfaces.OpenGLRenderer;
import edu.dhbw.andar.interfaces.*;
[ "edu.dhbw.andar" ]
edu.dhbw.andar;
1,026,169
protected boolean isCppCheckEnabled( boolean quiet ) { if ( cppCheck.getSkip() ) { if ( ! quiet ) { getLog().info( CppCheckConfiguration.SKIP_MESSAGE + ", 'skip' set to true in the " + CppCheckConfiguration.TOOL_NAME + " configuration" ); } return false; } if ( cppCheck.getCppCheckPath() == null ) { if ( ! quiet ) { getLog().info( CppCheckConfiguration.SKIP_MESSAGE + ", path to " + CppCheckConfiguration.TOOL_NAME + " not set" ); } return false; } return true; }
boolean function( boolean quiet ) { if ( cppCheck.getSkip() ) { if ( ! quiet ) { getLog().info( CppCheckConfiguration.SKIP_MESSAGE + STR + CppCheckConfiguration.TOOL_NAME + STR ); } return false; } if ( cppCheck.getCppCheckPath() == null ) { if ( ! quiet ) { getLog().info( CppCheckConfiguration.SKIP_MESSAGE + STR + CppCheckConfiguration.TOOL_NAME + STR ); } return false; } return true; }
/** * Determine whether CppCheck is enabled by the configuration * @param quiet set to true to suppress logging * @return true if CppCheck is enabled, false otherwise */
Determine whether CppCheck is enabled by the configuration
isCppCheckEnabled
{ "repo_name": "andi12/msbuild-maven-plugin", "path": "msbuild-maven-plugin/src/main/java/uk/org/raje/maven/plugin/msbuild/AbstractMSBuildPluginMojo.java", "license": "apache-2.0", "size": 28963 }
[ "uk.org.raje.maven.plugin.msbuild.configuration.CppCheckConfiguration" ]
import uk.org.raje.maven.plugin.msbuild.configuration.CppCheckConfiguration;
import uk.org.raje.maven.plugin.msbuild.configuration.*;
[ "uk.org.raje" ]
uk.org.raje;
72,783
if(StringUtils.isNotEmpty(dateComparison)) { for (DateComparison d : DateComparison.values()) { if (dateComparison.equalsIgnoreCase(d.getDateComparison())) { return d; } } } throw new IllegalArgumentException("No date comparison found with value: " + dateComparison); }
if(StringUtils.isNotEmpty(dateComparison)) { for (DateComparison d : DateComparison.values()) { if (dateComparison.equalsIgnoreCase(d.getDateComparison())) { return d; } } } throw new IllegalArgumentException(STR + dateComparison); }
/** * Gets the enumeration from a string value. * @param dateComparison The value to look up. * @return A {@link DateComparison comparison}. */
Gets the enumeration from a string value
fromValue
{ "repo_name": "mtnfog/entity-query-language", "path": "eql-filters/src/main/java/ai/idylnlp/eql/filters/comparisons/DateComparison.java", "license": "apache-2.0", "size": 2327 }
[ "org.apache.commons.lang3.StringUtils" ]
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.*;
[ "org.apache.commons" ]
org.apache.commons;
1,616,656
protected void executeQueryPlan(Date date, Timestamp timestamp, double discount, int quantity) throws SQLException { Log.getLogWriter().info("Getting query plan for " + queryName + " with date=" + date + ", timestamp=" + timestamp + ", discount=" + discount + ", quantity=" + quantity); if (preparedQueryPlanStmt == null) { preparedQueryPlanStmt = this.connection.prepareStatement("explain " + query); } preparedQueryPlanStmt.setDate(1, date); preparedQueryPlanStmt.setTimestamp(2, timestamp); preparedQueryPlanStmt.setDouble(3, discount); preparedQueryPlanStmt.setDouble(4, discount); preparedQueryPlanStmt.setInt(5, quantity); ResultSet rs = preparedQueryPlanStmt.executeQuery(); logQueryPlan(rs); } public static class Result implements ResultRow { // columns in a result row BigDecimal revenue;
void function(Date date, Timestamp timestamp, double discount, int quantity) throws SQLException { Log.getLogWriter().info(STR + queryName + STR + date + STR + timestamp + STR + discount + STR + quantity); if (preparedQueryPlanStmt == null) { preparedQueryPlanStmt = this.connection.prepareStatement(STR + query); } preparedQueryPlanStmt.setDate(1, date); preparedQueryPlanStmt.setTimestamp(2, timestamp); preparedQueryPlanStmt.setDouble(3, discount); preparedQueryPlanStmt.setDouble(4, discount); preparedQueryPlanStmt.setInt(5, quantity); ResultSet rs = preparedQueryPlanStmt.executeQuery(); logQueryPlan(rs); } public static class Result implements ResultRow { BigDecimal revenue;
/** Execute a query to obtain a query plan with the given query argument * * @param date Date parameter for this query. * @param timestamp The timestamp that is equivalent to date. * @param discount Discount parameter for this query. * @param quantity Quantity parameter for this query. * @throws SQLException Thrown if any exceptions are encountered while executing this query. */
Execute a query to obtain a query plan with the given query argument
executeQueryPlan
{ "repo_name": "papicella/snappy-store", "path": "tests/sql/src/main/java/gfxdperf/tpch/AbstractQ6.java", "license": "apache-2.0", "size": 6940 }
[ "java.math.BigDecimal", "java.sql.Date", "java.sql.ResultSet", "java.sql.SQLException", "java.sql.Timestamp" ]
import java.math.BigDecimal; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp;
import java.math.*; import java.sql.*;
[ "java.math", "java.sql" ]
java.math; java.sql;
788,195
private void getSpecs(Path directory, Collection<Spec> specs) throws Exception { FileStatus[] fileStatuses = fs.listStatus(directory); for (FileStatus fileStatus : fileStatuses) { if (fileStatus.isDirectory()) { getSpecs(fileStatus.getPath(), specs); } else { try { specs.add(readSpecFromFile(fileStatus.getPath())); } catch (Exception e) { log.warn(String.format("Path[%s] cannot be correctly deserialized as Spec", fileStatus.getPath()), e); } } } }
void function(Path directory, Collection<Spec> specs) throws Exception { FileStatus[] fileStatuses = fs.listStatus(directory); for (FileStatus fileStatus : fileStatuses) { if (fileStatus.isDirectory()) { getSpecs(fileStatus.getPath(), specs); } else { try { specs.add(readSpecFromFile(fileStatus.getPath())); } catch (Exception e) { log.warn(String.format(STR, fileStatus.getPath()), e); } } } }
/** * For multiple {@link FlowSpec}s to be loaded, catch Exceptions when one of them failed to be loaded and * continue with the rest. * * The {@link IOException} thrown from standard FileSystem call will be propagated, while the file-specific * exception will be caught to ensure other files being able to deserialized. * * @param directory The directory that contains specs to be deserialized * @param specs Container of specs. */
For multiple <code>FlowSpec</code>s to be loaded, catch Exceptions when one of them failed to be loaded and continue with the rest. The <code>IOException</code> thrown from standard FileSystem call will be propagated, while the file-specific exception will be caught to ensure other files being able to deserialized
getSpecs
{ "repo_name": "shirshanka/gobblin", "path": "gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_store/FSSpecStore.java", "license": "apache-2.0", "size": 12885 }
[ "java.util.Collection", "org.apache.gobblin.runtime.api.Spec", "org.apache.hadoop.fs.FileStatus", "org.apache.hadoop.fs.Path" ]
import java.util.Collection; import org.apache.gobblin.runtime.api.Spec; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path;
import java.util.*; import org.apache.gobblin.runtime.api.*; import org.apache.hadoop.fs.*;
[ "java.util", "org.apache.gobblin", "org.apache.hadoop" ]
java.util; org.apache.gobblin; org.apache.hadoop;
2,224,873
public static String collectCodePoint(String string, CodePointFunction function) { int size = string.length(); StringBuilder builder = new StringBuilder(size); for (int i = 0; i < size; ) { int codePoint = string.codePointAt(i); builder.appendCodePoint(function.valueOf(codePoint)); i += Character.charCount(codePoint); } return builder.toString(); }
static String function(String string, CodePointFunction function) { int size = string.length(); StringBuilder builder = new StringBuilder(size); for (int i = 0; i < size; ) { int codePoint = string.codePointAt(i); builder.appendCodePoint(function.valueOf(codePoint)); i += Character.charCount(codePoint); } return builder.toString(); }
/** * Transform the int code point elements to a new string using the specified function {@code function}. * * @since 7.0 */
Transform the int code point elements to a new string using the specified function function
collectCodePoint
{ "repo_name": "bhav0904/eclipse-collections", "path": "eclipse-collections/src/main/java/org/eclipse/collections/impl/utility/StringIterate.java", "license": "bsd-3-clause", "size": 43992 }
[ "org.eclipse.collections.impl.block.function.primitive.CodePointFunction" ]
import org.eclipse.collections.impl.block.function.primitive.CodePointFunction;
import org.eclipse.collections.impl.block.function.primitive.*;
[ "org.eclipse.collections" ]
org.eclipse.collections;
2,227,512
void reverseForEachWithIndex(ObjectIntProcedure<? super T> procedure);
void reverseForEachWithIndex(ObjectIntProcedure<? super T> procedure);
/** * Evaluates the procedure for each element and it's index in reverse order. * <pre>e.g. * people.reverseForEachWithIndex((person, index) -> * LOGGER.info("Index: " + index + " person: " + person.getName())); * </pre> * * @since 9.0.0 */
Evaluates the procedure for each element and it's index in reverse order. <code>e.g. people.reverseForEachWithIndex((person, index) -> LOGGER.info("Index: " + index + " person: " + person.getName())); </code>
reverseForEachWithIndex
{ "repo_name": "bhav0904/eclipse-collections", "path": "eclipse-collections-api/src/main/java/org/eclipse/collections/api/ordered/ReversibleIterable.java", "license": "bsd-3-clause", "size": 7851 }
[ "org.eclipse.collections.api.block.procedure.primitive.ObjectIntProcedure" ]
import org.eclipse.collections.api.block.procedure.primitive.ObjectIntProcedure;
import org.eclipse.collections.api.block.procedure.primitive.*;
[ "org.eclipse.collections" ]
org.eclipse.collections;
2,481,530
boolean createCollection(String name) throws EXistException, PermissionDeniedException;
boolean createCollection(String name) throws EXistException, PermissionDeniedException;
/** * Create a new collection on the database. * * @param name the path to the new collection. * @return * @throws EXistException * @throws PermissionDeniedException */
Create a new collection on the database
createCollection
{ "repo_name": "olvidalo/exist", "path": "exist-core/src/main/java/org/exist/xmlrpc/RpcAPI.java", "license": "lgpl-2.1", "size": 39990 }
[ "org.exist.EXistException", "org.exist.security.PermissionDeniedException" ]
import org.exist.EXistException; import org.exist.security.PermissionDeniedException;
import org.exist.*; import org.exist.security.*;
[ "org.exist", "org.exist.security" ]
org.exist; org.exist.security;
1,965,570
static Predicate<RoleMapperExpression> predicate(ExpressionModel map) { return expr -> expr.match(map); }
static Predicate<RoleMapperExpression> predicate(ExpressionModel map) { return expr -> expr.match(map); }
/** * Creates an <em>inverted</em> predicate that can test whether an expression matches * a fixed object. Its purpose is for cases where there is a {@link java.util.stream.Stream} of * expressions, that need to be filtered against a single map. */
Creates an inverted predicate that can test whether an expression matches a fixed object. Its purpose is for cases where there is a <code>java.util.stream.Stream</code> of expressions, that need to be filtered against a single map
predicate
{ "repo_name": "jmluy/elasticsearch", "path": "x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/RoleMapperExpression.java", "license": "apache-2.0", "size": 1507 }
[ "java.util.function.Predicate" ]
import java.util.function.Predicate;
import java.util.function.*;
[ "java.util" ]
java.util;
607,870
protected void scanAttribute(XMLAttributesImpl attributes) throws IOException, XNIException { if (DEBUG_START_END_ELEMENT) System.out.println(">>> scanAttribute()"); // name fEntityScanner.scanQName(fAttributeQName); // equals fEntityScanner.skipSpaces(); if (!fEntityScanner.skipChar('=')) { reportFatalError( "EqRequiredInAttribute", new Object[] { fCurrentElement.rawname, fAttributeQName.rawname }); } fEntityScanner.skipSpaces(); // content int attrIndex; if (fBindNamespaces) { attrIndex = attributes.getLength(); attributes.addAttributeNS( fAttributeQName, XMLSymbols.fCDATASymbol, null); } else { int oldLen = attributes.getLength(); attrIndex = attributes.addAttribute( fAttributeQName, XMLSymbols.fCDATASymbol, null); // WFC: Unique Att Spec if (oldLen == attributes.getLength()) { reportFatalError( "AttributeNotUnique", new Object[] { fCurrentElement.rawname, fAttributeQName.rawname }); } } //REVISIT: one more case needs to be included: external PE and standalone is no boolean isVC = fHasExternalDTD && !fStandalone; // REVISIT: it seems that this function should not take attributes, and length scanAttributeValue( this.fTempString, fTempString2, fAttributeQName.rawname, isVC, fCurrentElement.rawname); String value = fTempString.toString(); attributes.setValue(attrIndex, value); attributes.setNonNormalizedValue(attrIndex, fTempString2.toString()); attributes.setSpecified(attrIndex, true); // record namespace declarations if any. if (fBindNamespaces) { String localpart = fAttributeQName.localpart; String prefix = fAttributeQName.prefix != null ? fAttributeQName.prefix : XMLSymbols.EMPTY_STRING; // when it's of form xmlns="..." or xmlns:prefix="...", // it's a namespace declaration. but prefix:xmlns="..." isn't. if (prefix == XMLSymbols.PREFIX_XMLNS || prefix == XMLSymbols.EMPTY_STRING && localpart == XMLSymbols.PREFIX_XMLNS) { // get the internalized value of this attribute String uri = fSymbolTable.addSymbol(value); // 1. 
"xmlns" can't be bound to any namespace if (prefix == XMLSymbols.PREFIX_XMLNS && localpart == XMLSymbols.PREFIX_XMLNS) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXMLNS", new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // 2. the namespace for "xmlns" can't be bound to any prefix if (uri == NamespaceContext.XMLNS_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXMLNS", new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } // 3. "xml" can't be bound to any other namespace than it's own if (localpart == XMLSymbols.PREFIX_XML) { if (uri != NamespaceContext.XML_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXML", new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } // 4. the namespace for "xml" can't be bound to any other prefix else { if (uri == NamespaceContext.XML_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, "CantBindXML", new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } prefix = localpart != XMLSymbols.PREFIX_XMLNS ? localpart : XMLSymbols.EMPTY_STRING; // Declare prefix in context. Removing the association between a prefix and a // namespace name is permitted in XML 1.1, so if the uri value is the empty string, // the prefix is being unbound. -- mrglavas fNamespaceContext.declarePrefix( prefix, uri.length() != 0 ? uri : null); // bind namespace attribute to a namespace attributes.setURI( attrIndex, fNamespaceContext.getURI(XMLSymbols.PREFIX_XMLNS)); } else { // attempt to bind attribute if (fAttributeQName.prefix != null) { attributes.setURI( attrIndex, fNamespaceContext.getURI(fAttributeQName.prefix)); } } } if (DEBUG_START_END_ELEMENT) System.out.println("<<< scanAttribute()"); } // scanAttribute(XMLAttributes)
void function(XMLAttributesImpl attributes) throws IOException, XNIException { if (DEBUG_START_END_ELEMENT) System.out.println(STR); fEntityScanner.scanQName(fAttributeQName); fEntityScanner.skipSpaces(); if (!fEntityScanner.skipChar('=')) { reportFatalError( STR, new Object[] { fCurrentElement.rawname, fAttributeQName.rawname }); } fEntityScanner.skipSpaces(); int attrIndex; if (fBindNamespaces) { attrIndex = attributes.getLength(); attributes.addAttributeNS( fAttributeQName, XMLSymbols.fCDATASymbol, null); } else { int oldLen = attributes.getLength(); attrIndex = attributes.addAttribute( fAttributeQName, XMLSymbols.fCDATASymbol, null); if (oldLen == attributes.getLength()) { reportFatalError( STR, new Object[] { fCurrentElement.rawname, fAttributeQName.rawname }); } } boolean isVC = fHasExternalDTD && !fStandalone; scanAttributeValue( this.fTempString, fTempString2, fAttributeQName.rawname, isVC, fCurrentElement.rawname); String value = fTempString.toString(); attributes.setValue(attrIndex, value); attributes.setNonNormalizedValue(attrIndex, fTempString2.toString()); attributes.setSpecified(attrIndex, true); if (fBindNamespaces) { String localpart = fAttributeQName.localpart; String prefix = fAttributeQName.prefix != null ? 
fAttributeQName.prefix : XMLSymbols.EMPTY_STRING; if (prefix == XMLSymbols.PREFIX_XMLNS prefix == XMLSymbols.EMPTY_STRING && localpart == XMLSymbols.PREFIX_XMLNS) { String uri = fSymbolTable.addSymbol(value); if (prefix == XMLSymbols.PREFIX_XMLNS && localpart == XMLSymbols.PREFIX_XMLNS) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, STR, new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } if (uri == NamespaceContext.XMLNS_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, STR, new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } if (localpart == XMLSymbols.PREFIX_XML) { if (uri != NamespaceContext.XML_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, STR, new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } else { if (uri == NamespaceContext.XML_URI) { fErrorReporter.reportError( XMLMessageFormatter.XMLNS_DOMAIN, STR, new Object[] { fAttributeQName }, XMLErrorReporter.SEVERITY_FATAL_ERROR); } } prefix = localpart != XMLSymbols.PREFIX_XMLNS ? localpart : XMLSymbols.EMPTY_STRING; fNamespaceContext.declarePrefix( prefix, uri.length() != 0 ? uri : null); attributes.setURI( attrIndex, fNamespaceContext.getURI(XMLSymbols.PREFIX_XMLNS)); } else { if (fAttributeQName.prefix != null) { attributes.setURI( attrIndex, fNamespaceContext.getURI(fAttributeQName.prefix)); } } } if (DEBUG_START_END_ELEMENT) System.out.println(STR); }
/** * Scans an attribute. * <p> * <pre> * [41] Attribute ::= Name Eq AttValue * </pre> * <p> * <strong>Note:</strong> This method assumes that the next * character on the stream is the first character of the attribute * name. * <p> * <strong>Note:</strong> This method uses the fAttributeQName and * fQName variables. The contents of these variables will be * destroyed. * * @param attributes The attributes list for the scanned attribute. */
Scans an attribute. <code> [41] Attribute ::= Name Eq AttValue </code> Note: This method assumes that the next character on the stream is the first character of the attribute name. Note: This method uses the fAttributeQName and fQName variables. The contents of these variables will be destroyed
scanAttribute
{ "repo_name": "lostdj/Jaklin-OpenJDK-JAXP", "path": "src/java.xml/share/classes/com/sun/org/apache/xerces/internal/impl/XML11NSDocumentScannerImpl.java", "license": "gpl-2.0", "size": 36428 }
[ "com.sun.org.apache.xerces.internal.impl.msg.XMLMessageFormatter", "com.sun.org.apache.xerces.internal.util.XMLAttributesImpl", "com.sun.org.apache.xerces.internal.util.XMLSymbols", "com.sun.org.apache.xerces.internal.xni.NamespaceContext", "com.sun.org.apache.xerces.internal.xni.XNIException", "java.io.I...
import com.sun.org.apache.xerces.internal.impl.msg.XMLMessageFormatter; import com.sun.org.apache.xerces.internal.util.XMLAttributesImpl; import com.sun.org.apache.xerces.internal.util.XMLSymbols; import com.sun.org.apache.xerces.internal.xni.NamespaceContext; import com.sun.org.apache.xerces.internal.xni.XNIException; import java.io.IOException;
import com.sun.org.apache.xerces.internal.impl.msg.*; import com.sun.org.apache.xerces.internal.util.*; import com.sun.org.apache.xerces.internal.xni.*; import java.io.*;
[ "com.sun.org", "java.io" ]
com.sun.org; java.io;
2,855,691
@Override public void write(DataOutput out) throws IOException { }
void function(DataOutput out) throws IOException { }
/** * Serialize the fields of this object to <code>out</code>. * * @param out <code>DataOuput</code> to serialize this object into. * @throws IOException */
Serialize the fields of this object to <code>out</code>
write
{ "repo_name": "huitseeker/DataVec", "path": "datavec-api/src/main/java/org/datavec/api/split/ListStringSplit.java", "license": "apache-2.0", "size": 3594 }
[ "java.io.DataOutput", "java.io.IOException" ]
import java.io.DataOutput; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
457,241
public HTableDescriptor setDurability(Durability durability) { this.durability = durability; setValue(DURABILITY_KEY, durability.name()); return this; }
HTableDescriptor function(Durability durability) { this.durability = durability; setValue(DURABILITY_KEY, durability.name()); return this; }
/** * Sets the {@link Durability} setting for the table. This defaults to Durability.USE_DEFAULT. * @param durability enum value */
Sets the <code>Durability</code> setting for the table. This defaults to Durability.USE_DEFAULT
setDurability
{ "repo_name": "ibmsoe/hbase", "path": "hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java", "license": "apache-2.0", "size": 57499 }
[ "org.apache.hadoop.hbase.client.Durability" ]
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
2,731,864
return new TestSuite(FlowArrangementTests.class); } public FlowArrangementTests(String name) { super(name); }
return new TestSuite(FlowArrangementTests.class); } public FlowArrangementTests(String name) { super(name); }
/** * Returns the tests as a test suite. * * @return The test suite. */
Returns the tests as a test suite
suite
{ "repo_name": "raedle/univis", "path": "lib/jfreechart-1.0.1/src/org/jfree/chart/block/junit/FlowArrangementTests.java", "license": "lgpl-2.1", "size": 5320 }
[ "junit.framework.TestSuite" ]
import junit.framework.TestSuite;
import junit.framework.*;
[ "junit.framework" ]
junit.framework;
1,148,158
ChaincodeEvent getEvent();
ChaincodeEvent getEvent();
/** * Returns the CHAINCODE type event that will be posted to interested * clients when the chaincode's result is committed to the ledger. * * @return the chaincode event or null */
Returns the CHAINCODE type event that will be posted to interested clients when the chaincode's result is committed to the ledger
getEvent
{ "repo_name": "lukehuangch/fabric", "path": "core/chaincode/shim/java/src/main/java/org/hyperledger/fabric/shim/ChaincodeStub.java", "license": "apache-2.0", "size": 8834 }
[ "org.hyperledger.fabric.protos.peer.ChaincodeEventPackage" ]
import org.hyperledger.fabric.protos.peer.ChaincodeEventPackage;
import org.hyperledger.fabric.protos.peer.*;
[ "org.hyperledger.fabric" ]
org.hyperledger.fabric;
1,879,623
@Test public void triangulateSuccessTridol3() { Vector2[] vertices = this.load(EarClippingTest.class.getResourceAsStream("/org/dyn4j/data/tridol3.dat")); // decompose the poly List<? extends Convex> result = this.algo.triangulate(vertices); // the result should have n - 2 triangles shapes TestCase.assertEquals(vertices.length - 2, result.size()); }
void function() { Vector2[] vertices = this.load(EarClippingTest.class.getResourceAsStream(STR)); List<? extends Convex> result = this.algo.triangulate(vertices); TestCase.assertEquals(vertices.length - 2, result.size()); }
/** * Tests the triangulation implementation against the tridol2 data file. * @since 3.1.10 */
Tests the triangulation implementation against the tridol2 data file
triangulateSuccessTridol3
{ "repo_name": "satishbabusee/dyn4j", "path": "junit/org/dyn4j/geometry/EarClippingTest.java", "license": "bsd-3-clause", "size": 21967 }
[ "java.util.List", "junit.framework.TestCase", "org.dyn4j.geometry.Convex", "org.dyn4j.geometry.Vector2" ]
import java.util.List; import junit.framework.TestCase; import org.dyn4j.geometry.Convex; import org.dyn4j.geometry.Vector2;
import java.util.*; import junit.framework.*; import org.dyn4j.geometry.*;
[ "java.util", "junit.framework", "org.dyn4j.geometry" ]
java.util; junit.framework; org.dyn4j.geometry;
2,606,512
public NFNotaConsultaRetorno consultaNota(final String chaveDeAcesso) throws Exception { return this.wsNotaConsulta.consultaNota(chaveDeAcesso); }
NFNotaConsultaRetorno function(final String chaveDeAcesso) throws Exception { return this.wsNotaConsulta.consultaNota(chaveDeAcesso); }
/** * Faz a consulta da nota * @param chaveDeAcesso chave de acesso da nota * @return dados da consulta da nota retornado pelo webservice * @throws Exception caso nao consiga gerar o xml ou problema de conexao com o sefaz */
Faz a consulta da nota
consultaNota
{ "repo_name": "jefperito/nfe", "path": "src/main/java/com/fincatto/documentofiscal/nfe310/webservices/WSFacade.java", "license": "apache-2.0", "size": 14488 }
[ "com.fincatto.documentofiscal.nfe310.classes.nota.consulta.NFNotaConsultaRetorno" ]
import com.fincatto.documentofiscal.nfe310.classes.nota.consulta.NFNotaConsultaRetorno;
import com.fincatto.documentofiscal.nfe310.classes.nota.consulta.*;
[ "com.fincatto.documentofiscal" ]
com.fincatto.documentofiscal;
1,221,246
public UserDetail[] getAllUsersOfGroups(List<String> groupIds) throws AdminException { if (groupIds == null || groupIds.isEmpty()) { return ArrayUtil.EMPTY_USER_DETAIL_ARRAY; } Connection con = null; try { con = DBUtil.openConnection(); List<UserDetail> users = userDAO.getUsersOfGroups(con, groupIds); return users.toArray(new UserDetail[users.size()]); } catch (Exception e) { throw new AdminException("UserManager.getAllUsersOfGroups", SilverpeasException.ERROR, "admin.EX_ERR_GET_USER_GROUPS", e); } finally { DBUtil.close(con); } }
UserDetail[] function(List<String> groupIds) throws AdminException { if (groupIds == null groupIds.isEmpty()) { return ArrayUtil.EMPTY_USER_DETAIL_ARRAY; } Connection con = null; try { con = DBUtil.openConnection(); List<UserDetail> users = userDAO.getUsersOfGroups(con, groupIds); return users.toArray(new UserDetail[users.size()]); } catch (Exception e) { throw new AdminException(STR, SilverpeasException.ERROR, STR, e); } finally { DBUtil.close(con); } }
/** * Get the users that are in the group or one of his sub-groups * * @param groupIds * @return * @throws AdminException */
Get the users that are in the group or one of his sub-groups
getAllUsersOfGroups
{ "repo_name": "ebonnet/Silverpeas-Core", "path": "core-library/src/main/java/org/silverpeas/core/admin/user/UserManager.java", "license": "agpl-3.0", "size": 32426 }
[ "java.sql.Connection", "java.util.List", "org.silverpeas.core.admin.service.AdminException", "org.silverpeas.core.admin.user.model.UserDetail", "org.silverpeas.core.exception.SilverpeasException", "org.silverpeas.core.persistence.jdbc.DBUtil", "org.silverpeas.core.util.ArrayUtil" ]
import java.sql.Connection; import java.util.List; import org.silverpeas.core.admin.service.AdminException; import org.silverpeas.core.admin.user.model.UserDetail; import org.silverpeas.core.exception.SilverpeasException; import org.silverpeas.core.persistence.jdbc.DBUtil; import org.silverpeas.core.util.ArrayUtil;
import java.sql.*; import java.util.*; import org.silverpeas.core.admin.service.*; import org.silverpeas.core.admin.user.model.*; import org.silverpeas.core.exception.*; import org.silverpeas.core.persistence.jdbc.*; import org.silverpeas.core.util.*;
[ "java.sql", "java.util", "org.silverpeas.core" ]
java.sql; java.util; org.silverpeas.core;
304,540
public String getSelectedUri() { if(this.getSelectedConnectionOption(). equals(OptionConnect.useOtherConnection.name())){ return Val.chkStr(this.selectedUri); } else { return getDefaultSearchUri(); } }
String function() { if(this.getSelectedConnectionOption(). equals(OptionConnect.useOtherConnection.name())){ return Val.chkStr(this.selectedUri); } else { return getDefaultSearchUri(); } }
/** * Gets the selected uri. * * @return the selected uri */
Gets the selected uri
getSelectedUri
{ "repo_name": "treejames/GeoprocessingAppstore", "path": "src/com/esri/gpt/catalog/search/SearchFilterConnection.java", "license": "apache-2.0", "size": 6265 }
[ "com.esri.gpt.framework.util.Val" ]
import com.esri.gpt.framework.util.Val;
import com.esri.gpt.framework.util.*;
[ "com.esri.gpt" ]
com.esri.gpt;
2,866,488
public void setActivateOnItemClick(boolean activateOnItemClick) { // When setting CHOICE_MODE_SINGLE, ListView will automatically // give items the 'activated' state when touched. getListView().setChoiceMode(activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
void function(boolean activateOnItemClick) { getListView().setChoiceMode(activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
/** * Turns on activate-on-click mode. When this mode is on, list items will be * given the 'activated' state when touched. */
Turns on activate-on-click mode. When this mode is on, list items will be given the 'activated' state when touched
setActivateOnItemClick
{ "repo_name": "Arcus92/PlayMusicExporter", "path": "playmusicexporter/src/main/java/de/arcus/playmusicexporter2/fragments/MusicContainerListFragment.java", "license": "mit", "size": 6975 }
[ "android.widget.ListView" ]
import android.widget.ListView;
import android.widget.*;
[ "android.widget" ]
android.widget;
2,114,890
protected String stripEmptyValues(String value) { for (String string : EMPTY_VALUES) { if (StringUtils.equalsIgnoreCase(value, string)) { return null; } } return value; } /** * {@inheritDoc}
String function(String value) { for (String string : EMPTY_VALUES) { if (StringUtils.equalsIgnoreCase(value, string)) { return null; } } return value; } /** * {@inheritDoc}
/** * Removes empty value markers. */
Removes empty value markers
stripEmptyValues
{ "repo_name": "cschneider/openhab", "path": "bundles/binding/org.openhab.binding.weather/src/main/java/org/openhab/binding/weather/internal/parser/AbstractWeatherParser.java", "license": "epl-1.0", "size": 6140 }
[ "org.apache.commons.lang.StringUtils" ]
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.*;
[ "org.apache.commons" ]
org.apache.commons;
905,004
@Test @Category(RunnableOnService.class) public void testOutputTimeFnLatest() { p.apply( Create.timestamped( TimestampedValue.of(KV.of(0, "hello"), new Instant(0)), TimestampedValue.of(KV.of(0, "goodbye"), new Instant(10)))) .apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10))) .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp())) .apply(GroupByKey.<Integer, String>create()) .apply(ParDo.of(new AssertTimestamp(new Instant(10)))); p.run(); } private static class AssertTimestamp<K, V> extends DoFn<KV<K, V>, Void> { private final Instant timestamp; public AssertTimestamp(Instant timestamp) { this.timestamp = timestamp; }
@Category(RunnableOnService.class) void function() { p.apply( Create.timestamped( TimestampedValue.of(KV.of(0, "hello"), new Instant(0)), TimestampedValue.of(KV.of(0, STR), new Instant(10)))) .apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10))) .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp())) .apply(GroupByKey.<Integer, String>create()) .apply(ParDo.of(new AssertTimestamp(new Instant(10)))); p.run(); } private static class AssertTimestamp<K, V> extends DoFn<KV<K, V>, Void> { private final Instant timestamp; public AssertTimestamp(Instant timestamp) { this.timestamp = timestamp; }
/** * Tests that when two elements are combined via a GroupByKey their output timestamp agrees * with the windowing function customized to use the latest value. */
Tests that when two elements are combined via a GroupByKey their output timestamp agrees with the windowing function customized to use the latest value
testOutputTimeFnLatest
{ "repo_name": "chamikaramj/incubator-beam", "path": "sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java", "license": "apache-2.0", "size": 19993 }
[ "org.apache.beam.sdk.testing.RunnableOnService", "org.apache.beam.sdk.transforms.windowing.FixedWindows", "org.apache.beam.sdk.transforms.windowing.OutputTimeFns", "org.apache.beam.sdk.transforms.windowing.Window", "org.apache.beam.sdk.values.KV", "org.apache.beam.sdk.values.TimestampedValue", "org.joda...
import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TimestampedValue; import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.experimental.categories.Category;
import org.apache.beam.sdk.testing.*; import org.apache.beam.sdk.transforms.windowing.*; import org.apache.beam.sdk.values.*; import org.joda.time.*; import org.junit.experimental.categories.*;
[ "org.apache.beam", "org.joda.time", "org.junit.experimental" ]
org.apache.beam; org.joda.time; org.junit.experimental;
1,375,651
public void setMappedWriteMethod(final Method mappedSetter) throws IntrospectionException { mappedWriteMethodRef = new MappedMethodReference(mappedSetter); findMappedPropertyType(); }
void function(final Method mappedSetter) throws IntrospectionException { mappedWriteMethodRef = new MappedMethodReference(mappedSetter); findMappedPropertyType(); }
/** * Sets the method that should be used to write the property value. * * @param mappedSetter The mapped setter method. * @throws IntrospectionException If an error occurs finding the * mapped property */
Sets the method that should be used to write the property value
setMappedWriteMethod
{ "repo_name": "apache/commons-beanutils", "path": "src/main/java/org/apache/commons/beanutils2/MappedPropertyDescriptor.java", "license": "apache-2.0", "size": 20761 }
[ "java.beans.IntrospectionException", "java.lang.reflect.Method" ]
import java.beans.IntrospectionException; import java.lang.reflect.Method;
import java.beans.*; import java.lang.reflect.*;
[ "java.beans", "java.lang" ]
java.beans; java.lang;
1,797,638
void deleteDistributor( DistributorTO distributor );
void deleteDistributor( DistributorTO distributor );
/** * Metodo que se encarga de eliminar al distribuidor * * @param distributor */
Metodo que se encarga de eliminar al distribuidor
deleteDistributor
{ "repo_name": "sidlors/digital-booking", "path": "digital-booking-integration-api/src/main/java/mx/com/cinepolis/digital/booking/integration/distributor/ServiceAdminDistributorIntegratorEJB.java", "license": "epl-1.0", "size": 1697 }
[ "mx.com.cinepolis.digital.booking.commons.to.DistributorTO" ]
import mx.com.cinepolis.digital.booking.commons.to.DistributorTO;
import mx.com.cinepolis.digital.booking.commons.to.*;
[ "mx.com.cinepolis" ]
mx.com.cinepolis;
225,082
void deleteEmptyDir(String namespacedLogBaseDir, Location dir) throws IOException { LOG.debug("Got path {}", dir); Location namespacedLogBaseLocation = rootDir.append(namespacesDir).append(namespacedLogBaseDir); deleteEmptyDirsInNamespace(namespacedLogBaseLocation, dir); }
void deleteEmptyDir(String namespacedLogBaseDir, Location dir) throws IOException { LOG.debug(STR, dir); Location namespacedLogBaseLocation = rootDir.append(namespacesDir).append(namespacedLogBaseDir); deleteEmptyDirsInNamespace(namespacedLogBaseLocation, dir); }
/** * For the specified directory to be deleted, finds its namespaced log location, then deletes * @param namespacedLogBaseDir namespaced log base dir without the root dir prefixed * @param dir dir to delete * @throws IOException */
For the specified directory to be deleted, finds its namespaced log location, then deletes
deleteEmptyDir
{ "repo_name": "hsaputra/cdap", "path": "cdap-watchdog/src/main/java/co/cask/cdap/logging/write/LogCleanup.java", "license": "apache-2.0", "size": 5639 }
[ "java.io.IOException", "org.apache.twill.filesystem.Location" ]
import java.io.IOException; import org.apache.twill.filesystem.Location;
import java.io.*; import org.apache.twill.filesystem.*;
[ "java.io", "org.apache.twill" ]
java.io; org.apache.twill;
462,603
@Override public void onNeighborBlockChange(World world, int x, int y, int z, Block neighbor) { if (!world.isRemote) { if (!World.doesBlockHaveSolidTopSurface(world, x, y-1, z)) { breakBlock(world, x, y, z, this, world.getBlockMetadata(x, y, z)); world.setBlockToAir(x, y, z); } } }
void function(World world, int x, int y, int z, Block neighbor) { if (!world.isRemote) { if (!World.doesBlockHaveSolidTopSurface(world, x, y-1, z)) { breakBlock(world, x, y, z, this, world.getBlockMetadata(x, y, z)); world.setBlockToAir(x, y, z); } } }
/** * Checks to see if its valid to put this block at the specified coordinates. Args: world, x, y, z */
Checks to see if its valid to put this block at the specified coordinates. Args: world, x, y, z
onNeighborBlockChange
{ "repo_name": "shuttler67/ForgeMod", "path": "src/main/java/com/shuttler67/demonomancy/block/BlockPentacle.java", "license": "gpl-3.0", "size": 5150 }
[ "net.minecraft.block.Block", "net.minecraft.world.World" ]
import net.minecraft.block.Block; import net.minecraft.world.World;
import net.minecraft.block.*; import net.minecraft.world.*;
[ "net.minecraft.block", "net.minecraft.world" ]
net.minecraft.block; net.minecraft.world;
2,717,375
public void submitTarGzipAndUpdate( Map<String, LocalResource> providerResources) throws IOException, BadClusterStateException { Path dependencyLibTarGzip = getDependencyTarGzip(); LocalResource lc = createAmResource(dependencyLibTarGzip, LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION); providerResources.put(YarnServiceConstants.DEPENDENCY_LOCALIZED_DIR_LINK, lc); }
void function( Map<String, LocalResource> providerResources) throws IOException, BadClusterStateException { Path dependencyLibTarGzip = getDependencyTarGzip(); LocalResource lc = createAmResource(dependencyLibTarGzip, LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION); providerResources.put(YarnServiceConstants.DEPENDENCY_LOCALIZED_DIR_LINK, lc); }
/** * Submit the AM tar.gz resource referenced by the instance's cluster * filesystem. Also, update the providerResources object with the new * resource. * * @param providerResources * the provider resource map to be updated * @throws IOException * trouble copying to HDFS */
Submit the AM tar.gz resource referenced by the instance's cluster filesystem. Also, update the providerResources object with the new resource
submitTarGzipAndUpdate
{ "repo_name": "nandakumar131/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java", "license": "apache-2.0", "size": 19834 }
[ "java.io.IOException", "java.util.Map", "org.apache.hadoop.fs.Path", "org.apache.hadoop.yarn.api.records.LocalResource", "org.apache.hadoop.yarn.api.records.LocalResourceType", "org.apache.hadoop.yarn.api.records.LocalResourceVisibility", "org.apache.hadoop.yarn.service.conf.YarnServiceConstants", "or...
import java.io.IOException; import java.util.Map; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.service.conf.YarnServiceConstants; import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.service.conf.*; import org.apache.hadoop.yarn.service.exceptions.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
510,482
@Override public Map<String, TableDescriptor> getByNamespace(String name) throws IOException { Map<String, TableDescriptor> htds = new TreeMap<>(); List<Path> tableDirs = FSUtils.getLocalTableDirs(fs, FSUtils.getNamespaceDir(rootdir, name)); for (Path d: tableDirs) { TableDescriptor htd = null; try { htd = get(FSUtils.getTableName(d)); } catch (FileNotFoundException fnfe) { // inability of retrieving one HTD shouldn't stop getting the remaining LOG.warn("Trouble retrieving htd", fnfe); } if (htd == null) continue; htds.put(FSUtils.getTableName(d).getNameAsString(), htd); } return htds; }
Map<String, TableDescriptor> function(String name) throws IOException { Map<String, TableDescriptor> htds = new TreeMap<>(); List<Path> tableDirs = FSUtils.getLocalTableDirs(fs, FSUtils.getNamespaceDir(rootdir, name)); for (Path d: tableDirs) { TableDescriptor htd = null; try { htd = get(FSUtils.getTableName(d)); } catch (FileNotFoundException fnfe) { LOG.warn(STR, fnfe); } if (htd == null) continue; htds.put(FSUtils.getTableName(d).getNameAsString(), htd); } return htds; }
/** * Find descriptors by namespace. * @see #get(org.apache.hadoop.hbase.TableName) */
Find descriptors by namespace
getByNamespace
{ "repo_name": "vincentpoon/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java", "license": "apache-2.0", "size": 33068 }
[ "java.io.FileNotFoundException", "java.io.IOException", "java.util.List", "java.util.Map", "java.util.TreeMap", "org.apache.hadoop.fs.Path", "org.apache.hadoop.hbase.client.TableDescriptor" ]
import java.io.FileNotFoundException; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.TableDescriptor;
import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.client.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
1,386,879
public Vector getViews() { return views; }
Vector function() { return views; }
/** * Returns a vector containing all the views those are contained by this <code>MDIFrame</code>. * @return Returns the views. */
Returns a vector containing all the views those are contained by this <code>MDIFrame</code>
getViews
{ "repo_name": "google-code/aeliamdi", "path": "src/org/aeliamdi/MDIFrame.java", "license": "lgpl-2.1", "size": 43557 }
[ "java.util.Vector" ]
import java.util.Vector;
import java.util.*;
[ "java.util" ]
java.util;
1,061,969
@Override public ArrayList<T> parseRows(Document doc, HocElement adHocHeader, boolean returnRawHtml, Class<T> hocElementClass, int targetSectionIndex) throws Exception { try { ArrayList<T> rowElements = new ArrayList<T>(); // Use the counter to make every item unique, this is important for // when parsing happens on the RGT side as some items can be // otherwise identical! Integer counter = 0; Elements allReportSections = doc .select(SELECTOR_RAW_ROWS_REPORTTABLE); if (!nonConforming) { parseConformingSection(adHocHeader, returnRawHtml, hocElementClass, targetSectionIndex, rowElements, counter, allReportSections); } else { parseNonConformingSection(returnRawHtml, hocElementClass, targetSectionIndex, rowElements, counter, allReportSections); } log.debug("Number of rows parsed: " + rowElements.size()); return rowElements; } catch (Exception e) { log.error("Error parsing rows"); throw e; } }
ArrayList<T> function(Document doc, HocElement adHocHeader, boolean returnRawHtml, Class<T> hocElementClass, int targetSectionIndex) throws Exception { try { ArrayList<T> rowElements = new ArrayList<T>(); Integer counter = 0; Elements allReportSections = doc .select(SELECTOR_RAW_ROWS_REPORTTABLE); if (!nonConforming) { parseConformingSection(adHocHeader, returnRawHtml, hocElementClass, targetSectionIndex, rowElements, counter, allReportSections); } else { parseNonConformingSection(returnRawHtml, hocElementClass, targetSectionIndex, rowElements, counter, allReportSections); } log.debug(STR + rowElements.size()); return rowElements; } catch (Exception e) { log.error(STR); throw e; } }
/** * Parse report section table rows from the Protex report, returning the * data in a list of HocElements. nonConforming = report sections of the * form: Label: value. Normal (conforming) = report sections that contain * columns of data, each with a header (label) at the top (first row). * * @param doc * the doc * @param adHocHeader * the ad hoc header * @param returnRawHtml * the return raw html * @param hocElementClass * the hoc element class * @return the array list * @throws Exception */
Parse report section table rows from the Protex report, returning the data in a list of HocElements. nonConforming = report sections of the form: Label: value. Normal (conforming) = report sections that contain columns of data, each with a header (label) at the top (first row)
parseRows
{ "repo_name": "blackducksoftware/cf-6x-connector", "path": "src/main/java/com/blackducksoftware/tools/commonframework/connector/protex/report/AdHocParserProtex6.java", "license": "gpl-2.0", "size": 13562 }
[ "com.blackducksoftware.tools.commonframework.standard.protex.report.HocElement", "java.util.ArrayList", "org.jsoup.nodes.Document", "org.jsoup.select.Elements" ]
import com.blackducksoftware.tools.commonframework.standard.protex.report.HocElement; import java.util.ArrayList; import org.jsoup.nodes.Document; import org.jsoup.select.Elements;
import com.blackducksoftware.tools.commonframework.standard.protex.report.*; import java.util.*; import org.jsoup.nodes.*; import org.jsoup.select.*;
[ "com.blackducksoftware.tools", "java.util", "org.jsoup.nodes", "org.jsoup.select" ]
com.blackducksoftware.tools; java.util; org.jsoup.nodes; org.jsoup.select;
2,771,099
@Override protected SessionFactory getSessionFactory() { return _sessionFactory; }
SessionFactory function() { return _sessionFactory; }
/** * Gets the session factory object * @return Session factory */
Gets the session factory object
getSessionFactory
{ "repo_name": "UOC/PeLP", "path": "src/test/java/edu/uoc/pelp/test/model/dao/LocalLoggingDAO.java", "license": "gpl-3.0", "size": 4512 }
[ "org.hibernate.SessionFactory" ]
import org.hibernate.SessionFactory;
import org.hibernate.*;
[ "org.hibernate" ]
org.hibernate;
492,898
@Issue("JENKINS-7494") @Test public void defaultUserAvatarCanBeFetched() throws Exception { User user = User.get("avatar-user", true); HtmlPage page = j.createWebClient().goTo("user/" + user.getDisplayName()); j.assertAllImageLoadSuccessfully(page); }
@Issue(STR) @Test void function() throws Exception { User user = User.get(STR, true); HtmlPage page = j.createWebClient().goTo("user/" + user.getDisplayName()); j.assertAllImageLoadSuccessfully(page); }
/** * Asserts that the default user avatar can be fetched (ie no 404) */
Asserts that the default user avatar can be fetched (ie no 404)
defaultUserAvatarCanBeFetched
{ "repo_name": "rsandell/jenkins", "path": "test/src/test/java/hudson/model/UserTest.java", "license": "mit", "size": 34438 }
[ "com.gargoylesoftware.htmlunit.html.HtmlPage", "org.junit.Test", "org.jvnet.hudson.test.Issue" ]
import com.gargoylesoftware.htmlunit.html.HtmlPage; import org.junit.Test; import org.jvnet.hudson.test.Issue;
import com.gargoylesoftware.htmlunit.html.*; import org.junit.*; import org.jvnet.hudson.test.*;
[ "com.gargoylesoftware.htmlunit", "org.junit", "org.jvnet.hudson" ]
com.gargoylesoftware.htmlunit; org.junit; org.jvnet.hudson;
444,682
@Test public void test1(){ Field field = FieldUtils.getDeclaredField(DangaMemCachedConfig.class, "serverList", true); Alias alias = field.getAnnotation(Alias.class); assertEquals( "@com.feilong.core.bean.Alias(name=memcached.serverlist, sampleValue=172.20.31.23:11211,172.20.31.22:11211)", annotationToStringBuilder.build(alias)); }
void function(){ Field field = FieldUtils.getDeclaredField(DangaMemCachedConfig.class, STR, true); Alias alias = field.getAnnotation(Alias.class); assertEquals( STR, annotationToStringBuilder.build(alias)); }
/** * Test 1. */
Test 1
test1
{ "repo_name": "venusdrogon/feilong-core", "path": "src/test/java/com/feilong/core/lang/annotation/DefaultAnnotationToStringBuilderTest.java", "license": "apache-2.0", "size": 1894 }
[ "com.feilong.core.bean.Alias", "com.feilong.core.entity.DangaMemCachedConfig", "java.lang.reflect.Field", "org.apache.commons.lang3.reflect.FieldUtils", "org.junit.Assert" ]
import com.feilong.core.bean.Alias; import com.feilong.core.entity.DangaMemCachedConfig; import java.lang.reflect.Field; import org.apache.commons.lang3.reflect.FieldUtils; import org.junit.Assert;
import com.feilong.core.bean.*; import com.feilong.core.entity.*; import java.lang.reflect.*; import org.apache.commons.lang3.reflect.*; import org.junit.*;
[ "com.feilong.core", "java.lang", "org.apache.commons", "org.junit" ]
com.feilong.core; java.lang; org.apache.commons; org.junit;
1,958,789
public WizardModel getModel() { return this.model; }
WizardModel function() { return this.model; }
/** * Gets the model for this instance. * * @return The model. */
Gets the model for this instance
getModel
{ "repo_name": "ervandew/formic", "path": "src/java/org/formic/wizard/impl/console/ConsoleWizard.java", "license": "lgpl-2.1", "size": 12491 }
[ "org.pietschy.wizard.WizardModel" ]
import org.pietschy.wizard.WizardModel;
import org.pietschy.wizard.*;
[ "org.pietschy.wizard" ]
org.pietschy.wizard;
2,593,921
public void stopDatabaseLogging() { isDatabaseLogging = false; Utilities.toast(context, R.string.gpsloggingoff, Toast.LENGTH_SHORT); }
void function() { isDatabaseLogging = false; Utilities.toast(context, R.string.gpsloggingoff, Toast.LENGTH_SHORT); }
/** * Stop logging. */
Stop logging
stopDatabaseLogging
{ "repo_name": "GitHubDroid/geodroid_master_update", "path": "geodroidlibrary/src/eu/geopaparazzi/library/gps/GpsDatabaseLogger.java", "license": "gpl-3.0", "size": 14928 }
[ "android.widget.Toast", "eu.geopaparazzi.library.util.Utilities" ]
import android.widget.Toast; import eu.geopaparazzi.library.util.Utilities;
import android.widget.*; import eu.geopaparazzi.library.util.*;
[ "android.widget", "eu.geopaparazzi.library" ]
android.widget; eu.geopaparazzi.library;
2,105,826
@Test public void testSetComponentClass () { nullParameter.setComponentClass("ComboBox"); parameterFromName.setComponentClass("IntSpinner"); noComponentParameter.setComponentClass("DoubleSpinner"); fullParameter.setComponentClass("TimeSpinner"); assertEquals("ComboBox", nullParameter.getComponentClass()); assertEquals("IntSpinner", parameterFromName.getComponentClass()); assertEquals("DoubleSpinner", noComponentParameter.getComponentClass()); assertEquals("TimeSpinner", fullParameter.getComponentClass()); }
void function () { nullParameter.setComponentClass(STR); parameterFromName.setComponentClass(STR); noComponentParameter.setComponentClass(STR); fullParameter.setComponentClass(STR); assertEquals(STR, nullParameter.getComponentClass()); assertEquals(STR, parameterFromName.getComponentClass()); assertEquals(STR, noComponentParameter.getComponentClass()); assertEquals(STR, fullParameter.getComponentClass()); }
/** * Test method for {@link org.clubrockisen.entities.Parameter#setComponentClass(java.lang.String)}. */
Test method for <code>org.clubrockisen.entities.Parameter#setComponentClass(java.lang.String)</code>
testSetComponentClass
{ "repo_name": "Club-Rock-ISEN/EntryManager", "path": "src/test/java/org/clubrockisen/entities/ParameterTest.java", "license": "bsd-3-clause", "size": 9144 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
100,099
@JsonProperty("file") public Handle getFile() { return file; }
@JsonProperty("file") Handle function() { return file; }
/** * <p>Original spec-file type: Handle</p> * <pre> * @optional hid file_name type url remote_md5 remote_sha1 * </pre> * */
Original spec-file type: Handle <code>
getFile
{ "repo_name": "arfathpasha/kb_cufflinks", "path": "lib/src/us/kbase/kbcufflinks/RNASeqDifferentialExpression.java", "license": "mit", "size": 7539 }
[ "com.fasterxml.jackson.annotation.JsonProperty" ]
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.*;
[ "com.fasterxml.jackson" ]
com.fasterxml.jackson;
2,200,326
@Test public void testBaseContainer() { RPObject coin = obj.getSlot("lhand").getFirst().getSlot("container").getFirst(); assertEquals(obj, coin.getBaseContainer()); }
void function() { RPObject coin = obj.getSlot("lhand").getFirst().getSlot(STR).getFirst(); assertEquals(obj, coin.getBaseContainer()); }
/** * Test the base container method that should return the base container of * any contained object. The base container is the container of a object * that is not contained by anyone. * */
Test the base container method that should return the base container of any contained object. The base container is the container of a object that is not contained by anyone
testBaseContainer
{ "repo_name": "nhnb/marauroa", "path": "tests/marauroa/common/game/RPObjectTest.java", "license": "gpl-2.0", "size": 12773 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
114,434
public DemandSet getDemands() { return demands; }
DemandSet function() { return demands; }
/** * Gets the set of additional EVA demands. * * @return the set of additional EVA demands */
Gets the set of additional EVA demands
getDemands
{ "repo_name": "ptgrogan/spacenet", "path": "src/main/java/edu/mit/spacenet/simulator/event/ExplorationProcess.java", "license": "apache-2.0", "size": 6179 }
[ "edu.mit.spacenet.domain.resource.DemandSet" ]
import edu.mit.spacenet.domain.resource.DemandSet;
import edu.mit.spacenet.domain.resource.*;
[ "edu.mit.spacenet" ]
edu.mit.spacenet;
2,208,372
public static boolean createGroup(String name, ChatColor color, int permission) { return createGroup(name, permission, color, false); }
static boolean function(String name, ChatColor color, int permission) { return createGroup(name, permission, color, false); }
/** * Creates a new group * * @param name * The name of the group * @param permission * The permission level of the group * @return if successful */
Creates a new group
createGroup
{ "repo_name": "hypereddie/GGS-Plugin-Pack", "path": "src/com/ep/ggs/groupmanager/API/GroupManagerAPI.java", "license": "gpl-3.0", "size": 4527 }
[ "com.ep.ggs.chat.ChatColor" ]
import com.ep.ggs.chat.ChatColor;
import com.ep.ggs.chat.*;
[ "com.ep.ggs" ]
com.ep.ggs;
215,357
public static void debugLsr(Configuration conf, Path p) throws IOException { debugLsr(conf, p, new PrintingErrorReporter()); }
static void function(Configuration conf, Path p) throws IOException { debugLsr(conf, p, new PrintingErrorReporter()); }
/** * ls -r for debugging purposes */
ls -r for debugging purposes
debugLsr
{ "repo_name": "StackVista/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java", "license": "apache-2.0", "size": 172348 }
[ "java.io.IOException", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.fs.Path" ]
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path;
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,482,085
public static void writeSmartExternalScripts(ResponseWriter writer, String gateKey, String gateValue, String contextBasePath, String[] scriptPaths) throws IOException { writer.write("<script>"); writer.write(" if (typeof window['" + gateKey + "'] == '" + gateValue + "')"); writer.write(" {"); for (int i = 0; i < scriptPaths.length; i++) { writer.write(" document.write("); writer.write(" \"<\" + \"script type='text/javascript' src='/'\" + " + contextBasePath + " +"); writer.write(" \"'" + scriptPaths[i] + "'><\" + \"/script>);"); } writer.write(" var " + gateKey + " = '" + gateValue + "';"); writer.write(" }"); writer.write("</script>"); writer.write(""); writer.write(""); }
static void function(ResponseWriter writer, String gateKey, String gateValue, String contextBasePath, String[] scriptPaths) throws IOException { writer.write(STR); writer.write(STR + gateKey + STR + gateValue + "')"); writer.write(STR); for (int i = 0; i < scriptPaths.length; i++) { writer.write(STR); writer.write(STR<\STRscript type='text/javascript' src='/'\STR + contextBasePath + STR); writer.write(STR'STR'><\STR/script>);STR var STR = 'STR';STR }STR</script>STRSTR"); }
/** * Renders a script that includes an external JavaScript that gets added to * the document through a document.write() if a gatekeeper value is NOT set. * This effectively makes the script inclusion a per request JavaScript * singleton. * * @param writer * the ResponseWriter * @param gateKey * for key value pair * @param gateValue * value for key value pair for gatekeeper * @param contextBasePath * the web app with the script * @param scriptPath * the webapp-relative path * @throws IOException */
Renders a script that includes an external JavaScript that gets added to the document through a document.write() if a gatekeeper value is NOT set. This effectively makes the script inclusion a per request JavaScript singleton
writeSmartExternalScripts
{ "repo_name": "OpenCollabZA/sakai", "path": "jsf2/jsf2-widgets/src/java/org/sakaiproject/jsf2/util/RendererUtil.java", "license": "apache-2.0", "size": 17712 }
[ "java.io.IOException", "javax.faces.context.ResponseWriter" ]
import java.io.IOException; import javax.faces.context.ResponseWriter;
import java.io.*; import javax.faces.context.*;
[ "java.io", "javax.faces" ]
java.io; javax.faces;
1,932,674
private MessageConsumerResources createConsumer() throws Exception { MessageConsumerResources answer; ConnectionResource connectionResource = getOrCreateConnectionResource(); Connection conn = connectionResource.borrowConnection(); try { Session session = conn.createSession(isTransacted(), isTransacted() ? Session.SESSION_TRANSACTED : Session.AUTO_ACKNOWLEDGE); MessageConsumer messageConsumer = getEndpoint().getJmsObjectFactory().createMessageConsumer(session, getEndpoint()); MessageListener handler = createMessageHandler(session); messageConsumer.setMessageListener(handler); answer = new MessageConsumerResources(session, messageConsumer); } catch (Exception e) { log.error("Unable to create the MessageConsumer", e); throw e; } finally { connectionResource.returnConnection(conn); } return answer; }
MessageConsumerResources function() throws Exception { MessageConsumerResources answer; ConnectionResource connectionResource = getOrCreateConnectionResource(); Connection conn = connectionResource.borrowConnection(); try { Session session = conn.createSession(isTransacted(), isTransacted() ? Session.SESSION_TRANSACTED : Session.AUTO_ACKNOWLEDGE); MessageConsumer messageConsumer = getEndpoint().getJmsObjectFactory().createMessageConsumer(session, getEndpoint()); MessageListener handler = createMessageHandler(session); messageConsumer.setMessageListener(handler); answer = new MessageConsumerResources(session, messageConsumer); } catch (Exception e) { log.error(STR, e); throw e; } finally { connectionResource.returnConnection(conn); } return answer; }
/** * Creates a {@link MessageConsumerResources} with a dedicated * {@link Session} required for transacted and InOut consumers. */
Creates a <code>MessageConsumerResources</code> with a dedicated <code>Session</code> required for transacted and InOut consumers
createConsumer
{ "repo_name": "curso007/camel", "path": "components/camel-sjms/src/main/java/org/apache/camel/component/sjms/SjmsConsumer.java", "license": "apache-2.0", "size": 13373 }
[ "javax.jms.Connection", "javax.jms.MessageConsumer", "javax.jms.MessageListener", "javax.jms.Session", "org.apache.camel.component.sjms.jms.ConnectionResource" ]
import javax.jms.Connection; import javax.jms.MessageConsumer; import javax.jms.MessageListener; import javax.jms.Session; import org.apache.camel.component.sjms.jms.ConnectionResource;
import javax.jms.*; import org.apache.camel.component.sjms.jms.*;
[ "javax.jms", "org.apache.camel" ]
javax.jms; org.apache.camel;
187,481
Configuration getRuntimeConf() { return YarnClientConfiguration.CONF.build(); }
Configuration getRuntimeConf() { return YarnClientConfiguration.CONF.build(); }
/** * Specifies YARN as the runtime for REEF cluster. Override this method to use a different * runtime. */
Specifies YARN as the runtime for REEF cluster. Override this method to use a different runtime
getRuntimeConf
{ "repo_name": "cs564/heron", "path": "heron/schedulers/src/java/com/twitter/heron/scheduler/yarn/YarnLauncher.java", "license": "apache-2.0", "size": 8126 }
[ "org.apache.reef.runtime.yarn.client.YarnClientConfiguration", "org.apache.reef.tang.Configuration" ]
import org.apache.reef.runtime.yarn.client.YarnClientConfiguration; import org.apache.reef.tang.Configuration;
import org.apache.reef.runtime.yarn.client.*; import org.apache.reef.tang.*;
[ "org.apache.reef" ]
org.apache.reef;
2,587,698
public static @NotNull String posixQuote(@NotNull String argument) { return shouldWrapWithQuotes(argument) ? "'" + StringUtil.replace(argument, "'", "'\"'\"'") + "'" : argument; }
static @NotNull String function(@NotNull String argument) { return shouldWrapWithQuotes(argument) ? "'" + StringUtil.replace(argument, "'", "'\"'\"'") + "'" : argument; }
/** * When necessary, quotes the specified argument with single quotes, according to the POSIX shell rules, * replacing single quotes with hardly readable but recursion-safe {@code '"'"'}. */
When necessary, quotes the specified argument with single quotes, according to the POSIX shell rules, replacing single quotes with hardly readable but recursion-safe '"'"'
posixQuote
{ "repo_name": "siosio/intellij-community", "path": "platform/util/src/com/intellij/execution/CommandLineUtil.java", "license": "apache-2.0", "size": 22943 }
[ "com.intellij.openapi.util.text.StringUtil", "org.jetbrains.annotations.NotNull" ]
import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.util.text.*; import org.jetbrains.annotations.*;
[ "com.intellij.openapi", "org.jetbrains.annotations" ]
com.intellij.openapi; org.jetbrains.annotations;
363,730
public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder, FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders) { return new FunctionScoreQueryBuilder(queryBuilder, filterFunctionBuilders); }
static FunctionScoreQueryBuilder function(QueryBuilder queryBuilder, FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders) { return new FunctionScoreQueryBuilder(queryBuilder, filterFunctionBuilders); }
/** * A query that allows to define a custom scoring function * * @param queryBuilder The query to custom score * @param filterFunctionBuilders the filters and functions to execute * @return the function score query */
A query that allows to define a custom scoring function
functionScoreQuery
{ "repo_name": "strapdata/elassandra5-rc", "path": "core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java", "license": "apache-2.0", "size": 28890 }
[ "org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder" ]
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.*;
[ "org.elasticsearch.index" ]
org.elasticsearch.index;
54,061
public void setTags(Collection<TagAnnotationData> tags) { this.tags = tags; } public Collection<TermAnnotationData> getTerms() { return terms; }
void function(Collection<TagAnnotationData> tags) { this.tags = tags; } public Collection<TermAnnotationData> getTerms() { return terms; }
/** * Sets the collections of tags. * * @param tags The value to set. */
Sets the collections of tags
setTags
{ "repo_name": "jballanc/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/env/data/util/StructuredDataResults.java", "license": "gpl-2.0", "size": 8198 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
1,073,128
@Test public void testIntersectsRegionNegativeAdjacentXNonAdjacentY() { final SpatialRegion r = new SpatialRegion(3, 5, 0, 2); Assert.assertFalse(r.intersects(new SpatialRegion(1, 2, 5, 6))); }
void function() { final SpatialRegion r = new SpatialRegion(3, 5, 0, 2); Assert.assertFalse(r.intersects(new SpatialRegion(1, 2, 5, 6))); }
/** * Test method for * {@link se.kth.speech.SpatialRegion#intersects(se.kth.speech.SpatialMap.SpatialRegion)}. */
Test method for <code>se.kth.speech.SpatialRegion#intersects(se.kth.speech.SpatialMap.SpatialRegion)</code>
testIntersectsRegionNegativeAdjacentXNonAdjacentY
{ "repo_name": "errantlinguist/tangrams-restricted", "path": "game/src/test/java/se/kth/speech/SpatialRegionTest.java", "license": "gpl-3.0", "size": 12937 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,485,671
EClass getDefinitionParseResult();
EClass getDefinitionParseResult();
/** * Returns the meta object for class '{@link org.asup.db.syntax.QDefinitionParseResult <em>Definition Parse Result</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Definition Parse Result</em>'. * @see org.asup.db.syntax.QDefinitionParseResult * @generated */
Returns the meta object for class '<code>org.asup.db.syntax.QDefinitionParseResult Definition Parse Result</code>'.
getDefinitionParseResult
{ "repo_name": "asupdev/asup", "path": "org.asup.db.syntax/src/org/asup/db/syntax/QDatabaseSyntaxPackage.java", "license": "epl-1.0", "size": 54597 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,059,219
private ArrayWritable createStruct(Object obj, CarbonColumn carbonColumn) throws IOException { if (obj instanceof GenericInternalRow) { Object[] objArray = ((GenericInternalRow) obj).values(); List<CarbonDimension> childCarbonDimensions = null; if (carbonColumn.isDimension() && carbonColumn.getColumnSchema().getNumberOfChild() > 0) { childCarbonDimensions = ((CarbonDimension) carbonColumn).getListOfChildDimensions(); } if (null != childCarbonDimensions) { Writable[] arr = new Writable[objArray.length]; for (int i = 0; i < objArray.length; i++) { arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i)); } return new ArrayWritable(Writable.class, arr); } } throw new IOException("DataType not supported in Carbondata"); }
ArrayWritable function(Object obj, CarbonColumn carbonColumn) throws IOException { if (obj instanceof GenericInternalRow) { Object[] objArray = ((GenericInternalRow) obj).values(); List<CarbonDimension> childCarbonDimensions = null; if (carbonColumn.isDimension() && carbonColumn.getColumnSchema().getNumberOfChild() > 0) { childCarbonDimensions = ((CarbonDimension) carbonColumn).getListOfChildDimensions(); } if (null != childCarbonDimensions) { Writable[] arr = new Writable[objArray.length]; for (int i = 0; i < objArray.length; i++) { arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i)); } return new ArrayWritable(Writable.class, arr); } } throw new IOException(STR); }
/** * Create the Struct data for the Struct Datatype * * @param obj * @param carbonColumn * @return * @throws IOException */
Create the Struct data for the Struct Datatype
createStruct
{ "repo_name": "ravipesala/incubator-carbondata", "path": "integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java", "license": "apache-2.0", "size": 9598 }
[ "java.io.IOException", "java.util.List", "org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn", "org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension", "org.apache.hadoop.io.ArrayWritable", "org.apache.hadoop.io.Writable", "org.apache.spark.sql.catalyst.expressions.Ge...
import java.io.IOException; import java.util.List; import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn; import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.Writable; import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import java.io.*; import java.util.*; import org.apache.carbondata.core.metadata.schema.table.column.*; import org.apache.hadoop.io.*; import org.apache.spark.sql.catalyst.expressions.*;
[ "java.io", "java.util", "org.apache.carbondata", "org.apache.hadoop", "org.apache.spark" ]
java.io; java.util; org.apache.carbondata; org.apache.hadoop; org.apache.spark;
2,442,200
public PhraseSuggestionBuilder addCandidateGenerator(CandidateGenerator generator) { List<CandidateGenerator> list = this.generators.get(generator.getType()); if (list == null) { list = new ArrayList<>(); this.generators.put(generator.getType(), list); } list.add(generator); return this; }
PhraseSuggestionBuilder function(CandidateGenerator generator) { List<CandidateGenerator> list = this.generators.get(generator.getType()); if (list == null) { list = new ArrayList<>(); this.generators.put(generator.getType(), list); } list.add(generator); return this; }
/** * Adds a {@link CandidateGenerator} to this suggester. The * {@link CandidateGenerator} is used to draw candidates for each individual * phrase term before the candidates are scored. */
Adds a <code>CandidateGenerator</code> to this suggester. The <code>CandidateGenerator</code> is used to draw candidates for each individual phrase term before the candidates are scored
addCandidateGenerator
{ "repo_name": "xuzha/elasticsearch", "path": "core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java", "license": "apache-2.0", "size": 32412 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
335,321
public void setHttpAuthUsernamePassword(String host, String realm, String username, String password) { if (host == null || realm == null || !waitForInit()) { return; } final ContentValues c = new ContentValues(); c.put(HTTPAUTH_HOST_COL, host); c.put(HTTPAUTH_REALM_COL, realm); c.put(HTTPAUTH_USERNAME_COL, username); c.put(HTTPAUTH_PASSWORD_COL, password); mDatabase.insert(HTTPAUTH_TABLE_NAME, HTTPAUTH_HOST_COL, c); }
void function(String host, String realm, String username, String password) { if (host == null realm == null !waitForInit()) { return; } final ContentValues c = new ContentValues(); c.put(HTTPAUTH_HOST_COL, host); c.put(HTTPAUTH_REALM_COL, realm); c.put(HTTPAUTH_USERNAME_COL, username); c.put(HTTPAUTH_PASSWORD_COL, password); mDatabase.insert(HTTPAUTH_TABLE_NAME, HTTPAUTH_HOST_COL, c); }
/** * Sets the HTTP authentication password. Tuple (HTTPAUTH_HOST_COL, HTTPAUTH_REALM_COL, * HTTPAUTH_USERNAME_COL) is unique. * * @param host the host for the password * @param realm the realm for the password * @param username the username for the password. * @param password the password */
Sets the HTTP authentication password. Tuple (HTTPAUTH_HOST_COL, HTTPAUTH_REALM_COL, HTTPAUTH_USERNAME_COL) is unique
setHttpAuthUsernamePassword
{ "repo_name": "qtekfun/htcDesire820Kernel", "path": "external/chromium_org/android_webview/java/src/org/chromium/android_webview/HttpAuthDatabase.java", "license": "gpl-2.0", "size": 8949 }
[ "android.content.ContentValues" ]
import android.content.ContentValues;
import android.content.*;
[ "android.content" ]
android.content;
432,248
public int compare(RecordKey secondKey, DataRecord record1, DataRecord record2) { int compResult; int[] record2KeyFields = secondKey.getKeyFields(); if (keyFields.length != record2KeyFields.length) { throw new RuntimeException("Can't compare. keys have different number of DataFields"); } if (useCollator) { for (int i = 0; i < keyFields.length; i++) { final DataField field1 = record1.getField(keyFields[i]); if (collators[i] != null && field1.getMetadata().getDataType() == DataFieldType.STRING) { compResult = ((StringDataField) field1).compareTo( record2.getField(record2KeyFields[i]),collators[i]); }else{ compResult = field1.compareTo( record2.getField(record2KeyFields[i])); } if (compResult != 0) { if (equalNULLs) { if (!(record1.getField(keyFields[i]).isNull() && record2 .getField(record2KeyFields[i]).isNull())) { return orderCorrection(i, compResult); } continue; } return orderCorrection(i, compResult); } } }else{ for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo( record2.getField(record2KeyFields[i])); if (compResult != 0) { if (equalNULLs) { if (!(record1.getField(keyFields[i]).isNull() && record2 .getField(record2KeyFields[i]).isNull())) { return orderCorrection(i, compResult); } continue; } return orderCorrection(i, compResult); } } } return 0; // seem to be the same }
int function(RecordKey secondKey, DataRecord record1, DataRecord record2) { int compResult; int[] record2KeyFields = secondKey.getKeyFields(); if (keyFields.length != record2KeyFields.length) { throw new RuntimeException(STR); } if (useCollator) { for (int i = 0; i < keyFields.length; i++) { final DataField field1 = record1.getField(keyFields[i]); if (collators[i] != null && field1.getMetadata().getDataType() == DataFieldType.STRING) { compResult = ((StringDataField) field1).compareTo( record2.getField(record2KeyFields[i]),collators[i]); }else{ compResult = field1.compareTo( record2.getField(record2KeyFields[i])); } if (compResult != 0) { if (equalNULLs) { if (!(record1.getField(keyFields[i]).isNull() && record2 .getField(record2KeyFields[i]).isNull())) { return orderCorrection(i, compResult); } continue; } return orderCorrection(i, compResult); } } }else{ for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo( record2.getField(record2KeyFields[i])); if (compResult != 0) { if (equalNULLs) { if (!(record1.getField(keyFields[i]).isNull() && record2 .getField(record2KeyFields[i]).isNull())) { return orderCorrection(i, compResult); } continue; } return orderCorrection(i, compResult); } } } return 0; }
/** * Compares two records (can have different layout) based on defined * key-fields and returns (-1;0;1) if (< ; = ; >).<br> * The particular fields to be compared have to be of the same type ! * * @param secondKey * RecordKey defined for the second record * @param record1 * First record * @param record2 * Second record * @return -1 ; 0 ; 1 */
Compares two records (can have different layout) based on defined key-fields and returns (-1;0;1) if (). The particular fields to be compared have to be of the same type
compare
{ "repo_name": "CloverETL/CloverETL-Engine", "path": "cloveretl.engine/src/org/jetel/data/RecordComparator.java", "license": "lgpl-2.1", "size": 15400 }
[ "org.jetel.metadata.DataFieldType" ]
import org.jetel.metadata.DataFieldType;
import org.jetel.metadata.*;
[ "org.jetel.metadata" ]
org.jetel.metadata;
590,001
@Override public Class<MetaFeedRecord> getRecordType() { return MetaFeedRecord.class; } public final TableField<MetaFeedRecord, Integer> ID = createField(DSL.name("id"), org.jooq.impl.SQLDataType.INTEGER.nullable(false).identity(true), this, ""); public final TableField<MetaFeedRecord, String> NAME = createField(DSL.name("name"), org.jooq.impl.SQLDataType.VARCHAR(255).nullable(false), this, ""); public MetaFeed() { this(DSL.name("meta_feed"), null); } public MetaFeed(String alias) { this(DSL.name(alias), META_FEED); } public MetaFeed(Name alias) { this(alias, META_FEED); } private MetaFeed(Name alias, Table<MetaFeedRecord> aliased) { this(alias, aliased, null); } private MetaFeed(Name alias, Table<MetaFeedRecord> aliased, Field<?>[] parameters) { super(alias, null, aliased, parameters, DSL.comment("")); } public <O extends Record> MetaFeed(Table<O> child, ForeignKey<O, MetaFeedRecord> key) { super(child, key, META_FEED); }
Class<MetaFeedRecord> function() { return MetaFeedRecord.class; } public final TableField<MetaFeedRecord, Integer> ID = createField(DSL.name("id"), org.jooq.impl.SQLDataType.INTEGER.nullable(false).identity(true), this, STRnameSTRSTRmeta_feedSTR")); } public <O extends Record> MetaFeed(Table<O> child, ForeignKey<O, MetaFeedRecord> key) { super(child, key, META_FEED); }
/** * The class holding records for this type */
The class holding records for this type
getRecordType
{ "repo_name": "gchq/stroom", "path": "stroom-meta/stroom-meta-impl-db-jooq/src/main/java/stroom/meta/impl/db/jooq/tables/MetaFeed.java", "license": "apache-2.0", "size": 3996 }
[ "org.jooq.ForeignKey", "org.jooq.Record", "org.jooq.Table", "org.jooq.TableField", "org.jooq.impl.DSL" ]
import org.jooq.ForeignKey; import org.jooq.Record; import org.jooq.Table; import org.jooq.TableField; import org.jooq.impl.DSL;
import org.jooq.*; import org.jooq.impl.*;
[ "org.jooq", "org.jooq.impl" ]
org.jooq; org.jooq.impl;
2,101,437
protected void onClickAddWidgetButton(View view) { if (LOGD) Log.d(TAG, "onClickAddWidgetButton"); if (mIsSafeModeEnabled) { Toast.makeText(this, R.string.safemode_widget_error, Toast.LENGTH_SHORT).show(); } else { showWidgetsView(true , true ); if (mLauncherCallbacks != null) { mLauncherCallbacks.onClickAddWidgetButton(view); } } }
void function(View view) { if (LOGD) Log.d(TAG, STR); if (mIsSafeModeEnabled) { Toast.makeText(this, R.string.safemode_widget_error, Toast.LENGTH_SHORT).show(); } else { showWidgetsView(true , true ); if (mLauncherCallbacks != null) { mLauncherCallbacks.onClickAddWidgetButton(view); } } }
/** * Event handler for the (Add) Widgets button that appears after a long press * on the home screen. */
Event handler for the (Add) Widgets button that appears after a long press on the home screen
onClickAddWidgetButton
{ "repo_name": "bojanvu23/android_packages_apps_Trebuchet_Gradle", "path": "Trebuchet/src/main/java/com/lite/android/launcher3/Launcher.java", "license": "apache-2.0", "size": 211390 }
[ "android.util.Log", "android.view.View", "android.widget.Toast" ]
import android.util.Log; import android.view.View; import android.widget.Toast;
import android.util.*; import android.view.*; import android.widget.*;
[ "android.util", "android.view", "android.widget" ]
android.util; android.view; android.widget;
2,658,464
public Object[] toArray() { int j = -1; Object[] array = new Object[size]; // Iterates over the values, adding them to the array. for (Iterator<V> iterator = iterator(); iterator.hasNext();) { array[++j] = iterator.next(); } return array; }
Object[] function() { int j = -1; Object[] array = new Object[size]; for (Iterator<V> iterator = iterator(); iterator.hasNext();) { array[++j] = iterator.next(); } return array; }
/** * Translates the mapped pairs' values into an array of Objects * * @return an object array of all the values currently in the map. */
Translates the mapped pairs' values into an array of Objects
toArray
{ "repo_name": "fnp/pylucene", "path": "lucene-java-3.5.0/lucene/contrib/facet/src/java/org/apache/lucene/util/collections/ArrayHashMap.java", "license": "apache-2.0", "size": 15306 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
300,980
@FlakyTest(tolerance = 3) public void testPerformAutoCleanupShouldNotDeleteBecauseInQueue_withFeedsWithNoMedia() throws IOException { // add feed with no enclosures so that item ID != media ID saveFeedlist(context, 1, 10, false); // add candidate for performAutoCleanup List<Feed> feeds = saveFeedlist(context, 1, 1, true); FeedMedia m = feeds.get(0).getItems().get(0).getMedia(); m.setDownloaded(true); m.setFile_url("file"); PodDBAdapter adapter = new PodDBAdapter(context); adapter.open(); adapter.setMedia(m); adapter.close(); testPerformAutoCleanupShouldNotDeleteBecauseInQueue(); }
@FlakyTest(tolerance = 3) void function() throws IOException { saveFeedlist(context, 1, 10, false); List<Feed> feeds = saveFeedlist(context, 1, 1, true); FeedMedia m = feeds.get(0).getItems().get(0).getMedia(); m.setDownloaded(true); m.setFile_url("file"); PodDBAdapter adapter = new PodDBAdapter(context); adapter.open(); adapter.setMedia(m); adapter.close(); testPerformAutoCleanupShouldNotDeleteBecauseInQueue(); }
/** * Reproduces a bug where DBTasks.performAutoCleanup(android.content.Context) would use the ID of the FeedItem in the * call to DBWriter.deleteFeedMediaOfItem instead of the ID of the FeedMedia. This would cause the wrong item to be deleted. * @throws IOException */
Reproduces a bug where DBTasks.performAutoCleanup(android.content.Context) would use the ID of the FeedItem in the call to DBWriter.deleteFeedMediaOfItem instead of the ID of the FeedMedia. This would cause the wrong item to be deleted
testPerformAutoCleanupShouldNotDeleteBecauseInQueue_withFeedsWithNoMedia
{ "repo_name": "wangjun/AntennaPod", "path": "app/src/androidTest/java/de/test/antennapod/storage/DBTasksTest.java", "license": "mit", "size": 11400 }
[ "android.test.FlakyTest", "de.danoeh.antennapod.core.feed.Feed", "de.danoeh.antennapod.core.feed.FeedMedia", "de.danoeh.antennapod.core.storage.PodDBAdapter", "de.test.antennapod.storage.DBTestUtils", "java.io.IOException", "java.util.List" ]
import android.test.FlakyTest; import de.danoeh.antennapod.core.feed.Feed; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.storage.PodDBAdapter; import de.test.antennapod.storage.DBTestUtils; import java.io.IOException; import java.util.List;
import android.test.*; import de.danoeh.antennapod.core.feed.*; import de.danoeh.antennapod.core.storage.*; import de.test.antennapod.storage.*; import java.io.*; import java.util.*;
[ "android.test", "de.danoeh.antennapod", "de.test.antennapod", "java.io", "java.util" ]
android.test; de.danoeh.antennapod; de.test.antennapod; java.io; java.util;
2,172,956
private boolean isValidDomain( String domain ) { return !StringUtils.endsWithAny( domain, invalidDomains.toArray( new String[invalidDomains.size()] ) ); }
boolean function( String domain ) { return !StringUtils.endsWithAny( domain, invalidDomains.toArray( new String[invalidDomains.size()] ) ); }
/** * Checks if the given domain ends with any of the invalid domains listed in sakai.properties * @param domain the domain suffix to be checked * @return true if the domain is valid; false otherwise */
Checks if the given domain ends with any of the invalid domains listed in sakai.properties
isValidDomain
{ "repo_name": "OpenCollabZA/sakai", "path": "site-manage/site-manage-participant-helper/src/java/org/sakaiproject/site/tool/helper/participant/impl/SiteAddParticipantHandler.java", "license": "apache-2.0", "size": 43398 }
[ "org.apache.commons.lang3.StringUtils" ]
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.*;
[ "org.apache.commons" ]
org.apache.commons;
427,624
private int _getItems( FacesContext context, RenderingContext arc, UIComponent component, List<SelectItem> items, long minValue, long maxValue, long value, int blockSize, UIComponent rangeLabel) { int selectedIndex = -1; boolean maxUnknown = (maxValue == XhtmlConstants.MAX_VALUE_UNKNOWN); // Zero-indexed block index. long blockIndex = (value - minValue + blockSize - 1L) / blockSize; // sometimes a record set won't start on a multiple of blockSize. So // remember to add any offset: // this can safely be an int because it is an index into the blockSize, // which is itself an int: int offset = (int) (value - (minValue + (blockIndex * blockSize))); if (offset < 0) offset = offset + blockSize; // Total number of blocks (again, zero-indexed) long maxBlockIndex; if (maxUnknown) maxBlockIndex = blockIndex + 1; else { maxBlockIndex = (maxValue - minValue - offset) / blockSize; if (offset > 0) maxBlockIndex++; } // Calculate the first block that should be shown. The order goes: // Group 0: 0-28 + More // Group 1:Previous + 29-56 + More // Group 2:Previous + 57-84 + More // etc.. long firstBlockIndex; // If everything is visible, or we're in the first group, start at zero. if ((maxBlockIndex <= (_MAX_VISIBLE_OPTIONS - 1L)) || (blockIndex <= (_MAX_VISIBLE_OPTIONS - 2L))) firstBlockIndex = 0; else firstBlockIndex = ((blockIndex - 1L) / (_MAX_VISIBLE_OPTIONS - 2L)) * (_MAX_VISIBLE_OPTIONS - 2L); // And we always show a total of 30 groups (or straight to the end) long lastBlockIndex = firstBlockIndex + (_MAX_VISIBLE_OPTIONS - 1L); if (lastBlockIndex > maxBlockIndex) lastBlockIndex = maxBlockIndex; boolean showAllActive = getShowAll(getFacesBean(component)); // Add "Show All" option if showAll was set to true OR // when there are less than 30 groups (maxBlockIndex // start as zero, hence "29") and only allow it when there's // more than 1 visible item! 
if (showAllActive || (!maxUnknown && (lastBlockIndex > firstBlockIndex) && (maxBlockIndex <= (_MAX_VISIBLE_OPTIONS - 1L)) )) { // Omit show all if it's not supported if (showAllSupported()) { items.add(_createShowAllSelectItem(arc, maxValue)); if (showAllActive) selectedIndex = 0; } } for (blockIndex = firstBlockIndex; blockIndex <= lastBlockIndex; blockIndex++) { long blockStart = minValue + (blockIndex * blockSize); // if there is an offset, then adjust accordingly. for example, if the // offset is 7 (and the blockSize is 10), then the new blockStarts are: // 1-7, 8-17, 18-27, etc ... if (offset > 0) blockStart += (offset - blockSize); final int currentRecordSize; // check to see if this is the very first record set in a table using an // offset: if (blockStart < minValue) { // treat this specially. this is the 1-7 case from the example above: blockStart = minValue; currentRecordSize = offset; } else { currentRecordSize = blockSize; } // return immediately if the start of the next range is not available. if (maxUnknown) { if (!isRowAvailable(component, (int)blockStart - 1)) return selectedIndex; } String text; // Need "Previous..." if ((blockIndex == firstBlockIndex) && (blockIndex != 0)) { text = arc.getTranslatedString(_PREVIOUS_TEXT_KEY); } // Need "More..." (on the last block, either 'cause // the total number of blocks is unknown or we've shown enough blocks // However, don't show More... if the total number of blocks is unknown, // and we checked and found out that the start of the next block doesn't // exist. else if ((blockIndex == lastBlockIndex) && (maxUnknown || (lastBlockIndex < maxBlockIndex))) { text = arc.getTranslatedString(_MORE_TEXT_KEY); } else { text = null; } // =-=AEW I don't understand this next line... long currValue = showAllActive ? 
minValue - 1 : value;// Don't select SelectItem item = _createNavigationItem(context, arc, component, blockStart, currentRecordSize, maxValue, text, rangeLabel); if ((currValue >= blockStart) && (currValue < (blockStart + currentRecordSize))) { selectedIndex = items.size(); } items.add(item); } return selectedIndex; }
int function( FacesContext context, RenderingContext arc, UIComponent component, List<SelectItem> items, long minValue, long maxValue, long value, int blockSize, UIComponent rangeLabel) { int selectedIndex = -1; boolean maxUnknown = (maxValue == XhtmlConstants.MAX_VALUE_UNKNOWN); long blockIndex = (value - minValue + blockSize - 1L) / blockSize; int offset = (int) (value - (minValue + (blockIndex * blockSize))); if (offset < 0) offset = offset + blockSize; long maxBlockIndex; if (maxUnknown) maxBlockIndex = blockIndex + 1; else { maxBlockIndex = (maxValue - minValue - offset) / blockSize; if (offset > 0) maxBlockIndex++; } long firstBlockIndex; if ((maxBlockIndex <= (_MAX_VISIBLE_OPTIONS - 1L)) (blockIndex <= (_MAX_VISIBLE_OPTIONS - 2L))) firstBlockIndex = 0; else firstBlockIndex = ((blockIndex - 1L) / (_MAX_VISIBLE_OPTIONS - 2L)) * (_MAX_VISIBLE_OPTIONS - 2L); long lastBlockIndex = firstBlockIndex + (_MAX_VISIBLE_OPTIONS - 1L); if (lastBlockIndex > maxBlockIndex) lastBlockIndex = maxBlockIndex; boolean showAllActive = getShowAll(getFacesBean(component)); if (showAllActive (!maxUnknown && (lastBlockIndex > firstBlockIndex) && (maxBlockIndex <= (_MAX_VISIBLE_OPTIONS - 1L)) )) { if (showAllSupported()) { items.add(_createShowAllSelectItem(arc, maxValue)); if (showAllActive) selectedIndex = 0; } } for (blockIndex = firstBlockIndex; blockIndex <= lastBlockIndex; blockIndex++) { long blockStart = minValue + (blockIndex * blockSize); if (offset > 0) blockStart += (offset - blockSize); final int currentRecordSize; if (blockStart < minValue) { blockStart = minValue; currentRecordSize = offset; } else { currentRecordSize = blockSize; } if (maxUnknown) { if (!isRowAvailable(component, (int)blockStart - 1)) return selectedIndex; } String text; if ((blockIndex == firstBlockIndex) && (blockIndex != 0)) { text = arc.getTranslatedString(_PREVIOUS_TEXT_KEY); } else if ((blockIndex == lastBlockIndex) && (maxUnknown (lastBlockIndex < maxBlockIndex))) { text = 
arc.getTranslatedString(_MORE_TEXT_KEY); } else { text = null; } long currValue = showAllActive ? minValue - 1 : value; SelectItem item = _createNavigationItem(context, arc, component, blockStart, currentRecordSize, maxValue, text, rangeLabel); if ((currValue >= blockStart) && (currValue < (blockStart + currentRecordSize))) { selectedIndex = items.size(); } items.add(item); } return selectedIndex; }
/** * create each of the choice options and add them onto the List. * @return the number of options added */
create each of the choice options and add them onto the List
_getItems
{ "repo_name": "adamrduffy/trinidad-1.0.x", "path": "trinidad-impl/src/main/java/org/apache/myfaces/trinidadinternal/renderkit/core/xhtml/SelectRangeChoiceBarRenderer.java", "license": "apache-2.0", "size": 46657 }
[ "java.util.List", "javax.faces.component.UIComponent", "javax.faces.context.FacesContext", "javax.faces.model.SelectItem", "org.apache.myfaces.trinidad.context.RenderingContext" ]
import java.util.List; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.model.SelectItem; import org.apache.myfaces.trinidad.context.RenderingContext;
import java.util.*; import javax.faces.component.*; import javax.faces.context.*; import javax.faces.model.*; import org.apache.myfaces.trinidad.context.*;
[ "java.util", "javax.faces", "org.apache.myfaces" ]
java.util; javax.faces; org.apache.myfaces;
2,798,374
public void sendMessages(ServiceBusMessageBatch batch) { Objects.requireNonNull(batch, "'batch' cannot be null."); asyncClient.sendMessages(batch).block(tryTimeout); }
void function(ServiceBusMessageBatch batch) { Objects.requireNonNull(batch, STR); asyncClient.sendMessages(batch).block(tryTimeout); }
/** * Sends a message batch to the Azure Service Bus entity this sender is connected to. * * @param batch of messages which allows client to send maximum allowed size for a batch of messages. * * @throws NullPointerException if {@code batch} is {@code null}. */
Sends a message batch to the Azure Service Bus entity this sender is connected to
sendMessages
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/servicebus/azure-messaging-servicebus/src/main/java/com/azure/messaging/servicebus/ServiceBusSenderClient.java", "license": "mit", "size": 13613 }
[ "java.util.Objects" ]
import java.util.Objects;
import java.util.*;
[ "java.util" ]
java.util;
2,411,320
private void getVBytesForOffset(int kvoff, InMemValBytes vbytes) { final int nextindex = (kvoff / ACCTSIZE == (kvend - 1 + kvoffsets.length) % kvoffsets.length) ? bufend : kvindices[(kvoff + ACCTSIZE + KEYSTART) % kvindices.length]; int vallen = (nextindex >= kvindices[kvoff + VALSTART]) ? nextindex - kvindices[kvoff + VALSTART] : (bufvoid - kvindices[kvoff + VALSTART]) + nextindex; vbytes.reset(kvbuffer, kvindices[kvoff + VALSTART], vallen); } protected class InMemValBytes extends DataInputBuffer { private byte[] buffer; private int start; private int length;
void function(int kvoff, InMemValBytes vbytes) { final int nextindex = (kvoff / ACCTSIZE == (kvend - 1 + kvoffsets.length) % kvoffsets.length) ? bufend : kvindices[(kvoff + ACCTSIZE + KEYSTART) % kvindices.length]; int vallen = (nextindex >= kvindices[kvoff + VALSTART]) ? nextindex - kvindices[kvoff + VALSTART] : (bufvoid - kvindices[kvoff + VALSTART]) + nextindex; vbytes.reset(kvbuffer, kvindices[kvoff + VALSTART], vallen); } protected class InMemValBytes extends DataInputBuffer { private byte[] buffer; private int start; private int length;
/** * Given an offset, populate vbytes with the associated set of * deserialized value bytes. Should only be called during a spill. */
Given an offset, populate vbytes with the associated set of deserialized value bytes. Should only be called during a spill
getVBytesForOffset
{ "repo_name": "wzhuo918/release-1.1.2-MDP", "path": "src/mapred/org/apache/hadoop/mapred/MapTask.java", "license": "apache-2.0", "size": 59540 }
[ "org.apache.hadoop.io.DataInputBuffer" ]
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,512,224
public void normalConnect( String partitionId ) throws KettleDatabaseException { if ( databaseMeta == null ) { throw new KettleDatabaseException( "No valid database connection defined!" ); } try { DataSourceProviderInterface dsp = DataSourceProviderFactory.getDataSourceProviderInterface(); if ( dsp == null ) { // since DataSourceProviderFactory is initialised with new DatabaseUtil(), // this assignment is correct dsp = new DatabaseUtil(); } if ( databaseMeta.getAccessType() == DatabaseMeta.TYPE_ACCESS_JNDI ) { String jndiName = environmentSubstitute( databaseMeta.getDatabaseName() ); try { this.connection = dsp.getNamedDataSource( jndiName, DatasourceType.JNDI ).getConnection(); } catch ( DataSourceNamingException e ) { log.logError( "Unable to find datasource by JNDI name: " + jndiName, e ); throw e; } } else { if ( databaseMeta.isUsingConnectionPool() ) { String name = databaseMeta.getName(); try { try { this.connection = dsp.getNamedDataSource( name, DatasourceType.POOLED ).getConnection(); } catch ( UnsupportedOperationException | NullPointerException e ) { // UnsupportedOperationException is happen at DatabaseUtil doesn't support pooled DS, use legacy routine // NullPointerException is happen when we will try to run the transformation on the remote server but // server does not have such databases, so will using legacy routine as well this.connection = ConnectionPoolUtil.getConnection( log, databaseMeta, partitionId ); } if ( getConnection().getAutoCommit() != isAutoCommit() ) { setAutoCommit( isAutoCommit() ); } } catch ( DataSourceNamingException e ) { log.logError( "Unable to find pooled datasource by its name: " + name, e ); throw e; } } else { // using non-jndi and non-pooled connection -- just a simple JDBC connectUsingClass( databaseMeta.getDriverClass(), partitionId ); } } // See if we need to execute extra SQL statement... 
String sql = environmentSubstitute( databaseMeta.getConnectSQL() ); // only execute if the SQL is not empty, null and is not just a bunch of // spaces, tabs, CR etc. if ( !Utils.isEmpty( sql ) && !Const.onlySpaces( sql ) ) { execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Executed connect time SQL statements:" + Const.CR + sql ); } } } catch ( Exception e ) { throw new KettleDatabaseException( "Error occurred while trying to connect to the database", e ); } }
void function( String partitionId ) throws KettleDatabaseException { if ( databaseMeta == null ) { throw new KettleDatabaseException( STR ); } try { DataSourceProviderInterface dsp = DataSourceProviderFactory.getDataSourceProviderInterface(); if ( dsp == null ) { dsp = new DatabaseUtil(); } if ( databaseMeta.getAccessType() == DatabaseMeta.TYPE_ACCESS_JNDI ) { String jndiName = environmentSubstitute( databaseMeta.getDatabaseName() ); try { this.connection = dsp.getNamedDataSource( jndiName, DatasourceType.JNDI ).getConnection(); } catch ( DataSourceNamingException e ) { log.logError( STR + jndiName, e ); throw e; } } else { if ( databaseMeta.isUsingConnectionPool() ) { String name = databaseMeta.getName(); try { try { this.connection = dsp.getNamedDataSource( name, DatasourceType.POOLED ).getConnection(); } catch ( UnsupportedOperationException NullPointerException e ) { this.connection = ConnectionPoolUtil.getConnection( log, databaseMeta, partitionId ); } if ( getConnection().getAutoCommit() != isAutoCommit() ) { setAutoCommit( isAutoCommit() ); } } catch ( DataSourceNamingException e ) { log.logError( STR + name, e ); throw e; } } else { connectUsingClass( databaseMeta.getDriverClass(), partitionId ); } } String sql = environmentSubstitute( databaseMeta.getConnectSQL() ); if ( !Utils.isEmpty( sql ) && !Const.onlySpaces( sql ) ) { execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( STR + Const.CR + sql ); } } } catch ( Exception e ) { throw new KettleDatabaseException( STR, e ); } }
/** * Open the database connection. The algorithm is: * <ol> * <li>If <code>databaseMeta.getAccessType()</code> returns * <code>DatabaseMeta.TYPE_ACCESS_JNDI</code>, then the connection's datasource is looked up in JNDI </li> * <li>If <code>databaseMeta.isUsingConnectionPool()</code>, then the connection's datasource is looked up in the * pool</li> * <li>otherwise, the connection is established via {@linkplain java.sql.DriverManager}</li> * </ol> * * @param partitionId the partition ID in the cluster to connect to. * @throws KettleDatabaseException if something went wrong. */
Open the database connection. The algorithm is: If <code>databaseMeta.getAccessType()</code> returns <code>DatabaseMeta.TYPE_ACCESS_JNDI</code>, then the connection's datasource is looked up in JNDI If <code>databaseMeta.isUsingConnectionPool()</code>, then the connection's datasource is looked up in the pool otherwise, the connection is established via java.sql.DriverManager
normalConnect
{ "repo_name": "tmcsantos/pentaho-kettle", "path": "core/src/main/java/org/pentaho/di/core/database/Database.java", "license": "apache-2.0", "size": 181888 }
[ "org.pentaho.di.core.Const", "org.pentaho.di.core.database.DataSourceProviderInterface", "org.pentaho.di.core.database.util.DatabaseUtil", "org.pentaho.di.core.exception.KettleDatabaseException", "org.pentaho.di.core.util.Utils" ]
import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DataSourceProviderInterface; import org.pentaho.di.core.database.util.DatabaseUtil; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.*; import org.pentaho.di.core.database.*; import org.pentaho.di.core.database.util.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.util.*;
[ "org.pentaho.di" ]
org.pentaho.di;
153,181
@SuppressWarnings({"unchecked"}) protected Object readColumnValue(int colNdx, Class destinationType, Class<? extends SqlType> sqlTypeClass, int columnDbSqlType) { if (colNdx != cachedColumnNdx) { try { SqlType sqlType; if (sqlTypeClass != null) { sqlType = SqlTypeManager.lookupSqlType(sqlTypeClass); } else { sqlType = SqlTypeManager.lookup(destinationType); } if (sqlType != null) { cachedColumnValue = sqlType.readValue(resultSet, colNdx + 1, destinationType, columnDbSqlType); } else { cachedColumnValue = resultSet.getObject(colNdx + 1); cachedColumnValue = TypeConverterManager.convertType(cachedColumnValue, destinationType); } } catch (SQLException sex) { throw new DbOomException(dbOomQuery, "Invalid value for column #" + (colNdx + 1), sex); } cachedColumnNdx = colNdx; } return cachedColumnValue; } /** * {@inheritDoc}
@SuppressWarnings({STR}) Object function(int colNdx, Class destinationType, Class<? extends SqlType> sqlTypeClass, int columnDbSqlType) { if (colNdx != cachedColumnNdx) { try { SqlType sqlType; if (sqlTypeClass != null) { sqlType = SqlTypeManager.lookupSqlType(sqlTypeClass); } else { sqlType = SqlTypeManager.lookup(destinationType); } if (sqlType != null) { cachedColumnValue = sqlType.readValue(resultSet, colNdx + 1, destinationType, columnDbSqlType); } else { cachedColumnValue = resultSet.getObject(colNdx + 1); cachedColumnValue = TypeConverterManager.convertType(cachedColumnValue, destinationType); } } catch (SQLException sex) { throw new DbOomException(dbOomQuery, STR + (colNdx + 1), sex); } cachedColumnNdx = colNdx; } return cachedColumnValue; } /** * {@inheritDoc}
/** * Reads column value from result set. Since this method may be called more then once for * the same column, it caches column values. */
Reads column value from result set. Since this method may be called more then once for the same column, it caches column values
readColumnValue
{ "repo_name": "007slm/jodd", "path": "jodd-db/src/main/java/jodd/db/oom/mapper/DefaultResultSetMapper.java", "license": "bsd-3-clause", "size": 16247 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
782,276
@Test public void testMapSerialization() throws Exception { final long key = 0L; final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); // objects for heap state list serialisation final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend = new HeapKeyedStateBackend<>( mock(TaskKvStateRegistry.class), LongSerializer.INSTANCE, ClassLoader.getSystemClassLoader(), keyGroupRange.getNumberOfKeyGroups(), keyGroupRange, async, new ExecutionConfig(), TestLocalRecoveryConfig.disabled(), new HeapPriorityQueueSetFactory(keyGroupRange, keyGroupRange.getNumberOfKeyGroups(), 128), TtlTimeProvider.DEFAULT ); longHeapKeyedStateBackend.setCurrentKey(key); final InternalMapState<Long, VoidNamespace, Long, String> mapState = (InternalMapState<Long, VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState( VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE)); testMapSerialization(key, mapState); }
void function() throws Exception { final long key = 0L; final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend = new HeapKeyedStateBackend<>( mock(TaskKvStateRegistry.class), LongSerializer.INSTANCE, ClassLoader.getSystemClassLoader(), keyGroupRange.getNumberOfKeyGroups(), keyGroupRange, async, new ExecutionConfig(), TestLocalRecoveryConfig.disabled(), new HeapPriorityQueueSetFactory(keyGroupRange, keyGroupRange.getNumberOfKeyGroups(), 128), TtlTimeProvider.DEFAULT ); longHeapKeyedStateBackend.setCurrentKey(key); final InternalMapState<Long, VoidNamespace, Long, String> mapState = (InternalMapState<Long, VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState( VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE)); testMapSerialization(key, mapState); }
/** * Tests map serialization utils. */
Tests map serialization utils
testMapSerialization
{ "repo_name": "mylog00/flink", "path": "flink-queryable-state/flink-queryable-state-runtime/src/test/java/org/apache/flink/queryablestate/network/KvStateRequestSerializerTest.java", "license": "apache-2.0", "size": 15323 }
[ "org.apache.flink.api.common.ExecutionConfig", "org.apache.flink.api.common.state.MapStateDescriptor", "org.apache.flink.api.common.typeutils.base.LongSerializer", "org.apache.flink.api.common.typeutils.base.StringSerializer", "org.apache.flink.queryablestate.client.VoidNamespace", "org.apache.flink.query...
import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.typeutils.base.LongSerializer; import org.apache.flink.api.common.typeutils.base.StringSerializer; import org.apache.flink.queryablestate.client.VoidNamespace; import org.apache.flink.queryablestate.client.VoidNamespaceSerializer; import org.apache.flink.runtime.query.TaskKvStateRegistry; import org.apache.flink.runtime.state.KeyGroupRange; import org.apache.flink.runtime.state.TestLocalRecoveryConfig; import org.apache.flink.runtime.state.heap.HeapKeyedStateBackend; import org.apache.flink.runtime.state.heap.HeapPriorityQueueSetFactory; import org.apache.flink.runtime.state.internal.InternalMapState; import org.apache.flink.runtime.state.ttl.TtlTimeProvider; import org.mockito.Mockito;
import org.apache.flink.api.common.*; import org.apache.flink.api.common.state.*; import org.apache.flink.api.common.typeutils.base.*; import org.apache.flink.queryablestate.client.*; import org.apache.flink.runtime.query.*; import org.apache.flink.runtime.state.*; import org.apache.flink.runtime.state.heap.*; import org.apache.flink.runtime.state.internal.*; import org.apache.flink.runtime.state.ttl.*; import org.mockito.*;
[ "org.apache.flink", "org.mockito" ]
org.apache.flink; org.mockito;
2,890,604
Preconditions.checkArgument(workUnit.contains(ConfigurationKeys.CONVERTER_FILTER_FIELD_NAME), "Missing required property converter.filter.field for the AvroFilterConverter class."); Preconditions.checkArgument(workUnit.contains(ConfigurationKeys.CONVERTER_FILTER_FIELD_VALUE), "Missing required property converter.filter.value for the AvroFilterConverter class."); this.fieldName = workUnit.getProp(ConfigurationKeys.CONVERTER_FILTER_FIELD_NAME); this.fieldValue = workUnit.getProp(ConfigurationKeys.CONVERTER_FILTER_FIELD_VALUE); return super.init(workUnit); } /** * Returns the inputSchema unmodified. * {@inheritDoc}
Preconditions.checkArgument(workUnit.contains(ConfigurationKeys.CONVERTER_FILTER_FIELD_NAME), STR); Preconditions.checkArgument(workUnit.contains(ConfigurationKeys.CONVERTER_FILTER_FIELD_VALUE), STR); this.fieldName = workUnit.getProp(ConfigurationKeys.CONVERTER_FILTER_FIELD_NAME); this.fieldValue = workUnit.getProp(ConfigurationKeys.CONVERTER_FILTER_FIELD_VALUE); return super.init(workUnit); } /** * Returns the inputSchema unmodified. * {@inheritDoc}
/** * The config must specify {@link ConfigurationKeys#CONVERTER_FILTER_FIELD_NAME} to indicate which field to retrieve * from the Avro record and {@link ConfigurationKeys#CONVERTER_FILTER_FIELD_VALUE} to indicate the expected value of * the field. * {@inheritDoc} * @see gobblin.converter.Converter#init(gobblin.configuration.WorkUnitState) */
The config must specify <code>ConfigurationKeys#CONVERTER_FILTER_FIELD_NAME</code> to indicate which field to retrieve from the Avro record and <code>ConfigurationKeys#CONVERTER_FILTER_FIELD_VALUE</code> to indicate the expected value of the field.
init
{ "repo_name": "chavdar/gobblin", "path": "gobblin-core/src/main/java/gobblin/converter/filter/AvroFilterConverter.java", "license": "apache-2.0", "size": 3889 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
495,193
private static void enableXInclude(final DocumentBuilderFactory factory) { try { // Alternative: We set if a system property on the command line is set, for example: // -DLog4j.XInclude=true factory.setXIncludeAware(true); } catch (final UnsupportedOperationException e) { LOGGER.warn("The DocumentBuilderFactory [{}] does not support XInclude: {}", factory, e); } catch (@SuppressWarnings("ErrorNotRethrown") final AbstractMethodError | NoSuchMethodError err) { LOGGER.warn("The DocumentBuilderFactory [{}] is out of date and does not support XInclude: {}", factory, err); } try { // Alternative: We could specify all features and values with system properties like: // -DLog4j.DocumentBuilderFactory.Feature="http://apache.org/xml/features/xinclude/fixup-base-uris true" factory.setFeature(XINCLUDE_FIXUP_BASE_URIS, true); } catch (final ParserConfigurationException e) { LOGGER.warn("The DocumentBuilderFactory [{}] does not support the feature [{}]: {}", factory, XINCLUDE_FIXUP_BASE_URIS, e); } catch (@SuppressWarnings("ErrorNotRethrown") final AbstractMethodError err) { LOGGER.warn("The DocumentBuilderFactory [{}] is out of date and does not support setFeature: {}", factory, err); } try { factory.setFeature(XINCLUDE_FIXUP_LANGUAGE, true); } catch (final ParserConfigurationException e) { LOGGER.warn("The DocumentBuilderFactory [{}] does not support the feature [{}]: {}", factory, XINCLUDE_FIXUP_LANGUAGE, e); } catch (@SuppressWarnings("ErrorNotRethrown") final AbstractMethodError err) { LOGGER.warn("The DocumentBuilderFactory [{}] is out of date and does not support setFeature: {}", factory, err); } }
static void function(final DocumentBuilderFactory factory) { try { factory.setXIncludeAware(true); } catch (final UnsupportedOperationException e) { LOGGER.warn(STR, factory, e); } catch (@SuppressWarnings(STR) final AbstractMethodError NoSuchMethodError err) { LOGGER.warn(STR, factory, err); } try { factory.setFeature(XINCLUDE_FIXUP_BASE_URIS, true); } catch (final ParserConfigurationException e) { LOGGER.warn(STR, factory, XINCLUDE_FIXUP_BASE_URIS, e); } catch (@SuppressWarnings(STR) final AbstractMethodError err) { LOGGER.warn(STR, factory, err); } try { factory.setFeature(XINCLUDE_FIXUP_LANGUAGE, true); } catch (final ParserConfigurationException e) { LOGGER.warn(STR, factory, XINCLUDE_FIXUP_LANGUAGE, e); } catch (@SuppressWarnings(STR) final AbstractMethodError err) { LOGGER.warn(STR, factory, err); } }
/** * Enables XInclude for the given DocumentBuilderFactory * * @param factory a DocumentBuilderFactory */
Enables XInclude for the given DocumentBuilderFactory
enableXInclude
{ "repo_name": "lburgazzoli/apache-logging-log4j2", "path": "log4j-core/src/main/java/org/apache/logging/log4j/core/config/xml/XmlConfiguration.java", "license": "apache-2.0", "size": 15995 }
[ "javax.xml.parsers.DocumentBuilderFactory", "javax.xml.parsers.ParserConfigurationException" ]
import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.*;
[ "javax.xml" ]
javax.xml;
2,435,862
@Test public void testSetSelected() { List<NodeInterface> nodeTypeList = new ArrayList<NodeInterface>(); List<SLDDataInterface> sldDataList = new ArrayList<SLDDataInterface>(); try { FileTreeNode sldTreeNode = new FileTreeNode(new File("/test"), "test.sld"); sldTreeNode.setFileCategory(FileTreeNodeTypeEnum.SLD); nodeTypeList.add(sldTreeNode); sldDataList.add(new SLDData(null, "")); TestTool testTool = new TestTool(null); assertNull(testTool.getConnection()); assertTrue(testTool.getLayerList().isEmpty()); testTool.setSelectedItems(nodeTypeList, sldDataList); assertNull(testTool.getConnection()); assertTrue(testTool.getLayerList().isEmpty()); } catch (SecurityException e) { fail(e.getStackTrace().toString()); } catch (FileNotFoundException e) { fail(e.getStackTrace().toString()); } }
void function() { List<NodeInterface> nodeTypeList = new ArrayList<NodeInterface>(); List<SLDDataInterface> sldDataList = new ArrayList<SLDDataInterface>(); try { FileTreeNode sldTreeNode = new FileTreeNode(new File("/test"), STR); sldTreeNode.setFileCategory(FileTreeNodeTypeEnum.SLD); nodeTypeList.add(sldTreeNode); sldDataList.add(new SLDData(null, "")); TestTool testTool = new TestTool(null); assertNull(testTool.getConnection()); assertTrue(testTool.getLayerList().isEmpty()); testTool.setSelectedItems(nodeTypeList, sldDataList); assertNull(testTool.getConnection()); assertTrue(testTool.getLayerList().isEmpty()); } catch (SecurityException e) { fail(e.getStackTrace().toString()); } catch (FileNotFoundException e) { fail(e.getStackTrace().toString()); } }
/** * Test method for {@link * com.sldeditor.tool.connectionlist.GeoServerConnectionListTool#setSelectedItems(java.util.List, * java.util.List)}. */
Test method for <code>com.sldeditor.tool.connectionlist.GeoServerConnectionListTool#setSelectedItems(java.util.List, java.util.List)</code>
testSetSelected
{ "repo_name": "robward-scisys/sldeditor", "path": "modules/application/src/test/java/com/sldeditor/test/unit/tool/layerstyle/GeoServerLayerUpdateToolTest.java", "license": "gpl-3.0", "size": 17731 }
[ "com.sldeditor.common.NodeInterface", "com.sldeditor.common.SLDDataInterface", "com.sldeditor.common.data.SLDData", "com.sldeditor.datasource.extension.filesystem.node.file.FileTreeNode", "com.sldeditor.datasource.extension.filesystem.node.file.FileTreeNodeTypeEnum", "java.io.File", "java.io.FileNotFoun...
import com.sldeditor.common.NodeInterface; import com.sldeditor.common.SLDDataInterface; import com.sldeditor.common.data.SLDData; import com.sldeditor.datasource.extension.filesystem.node.file.FileTreeNode; import com.sldeditor.datasource.extension.filesystem.node.file.FileTreeNodeTypeEnum; import java.io.File; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Assertions;
import com.sldeditor.common.*; import com.sldeditor.common.data.*; import com.sldeditor.datasource.extension.filesystem.node.file.*; import java.io.*; import java.util.*; import org.junit.jupiter.api.*;
[ "com.sldeditor.common", "com.sldeditor.datasource", "java.io", "java.util", "org.junit.jupiter" ]
com.sldeditor.common; com.sldeditor.datasource; java.io; java.util; org.junit.jupiter;
1,356,092
ActuateType getActuate();
ActuateType getActuate();
/** * Returns the value of the '<em><b>Actuate</b></em>' attribute. * The literals are from the enumeration {@link org.w3.xlink.ActuateType}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Actuate</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Actuate</em>' attribute. * @see org.w3.xlink.ActuateType * @see #isSetActuate() * @see #unsetActuate() * @see #setActuate(ActuateType) * @see net.opengis.gml311.Gml311Package#getOperationRefType_Actuate() * @model unsettable="true" * extendedMetaData="kind='attribute' name='actuate' namespace='http://www.w3.org/1999/xlink'" * @generated */
Returns the value of the 'Actuate' attribute. The literals are from the enumeration <code>org.w3.xlink.ActuateType</code>. If the meaning of the 'Actuate' attribute isn't clear, there really should be more of a description here...
getActuate
{ "repo_name": "geotools/geotools", "path": "modules/ogc/net.opengis.wmts/src/net/opengis/gml311/OperationRefType.java", "license": "lgpl-2.1", "size": 15043 }
[ "org.w3.xlink.ActuateType" ]
import org.w3.xlink.ActuateType;
import org.w3.xlink.*;
[ "org.w3.xlink" ]
org.w3.xlink;
62,293
@Override public CompletableFuture<LogRecordWithDLSN> getLastLogRecordAsync() { return getLastLogRecordAsyncInternal(false, false); }
CompletableFuture<LogRecordWithDLSN> function() { return getLastLogRecordAsyncInternal(false, false); }
/** * Get Latest log record in the log. * * @return latest log record */
Get Latest log record in the log
getLastLogRecordAsync
{ "repo_name": "sijie/bookkeeper", "path": "stream/distributedlog/core/src/main/java/org/apache/distributedlog/BKDistributedLogManager.java", "license": "apache-2.0", "size": 42167 }
[ "java.util.concurrent.CompletableFuture" ]
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,491,029
private int nodeJavaMajorVersion(ClusterNode node) throws IgniteCheckedException { try { // The format is identical for Oracle JDK, OpenJDK and IBM JDK. return Integer.parseInt(node.<String>attribute("java.version").split("\\.")[1]); } catch (Exception e) { U.error(log, "Failed to get java major version (unknown 'java.version' format) [ver=" + node.<String>attribute("java.version") + "]", e); return 0; } }
int function(ClusterNode node) throws IgniteCheckedException { try { return Integer.parseInt(node.<String>attribute(STR).split("\\.")[1]); } catch (Exception e) { U.error(log, STR + node.<String>attribute(STR) + "]", e); return 0; } }
/** * Gets Java major version running on the node. * * @param node Cluster node. * @return Java major version. * @throws IgniteCheckedException If failed to get the version. */
Gets Java major version running on the node
nodeJavaMajorVersion
{ "repo_name": "ntikhonov/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java", "license": "apache-2.0", "size": 113733 }
[ "org.apache.ignite.IgniteCheckedException", "org.apache.ignite.cluster.ClusterNode", "org.apache.ignite.internal.util.typedef.internal.U" ]
import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.*; import org.apache.ignite.cluster.*; import org.apache.ignite.internal.util.typedef.internal.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,783,872
HttpServletRequest httpRequest = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest(); String ipAddress = httpRequest.getHeader("X-FORWARDED-FOR"); if (ipAddress == null) { ipAddress = httpRequest.getRemoteAddr(); } return ipAddress; }
HttpServletRequest httpRequest = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest(); String ipAddress = httpRequest.getHeader(STR); if (ipAddress == null) { ipAddress = httpRequest.getRemoteAddr(); } return ipAddress; }
/** * returns the Ip Address of the current logged in user using HTTP request * @return Ip Address */
returns the Ip Address of the current logged in user using HTTP request
getIpAddress
{ "repo_name": "alaa-ayyad/Sample-Spring-MVC", "path": "src/main/java/utils/AppUtils.java", "license": "mit", "size": 1015 }
[ "javax.servlet.http.HttpServletRequest", "org.springframework.web.context.request.RequestContextHolder", "org.springframework.web.context.request.ServletRequestAttributes" ]
import javax.servlet.http.HttpServletRequest; import org.springframework.web.context.request.RequestContextHolder; import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.*; import org.springframework.web.context.request.*;
[ "javax.servlet", "org.springframework.web" ]
javax.servlet; org.springframework.web;
1,771,182
private synchronized void handleWatchdog() { final int exitCode; if (watchdog == null) return; if (watchdogTimeout == 0) return; if (!isRunning()) { exitCode = OnCommandResultListener.SHELL_DIED; Debug.log(String.format("[%s%%] SHELL_DIED", shell.toUpperCase(Locale.ENGLISH))); } else if (watchdogCount++ < watchdogTimeout) { return; } else { exitCode = OnCommandResultListener.WATCHDOG_EXIT; Debug.log(String.format("[%s%%] WATCHDOG_EXIT", shell.toUpperCase(Locale.ENGLISH))); } if (handler != null) { postCallback(command, exitCode, buffer); } // prevent multiple callbacks for the same command command = null; buffer = null; idle = true; watchdog.shutdown(); watchdog = null; kill(); }
synchronized void function() { final int exitCode; if (watchdog == null) return; if (watchdogTimeout == 0) return; if (!isRunning()) { exitCode = OnCommandResultListener.SHELL_DIED; Debug.log(String.format(STR, shell.toUpperCase(Locale.ENGLISH))); } else if (watchdogCount++ < watchdogTimeout) { return; } else { exitCode = OnCommandResultListener.WATCHDOG_EXIT; Debug.log(String.format(STR, shell.toUpperCase(Locale.ENGLISH))); } if (handler != null) { postCallback(command, exitCode, buffer); } command = null; buffer = null; idle = true; watchdog.shutdown(); watchdog = null; kill(); }
/** * Called from a ScheduledThreadPoolExecutor timer thread every second * when there is an outstanding command */
Called from a ScheduledThreadPoolExecutor timer thread every second when there is an outstanding command
handleWatchdog
{ "repo_name": "54dxs/mobilesafe", "path": "ms/src/eu/chainfire/libsuperuser/Shell.java", "license": "apache-2.0", "size": 68561 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
33,866
public static <T> T open(File file, Class<T> clazz, DriverRegistry registry) throws IOException { return open(file.toURI(), clazz, registry); }
static <T> T function(File file, Class<T> clazz, DriverRegistry registry) throws IOException { return open(file.toURI(), clazz, registry); }
/** * Opens a connection to data specified by a file. * <p> * The optional <tt>class</tt> parameter is used to filter the candidate driver set. For * example to constrain to workspace drivers. * <pre><code> * Workspace ws = Drivers.open(..., Workspace.class); * </code></pre> * </p> * @param file The file to open. * @param clazz Class used to filter registered drivers, may be <code>null</code>. * * @return The data object, or <code>null</code> if no suitable driver could be found for the * specified file. * * @throws IOException Any connection errors, such as a file system error or * database connection failure. */
Opens a connection to data specified by a file. The optional class parameter is used to filter the candidate driver set. For example to constrain to workspace drivers. <code><code> Workspace ws = Drivers.open(..., Workspace.class); </code></code>
open
{ "repo_name": "geosolutions-it/jeo", "path": "core/src/main/java/org/jeo/data/Drivers.java", "license": "apache-2.0", "size": 13091 }
[ "java.io.File", "java.io.IOException" ]
import java.io.File; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,163,428
public static <A> BoundNullaryPredicate bind(Predicate<? super A> predicate, A arg) { return null == predicate ? null : new BoundNullaryPredicate(predicate, arg); }
static <A> BoundNullaryPredicate function(Predicate<? super A> predicate, A arg) { return null == predicate ? null : new BoundNullaryPredicate(predicate, arg); }
/** * Adapt the given, possibly-<code>null</code>, * {@link Predicate Predicate} to the * {@link NullaryPredicate NullaryPredicate} interface by binding * the specified <code>Object</code> as a constant * argument. * When the given <code>Predicate</code> is <code>null</code>, * returns <code>null</code>. * * @param <A> input type * @param predicate the possibly-<code>null</code> * {@link Predicate Predicate} to adapt * @param arg the object to bind as a constant argument * @return a <code>BoundNullaryPredicate</code> wrapping the given * {@link Predicate Predicate}, or <code>null</code> * if the given <code>Predicate</code> is <code>null</code> */
Adapt the given, possibly-<code>null</code>, <code>Predicate Predicate</code> to the <code>NullaryPredicate NullaryPredicate</code> interface by binding the specified <code>Object</code> as a constant argument. When the given <code>Predicate</code> is <code>null</code>, returns <code>null</code>
bind
{ "repo_name": "apache/commons-functor", "path": "core/src/main/java/org/apache/commons/functor/adapter/BoundNullaryPredicate.java", "license": "apache-2.0", "size": 3953 }
[ "org.apache.commons.functor.Predicate" ]
import org.apache.commons.functor.Predicate;
import org.apache.commons.functor.*;
[ "org.apache.commons" ]
org.apache.commons;
1,959,419
public void setActivateOnItemClick(boolean activateOnItemClick) { // When setting CHOICE_MODE_SINGLE, ListView will automatically // give items the 'activated' state when touched. getListView().setChoiceMode( activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
void function(boolean activateOnItemClick) { getListView().setChoiceMode( activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
/** * Turns on activate-on-click mode. When this mode is on, list items will be * given the 'activated' state when touched. */
Turns on activate-on-click mode. When this mode is on, list items will be given the 'activated' state when touched
setActivateOnItemClick
{ "repo_name": "dansun/Gameofkrowns", "path": "gameofkrowns-client/gameofkrowns-androidclient/src/nu/danielsundberg/droid/gameofkrowns/list/activity/GameofkrownsListFragment.java", "license": "apache-2.0", "size": 4678 }
[ "android.widget.ListView" ]
import android.widget.ListView;
import android.widget.*;
[ "android.widget" ]
android.widget;
2,543,745
protected void initDataBindings() { JTableBinding<TvShowChooserModel, List<TvShowChooserModel>, JTable> jTableBinding = SwingBindings.createJTableBinding(UpdateStrategy.READ, tvShowsFound, table); // BeanProperty<TvShowChooserModel, String> tvShowChooserModelBeanProperty = BeanProperty.create("combinedName"); jTableBinding.addColumnBinding(tvShowChooserModelBeanProperty).setEditable(false); // jTableBinding.bind(); // BeanProperty<JTable, String> jTableBeanProperty_1 = BeanProperty.create("selectedElement.overview"); BeanProperty<JTextPane, String> jTextPaneBeanProperty = BeanProperty.create("text"); AutoBinding<JTable, String, JTextPane, String> autoBinding_1 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_1, tpTvShowOverview, jTextPaneBeanProperty); autoBinding_1.bind(); // BeanProperty<JTable, String> jTableBeanProperty_2 = BeanProperty.create("selectedElement.posterUrl"); BeanProperty<ImageLabel, String> imageLabelBeanProperty = BeanProperty.create("imageUrl"); AutoBinding<JTable, String, ImageLabel, String> autoBinding_2 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_2, lblTvShowPoster, imageLabelBeanProperty); autoBinding_2.bind(); // BeanProperty<JTable, String> jTableBeanProperty_3 = BeanProperty.create("selectedElement.combinedName"); BeanProperty<JTextArea, String> jTextAreaBeanProperty_1 = BeanProperty.create("text"); AutoBinding<JTable, String, JTextArea, String> autoBinding_3 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_3, lblTvShowName, jTextAreaBeanProperty_1); autoBinding_3.bind(); }
void function() { JTableBinding<TvShowChooserModel, List<TvShowChooserModel>, JTable> jTableBinding = SwingBindings.createJTableBinding(UpdateStrategy.READ, tvShowsFound, table); jTableBinding.addColumnBinding(tvShowChooserModelBeanProperty).setEditable(false); BeanProperty<JTextPane, String> jTextPaneBeanProperty = BeanProperty.create("text"); AutoBinding<JTable, String, JTextPane, String> autoBinding_1 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_1, tpTvShowOverview, jTextPaneBeanProperty); autoBinding_1.bind(); BeanProperty<ImageLabel, String> imageLabelBeanProperty = BeanProperty.create(STR); AutoBinding<JTable, String, ImageLabel, String> autoBinding_2 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_2, lblTvShowPoster, imageLabelBeanProperty); autoBinding_2.bind(); BeanProperty<JTextArea, String> jTextAreaBeanProperty_1 = BeanProperty.create("text"); AutoBinding<JTable, String, JTextArea, String> autoBinding_3 = Bindings.createAutoBinding(UpdateStrategy.READ, table, jTableBeanProperty_3, lblTvShowName, jTextAreaBeanProperty_1); autoBinding_3.bind(); }
/** * Inits the data bindings. */
Inits the data bindings
initDataBindings
{ "repo_name": "mlaggner/tinyMediaManager", "path": "src/org/tinymediamanager/ui/tvshows/dialogs/TvShowChooserDialog.java", "license": "apache-2.0", "size": 24583 }
[ "java.util.List", "javax.swing.JTable", "javax.swing.JTextArea", "javax.swing.JTextPane", "org.jdesktop.beansbinding.AutoBinding", "org.jdesktop.beansbinding.BeanProperty", "org.jdesktop.beansbinding.Bindings", "org.jdesktop.swingbinding.JTableBinding", "org.jdesktop.swingbinding.SwingBindings", "...
import java.util.List; import javax.swing.JTable; import javax.swing.JTextArea; import javax.swing.JTextPane; import org.jdesktop.beansbinding.AutoBinding; import org.jdesktop.beansbinding.BeanProperty; import org.jdesktop.beansbinding.Bindings; import org.jdesktop.swingbinding.JTableBinding; import org.jdesktop.swingbinding.SwingBindings; import org.tinymediamanager.ui.components.ImageLabel; import org.tinymediamanager.ui.tvshows.TvShowChooserModel;
import java.util.*; import javax.swing.*; import org.jdesktop.beansbinding.*; import org.jdesktop.swingbinding.*; import org.tinymediamanager.ui.components.*; import org.tinymediamanager.ui.tvshows.*;
[ "java.util", "javax.swing", "org.jdesktop.beansbinding", "org.jdesktop.swingbinding", "org.tinymediamanager.ui" ]
java.util; javax.swing; org.jdesktop.beansbinding; org.jdesktop.swingbinding; org.tinymediamanager.ui;
2,207,998
public void cancel() { ScheduledFuture<?> future = this.future; if (future != null) { future.cancel(true); } }
void function() { ScheduledFuture<?> future = this.future; if (future != null) { future.cancel(true); } }
/** * Trigger cancellation of this scheduled task. */
Trigger cancellation of this scheduled task
cancel
{ "repo_name": "spring-projects/spring-framework", "path": "spring-context/src/main/java/org/springframework/scheduling/config/ScheduledTask.java", "license": "apache-2.0", "size": 1726 }
[ "java.util.concurrent.ScheduledFuture" ]
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,384,385
public static void storeDate(Date date, byte[] dest, int offset) { Calendar cal = new GregorianCalendar(); cal.setTime(date); LittleEndian.putShort(dest, offset + 0, (short) cal.get(Calendar.YEAR)); LittleEndian.putShort(dest, offset + 2, (short)(cal.get(Calendar.MONTH) + 1)); LittleEndian.putShort(dest, offset + 4, (short)(cal.get(Calendar.DAY_OF_WEEK)-1)); LittleEndian.putShort(dest, offset + 6, (short) cal.get(Calendar.DAY_OF_MONTH)); LittleEndian.putShort(dest, offset + 8, (short) cal.get(Calendar.HOUR_OF_DAY)); LittleEndian.putShort(dest, offset + 10,(short) cal.get(Calendar.MINUTE)); LittleEndian.putShort(dest, offset + 12,(short) cal.get(Calendar.SECOND)); LittleEndian.putShort(dest, offset + 14,(short) cal.get(Calendar.MILLISECOND)); }
static void function(Date date, byte[] dest, int offset) { Calendar cal = new GregorianCalendar(); cal.setTime(date); LittleEndian.putShort(dest, offset + 0, (short) cal.get(Calendar.YEAR)); LittleEndian.putShort(dest, offset + 2, (short)(cal.get(Calendar.MONTH) + 1)); LittleEndian.putShort(dest, offset + 4, (short)(cal.get(Calendar.DAY_OF_WEEK)-1)); LittleEndian.putShort(dest, offset + 6, (short) cal.get(Calendar.DAY_OF_MONTH)); LittleEndian.putShort(dest, offset + 8, (short) cal.get(Calendar.HOUR_OF_DAY)); LittleEndian.putShort(dest, offset + 10,(short) cal.get(Calendar.MINUTE)); LittleEndian.putShort(dest, offset + 12,(short) cal.get(Calendar.SECOND)); LittleEndian.putShort(dest, offset + 14,(short) cal.get(Calendar.MILLISECOND)); }
/** * Convert the supplied java Date into a SystemTime struct, and write it * into the supplied byte array. */
Convert the supplied java Date into a SystemTime struct, and write it into the supplied byte array
storeDate
{ "repo_name": "benjaminy/STuneLite", "path": "OldJavaImplementation/poi-3.2-FINAL/src/scratchpad/src/org/apache/poi/hslf/util/SystemTimeUtils.java", "license": "gpl-2.0", "size": 3908 }
[ "java.util.Calendar", "java.util.Date", "java.util.GregorianCalendar", "org.apache.poi.util.LittleEndian" ]
import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import org.apache.poi.util.LittleEndian;
import java.util.*; import org.apache.poi.util.*;
[ "java.util", "org.apache.poi" ]
java.util; org.apache.poi;
1,018,185
public void removeSafeChannel(Channel channel) { if (channel == null) throw new IllegalArgumentException("invalid channel"); m_mSafeChannels.remove(channel.getSafeName().toLowerCase()); m_logger.log(Level.INFO, "safe channel removed from server '" + m_strName + "': " + channel.getName()); }
void function(Channel channel) { if (channel == null) throw new IllegalArgumentException(STR); m_mSafeChannels.remove(channel.getSafeName().toLowerCase()); m_logger.log(Level.INFO, STR + m_strName + STR + channel.getName()); }
/** * Removes a safe channel from this server. * * @param channel * the channel to remove. */
Removes a safe channel from this server
removeSafeChannel
{ "repo_name": "bhuisgen/hbircs", "path": "src/fr/hbis/ircs/Server.java", "license": "gpl-2.0", "size": 16161 }
[ "java.util.logging.Level" ]
import java.util.logging.Level;
import java.util.logging.*;
[ "java.util" ]
java.util;
1,529,059
public CdataMatcher cdataMatcher() throws TagValidationException { return this.back().getCdataMatcher(); }
CdataMatcher function() throws TagValidationException { return this.back().getCdataMatcher(); }
/** * Returns the cdata matcher for the tag currently on the stack. If there * is no cdata matcher, returns null. * * @return returns the CdataMatcher for the tag currently on the stack. * @throws TagValidationException the TagValidationException. */
Returns the cdata matcher for the tag currently on the stack. If there is no cdata matcher, returns null
cdataMatcher
{ "repo_name": "taboola/amphtml", "path": "validator/java/src/main/java/dev/amp/validator/TagStack.java", "license": "apache-2.0", "size": 21478 }
[ "dev.amp.validator.exception.TagValidationException" ]
import dev.amp.validator.exception.TagValidationException;
import dev.amp.validator.exception.*;
[ "dev.amp.validator" ]
dev.amp.validator;
1,538,433
@Test(timeout = 100000) public void testRestartBetweenWrites() throws Exception { // Create a non-replicated table for this test, so that // we're sure when we reconnect to the leader after restarting // the tablet servers, it's definitely the same leader we wrote // to before. KuduTable nonReplicatedTable = harness.getClient().createTable( "non-replicated", schema, getBasicCreateTableOptions().setNumReplicas(1)); try { // Write before doing any restarts to establish a connection. AsyncKuduSession session = client.newSession(); session.setTimeoutMillis(30000); session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_SYNC); session.apply(createBasicSchemaInsert(nonReplicatedTable, 1)).join(); int numClientsBefore = client.getConnectionListCopy().size(); // Restart all the tablet servers. harness.killAllTabletServers(); harness.startAllTabletServers(); // Perform another write, which will require reconnecting to the same // tablet server that we wrote to above. session.apply(createBasicSchemaInsert(nonReplicatedTable, 2)).join(); // We should not have leaked an entry in the client2tablets map. int numClientsAfter = client.getConnectionListCopy().size(); assertEquals(numClientsBefore, numClientsAfter); } finally { harness.startAllTabletServers(); client.deleteTable("non-replicated").join(); } }
@Test(timeout = 100000) void function() throws Exception { KuduTable nonReplicatedTable = harness.getClient().createTable( STR, schema, getBasicCreateTableOptions().setNumReplicas(1)); try { AsyncKuduSession session = client.newSession(); session.setTimeoutMillis(30000); session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_SYNC); session.apply(createBasicSchemaInsert(nonReplicatedTable, 1)).join(); int numClientsBefore = client.getConnectionListCopy().size(); harness.killAllTabletServers(); harness.startAllTabletServers(); session.apply(createBasicSchemaInsert(nonReplicatedTable, 2)).join(); int numClientsAfter = client.getConnectionListCopy().size(); assertEquals(numClientsBefore, numClientsAfter); } finally { harness.startAllTabletServers(); client.deleteTable(STR).join(); } }
/** * Regression test for a bug in which, when a tablet client is disconnected * and we reconnect, we were previously leaking the old RpcProxy * object in the client2tablets map. */
Regression test for a bug in which, when a tablet client is disconnected and we reconnect, we were previously leaking the old RpcProxy object in the client2tablets map
testRestartBetweenWrites
{ "repo_name": "InspurUSA/kudu", "path": "java/kudu-client/src/test/java/org/apache/kudu/client/TestAsyncKuduSession.java", "license": "apache-2.0", "size": 21805 }
[ "org.apache.kudu.test.ClientTestUtil", "org.junit.Assert", "org.junit.Test" ]
import org.apache.kudu.test.ClientTestUtil; import org.junit.Assert; import org.junit.Test;
import org.apache.kudu.test.*; import org.junit.*;
[ "org.apache.kudu", "org.junit" ]
org.apache.kudu; org.junit;
1,216,330
public Map<String, Collection<String>> getInstalledAssetNames() { if (installAssets == null) return null; Map<String, Collection<String>> installed = new HashMap<String, Collection<String>>(); Collection<String> installedAddons = new ArrayList<String>(); Collection<String> installedFeatures = new ArrayList<String>(); Collection<String> installedFixes = new ArrayList<String>(); Collection<String> installedSamples = new ArrayList<String>(); Collection<String> installedOpenSources = new ArrayList<String>(); for (List<InstallAsset> iaList : installAssets) { for (InstallAsset asset : iaList) { if (asset.isFeature()) { ESAAsset esa = (ESAAsset) asset; if (esa.isPublic()) { String esaName = esa.getShortName(); if (esaName == null || esaName.isEmpty()) esaName = esa.getFeatureName(); if (esa.isAddon()) installedAddons.add(esaName); else installedFeatures.add(esaName); } } else if (asset.isFix()) { installedFixes.add(asset.toString()); } else if (asset.isSample()) { installedSamples.add(asset.toString()); } else if (asset.isOpenSource()) { installedOpenSources.add(asset.toString()); } } } installed.put(InstallConstants.ADDON, installedAddons); installed.put(InstallConstants.FEATURE, installedFeatures); installed.put(InstallConstants.IFIX, installedFixes); installed.put(InstallConstants.SAMPLE, installedSamples); installed.put(InstallConstants.OPENSOURCE, installedOpenSources); return installed; }
Map<String, Collection<String>> function() { if (installAssets == null) return null; Map<String, Collection<String>> installed = new HashMap<String, Collection<String>>(); Collection<String> installedAddons = new ArrayList<String>(); Collection<String> installedFeatures = new ArrayList<String>(); Collection<String> installedFixes = new ArrayList<String>(); Collection<String> installedSamples = new ArrayList<String>(); Collection<String> installedOpenSources = new ArrayList<String>(); for (List<InstallAsset> iaList : installAssets) { for (InstallAsset asset : iaList) { if (asset.isFeature()) { ESAAsset esa = (ESAAsset) asset; if (esa.isPublic()) { String esaName = esa.getShortName(); if (esaName == null esaName.isEmpty()) esaName = esa.getFeatureName(); if (esa.isAddon()) installedAddons.add(esaName); else installedFeatures.add(esaName); } } else if (asset.isFix()) { installedFixes.add(asset.toString()); } else if (asset.isSample()) { installedSamples.add(asset.toString()); } else if (asset.isOpenSource()) { installedOpenSources.add(asset.toString()); } } } installed.put(InstallConstants.ADDON, installedAddons); installed.put(InstallConstants.FEATURE, installedFeatures); installed.put(InstallConstants.IFIX, installedFixes); installed.put(InstallConstants.SAMPLE, installedSamples); installed.put(InstallConstants.OPENSOURCE, installedOpenSources); return installed; }
/** * Gets the names of assets inside installAssets * * @return Map of asset types and collections of asset names */
Gets the names of assets inside installAssets
getInstalledAssetNames
{ "repo_name": "kgibm/open-liberty", "path": "dev/com.ibm.ws.install/src/com/ibm/ws/install/internal/Director.java", "license": "epl-1.0", "size": 93799 }
[ "com.ibm.ws.install.InstallConstants", "com.ibm.ws.install.internal.asset.ESAAsset", "com.ibm.ws.install.internal.asset.InstallAsset", "java.util.ArrayList", "java.util.Collection", "java.util.HashMap", "java.util.List", "java.util.Map" ]
import com.ibm.ws.install.InstallConstants; import com.ibm.ws.install.internal.asset.ESAAsset; import com.ibm.ws.install.internal.asset.InstallAsset; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map;
import com.ibm.ws.install.*; import com.ibm.ws.install.internal.asset.*; import java.util.*;
[ "com.ibm.ws", "java.util" ]
com.ibm.ws; java.util;
2,721,857
private static Collection<HalfEdge> createPHalfEdges(SpqrNode node) { Iterator<MultiVertex> vertexIterator = node.skeleton.vertices.iterator(); Vertex vertex1 = node.skeletonVertexToVertex.get(vertexIterator.next()); Vertex vertex2 = node.skeletonVertexToVertex.get(vertexIterator.next()); int edgeCount = node.skeleton.vertices.iterator().next().edges.size(); Collection<HalfEdge> halfEdges = new ArrayList<HalfEdge>(2 * edgeCount); // Create the first two HalfEdges HalfEdge firstHalfEdge = new HalfEdge(vertex1, node.realEdges.isEmpty()); HalfEdge firstTwinEdge = new HalfEdge(vertex2, node.realEdges.isEmpty()); firstHalfEdge.twinEdge = firstTwinEdge; firstTwinEdge.twinEdge = firstHalfEdge; halfEdges.add(firstHalfEdge); halfEdges.add(firstTwinEdge); // Create the remaining HalfEdges HalfEdge prevHalfEdge = firstHalfEdge; HalfEdge prevTwinEdge = firstTwinEdge; List<HalfEdge> twinEdges = new ArrayList<HalfEdge>(edgeCount - 1); for (int i = 0; i < edgeCount - 1; i++) { HalfEdge halfEdge = new HalfEdge(vertex1, true); HalfEdge twinEdge = new HalfEdge(vertex2, true); twinEdges.add(twinEdge); prevHalfEdge.nextClockwise = halfEdge; prevTwinEdge.nextClockwise = twinEdge; halfEdges.add(halfEdge); halfEdges.add(twinEdge); prevHalfEdge = halfEdge; prevTwinEdge = twinEdge; } prevHalfEdge.nextClockwise = firstHalfEdge; prevTwinEdge.nextClockwise = firstTwinEdge; // Set the twinEdge links HalfEdge halfEdge = firstHalfEdge.nextClockwise; for (int i = 0; i < edgeCount - 1; i++) { HalfEdge twinEdge = twinEdges.get(twinEdges.size() - i - 1); halfEdge.twinEdge = twinEdge; twinEdge.twinEdge = halfEdge; halfEdge = halfEdge.nextClockwise; } // Set the nextOnExternalFace links halfEdge = firstHalfEdge; for (int i = 0; i < edgeCount; i++) { HalfEdge next = halfEdge.twinEdge.nextClockwise; halfEdge.nextOnExternalFace = next; next.nextOnExternalFace = halfEdge; halfEdge = halfEdge.nextClockwise; } return halfEdges; }
static Collection<HalfEdge> function(SpqrNode node) { Iterator<MultiVertex> vertexIterator = node.skeleton.vertices.iterator(); Vertex vertex1 = node.skeletonVertexToVertex.get(vertexIterator.next()); Vertex vertex2 = node.skeletonVertexToVertex.get(vertexIterator.next()); int edgeCount = node.skeleton.vertices.iterator().next().edges.size(); Collection<HalfEdge> halfEdges = new ArrayList<HalfEdge>(2 * edgeCount); HalfEdge firstHalfEdge = new HalfEdge(vertex1, node.realEdges.isEmpty()); HalfEdge firstTwinEdge = new HalfEdge(vertex2, node.realEdges.isEmpty()); firstHalfEdge.twinEdge = firstTwinEdge; firstTwinEdge.twinEdge = firstHalfEdge; halfEdges.add(firstHalfEdge); halfEdges.add(firstTwinEdge); HalfEdge prevHalfEdge = firstHalfEdge; HalfEdge prevTwinEdge = firstTwinEdge; List<HalfEdge> twinEdges = new ArrayList<HalfEdge>(edgeCount - 1); for (int i = 0; i < edgeCount - 1; i++) { HalfEdge halfEdge = new HalfEdge(vertex1, true); HalfEdge twinEdge = new HalfEdge(vertex2, true); twinEdges.add(twinEdge); prevHalfEdge.nextClockwise = halfEdge; prevTwinEdge.nextClockwise = twinEdge; halfEdges.add(halfEdge); halfEdges.add(twinEdge); prevHalfEdge = halfEdge; prevTwinEdge = twinEdge; } prevHalfEdge.nextClockwise = firstHalfEdge; prevTwinEdge.nextClockwise = firstTwinEdge; HalfEdge halfEdge = firstHalfEdge.nextClockwise; for (int i = 0; i < edgeCount - 1; i++) { HalfEdge twinEdge = twinEdges.get(twinEdges.size() - i - 1); halfEdge.twinEdge = twinEdge; twinEdge.twinEdge = halfEdge; halfEdge = halfEdge.nextClockwise; } halfEdge = firstHalfEdge; for (int i = 0; i < edgeCount; i++) { HalfEdge next = halfEdge.twinEdge.nextClockwise; halfEdge.nextOnExternalFace = next; next.nextOnExternalFace = halfEdge; halfEdge = halfEdge.nextClockwise; } return halfEdges; }
/** * Returns the HalfEdges for the skeleton of the specified node of type SpqrNode.Type.P. This does not set the * virtualMatch fields. */
Returns the HalfEdges for the skeleton of the specified node of type SpqrNode.Type.P. This does not set the virtualMatch fields
createPHalfEdges
{ "repo_name": "btrekkie/reductions", "path": "src/com/github/btrekkie/graph/ec/EcPlanarEmbedding.java", "license": "mit", "size": 61994 }
[ "com.github.btrekkie.graph.MultiVertex", "com.github.btrekkie.graph.Vertex", "com.github.btrekkie.graph.spqr.SpqrNode", "java.util.ArrayList", "java.util.Collection", "java.util.Iterator", "java.util.List" ]
import com.github.btrekkie.graph.MultiVertex; import com.github.btrekkie.graph.Vertex; import com.github.btrekkie.graph.spqr.SpqrNode; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List;
import com.github.btrekkie.graph.*; import com.github.btrekkie.graph.spqr.*; import java.util.*;
[ "com.github.btrekkie", "java.util" ]
com.github.btrekkie; java.util;
1,951,738
/**
 * Determines whether the given function parameter is a var args parameter.
 *
 * @param param the parameter node
 * @param info the JSDoc attached to the function, or null if there is none
 * @return whether the given param is a var args param
 */
private boolean isVarArgsParameter(
    Node param, @Nullable JSDocInfo info) {
  // The coding convention may declare the parameter var args on its own.
  if (codingConvention.isVarArgsParameter(param)) {
    return true;
  }
  // Otherwise fall back to the JSDoc declaration for this parameter, if any.
  if (info == null) {
    return false;
  }
  String paramName = param.getString();
  return info.hasParameterType(paramName)
      && info.getParameterType(paramName).isVarArgs();
}
boolean function( Node param, @Nullable JSDocInfo info) { if (codingConvention.isVarArgsParameter(param)) { return true; } String paramName = param.getString(); return info != null && info.hasParameterType(paramName) && info.getParameterType(paramName).isVarArgs(); }
/** * Determine whether this is a var args parameter. * @return Whether the given param is a var args param. */
Determine whether this is a var args parameter
isVarArgsParameter
{ "repo_name": "zombiezen/cardcpx", "path": "third_party/closure-compiler/src/com/google/javascript/jscomp/FunctionTypeBuilder.java", "license": "apache-2.0", "size": 33864 }
[ "com.google.javascript.rhino.JSDocInfo", "com.google.javascript.rhino.Node", "javax.annotation.Nullable" ]
import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import javax.annotation.Nullable;
import com.google.javascript.rhino.*; import javax.annotation.*;
[ "com.google.javascript", "javax.annotation" ]
com.google.javascript; javax.annotation;
977,890
/**
 * Sets the copy of the document before any modifications.
 *
 * @param doc the unmodified snapshot of the document
 */
public void setOriginalDocument(JsonDoc doc) { originalDoc = doc; }
void function(JsonDoc doc) { originalDoc = doc; }
/** * Sets the copy of the document before any modifications */
Sets the copy of the document before any modifications
setOriginalDocument
{ "repo_name": "dcrissman/lightblue-core", "path": "crud/src/main/java/com/redhat/lightblue/crud/DocCtx.java", "license": "gpl-3.0", "size": 4670 }
[ "com.redhat.lightblue.util.JsonDoc" ]
import com.redhat.lightblue.util.JsonDoc;
import com.redhat.lightblue.util.*;
[ "com.redhat.lightblue" ]
com.redhat.lightblue;
291,911
/**
 * Removes all elements from the set and returns them, clearing the entries.
 *
 * @return a list containing every element that was in the set
 */
public List<T> pollAll() {
    List<T> drained = new ArrayList<>(size);
    // Walk every bucket's linked chain, collecting the stored elements.
    for (LinkedElement<T> bucket : entries) {
        for (LinkedElement<T> node = bucket; node != null; node = node.next) {
            drained.add(node.element);
        }
    }
    clear();
    return drained;
}
List<T> function() { List<T> retList = new ArrayList<>(size); for (LinkedElement<T> entry : entries) { LinkedElement<T> current = entry; while (current != null) { retList.add(current.element); current = current.next; } } this.clear(); return retList; }
/** * Remove all elements from the set and return them. Clear the entries. */
Remove all elements from the set and return them. Clear the entries
pollAll
{ "repo_name": "robzor92/hops", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java", "license": "apache-2.0", "size": 17531 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,708,349
public Integer doCommitX(final byte[] regionName, final long transactionId, final long commitId, final int participantNum, final boolean ignoreUnknownTransaction) throws CommitUnsuccessfulException, IOException { boolean retry = false; boolean refresh = false; int retryCount = 0; int retrySleep = TM_SLEEP; if( TRANSACTION_ALGORITHM == AlgorithmType.MVCC){ do { retry = false; refresh = false; try { if (LOG.isDebugEnabled()) LOG.debug("doCommitX -- ENTRY txid: " + transactionId + " commitId " + commitId + " participantNum " + participantNum + " ignoreUnknownTransaction: " + ignoreUnknownTransaction); Batch.Call<TrxRegionService, CommitResponse> callable = new Batch.Call<TrxRegionService, CommitResponse>() { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<CommitResponse> rpcCallback = new BlockingRpcCallback<CommitResponse>();
Integer function(final byte[] regionName, final long transactionId, final long commitId, final int participantNum, final boolean ignoreUnknownTransaction) throws CommitUnsuccessfulException, IOException { boolean retry = false; boolean refresh = false; int retryCount = 0; int retrySleep = TM_SLEEP; if( TRANSACTION_ALGORITHM == AlgorithmType.MVCC){ do { retry = false; refresh = false; try { if (LOG.isDebugEnabled()) LOG.debug(STR + transactionId + STR + commitId + STR + participantNum + STR + ignoreUnknownTransaction); Batch.Call<TrxRegionService, CommitResponse> callable = new Batch.Call<TrxRegionService, CommitResponse>() { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<CommitResponse> rpcCallback = new BlockingRpcCallback<CommitResponse>();
/** * Method : doCommitX * Params : regionName - name of Region * transactionId - transaction identifier * Return : Always 0, can ignore * Purpose : Call commit for a given regionserver */
Method : doCommitX Params : regionName - name of Region transactionId - transaction identifier Return : Always 0, can ignore Purpose : Call commit for a given regionserver
doCommitX
{ "repo_name": "mashengchen/incubator-trafodion", "path": "core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java", "license": "apache-2.0", "size": 162039 }
[ "java.io.IOException", "org.apache.hadoop.hbase.client.coprocessor.Batch", "org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos", "org.apache.hadoop.hbase.ipc.BlockingRpcCallback", "org.apache.hadoop.hbase.ipc.ServerRpcController" ]
import java.io.IOException; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.coprocessor.transactional.generated.TrxRegionProtos; import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.ServerRpcController;
import java.io.*; import org.apache.hadoop.hbase.client.coprocessor.*; import org.apache.hadoop.hbase.coprocessor.transactional.generated.*; import org.apache.hadoop.hbase.ipc.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
315,778
/**
 * Loads a labeled dataset from a classpath resource with .txt extension.
 *
 * @param rsrcPath path to the dataset resource
 * @param isFallOnBadData whether loading should fail on malformed rows
 * @return the loaded dataset, or null if the path is incorrect or the file cannot be read
 */
public static LabeledVectorSet loadDatasetFromTxt(String rsrcPath, boolean isFallOnBadData) {
    try {
        Path path = Paths.get(LabeledDatasetHelper.class.getClassLoader().getResource(rsrcPath).toURI());
        return LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, isFallOnBadData);
    }
    catch (IOException | URISyntaxException e) {
        // Test helper convention: report the failure and signal it with a null result.
        e.printStackTrace();
        return null;
    }
}
static LabeledVectorSet function(String rsrcPath, boolean isFallOnBadData) { try { Path path = Paths.get(LabeledDatasetHelper.class.getClassLoader().getResource(rsrcPath).toURI()); try { return LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, isFallOnBadData); } catch (IOException e) { e.printStackTrace(); } } catch (URISyntaxException e) { e.printStackTrace(); return null; } return null; }
/** * Loads labeled dataset from file with .txt extension. * * @param rsrcPath path to dataset. * @return Null if path is incorrect. */
Loads labeled dataset from file with .txt extension
loadDatasetFromTxt
{ "repo_name": "andrey-kuznetsov/ignite", "path": "modules/ml/src/test/java/org/apache/ignite/ml/knn/LabeledDatasetHelper.java", "license": "apache-2.0", "size": 1995 }
[ "java.io.IOException", "java.net.URISyntaxException", "java.nio.file.Path", "java.nio.file.Paths", "org.apache.ignite.ml.structures.LabeledVectorSet", "org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader" ]
import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.ignite.ml.structures.LabeledVectorSet; import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader;
import java.io.*; import java.net.*; import java.nio.file.*; import org.apache.ignite.ml.structures.*; import org.apache.ignite.ml.structures.preprocessing.*;
[ "java.io", "java.net", "java.nio", "org.apache.ignite" ]
java.io; java.net; java.nio; org.apache.ignite;
1,030,495
/**
 * Retrieves an image from the cache, which loads it if necessary or derives it by
 * resizing/coloring an existing neutral image.
 *
 * @param imagePath path of the requested image
 * @param colorRule colormap to apply; should be null when no colormap is used
 *                  (even in the case of a colored sprite)
 * @param zoom zoom factor applied to the image
 * @return the cached (possibly newly processed) image
 * @throws IOException if the image cannot be loaded
 */
public BufferedImage retrieveFromImageCache(String imagePath, ColorRule colorRule, double zoom) throws IOException { return imageCache.retrieveImage(imagePath,colorRule,zoom); }
BufferedImage function(String imagePath, ColorRule colorRule, double zoom) throws IOException { return imageCache.retrieveImage(imagePath,colorRule,zoom); }
/** * retrieve an image for the cache, loading it if necessary, * or processing it by resizing/coloring an existing neutral image. * Note: the color parameter only indicates the use of a colormap, * it should be null if no colormap is used (even in the case of a colored sprite). */
retrieve an image for the cache, loading it if necessary, or processing it by resizing/coloring an existing neutral image. Note: the color parameter only indicates the use of a colormap, it should be null if no colormap is used (even in the case of a colored sprite)
retrieveFromImageCache
{ "repo_name": "vlabatut/totalboumboum", "path": "src/org/totalboumboum/configuration/engine/EngineConfiguration.java", "license": "gpl-2.0", "size": 8349 }
[ "java.awt.image.BufferedImage", "java.io.IOException", "org.totalboumboum.engine.content.feature.gesture.anime.color.ColorRule" ]
import java.awt.image.BufferedImage; import java.io.IOException; import org.totalboumboum.engine.content.feature.gesture.anime.color.ColorRule;
import java.awt.image.*; import java.io.*; import org.totalboumboum.engine.content.feature.gesture.anime.color.*;
[ "java.awt", "java.io", "org.totalboumboum.engine" ]
java.awt; java.io; org.totalboumboum.engine;
301,659