method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public Date getEndTime() { return this.endTime; }
Date function() { return this.endTime; }
/** * Getter for endTime * @return Date to get */
Getter for endTime
getEndTime
{ "repo_name": "aronparsons/spacewalk", "path": "java/code/src/com/redhat/rhn/domain/audit/XccdfTestResult.java", "license": "gpl-2.0", "size": 6623 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
20,789
public void sendWonder (String path, List<Object> msg) { try { wonder.send(new OSCMessage(path, msg)); } catch (Exception e) { System.out.println(e.getMessage()); } }
void function (String path, List<Object> msg) { try { wonder.send(new OSCMessage(path, msg)); } catch (Exception e) { System.out.println(e.getMessage()); } }
/** * benutzt eine Objekt Liste zum Versenden von OSC Nachrichten * @param path OSC - Path * @param msg Objekt Array mit Nachrichten */
benutzt eine Objekt Liste zum Versenden von OSC Nachrichten
sendWonder
{ "repo_name": "fohl/audioprj", "path": "audioworkspace/OSCControllerServer/src/osccontroller/OSCSender.java", "license": "gpl-2.0", "size": 1372 }
[ "com.illposed.osc.OSCMessage", "java.util.List" ]
import com.illposed.osc.OSCMessage; import java.util.List;
import com.illposed.osc.*; import java.util.*;
[ "com.illposed.osc", "java.util" ]
com.illposed.osc; java.util;
2,761,951
public Packer setYTopRelative(boolean how) { if (how == true) gc.gridy = GridBagConstraints.RELATIVE; else gc.gridy = 0; setConstraints(comp, gc); return this; }
Packer function(boolean how) { if (how == true) gc.gridy = GridBagConstraints.RELATIVE; else gc.gridy = 0; setConstraints(comp, gc); return this; }
/** * Add gridy=RELATIVE to the constraints for the current component if how == * true 0 it if false. */
Add gridy=RELATIVE to the constraints for the current component if how == true 0 it if false
setYTopRelative
{ "repo_name": "anandswarupv/DataCleaner", "path": "desktop/ui/src/main/java/org/datacleaner/widgets/tabs/Packer.java", "license": "lgpl-3.0", "size": 22145 }
[ "java.awt.GridBagConstraints" ]
import java.awt.GridBagConstraints;
import java.awt.*;
[ "java.awt" ]
java.awt;
353,450
@ApiModelProperty(required = true, value = "") public V1JobTemplateSpec getJobTemplate() { return jobTemplate; }
@ApiModelProperty(required = true, value = "") V1JobTemplateSpec function() { return jobTemplate; }
/** * Get jobTemplate * * @return jobTemplate */
Get jobTemplate
getJobTemplate
{ "repo_name": "kubernetes-client/java", "path": "kubernetes/src/main/java/io/kubernetes/client/openapi/models/V1CronJobSpec.java", "license": "apache-2.0", "size": 12378 }
[ "io.swagger.annotations.ApiModelProperty" ]
import io.swagger.annotations.ApiModelProperty;
import io.swagger.annotations.*;
[ "io.swagger.annotations" ]
io.swagger.annotations;
2,703,875
public void testGetNextNoDup() throws Throwable { try { initEnv(true); Hashtable dataMap = new Hashtable(); createRandomDuplicateData(dataMap, false);
void function() throws Throwable { try { initEnv(true); Hashtable dataMap = new Hashtable(); createRandomDuplicateData(dataMap, false);
/** * Create a bunch of random duplicate data. Iterate over it using * getNextNoDup until the end of the top level set. Verify that * ascending order is maintained and that we reach see the proper * number of top-level keys. */
Create a bunch of random duplicate data. Iterate over it using getNextNoDup until the end of the top level set. Verify that ascending order is maintained and that we reach see the proper number of top-level keys
testGetNextNoDup
{ "repo_name": "nologic/nabs", "path": "client/trunk/shared/libraries/je-3.2.44/test/com/sleepycat/je/dbi/DbCursorDuplicateTest.java", "license": "gpl-2.0", "size": 32905 }
[ "java.util.Hashtable" ]
import java.util.Hashtable;
import java.util.*;
[ "java.util" ]
java.util;
195,022
mouseStillPressedThrottle.submitOnUiThread(() -> { if (WyldCard.getInstance().getMouseManager().isMouseDown()) { r.run(); } }); }
mouseStillPressedThrottle.submitOnUiThread(() -> { if (WyldCard.getInstance().getMouseManager().isMouseDown()) { r.run(); } }); }
/** * Invoke when {@link java.awt.event.MouseListener#mousePressed(MouseEvent)} event occurs, and the given * {@link Runnable} will be fired if the mouse remains down after a given interval. Has no effect if the user * releases the mouse early. * * @param r The action to take if the mouse remains down. */
Invoke when <code>java.awt.event.MouseListener#mousePressed(MouseEvent)</code> event occurs, and the given <code>Runnable</code> will be fired if the mouse remains down after a given interval. Has no effect if the user releases the mouse early
then
{ "repo_name": "defano/hypertalk-java", "path": "src/main/java/com/defano/wyldcard/awt/MouseStillDown.java", "license": "mit", "size": 1022 }
[ "com.defano.wyldcard.WyldCard" ]
import com.defano.wyldcard.WyldCard;
import com.defano.wyldcard.*;
[ "com.defano.wyldcard" ]
com.defano.wyldcard;
2,377,955
private boolean onSuggestionsKey(View v, int keyCode, KeyEvent event) { // guard against possible race conditions (late arrival after dismiss) if (mSearchable == null) { return false; } if (mSuggestionsAdapter == null) { return false; } if (event.getAction() == KeyEvent.ACTION_DOWN && KeyEventCompat.hasNoModifiers(event)) { // First, check for enter or search (both of which we'll treat as a // "click") if (keyCode == KeyEvent.KEYCODE_ENTER || keyCode == KeyEvent.KEYCODE_SEARCH || keyCode == KeyEvent.KEYCODE_TAB) { int position = mQueryTextView.getListSelection(); return onItemClicked(position, KeyEvent.KEYCODE_UNKNOWN, null); } // Next, check for left/right moves, which we use to "return" the // user to the edit view if (keyCode == KeyEvent.KEYCODE_DPAD_LEFT || keyCode == KeyEvent.KEYCODE_DPAD_RIGHT) { // give "focus" to text editor, with cursor at the beginning if // left key, at end if right key // TODO: Reverse left/right for right-to-left languages, e.g. // Arabic int selPoint = (keyCode == KeyEvent.KEYCODE_DPAD_LEFT) ? 
0 : mQueryTextView .length(); mQueryTextView.setSelection(selPoint); mQueryTextView.setListSelection(0); mQueryTextView.clearListSelection(); ensureImeVisible(mQueryTextView, true); return true; } // Next, check for an "up and out" move if (keyCode == KeyEvent.KEYCODE_DPAD_UP && 0 == mQueryTextView.getListSelection()) { // TODO: restoreUserQuery(); // let ACTV complete the move return false; } // Next, check for an "action key" // TODO SearchableInfo.ActionKeyInfo actionKey = mSearchable.findActionKey(keyCode); // TODO if ((actionKey != null) // TODO && ((actionKey.getSuggestActionMsg() != null) || (actionKey // TODO .getSuggestActionMsgColumn() != null))) { // TODO // launch suggestion using action key column // TODO int position = mQueryTextView.getListSelection(); // TODO if (position != ListView.INVALID_POSITION) { // TODO Cursor c = mSuggestionsAdapter.getCursor(); // TODO if (c.moveToPosition(position)) { // TODO final String actionMsg = getActionKeyMessage(c, actionKey); // TODO if (actionMsg != null && (actionMsg.length() > 0)) { // TODO return onItemClicked(position, keyCode, actionMsg); // TODO } // TODO } // TODO } // TODO } } return false; } // TODO private static String getActionKeyMessage(Cursor c, SearchableInfo.ActionKeyInfo actionKey) { // TODO String result = null; // TODO // check first in the cursor data, for a suggestion-specific message // TODO final String column = actionKey.getSuggestActionMsgColumn(); // TODO if (column != null) { // TODO result = SuggestionsAdapter.getColumnString(c, column); // TODO } // TODO // If the cursor didn't give us a message, see if there's a single // TODO // message defined // TODO // for the actionkey (for all suggestions) // TODO if (result == null) { // TODO result = actionKey.getSuggestActionMsg(); // TODO } // TODO return result; // TODO }
boolean function(View v, int keyCode, KeyEvent event) { if (mSearchable == null) { return false; } if (mSuggestionsAdapter == null) { return false; } if (event.getAction() == KeyEvent.ACTION_DOWN && KeyEventCompat.hasNoModifiers(event)) { if (keyCode == KeyEvent.KEYCODE_ENTER keyCode == KeyEvent.KEYCODE_SEARCH keyCode == KeyEvent.KEYCODE_TAB) { int position = mQueryTextView.getListSelection(); return onItemClicked(position, KeyEvent.KEYCODE_UNKNOWN, null); } if (keyCode == KeyEvent.KEYCODE_DPAD_LEFT keyCode == KeyEvent.KEYCODE_DPAD_RIGHT) { int selPoint = (keyCode == KeyEvent.KEYCODE_DPAD_LEFT) ? 0 : mQueryTextView .length(); mQueryTextView.setSelection(selPoint); mQueryTextView.setListSelection(0); mQueryTextView.clearListSelection(); ensureImeVisible(mQueryTextView, true); return true; } if (keyCode == KeyEvent.KEYCODE_DPAD_UP && 0 == mQueryTextView.getListSelection()) { return false; } } return false; }
/** * React to the user typing while in the suggestions list. First, check for * action keys. If not handled, try refocusing regular characters into the * EditText. */
React to the user typing while in the suggestions list. First, check for action keys. If not handled, try refocusing regular characters into the EditText
onSuggestionsKey
{ "repo_name": "imaeses/k-9", "path": "plugins/ActionBarSherlock/library/src/com/actionbarsherlock/widget/SearchView.java", "license": "bsd-3-clause", "size": 71173 }
[ "android.support.v4.view.KeyEventCompat", "android.view.KeyEvent", "android.view.View" ]
import android.support.v4.view.KeyEventCompat; import android.view.KeyEvent; import android.view.View;
import android.support.v4.view.*; import android.view.*;
[ "android.support", "android.view" ]
android.support; android.view;
2,051,939
public PLPInfo getProductLineProject() { return plp; }
PLPInfo function() { return plp; }
/** * Returns the {@link PLPInfo} which will be configured by this combo box and which is used for data generation. * @return The {@link PLPInfo} which will be configured by this combo box and which is used for data generation */
Returns the <code>PLPInfo</code> which will be configured by this combo box and which is used for data generation
getProductLineProject
{ "repo_name": "SSEHUB/EASyProducer", "path": "Plugins/EASy-Producer/EASy-Producer.UI/src/net/ssehub/easy/producer/ui/productline_editor/components/AbstractComboBox.java", "license": "apache-2.0", "size": 9606 }
[ "net.ssehub.easy.producer.core.mgmt.PLPInfo" ]
import net.ssehub.easy.producer.core.mgmt.PLPInfo;
import net.ssehub.easy.producer.core.mgmt.*;
[ "net.ssehub.easy" ]
net.ssehub.easy;
349,400
@Override public HttpRequestBase getMapMethod(String wmsUrl, String layer, String imageMimeType, String srs, double westBoundLongitude, double southBoundLatitude, double eastBoundLongitude, double northBoundLatitude, int width, int height, String styles, String styleBody) throws URISyntaxException { List<NameValuePair> existingParam = this.extractQueryParams(wmsUrl); //preserve any existing query params existingParam.add(new BasicNameValuePair("service", "WMS")); existingParam.add(new BasicNameValuePair("request", "GetMap")); existingParam.add(new BasicNameValuePair("version", "1.1.1")); existingParam.add(new BasicNameValuePair("format", imageMimeType)); existingParam.add(new BasicNameValuePair("transparent", "TRUE")); existingParam.add(new BasicNameValuePair("layers", layer)); if (styles != null) { existingParam.add(new BasicNameValuePair("styles", styles)); } //This is a geoserver specific URL param if (styleBody != null) { existingParam.add(new BasicNameValuePair("sld_body", styleBody)); } existingParam.add(new BasicNameValuePair("srs", srs)); existingParam.add(new BasicNameValuePair("bbox", String.format("%1$s,%2$s,%3$s,%4$s", westBoundLongitude, southBoundLatitude, eastBoundLongitude, northBoundLatitude))); existingParam.add(new BasicNameValuePair("width", Integer.toString(width))); existingParam.add(new BasicNameValuePair("height", Integer.toString(height))); HttpGet method = new HttpGet(wmsUrl); method.setURI(HttpUtil.parseURI(wmsUrl, existingParam)); return method; }
HttpRequestBase function(String wmsUrl, String layer, String imageMimeType, String srs, double westBoundLongitude, double southBoundLatitude, double eastBoundLongitude, double northBoundLatitude, int width, int height, String styles, String styleBody) throws URISyntaxException { List<NameValuePair> existingParam = this.extractQueryParams(wmsUrl); existingParam.add(new BasicNameValuePair(STR, "WMS")); existingParam.add(new BasicNameValuePair(STR, STR)); existingParam.add(new BasicNameValuePair(STR, "1.1.1")); existingParam.add(new BasicNameValuePair(STR, imageMimeType)); existingParam.add(new BasicNameValuePair(STR, "TRUE")); existingParam.add(new BasicNameValuePair(STR, layer)); if (styles != null) { existingParam.add(new BasicNameValuePair(STR, styles)); } if (styleBody != null) { existingParam.add(new BasicNameValuePair(STR, styleBody)); } existingParam.add(new BasicNameValuePair("srs", srs)); existingParam.add(new BasicNameValuePair("bbox", String.format(STR, westBoundLongitude, southBoundLatitude, eastBoundLongitude, northBoundLatitude))); existingParam.add(new BasicNameValuePair("width", Integer.toString(width))); existingParam.add(new BasicNameValuePair(STR, Integer.toString(height))); HttpGet method = new HttpGet(wmsUrl); method.setURI(HttpUtil.parseURI(wmsUrl, existingParam)); return method; }
/** * Generates a WMS request for downloading part of a map layer as an image * * @param wmsUrl The WMS endpoint (will have any existing query parameters preserved) * @param layer The name of the layer to download * @param imageMimeType The format of the image to download as * @param srs The spatial reference system for the bounding box * @param westBoundLongitude The west bound longitude of the bounding box * @param southBoundLatitude The south bound latitude of the bounding box * @param eastBoundLongitude The east bound longitude of the bounding box * @param northBoundLatitude The north bound latitude of the bounding box * @param width The desired output image width in pixels * @param height The desired output image height in pixels * @param styles [Optional] What style name should be applied * @param styleBody [Optional] Only valid for Geoserver WMS, a style sheet definition * @return * @throws URISyntaxException */
Generates a WMS request for downloading part of a map layer as an image
getMapMethod
{ "repo_name": "GeoscienceAustralia/Auscope-portal-core", "path": "src/main/java/org/auscope/portal/core/services/methodmakers/WMSMethodMaker.java", "license": "lgpl-3.0", "size": 25790 }
[ "java.net.URISyntaxException", "java.util.List", "org.apache.http.NameValuePair", "org.apache.http.client.methods.HttpGet", "org.apache.http.client.methods.HttpRequestBase", "org.apache.http.message.BasicNameValuePair", "org.auscope.portal.core.util.HttpUtil" ]
import java.net.URISyntaxException; import java.util.List; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.message.BasicNameValuePair; import org.auscope.portal.core.util.HttpUtil;
import java.net.*; import java.util.*; import org.apache.http.*; import org.apache.http.client.methods.*; import org.apache.http.message.*; import org.auscope.portal.core.util.*;
[ "java.net", "java.util", "org.apache.http", "org.auscope.portal" ]
java.net; java.util; org.apache.http; org.auscope.portal;
2,458,462
public void setPath(Path path) { if (path == null) throw new NullPointerException(L.l("'path' may not be null for resin:properties")); _path = path; }
void function(Path path) { if (path == null) throw new NullPointerException(L.l(STR)); _path = path; }
/** * Sets the resin:properties. */
Sets the resin:properties
setPath
{ "repo_name": "dwango/quercus", "path": "src/main/java/com/caucho/config/core/ResinProperties.java", "license": "gpl-2.0", "size": 5278 }
[ "com.caucho.vfs.Path" ]
import com.caucho.vfs.Path;
import com.caucho.vfs.*;
[ "com.caucho.vfs" ]
com.caucho.vfs;
1,396,128
public Date getInvalidDate() { return this.invalidDate; }
Date function() { return this.invalidDate; }
/** * <p>Getter for the field <code>invalidDate</code>.</p> * * @return a {@link java.util.Date} object. */
Getter for the field <code>invalidDate</code>
getInvalidDate
{ "repo_name": "NotFound403/WePay", "path": "src/main/java/cn/felord/wepay/ali/sdk/api/domain/ArrangementOpenQueryResultVO.java", "license": "apache-2.0", "size": 3196 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
725,145
EnvironmentHolder environment; try (StripedLocks.ReadLock lock = stripedLocks.acquireReadLock(workspaceId)) { environment = environments.get(workspaceId); if (environment == null) { throw new EnvironmentNotRunningException("Environment with ID '" + workspaceId + "' is not found"); } return new ArrayList<>(environment.machines); } }
EnvironmentHolder environment; try (StripedLocks.ReadLock lock = stripedLocks.acquireReadLock(workspaceId)) { environment = environments.get(workspaceId); if (environment == null) { throw new EnvironmentNotRunningException(STR + workspaceId + STR); } return new ArrayList<>(environment.machines); } }
/** * Returns all machines from environment of specific workspace. * * @param workspaceId * ID of workspace that owns environment machines * @return list of machines * @throws EnvironmentNotRunningException * if environment is not running */
Returns all machines from environment of specific workspace
getMachines
{ "repo_name": "slemeur/che", "path": "wsmaster/che-core-api-workspace/src/main/java/org/eclipse/che/api/environment/server/CheEnvironmentEngine.java", "license": "epl-1.0", "size": 46041 }
[ "java.util.ArrayList", "org.eclipse.che.api.environment.server.exception.EnvironmentNotRunningException", "org.eclipse.che.api.workspace.server.StripedLocks" ]
import java.util.ArrayList; import org.eclipse.che.api.environment.server.exception.EnvironmentNotRunningException; import org.eclipse.che.api.workspace.server.StripedLocks;
import java.util.*; import org.eclipse.che.api.environment.server.exception.*; import org.eclipse.che.api.workspace.server.*;
[ "java.util", "org.eclipse.che" ]
java.util; org.eclipse.che;
2,382,937
EOperation getUser__UpdateUserName__String();
EOperation getUser__UpdateUserName__String();
/** * Returns the meta object for the '{@link LRBAC.User#UpdateUserName(java.lang.String) <em>Update User Name</em>}' operation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the '<em>Update User Name</em>' operation. * @see LRBAC.User#UpdateUserName(java.lang.String) * @generated */
Returns the meta object for the '<code>LRBAC.User#UpdateUserName(java.lang.String) Update User Name</code>' operation.
getUser__UpdateUserName__String
{ "repo_name": "arnobl/kompren", "path": "kompren-examples/LRBAC.model/src/LRBAC/LRBACPackage.java", "license": "epl-1.0", "size": 47810 }
[ "org.eclipse.emf.ecore.EOperation" ]
import org.eclipse.emf.ecore.EOperation;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,761,839
public boolean isTerminal() { return successorListOf(protocol, this).isEmpty(); }
boolean function() { return successorListOf(protocol, this).isEmpty(); }
/** * Checks if is terminal. * * @return true, if is terminal */
Checks if is terminal
isTerminal
{ "repo_name": "gama-platform/gama", "path": "msi.gaml.extensions.fipa/src/msi/gaml/extensions/fipa/ProtocolNode.java", "license": "gpl-3.0", "size": 1901 }
[ "org.jgrapht.Graphs" ]
import org.jgrapht.Graphs;
import org.jgrapht.*;
[ "org.jgrapht" ]
org.jgrapht;
859,215
public UserGroupDTO createUserGroupDto(final Group userGroup, Set<TenantEntity> users, final Set<AccessPolicySummaryEntity> accessPolicies) { if (userGroup == null) { return null; } // convert to access policies to handle backward compatibility due to incorrect // type in the UserGroupDTO final Set<AccessPolicyEntity> policies = accessPolicies.stream().map(summaryEntity -> { final AccessPolicyDTO policy = new AccessPolicyDTO(); policy.setId(summaryEntity.getId()); if (summaryEntity.getPermissions().getCanRead()) { final AccessPolicySummaryDTO summary = summaryEntity.getComponent(); policy.setResource(summary.getResource()); policy.setAction(summary.getAction()); policy.setConfigurable(summary.getConfigurable()); policy.setComponentReference(summary.getComponentReference()); } return entityFactory.createAccessPolicyEntity(policy, summaryEntity.getRevision(), summaryEntity.getPermissions()); }).collect(Collectors.toSet()); final UserGroupDTO dto = new UserGroupDTO(); dto.setId(userGroup.getIdentifier()); dto.setUsers(users); dto.setIdentity(userGroup.getName()); dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup)); dto.setAccessPolicies(policies); return dto; }
UserGroupDTO function(final Group userGroup, Set<TenantEntity> users, final Set<AccessPolicySummaryEntity> accessPolicies) { if (userGroup == null) { return null; } final Set<AccessPolicyEntity> policies = accessPolicies.stream().map(summaryEntity -> { final AccessPolicyDTO policy = new AccessPolicyDTO(); policy.setId(summaryEntity.getId()); if (summaryEntity.getPermissions().getCanRead()) { final AccessPolicySummaryDTO summary = summaryEntity.getComponent(); policy.setResource(summary.getResource()); policy.setAction(summary.getAction()); policy.setConfigurable(summary.getConfigurable()); policy.setComponentReference(summary.getComponentReference()); } return entityFactory.createAccessPolicyEntity(policy, summaryEntity.getRevision(), summaryEntity.getPermissions()); }).collect(Collectors.toSet()); final UserGroupDTO dto = new UserGroupDTO(); dto.setId(userGroup.getIdentifier()); dto.setUsers(users); dto.setIdentity(userGroup.getName()); dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup)); dto.setAccessPolicies(policies); return dto; }
/** * Creates a {@link UserGroupDTO} from the specified {@link Group}. * * @param userGroup user group * @return dto */
Creates a <code>UserGroupDTO</code> from the specified <code>Group</code>
createUserGroupDto
{ "repo_name": "MikeThomsen/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java", "license": "apache-2.0", "size": 232154 }
[ "java.util.Set", "java.util.stream.Collectors", "org.apache.nifi.authorization.AuthorizerCapabilityDetection", "org.apache.nifi.authorization.Group", "org.apache.nifi.web.api.entity.AccessPolicyEntity", "org.apache.nifi.web.api.entity.AccessPolicySummaryEntity", "org.apache.nifi.web.api.entity.TenantEntity" ]
import java.util.Set; import java.util.stream.Collectors; import org.apache.nifi.authorization.AuthorizerCapabilityDetection; import org.apache.nifi.authorization.Group; import org.apache.nifi.web.api.entity.AccessPolicyEntity; import org.apache.nifi.web.api.entity.AccessPolicySummaryEntity; import org.apache.nifi.web.api.entity.TenantEntity;
import java.util.*; import java.util.stream.*; import org.apache.nifi.authorization.*; import org.apache.nifi.web.api.entity.*;
[ "java.util", "org.apache.nifi" ]
java.util; org.apache.nifi;
2,552,779
final void growArray(boolean locked) { Object[] newA = null; try { Object[] oldA; int oldSize, newSize; if ((oldA = array) != null && (oldSize = oldA.length) > 0 && (newSize = oldSize << 1) <= MAXIMUM_QUEUE_CAPACITY && newSize > 0) { try { newA = new Object[newSize]; } catch (OutOfMemoryError ex) { } if (newA != null) { // poll from old array, push to new int oldMask = oldSize - 1, newMask = newSize - 1; for (int s = top - 1, k = oldMask; k >= 0; --k) { X x = (X) QA.getAndSet(oldA, s & oldMask, null); if (x != null) newA[s-- & newMask] = x; else break; } array = newA; VarHandle.releaseFence(); } } } finally { if (locked) phase = 0; } if (newA == null) throw new RejectedExecutionException("Queue capacity exceeded"); }
final void growArray(boolean locked) { Object[] newA = null; try { Object[] oldA; int oldSize, newSize; if ((oldA = array) != null && (oldSize = oldA.length) > 0 && (newSize = oldSize << 1) <= MAXIMUM_QUEUE_CAPACITY && newSize > 0) { try { newA = new Object[newSize]; } catch (OutOfMemoryError ex) { } if (newA != null) { int oldMask = oldSize - 1, newMask = newSize - 1; for (int s = top - 1, k = oldMask; k >= 0; --k) { X x = (X) QA.getAndSet(oldA, s & oldMask, null); if (x != null) newA[s-- & newMask] = x; else break; } array = newA; VarHandle.releaseFence(); } } } finally { if (locked) phase = 0; } if (newA == null) throw new RejectedExecutionException(STR); }
/** * Doubles the capacity of array. Call either by owner or with * lock held -- it is OK for base, but not top, to move while * resizings are in progress. */
Doubles the capacity of array. Call either by owner or with lock held -- it is OK for base, but not top, to move while resizings are in progress
growArray
{ "repo_name": "automenta/narchy", "path": "util/src/main/java/jcog/exe/WorkQueue.java", "license": "agpl-3.0", "size": 15023 }
[ "java.lang.invoke.VarHandle", "java.util.concurrent.RejectedExecutionException" ]
import java.lang.invoke.VarHandle; import java.util.concurrent.RejectedExecutionException;
import java.lang.invoke.*; import java.util.concurrent.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
691,593
public static TaggedComponent extract(Any any) { try { return ((TaggedComponentHolder) any.extract_Streamable()).value; } catch (ClassCastException cex) { BAD_OPERATION bad = new BAD_OPERATION("TaggedComponent expected"); bad.minor = Minor.Any; bad.initCause(cex); throw bad; } }
static TaggedComponent function(Any any) { try { return ((TaggedComponentHolder) any.extract_Streamable()).value; } catch (ClassCastException cex) { BAD_OPERATION bad = new BAD_OPERATION(STR); bad.minor = Minor.Any; bad.initCause(cex); throw bad; } }
/** * Extract the TaggedComponent from given Any. This method uses the * TaggedComponentHolder. * * @throws BAD_OPERATION if the passed Any does not contain TaggedComponent. */
Extract the TaggedComponent from given Any. This method uses the TaggedComponentHolder
extract
{ "repo_name": "shaotuanchen/sunflower_exp", "path": "tools/source/gcc-4.2.4/libjava/classpath/org/omg/IOP/TaggedComponentHelper.java", "license": "bsd-3-clause", "size": 5712 }
[ "org.omg.CORBA" ]
import org.omg.CORBA;
import org.omg.*;
[ "org.omg" ]
org.omg;
301,272
public void logPositionAndCleanOldLogs(Path log, String id, long position, boolean queueRecovered, boolean holdLogInZK) { String fileName = log.getName(); this.replicationQueues.setLogPosition(id, fileName, position); if (holdLogInZK) { return; } cleanOldLogs(fileName, id, queueRecovered); }
void function(Path log, String id, long position, boolean queueRecovered, boolean holdLogInZK) { String fileName = log.getName(); this.replicationQueues.setLogPosition(id, fileName, position); if (holdLogInZK) { return; } cleanOldLogs(fileName, id, queueRecovered); }
/** * Provide the id of the peer and a log key and this method will figure which * hlog it belongs to and will log, for this region server, the current * position. It will also clean old logs from the queue. * @param log Path to the log currently being replicated from * replication status in zookeeper. It will also delete older entries. * @param id id of the peer cluster * @param position current location in the log * @param queueRecovered indicates if this queue comes from another region server * @param holdLogInZK if true then the log is retained in ZK */
Provide the id of the peer and a log key and this method will figure which hlog it belongs to and will log, for this region server, the current position. It will also clean old logs from the queue
logPositionAndCleanOldLogs
{ "repo_name": "tobegit3hub/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java", "license": "apache-2.0", "size": 20382 }
[ "org.apache.hadoop.fs.Path" ]
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,137,496
public Object clone() { OrderedJSONObject clone = (OrderedJSONObject)super.clone(); Iterator order = clone.getOrder(); ArrayList orderList = new ArrayList(); while (order.hasNext()) { orderList.add(order.next()); clone.order = orderList; } return clone; }
Object function() { OrderedJSONObject clone = (OrderedJSONObject)super.clone(); Iterator order = clone.getOrder(); ArrayList orderList = new ArrayList(); while (order.hasNext()) { orderList.add(order.next()); clone.order = orderList; } return clone; }
/** * Returns a shallow copy of this HashMap instance: the keys and values themselves are not cloned. */
Returns a shallow copy of this HashMap instance: the keys and values themselves are not cloned
clone
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.json4j/src/com/ibm/json/java/OrderedJSONObject.java", "license": "epl-1.0", "size": 6876 }
[ "java.util.ArrayList", "java.util.Iterator" ]
import java.util.ArrayList; import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
782,783
private void analyzeShowLocks(ASTNode ast) throws SemanticException { String tableName = null; HashMap<String, String> partSpec = null; boolean isExtended = false; if (ast.getChildCount() >= 1) { // table for which show locks is being executed for (int i = 0; i < ast.getChildCount(); i++) { ASTNode child = (ASTNode) ast.getChild(i); if (child.getType() == HiveParser.TOK_TABTYPE) { ASTNode tableTypeExpr = child; tableName = QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0)); // get partition metadata if partition specified if (tableTypeExpr.getChildCount() == 2) { ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1); partSpec = getPartSpec(partspec); } } else if (child.getType() == HiveParser.KW_EXTENDED) { isExtended = true; } } } HiveTxnManager txnManager = null; try { txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); } catch (LockException e) { throw new SemanticException(e.getMessage()); } ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), tableName, partSpec, isExtended, txnManager.useNewShowLocksFormat()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc), conf)); setFetchTask(createFetchTask(showLocksDesc.getSchema())); // Need to initialize the lock manager ctx.setNeedLockMgr(true); }
void function(ASTNode ast) throws SemanticException { String tableName = null; HashMap<String, String> partSpec = null; boolean isExtended = false; if (ast.getChildCount() >= 1) { for (int i = 0; i < ast.getChildCount(); i++) { ASTNode child = (ASTNode) ast.getChild(i); if (child.getType() == HiveParser.TOK_TABTYPE) { ASTNode tableTypeExpr = child; tableName = QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0)); if (tableTypeExpr.getChildCount() == 2) { ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1); partSpec = getPartSpec(partspec); } } else if (child.getType() == HiveParser.KW_EXTENDED) { isExtended = true; } } } HiveTxnManager txnManager = null; try { txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); } catch (LockException e) { throw new SemanticException(e.getMessage()); } ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), tableName, partSpec, isExtended, txnManager.useNewShowLocksFormat()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc), conf)); setFetchTask(createFetchTask(showLocksDesc.getSchema())); ctx.setNeedLockMgr(true); }
/** * Add the task according to the parsed command tree. This is used for the CLI * command "SHOW LOCKS;". * * @param ast * The parsed command tree. * @throws SemanticException * Parsing failed */
Add the task according to the parsed command tree. This is used for the CLI command "SHOW LOCKS;"
analyzeShowLocks
{ "repo_name": "cschenyuan/hive-hack", "path": "ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java", "license": "apache-2.0", "size": 137225 }
[ "java.util.HashMap", "org.apache.hadoop.hive.ql.exec.TaskFactory", "org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager", "org.apache.hadoop.hive.ql.lockmgr.LockException", "org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory", "org.apache.hadoop.hive.ql.plan.DDLWork", "org.apache.hadoop.hive.ql.plan.ShowLocksDesc" ]
import java.util.HashMap; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; import org.apache.hadoop.hive.ql.lockmgr.LockException; import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
import java.util.*; import org.apache.hadoop.hive.ql.exec.*; import org.apache.hadoop.hive.ql.lockmgr.*; import org.apache.hadoop.hive.ql.plan.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,644,838
public static JSONObject clone(final JSONObject src) { return new JSONObject(src, CollectionUtils.jsonArrayToArray(src.names(), String[].class)); } /** * Builds pagination request with the specified path. * * @param path the specified path, "/{page}/{pageSize}/{windowSize}" * @return pagination request json object, for example, * <pre> * { * "paginationCurrentPageNum": int, * "paginationPageSize": int, * "paginationWindowSize": int * }
static JSONObject function(final JSONObject src) { return new JSONObject(src, CollectionUtils.jsonArrayToArray(src.names(), String[].class)); } /** * Builds pagination request with the specified path. * * @param path the specified path, STR * @return pagination request json object, for example, * <pre> * { * STR: int, * STR: int, * STR: int * }
/** * Clones a JSON object from the specified source object. * * @param src the specified source object * @return cloned object */
Clones a JSON object from the specified source object
clone
{ "repo_name": "b3log/b3log-solo", "path": "src/main/java/org/b3log/solo/util/Solos.java", "license": "apache-2.0", "size": 19498 }
[ "org.b3log.latke.util.CollectionUtils", "org.json.JSONObject" ]
import org.b3log.latke.util.CollectionUtils; import org.json.JSONObject;
import org.b3log.latke.util.*; import org.json.*;
[ "org.b3log.latke", "org.json" ]
org.b3log.latke; org.json;
2,469,063
try { JSONObject json = new JSONObject(); eslFixture.addToJSON(json); JSONArray arr = new JSONArray(); arr.put(TEST_FILENODEREF_STRING); arr.put("workspace://SpacesStore/12345678-1234-1234-1234-123456789012"); json.put(AlfrescoJSONKeys.NODEREFS, arr); mockRequest.setRequestURI(REQUEST_URI_FIXTURE); mockRequest.addParameter("alf_method", "delete"); mockRequest.setContent(json.toString().getBytes()); springAuditFilterBean.doFilter(mockRequest, mockResponse, mockChain); // Now check that two events have been created. assertEquals(2, countRowsInTable(AUDIT_TABLE_NAME)); } catch (Exception e) { fail(); } }
try { JSONObject json = new JSONObject(); eslFixture.addToJSON(json); JSONArray arr = new JSONArray(); arr.put(TEST_FILENODEREF_STRING); arr.put(STRalf_methodSTRdelete"); mockRequest.setContent(json.toString().getBytes()); springAuditFilterBean.doFilter(mockRequest, mockResponse, mockChain); assertEquals(2, countRowsInTable(AUDIT_TABLE_NAME)); } catch (Exception e) { fail(); } }
/** * Test that multi-document deletion ends up with the correct number of auditable events. */
Test that multi-document deletion ends up with the correct number of auditable events
testSuccessfulDeletion
{ "repo_name": "surevine/alfresco-auditing", "path": "src/test/java/com/surevine/alfresco/audit/integration/MultiDocumentDeleteTest.java", "license": "gpl-2.0", "size": 2666 }
[ "org.json.JSONArray", "org.json.JSONObject" ]
import org.json.JSONArray; import org.json.JSONObject;
import org.json.*;
[ "org.json" ]
org.json;
1,235,943
public Builder withLimits(JLimit... limits) { Objects.requireNonNull(limits); this.limits = Arrays.asList(limits); return this; }
Builder function(JLimit... limits) { Objects.requireNonNull(limits); this.limits = Arrays.asList(limits); return this; }
/** * Optional: Set limits for a test. * * @param limits array of {@link JLimit}. */
Optional: Set limits for a test
withLimits
{ "repo_name": "SokolAndrey/jagger", "path": "chassis/core/src/main/java/com/griddynamics/jagger/user/test/configurations/JLoadTest.java", "license": "apache-2.0", "size": 5976 }
[ "com.griddynamics.jagger.user.test.configurations.limits.JLimit", "java.util.Arrays", "java.util.Objects" ]
import com.griddynamics.jagger.user.test.configurations.limits.JLimit; import java.util.Arrays; import java.util.Objects;
import com.griddynamics.jagger.user.test.configurations.limits.*; import java.util.*;
[ "com.griddynamics.jagger", "java.util" ]
com.griddynamics.jagger; java.util;
749,797
private void assertUserCannotAccessNotebook( CreatedControlledGcpAiNotebookInstanceResult createdNotebook, TestUserSpecification testUser) { try { if (NotebookUtils.userHasProxyAccess(createdNotebook, testUser, projectId)) { throw new RuntimeException( String.format( "User %s is still able to access notebook %s", testUser.userEmail, createdNotebook.getAiNotebookInstance().getMetadata().getResourceId())); } } catch (GeneralSecurityException | IOException e) { throw new RuntimeException("Error checking notebook access", e); } }
void function( CreatedControlledGcpAiNotebookInstanceResult createdNotebook, TestUserSpecification testUser) { try { if (NotebookUtils.userHasProxyAccess(createdNotebook, testUser, projectId)) { throw new RuntimeException( String.format( STR, testUser.userEmail, createdNotebook.getAiNotebookInstance().getMetadata().getResourceId())); } } catch (GeneralSecurityException IOException e) { throw new RuntimeException(STR, e); } }
/** * An assertion that the given user cannot access the given notebook. This is pulled into a * separate function to make retrying simpler. */
An assertion that the given user cannot access the given notebook. This is pulled into a separate function to make retrying simpler
assertUserCannotAccessNotebook
{ "repo_name": "DataBiosphere/terra-workspace-manager", "path": "integration/src/main/java/scripts/testscripts/RemoveUser.java", "license": "bsd-3-clause", "size": 10894 }
[ "bio.terra.testrunner.runner.config.TestUserSpecification", "bio.terra.workspace.model.CreatedControlledGcpAiNotebookInstanceResult", "java.io.IOException", "java.security.GeneralSecurityException" ]
import bio.terra.testrunner.runner.config.TestUserSpecification; import bio.terra.workspace.model.CreatedControlledGcpAiNotebookInstanceResult; import java.io.IOException; import java.security.GeneralSecurityException;
import bio.terra.testrunner.runner.config.*; import bio.terra.workspace.model.*; import java.io.*; import java.security.*;
[ "bio.terra.testrunner", "bio.terra.workspace", "java.io", "java.security" ]
bio.terra.testrunner; bio.terra.workspace; java.io; java.security;
286,934
private void showAlert(int titleId, CharSequence text){ final AlertDialog dialog = new AlertDialog.Builder(this) .setTitle(getApplicationContext().getString(titleId)) .setMessage(text) .setNeutralButton(android.R.string.ok, null).create(); dialog.show(); ((TextView)dialog.findViewById(android.R.id.message)).setMovementMethod(LinkMovementMethod.getInstance()); }
void function(int titleId, CharSequence text){ final AlertDialog dialog = new AlertDialog.Builder(this) .setTitle(getApplicationContext().getString(titleId)) .setMessage(text) .setNeutralButton(android.R.string.ok, null).create(); dialog.show(); ((TextView)dialog.findViewById(android.R.id.message)).setMovementMethod(LinkMovementMethod.getInstance()); }
/** * Shows an alert dialog with a given string * @param titleId Id of the title resource * @param text String of the message */
Shows an alert dialog with a given string
showAlert
{ "repo_name": "RobbiNespu/malariapp", "path": "app/src/main/java/org/eyeseetea/malariacare/BaseActivity.java", "license": "gpl-3.0", "size": 8622 }
[ "android.app.AlertDialog", "android.text.method.LinkMovementMethod", "android.widget.TextView" ]
import android.app.AlertDialog; import android.text.method.LinkMovementMethod; import android.widget.TextView;
import android.app.*; import android.text.method.*; import android.widget.*;
[ "android.app", "android.text", "android.widget" ]
android.app; android.text; android.widget;
2,862,902
// Grösse des Screen orientiert sich an der Monitorauflösung shell.setSize(3*Display.getCurrent().getPrimaryMonitor().getClientArea().width /10, 8*Display.getCurrent() .getPrimaryMonitor().getClientArea().height/10); shell.setLocation(0,0); shell.setBackground(this.background); shell.setForeground(this.foreground); shell.setToolTipText(this.ttt); shell.setFont(this.font); // Formlayout anwenden. Damit werden die Bildschirmgruppen flexibel // gruppiert FormLayout formLayout = new FormLayout(); formLayout.marginLeft = 10; formLayout.marginRight = 10; formLayout.marginTop = 5; formLayout.marginBottom = 10; formLayout.spacing = 10; shell.setLayout(formLayout); return null; }
shell.setSize(3*Display.getCurrent().getPrimaryMonitor().getClientArea().width /10, 8*Display.getCurrent() .getPrimaryMonitor().getClientArea().height/10); shell.setLocation(0,0); shell.setBackground(this.background); shell.setForeground(this.foreground); shell.setToolTipText(this.ttt); shell.setFont(this.font); FormLayout formLayout = new FormLayout(); formLayout.marginLeft = 10; formLayout.marginRight = 10; formLayout.marginTop = 5; formLayout.marginBottom = 10; formLayout.spacing = 10; shell.setLayout(formLayout); return null; }
/** * TODO Comment * @param shell - * @param groups - * @param titel - * @return - * @modified - */
TODO Comment
doIt
{ "repo_name": "tfossi/APolGe", "path": "src/tfossi/apolge/io/screenfactory/layout/Layout_SHELL.java", "license": "gpl-3.0", "size": 1926 }
[ "org.eclipse.swt.layout.FormLayout", "org.eclipse.swt.widgets.Display" ]
import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
405,281
public void enterWhen(Guard guard) throws InterruptedException { if (guard.monitor != this) { throw new IllegalMonitorStateException(); } final ReentrantLock lock = this.lock; boolean signalBeforeWaiting = lock.isHeldByCurrentThread(); lock.lockInterruptibly(); boolean satisfied = false; try { if (!guard.isSatisfied()) { await(guard, signalBeforeWaiting); } satisfied = true; } finally { if (!satisfied) { leave(); } } }
void function(Guard guard) throws InterruptedException { if (guard.monitor != this) { throw new IllegalMonitorStateException(); } final ReentrantLock lock = this.lock; boolean signalBeforeWaiting = lock.isHeldByCurrentThread(); lock.lockInterruptibly(); boolean satisfied = false; try { if (!guard.isSatisfied()) { await(guard, signalBeforeWaiting); } satisfied = true; } finally { if (!satisfied) { leave(); } } }
/** * Enters this monitor when the guard is satisfied. Blocks indefinitely, but may be interrupted. * * @throws InterruptedException if interrupted while waiting */
Enters this monitor when the guard is satisfied. Blocks indefinitely, but may be interrupted
enterWhen
{ "repo_name": "trivium-io/trivium-core", "path": "src/io/trivium/dep/com/google/common/util/concurrent/Monitor.java", "license": "apache-2.0", "size": 39175 }
[ "java.util.concurrent.locks.ReentrantLock" ]
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.*;
[ "java.util" ]
java.util;
171,788
@Internal public static FsCompletedCheckpointStorageLocation resolveCheckpointPointer(String checkpointPointer) throws IOException { checkNotNull(checkpointPointer, "checkpointPointer"); checkArgument(!checkpointPointer.isEmpty(), "empty checkpoint pointer"); // check if the pointer is in fact a valid file path final Path path; try { path = new Path(checkpointPointer); } catch (Exception e) { throw new IOException("Checkpoint/savepoint path '" + checkpointPointer + "' is not a valid file URI. " + "Either the pointer path is invalid, or the checkpoint was created by a different state backend."); } // check if the file system can be accessed final FileSystem fs; try { fs = path.getFileSystem(); } catch (IOException e) { throw new IOException("Cannot access file system for checkpoint/savepoint path '" + checkpointPointer + "'.", e); } final FileStatus status; try { status = fs.getFileStatus(path); } catch (FileNotFoundException e) { throw new FileNotFoundException("Cannot find checkpoint or savepoint " + "file/directory '" + checkpointPointer + "' on file system '" + fs.getUri().getScheme() + "'."); } // if we are here, the file / directory exists final Path checkpointDir; final FileStatus metadataFileStatus; // If this is a directory, we need to find the meta data file if (status.isDir()) { checkpointDir = status.getPath(); final Path metadataFilePath = new Path(path, METADATA_FILE_NAME); try { metadataFileStatus = fs.getFileStatus(metadataFilePath); } catch (FileNotFoundException e) { throw new FileNotFoundException("Cannot find meta data file '" + METADATA_FILE_NAME + "' in directory '" + path + "'. 
Please try to load the checkpoint/savepoint " + "directly from the metadata file instead of the directory."); } } else { // this points to a file and we either do no name validation, or // the name is actually correct, so we can return the path metadataFileStatus = status; checkpointDir = status.getPath().getParent(); } final FileStateHandle metaDataFileHandle = new FileStateHandle( metadataFileStatus.getPath(), metadataFileStatus.getLen()); final String pointer = checkpointDir.makeQualified(fs).toString(); return new FsCompletedCheckpointStorageLocation( fs, checkpointDir, metaDataFileHandle, pointer); } // ------------------------------------------------------------------------ // Encoding / Decoding of References // ------------------------------------------------------------------------
static FsCompletedCheckpointStorageLocation function(String checkpointPointer) throws IOException { checkNotNull(checkpointPointer, STR); checkArgument(!checkpointPointer.isEmpty(), STR); final Path path; try { path = new Path(checkpointPointer); } catch (Exception e) { throw new IOException(STR + checkpointPointer + STR + STR); } final FileSystem fs; try { fs = path.getFileSystem(); } catch (IOException e) { throw new IOException(STR + checkpointPointer + "'.", e); } final FileStatus status; try { status = fs.getFileStatus(path); } catch (FileNotFoundException e) { throw new FileNotFoundException(STR + STR + checkpointPointer + STR + fs.getUri().getScheme() + "'."); } final Path checkpointDir; final FileStatus metadataFileStatus; if (status.isDir()) { checkpointDir = status.getPath(); final Path metadataFilePath = new Path(path, METADATA_FILE_NAME); try { metadataFileStatus = fs.getFileStatus(metadataFilePath); } catch (FileNotFoundException e) { throw new FileNotFoundException(STR + METADATA_FILE_NAME + STR + path + STR + STR); } } else { metadataFileStatus = status; checkpointDir = status.getPath().getParent(); } final FileStateHandle metaDataFileHandle = new FileStateHandle( metadataFileStatus.getPath(), metadataFileStatus.getLen()); final String pointer = checkpointDir.makeQualified(fs).toString(); return new FsCompletedCheckpointStorageLocation( fs, checkpointDir, metaDataFileHandle, pointer); }
/** * Takes the given string (representing a pointer to a checkpoint) and resolves it to a file * status for the checkpoint's metadata file. * * @param checkpointPointer The pointer to resolve. * @return A state handle to checkpoint/savepoint's metadata. * * @throws IOException Thrown, if the pointer cannot be resolved, the file system not accessed, or * the pointer points to a location that does not seem to be a checkpoint/savepoint. */
Takes the given string (representing a pointer to a checkpoint) and resolves it to a file status for the checkpoint's metadata file
resolveCheckpointPointer
{ "repo_name": "greghogan/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/state/filesystem/AbstractFsCheckpointStorageAccess.java", "license": "apache-2.0", "size": 12857 }
[ "java.io.FileNotFoundException", "java.io.IOException", "org.apache.flink.core.fs.FileStatus", "org.apache.flink.core.fs.FileSystem", "org.apache.flink.core.fs.Path", "org.apache.flink.util.Preconditions" ]
import java.io.FileNotFoundException; import java.io.IOException; import org.apache.flink.core.fs.FileStatus; import org.apache.flink.core.fs.FileSystem; import org.apache.flink.core.fs.Path; import org.apache.flink.util.Preconditions;
import java.io.*; import org.apache.flink.core.fs.*; import org.apache.flink.util.*;
[ "java.io", "org.apache.flink" ]
java.io; org.apache.flink;
540,218
@Test public void filteredIntent() { MultiPointToSinglePointIntent intent = createFilteredOne(); assertEquals("incorrect id", APPID, intent.appId()); assertEquals("incorrect match", MATCH, intent.selector()); assertEquals("incorrect filtered ingress", FPS1, intent.filteredIngressPoints()); assertEquals("incorrect filtered egress", FP2, intent.filteredEgressPoint()); intent = createAnotherFiltered(); assertEquals("incorrect id", APPID, intent.appId()); assertEquals("incorrect match", MATCH, intent.selector()); assertEquals("incorrect filtered ingress", FPS2, intent.filteredIngressPoints()); assertEquals("incorrect filtered egress", FP1, intent.filteredEgressPoint()); }
void function() { MultiPointToSinglePointIntent intent = createFilteredOne(); assertEquals(STR, APPID, intent.appId()); assertEquals(STR, MATCH, intent.selector()); assertEquals(STR, FPS1, intent.filteredIngressPoints()); assertEquals(STR, FP2, intent.filteredEgressPoint()); intent = createAnotherFiltered(); assertEquals(STR, APPID, intent.appId()); assertEquals(STR, MATCH, intent.selector()); assertEquals(STR, FPS2, intent.filteredIngressPoints()); assertEquals(STR, FP1, intent.filteredEgressPoint()); }
/** * Create two intents with filtered connect points. */
Create two intents with filtered connect points
filteredIntent
{ "repo_name": "donNewtonAlpha/onos", "path": "core/api/src/test/java/org/onosproject/net/intent/MultiPointToSinglePointIntentTest.java", "license": "apache-2.0", "size": 5781 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
973,163
public Set<Throw> findThrowByNoteTxt(String noteTxt) throws DataAccessException;
Set<Throw> function(String noteTxt) throws DataAccessException;
/** * JPQL Query - findThrowByNoteTxt * */
JPQL Query - findThrowByNoteTxt
findThrowByNoteTxt
{ "repo_name": "didoux/Spring-BowlingDB", "path": "generated/bowling/dao/ThrowDAO.java", "license": "gpl-2.0", "size": 7358 }
[ "java.util.Set", "org.springframework.dao.DataAccessException" ]
import java.util.Set; import org.springframework.dao.DataAccessException;
import java.util.*; import org.springframework.dao.*;
[ "java.util", "org.springframework.dao" ]
java.util; org.springframework.dao;
578,258
public int getTrailerAsInt(String trailer) throws IOException, ModuleException, NumberFormatException { String val = getTrailer(trailer); if (val == null) throw new NumberFormatException("null"); return Integer.parseInt(val); }
int function(String trailer) throws IOException, ModuleException, NumberFormatException { String val = getTrailer(trailer); if (val == null) throw new NumberFormatException("null"); return Integer.parseInt(val); }
/** * Retrieves the value for a given tailer. The value is parsed as an * int. * * @param trailer the tailer name. * @return the value for the trailer if the trailer exists * @exception NumberFormatException if the trailer's value is not a number * or if the trailer does not exist. * @exception IOException if any exception occurs on the socket. * @exception ModuleException if any module encounters an exception. */
Retrieves the value for a given tailer. The value is parsed as an int
getTrailerAsInt
{ "repo_name": "unrelatedlabs/java-wemo-bridge", "path": "target/HTTPClient/alt/HotJava/HTTPClient/HTTPResponse.java", "license": "apache-2.0", "size": 27357 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,631,882
private void startOrContinueGraphQuery(GraphQueryTask currentState) { int previousStageIndex = Math.max(currentState.currentDepth - 1, 0); ServiceDocumentQueryResult lastResults = currentState.stages.get(previousStageIndex).results; // Use a query task for our current query depth. If we have less query specifications // than the depth limit, we re-use the query specification at the end of the traversal list int traversalSpecIndex = Math.min(currentState.stages.size() - 1, currentState.currentDepth); // The traversal query should contain linkTerms which tell us which edges / links we need to // traverse. If it does not, then the query task validation will fail, and the graph query // will self patch to failed. QueryTask task = currentState.stages.get(traversalSpecIndex); task.documentExpirationTimeMicros = currentState.documentExpirationTimeMicros; scopeNextStageQueryToSelectedLinks(currentState, lastResults, task); Operation getResultsOrStartQueryOp = null; if (currentState.currentDepth == 0 && lastResults != null) { // We allow a first stage to come with results, either in-line, or through a link. // If a result instance with empty document links but a page link was supplied, // we stayed at currentDepth == 0 and we need to fetch the results, without executing // the query logInfo("Fetching initial stage results from %s", lastResults.nextPageLink); getResultsOrStartQueryOp = Operation.createGet(this, lastResults.nextPageLink) .setCompletion((o, e) -> { handleQueryPageGetCompletion(currentState, o, e); }); } else { // we need to execute a query for this stage // enable connection sharing (HTTP/2) since we want to use a direct task, but avoid // holding up a connection getResultsOrStartQueryOp = Operation.createPost(this, ServiceUriPaths.CORE_QUERY_TASKS) .setBodyNoCloning(task) .setConnectionSharing(true) .setCompletion((o, e) -> { handleQueryStageCompletion(currentState, o, e); }); } sendRequest(getResultsOrStartQueryOp); }
void function(GraphQueryTask currentState) { int previousStageIndex = Math.max(currentState.currentDepth - 1, 0); ServiceDocumentQueryResult lastResults = currentState.stages.get(previousStageIndex).results; int traversalSpecIndex = Math.min(currentState.stages.size() - 1, currentState.currentDepth); QueryTask task = currentState.stages.get(traversalSpecIndex); task.documentExpirationTimeMicros = currentState.documentExpirationTimeMicros; scopeNextStageQueryToSelectedLinks(currentState, lastResults, task); Operation getResultsOrStartQueryOp = null; if (currentState.currentDepth == 0 && lastResults != null) { logInfo(STR, lastResults.nextPageLink); getResultsOrStartQueryOp = Operation.createGet(this, lastResults.nextPageLink) .setCompletion((o, e) -> { handleQueryPageGetCompletion(currentState, o, e); }); } else { getResultsOrStartQueryOp = Operation.createPost(this, ServiceUriPaths.CORE_QUERY_TASKS) .setBodyNoCloning(task) .setConnectionSharing(true) .setCompletion((o, e) -> { handleQueryStageCompletion(currentState, o, e); }); } sendRequest(getResultsOrStartQueryOp); }
/** * Main state machine for the multiple stage graph traversal. * * If the task is just starting, the currentDepth will be zero. In this case * we will issue the first query in the {@link GraphQueryTask#stages}. */
Main state machine for the multiple stage graph traversal. If the task is just starting, the currentDepth will be zero. In this case we will issue the first query in the <code>GraphQueryTask#stages</code>
startOrContinueGraphQuery
{ "repo_name": "toliaqat/xenon", "path": "xenon-common/src/main/java/com/vmware/xenon/services/common/GraphQueryTaskService.java", "license": "apache-2.0", "size": 21380 }
[ "com.vmware.xenon.common.Operation", "com.vmware.xenon.common.ServiceDocumentQueryResult" ]
import com.vmware.xenon.common.Operation; import com.vmware.xenon.common.ServiceDocumentQueryResult;
import com.vmware.xenon.common.*;
[ "com.vmware.xenon" ]
com.vmware.xenon;
2,808,322
public SlowConfig sendHeaderWaiting(Awaiter waiting) { sendHeader.setWaiting(waiting); return this; }
SlowConfig function(Awaiter waiting) { sendHeader.setWaiting(waiting); return this; }
/** * Simulate slow server by executing {@code sendHeaderWaiting} function * before sending data. * * <strong>NOTE:</strong> Calling {@link #sendHeaderDelay(long)} has no * effect if waiting function is set. * * @param waiting * the function to execute to simulate delay before sending data * @return this instance for fluent chaining */
Simulate slow server by executing sendHeaderWaiting function before sending data. effect if waiting function is set
sendHeaderWaiting
{ "repo_name": "groupe-sii/ogham", "path": "ogham-test-utils/src/main/java/fr/sii/ogham/testing/extension/junit/sms/config/SlowConfig.java", "license": "apache-2.0", "size": 28603 }
[ "fr.sii.ogham.testing.sms.simulator.config.Awaiter" ]
import fr.sii.ogham.testing.sms.simulator.config.Awaiter;
import fr.sii.ogham.testing.sms.simulator.config.*;
[ "fr.sii.ogham" ]
fr.sii.ogham;
2,088,385
public void setBuffer(InputBuffer buffer) { }
void function(InputBuffer buffer) { }
/** * Set the next buffer in the filter pipeline (has no effect). */
Set the next buffer in the filter pipeline (has no effect)
setBuffer
{ "repo_name": "yuyupapa/OpenSource", "path": "apache-tomcat-6.0.48/java/org/apache/coyote/http11/filters/SavedRequestInputFilter.java", "license": "apache-2.0", "size": 3135 }
[ "org.apache.coyote.InputBuffer" ]
import org.apache.coyote.InputBuffer;
import org.apache.coyote.*;
[ "org.apache.coyote" ]
org.apache.coyote;
437,675
@Override @SuppressWarnings("unchecked") public List<FoodEntity> getAllFood(long idUser) { Query query = this.getSessionFactory() .getCurrentSession() .createQuery("from FoodEntity where userByIdUser.id=:idUser") .setParameter("idUser",idUser); return query.getResultList(); }
@SuppressWarnings(STR) List<FoodEntity> function(long idUser) { Query query = this.getSessionFactory() .getCurrentSession() .createQuery(STR) .setParameter(STR,idUser); return query.getResultList(); }
/** * <h1>getAllFood</h1> * <p>Obtiene una lista de food de la base de datos</p> * * @return Retorna una un objeto List que tiene una serie de objetos de la base de datos */
getAllFood Obtiene una lista de food de la base de datos
getAllFood
{ "repo_name": "tomas-93/My-Macros", "path": "Repository/src/main/java/com/mymacros/repository/dao/implement/database/FoodDataBaseImplementDao.java", "license": "apache-2.0", "size": 3010 }
[ "com.mymacros.database.entity.FoodEntity", "java.util.List", "javax.persistence.Query" ]
import com.mymacros.database.entity.FoodEntity; import java.util.List; import javax.persistence.Query;
import com.mymacros.database.entity.*; import java.util.*; import javax.persistence.*;
[ "com.mymacros.database", "java.util", "javax.persistence" ]
com.mymacros.database; java.util; javax.persistence;
1,231,605
public void removeUser(final String userId) throws ServiceException { final Transaction transaction = userRepository.beginTransaction(); try { userRepository.remove(userId); transaction.commit(); } catch (final RepositoryException e) { if (transaction.isActive()) { transaction.rollback(); } LOGGER.log(Level.ERROR, "Removes a user[id=" + userId + "] failed", e); throw new ServiceException(e); } }
void function(final String userId) throws ServiceException { final Transaction transaction = userRepository.beginTransaction(); try { userRepository.remove(userId); transaction.commit(); } catch (final RepositoryException e) { if (transaction.isActive()) { transaction.rollback(); } LOGGER.log(Level.ERROR, STR + userId + STR, e); throw new ServiceException(e); } }
/** * Removes a user specified by the given user id. * * @param userId the given user id * @throws ServiceException service exception */
Removes a user specified by the given user id
removeUser
{ "repo_name": "meikaiyipian/solo", "path": "src/main/java/org/b3log/solo/service/UserMgmtService.java", "license": "apache-2.0", "size": 13298 }
[ "org.b3log.latke.logging.Level", "org.b3log.latke.repository.RepositoryException", "org.b3log.latke.repository.Transaction", "org.b3log.latke.service.ServiceException" ]
import org.b3log.latke.logging.Level; import org.b3log.latke.repository.RepositoryException; import org.b3log.latke.repository.Transaction; import org.b3log.latke.service.ServiceException;
import org.b3log.latke.logging.*; import org.b3log.latke.repository.*; import org.b3log.latke.service.*;
[ "org.b3log.latke" ]
org.b3log.latke;
1,349,058
public int removeChildChannels(User loggedInUser, String key, List childChannelLabels) { ActivationKeyManager manager = ActivationKeyManager.getInstance(); ActivationKey activationKey = lookupKey(key, loggedInUser); for (Iterator it = childChannelLabels.iterator(); it.hasNext();) { String childChannelLabel = (String)it.next(); Channel childChannel = null; try { childChannel = ChannelManager.lookupByLabelAndUser(childChannelLabel, loggedInUser); } catch (LookupException e) { throw new InvalidChannelException(e); } // Verify the channel given is actually a child channel: if (childChannel.isBaseChannel()) { throw new InvalidChannelException(childChannel.getName() + " is not a child channel."); } manager.removeChannel(activationKey, childChannel); } return 1; }
int function(User loggedInUser, String key, List childChannelLabels) { ActivationKeyManager manager = ActivationKeyManager.getInstance(); ActivationKey activationKey = lookupKey(key, loggedInUser); for (Iterator it = childChannelLabels.iterator(); it.hasNext();) { String childChannelLabel = (String)it.next(); Channel childChannel = null; try { childChannel = ChannelManager.lookupByLabelAndUser(childChannelLabel, loggedInUser); } catch (LookupException e) { throw new InvalidChannelException(e); } if (childChannel.isBaseChannel()) { throw new InvalidChannelException(childChannel.getName() + STR); } manager.removeChannel(activationKey, childChannel); } return 1; }
/** * Remove a child channel from an activation key. * * @param loggedInUser The current user * @param key The activation key to act upon * @param childChannelLabels List of child channel labels to be removed * from this activation key * @return 1 on success, exception thrown otherwise * * @xmlrpc.doc Remove child channels from an activation key. * @xmlrpc.param #param("string", "sessionKey") * @xmlrpc.param #param("string", "key") * @xmlrpc.param #array_single("string", "childChannelLabel") * @xmlrpc.returntype #return_int_success() */
Remove a child channel from an activation key
removeChildChannels
{ "repo_name": "aronparsons/spacewalk", "path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/activationkey/ActivationKeyHandler.java", "license": "gpl-2.0", "size": 46034 }
[ "com.redhat.rhn.common.hibernate.LookupException", "com.redhat.rhn.domain.channel.Channel", "com.redhat.rhn.domain.token.ActivationKey", "com.redhat.rhn.domain.user.User", "com.redhat.rhn.frontend.xmlrpc.InvalidChannelException", "com.redhat.rhn.manager.channel.ChannelManager", "com.redhat.rhn.manager.token.ActivationKeyManager", "java.util.Iterator", "java.util.List" ]
import com.redhat.rhn.common.hibernate.LookupException; import com.redhat.rhn.domain.channel.Channel; import com.redhat.rhn.domain.token.ActivationKey; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.frontend.xmlrpc.InvalidChannelException; import com.redhat.rhn.manager.channel.ChannelManager; import com.redhat.rhn.manager.token.ActivationKeyManager; import java.util.Iterator; import java.util.List;
import com.redhat.rhn.common.hibernate.*; import com.redhat.rhn.domain.channel.*; import com.redhat.rhn.domain.token.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.frontend.xmlrpc.*; import com.redhat.rhn.manager.channel.*; import com.redhat.rhn.manager.token.*; import java.util.*;
[ "com.redhat.rhn", "java.util" ]
com.redhat.rhn; java.util;
371,463
public Observable<ServiceResponse<Page<JobDefinitionInner>>> listNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); }
Observable<ServiceResponse<Page<JobDefinitionInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); }
/** * Lists all jobs under the specified job collection. * ServiceResponse<PageImpl<JobDefinitionInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;JobDefinitionInner&gt; object wrapped in {@link ServiceResponse} if successful. */
Lists all jobs under the specified job collection
listNextSinglePageAsync
{ "repo_name": "martinsawicki/azure-sdk-for-java", "path": "azure-mgmt-scheduler/src/main/java/com/microsoft/azure/management/scheduler/implementation/JobsInner.java", "license": "mit", "size": 86776 }
[ "com.microsoft.azure.Page", "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
import com.microsoft.azure.*; import com.microsoft.rest.*;
[ "com.microsoft.azure", "com.microsoft.rest" ]
com.microsoft.azure; com.microsoft.rest;
1,532,693
public void updateIndexShardSnapshotStatus(SnapshotId snapshotId, ShardId shardId, SnapshotsInProgress.ShardSnapshotStatus status) { UpdateIndexShardSnapshotStatusRequest request = new UpdateIndexShardSnapshotStatusRequest(snapshotId, shardId, status); try { if (clusterService.state().nodes().localNodeMaster()) { innerUpdateSnapshotState(request); } else { transportService.sendRequest(clusterService.state().nodes().masterNode(), UPDATE_SNAPSHOT_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME); } } catch (Throwable t) { logger.warn("[{}] [{}] failed to update snapshot state", t, request.snapshotId(), request.status()); } }
void function(SnapshotId snapshotId, ShardId shardId, SnapshotsInProgress.ShardSnapshotStatus status) { UpdateIndexShardSnapshotStatusRequest request = new UpdateIndexShardSnapshotStatusRequest(snapshotId, shardId, status); try { if (clusterService.state().nodes().localNodeMaster()) { innerUpdateSnapshotState(request); } else { transportService.sendRequest(clusterService.state().nodes().masterNode(), UPDATE_SNAPSHOT_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME); } } catch (Throwable t) { logger.warn(STR, t, request.snapshotId(), request.status()); } }
/** * Updates the shard status */
Updates the shard status
updateIndexShardSnapshotStatus
{ "repo_name": "queirozfcom/elasticsearch", "path": "core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java", "license": "apache-2.0", "size": 29374 }
[ "org.elasticsearch.cluster.SnapshotsInProgress", "org.elasticsearch.cluster.metadata.SnapshotId", "org.elasticsearch.index.shard.ShardId", "org.elasticsearch.transport.EmptyTransportResponseHandler" ]
import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.index.shard.*; import org.elasticsearch.transport.*;
[ "org.elasticsearch.cluster", "org.elasticsearch.index", "org.elasticsearch.transport" ]
org.elasticsearch.cluster; org.elasticsearch.index; org.elasticsearch.transport;
2,737,555
public void setFallbackProtocol() { setFallbackProtocol(VdsProtocol.XML); fallbackTriggered = true; }
void function() { setFallbackProtocol(VdsProtocol.XML); fallbackTriggered = true; }
/** * Updates DB with fall back protocol (xmlrpc). */
Updates DB with fall back protocol (xmlrpc)
setFallbackProtocol
{ "repo_name": "OpenUniversity/ovirt-engine", "path": "backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/transport/ProtocolDetector.java", "license": "apache-2.0", "size": 6931 }
[ "org.ovirt.engine.core.common.businessentities.VdsProtocol" ]
import org.ovirt.engine.core.common.businessentities.VdsProtocol;
import org.ovirt.engine.core.common.businessentities.*;
[ "org.ovirt.engine" ]
org.ovirt.engine;
2,051,023
@Override public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) { throw DescriptorException.invalidMappingOperation(this, "buildClone"); }
void function(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) { throw DescriptorException.invalidMappingOperation(this, STR); }
/** * INTERNAL: * Clone the attribute from the original and assign it to the clone. */
Clone the attribute from the original and assign it to the clone
buildClone
{ "repo_name": "RallySoftware/eclipselink.runtime", "path": "foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/oxm/mappings/XMLAnyObjectMapping.java", "license": "epl-1.0", "size": 30490 }
[ "org.eclipse.persistence.exceptions.DescriptorException", "org.eclipse.persistence.internal.identitymaps.CacheKey", "org.eclipse.persistence.internal.sessions.AbstractSession" ]
import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.sessions.*;
[ "org.eclipse.persistence" ]
org.eclipse.persistence;
2,892,723
private Subscription validateSubscriptionSettings( VOSubscription subscription) throws ValidationException, ObjectNotFoundException, OperationNotPermittedException, NonUniqueBusinessKeyException, ConcurrentModificationException { subscription.setSubscriptionId( BaseAssembler.trim(subscription.getSubscriptionId())); String subscriptionId = subscription.getSubscriptionId(); BLValidator.isId("subscriptionId", subscriptionId, true); BLValidator.isDescription("purchaseOrderNumber", subscription.getPurchaseOrderNumber(), false); Subscription subscriptionToModify = dataManager .getReference(Subscription.class, subscription.getKey()); subscriptionToModify.setEventPublished(false); PermissionCheck.owns(subscriptionToModify, dataManager.getCurrentUser().getOrganization(), LOG); BaseAssembler.verifyVersionAndKey(subscriptionToModify, subscription); String ownerId = subscription.getOwnerId(); if (ownerId != null && ownerId.length() != 0) { checkRolesForSubscriptionOwner(ownerId, dataManager.getCurrentUser().getTenantId()); } if (!subscriptionToModify.getSubscriptionId().equals(subscriptionId)) { Subscription sub = new Subscription(); sub.setOrganization(subscriptionToModify.getOrganization()); sub.setSubscriptionId(subscriptionId); dataManager.validateBusinessKeyUniqueness(sub); List<VOTriggerProcess> triggers = triggerService .getAllActionsForOrganizationRelatedSubscription(); for (VOTriggerProcess voTriggerProcess : triggers) { if (voTriggerProcess.getSubscription().getSubscriptionId() .equals(subscriptionId)) { NonUniqueBusinessKeyException e = new NonUniqueBusinessKeyException( DomainObjectException.ClassEnum.SUBSCRIPTION, subscriptionId); throw e; } } // Validate unique subscirptionId and organization in temporary // table Long result = getModifiedEntityDao() .countSubscriptionOfOrganizationAndSubscription( subscriptionToModify, subscriptionId); if (result.longValue() > 0) { NonUniqueBusinessKeyException ex = new NonUniqueBusinessKeyException(); 
LOG.logError(Log4jLogger.SYSTEM_LOG, ex, LogMessageIdentifier.ERROR_SUBSCRIPTIONID_ALREADY_EXIST_IN_MODIFIEDENTITY, subscriptionId, subscriptionToModify.getOrganization() .getOrganizationId()); throw ex; } } return subscriptionToModify; }
Subscription function( VOSubscription subscription) throws ValidationException, ObjectNotFoundException, OperationNotPermittedException, NonUniqueBusinessKeyException, ConcurrentModificationException { subscription.setSubscriptionId( BaseAssembler.trim(subscription.getSubscriptionId())); String subscriptionId = subscription.getSubscriptionId(); BLValidator.isId(STR, subscriptionId, true); BLValidator.isDescription(STR, subscription.getPurchaseOrderNumber(), false); Subscription subscriptionToModify = dataManager .getReference(Subscription.class, subscription.getKey()); subscriptionToModify.setEventPublished(false); PermissionCheck.owns(subscriptionToModify, dataManager.getCurrentUser().getOrganization(), LOG); BaseAssembler.verifyVersionAndKey(subscriptionToModify, subscription); String ownerId = subscription.getOwnerId(); if (ownerId != null && ownerId.length() != 0) { checkRolesForSubscriptionOwner(ownerId, dataManager.getCurrentUser().getTenantId()); } if (!subscriptionToModify.getSubscriptionId().equals(subscriptionId)) { Subscription sub = new Subscription(); sub.setOrganization(subscriptionToModify.getOrganization()); sub.setSubscriptionId(subscriptionId); dataManager.validateBusinessKeyUniqueness(sub); List<VOTriggerProcess> triggers = triggerService .getAllActionsForOrganizationRelatedSubscription(); for (VOTriggerProcess voTriggerProcess : triggers) { if (voTriggerProcess.getSubscription().getSubscriptionId() .equals(subscriptionId)) { NonUniqueBusinessKeyException e = new NonUniqueBusinessKeyException( DomainObjectException.ClassEnum.SUBSCRIPTION, subscriptionId); throw e; } } Long result = getModifiedEntityDao() .countSubscriptionOfOrganizationAndSubscription( subscriptionToModify, subscriptionId); if (result.longValue() > 0) { NonUniqueBusinessKeyException ex = new NonUniqueBusinessKeyException(); LOG.logError(Log4jLogger.SYSTEM_LOG, ex, LogMessageIdentifier.ERROR_SUBSCRIPTIONID_ALREADY_EXIST_IN_MODIFIEDENTITY, subscriptionId, 
subscriptionToModify.getOrganization() .getOrganizationId()); throw ex; } } return subscriptionToModify; }
/** * Validates the settings of the specified subscription object. * * @param subscription * The subscription to be validated. * @return The domain object representation of the subscription to be * modified. * @throws ValidationException * Thrown in case the settings could not be validated. * @throws ObjectNotFoundException * Thrown in case the subscription could not be found. * @throws OperationNotPermittedException * Thrown in case the caller tries to modify another * organization's object. * @throws NonUniqueBusinessKeyException * Thrown in case there already is a subscription with the given * id for the current organization (only checked in caes of * changing the current id). * @throws ConcurrentModificationException */
Validates the settings of the specified subscription object
validateSubscriptionSettings
{ "repo_name": "opetrovski/development", "path": "oscm-subscriptionmgmt/javasrc/org/oscm/subscriptionservice/bean/SubscriptionServiceBean.java", "license": "apache-2.0", "size": 259543 }
[ "java.util.List", "org.oscm.domobjects.Subscription", "org.oscm.internal.types.exception.ConcurrentModificationException", "org.oscm.internal.types.exception.DomainObjectException", "org.oscm.internal.types.exception.NonUniqueBusinessKeyException", "org.oscm.internal.types.exception.ObjectNotFoundException", "org.oscm.internal.types.exception.OperationNotPermittedException", "org.oscm.internal.types.exception.ValidationException", "org.oscm.internal.vo.VOSubscription", "org.oscm.internal.vo.VOTriggerProcess", "org.oscm.logging.Log4jLogger", "org.oscm.permission.PermissionCheck", "org.oscm.types.enumtypes.LogMessageIdentifier", "org.oscm.validator.BLValidator", "org.oscm.vo.BaseAssembler" ]
import java.util.List; import org.oscm.domobjects.Subscription; import org.oscm.internal.types.exception.ConcurrentModificationException; import org.oscm.internal.types.exception.DomainObjectException; import org.oscm.internal.types.exception.NonUniqueBusinessKeyException; import org.oscm.internal.types.exception.ObjectNotFoundException; import org.oscm.internal.types.exception.OperationNotPermittedException; import org.oscm.internal.types.exception.ValidationException; import org.oscm.internal.vo.VOSubscription; import org.oscm.internal.vo.VOTriggerProcess; import org.oscm.logging.Log4jLogger; import org.oscm.permission.PermissionCheck; import org.oscm.types.enumtypes.LogMessageIdentifier; import org.oscm.validator.BLValidator; import org.oscm.vo.BaseAssembler;
import java.util.*; import org.oscm.domobjects.*; import org.oscm.internal.types.exception.*; import org.oscm.internal.vo.*; import org.oscm.logging.*; import org.oscm.permission.*; import org.oscm.types.enumtypes.*; import org.oscm.validator.*; import org.oscm.vo.*;
[ "java.util", "org.oscm.domobjects", "org.oscm.internal", "org.oscm.logging", "org.oscm.permission", "org.oscm.types", "org.oscm.validator", "org.oscm.vo" ]
java.util; org.oscm.domobjects; org.oscm.internal; org.oscm.logging; org.oscm.permission; org.oscm.types; org.oscm.validator; org.oscm.vo;
2,729,785
//----------------------------------------------------------------------- public final MetaProperty<String> classifier() { return _classifier; }
final MetaProperty<String> function() { return _classifier; }
/** * The meta-property for the {@code classifier} property. * @return the meta-property, not null */
The meta-property for the classifier property
classifier
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-Component/src/main/java/com/opengamma/component/factory/source/HistoricalTimeSeriesSourceComponentFactory.java", "license": "apache-2.0", "size": 20511 }
[ "org.joda.beans.MetaProperty" ]
import org.joda.beans.MetaProperty;
import org.joda.beans.*;
[ "org.joda.beans" ]
org.joda.beans;
1,568,760
public PlayerMark getPlayerMark() { return currentMark; } } private PlayerMark[][] gameTab; private int winLineLength; private PlayerMark currentPlayer; private GameStatus gameStatus; private List<CellCoord> winLine; private List<PlayStep> playHistory; private Set<List<CellCoord>> lines; private Map<CellCoord, Set<List<CellCoord>>> winLinesMap; public GameModel() { this(6, 7, 4); } public GameModel(final int rows, final int cols, final int winLength) throws IllegalArgumentException { if (rows <= 0 || cols <= 0) throw new IllegalArgumentException("Illegal dimenstions. Rows : " + rows + " - Columns : " + cols); if (winLength <= 2 || winLength > Math.min(rows, cols)) throw new IllegalArgumentException("Illegal length of line in order to win. Rows : " + rows + " - Columns : " + cols + " - Length to win : " + winLength); gameTab = new PlayerMark[rows][cols]; winLineLength = winLength; currentPlayer = PlayerMark.getNextPlayer(); gameStatus = GameStatus.CONTINUE_STATUS; winLinesMap = new ConcurrentHashMap<CellCoord, Set<List<CellCoord>>>(); winLine = null; playHistory = new LinkedList<PlayStep>(); lines = new HashSet<List<CellCoord>>(); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { lines.addAll(getAllLines(i, j)); } } } public GameModel(final GameModel gameModel) throws NullPointerException { if (gameModel == null) throw new NullPointerException(); winLineLength = gameModel.winLineLength; currentPlayer = gameModel.currentPlayer; gameStatus = gameModel.gameStatus; // Here it is safe to copy the win line map - nothing particular // to a given instance of a game model is written here. winLinesMap = gameModel.winLinesMap; lines = gameModel.lines; // Here it is safe to copy the win line, since it is readonly. 
winLine = gameModel.winLine; gameTab = new PlayerMark[gameModel.gameTab.length][]; for (int i = 0; i < gameModel.gameTab.length; i++) { gameTab[i] = new PlayerMark[gameModel.gameTab[i].length]; for (int j = 0; j < gameModel.gameTab[i].length; j++) gameTab[i][j] = gameModel.gameTab[i][j]; } playHistory = new LinkedList<PlayStep>(gameModel.playHistory); }
PlayerMark function() { return currentMark; } } private PlayerMark[][] gameTab; private int winLineLength; private PlayerMark currentPlayer; private GameStatus gameStatus; private List<CellCoord> winLine; private List<PlayStep> playHistory; private Set<List<CellCoord>> lines; private Map<CellCoord, Set<List<CellCoord>>> winLinesMap; public GameModel() { this(6, 7, 4); } public GameModel(final int rows, final int cols, final int winLength) throws IllegalArgumentException { if (rows <= 0 cols <= 0) throw new IllegalArgumentException(STR + rows + STR + cols); if (winLength <= 2 winLength > Math.min(rows, cols)) throw new IllegalArgumentException(STR + rows + STR + cols + STR + winLength); gameTab = new PlayerMark[rows][cols]; winLineLength = winLength; currentPlayer = PlayerMark.getNextPlayer(); gameStatus = GameStatus.CONTINUE_STATUS; winLinesMap = new ConcurrentHashMap<CellCoord, Set<List<CellCoord>>>(); winLine = null; playHistory = new LinkedList<PlayStep>(); lines = new HashSet<List<CellCoord>>(); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { lines.addAll(getAllLines(i, j)); } } } public GameModel(final GameModel gameModel) throws NullPointerException { if (gameModel == null) throw new NullPointerException(); winLineLength = gameModel.winLineLength; currentPlayer = gameModel.currentPlayer; gameStatus = gameModel.gameStatus; winLinesMap = gameModel.winLinesMap; lines = gameModel.lines; winLine = gameModel.winLine; gameTab = new PlayerMark[gameModel.gameTab.length][]; for (int i = 0; i < gameModel.gameTab.length; i++) { gameTab[i] = new PlayerMark[gameModel.gameTab[i].length]; for (int j = 0; j < gameModel.gameTab[i].length; j++) gameTab[i][j] = gameModel.gameTab[i][j]; } playHistory = new LinkedList<PlayStep>(gameModel.playHistory); }
/** * Return the mark at the start of the turn. * @return the mark at the start of the turn. */
Return the mark at the start of the turn
getPlayerMark
{ "repo_name": "jaubin/fourinaline", "path": "org/gojul/fourinaline/model/GameModel.java", "license": "gpl-2.0", "size": 36312 }
[ "java.util.HashSet", "java.util.LinkedList", "java.util.List", "java.util.Map", "java.util.Set", "java.util.concurrent.ConcurrentHashMap" ]
import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap;
import java.util.*; import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,667,138
// ===================== CONCATENATE function ===================== // @Function("CONCATENATE") @FunctionParameters({ @FunctionParameter("strings")}) public static String CONCATENATE(String ...strings){ if(strings.length==0) { if(log.isDebugEnabled()) { log.debug("No arguments were specified."); } return null; } StringBuffer sb=new StringBuffer(); for (int i=0;i<strings.length;i++){ sb.append(strings[i]); } return sb.toString(); }
@Function(STR) @FunctionParameters({ @FunctionParameter(STR)}) static String function(String ...strings){ if(strings.length==0) { if(log.isDebugEnabled()) { log.debug(STR); } return null; } StringBuffer sb=new StringBuffer(); for (int i=0;i<strings.length;i++){ sb.append(strings[i]); } return sb.toString(); }
/** * Combines a list of strings into a single one. */
Combines a list of strings into a single one
CONCATENATE
{ "repo_name": "aleatorio12/ProVentasConnector", "path": "jasperreports-6.2.1-project/jasperreports-6.2.1/demo/samples/functions/src/net/sf/jasperreports/functions/standard/TextFunctions.java", "license": "gpl-3.0", "size": 20320 }
[ "net.sf.jasperreports.functions.annotations.Function", "net.sf.jasperreports.functions.annotations.FunctionParameter", "net.sf.jasperreports.functions.annotations.FunctionParameters" ]
import net.sf.jasperreports.functions.annotations.Function; import net.sf.jasperreports.functions.annotations.FunctionParameter; import net.sf.jasperreports.functions.annotations.FunctionParameters;
import net.sf.jasperreports.functions.annotations.*;
[ "net.sf.jasperreports" ]
net.sf.jasperreports;
903,036
EList<String> getVal4();
EList<String> getVal4();
/** * Returns the value of the '<em><b>Val4</b></em>' attribute list. * The list contents are of type {@link java.lang.String}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Val4</em>' attribute list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Val4</em>' attribute list. * @see org.eclipse.xtext.serializer.sequencertest.SequencertestPackage#getAlternativeMultiplicities_Val4() * @model unique="false" * @generated */
Returns the value of the 'Val4' attribute list. The list contents are of type <code>java.lang.String</code>. If the meaning of the 'Val4' attribute list isn't clear, there really should be more of a description here...
getVal4
{ "repo_name": "miklossy/xtext-core", "path": "org.eclipse.xtext.tests/src-gen/org/eclipse/xtext/serializer/sequencertest/AlternativeMultiplicities.java", "license": "epl-1.0", "size": 5361 }
[ "org.eclipse.emf.common.util.EList" ]
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,147,661
public static String keyToString(byte[] key) { return new String(key, Charsets.ISO_8859_1); }
static String function(byte[] key) { return new String(key, Charsets.ISO_8859_1); }
/** * Convert the given key to a string. */
Convert the given key to a string
keyToString
{ "repo_name": "oleg-nenashev/remoting", "path": "src/main/java/org/jenkinsci/remoting/engine/Jnlp3Util.java", "license": "mit", "size": 4133 }
[ "org.jenkinsci.remoting.util.Charsets" ]
import org.jenkinsci.remoting.util.Charsets;
import org.jenkinsci.remoting.util.*;
[ "org.jenkinsci.remoting" ]
org.jenkinsci.remoting;
2,080,355
EClass getSourceExpression();
EClass getSourceExpression();
/** * Returns the meta object for class '{@link org.eclectic.frontend.tao.SourceExpression <em>Source Expression</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Source Expression</em>'. * @see org.eclectic.frontend.tao.SourceExpression * @generated */
Returns the meta object for class '<code>org.eclectic.frontend.tao.SourceExpression Source Expression</code>'.
getSourceExpression
{ "repo_name": "jesusc/eclectic", "path": "plugins/org.eclectic.frontend.asm/src-gen/org/eclectic/frontend/tao/TaoPackage.java", "license": "gpl-3.0", "size": 47516 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,782,966
public void setExecs(String s) { PatternSet patset = new PatternSet(); patset.setIncludes(s); String[] execNames = patset.getIncludePatterns(getProject()); for (int i = 0; i < execNames.length; i++) { File f = new File(execNames[i]); mExecAttrs.add(f); } }
void function(String s) { PatternSet patset = new PatternSet(); patset.setIncludes(s); String[] execNames = patset.getIncludePatterns(getProject()); for (int i = 0; i < execNames.length; i++) { File f = new File(execNames[i]); mExecAttrs.add(f); } }
/** * Setter for the "execs" attribute (optional) */
Setter for the "execs" attribute (optional)
setExecs
{ "repo_name": "tofi86/Jarbundler", "path": "src/net/sourceforge/jarbundler/JarBundler.java", "license": "apache-2.0", "size": 44706 }
[ "java.io.File", "java.lang.String", "org.apache.tools.ant.types.PatternSet" ]
import java.io.File; import java.lang.String; import org.apache.tools.ant.types.PatternSet;
import java.io.*; import java.lang.*; import org.apache.tools.ant.types.*;
[ "java.io", "java.lang", "org.apache.tools" ]
java.io; java.lang; org.apache.tools;
2,081,627
public String instance2String( Instance inst, LabelAlphabet targetAlphabet, Attributes attrs, int nrFeatures, boolean asString, boolean filterMV) { StringBuilder sb = new StringBuilder(); // For sequences, each instance wraps a feature vector list and a feature vector // was created as new Instance(fvseq, fseq, null, null); // where FeatureVectorSequence fvseq = new FeatureVectorSequence(vectors); // and FeatureSequence fseq = new FeatureSequence(pipe.getTargetAlphabet(), labelidxs); // First unpack the feature vector sequence and fseq FeatureVectorSequence fvseq = (FeatureVectorSequence) inst.getData(); FeatureSequence fseq = (FeatureSequence) inst.getTarget(); boolean haveTargets = (fseq != null && fseq.size() > 0); if (haveTargets && (fseq.size() != fvseq.size())) { throw new GateRuntimeException("There are targets but not the same number, in fvseq=" + fvseq.size() + ", targets=" + fseq.size()); } sb.append("["); // outermost list which encloses the list of feature vectors and the list of targets boolean firstList = true; sb.append("["); // for the feature vector list for (int i = 0; i < fvseq.size(); i++) { FeatureVector fv = fvseq.get(i); Object targetObject = fseq.get(i); if (filterMV) { Object ignore = inst.getProperty(FeatureExtractionMalletSparse.PROP_IGNORE_HAS_MV); if (ignore != null && ignore.equals(true)) { continue; } } if (firstList) { firstList = false; } else { sb.append(", "); } // TODO: the python format does not allow the use of instance weights, instead the // weight must become an additional feature! 
Double instanceWeight = (Double) inst.getProperty("instanceWeight"); boolean first = true; sb.append(featureVector2String(fv, nrFeatures, attrs, asString)); } sb.append("]"); // close the feature vector list // Only if there are targets, add another list with the targets if (haveTargets) { sb.append(", ["); boolean first = true; for (int i = 0; i < fseq.size(); i++) { Object target = fseq.get(i); if (first) { first = false; } else { sb.append(", "); } sb.append(target2String(target, targetAlphabet, asString)); } // for each target sb.append("]"); // close target list } // if haveTargets sb.append("]"); // close outer list return sb.toString(); }// instance2String
String function( Instance inst, LabelAlphabet targetAlphabet, Attributes attrs, int nrFeatures, boolean asString, boolean filterMV) { StringBuilder sb = new StringBuilder(); FeatureVectorSequence fvseq = (FeatureVectorSequence) inst.getData(); FeatureSequence fseq = (FeatureSequence) inst.getTarget(); boolean haveTargets = (fseq != null && fseq.size() > 0); if (haveTargets && (fseq.size() != fvseq.size())) { throw new GateRuntimeException(STR + fvseq.size() + STR + fseq.size()); } sb.append("["); boolean firstList = true; sb.append("["); for (int i = 0; i < fvseq.size(); i++) { FeatureVector fv = fvseq.get(i); Object targetObject = fseq.get(i); if (filterMV) { Object ignore = inst.getProperty(FeatureExtractionMalletSparse.PROP_IGNORE_HAS_MV); if (ignore != null && ignore.equals(true)) { continue; } } if (firstList) { firstList = false; } else { sb.append(STR); } Double instanceWeight = (Double) inst.getProperty(STR); boolean first = true; sb.append(featureVector2String(fv, nrFeatures, attrs, asString)); } sb.append("]"); if (haveTargets) { sb.append(STR); boolean first = true; for (int i = 0; i < fseq.size(); i++) { Object target = fseq.get(i); if (first) { first = false; } else { sb.append(STR); } sb.append(target2String(target, targetAlphabet, asString)); } sb.append("]"); } sb.append("]"); return sb.toString(); }
/** * Convert instance to string representation. * @param inst instance * @param targetAlphabet target alphabet * @param attrs attributes * @param nrFeatures number of features * @param asString represent as quoted string * @param filterMV filter missing values * @return string representation */
Convert instance to string representation
instance2String
{ "repo_name": "GateNLP/gateplugin-LearningFramework", "path": "src/main/java/gate/plugin/learningframework/export/CorpusExporterMRJsonSeq.java", "license": "lgpl-2.1", "size": 8112 }
[ "cc.mallet.types.FeatureSequence", "cc.mallet.types.FeatureVector", "cc.mallet.types.FeatureVectorSequence", "cc.mallet.types.Instance", "cc.mallet.types.LabelAlphabet" ]
import cc.mallet.types.FeatureSequence; import cc.mallet.types.FeatureVector; import cc.mallet.types.FeatureVectorSequence; import cc.mallet.types.Instance; import cc.mallet.types.LabelAlphabet;
import cc.mallet.types.*;
[ "cc.mallet.types" ]
cc.mallet.types;
2,261,632
String toAbsolutePath(String path) { return new File(path).getAbsolutePath(); }
String toAbsolutePath(String path) { return new File(path).getAbsolutePath(); }
/** * Convert to absolute path */
Convert to absolute path
toAbsolutePath
{ "repo_name": "leepc12/BigDataScript", "path": "src/org/bds/lang/nativeMethods/string/MethodNative_string_dirPath_regex.java", "license": "apache-2.0", "size": 1975 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,529,955
public void selectionChanged(ITextSelection selection) { selectionChanged((ISelection)selection); }
void function(ITextSelection selection) { selectionChanged((ISelection)selection); }
/** * Notifies this action that the given text selection has changed. This default * implementation calls <code>selectionChanged(ISelection selection)</code>. * * @param selection the new selection */
Notifies this action that the given text selection has changed. This default implementation calls <code>selectionChanged(ISelection selection)</code>
selectionChanged
{ "repo_name": "ttimbul/eclipse.wst", "path": "bundles/org.eclipse.wst.xsd.ui/src-refactor/org/eclipse/wst/xsd/ui/internal/refactor/actions/SelectionDispatchAction.java", "license": "epl-1.0", "size": 5390 }
[ "org.eclipse.jface.text.ITextSelection", "org.eclipse.jface.viewers.ISelection" ]
import org.eclipse.jface.text.ITextSelection; import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.text.*; import org.eclipse.jface.viewers.*;
[ "org.eclipse.jface" ]
org.eclipse.jface;
1,608,625
@Override public ShardRouting routingEntry() { return this.shardRouting; }
ShardRouting function() { return this.shardRouting; }
/** * Returns the latest cluster routing entry received with this shard. */
Returns the latest cluster routing entry received with this shard
routingEntry
{ "repo_name": "gfyoung/elasticsearch", "path": "server/src/main/java/org/elasticsearch/index/shard/IndexShard.java", "license": "apache-2.0", "size": 137199 }
[ "org.elasticsearch.cluster.routing.ShardRouting" ]
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.*;
[ "org.elasticsearch.cluster" ]
org.elasticsearch.cluster;
1,212,864
public void setVeteranInfoes(final Set<VeteranInfo> veteranInfo) { this.veteranInfoes = veteranInfo; }
void function(final Set<VeteranInfo> veteranInfo) { this.veteranInfoes = veteranInfo; }
/** * Set the value related to the column: veteranInfo. * @param veteranInfo the veteranInfo value you wish to set */
Set the value related to the column: veteranInfo
setVeteranInfoes
{ "repo_name": "servinglynk/servinglynk-hmis", "path": "hmis-model-v2014/src/main/java/com/servinglynk/hmis/warehouse/model/v2014/Client.java", "license": "mpl-2.0", "size": 25310 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
1,906,234
public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { // check for zeros before verifying bracketing if (f.value(min) == 0.0) { return min; } if (f.value(max) == 0.0) { return max; } if (f.value(initial) == 0.0) { return initial; } verifyBracketing(min, max, f); verifySequence(min, initial, max); if (isBracketing(min, initial, f)) { return solve(f, min, initial); } else { return solve(f, initial, max); } }
double function(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { if (f.value(min) == 0.0) { return min; } if (f.value(max) == 0.0) { return max; } if (f.value(initial) == 0.0) { return initial; } verifyBracketing(min, max, f); verifySequence(min, initial, max); if (isBracketing(min, initial, f)) { return solve(f, min, initial); } else { return solve(f, initial, max); } }
/** * Find a root in the given interval with initial value. * <p> * Requires bracketing condition.</p> * * @param f the function to solve * @param min the lower bound for the interval * @param max the upper bound for the interval * @param initial the start value to use * @return the point at which the function value is zero * @throws MaxIterationsExceededException if the maximum iteration count is exceeded * @throws FunctionEvaluationException if an error occurs evaluating the * function * @throws IllegalArgumentException if any parameters are invalid */
Find a root in the given interval with initial value. Requires bracketing condition
solve
{ "repo_name": "rbouckaert/YABBY", "path": "src/yabby/org/apache/commons/math/analysis/solvers/RiddersSolver.java", "license": "lgpl-3.0", "size": 7671 }
[ "org.apache.commons.math.FunctionEvaluationException", "org.apache.commons.math.MaxIterationsExceededException", "org.apache.commons.math.analysis.UnivariateRealFunction" ]
import org.apache.commons.math.FunctionEvaluationException; import org.apache.commons.math.MaxIterationsExceededException; import org.apache.commons.math.analysis.UnivariateRealFunction;
import org.apache.commons.math.*; import org.apache.commons.math.analysis.*;
[ "org.apache.commons" ]
org.apache.commons;
1,188,175
public ProcessBuilder command(String... command) { this.command = Arrays.asList(command); return this; }
ProcessBuilder function(String... command) { this.command = Arrays.asList(command); return this; }
/** * Sets the command-line arguments to those specified. * This simplifies modifying the arguments by converting * the provided series of constructor arguments into a * list of command-line arguments. * * @param command the name of the program followed by its arguments. * @return a reference to this process builder. */
Sets the command-line arguments to those specified. This simplifies modifying the arguments by converting the provided series of constructor arguments into a list of command-line arguments
command
{ "repo_name": "taciano-perez/JamVM-PH", "path": "src/classpath/java/lang/ProcessBuilder.java", "license": "gpl-2.0", "size": 12554 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
710,455
public void setPreauthOptions(KdcRequest kdcRequest, PluginRequestContext requestContext, KrbOptions preauthOptions);
void function(KdcRequest kdcRequest, PluginRequestContext requestContext, KrbOptions preauthOptions);
/** * Set krb options passed from user */
Set krb options passed from user
setPreauthOptions
{ "repo_name": "HazelChen/directory-kerberos", "path": "haox-kerb/kerb-client/src/main/java/org/apache/kerberos/kerb/client/preauth/KrbPreauth.java", "license": "apache-2.0", "size": 3784 }
[ "org.apache.kerberos.kerb.client.KrbOptions", "org.apache.kerberos.kerb.client.request.KdcRequest", "org.apache.kerberos.kerb.preauth.PluginRequestContext" ]
import org.apache.kerberos.kerb.client.KrbOptions; import org.apache.kerberos.kerb.client.request.KdcRequest; import org.apache.kerberos.kerb.preauth.PluginRequestContext;
import org.apache.kerberos.kerb.client.*; import org.apache.kerberos.kerb.client.request.*; import org.apache.kerberos.kerb.preauth.*;
[ "org.apache.kerberos" ]
org.apache.kerberos;
1,883,234
public void saveDocuments(DocumentModel[] docModels) throws ClientException;
void function(DocumentModel[] docModels) throws ClientException;
/** * Bulk document saving. * * @param docModels the document models that needs to be saved * @throws ClientException */
Bulk document saving
saveDocuments
{ "repo_name": "cherryhill/collectionspace-services", "path": "services/common/src/main/java/org/collectionspace/services/nuxeo/client/java/CoreSessionInterface.java", "license": "apache-2.0", "size": 5205 }
[ "org.nuxeo.ecm.core.api.ClientException", "org.nuxeo.ecm.core.api.DocumentModel" ]
import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.*;
[ "org.nuxeo.ecm" ]
org.nuxeo.ecm;
1,048,881
@SuppressWarnings({"unchecked"}) public synchronized <T extends TopLevelItem> T copy(T src, String name) throws IOException { acl.checkPermission(Job.CREATE); T result = (T)createProject(src.getDescriptor(),name,false); // copy config Util.copyFile(Items.getConfigFile(src).getFile(),Items.getConfigFile(result).getFile()); // reload from the new config result = (T)Items.load(parent,result.getRootDir()); result.onCopiedFrom(src); add(result); ItemListener.fireOnCopied(src,result); return result; }
@SuppressWarnings({STR}) synchronized <T extends TopLevelItem> T function(T src, String name) throws IOException { acl.checkPermission(Job.CREATE); T result = (T)createProject(src.getDescriptor(),name,false); Util.copyFile(Items.getConfigFile(src).getFile(),Items.getConfigFile(result).getFile()); result = (T)Items.load(parent,result.getRootDir()); result.onCopiedFrom(src); add(result); ItemListener.fireOnCopied(src,result); return result; }
/** * Copies an existing {@link TopLevelItem} to a new name. * * The caller is responsible for calling {@link ItemListener#fireOnCopied(Item, Item)}. This method * cannot do that because it doesn't know how to make the newly added item reachable from the parent. */
Copies an existing <code>TopLevelItem</code> to a new name. The caller is responsible for calling <code>ItemListener#fireOnCopied(Item, Item)</code>. This method cannot do that because it doesn't know how to make the newly added item reachable from the parent
copy
{ "repo_name": "sumitk1/jenkins", "path": "core/src/main/java/hudson/model/ItemGroupMixIn.java", "license": "mit", "size": 9370 }
[ "hudson.model.listeners.ItemListener", "java.io.IOException" ]
import hudson.model.listeners.ItemListener; import java.io.IOException;
import hudson.model.listeners.*; import java.io.*;
[ "hudson.model.listeners", "java.io" ]
hudson.model.listeners; java.io;
1,071,968
void addScope(Group scope) { Object[] scopeInfo = new Object[3]; ArrayList addScopeList = new ArrayList(); GroupRetained group = (GroupRetained)scope.retained; initAddScope(scope); tempKey.reset(); group.addAllNodesForScopedModelClip(mirrorModelClip,addScopeList, tempKey); scopeInfo[0] = addScopeList; scopeInfo[1] = null; scopeInfo[2] = (scopes.size() > 0 ? Boolean.TRUE: Boolean.FALSE); sendMessage(SCOPE_CHANGED, scopeInfo, null); }
void addScope(Group scope) { Object[] scopeInfo = new Object[3]; ArrayList addScopeList = new ArrayList(); GroupRetained group = (GroupRetained)scope.retained; initAddScope(scope); tempKey.reset(); group.addAllNodesForScopedModelClip(mirrorModelClip,addScopeList, tempKey); scopeInfo[0] = addScopeList; scopeInfo[1] = null; scopeInfo[2] = (scopes.size() > 0 ? Boolean.TRUE: Boolean.FALSE); sendMessage(SCOPE_CHANGED, scopeInfo, null); }
/** * Appends the specified scope to this node's list of scopes. * @param scope the scope to add to this node's list of scopes */
Appends the specified scope to this node's list of scopes
addScope
{ "repo_name": "gouessej/java3d-core", "path": "src/main/java/org/jogamp/java3d/ModelClipRetained.java", "license": "gpl-2.0", "size": 31983 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
1,152,974
void onFinalImageSet(String id, @Nullable INFO imageInfo, @Nullable Animatable animatable);
void onFinalImageSet(String id, @Nullable INFO imageInfo, @Nullable Animatable animatable);
/** * Called after the final image has been set. * @param id controller id * @param imageInfo image info * @param animatable */
Called after the final image has been set
onFinalImageSet
{ "repo_name": "s1rius/fresco", "path": "drawee/src/main/java/com/facebook/drawee/controller/ControllerListener.java", "license": "mit", "size": 2107 }
[ "android.graphics.drawable.Animatable", "javax.annotation.Nullable" ]
import android.graphics.drawable.Animatable; import javax.annotation.Nullable;
import android.graphics.drawable.*; import javax.annotation.*;
[ "android.graphics", "javax.annotation" ]
android.graphics; javax.annotation;
707,383
public void sendArpRequest(DeviceId deviceId, IpAddress targetAddress, ConnectPoint inPort) { byte[] senderMacAddress; byte[] senderIpAddress; try { senderMacAddress = config.getDeviceMac(deviceId).toBytes(); senderIpAddress = config.getRouterIp(deviceId).toOctets(); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + " Aborting sendArpRequest."); return; } ARP arpRequest = new ARP(); arpRequest.setHardwareType(ARP.HW_TYPE_ETHERNET) .setProtocolType(ARP.PROTO_TYPE_IP) .setHardwareAddressLength( (byte) Ethernet.DATALAYER_ADDRESS_LENGTH) .setProtocolAddressLength((byte) Ip4Address.BYTE_LENGTH) .setOpCode(ARP.OP_REQUEST) .setSenderHardwareAddress(senderMacAddress) .setTargetHardwareAddress(MacAddress.ZERO.toBytes()) .setSenderProtocolAddress(senderIpAddress) .setTargetProtocolAddress(targetAddress.toOctets()); Ethernet eth = new Ethernet(); eth.setDestinationMACAddress(MacAddress.BROADCAST.toBytes()) .setSourceMACAddress(senderMacAddress) .setEtherType(Ethernet.TYPE_ARP).setPayload(arpRequest); removeVlanAndFlood(eth, inPort); }
void function(DeviceId deviceId, IpAddress targetAddress, ConnectPoint inPort) { byte[] senderMacAddress; byte[] senderIpAddress; try { senderMacAddress = config.getDeviceMac(deviceId).toBytes(); senderIpAddress = config.getRouterIp(deviceId).toOctets(); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + STR); return; } ARP arpRequest = new ARP(); arpRequest.setHardwareType(ARP.HW_TYPE_ETHERNET) .setProtocolType(ARP.PROTO_TYPE_IP) .setHardwareAddressLength( (byte) Ethernet.DATALAYER_ADDRESS_LENGTH) .setProtocolAddressLength((byte) Ip4Address.BYTE_LENGTH) .setOpCode(ARP.OP_REQUEST) .setSenderHardwareAddress(senderMacAddress) .setTargetHardwareAddress(MacAddress.ZERO.toBytes()) .setSenderProtocolAddress(senderIpAddress) .setTargetProtocolAddress(targetAddress.toOctets()); Ethernet eth = new Ethernet(); eth.setDestinationMACAddress(MacAddress.BROADCAST.toBytes()) .setSourceMACAddress(senderMacAddress) .setEtherType(Ethernet.TYPE_ARP).setPayload(arpRequest); removeVlanAndFlood(eth, inPort); }
/** * Sends an APR request for the target IP address to all ports except in-port. * * @param deviceId Switch device ID * @param targetAddress target IP address for ARP * @param inPort in-port */
Sends an APR request for the target IP address to all ports except in-port
sendArpRequest
{ "repo_name": "VinodKumarS-Huawei/ietf96yang", "path": "apps/segmentrouting/src/main/java/org/onosproject/segmentrouting/ArpHandler.java", "license": "apache-2.0", "size": 12378 }
[ "org.onlab.packet.Ethernet", "org.onlab.packet.Ip4Address", "org.onlab.packet.IpAddress", "org.onlab.packet.MacAddress", "org.onosproject.net.ConnectPoint", "org.onosproject.net.DeviceId", "org.onosproject.segmentrouting.config.DeviceConfigNotFoundException" ]
import org.onlab.packet.Ethernet; import org.onlab.packet.Ip4Address; import org.onlab.packet.IpAddress; import org.onlab.packet.MacAddress; import org.onosproject.net.ConnectPoint; import org.onosproject.net.DeviceId; import org.onosproject.segmentrouting.config.DeviceConfigNotFoundException;
import org.onlab.packet.*; import org.onosproject.net.*; import org.onosproject.segmentrouting.config.*;
[ "org.onlab.packet", "org.onosproject.net", "org.onosproject.segmentrouting" ]
org.onlab.packet; org.onosproject.net; org.onosproject.segmentrouting;
1,951,513
@RequestMapping(DISAPPROVE_COMMAND) public ModelAndView disapprove(@RequestParam("id") Long id) { ModelAndView model = new ModelAndView(); model = updateStatus(id, CustomerRequestStatus.REJECTED); return model; }
@RequestMapping(DISAPPROVE_COMMAND) ModelAndView function(@RequestParam("id") Long id) { ModelAndView model = new ModelAndView(); model = updateStatus(id, CustomerRequestStatus.REJECTED); return model; }
/** * Saves an entity and returns a refreshed list type page. * * @param id The id of the entity whose status is being set to approved. * @return The list type view. */
Saves an entity and returns a refreshed list type page
disapprove
{ "repo_name": "NCIP/calims", "path": "calims2-webapp/src/java/gov/nih/nci/calims2/ui/administration/customerservice/customerrequest/CustomerRequestController.java", "license": "bsd-3-clause", "size": 12394 }
[ "gov.nih.nci.calims2.domain.administration.customerservice.enumeration.CustomerRequestStatus", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestParam", "org.springframework.web.servlet.ModelAndView" ]
import gov.nih.nci.calims2.domain.administration.customerservice.enumeration.CustomerRequestStatus; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.servlet.ModelAndView;
import gov.nih.nci.calims2.domain.administration.customerservice.enumeration.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.*;
[ "gov.nih.nci", "org.springframework.web" ]
gov.nih.nci; org.springframework.web;
1,704,751
public void appendTrackedTimestampsToMetadata() throws IOException { appendFileInfo(TIMERANGE_KEY,WritableUtils.toByteArray(timeRangeTracker)); appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs)); }
void function() throws IOException { appendFileInfo(TIMERANGE_KEY,WritableUtils.toByteArray(timeRangeTracker)); appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs)); }
/** * Add TimestampRange and earliest put timestamp to Metadata */
Add TimestampRange and earliest put timestamp to Metadata
appendTrackedTimestampsToMetadata
{ "repo_name": "andrewmains12/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java", "license": "apache-2.0", "size": 56227 }
[ "java.io.IOException", "org.apache.hadoop.hbase.util.Bytes", "org.apache.hadoop.io.WritableUtils" ]
import java.io.IOException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils;
import java.io.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.io.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,323,244
private static void decodeBase256Segment(BitSource bits, StringBuffer result, Vector byteSegments) throws FormatException { // Figure out how long the Base 256 Segment is. int d1 = bits.readBits(8); int count; if (d1 == 0) { // Read the remainder of the symbol count = bits.available() / 8; } else if (d1 < 250) { count = d1; } else { count = 250 * (d1 - 249) + bits.readBits(8); } byte[] bytes = new byte[count]; for (int i = 0; i < count; i++) { // Have seen this particular error in the wild, such as at // http://www.bcgen.com/demo/IDAutomationStreamingDataMatrix.aspx?MODE=3&D=Fred&PFMT=3&PT=F&X=0.3&O=0&LM=0.2 if (bits.available() < 8) { throw FormatException.getFormatInstance(); } bytes[i] = unrandomize255State(bits.readBits(8), i); } byteSegments.addElement(bytes); try { result.append(new String(bytes, "ISO8859_1")); } catch (UnsupportedEncodingException uee) { throw new RuntimeException("Platform does not support required encoding: " + uee); } }
static void function(BitSource bits, StringBuffer result, Vector byteSegments) throws FormatException { int d1 = bits.readBits(8); int count; if (d1 == 0) { count = bits.available() / 8; } else if (d1 < 250) { count = d1; } else { count = 250 * (d1 - 249) + bits.readBits(8); } byte[] bytes = new byte[count]; for (int i = 0; i < count; i++) { if (bits.available() < 8) { throw FormatException.getFormatInstance(); } bytes[i] = unrandomize255State(bits.readBits(8), i); } byteSegments.addElement(bytes); try { result.append(new String(bytes, STR)); } catch (UnsupportedEncodingException uee) { throw new RuntimeException(STR + uee); } }
/** * See ISO 16022:2006, 5.2.9 and Annex B, B.2 */
See ISO 16022:2006, 5.2.9 and Annex B, B.2
decodeBase256Segment
{ "repo_name": "roadlabs/android", "path": "zxing-1.6/core/src/com/google/zxing/datamatrix/decoder/DecodedBitStreamParser.java", "license": "mit", "size": 16055 }
[ "com.google.zxing.FormatException", "com.google.zxing.common.BitSource", "java.io.UnsupportedEncodingException", "java.util.Vector" ]
import com.google.zxing.FormatException; import com.google.zxing.common.BitSource; import java.io.UnsupportedEncodingException; import java.util.Vector;
import com.google.zxing.*; import com.google.zxing.common.*; import java.io.*; import java.util.*;
[ "com.google.zxing", "java.io", "java.util" ]
com.google.zxing; java.io; java.util;
2,716,923
@ApiModelProperty(value = "For bank accounts only. See Bank Account types") public BankAccountTypeEnum getBankAccountType() { return bankAccountType; }
@ApiModelProperty(value = STR) BankAccountTypeEnum function() { return bankAccountType; }
/** * For bank accounts only. See Bank Account types * * @return bankAccountType */
For bank accounts only. See Bank Account types
getBankAccountType
{ "repo_name": "XeroAPI/Xero-Java", "path": "src/main/java/com/xero/models/accounting/Account.java", "license": "mit", "size": 27803 }
[ "io.swagger.annotations.ApiModelProperty" ]
import io.swagger.annotations.ApiModelProperty;
import io.swagger.annotations.*;
[ "io.swagger.annotations" ]
io.swagger.annotations;
2,397,098
private void decompressFirstKV(ByteBuffer out, DataInputStream in) throws IOException { int kvPos = out.position(); out.putInt(keyLength); out.putInt(valueLength); prevTimestampOffset = out.position() + keyLength - KeyValue.TIMESTAMP_TYPE_SIZE; ByteBufferUtils.copyFromStreamToBuffer(out, in, keyLength + valueLength); rowLength = out.getShort(kvPos + KeyValue.ROW_OFFSET); familyLength = out.get(kvPos + KeyValue.ROW_OFFSET + KeyValue.ROW_LENGTH_SIZE + rowLength); type = out.get(prevTimestampOffset + KeyValue.TIMESTAMP_SIZE); } }
void function(ByteBuffer out, DataInputStream in) throws IOException { int kvPos = out.position(); out.putInt(keyLength); out.putInt(valueLength); prevTimestampOffset = out.position() + keyLength - KeyValue.TIMESTAMP_TYPE_SIZE; ByteBufferUtils.copyFromStreamToBuffer(out, in, keyLength + valueLength); rowLength = out.getShort(kvPos + KeyValue.ROW_OFFSET); familyLength = out.get(kvPos + KeyValue.ROW_OFFSET + KeyValue.ROW_LENGTH_SIZE + rowLength); type = out.get(prevTimestampOffset + KeyValue.TIMESTAMP_SIZE); } }
/** * Copies the first key/value from the given stream, and initializes * decompression state based on it. Assumes that we have already read key * and value lengths. Does not set {@link #qualifierLength} (not used by * decompression) or {@link #prevOffset} (set by the calle afterwards). */
Copies the first key/value from the given stream, and initializes decompression state based on it. Assumes that we have already read key and value lengths. Does not set <code>#qualifierLength</code> (not used by decompression) or <code>#prevOffset</code> (set by the calle afterwards)
decompressFirstKV
{ "repo_name": "zqxjjj/NobidaBase", "path": "target/hbase-0.94.9/hbase-0.94.9/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java", "license": "apache-2.0", "size": 21334 }
[ "java.io.DataInputStream", "java.io.IOException", "java.nio.ByteBuffer", "org.apache.hadoop.hbase.KeyValue", "org.apache.hadoop.hbase.util.ByteBufferUtils" ]
import java.io.DataInputStream; import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import java.io.*; import java.nio.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*;
[ "java.io", "java.nio", "org.apache.hadoop" ]
java.io; java.nio; org.apache.hadoop;
887,462
void setServlet(Servlet servlet) { this.servlet = servlet; }
void setServlet(Servlet servlet) { this.servlet = servlet; }
/** * Set the servlet that will be executed at the end of this chain. * * @param servlet The Wrapper for the servlet to be executed */
Set the servlet that will be executed at the end of this chain
setServlet
{ "repo_name": "yuyupapa/OpenSource", "path": "apache-tomcat-6.0.48/java/org/apache/catalina/core/ApplicationFilterChain.java", "license": "apache-2.0", "size": 21017 }
[ "javax.servlet.Servlet" ]
import javax.servlet.Servlet;
import javax.servlet.*;
[ "javax.servlet" ]
javax.servlet;
2,492,650
public static void printArrayList(List<String> list) { for (String s : list) { System.out.println(s); } }
static void function(List<String> list) { for (String s : list) { System.out.println(s); } }
/** * Prints each String element of an ArrayList in a new line. * * @param list */
Prints each String element of an ArrayList in a new line
printArrayList
{ "repo_name": "peeyushsahu/PubMedMiner", "path": "src/main/java/de/unibonn/vishal/utils/NERUtility.java", "license": "gpl-2.0", "size": 3230 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,450,614
public static boolean equalContentsIgnoreOrder(Collection<?> c1, Collection<?> c2) { return c1.size() == c2.size() && c1.containsAll(c2); }
static boolean function(Collection<?> c1, Collection<?> c2) { return c1.size() == c2.size() && c1.containsAll(c2); }
/** * Returns true if the contents of the two given collections are equal, ignoring order. */
Returns true if the contents of the two given collections are equal, ignoring order
equalContentsIgnoreOrder
{ "repo_name": "vespa-engine/vespa", "path": "vespajlib/src/main/java/com/yahoo/collections/CollectionUtil.java", "license": "apache-2.0", "size": 3379 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
949,070
public static Language forTwoLetterIsoCode(String code) { for (Language language : Language.values()) if (language.code.toLowerCase(Locale.US).equals(code.toLowerCase(Locale.US).trim())) return language; throw new IllegalArgumentException("Code \"" + code + "\" does not match any language!"); }
static Language function(String code) { for (Language language : Language.values()) if (language.code.toLowerCase(Locale.US).equals(code.toLowerCase(Locale.US).trim())) return language; throw new IllegalArgumentException(STRSTR\STR); }
/** * Find the language by a ISO two letter country code. * * @param code The code to search for. * @return The language instance. * @throws IllegalArgumentException If the code is not recognized. */
Find the language by a ISO two letter country code
forTwoLetterIsoCode
{ "repo_name": "salema/Podcatcher-Deluxe-Android-Studio", "path": "app/src/main/java/com/podcatcher/deluxe/model/types/Language.java", "license": "gpl-3.0", "size": 2840 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
2,692,433
@Test public void testMockInvokerProviderRpcException() { URL url = URL.valueOf("remote://1.2.3.4/" + IHelloRpcService.class.getName()); url = url.addParameter(MOCK_KEY, "true").addParameter("invoke_return_error", "true"); Invoker<IHelloRpcService> cluster = getClusterInvoker(url); RpcInvocation invocation = new RpcInvocation(); invocation.setMethodName("getSomething4"); Result ret = cluster.invoke(invocation); Assertions.assertEquals("something4mock", ret.getValue()); }
void function() { URL url = URL.valueOf(STRtrueSTRinvoke_return_errorSTRtrueSTRgetSomething4STRsomething4mock", ret.getValue()); }
/** * Test if mock policy works fine: ProviderRpcException */
Test if mock policy works fine: ProviderRpcException
testMockInvokerProviderRpcException
{ "repo_name": "qtvbwfn/dubbo", "path": "dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/support/wrapper/MockProviderRpcExceptionTest.java", "license": "apache-2.0", "size": 7138 }
[ "org.apache.dubbo.common.URL" ]
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.*;
[ "org.apache.dubbo" ]
org.apache.dubbo;
1,496,197
Map<String,Entry<Long,Long>> getTableSpaceUtilization();
Map<String,Entry<Long,Long>> getTableSpaceUtilization();
/** * Gets the space usage and limit for each table. */
Gets the space usage and limit for each table
getTableSpaceUtilization
{ "repo_name": "HubSpot/hbase", "path": "hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterWrapper.java", "license": "apache-2.0", "size": 3581 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,532,288
public float getChannelZeroMax() { this.channelZeroMax.flip(); int channelData = (int) this.channelZeroMax.order(ByteOrder.BIG_ENDIAN).getShort(); return getVoltage(channelData); }
float function() { this.channelZeroMax.flip(); int channelData = (int) this.channelZeroMax.order(ByteOrder.BIG_ENDIAN).getShort(); return getVoltage(channelData); }
/** * A method that gets the channel zero maximum data as a converted decimal float * * @return channelZeroMax - the 2 bytes of the channelZeroMax data as a float */
A method that gets the channel zero maximum data as a converted decimal float
getChannelZeroMax
{ "repo_name": "csjx/realtime-data", "path": "src/main/java/edu/hawaii/soest/kilonalu/adam/AdamParser.java", "license": "gpl-2.0", "size": 23922 }
[ "java.nio.ByteOrder" ]
import java.nio.ByteOrder;
import java.nio.*;
[ "java.nio" ]
java.nio;
1,138,851
public CoinbaseAccountChanges getCoinbaseAccountChanges(Integer page) throws IOException { final CoinbaseAccountChanges accountChanges = coinbase.getAccountChanges( page, exchange.getExchangeSpecification().getApiKey(), signatureCreator, exchange.getNonceFactory()); return accountChanges; }
CoinbaseAccountChanges function(Integer page) throws IOException { final CoinbaseAccountChanges accountChanges = coinbase.getAccountChanges( page, exchange.getExchangeSpecification().getApiKey(), signatureCreator, exchange.getNonceFactory()); return accountChanges; }
/** * Authenticated resource which returns all related changes to an account. This is an alternative * to the {@code getCoinbaseTransactions} API call. It is designed to be faster and provide more * detail so you can generate an overview/summary of individual account changes. * * @param page Optional parameter to request a desired page of results. Will return page 1 if the * supplied page is null or less than 1. * @return The current user, balance, and the most recent account changes. * @throws IOException * @see <a * href="https://coinbase.com/api/doc/1.0/account_changes/index.html">coinbase.com/api/doc/1.0/account_changes/index.html</a> */
Authenticated resource which returns all related changes to an account. This is an alternative to the getCoinbaseTransactions API call. It is designed to be faster and provide more detail so you can generate an overview/summary of individual account changes
getCoinbaseAccountChanges
{ "repo_name": "andre77/XChange", "path": "xchange-coinbase/src/main/java/org/knowm/xchange/coinbase/service/CoinbaseAccountServiceRaw.java", "license": "mit", "size": 28855 }
[ "java.io.IOException", "org.knowm.xchange.coinbase.dto.account.CoinbaseAccountChanges" ]
import java.io.IOException; import org.knowm.xchange.coinbase.dto.account.CoinbaseAccountChanges;
import java.io.*; import org.knowm.xchange.coinbase.dto.account.*;
[ "java.io", "org.knowm.xchange" ]
java.io; org.knowm.xchange;
2,117,230
private Map<String, Object> extractSchedulerOptions(Map<String, Object> parameters) { if (model.getScheduler() != null) { // include default options first Map<String, Object> answer = new LinkedHashMap<>(); model.getDefaultEndpointOptions().forEach((key, value) -> { String schedulerKey = asSchedulerKey(key); if (schedulerKey != null) { answer.put(schedulerKey, value); } }); // and then override with from parameters for (Iterator<Map.Entry<String, Object>> it = parameters.entrySet().iterator(); it.hasNext();) { Map.Entry<String, Object> entry = it.next(); String schedulerKey = asSchedulerKey(entry.getKey()); if (schedulerKey != null) { Object value = entry.getValue(); answer.put(schedulerKey, value); // and remove as it should not be part of regular parameters it.remove(); } } return answer; } return null; }
Map<String, Object> function(Map<String, Object> parameters) { if (model.getScheduler() != null) { Map<String, Object> answer = new LinkedHashMap<>(); model.getDefaultEndpointOptions().forEach((key, value) -> { String schedulerKey = asSchedulerKey(key); if (schedulerKey != null) { answer.put(schedulerKey, value); } }); for (Iterator<Map.Entry<String, Object>> it = parameters.entrySet().iterator(); it.hasNext();) { Map.Entry<String, Object> entry = it.next(); String schedulerKey = asSchedulerKey(entry.getKey()); if (schedulerKey != null) { Object value = entry.getValue(); answer.put(schedulerKey, value); it.remove(); } } return answer; } return null; }
/** * Extracts the scheduler options from the parameters. * <p/> * These options start with <tt>scheduler</tt> in their key name, such as <tt>schedulerPeriod</tt> * which is removed from parameters, and transformed into keys without the <tt>scheduler</tt> prefix. * * @return the scheduler options, or <tt>null</tt> if scheduler not enabled */
Extracts the scheduler options from the parameters. These options start with scheduler in their key name, such as schedulerPeriod which is removed from parameters, and transformed into keys without the scheduler prefix
extractSchedulerOptions
{ "repo_name": "scranton/camel", "path": "connectors/camel-connector/src/main/java/org/apache/camel/component/connector/DefaultConnectorComponent.java", "license": "apache-2.0", "size": 16831 }
[ "java.util.Iterator", "java.util.LinkedHashMap", "java.util.Map" ]
import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
730,483
/**
 * Returns the API's {@link AchievementManager}, logging a missing-component
 * error when it has not been registered.
 *
 * @return the AchievementManager, or {@code null} when none is registered
 */
public static AchievementManager getAchievementManager() {
    if (achievementM == null) {
        Error.MISSING.add("AchievementManager");
    }
    return achievementM;
}
static AchievementManager function() { if(achievementM == null) Error.MISSING.add(STR); return achievementM; }
/** * Get the AchievementManager. * Add error to logger if null. * * @return The API's AchievementManager. */
Get the AchievementManager. Add error to logger if null
getAchievementManager
{ "repo_name": "dracnis/VanillaPlus", "path": "VanillaPlus/src/fr/soreth/VanillaPlus/VanillaPlusCore.java", "license": "mit", "size": 15268 }
[ "fr.soreth.VanillaPlus" ]
import fr.soreth.VanillaPlus;
import fr.soreth.*;
[ "fr.soreth" ]
fr.soreth;
826,615
/**
 * Generates the header of the created file.
 * <p>
 * The default implementation emits nothing; subclasses may override to
 * write language-specific headers.
 *
 * @param qualifiedName the name of the type for which the file was created
 * @param appendable the appendable to write the header to
 * @param context the generator context
 */
protected void generateFileHeader(QualifiedName qualifiedName, ExtraLanguageAppendable appendable, IExtraLanguageGeneratorContext context) {
	//
}
void function(QualifiedName qualifiedName, ExtraLanguageAppendable appendable, IExtraLanguageGeneratorContext context) {
/** Generate the header of the file.. * * @param qualifiedName the name of the type for which the file was created. * @param appendable the appendable. * @param context the context. */
Generate the header of the file.
generateFileHeader
{ "repo_name": "sarl/sarl", "path": "main/coreplugins/io.sarl.lang/src/io/sarl/lang/extralanguage/compiler/AbstractExtraLanguageGenerator.java", "license": "apache-2.0", "size": 26754 }
[ "org.eclipse.xtext.naming.QualifiedName" ]
import org.eclipse.xtext.naming.QualifiedName;
import org.eclipse.xtext.naming.*;
[ "org.eclipse.xtext" ]
org.eclipse.xtext;
1,507,571
/**
 * Looks up a previously deleted {@link FileLeaf} whose content hash matches the
 * given component, delegating to the shared multimap search.
 *
 * @param createdComponent the newly created component to match against
 * @return the matching deleted leaf, or {@code null} when none is found
 */
@Override
public FileLeaf findDeletedByContent(FileLeaf createdComponent) {
    // only the cast to FileLeaf is specific to this lookup
    return (FileLeaf) findComponentInSetMultimap(createdComponent, getDeletedByContentHash());
}
FileLeaf function(FileLeaf createdComponent){ return (FileLeaf)findComponentInSetMultimap(createdComponent, getDeletedByContentHash()); }
/** * Searches the SetMultiMap<String, FileComponent> deletedByContentHash for * a deleted FileComponent with the same content hash. If several exist, the temporally * closest is returned. * * @param createdComponent The previously deleted component * @return */
Searches the SetMultiMap deletedByContentHash for a deleted FileComponent with the same content hash. If several exist, the temporally closest is returned
findDeletedByContent
{ "repo_name": "PeerWasp/PeerWasp", "path": "peerbox/src/main/java/org/peerbox/watchservice/filetree/FileTree.java", "license": "mit", "size": 9216 }
[ "org.peerbox.watchservice.filetree.composite.FileLeaf" ]
import org.peerbox.watchservice.filetree.composite.FileLeaf;
import org.peerbox.watchservice.filetree.composite.*;
[ "org.peerbox.watchservice" ]
org.peerbox.watchservice;
2,214,651
/**
 * Sets the {@link ClickAction} that will be executed when the text is
 * clicked in the chat.
 *
 * @param clickAction the new click action, or {@code null} for none
 * @return this builder, for chaining
 */
public Builder onClick(@Nullable ClickAction<?> clickAction) {
    this.clickAction = clickAction;
    return this;
}
Builder function(@Nullable ClickAction<?> clickAction) { this.clickAction = clickAction; return this; }
/** * Sets the {@link ClickAction} that will be executed if the text is * clicked in the chat. * * @param clickAction The new click action for the text * @return This text builder * @see Text#getClickAction() */
Sets the <code>ClickAction</code> that will be executed if the text is clicked in the chat
onClick
{ "repo_name": "natrolite/natrolite", "path": "natrolite-api/src/main/java/org/natrolite/text/Text.java", "license": "gpl-3.0", "size": 30695 }
[ "javax.annotation.Nullable", "org.natrolite.text.action.ClickAction" ]
import javax.annotation.Nullable; import org.natrolite.text.action.ClickAction;
import javax.annotation.*; import org.natrolite.text.action.*;
[ "javax.annotation", "org.natrolite.text" ]
javax.annotation; org.natrolite.text;
1,290,396
@Test public void testMulti() throws Exception { //ensure that server time increments every time we do an operation, otherwise //previous deletes will eclipse successive puts having the same timestamp EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); Table t = util.getConnection().getTable(tableName); List<Put> puts = new ArrayList<>(); Put p = new Put(row1); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); // before HBASE-4331, this would throw an exception t.put(puts); checkRowAndDelete(t,row1,1); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,0); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); // before HBASE-4331, this would throw an exception t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,0); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(dummy, dummy, dummy); puts.add(p); // this worked fine even before HBASE-4331 t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,1); puts.clear(); p = new Put(row1); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(dummy, dummy, dummy); puts.add(p); // this worked fine even before HBASE-4331 t.put(puts); checkRowAndDelete(t,row1,1); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,1); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); 
// before HBASE-4331, this would throw an exception t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,1); checkRowAndDelete(t,row3,0); t.close(); EnvironmentEdgeManager.reset(); }
void function() throws Exception { EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); Table t = util.getConnection().getTable(tableName); List<Put> puts = new ArrayList<>(); Put p = new Put(row1); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); t.put(puts); checkRowAndDelete(t,row1,1); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,0); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,0); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(dummy, dummy, dummy); puts.add(p); t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,1); puts.clear(); p = new Put(row1); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(dummy, dummy, dummy); puts.add(p); t.put(puts); checkRowAndDelete(t,row1,1); checkRowAndDelete(t,row2,0); checkRowAndDelete(t,row3,1); puts.clear(); p = new Put(row1); p.addColumn(test, dummy, dummy); puts.add(p); p = new Put(row2); p.addColumn(dummy, dummy, dummy); puts.add(p); p = new Put(row3); p.addColumn(test, dummy, dummy); puts.add(p); t.put(puts); checkRowAndDelete(t,row1,0); checkRowAndDelete(t,row2,1); checkRowAndDelete(t,row3,0); t.close(); EnvironmentEdgeManager.reset(); }
/** * Test various multiput operations. * @throws Exception */
Test various multiput operations
testMulti
{ "repo_name": "JingchengDu/hbase", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java", "license": "apache-2.0", "size": 7173 }
[ "java.util.ArrayList", "java.util.List", "org.apache.hadoop.hbase.client.Put", "org.apache.hadoop.hbase.client.Table", "org.apache.hadoop.hbase.util.EnvironmentEdgeManager", "org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper", "org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge" ]
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import java.util.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.util.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
1,938,581
/**
 * Creates a value to store for the given engine; the default implementation
 * returns {@code null} and is meant to be overridden.
 *
 * @param engine the Prolog engine
 * @return the new value, or {@code null} by default
 */
protected T createValue(Prolog engine) {
    return null;
}
T function(Prolog engine) { return null; }
/** * Creates a value to store, returns null by default. * * @param engine Prolog engine. * @return new value. */
Creates a value to store, returns null by default
createValue
{ "repo_name": "gerrit-review/gerrit", "path": "java/com/google/gerrit/server/rules/StoredValue.java", "license": "apache-2.0", "size": 2740 }
[ "com.googlecode.prolog_cafe.lang.Prolog" ]
import com.googlecode.prolog_cafe.lang.Prolog;
import com.googlecode.prolog_cafe.lang.*;
[ "com.googlecode.prolog_cafe" ]
com.googlecode.prolog_cafe;
833,786
/**
 * Called randomly when setTickRandomly is set to true (used by e.g. crops to grow);
 * intentionally a no-op for this block.
 */
public void randomTick(World worldIn, BlockPos pos, IBlockState state, Random random) { }
void function(World worldIn, BlockPos pos, IBlockState state, Random random) { }
/** * Called randomly when setTickRandomly is set to true (used by e.g. crops to grow, etc.) */
Called randomly when setTickRandomly is set to true (used by e.g. crops to grow, etc.)
randomTick
{ "repo_name": "tomtomtom09/CampCraft", "path": "build/tmp/recompileMc/sources/net/minecraft/block/BlockTripWireHook.java", "license": "gpl-3.0", "size": 13496 }
[ "java.util.Random", "net.minecraft.block.state.IBlockState", "net.minecraft.util.BlockPos", "net.minecraft.world.World" ]
import java.util.Random; import net.minecraft.block.state.IBlockState; import net.minecraft.util.BlockPos; import net.minecraft.world.World;
import java.util.*; import net.minecraft.block.state.*; import net.minecraft.util.*; import net.minecraft.world.*;
[ "java.util", "net.minecraft.block", "net.minecraft.util", "net.minecraft.world" ]
java.util; net.minecraft.block; net.minecraft.util; net.minecraft.world;
2,020,573
/**
 * Ensures that the given {@link ParsableByteArray} has at least the specified limit.
 *
 * @param byteArray the array to check and possibly replace
 * @param limit the minimum required limit, in bytes
 */
private static void ensureCapacity(ParsableByteArray byteArray, int limit) {
    if (byteArray.limit() >= limit) {
        return;
    }
    // reset swaps in a fresh backing array, so previous content is discarded
    byteArray.reset(new byte[limit], limit);
}
static void function(ParsableByteArray byteArray, int limit) { if (byteArray.limit() < limit) { byteArray.reset(new byte[limit], limit); } }
/** * Ensure that the passed {@link ParsableByteArray} is of at least the specified limit. */
Ensure that the passed <code>ParsableByteArray</code> is of at least the specified limit
ensureCapacity
{ "repo_name": "Lee-Wills/-tv", "path": "mmd/library/src/main/java/com/google/android/exoplayer/extractor/RollingSampleBuffer.java", "license": "gpl-3.0", "size": 24705 }
[ "com.google.android.exoplayer.util.ParsableByteArray" ]
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.*;
[ "com.google.android" ]
com.google.android;
2,674,620
public int setCustomOptions(User loggedInUser, String ksLabel, List<String> options) throws FaultException { KickstartData ksdata = XmlRpcKickstartHelper.getInstance().lookupKsData(ksLabel, loggedInUser.getOrg()); if (ksdata == null) { throw new FaultException(-3, "kickstartProfileNotFound", "No Kickstart Profile found with label: " + ksLabel); } Long ksid = ksdata.getId(); KickstartOptionsCommand cmd = new KickstartOptionsCommand(ksid, loggedInUser); Set<KickstartCommand> customSet = new LinkedHashSet<>(); if (options != null) { for (int i = 0; i < options.size(); i++) { String option = options.get(i); KickstartCommand custom = new KickstartCommand(); custom.setCommandName( KickstartFactory.lookupKickstartCommandName("custom")); // the following is a workaround to ensure that the options are rendered // on the UI on separate lines. if (i < (options.size() - 1)) { option += "\r"; } custom.setArguments(option); custom.setKickstartData(cmd.getKickstartData()); custom.setCustomPosition(customSet.size()); custom.setCreated(new Date()); custom.setModified(new Date()); customSet.add(custom); } if (cmd.getKickstartData().getCustomOptions() == null) { cmd.getKickstartData().setCustomOptions(customSet); } else { cmd.getKickstartData().setCustomOptions(customSet); } cmd.store(); } return 1; }
int function(User loggedInUser, String ksLabel, List<String> options) throws FaultException { KickstartData ksdata = XmlRpcKickstartHelper.getInstance().lookupKsData(ksLabel, loggedInUser.getOrg()); if (ksdata == null) { throw new FaultException(-3, STR, STR + ksLabel); } Long ksid = ksdata.getId(); KickstartOptionsCommand cmd = new KickstartOptionsCommand(ksid, loggedInUser); Set<KickstartCommand> customSet = new LinkedHashSet<>(); if (options != null) { for (int i = 0; i < options.size(); i++) { String option = options.get(i); KickstartCommand custom = new KickstartCommand(); custom.setCommandName( KickstartFactory.lookupKickstartCommandName(STR)); if (i < (options.size() - 1)) { option += "\r"; } custom.setArguments(option); custom.setKickstartData(cmd.getKickstartData()); custom.setCustomPosition(customSet.size()); custom.setCreated(new Date()); custom.setModified(new Date()); customSet.add(custom); } if (cmd.getKickstartData().getCustomOptions() == null) { cmd.getKickstartData().setCustomOptions(customSet); } else { cmd.getKickstartData().setCustomOptions(customSet); } cmd.store(); } return 1; }
/** * Set custom options for a kickstart profile. * @param loggedInUser The current user * @param ksLabel the kickstart label * @param options the custom options to set * @return a int being the number of options set * @throws FaultException A FaultException is thrown if * the profile associated with ksLabel cannot be found * * @xmlrpc.doc Set custom options for a kickstart profile. * @xmlrpc.param #session_key() * @xmlrpc.param #param("string","ksLabel") * @xmlrpc.param #param("string[]","options") * @xmlrpc.returntype #return_int_success() */
Set custom options for a kickstart profile
setCustomOptions
{ "repo_name": "mcalmer/spacewalk", "path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/kickstart/profile/ProfileHandler.java", "license": "gpl-2.0", "size": 65855 }
[ "com.redhat.rhn.FaultException", "com.redhat.rhn.domain.kickstart.KickstartCommand", "com.redhat.rhn.domain.kickstart.KickstartData", "com.redhat.rhn.domain.kickstart.KickstartFactory", "com.redhat.rhn.domain.user.User", "com.redhat.rhn.frontend.xmlrpc.kickstart.XmlRpcKickstartHelper", "com.redhat.rhn.manager.kickstart.KickstartOptionsCommand", "java.util.Date", "java.util.LinkedHashSet", "java.util.List", "java.util.Set" ]
import com.redhat.rhn.FaultException; import com.redhat.rhn.domain.kickstart.KickstartCommand; import com.redhat.rhn.domain.kickstart.KickstartData; import com.redhat.rhn.domain.kickstart.KickstartFactory; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.frontend.xmlrpc.kickstart.XmlRpcKickstartHelper; import com.redhat.rhn.manager.kickstart.KickstartOptionsCommand; import java.util.Date; import java.util.LinkedHashSet; import java.util.List; import java.util.Set;
import com.redhat.rhn.*; import com.redhat.rhn.domain.kickstart.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.frontend.xmlrpc.kickstart.*; import com.redhat.rhn.manager.kickstart.*; import java.util.*;
[ "com.redhat.rhn", "java.util" ]
com.redhat.rhn; java.util;
2,540,274
/**
 * Sets the label to use for the node.
 *
 * @param label the new label, not empty
 * @return this builder, for chaining
 */
public Builder label(String label) {
    JodaBeanUtils.notEmpty(label, "label");
    this.label = label;
    return this;
}
Builder function(String label) { JodaBeanUtils.notEmpty(label, "label"); this.label = label; return this; }
/** * Sets the label to use for the node, defaulted. * <p> * When building, this will default based on the tenor if not specified. * @param label the new value, not empty * @return this, for chaining, not null */
Sets the label to use for the node, defaulted. When building, this will default based on the tenor if not specified
label
{ "repo_name": "OpenGamma/Strata", "path": "modules/market/src/main/java/com/opengamma/strata/market/curve/node/IborIborSwapCurveNode.java", "license": "apache-2.0", "size": 26820 }
[ "org.joda.beans.JodaBeanUtils" ]
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.*;
[ "org.joda.beans" ]
org.joda.beans;
2,238,936
/**
 * Moves the label halo graphics and then the label graphic itself to the given point.
 * <p>
 * NOTE: not thread-safe unless callers synchronize externally.
 *
 * @param pt the new location for the text graphics
 */
private void updateTextGraphics(Point pt) {
    // halos first, then the label on top of them
    for (int haloId : labelHaloGraphicIds) {
        updateGraphic(haloId, pt);
    }
    updateGraphic(labelGraphicId, pt);
}
void function(Point pt) { for (int id : labelHaloGraphicIds) { updateGraphic(id, pt); } updateGraphic(labelGraphicId, pt); }
/** * Note: this method is not thread-safe unless you synchronize your call to it. * @param pt */
Note: this method is not thread-safe unless you synchronize your call to it
updateTextGraphics
{ "repo_name": "Esri/vehicle-commander-java", "path": "source/VehicleCommander/src/com/esri/vehiclecommander/controller/MgrsLayerController.java", "license": "apache-2.0", "size": 7666 }
[ "com.esri.core.geometry.Point" ]
import com.esri.core.geometry.Point;
import com.esri.core.geometry.*;
[ "com.esri.core" ]
com.esri.core;
1,381,859
/**
 * Writes a delta to the specified stream by wrapping it in a {@link DataOutputStream}
 * and delegating to the {@code DataOutputStream} overload.
 *
 * @param os the stream to write the delta to
 * @throws IOException if writing fails
 */
public void writeDelta(OutputStream os) throws IOException {
    writeDelta(new DataOutputStream(os));
}
void function(OutputStream os) throws IOException { writeDelta(new DataOutputStream(os)); }
/** * Write a delta to the specified stream. */
Write a delta to the specified stream
writeDelta
{ "repo_name": "Netflix/zeno", "path": "src/main/java/com/netflix/zeno/fastblob/io/FastBlobWriter.java", "license": "apache-2.0", "size": 10978 }
[ "java.io.DataOutputStream", "java.io.IOException", "java.io.OutputStream" ]
import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,514,244
/**
 * Defines a new configuration with no special validation logic.
 *
 * @param name the name of the config parameter
 * @param type the type of the config
 * @param defaultValue the default value used when the config is absent
 * @param importance the importance of this config
 * @param documentation the documentation string for the config
 * @param group the group this config belongs to
 * @param orderInGroup the order of this config in the group
 * @param width the width of the config
 * @param displayName the name suitable for display
 * @param recommender provides valid values given the parent configuration values
 * @return this ConfigDef, for chaining
 */
public ConfigDef define(String name, Type type, Object defaultValue, Importance importance,
        String documentation, String group, int orderInGroup, Width width, String displayName,
        Recommender recommender) {
    // no validator and no dependents for this overload
    return define(name, type, defaultValue, null, importance, documentation, group,
            orderInGroup, width, displayName, Collections.emptyList(), recommender);
}
ConfigDef function(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender) { return define(name, type, defaultValue, null, importance, documentation, group, orderInGroup, width, displayName, Collections.emptyList(), recommender); }
/** * Define a new configuration with no special validation logic and no custom recommender * @param name the name of the config parameter * @param type the type of the config * @param defaultValue the default value to use if this config isn't present * @param importance the importance of this config * @param documentation the documentation string for the config * @param group the group this config belongs to * @param orderInGroup the order of this config in the group * @param width the width of the config * @param displayName the name suitable for display * @param recommender the recommender provides valid values given the parent configuration values * @return This ConfigDef so you can chain calls */
Define a new configuration with no special validation logic and no custom recommender
define
{ "repo_name": "gf53520/kafka", "path": "clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java", "license": "apache-2.0", "size": 59841 }
[ "java.util.Collections" ]
import java.util.Collections;
import java.util.*;
[ "java.util" ]
java.util;
1,386,400
final Vertex outVertex = oldEdge.getVertex(Direction.OUT); final Vertex inVertex = oldEdge.getVertex(Direction.IN); final Edge newEdge = graph.addEdge(newId, outVertex, inVertex, newLabel); ElementHelper.copyProperties(oldEdge, newEdge); graph.removeEdge(oldEdge); return newEdge; }
final Vertex outVertex = oldEdge.getVertex(Direction.OUT); final Vertex inVertex = oldEdge.getVertex(Direction.IN); final Edge newEdge = graph.addEdge(newId, outVertex, inVertex, newLabel); ElementHelper.copyProperties(oldEdge, newEdge); graph.removeEdge(oldEdge); return newEdge; }
/** * An edge is relabeled by creating a new edge with the same properties, but new label. * Note that an edge is deleted and an edge is added. * * @param graph the graph to add the new edge to * @param oldEdge the existing edge to "relabel" * @param newId the id of the new edge * @param newLabel the label of the new edge * @return the newly created edge */
An edge is relabeled by creating a new edge with the same properties, but new label. Note that an edge is deleted and an edge is added
relabelEdge
{ "repo_name": "datablend/blueprints", "path": "blueprints-core/src/main/java/com/tinkerpop/blueprints/util/EdgeHelper.java", "license": "bsd-3-clause", "size": 2001 }
[ "com.tinkerpop.blueprints.Direction", "com.tinkerpop.blueprints.Edge", "com.tinkerpop.blueprints.Vertex" ]
import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.*;
[ "com.tinkerpop.blueprints" ]
com.tinkerpop.blueprints;
1,547,956
/**
 * Saves a TaskList into the data file with the given filename, overwriting
 * any existing content.
 *
 * @param tasks the TaskList containing the tasks to persist
 * @param filename the file to write the serialized task list to
 * @return a success feedback message
 * @throws Error wrapping any I/O failure (original contract preserved)
 */
public String save(TaskList tasks, String filename) {
    logger.info(MESSAGE_SAVING_START);
    // FIX: use try-with-resources so the writer is closed even when write() throws;
    // the old code leaked the writer on any failure after opening
    try (BufferedWriter out = new BufferedWriter(new FileWriter(filename, false))) {
        this.writer = out;
        out.write(serialize(tasks));
    }
    catch (IOException e) {
        logger.error(ERROR_IO);
        throw new Error(ERROR_IO);
    }
    logger.info(MESSAGE_SAVING_DONE);
    return MESSAGE_SUCCESS;
}
String function(TaskList tasks, String filename) { logger.info(MESSAGE_SAVING_START); try { this.writer = new BufferedWriter(new FileWriter(filename, false)); this.writer.write(serialize(tasks)); this.writer.close(); } catch (IOException e) { logger.error(ERROR_IO); throw new Error(ERROR_IO); } logger.info(MESSAGE_SAVING_DONE); return MESSAGE_SUCCESS; }
/** * Save a TaskList object into the data file with the given filename. * * @param tasks the TaskList object which contains the list of tasks. * @param filename the name of the task list to be stored. * @return a feedback message. */
Save a TaskList object into the data file with the given filename
save
{ "repo_name": "CS2103TAug2014-W15-4J/main", "path": "src/controller/Storage.java", "license": "mit", "size": 7591 }
[ "java.io.BufferedWriter", "java.io.FileWriter", "java.io.IOException" ]
import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,593,471
/**
 * Creates a new {@link GridLayout} with the given number of columns,
 * unequal column widths, and no margins.
 *
 * @param numColumns the number of columns for the layout
 * @return the configured GridLayout
 */
public static GridLayout createGridLayout(int numColumns) {
    GridLayout layout = new GridLayout();
    layout.numColumns = numColumns;
    layout.makeColumnsEqualWidth = false;
    layout.marginWidth = 0;
    layout.marginHeight = 0;
    return layout;
}
static GridLayout function(int numColumns) { GridLayout toReturn = new GridLayout(); toReturn.numColumns = numColumns; toReturn.makeColumnsEqualWidth = false; toReturn.marginWidth = 0; toReturn.marginHeight = 0; return toReturn; }
/** * Creates a new GridLayout with the given number of columns. * * @return A new GridLayout with the given number of columns. */
Creates a new GridLayout with the given number of columns
createGridLayout
{ "repo_name": "rex-xxx/mt6572_x201", "path": "tools/motodev/src/plugins/common/src/com/motorola/studio/android/common/utilities/ui/WidgetsFactory.java", "license": "gpl-2.0", "size": 13671 }
[ "org.eclipse.swt.layout.GridLayout" ]
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.layout.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
2,172,653
/**
 * Stores the entity's current position in the provided Location object.
 * <p>
 * If the provided Location is null this method does nothing and returns null.
 *
 * @param loc the location to copy into
 * @return the Location object provided, or null
 */
public Location getLocation(Location loc);
Location function(Location loc);
/** * Stores the entity's current position in the provided Location object. * <p> * If the provided Location is null this method does nothing and returns * null. * * @param loc the location to copy into * @return The Location object provided or null */
Stores the entity's current position in the provided Location object. If the provided Location is null this method does nothing and returns null
getLocation
{ "repo_name": "thedj21/bukkit--src", "path": "src/main/java/org/bukkit/entity/Entity.java", "license": "gpl-3.0", "size": 9395 }
[ "org.bukkit.Location" ]
import org.bukkit.Location;
import org.bukkit.*;
[ "org.bukkit" ]
org.bukkit;
421,306
/**
 * Returns a new set containing the first five ints, asserting the
 * expected empty-then-populated states along the way.
 */
private ConcurrentSkipListSet set5() {
    ConcurrentSkipListSet q = new ConcurrentSkipListSet();
    assertTrue(q.isEmpty());
    for (Object element : new Object[] {one, two, three, four, five}) {
        q.add(element);
    }
    assertEquals(5, q.size());
    return q;
}
ConcurrentSkipListSet function() { ConcurrentSkipListSet q = new ConcurrentSkipListSet(); assertTrue(q.isEmpty()); q.add(one); q.add(two); q.add(three); q.add(four); q.add(five); assertEquals(5, q.size()); return q; }
/** * Returns a new set of first 5 ints. */
Returns a new set of first 5 ints
set5
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jdk/test/java/util/concurrent/tck/ConcurrentSkipListSetTest.java", "license": "gpl-2.0", "size": 31925 }
[ "java.util.concurrent.ConcurrentSkipListSet" ]
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,348,874
/**
 * Scans the report definitions for the given transfer line and returns the
 * first one whose period status is CLOSED.
 *
 * @param transferLine the transfer line to find report definitions for
 * @return the first closed report, or {@code null} when none is closed
 */
protected EffortCertificationReport getClosedReportingPeriod(ExpenseTransferAccountingLine transferLine) {
    for (EffortCertificationReport report : getEffortReportDefinitionsForLine(transferLine)) {
        if (KFSConstants.PeriodStatusCodes.CLOSED.equals(report.getEffortCertificationReportPeriodStatusCode())) {
            return report;
        }
    }
    return null;
}
EffortCertificationReport function(ExpenseTransferAccountingLine transferLine) { List<EffortCertificationReport> effortReports = getEffortReportDefinitionsForLine(transferLine); for (EffortCertificationReport report : effortReports) { if (KFSConstants.PeriodStatusCodes.CLOSED.equals(report.getEffortCertificationReportPeriodStatusCode())) { return report; } } return null; }
/** * Checks list of report definitions for a closed period. * * @param transferLine - transfer line to find report definition for * @return closed report or null if one is not found */
Checks list of report definitions for a closed period
getClosedReportingPeriod
{ "repo_name": "quikkian-ua-devops/will-financials", "path": "kfs-ld/src/main/java/org/kuali/kfs/module/ld/document/service/impl/SalaryTransferPeriodValidationServiceImpl.java", "license": "agpl-3.0", "size": 16448 }
[ "java.util.List", "org.kuali.kfs.integration.ec.EffortCertificationReport", "org.kuali.kfs.module.ld.businessobject.ExpenseTransferAccountingLine", "org.kuali.kfs.sys.KFSConstants" ]
import java.util.List; import org.kuali.kfs.integration.ec.EffortCertificationReport; import org.kuali.kfs.module.ld.businessobject.ExpenseTransferAccountingLine; import org.kuali.kfs.sys.KFSConstants;
import java.util.*; import org.kuali.kfs.integration.ec.*; import org.kuali.kfs.module.ld.businessobject.*; import org.kuali.kfs.sys.*;
[ "java.util", "org.kuali.kfs" ]
java.util; org.kuali.kfs;
451,896
/**
 * Sets the URL of the endpoint that requests are sent to.
 *
 * @param rootUrl the endpoint root URL; must not be null
 *     ({@code Optional.of} rejects null)
 * @return this builder, for chaining
 */
public Builder setRootUrl(String rootUrl) {
    this.rootUrl = Optional.of(rootUrl);
    return this;
}
Builder function(String rootUrl) { this.rootUrl = Optional.of(rootUrl); return this; }
/** * The URL of the endpoint to send requests to. The default is * {@code https://www.googleapis.com}. * * @param rootUrl The URL of the endpoint to send requests to * @return this builder */
The URL of the endpoint to send requests to. The default is HREF
setRootUrl
{ "repo_name": "deflaux/utils-java", "path": "src/main/java/com/google/cloud/genomics/utils/GenomicsFactory.java", "license": "apache-2.0", "size": 17536 }
[ "com.google.common.base.Optional" ]
import com.google.common.base.Optional;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
1,170,894
/**
 * Tests the creation of an element collection grid for integers:
 * one column is created for the "intTags" attribute.
 */
@Test
public void testTableOfIntegers() {
    EntityModel<TestEntity> em = emf.getModel(TestEntity.class);
    FormOptions fo = new FormOptions();
    ElementCollectionGrid<Integer, TestEntity, Integer> grid =
            new ElementCollectionGrid<>(em.getAttributeModel("intTags"), fo);
    grid.initContent();
    grid.setValue(Set.of(4, 5));
    grid.getGrid().select(new ValueHolder<Integer>(4));
    assertEquals(1, grid.getGrid().getColumns().size());
}
void function() { ElementCollectionGrid<Integer, TestEntity, Integer> grid = null; EntityModel<TestEntity> em = emf.getModel(TestEntity.class); FormOptions fo = new FormOptions(); grid = new ElementCollectionGrid<>(em.getAttributeModel(STR), fo); grid.initContent(); grid.setValue(Set.of(4, 5)); grid.getGrid().select(new ValueHolder<Integer>(4)); assertEquals(1, grid.getGrid().getColumns().size()); }
/** * Test the creation of a table for integers */
Test the creation of a table for integers
testTableOfIntegers
{ "repo_name": "opencirclesolutions/dynamo", "path": "dynamo-frontend/src/test/java/com/ocs/dynamo/ui/component/ElementCollectionGridTest.java", "license": "apache-2.0", "size": 4107 }
[ "com.ocs.dynamo.domain.TestEntity", "com.ocs.dynamo.domain.model.EntityModel", "com.ocs.dynamo.ui.composite.layout.FormOptions", "java.util.Set", "org.junit.jupiter.api.Assertions" ]
import com.ocs.dynamo.domain.TestEntity; import com.ocs.dynamo.domain.model.EntityModel; import com.ocs.dynamo.ui.composite.layout.FormOptions; import java.util.Set; import org.junit.jupiter.api.Assertions;
import com.ocs.dynamo.domain.*; import com.ocs.dynamo.domain.model.*; import com.ocs.dynamo.ui.composite.layout.*; import java.util.*; import org.junit.jupiter.api.*;
[ "com.ocs.dynamo", "java.util", "org.junit.jupiter" ]
com.ocs.dynamo; java.util; org.junit.jupiter;
1,372,406
/**
 * Builds all configured credential renewer plugins.
 * <p>
 * Reads the class names from {@code Config.NIMBUS_CREDENTIAL_RENEWERS},
 * instantiates and prepares each one.
 *
 * @param conf the storm configuration to use
 * @return the prepared credential renewers (empty when none are configured)
 * @throws RuntimeException wrapping any reflection or preparation failure
 */
public static Collection<ICredentialsRenewer> GetCredentialRenewers(Map conf) {
    try {
        Set<ICredentialsRenewer> renewers = new HashSet<>();
        Collection<String> classNames =
                (Collection<String>) conf.get(Config.NIMBUS_CREDENTIAL_RENEWERS);
        if (classNames != null) {
            for (String className : classNames) {
                ICredentialsRenewer renewer =
                        (ICredentialsRenewer) Class.forName(className).newInstance();
                renewer.prepare(conf);
                renewers.add(renewer);
            }
        }
        return renewers;
    }
    catch (Exception e) {
        throw new RuntimeException(e);
    }
}
static Collection<ICredentialsRenewer> function(Map conf) { try { Set<ICredentialsRenewer> ret = new HashSet<>(); Collection<String> clazzes = (Collection<String>)conf.get(Config.NIMBUS_CREDENTIAL_RENEWERS); if (clazzes != null) { for (String clazz : clazzes) { ICredentialsRenewer inst = (ICredentialsRenewer)Class.forName(clazz).newInstance(); inst.prepare(conf); ret.add(inst); } } return ret; } catch (Exception e) { throw new RuntimeException(e); } }
/** * Get all of the configured Credential Renewer Plugins. * @param conf the storm configuration to use. * @return the configured credential renewers. */
Get all of the configured Credential Renewer Plugins
GetCredentialRenewers
{ "repo_name": "anshuiisc/storm-Allbolts-wiring", "path": "storm-core/src/jvm/org/apache/storm/security/auth/AuthUtils.java", "license": "apache-2.0", "size": 15031 }
[ "java.util.Collection", "java.util.HashSet", "java.util.Map", "java.util.Set", "org.apache.storm.Config" ]
import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.storm.Config;
import java.util.*; import org.apache.storm.*;
[ "java.util", "org.apache.storm" ]
java.util; org.apache.storm;
2,492,804
/**
 * Sets the target communities builder used when constructing messages.
 *
 * @param targetBuilder the builder for NHIN target communities
 */
public void setTargetCommunitiesBuilder(NhinTargetCommunitiesBuilder targetBuilder);
void function(NhinTargetCommunitiesBuilder targetBuilder);
/** * Sets the target communities builder. * * @param targetBuilder */
Sets the target communities builder
setTargetCommunitiesBuilder
{ "repo_name": "beiyuxinke/CONNECT", "path": "Product/Production/Services/DocumentRetrieveCore/src/main/java/gov/hhs/fha/nhinc/docretrieve/messaging/director/DocumentRetrieveMessageDirector.java", "license": "bsd-3-clause", "size": 2935 }
[ "gov.hhs.fha.nhinc.messaging.builder.NhinTargetCommunitiesBuilder" ]
import gov.hhs.fha.nhinc.messaging.builder.NhinTargetCommunitiesBuilder;
import gov.hhs.fha.nhinc.messaging.builder.*;
[ "gov.hhs.fha" ]
gov.hhs.fha;
860,529
private Integer getBatchSize(Method method) { // Determine queue batch size, if any Batch batch = method.getAnnotation(Batch.class); if (batch != null) { int batchSize = batch.value(); Preconditions.checkArgument(batchSize > 0, "Batch size should be > 0: %s", method.getName()); return batchSize; } return null; }
Integer function(Method method) { Batch batch = method.getAnnotation(Batch.class); if (batch != null) { int batchSize = batch.value(); Preconditions.checkArgument(batchSize > 0, STR, method.getName()); return batchSize; } return null; }
/** * Returns the user specify batch size or {@code null} if not specified. */
Returns the user specify batch size or null if not specified
getBatchSize
{ "repo_name": "mpouttuclarke/cdap", "path": "cdap-app-fabric/src/main/java/co/cask/cdap/internal/app/runtime/flow/FlowletProgramRunner.java", "license": "apache-2.0", "size": 36798 }
[ "co.cask.cdap.api.annotation.Batch", "com.google.common.base.Preconditions", "java.lang.reflect.Method" ]
import co.cask.cdap.api.annotation.Batch; import com.google.common.base.Preconditions; import java.lang.reflect.Method;
import co.cask.cdap.api.annotation.*; import com.google.common.base.*; import java.lang.reflect.*;
[ "co.cask.cdap", "com.google.common", "java.lang" ]
co.cask.cdap; com.google.common; java.lang;
1,804,739