method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Returns the coordinate stored for the given axis, converted to the requested
 * length units. Rotation axes are returned untouched, since angles carry no
 * length unit.
 *
 * @param axis the axis whose coordinate is requested
 * @param units the length unit to convert linear coordinates into
 * @return the coordinate value (converted for linear axes, raw for rotation axes)
 */
public double getCoordinate(Axis axis, LengthUnit units) {
    boolean isRotational = axis.getType() == Axis.Type.Rotation;
    if (isRotational) {
        // Angles must never be unit-converted; pass the raw value through.
        return getCoordinate(axis);
    }
    return getLengthCoordinate(axis).convertToUnits(units).getValue();
}
|
double function(Axis axis, LengthUnit units) { if (axis.getType() == Axis.Type.Rotation) { return getCoordinate(axis); } else { return getLengthCoordinate(axis).convertToUnits(units).getValue(); } }
|
/**
* Get the coordinate for the given axis from the AxesLocation, converted to the given length units.
*
* @param axis
* @param units
* @return
*/
|
Get the coordinate for the given axis from the AxesLocation, converted to the given length units
|
getCoordinate
|
{
"repo_name": "openpnp/openpnp",
"path": "src/main/java/org/openpnp/model/AxesLocation.java",
"license": "gpl-3.0",
"size": 23830
}
|
[
"org.openpnp.spi.Axis"
] |
import org.openpnp.spi.Axis;
|
import org.openpnp.spi.*;
|
[
"org.openpnp.spi"
] |
org.openpnp.spi;
| 2,094,878
|
/**
 * Creates a new {@code AutofitHelper} that wraps a {@link TextView} and enables
 * automatically sizing its text to fit, applying any autofit attributes found in
 * the supplied XML attribute set.
 *
 * @param view the TextView to wrap
 * @param attrs XML attributes, may be {@code null}
 * @param defStyle default style attribute to resolve the styled attributes against
 * @return the configured helper
 */
public static AutofitHelper create(TextView view, AttributeSet attrs, int defStyle) {
    AutofitHelper helper = new AutofitHelper(view);
    boolean enabled = true;
    if (attrs != null) {
        Context context = view.getContext();
        TypedArray styled = context.obtainStyledAttributes(attrs,
                R.styleable.AutofitTextView, defStyle, 0);
        // Fall back to the helper's current settings when an attribute is absent.
        enabled = styled.getBoolean(R.styleable.AutofitTextView_sizeToFit, enabled);
        int minTextSize = styled.getDimensionPixelSize(R.styleable.AutofitTextView_minTextSize,
                (int) helper.getMinTextSize());
        float precision = styled.getFloat(R.styleable.AutofitTextView_precision,
                helper.getPrecision());
        styled.recycle();
        helper.setMinTextSize(TypedValue.COMPLEX_UNIT_PX, minTextSize).setPrecision(precision);
    }
    helper.setEnabled(enabled);
    return helper;
}
|
static AutofitHelper function(TextView view, AttributeSet attrs, int defStyle) { AutofitHelper helper = new AutofitHelper(view); boolean sizeToFit = true; if (attrs != null) { Context context = view.getContext(); int minTextSize = (int) helper.getMinTextSize(); float precision = helper.getPrecision(); TypedArray ta = context.obtainStyledAttributes( attrs, R.styleable.AutofitTextView, defStyle, 0); sizeToFit = ta.getBoolean(R.styleable.AutofitTextView_sizeToFit, sizeToFit); minTextSize = ta.getDimensionPixelSize(R.styleable.AutofitTextView_minTextSize, minTextSize); precision = ta.getFloat(R.styleable.AutofitTextView_precision, precision); ta.recycle(); helper.setMinTextSize(TypedValue.COMPLEX_UNIT_PX, minTextSize) .setPrecision(precision); } helper.setEnabled(sizeToFit); return helper; }
|
/**
* Creates a new instance of {@code AutofitHelper} that wraps a {@link TextView} and enables
* automatically sizing the text to fit.
*/
|
Creates a new instance of AutofitHelper that wraps a <code>TextView</code> and enables automatically sizing the text to fit
|
create
|
{
"repo_name": "Tengio/android-autofittextview",
"path": "library/src/main/java/me/grantland/widget/AutofitHelper.java",
"license": "apache-2.0",
"size": 17822
}
|
[
"android.content.Context",
"android.content.res.TypedArray",
"android.util.AttributeSet",
"android.util.TypedValue",
"android.widget.TextView"
] |
import android.content.Context; import android.content.res.TypedArray; import android.util.AttributeSet; import android.util.TypedValue; import android.widget.TextView;
|
import android.content.*; import android.content.res.*; import android.util.*; import android.widget.*;
|
[
"android.content",
"android.util",
"android.widget"
] |
android.content; android.util; android.widget;
| 1,340,925
|
/**
 * Performs the requested function and returns the destination URL.
 * <p>
 * {@code "open"} stores the caller's form/element context in the session and
 * starts the selection; {@code "close"} reads the selection back into request
 * attributes and forwards to the matching JSP. Any exception (including an
 * unknown function name) is routed to the error page, with the exception
 * exposed through the standard JSP exception attribute.
 *
 * @param function the requested function name ("open" or "close")
 * @param session the session controller holding the selection state
 * @param request the incoming HTTP request
 * @return the destination URL to forward to
 */
public String getDestination(String function, SelectionPeasWrapperSessionController session,
    HttpRequest request) {
  try {
    switch (function) {
      case "open": {
        session.setFormName(request.getParameter("formName"));
        session.setElementId(request.getParameter("elementId"));
        session.setElementName(request.getParameter("elementName"));
        boolean multiple =
            StringUtil.getBooleanValue(request.getParameter("selectionMultiple"));
        String instanceId = request.getParameter("instanceId");
        List<String> roles = getRoles(request.getParameter("roles"));
        session.setSelectable(request.getParameter("selectable"));
        if (session.isGroupSelectable()) {
          if (multiple) {
            session.setSelectedGroupIds(request.getParameter("selectedGroups"));
          } else {
            session.setSelectedGroupId(request.getParameter("selectedGroup"));
          }
        } else if (multiple) {
          session.setSelectedUserIds(request.getParameter("selectedUsers"));
        } else {
          session.setSelectedUserId(request.getParameter("selectedUser"));
        }
        return session.initSelectionPeas(multiple, instanceId, roles);
      }
      case "close": {
        session.getSelectionPeasSelection();
        request.setAttribute("formName", session.getFormName());
        request.setAttribute("elementId", session.getElementId());
        request.setAttribute("elementName", session.getElementName());
        boolean multiple = session.getSelection().isMultiSelect();
        if (session.isGroupSelectable()) {
          if (multiple) {
            request.setAttribute("groups", session.getSelectedGroups());
          } else {
            request.setAttribute("group", session.getSelectedGroup());
          }
        } else if (multiple) {
          request.setAttribute("users", session.getSelectedUsers());
        } else {
          request.setAttribute("user", session.getSelectedUser());
        }
        return multiple ? "/selectionPeas/jsp/closeWrapperMultiple.jsp"
            : "/selectionPeas/jsp/closeWrapper.jsp";
      }
      default:
        return "/admin/jsp/errorpageMain.jsp";
    }
  } catch (Exception e) {
    // A null function name also lands here (switch NPE), mirroring the
    // original equals() behavior. Let the error page render the cause.
    request.setAttribute("javax.servlet.jsp.jspException", e);
    return "/admin/jsp/errorpageMain.jsp";
  }
}
|
String function(String function, SelectionPeasWrapperSessionController session, HttpRequest request) { try { if (function.equals("open")) { session.setFormName(request.getParameter(STR)); session.setElementId(request.getParameter(STR)); session.setElementName(request.getParameter(STR)); boolean selectionMultiple = StringUtil.getBooleanValue(request.getParameter(STR)); String instanceId = request.getParameter(STR); List<String> roles = getRoles(request.getParameter("roles")); session.setSelectable(request.getParameter(STR)); if (session.isGroupSelectable()) { if (selectionMultiple) { session.setSelectedGroupIds(request.getParameter(STR)); } else { session.setSelectedGroupId(request.getParameter(STR)); } } else { if (selectionMultiple) { session.setSelectedUserIds(request.getParameter(STR)); } else { session.setSelectedUserId(request.getParameter(STR)); } } return session.initSelectionPeas(selectionMultiple, instanceId, roles); } else if (function.equals("close")) { session.getSelectionPeasSelection(); request.setAttribute(STR, session.getFormName()); request.setAttribute(STR, session.getElementId()); request.setAttribute(STR, session.getElementName()); if (session.isGroupSelectable()) { if (session.getSelection().isMultiSelect()) { request.setAttribute(STR, session.getSelectedGroups()); return STR; } else { request.setAttribute("group", session.getSelectedGroup()); return STR; } } else { if (session.getSelection().isMultiSelect()) { request.setAttribute("users", session.getSelectedUsers()); return STR; } else { request.setAttribute("user", session.getSelectedUser()); return STR; } } } else { return STR; } } catch (Exception e) { request.setAttribute(STR, e); return STR; } }
|
/**
* Do the requested function and return the destination url.
*/
|
Do the requested function and return the destination url
|
getDestination
|
{
"repo_name": "CecileBONIN/Silverpeas-Core",
"path": "war-core/src/main/java/com/stratelia/silverpeas/selectionPeas/servlets/SelectionPeasWrapper.java",
"license": "agpl-3.0",
"size": 5192
}
|
[
"com.silverpeas.util.StringUtil",
"com.stratelia.silverpeas.selectionPeas.control.SelectionPeasWrapperSessionController",
"java.util.List",
"org.silverpeas.servlet.HttpRequest"
] |
import com.silverpeas.util.StringUtil; import com.stratelia.silverpeas.selectionPeas.control.SelectionPeasWrapperSessionController; import java.util.List; import org.silverpeas.servlet.HttpRequest;
|
import com.silverpeas.util.*; import com.stratelia.silverpeas.*; import java.util.*; import org.silverpeas.servlet.*;
|
[
"com.silverpeas.util",
"com.stratelia.silverpeas",
"java.util",
"org.silverpeas.servlet"
] |
com.silverpeas.util; com.stratelia.silverpeas; java.util; org.silverpeas.servlet;
| 2,754,706
|
/**
 * Appends the record to every replica. A final record is tolerated failing on
 * some replicas as long as at least one write succeeded; any other record must
 * reach every replica or the accumulated failure is propagated (CASSANDRA-10421).
 */
void append(LogRecord record)
{
    Throwable accumulated = Throwables.perform(null, replicas().stream().map(r -> () -> r.append(record)));
    if (accumulated == null)
        return;

    // perform() suppresses all but the first failure, so replicas().size() - 1
    // suppressed exceptions means every replica write failed.
    boolean allReplicasFailed = accumulated.getSuppressed().length == replicas().size() - 1;
    if (!record.isFinal() || allReplicasFailed)
        Throwables.maybeFail(accumulated);

    logger.error("Failed to add record '{}' to some replicas '{}'", record, this);
}
|
void append(LogRecord record) { Throwable err = Throwables.perform(null, replicas().stream().map(r -> () -> r.append(record))); if (err != null) { if (!record.isFinal() err.getSuppressed().length == replicas().size() -1) Throwables.maybeFail(err); logger.error(STR, record, this); } }
|
/**
* Add the record to all the replicas: if it is a final record then we throw only if we fail to write it
* to all, otherwise we throw if we fail to write it to any file, see CASSANDRA-10421 for details
*/
|
Add the record to all the replicas: if it is a final record then we throw only if we fail to write it to all, otherwise we throw if we fail to write it to any file, see CASSANDRA-10421 for details
|
append
|
{
"repo_name": "carlyeks/cassandra",
"path": "src/java/org/apache/cassandra/db/lifecycle/LogReplicaSet.java",
"license": "apache-2.0",
"size": 9281
}
|
[
"org.apache.cassandra.utils.Throwables"
] |
import org.apache.cassandra.utils.Throwables;
|
import org.apache.cassandra.utils.*;
|
[
"org.apache.cassandra"
] |
org.apache.cassandra;
| 2,220,710
|
/**
 * Checks the Date-based Millisecond constructor in the Europe/Tallinn time
 * zone, where java.util.Date(1016722559123L) is 4.55:59.123pm on 21 Mar 2002.
 */
@Test
public void testDateConstructor2() {
    final long beforeMillis = 1016722559122L;
    final long afterMillis = 1016722559123L;
    TimeZone tallinn = TimeZone.getTimeZone("Europe/Tallinn");
    // The locale should not affect millisecond boundaries.
    Locale defaultLocale = Locale.getDefault();
    Millisecond before = new Millisecond(new Date(beforeMillis), tallinn, defaultLocale);
    Millisecond after = new Millisecond(new Date(afterMillis), tallinn, defaultLocale);
    assertEquals(122, before.getMillisecond());
    assertEquals(beforeMillis, before.getLastMillisecond(tallinn));
    assertEquals(123, after.getMillisecond());
    assertEquals(afterMillis, after.getFirstMillisecond(tallinn));
}
|
void function() { TimeZone zone = TimeZone.getTimeZone(STR); Locale locale = Locale.getDefault(); Millisecond m1 = new Millisecond(new Date(1016722559122L), zone, locale); Millisecond m2 = new Millisecond(new Date(1016722559123L), zone, locale); assertEquals(122, m1.getMillisecond()); assertEquals(1016722559122L, m1.getLastMillisecond(zone)); assertEquals(123, m2.getMillisecond()); assertEquals(1016722559123L, m2.getFirstMillisecond(zone)); }
|
/**
* In Tallinn, the 4.55:59.123pm on 21 Mar 2002 is
* java.util.Date(1016722559123L). Use this to check the Millisecond
* constructor.
*/
|
In Tallinn, the 4.55:59.123pm on 21 Mar 2002 is java.util.Date(1016722559123L). Use this to check the Millisecond constructor
|
testDateConstructor2
|
{
"repo_name": "simon04/jfreechart",
"path": "src/test/java/org/jfree/data/time/MillisecondTest.java",
"license": "lgpl-2.1",
"size": 12268
}
|
[
"java.util.Date",
"java.util.Locale",
"java.util.TimeZone",
"org.junit.Assert"
] |
import java.util.Date; import java.util.Locale; import java.util.TimeZone; import org.junit.Assert;
|
import java.util.*; import org.junit.*;
|
[
"java.util",
"org.junit"
] |
java.util; org.junit;
| 2,053,353
|
/**
 * Returns the preferred timezone for the Gitblit instance, lazily resolved
 * from the "web.timezone" setting and cached; falls back to the JVM default
 * when the setting is empty or absent.
 *
 * @return a timezone
 */
public static TimeZone getTimezone() {
    if (self().timezone == null) {
        String tzid = getString("web.timezone", null);
        self().timezone = StringUtils.isEmpty(tzid)
                ? TimeZone.getDefault()
                : TimeZone.getTimeZone(tzid);
    }
    return self().timezone;
}
|
static TimeZone function() { if (self().timezone == null) { String tzid = getString(STR, null); if (StringUtils.isEmpty(tzid)) { self().timezone = TimeZone.getDefault(); return self().timezone; } self().timezone = TimeZone.getTimeZone(tzid); } return self().timezone; }
|
/**
* Returns the preferred timezone for the Gitblit instance.
*
* @return a timezone
*/
|
Returns the preferred timezone for the Gitblit instance
|
getTimezone
|
{
"repo_name": "BullShark/IRCBlit",
"path": "src/main/java/com/gitblit/GitBlit.java",
"license": "apache-2.0",
"size": 119961
}
|
[
"com.gitblit.utils.StringUtils",
"java.util.TimeZone"
] |
import com.gitblit.utils.StringUtils; import java.util.TimeZone;
|
import com.gitblit.utils.*; import java.util.*;
|
[
"com.gitblit.utils",
"java.util"
] |
com.gitblit.utils; java.util;
| 1,491,051
|
/**
 * Return names of all resources (shown in ExternalView or CustomizedView) in this cluster.
 * Delegates to the routing table for the default property/state type pair.
 */
public Collection<String> getResources() {
    return getRoutingTableRef(DEFAULT_PROPERTY_TYPE, DEFAULT_STATE_TYPE).getResources();
}
|
Collection<String> function() { return getRoutingTableRef(DEFAULT_PROPERTY_TYPE, DEFAULT_STATE_TYPE).getResources(); }
|
/**
* Return names of all resources (shown in ExternalView or CustomizedView) in this cluster.
*/
|
Return names of all resources (shown in ExternalView or CustomizedView) in this cluster
|
getResources
|
{
"repo_name": "apache/helix",
"path": "helix-core/src/main/java/org/apache/helix/spectator/RoutingTableProvider.java",
"license": "apache-2.0",
"size": 46153
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,144,797
|
/**
 * Executes {@code function} on all bucket nodes of this partitioned region and
 * returns a collector holding the combined local and remote results.
 *
 * @param function the function to execute on every bucket
 * @param execution executor state: per-member arguments, failed nodes, result sender config
 * @param rc caller-supplied result collector, possibly wrapped by the execution
 * @param isPRSingleHop whether this is a single-hop client execution (unused in this path)
 * @return ResultCollector holding the replies from all target members
 * @since 6.0
 */
private ResultCollector executeOnAllBuckets(final Function function,
    final PartitionedRegionFunctionExecutor execution, ResultCollector rc, boolean isPRSingleHop) {
  // Snapshot the ids of all buckets currently known to the region advisor.
  Set<Integer> bucketSet = new HashSet<Integer>();
  Iterator<Integer> itr = this.getRegionAdvisor().getBucketSet().iterator();
  while (itr.hasNext()) {
    try {
      bucketSet.add(itr.next());
    }
    catch (NoSuchElementException ex) {
      // The advisor's bucket iterator can run dry under concurrent bucket
      // movement; silently stop collecting — presumably intentional best-effort.
    }
  }
  // Plan which member executes which buckets.
  HashMap<InternalDistributedMember, HashSet<Integer>> memberToBuckets = FunctionExecutionNodePruner
      .groupByMemberToBuckets(this, bucketSet, function.optimizeForWrite());
  if (memberToBuckets.isEmpty()) {
    throw new EmtpyRegionFunctionException(LocalizedStrings.PartitionedRegion_FUNCTION_NOT_EXECUTED_AS_REGION_IS_EMPTY.toLocalizedString()
    );
  }
  // While previously failed nodes are still in the plan, wait and re-plan;
  // time out (and never return) once the retry budget is exhausted.
  while(!execution.getFailedNodes().isEmpty()){
    Set memberKeySet = memberToBuckets.keySet();
    RetryTimeKeeper retryTime = new RetryTimeKeeper(this.retryTimeout);
    Iterator iterator = memberKeySet.iterator();
    boolean hasRemovedNode = false;
    while (iterator.hasNext()){
      if(execution.getFailedNodes().contains(((InternalDistributedMember)iterator.next()).getId())){
        hasRemovedNode = true;
      }
    }
    if(hasRemovedNode){
      if (retryTime.overMaximum()) {
        PRHARedundancyProvider.timedOut(this, null, null, "doing function execution", this.retryTimeout);
        // NOTREACHED
      }
      retryTime.waitToRetryNode();
      // Recompute the member -> buckets assignment after the wait.
      memberToBuckets = FunctionExecutionNodePruner
          .groupByMemberToBuckets(this, bucketSet, function.optimizeForWrite());
    }else{
      // No failed node is part of the plan anymore; clear and proceed.
      execution.clearFailedNodes();
    }
  }
  Set<InternalDistributedMember> dest = memberToBuckets.keySet();
  execution.validateExecution(function, dest);
  execution.setExecutionNodes(dest);
  // Pull the local member's buckets out of the plan so they run in-process.
  boolean isSelf = false;
  final Set<Integer> localBucketSet = memberToBuckets.remove(getMyId());
  if (localBucketSet != null) {
    isSelf = true;
  }
  // Build one remote execution context per remaining (remote) member.
  final HashMap<InternalDistributedMember, FunctionRemoteContext> recipMap = new HashMap<InternalDistributedMember, FunctionRemoteContext>();
  for (InternalDistributedMember recip : memberToBuckets.keySet()) {
    FunctionRemoteContext context = new FunctionRemoteContext(function,
        execution.getArgumentsForMember(recip.getId()), null, memberToBuckets
        .get(recip), execution.isReExecute(), execution.isFnSerializationReqd() );
    recipMap.put(recip, context);
  }
  final LocalResultCollector<?, ?> localResultCollector = execution
      .getLocalResultCollector(function, rc);
  final DM dm = getDistributionManager();
  // Sender that funnels both local and remote results into the local collector.
  final PartitionedRegionFunctionResultSender resultSender = new PartitionedRegionFunctionResultSender(
      dm, this, 0L, localResultCollector, execution
      .getServerResultSender(), recipMap.isEmpty(), !isSelf, execution.isForwardExceptions(), function, localBucketSet);
  // execute locally and collect the result
  if (isSelf && this.dataStore != null) {
    final RegionFunctionContextImpl prContext = new RegionFunctionContextImpl(
        function.getId(), PartitionedRegion.this, execution
        .getArgumentsForMember(getMyId().getId()), null,
        ColocationHelper.constructAndGetAllColocatedLocalDataSet(
        PartitionedRegion.this, localBucketSet), localBucketSet,
        resultSender, execution.isReExecute());
    execution.executeFunctionOnLocalPRNode(function, prContext, resultSender, dm, isTX());
  }
  // Dispatch to the remote members and block for their replies.
  PartitionedRegionFunctionResultWaiter resultReciever = new PartitionedRegionFunctionResultWaiter(
      getSystem(), this.getPRId(), localResultCollector, function, resultSender);
  ResultCollector reply = resultReciever.getPartitionedDataFrom(recipMap,
      this, execution);
  return reply;
}
|
ResultCollector function(final Function function, final PartitionedRegionFunctionExecutor execution, ResultCollector rc, boolean isPRSingleHop) { Set<Integer> bucketSet = new HashSet<Integer>(); Iterator<Integer> itr = this.getRegionAdvisor().getBucketSet().iterator(); while (itr.hasNext()) { try { bucketSet.add(itr.next()); } catch (NoSuchElementException ex) { } } HashMap<InternalDistributedMember, HashSet<Integer>> memberToBuckets = FunctionExecutionNodePruner .groupByMemberToBuckets(this, bucketSet, function.optimizeForWrite()); if (memberToBuckets.isEmpty()) { throw new EmtpyRegionFunctionException(LocalizedStrings.PartitionedRegion_FUNCTION_NOT_EXECUTED_AS_REGION_IS_EMPTY.toLocalizedString() ); } while(!execution.getFailedNodes().isEmpty()){ Set memberKeySet = memberToBuckets.keySet(); RetryTimeKeeper retryTime = new RetryTimeKeeper(this.retryTimeout); Iterator iterator = memberKeySet.iterator(); boolean hasRemovedNode = false; while (iterator.hasNext()){ if(execution.getFailedNodes().contains(((InternalDistributedMember)iterator.next()).getId())){ hasRemovedNode = true; } } if(hasRemovedNode){ if (retryTime.overMaximum()) { PRHARedundancyProvider.timedOut(this, null, null, STR, this.retryTimeout); } retryTime.waitToRetryNode(); memberToBuckets = FunctionExecutionNodePruner .groupByMemberToBuckets(this, bucketSet, function.optimizeForWrite()); }else{ execution.clearFailedNodes(); } } Set<InternalDistributedMember> dest = memberToBuckets.keySet(); execution.validateExecution(function, dest); execution.setExecutionNodes(dest); boolean isSelf = false; final Set<Integer> localBucketSet = memberToBuckets.remove(getMyId()); if (localBucketSet != null) { isSelf = true; } final HashMap<InternalDistributedMember, FunctionRemoteContext> recipMap = new HashMap<InternalDistributedMember, FunctionRemoteContext>(); for (InternalDistributedMember recip : memberToBuckets.keySet()) { FunctionRemoteContext context = new FunctionRemoteContext(function, 
execution.getArgumentsForMember(recip.getId()), null, memberToBuckets .get(recip), execution.isReExecute(), execution.isFnSerializationReqd() ); recipMap.put(recip, context); } final LocalResultCollector<?, ?> localResultCollector = execution .getLocalResultCollector(function, rc); final DM dm = getDistributionManager(); final PartitionedRegionFunctionResultSender resultSender = new PartitionedRegionFunctionResultSender( dm, this, 0L, localResultCollector, execution .getServerResultSender(), recipMap.isEmpty(), !isSelf, execution.isForwardExceptions(), function, localBucketSet); if (isSelf && this.dataStore != null) { final RegionFunctionContextImpl prContext = new RegionFunctionContextImpl( function.getId(), PartitionedRegion.this, execution .getArgumentsForMember(getMyId().getId()), null, ColocationHelper.constructAndGetAllColocatedLocalDataSet( PartitionedRegion.this, localBucketSet), localBucketSet, resultSender, execution.isReExecute()); execution.executeFunctionOnLocalPRNode(function, prContext, resultSender, dm, isTX()); } PartitionedRegionFunctionResultWaiter resultReciever = new PartitionedRegionFunctionResultWaiter( getSystem(), this.getPRId(), localResultCollector, function, resultSender); ResultCollector reply = resultReciever.getPartitionedDataFrom(recipMap, this, execution); return reply; }
|
/**
* Executes function on all bucket nodes
*
* @param function
* @param execution
* @return ResultCollector
* @since 6.0
*/
|
Executes function on all bucket nodes
|
executeOnAllBuckets
|
{
"repo_name": "ameybarve15/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/PartitionedRegion.java",
"license": "apache-2.0",
"size": 403335
}
|
[
"com.gemstone.gemfire.cache.execute.EmtpyRegionFunctionException",
"com.gemstone.gemfire.cache.execute.Function",
"com.gemstone.gemfire.cache.execute.ResultCollector",
"com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember",
"com.gemstone.gemfire.internal.cache.execute.FunctionExecutionNodePruner",
"com.gemstone.gemfire.internal.cache.execute.FunctionRemoteContext",
"com.gemstone.gemfire.internal.cache.execute.LocalResultCollector",
"com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionExecutor",
"com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionResultSender",
"com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionResultWaiter",
"com.gemstone.gemfire.internal.cache.execute.RegionFunctionContextImpl",
"com.gemstone.gemfire.internal.i18n.LocalizedStrings",
"java.util.HashMap",
"java.util.HashSet",
"java.util.Iterator",
"java.util.NoSuchElementException",
"java.util.Set"
] |
import com.gemstone.gemfire.cache.execute.EmtpyRegionFunctionException; import com.gemstone.gemfire.cache.execute.Function; import com.gemstone.gemfire.cache.execute.ResultCollector; import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember; import com.gemstone.gemfire.internal.cache.execute.FunctionExecutionNodePruner; import com.gemstone.gemfire.internal.cache.execute.FunctionRemoteContext; import com.gemstone.gemfire.internal.cache.execute.LocalResultCollector; import com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionExecutor; import com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionResultSender; import com.gemstone.gemfire.internal.cache.execute.PartitionedRegionFunctionResultWaiter; import com.gemstone.gemfire.internal.cache.execute.RegionFunctionContextImpl; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Set;
|
import com.gemstone.gemfire.cache.execute.*; import com.gemstone.gemfire.distributed.internal.membership.*; import com.gemstone.gemfire.internal.cache.execute.*; import com.gemstone.gemfire.internal.i18n.*; import java.util.*;
|
[
"com.gemstone.gemfire",
"java.util"
] |
com.gemstone.gemfire; java.util;
| 172,099
|
/**
 * Checks whether this class is consistent according to a subset.
 * <p>
 * The subset's minimum bitset must be fully contained in the current bitset,
 * and the superclass consistency check must also pass. The superclass message
 * for the subset is always appended to the result, matching prior behavior.
 *
 * @param subset the subset defining the context of the consistency check
 * @return a ConsistencyCheck whose boolean indicates consistency and whose
 *         message explains any inconsistency
 */
@Override
public ConsistencyCheck modelConsistency(final Subset subset) {
    // Copy the subset's required bits, then intersect with the bits set here.
    BitSet intersection = new BitSet(this.minBitsets.get(subset).length());
    intersection.or(this.minBitsets.get(subset));
    intersection.and(this.currentBitset);
    // Consistent iff every required bit is present in currentBitset.
    boolean consistent = this.minBitsets.get(subset).equals(intersection);
    StringBuilder message = new StringBuilder();
    if (!consistent) {
        message.append(getMessageForConsistency(this.minBitsets.get(subset)));
    }
    // Combine with the superclass check: both must pass. The original code did
    // this with redundant `&& (true)` / `&& (false)` branches that appended the
    // same superclass message either way; this is the equivalent simplification.
    consistent = consistent && super.modelConsistency().isConsistent();
    message.append(super.modelConsistency(subset).getMessage());
    return new ConsistencyCheck(consistent, message.toString());
}
|
ConsistencyCheck function(final Subset subset) { BitSet intersection = new BitSet(this.minBitsets.get(subset).length()); intersection.or(this.minBitsets.get(subset)); intersection.and(this.currentBitset); boolean consistent = (this.minBitsets.get(subset).equals(intersection)); StringBuilder message = new StringBuilder(""); if (!consistent) { message.append(getMessageForConsistency(this.minBitsets.get(subset))); } if (super.modelConsistency().isConsistent()) { consistent = consistent && (true); message.append((super.modelConsistency(subset)).getMessage()); } else { consistent = consistent && (false); message.append((super.modelConsistency(subset)).getMessage()); } return new ConsistencyCheck(consistent, message.toString()); }
|
/**
* Utility to check whether this class is consistent according to a subset
*
* @param subset
* the subset defining the context in which to check whether this class is consistent
* @return a ConsistencyCheck instance whose boolean attribute (consistent)
* indicates if this class is consistent and whose String attribute
* (message)
* indicates why this class is not consistent if it is not
*/
|
Utility to check whether this class is consistent according to a subset
|
modelConsistency
|
{
"repo_name": "itesla/ipst-entsoe",
"path": "cim1-model/src/main/java/cim1/model/SynchronousMachine.java",
"license": "mpl-2.0",
"size": 88394
}
|
[
"java.util.BitSet"
] |
import java.util.BitSet;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,019,421
|
/**
 * Applies the classificator's currently selected label to every particle in
 * the list (hard labeling) and refreshes the image panel. Does nothing when
 * the list is null/empty or no label is currently selected.
 *
 * @param list the particles to label; may be null
 */
private void labelParticles(List<Particle> list) {
    if (list == null || list.isEmpty() || classificator == null) {
        return;
    }
    ParticleLabel label = classificator.getSelectedLabel();
    if (label == null) {
        return;
    }
    for (Particle particle : list) {
        image.getSelectedImage().labelHardly(particle, label);
    }
    imagePanel.refresh();
}
|
void function(List<Particle> list) { if (list != null && !list.isEmpty() && classificator != null && classificator.getSelectedLabel() != null) { ParticleLabel label = classificator.getSelectedLabel(); for (Particle p : list) { image.getSelectedImage().labelHardly(p, label); } imagePanel.refresh(); } }
|
/**
 * Assigns currently selected labelHardly to particles.
*
* @param point
* @return
*/
|
Assigns currently selected labelHardly to particles
|
labelParticles
|
{
"repo_name": "jurrik/pattern",
"path": "pattern.image.editor/src/org/pattern/image/editor/ImageEditor.java",
"license": "bsd-3-clause",
"size": 11957
}
|
[
"java.util.List",
"org.pattern.data.Particle",
"org.pattern.data.ParticleLabel"
] |
import java.util.List; import org.pattern.data.Particle; import org.pattern.data.ParticleLabel;
|
import java.util.*; import org.pattern.data.*;
|
[
"java.util",
"org.pattern.data"
] |
java.util; org.pattern.data;
| 108,544
|
/**
 * Variations associated with the Dynamic Content.
 *
 * @return an unmodifiable view of the variations; an empty list when none are set
 */
public List<Variation> getVariations() {
    return variations == null
            ? Collections.emptyList()
            : Collections.unmodifiableList(variations);
}
|
List<Variation> function() { if (variations == null) { return Collections.emptyList(); } return Collections.unmodifiableList(variations); }
|
/**
* Variations associated with the Dynamic Content.
*
* @return Variations associated with the Dynamic Content.
*/
|
Variations associated with the Dynamic Content
|
getVariations
|
{
"repo_name": "Crim/pardot-java-client",
"path": "src/main/java/com/darksci/pardot/api/response/dynamiccontent/DynamicContent.java",
"license": "mit",
"size": 5235
}
|
[
"java.util.Collections",
"java.util.List"
] |
import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,211,754
|
/** Whether sub accounts can be deleted for this cloud via {@code AccountSupport#delete(String)}. */
public boolean canDelete() throws InternalException, CloudException;
|
boolean function() throws InternalException, CloudException;
|
/**
* Indicates whether or not sub accounts can be deleted for this cloud using
* a {@link AccountSupport#delete(String)} call.
*
* @return true if sub accounts can be deleted, false if otherwise
* @throws CloudException
* the cloud provider encountered an error while processing the
* request
* @throws InternalException
* a Dasein Cloud error occurred while processing the request
*/
|
Indicates whether or not sub accounts can be deleted for this cloud using a <code>AccountSupport#delete(String)</code> call
|
canDelete
|
{
"repo_name": "greese/dasein-cloud-core",
"path": "src/main/java/org/dasein/cloud/admin/AccountCapabilities.java",
"license": "apache-2.0",
"size": 5424
}
|
[
"org.dasein.cloud.CloudException",
"org.dasein.cloud.InternalException"
] |
import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException;
|
import org.dasein.cloud.*;
|
[
"org.dasein.cloud"
] |
org.dasein.cloud;
| 2,833,131
|
/**
 * Returns the Java class represented by this descriptor.
 *
 * @return {@code PriceLevelQueryRsType.class}
 */
@Override
public java.lang.Class getJavaClass() {
    return org.chocolate_milk.model.PriceLevelQueryRsType.class;
}
|
@Override() java.lang.Class function( ) { return org.chocolate_milk.model.PriceLevelQueryRsType.class; }
|
/**
* Method getJavaClass.
*
* @return the Java class represented by this descriptor.
*/
|
Method getJavaClass
|
getJavaClass
|
{
"repo_name": "galleon1/chocolate-milk",
"path": "src/org/chocolate_milk/model/descriptors/PriceLevelQueryRsTypeDescriptor.java",
"license": "lgpl-3.0",
"size": 16067
}
|
[
"org.chocolate_milk.model.PriceLevelQueryRsType"
] |
import org.chocolate_milk.model.PriceLevelQueryRsType;
|
import org.chocolate_milk.model.*;
|
[
"org.chocolate_milk.model"
] |
org.chocolate_milk.model;
| 1,128,375
|
/**
 * Gets the modifications and deletions in this mutation. Calling this method
 * freezes the mutation (later modifications are ignored); changes made to the
 * returned updates do not affect this mutation.
 *
 * @return list of modifications and deletions
 */
public List<ColumnUpdate> getUpdates() {
    serialize();
    UnsynchronizedBuffer.Reader reader = new UnsynchronizedBuffer.Reader(data);
    if (updates == null) {
        if (entries == 1) {
            // Common single-update case avoids the array allocation.
            updates = Collections.singletonList(deserializeColumnUpdate(reader));
        } else {
            ColumnUpdate[] decoded = new ColumnUpdate[entries];
            for (int i = 0; i < entries; i++) {
                decoded[i] = deserializeColumnUpdate(reader);
            }
            // Arrays.asList keeps the original fixed-size list semantics.
            updates = Arrays.asList(decoded);
        }
    }
    return updates;
}
|
List<ColumnUpdate> function() { serialize(); UnsynchronizedBuffer.Reader in = new UnsynchronizedBuffer.Reader(data); if (updates == null) { if (entries == 1) { updates = Collections.singletonList(deserializeColumnUpdate(in)); } else { ColumnUpdate[] tmpUpdates = new ColumnUpdate[entries]; for (int i = 0; i < entries; i++) tmpUpdates[i] = deserializeColumnUpdate(in); updates = Arrays.asList(tmpUpdates); } } return updates; }
|
/**
* Gets the modifications and deletions in this mutation. After calling this method, further modifications to this mutation are ignored. Changes made to the
* returned updates do not affect this mutation.
*
* @return list of modifications and deletions
*/
|
Gets the modifications and deletions in this mutation. After calling this method, further modifications to this mutation are ignored. Changes made to the returned updates do not affect this mutation
|
getUpdates
|
{
"repo_name": "mikewalch/accumulo",
"path": "core/src/main/java/org/apache/accumulo/core/data/Mutation.java",
"license": "apache-2.0",
"size": 36188
}
|
[
"java.util.Arrays",
"java.util.Collections",
"java.util.List",
"org.apache.accumulo.core.util.UnsynchronizedBuffer"
] |
import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.accumulo.core.util.UnsynchronizedBuffer;
|
import java.util.*; import org.apache.accumulo.core.util.*;
|
[
"java.util",
"org.apache.accumulo"
] |
java.util; org.apache.accumulo;
| 2,511,960
|
/**
 * Generates a MIRING validation report as an XML string: assigns sample ids to
 * the results, merges similar results, then builds a miring-report document
 * with compliance, hmlid, sample, property and validation-result elements.
 *
 * @param validationResults the validation results to report
 * @param root hmlid root
 * @param extension hmlid extension
 * @param properties report properties to include
 * @param sampleIDs samples referenced by the results
 * @return the report XML, or null when report generation failed
 */
public static String generateReport(ValidationResult[] validationResults, String root, String extension, HashMap<String,String> properties, Sample[] sampleIDs)
{
    validationResults = assignSampleIDs(validationResults, sampleIDs);
    validationResults = combineSimilarResults(validationResults);
    try
    {
        Document report = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .newDocument();
        // Root element carries the generation timestamp.
        Element reportRoot = report.createElement("miring-report");
        reportRoot.setAttribute("timestamp", dateFormat.format(new Date()));
        report.appendChild(reportRoot);
        // Schema namespace declarations.
        reportRoot.setAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
        reportRoot.setAttribute("xsi:noNamespaceSchemaLocation", "http://schemas.nmdp.org/spec/miringreport/1.0/miringreport.xsd");
        addMiringCompliantElement(validationResults, report);
        addHmlidElement(root, extension, report);
        addSampleElements(validationResults, sampleIDs, report);
        addPropertyElements(properties, report);
        addValidationResultElements(validationResults, report);
        return Utilities.getStringFromDoc(report);
    }
    catch (ParserConfigurationException pce)
    {
        logger.error("Parser Configuration Exception in ReportGenerator", pce);
    }
    catch (Exception e)
    {
        logger.error("Exception in ReportGenerator", e);
    }
    //Oops, something went wrong.
    logger.error("Unknown Error during Miring Validation Report Generation. Returning Null");
    return null;
}
|
static String function(ValidationResult[] validationResults, String root, String extension, HashMap<String,String> properties, Sample[] sampleIDs) { validationResults = assignSampleIDs(validationResults,sampleIDs); validationResults = combineSimilarResults(validationResults); try { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document doc = docBuilder.newDocument(); Element rootElement = doc.createElement(STR); String currentDate = (dateFormat.format(new Date())); rootElement.setAttribute(STR, currentDate); doc.appendChild(rootElement); rootElement.setAttribute(STR, STRxsi:noNamespaceSchemaLocationSTRhttp: addMiringCompliantElement(validationResults, doc); addHmlidElement(root, extension, doc); addSampleElements(validationResults, sampleIDs, doc); addPropertyElements(properties, doc); addValidationResultElements(validationResults, doc); return(Utilities.getStringFromDoc(doc)); } catch (ParserConfigurationException pce) { logger.error(STR, pce); } catch (Exception e) { logger.error(STR, e); } logger.error(STR); return null; }
|
/**
* Generate a Miring Results Report
*
 * @param validationResults an array of ValidationResult objects
* @param root the root attribute on an HMLID node on the source XML. If it exists, you should include it in the report
* @param extension the extension attribute on an HMLID node on the source XML. If it exists, you should include it in the report
* @param properties a HashMap<String,String> of property values to include on the results report
* @param sampleIDs an array of Sample objects to list on the report.
* @return a String containing MIRING Results Report
*/
|
Generate a Miring Results Report
|
generateReport
|
{
"repo_name": "bmatern-nmdp/MiringValidator",
"path": "src/main/java/org/nmdp/miring/ReportGenerator.java",
"license": "gpl-3.0",
"size": 19162
}
|
[
"java.util.Date",
"java.util.HashMap",
"javax.xml.parsers.DocumentBuilder",
"javax.xml.parsers.DocumentBuilderFactory",
"javax.xml.parsers.ParserConfigurationException",
"org.w3c.dom.Document",
"org.w3c.dom.Element"
] |
import java.util.Date; import java.util.HashMap; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element;
|
import java.util.*; import javax.xml.parsers.*; import org.w3c.dom.*;
|
[
"java.util",
"javax.xml",
"org.w3c.dom"
] |
java.util; javax.xml; org.w3c.dom;
| 1,989,872
|
public HTable createTable(HTableDescriptor htd, byte[][] families, Configuration c)
throws IOException {
for(byte[] family : families) {
HColumnDescriptor hcd = new HColumnDescriptor(family);
// Disable blooms (they are on by default as of 0.95) but we disable them here because
// tests have hard coded counts of what to expect in block cache, etc., and blooms being
// on is interfering.
hcd.setBloomFilterType(BloomType.NONE);
htd.addFamily(hcd);
}
getHBaseAdmin().createTable(htd);
// HBaseAdmin only waits for regions to appear in hbase:meta we should wait until they are assigned
waitUntilAllRegionsAssigned(htd.getTableName());
return (HTable)getConnection().getTable(htd.getTableName());
}
|
HTable function(HTableDescriptor htd, byte[][] families, Configuration c) throws IOException { for(byte[] family : families) { HColumnDescriptor hcd = new HColumnDescriptor(family); hcd.setBloomFilterType(BloomType.NONE); htd.addFamily(hcd); } getHBaseAdmin().createTable(htd); waitUntilAllRegionsAssigned(htd.getTableName()); return (HTable)getConnection().getTable(htd.getTableName()); }
|
/**
* Create a table.
* @param htd
* @param families
* @param c Configuration to use
* @return An HTable instance for the created table.
* @throws IOException
*/
|
Create a table
|
createTable
|
{
"repo_name": "grokcoder/pbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 132664
}
|
[
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.client.HTable",
"org.apache.hadoop.hbase.regionserver.BloomType"
] |
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.regionserver.BloomType;
|
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.regionserver.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 559,648
|
public void parseEncoding(PDFObject encoding) throws IOException {
differences = new HashMap<Character,String>();
// figure out the base encoding, if one exists
PDFObject baseEncObj = encoding.getDictRef("BaseEncoding");
if (baseEncObj != null) {
baseEncoding = getBaseEncoding(baseEncObj.getStringValue());
}
// parse the differences array
PDFObject diffArrayObj = encoding.getDictRef("Differences");
if (diffArrayObj != null) {
PDFObject[] diffArray = diffArrayObj.getArray();
int curPosition = -1;
for (int i = 0; i < diffArray.length; i++) {
if (diffArray[i].getType() == PDFObject.NUMBER) {
curPosition = diffArray[i].getIntValue();
} else if (diffArray[i].getType() == PDFObject.NAME) {
Character key = new Character((char) curPosition);
differences.put(key, diffArray[i].getStringValue());
curPosition++;
} else {
throw new IllegalArgumentException("Unexpected type in diff array: " + diffArray[i]);
}
}
}
}
|
void function(PDFObject encoding) throws IOException { differences = new HashMap<Character,String>(); PDFObject baseEncObj = encoding.getDictRef(STR); if (baseEncObj != null) { baseEncoding = getBaseEncoding(baseEncObj.getStringValue()); } PDFObject diffArrayObj = encoding.getDictRef(STR); if (diffArrayObj != null) { PDFObject[] diffArray = diffArrayObj.getArray(); int curPosition = -1; for (int i = 0; i < diffArray.length; i++) { if (diffArray[i].getType() == PDFObject.NUMBER) { curPosition = diffArray[i].getIntValue(); } else if (diffArray[i].getType() == PDFObject.NAME) { Character key = new Character((char) curPosition); differences.put(key, diffArray[i].getStringValue()); curPosition++; } else { throw new IllegalArgumentException(STR + diffArray[i]); } } } }
|
/**
* Parse a PDF encoding object for the actual encoding
*/
|
Parse a PDF encoding object for the actual encoding
|
parseEncoding
|
{
"repo_name": "Allogy/allogy-legacy-android-app",
"path": "Allogy/src/com/hsl/txtreader/PDFFontEncoding.java",
"license": "apache-2.0",
"size": 8835
}
|
[
"com.sun.pdfview.PDFObject",
"java.io.IOException",
"java.util.HashMap"
] |
import com.sun.pdfview.PDFObject; import java.io.IOException; import java.util.HashMap;
|
import com.sun.pdfview.*; import java.io.*; import java.util.*;
|
[
"com.sun.pdfview",
"java.io",
"java.util"
] |
com.sun.pdfview; java.io; java.util;
| 1,388,088
|
public void back() {
if (isChanged()) {
MaterialDialog.Builder dialogBuilder = new MaterialDialog.Builder(getContext());
dialogBuilder.setMessage(mBackMessage);
dialogBuilder.setPositiveButton(mBackPositiveActionText, (dialog, which) -> dismiss());
dialogBuilder.setNegativeButton(R.string.cancel, null);
dialogBuilder.show();
} else {
dismiss();
}
}
|
void function() { if (isChanged()) { MaterialDialog.Builder dialogBuilder = new MaterialDialog.Builder(getContext()); dialogBuilder.setMessage(mBackMessage); dialogBuilder.setPositiveButton(mBackPositiveActionText, (dialog, which) -> dismiss()); dialogBuilder.setNegativeButton(R.string.cancel, null); dialogBuilder.show(); } else { dismiss(); } }
|
/**
* Display back dialog discard message if something has been changed in the fragment. If nothing has
* been changed it simply dismisses the window.
*/
|
Display back dialog discard message if something has been changed in the fragment. If nothing has been changed it simply dismisses the window
|
back
|
{
"repo_name": "Spiddekauga/android-utils",
"path": "android-framework/src/main/java/io/blushine/android/AppFragment.java",
"license": "mit",
"size": 5923
}
|
[
"de.mrapp.android.dialog.MaterialDialog"
] |
import de.mrapp.android.dialog.MaterialDialog;
|
import de.mrapp.android.dialog.*;
|
[
"de.mrapp.android"
] |
de.mrapp.android;
| 1,723,935
|
public List<DataProcessor> getElements() {
return frontEndList;
}
|
List<DataProcessor> function() { return frontEndList; }
|
/** Returns the collection of <code>DataProcessor</code>s of this <code>FrontEnd</code>.
* @return list of processors
*/
|
Returns the collection of <code>DataProcessor</code>s of this <code>FrontEnd</code>
|
getElements
|
{
"repo_name": "deepstupid/sphinx5",
"path": "sphinx4-core/src/main/java/edu/cmu/sphinx/frontend/FrontEnd.java",
"license": "agpl-3.0",
"size": 12140
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,230,799
|
public void interpolate(List<float[]> data, List<Double> times, List<float[]> vel, List<float[]> acc)
{
reset();
int l = data.size() - 1; // x[0]...x[l], times[0]...times[l]
n = 3 * l + 2; // #control points = 3l+3
if (l > 0)
{
// --- assign break points and generate knot vector
m_afTime.clear();
// m_afTime.push_back(times[0]);
for (int i = 0; i < l; i++)
{
double t = times.get(i);
m_afTime.add(t);
// -- determine additional inner knots by applying
// -- average parametrization
m_afTime.add(times.get(i) + 1.0f / 3.0f * (times.get(i + 1) - times.get(i)));
m_afTime.add(times.get(i) + 2.0f / 3.0f * (times.get(i + 1) - times.get(i)));
}
// last breakpoint is deferred in preparation for
// calculating the end velocity of the spline
m_afTime.add(times.get(times.size() - 1) + 1E-5f);
recomputeKnotVector();
// --- compose linear system
// cout << "composing matrix" << endl;
List<List<Float>> a = new ArrayList<>();
CollectionUtils.ensureSize(a, n + 1);
for (int i = 0; i <= n; i++)
{
// a[i].resize(n+1);
List<Float> fl = new ArrayList<>();
CollectionUtils.ensureSize(fl, n + 1);
a.set(i, fl);
for (int j = 0; j <= n; j++)
a.get(i).set(j, 0f);
}
// -- l+1 position constraints
// cout << "positions..." << endl;
// row 0: position at t_3 = p0
a.get(0).set(0, 1f);
// row 1..l-1: position at t_4,t_6,...t_2l
Matrix N = new Matrix(n + k, k + 1);
for (int i = 1; i < l; i++)
{
int j = 3 * (i + 1);
calcBlendingFunctionsAt(t.get(j), N);
a.get(i).set(j - 3, (float) N.get(j - 3, k));
a.get(i).set(j - 2, (float) N.get(j - 2, k));
a.get(i).set(j - 1, (float) N.get(j - 1, k));
}
// row l: position at t_n+1 = p_l
a.get(l).set(n, 1f);
// -- l+1 velocity constraints
// cout << "velocities..." << endl;
// row l+1: velocity at t_3
calcDotBlendFunctionsAt(t.get(3), N);
a.get(l + 1).set(0, (float) N.get(0, k));
a.get(l + 1).set(1, (float) N.get(1, k));
a.get(l + 1).set(2, (float) N.get(2, k));
// row l+2..l+2+(l-1)=2l+1
for (int i = 1; i < l; i++)
{
int row = l + 1 + i;
int j = 3 * (i + 1);
calcDotBlendFunctionsAt(t.get(j), N);
for (int h = 0; h < j; h++)
a.get(row).set(h, (float) N.get(h, k));
}
// row 2l+2: velocity at end time
calcDotBlendFunctionsAt(times.get(times.size() - 1), N);
a.get(2 * l + 2).set(n - 2, (float) N.get(n - 2, k));
a.get(2 * l + 2).set(n - 1, (float) N.get(n - 1, k));
a.get(2 * l + 2).set(n, (float) N.get(n, k));
// -- l+1 acceleration constraints
// cout << "accelerations..." << endl;
for (int i = 1; i <= l + 1; i++)
{
//int row = 2 * l + 1 + i;
int j = 3 * (i + 1);
calcDDotBlendFunctionsAt(t.get(j), N);
for (j = 0; j <= n; j++)
a.get(n).set(j, (float) N.get(j, k));
}
// --- compose result vector
// cout << "composing vector" << endl;
// --- compose result vector
List<float[]> b = new ArrayList<>();
CollectionUtils.ensureSize(b, n + 1);
for (int i = 0; i <= l; i++)
b.set(i, data.get(i));
for (int i = l + 1; i <= 2 * l + 1; i++)
b.set(i, vel.get(i - l - 1));
for (int i = 2 * l + 2; i <= n; i++)
b.set(i, vel.get(i - l - 1));
// cout << "solving system" << endl;
// --- solve linear system
if (!LinearSystem.solve(a, b)) log.warn("MgcNUBSpline3::interpolate : couldn't solve linear system!");
// control points: p[0]...p[n]
c = b;
// printControlPoints(cout);
}
else log.warn("interpolate: too less data points provided!");
}
|
void function(List<float[]> data, List<Double> times, List<float[]> vel, List<float[]> acc) { reset(); int l = data.size() - 1; n = 3 * l + 2; if (l > 0) { m_afTime.clear(); for (int i = 0; i < l; i++) { double t = times.get(i); m_afTime.add(t); m_afTime.add(times.get(i) + 1.0f / 3.0f * (times.get(i + 1) - times.get(i))); m_afTime.add(times.get(i) + 2.0f / 3.0f * (times.get(i + 1) - times.get(i))); } m_afTime.add(times.get(times.size() - 1) + 1E-5f); recomputeKnotVector(); List<List<Float>> a = new ArrayList<>(); CollectionUtils.ensureSize(a, n + 1); for (int i = 0; i <= n; i++) { List<Float> fl = new ArrayList<>(); CollectionUtils.ensureSize(fl, n + 1); a.set(i, fl); for (int j = 0; j <= n; j++) a.get(i).set(j, 0f); } a.get(0).set(0, 1f); Matrix N = new Matrix(n + k, k + 1); for (int i = 1; i < l; i++) { int j = 3 * (i + 1); calcBlendingFunctionsAt(t.get(j), N); a.get(i).set(j - 3, (float) N.get(j - 3, k)); a.get(i).set(j - 2, (float) N.get(j - 2, k)); a.get(i).set(j - 1, (float) N.get(j - 1, k)); } a.get(l).set(n, 1f); calcDotBlendFunctionsAt(t.get(3), N); a.get(l + 1).set(0, (float) N.get(0, k)); a.get(l + 1).set(1, (float) N.get(1, k)); a.get(l + 1).set(2, (float) N.get(2, k)); for (int i = 1; i < l; i++) { int row = l + 1 + i; int j = 3 * (i + 1); calcDotBlendFunctionsAt(t.get(j), N); for (int h = 0; h < j; h++) a.get(row).set(h, (float) N.get(h, k)); } calcDotBlendFunctionsAt(times.get(times.size() - 1), N); a.get(2 * l + 2).set(n - 2, (float) N.get(n - 2, k)); a.get(2 * l + 2).set(n - 1, (float) N.get(n - 1, k)); a.get(2 * l + 2).set(n, (float) N.get(n, k)); for (int i = 1; i <= l + 1; i++) { int j = 3 * (i + 1); calcDDotBlendFunctionsAt(t.get(j), N); for (j = 0; j <= n; j++) a.get(n).set(j, (float) N.get(j, k)); } List<float[]> b = new ArrayList<>(); CollectionUtils.ensureSize(b, n + 1); for (int i = 0; i <= l; i++) b.set(i, data.get(i)); for (int i = l + 1; i <= 2 * l + 1; i++) b.set(i, vel.get(i - l - 1)); for (int i = 2 * l + 2; i <= n; i++) b.set(i, 
vel.get(i - l - 1)); if (!LinearSystem.solve(a, b)) log.warn(STR); c = b; } else log.warn(STR); }
|
/**
* data points x[0]...x[l], times t[0]...t[l]
* velocities v[0]...v[l], accelerations a[0]...a[l]
*/
|
data points x[0]...x[l], times t[0]...t[l] velocities v[0]...v[l], accelerations a[0]...a[l]
|
interpolate
|
{
"repo_name": "ArticulatedSocialAgentsPlatform/AsapRealizer",
"path": "AsapMath/src/asap/math/splines/NUBSpline3.java",
"license": "lgpl-3.0",
"size": 44889
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,858,744
|
@Test
public void testRemoveRouterListener() throws Exception {
ospfRouterListener = EasyMock.createMock(OspfRouterListener.class);
ospfController.addRouterListener(ospfRouterListener);
ospfController.removeRouterListener(ospfRouterListener);
assertThat(ospfController, is(notNullValue()));
}
|
void function() throws Exception { ospfRouterListener = EasyMock.createMock(OspfRouterListener.class); ospfController.addRouterListener(ospfRouterListener); ospfController.removeRouterListener(ospfRouterListener); assertThat(ospfController, is(notNullValue())); }
|
/**
* Tests removeRouterListener() method.
*/
|
Tests removeRouterListener() method
|
testRemoveRouterListener
|
{
"repo_name": "harikrushna-Huawei/hackathon",
"path": "protocols/ospf/ctl/src/test/java/org/onosproject/ospf/controller/impl/OspfControllerImplTest.java",
"license": "apache-2.0",
"size": 7619
}
|
[
"org.easymock.EasyMock",
"org.hamcrest.CoreMatchers",
"org.hamcrest.MatcherAssert",
"org.onosproject.ospf.controller.OspfRouterListener"
] |
import org.easymock.EasyMock; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.onosproject.ospf.controller.OspfRouterListener;
|
import org.easymock.*; import org.hamcrest.*; import org.onosproject.ospf.controller.*;
|
[
"org.easymock",
"org.hamcrest",
"org.onosproject.ospf"
] |
org.easymock; org.hamcrest; org.onosproject.ospf;
| 688,716
|
private void write(Kim kim, Huff huff) throws JSONException {
write(kim, 0, kim.length, huff);
}
|
void function(Kim kim, Huff huff) throws JSONException { write(kim, 0, kim.length, huff); }
|
/**
* Write each of the bytes in a kim with Huffman encoding.
*
* @param kim A kim containing the bytes to be written.
* @param huff The Huffman encoder.
* @throws JSONException
*/
|
Write each of the bytes in a kim with Huffman encoding
|
write
|
{
"repo_name": "reo7sp/BoardPP",
"path": "src/main/java/org/json/zip/Compressor.java",
"license": "apache-2.0",
"size": 13578
}
|
[
"org.json.JSONException",
"org.json.Kim"
] |
import org.json.JSONException; import org.json.Kim;
|
import org.json.*;
|
[
"org.json"
] |
org.json;
| 1,783,357
|
protected void sequence_JSONBooleanLiteral(ISerializationContext context, JSONBooleanLiteral semanticObject) {
genericSequencer.createSequence(context, semanticObject);
}
|
void function(ISerializationContext context, JSONBooleanLiteral semanticObject) { genericSequencer.createSequence(context, semanticObject); }
|
/**
* Contexts:
* JSONValue returns JSONBooleanLiteral
* JSONBooleanLiteral returns JSONBooleanLiteral
*
* Constraint:
* booleanValue?='true'?
*/
|
Contexts: JSONValue returns JSONBooleanLiteral JSONBooleanLiteral returns JSONBooleanLiteral Constraint: booleanValue?='true'
|
sequence_JSONBooleanLiteral
|
{
"repo_name": "lbeurerkellner/n4js",
"path": "plugins/org.eclipse.n4js.json/src-gen/org/eclipse/n4js/json/serializer/JSONSemanticSequencer.java",
"license": "epl-1.0",
"size": 7495
}
|
[
"org.eclipse.n4js.json.JSON",
"org.eclipse.xtext.serializer.ISerializationContext"
] |
import org.eclipse.n4js.json.JSON; import org.eclipse.xtext.serializer.ISerializationContext;
|
import org.eclipse.n4js.json.*; import org.eclipse.xtext.serializer.*;
|
[
"org.eclipse.n4js",
"org.eclipse.xtext"
] |
org.eclipse.n4js; org.eclipse.xtext;
| 2,559,216
|
private String convertBillItemToLineItem(BillItem item) {
String lineItem;
double totalPrice = item.getPrice() * item.getQuantity();
lineItem = String.format(
LINE_ITEM_FORMAT,
item.getName(),
item.getQuantity(),
currencyFormat.format(item.getPrice()),
currencyFormat.format(totalPrice)
);
return lineItem;
}
|
String function(BillItem item) { String lineItem; double totalPrice = item.getPrice() * item.getQuantity(); lineItem = String.format( LINE_ITEM_FORMAT, item.getName(), item.getQuantity(), currencyFormat.format(item.getPrice()), currencyFormat.format(totalPrice) ); return lineItem; }
|
/**
* Gathers the fields from the guest bill items and converts them into their
* formatted string representation to be displayed as a single line-item
* on the printed guest bill.
*
* @param item The bill item to format.
* @return The formatted bill item.
*/
|
Gathers the fields from the guest bill items and converts them into their formatted string representation to be displayed as a single line-item on the printed guest bill
|
convertBillItemToLineItem
|
{
"repo_name": "maillouxc/git-rekt",
"path": "src/main/java/com/gitrekt/resort/model/services/BillPdfGenerator.java",
"license": "mit",
"size": 8759
}
|
[
"com.gitrekt.resort.model.entities.BillItem"
] |
import com.gitrekt.resort.model.entities.BillItem;
|
import com.gitrekt.resort.model.entities.*;
|
[
"com.gitrekt.resort"
] |
com.gitrekt.resort;
| 2,479,952
|
public static void sendNotification(org.wso2.carbon.apimgt.impl.notifier.events.Event event, String notifierType) {
List<Notifier> notifierList = ServiceReferenceHolder.getInstance().getNotifiersMap().get(notifierType);
notifierList.forEach((notifier) -> {
try {
notifier.publishEvent(event);
} catch (NotifierException e) {
log.error("Error when publish " + event + " through notifier:" + notifierType + ". Error:" + e);
}
});
}
|
static void function(org.wso2.carbon.apimgt.impl.notifier.events.Event event, String notifierType) { List<Notifier> notifierList = ServiceReferenceHolder.getInstance().getNotifiersMap().get(notifierType); notifierList.forEach((notifier) -> { try { notifier.publishEvent(event); } catch (NotifierException e) { log.error(STR + event + STR + notifierType + STR + e); } }); }
|
/**
 * This method is used to send notifications.
*
* @param event Event object
* @param notifierType eventType
*/
|
This method is used to send notifications
|
sendNotification
|
{
"repo_name": "tharikaGitHub/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/utils/APIUtil.java",
"license": "apache-2.0",
"size": 563590
}
|
[
"java.util.List",
"org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder",
"org.wso2.carbon.apimgt.impl.notifier.Notifier",
"org.wso2.carbon.apimgt.impl.notifier.exceptions.NotifierException",
"org.wso2.carbon.databridge.commons.Event"
] |
import java.util.List; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.notifier.Notifier; import org.wso2.carbon.apimgt.impl.notifier.exceptions.NotifierException; import org.wso2.carbon.databridge.commons.Event;
|
import java.util.*; import org.wso2.carbon.apimgt.impl.internal.*; import org.wso2.carbon.apimgt.impl.notifier.*; import org.wso2.carbon.apimgt.impl.notifier.exceptions.*; import org.wso2.carbon.databridge.commons.*;
|
[
"java.util",
"org.wso2.carbon"
] |
java.util; org.wso2.carbon;
| 1,323,657
|
public static int prctl(int option, long arg2, long arg3, long arg4, long arg5) throws ErrnoException { return Libcore.os.prctl(option, arg2, arg3, arg4, arg5); };
public static int pread(FileDescriptor fd, ByteBuffer buffer, long offset) throws ErrnoException, InterruptedIOException { return Libcore.os.pread(fd, buffer, offset); }
|
static int function(int option, long arg2, long arg3, long arg4, long arg5) throws ErrnoException { return Libcore.os.prctl(option, arg2, arg3, arg4, arg5); }; public static int pread(FileDescriptor fd, ByteBuffer buffer, long offset) throws ErrnoException, InterruptedIOException { return Libcore.os.pread(fd, buffer, offset); }
|
/**
* See <a href="http://man7.org/linux/man-pages/man2/prctl.2.html">prctl(2)</a>.
*/
|
See prctl(2)
|
prctl
|
{
"repo_name": "syslover33/ctank",
"path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/android/system/Os.java",
"license": "gpl-3.0",
"size": 28185
}
|
[
"java.io.FileDescriptor",
"java.io.InterruptedIOException",
"java.nio.ByteBuffer"
] |
import java.io.FileDescriptor; import java.io.InterruptedIOException; import java.nio.ByteBuffer;
|
import java.io.*; import java.nio.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 1,684,192
|
public void createManagedComputeService(String provider, boolean eventSupport) {
List<String> options = new LinkedList<String>();
if (regions != null && !regions.isEmpty()) {
options.add("jclouds.regions=" + regions);
}
String cmd = "jclouds:compute-service-create --provider " + provider + " --identity " + identity + " --credential " + credential;
for (String option : options) {
cmd += " --add-option " + option;
}
executeCommand(cmd);
}
|
void function(String provider, boolean eventSupport) { List<String> options = new LinkedList<String>(); if (regions != null && !regions.isEmpty()) { options.add(STR + regions); } String cmd = STR + provider + STR + identity + STR + credential; for (String option : options) { cmd += STR + option; } executeCommand(cmd); }
|
/**
* Creates a Managed Compute Service using the configured system properties.
*/
|
Creates a Managed Compute Service using the configured system properties
|
createManagedComputeService
|
{
"repo_name": "jclouds/legacy-jclouds-karaf",
"path": "itests/src/test/java/org/jclouds/karaf/itests/live/JcloudsLiveTestSupport.java",
"license": "apache-2.0",
"size": 3588
}
|
[
"java.util.LinkedList",
"java.util.List"
] |
import java.util.LinkedList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,575,588
|
void onDismiss(View view, Object token);
}
public SwipeDismissTouchListener(View view, Object token, OnDismissCallback callback) {
ViewConfiguration vc = ViewConfiguration.get(view.getContext());
mSlop = vc.getScaledTouchSlop();
mMinFlingVelocity = vc.getScaledMinimumFlingVelocity();
mMaxFlingVelocity = vc.getScaledMaximumFlingVelocity();
mAnimationTime = view.getContext().getResources().getInteger(
android.R.integer.config_shortAnimTime);
mView = view;
mToken = token;
mCallback = callback;
}
|
void onDismiss(View view, Object token); } public SwipeDismissTouchListener(View view, Object token, OnDismissCallback callback) { ViewConfiguration vc = ViewConfiguration.get(view.getContext()); mSlop = vc.getScaledTouchSlop(); mMinFlingVelocity = vc.getScaledMinimumFlingVelocity(); mMaxFlingVelocity = vc.getScaledMaximumFlingVelocity(); mAnimationTime = view.getContext().getResources().getInteger( android.R.integer.config_shortAnimTime); mView = view; mToken = token; mCallback = callback; }
|
/**
 * Called when the user has indicated they would like to dismiss the view.
*
* @param view The originating {@link View} to be dismissed.
* @param token The optional token passed to this object's constructor.
*/
|
Called when the user has indicated they would like to dismiss the view
|
onDismiss
|
{
"repo_name": "vbevans94/mobileqbe",
"path": "src/ua/org/cofriends/db/view/SwipeDismissTouchListener.java",
"license": "gpl-2.0",
"size": 9691
}
|
[
"android.view.View",
"android.view.ViewConfiguration"
] |
import android.view.View; import android.view.ViewConfiguration;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,503,924
|
public final void goToPreviousProcess() throws UnexistingStepException {
if (this.getProgression() <= AbstractBuildingsIslet.FIRST_STEP) {
throw new UnexistingStepException();
}
this.biController.getPreviousStep();
setDefaultCharacteristicsPanel();
setToolbarButtons();
refreshViews();
this.pController.displayProcessingParameters(this.getProgression());
}
|
final void function() throws UnexistingStepException { if (this.getProgression() <= AbstractBuildingsIslet.FIRST_STEP) { throw new UnexistingStepException(); } this.biController.getPreviousStep(); setDefaultCharacteristicsPanel(); setToolbarButtons(); refreshViews(); this.pController.displayProcessingParameters(this.getProgression()); }
|
/**
* Go BACK to the previous process.
* @throws UnexistingStepException
* if the step does not exist (progression < 1)
*/
|
Go BACK to the previous process
|
goToPreviousProcess
|
{
"repo_name": "Nantes1900/Nantes-1900-PGROU-IHM",
"path": "src/fr/nantes1900/control/isletprocess/IsletProcessController.java",
"license": "gpl-3.0",
"size": 21371
}
|
[
"fr.nantes1900.models.islets.AbstractBuildingsIslet"
] |
import fr.nantes1900.models.islets.AbstractBuildingsIslet;
|
import fr.nantes1900.models.islets.*;
|
[
"fr.nantes1900.models"
] |
fr.nantes1900.models;
| 1,186,748
|
public static <T> T accessMap(Map map, String namePath) {
if (map == null || namePath == null || map.isEmpty() || namePath.length() < 1) {
return (T) null;
}
String[] names = namePath.split("\\.");
Object value = null;
Map tempMap = map;
for (String name : names) {
value = tempMap.get(name);
if (value == null || !(value instanceof Map)) {
break;
}
tempMap = (Map) value;
}
return (T) value;
}
|
static <T> T function(Map map, String namePath) { if (map == null namePath == null map.isEmpty() namePath.length() < 1) { return (T) null; } String[] names = namePath.split("\\."); Object value = null; Map tempMap = map; for (String name : names) { value = tempMap.get(name); if (value == null !(value instanceof Map)) { break; } tempMap = (Map) value; } return (T) value; }
|
/**
* Access Map Object.
* @param <T>
* @param map
* @param namePath properties split by dot '.', such as: rule.ruleName.
* @return
* @author <a href="mailto:iffiff1@gmail.com">Tyler Chen</a>
* @since 2013-2-28
*/
|
Access Map Object
|
accessMap
|
{
"repo_name": "tylerchen/springmvc-mybatis-v1.0-project",
"path": "src/main/java/org/iff/infra/util/GsonHelper.java",
"license": "mit",
"size": 1673
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 371,786
|
return new TemplateHandlerImpl(engine, DEFAULT_TEMPLATE_DIRECTORY, DEFAULT_CONTENT_TYPE);
}
|
return new TemplateHandlerImpl(engine, DEFAULT_TEMPLATE_DIRECTORY, DEFAULT_CONTENT_TYPE); }
|
/**
* Create a handler
*
* @param engine the template engine
* @return the handler
*/
|
Create a handler
|
create
|
{
"repo_name": "remi128/vertx-apex_old",
"path": "src/main/java/io/vertx/ext/apex/handler/TemplateHandler.java",
"license": "apache-2.0",
"size": 2565
}
|
[
"io.vertx.ext.apex.handler.impl.TemplateHandlerImpl"
] |
import io.vertx.ext.apex.handler.impl.TemplateHandlerImpl;
|
import io.vertx.ext.apex.handler.impl.*;
|
[
"io.vertx.ext"
] |
io.vertx.ext;
| 1,386,292
|
public ParsedQuery parseQuery(BytesReference source) {
try {
ParsedQuery parsedQuery = null;
XContentParser parser = XContentHelper.createParser(source);
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
String fieldName = parser.currentName();
if ("query".equals(fieldName)) {
parsedQuery = parse(parser);
} else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) {
byte[] querySource = parser.binaryValue();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
parsedQuery = parse(qSourceParser);
} else {
throw new QueryParsingException(getParseContext(), "request does not support [" + fieldName + "]");
}
}
}
if (parsedQuery != null) {
return parsedQuery;
}
} catch (QueryParsingException e) {
throw e;
} catch (Throwable e) {
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
}
throw new QueryParsingException(getParseContext(), "Required query is missing");
}
|
ParsedQuery function(BytesReference source) { try { ParsedQuery parsedQuery = null; XContentParser parser = XContentHelper.createParser(source); for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { if (token == XContentParser.Token.FIELD_NAME) { String fieldName = parser.currentName(); if ("query".equals(fieldName)) { parsedQuery = parse(parser); } else if (STR.equals(fieldName) STR.equals(fieldName)) { byte[] querySource = parser.binaryValue(); XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource); parsedQuery = parse(qSourceParser); } else { throw new QueryParsingException(getParseContext(), STR + fieldName + "]"); } } } if (parsedQuery != null) { return parsedQuery; } } catch (QueryParsingException e) { throw e; } catch (Throwable e) { throw new QueryParsingException(getParseContext(), STR, e); } throw new QueryParsingException(getParseContext(), STR); }
|
/**
* Selectively parses a query from a top level query or query_binary json field from the specified source.
*/
|
Selectively parses a query from a top level query or query_binary json field from the specified source
|
parseQuery
|
{
"repo_name": "strapdata/elassandra-test",
"path": "core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java",
"license": "apache-2.0",
"size": 12884
}
|
[
"org.elasticsearch.common.bytes.BytesReference",
"org.elasticsearch.common.xcontent.XContentFactory",
"org.elasticsearch.common.xcontent.XContentHelper",
"org.elasticsearch.common.xcontent.XContentParser"
] |
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.bytes.*; import org.elasticsearch.common.xcontent.*;
|
[
"org.elasticsearch.common"
] |
org.elasticsearch.common;
| 2,812,409
|
public ArrayList<LatLng> parseFileToList() throws IOException {
//TODO IMPLEMENT
throw new IOException("Could not parse file.");
// return new ArrayList<LatLng>();
}
|
ArrayList<LatLng> function() throws IOException { throw new IOException(STR); }
|
/**
* Receives the file the user has selected to generate a heatmap from, and turns this into a list of coordinates.
* @return List of coordinates in Latitude, Longitude format.
*/
|
Receives the file the user has selected to generate a heatmap from, and turns this into a list of coordinates
|
parseFileToList
|
{
"repo_name": "JoepKerste/smallyam",
"path": "app/src/main/java/joepkerste/com/smallyam/MapsActivity.java",
"license": "mit",
"size": 5195
}
|
[
"com.google.android.gms.maps.model.LatLng",
"java.io.IOException",
"java.util.ArrayList"
] |
import com.google.android.gms.maps.model.LatLng; import java.io.IOException; import java.util.ArrayList;
|
import com.google.android.gms.maps.model.*; import java.io.*; import java.util.*;
|
[
"com.google.android",
"java.io",
"java.util"
] |
com.google.android; java.io; java.util;
| 2,752,177
|
public static synchronized String getAntVersion() throws BuildException {
if (antVersion == null) {
try {
Properties props = new Properties();
InputStream in =
Main.class.getResourceAsStream("/org/apache/tools/ant/version.txt");
props.load(in);
in.close();
StringBuffer msg = new StringBuffer();
msg.append("Apache Ant version ");
msg.append(props.getProperty("VERSION"));
msg.append(" compiled on ");
msg.append(props.getProperty("DATE"));
antVersion = msg.toString();
} catch (IOException ioe) {
throw new BuildException("Could not load the version information:"
+ ioe.getMessage());
} catch (NullPointerException npe) {
throw new BuildException("Could not load the version information.");
}
}
return antVersion;
}
|
static synchronized String function() throws BuildException { if (antVersion == null) { try { Properties props = new Properties(); InputStream in = Main.class.getResourceAsStream(STR); props.load(in); in.close(); StringBuffer msg = new StringBuffer(); msg.append(STR); msg.append(props.getProperty(STR)); msg.append(STR); msg.append(props.getProperty("DATE")); antVersion = msg.toString(); } catch (IOException ioe) { throw new BuildException(STR + ioe.getMessage()); } catch (NullPointerException npe) { throw new BuildException(STR); } } return antVersion; }
|
/**
* Returns the Ant version information, if available. Once the information
* has been loaded once, it's cached and returned from the cache on future
* calls.
*
* @return the Ant version information as a String
* (always non-<code>null</code>)
*
* @exception BuildException if the version information is unavailable
*/
|
Returns the Ant version information, if available. Once the information has been loaded once, it's cached and returned from the cache on future calls
|
getAntVersion
|
{
"repo_name": "neoautus/lucidj",
"path": "extras/AntInstaller/AntInstaller-beta0.8/src/org/tp23/antinstaller/antmod/Main.java",
"license": "apache-2.0",
"size": 41286
}
|
[
"java.io.IOException",
"java.io.InputStream",
"java.util.Properties",
"org.apache.tools.ant.BuildException"
] |
import java.io.IOException; import java.io.InputStream; import java.util.Properties; import org.apache.tools.ant.BuildException;
|
import java.io.*; import java.util.*; import org.apache.tools.ant.*;
|
[
"java.io",
"java.util",
"org.apache.tools"
] |
java.io; java.util; org.apache.tools;
| 464,055
|
protected void doExecute(String finalLocation, ActionInvocation invocation) throws Exception
{
HttpServletResponse response = (HttpServletResponse)invocation.getInvocationContext().get(HTTP_RESPONSE);
response.setContentType("image/jpg");
String word = CaptchaUtil.getRandomWord(wordLength).toLowerCase();
//save word to session
invocation.getInvocationContext().getSession().put("captcha", word);
BufferedImage image = CaptchaUtil.getCaptchaImage(word);
ImageIO.write(image, "jpg", response.getOutputStream());
}
|
void function(String finalLocation, ActionInvocation invocation) throws Exception { HttpServletResponse response = (HttpServletResponse)invocation.getInvocationContext().get(HTTP_RESPONSE); response.setContentType(STR); String word = CaptchaUtil.getRandomWord(wordLength).toLowerCase(); invocation.getInvocationContext().getSession().put(STR, word); BufferedImage image = CaptchaUtil.getCaptchaImage(word); ImageIO.write(image, "jpg", response.getOutputStream()); }
|
/**
* method to define doExecute() for generating an image as result
* @param finalLocation the final location
* @param invocation the instance of ActionInvocation
* */
|
method to define doExecute() for generating an image as result
|
doExecute
|
{
"repo_name": "animesks/projects",
"path": "Non_Academic/FileTrackingSystem_2012/WebApp/src/in/ac/iiitdmj/fts/servlets/captcha/CaptchaResult.java",
"license": "gpl-2.0",
"size": 2307
}
|
[
"com.opensymphony.xwork2.ActionInvocation",
"java.awt.image.BufferedImage",
"javax.imageio.ImageIO",
"javax.servlet.http.HttpServletResponse"
] |
import com.opensymphony.xwork2.ActionInvocation; import java.awt.image.BufferedImage; import javax.imageio.ImageIO; import javax.servlet.http.HttpServletResponse;
|
import com.opensymphony.xwork2.*; import java.awt.image.*; import javax.imageio.*; import javax.servlet.http.*;
|
[
"com.opensymphony.xwork2",
"java.awt",
"javax.imageio",
"javax.servlet"
] |
com.opensymphony.xwork2; java.awt; javax.imageio; javax.servlet;
| 1,866,253
|
frmRegistro = new JFrame();
frmRegistro.setTitle("Registro");
frmRegistro.setBounds(100, 100, 501, 508);
frmRegistro.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frmRegistro.getContentPane().setLayout(null);
txtUsuario = new JTextField();
txtUsuario.setText("Usuario");
txtUsuario.setBounds(42, 39, 217, 20);
frmRegistro.getContentPane().add(txtUsuario);
txtUsuario.setColumns(10);
txtContrasena = new JTextField();
txtContrasena.setText("Contrase\u00F1a");
txtContrasena.setColumns(10);
txtContrasena.setBounds(42, 70, 217, 20);
frmRegistro.getContentPane().add(txtContrasena);
txtemail = new JTextField();
txtemail.setText("E-mail");
txtemail.setColumns(10);
txtemail.setBounds(42, 101, 217, 20);
frmRegistro.getContentPane().add(txtemail);
txtNCompleto = new JTextField();
txtNCompleto.setText("Nombre completo");
txtNCompleto.setColumns(10);
txtNCompleto.setBounds(42, 132, 217, 20);
frmRegistro.getContentPane().add(txtNCompleto);
btnCompletarRegistro= new JButton("Completar registro");
btnCompletarRegistro.setBounds(42, 408, 170, 40);
frmRegistro.getContentPane().add(btnCompletarRegistro);
lblCamposObligatorios = new JLabel("Campos obligatorios *");
lblCamposObligatorios.setBounds(42, 14, 170, 14);
frmRegistro.getContentPane().add(lblCamposObligatorios);
label_1 = new JLabel("*");
label_1.setBounds(265, 70, 46, 14);
frmRegistro.getContentPane().add(label_1);
label_2 = new JLabel("*");
label_2.setBounds(265, 101, 46, 14);
frmRegistro.getContentPane().add(label_2);
label_3 = new JLabel("*");
label_3.setBounds(265, 39, 46, 14);
frmRegistro.getContentPane().add(label_3);
JRadioButton rdbtnH = new JRadioButton("H");
buttonGroup.add(rdbtnH);
rdbtnH.setBounds(298, 82, 55, 25);
frmRegistro.getContentPane().add(rdbtnH);
JRadioButton rdbtnM = new JRadioButton("M");
buttonGroup.add(rdbtnM);
rdbtnM.setBounds(357, 82, 61, 25);
frmRegistro.getContentPane().add(rdbtnM);
JCalendar calendar = new JCalendar();
calendar.setBounds(42, 194, 402, 155);
frmRegistro.getContentPane().add(calendar);
JLabel lblFechaNacimiento = new JLabel("Fecha de nacimiento:");
lblFechaNacimiento.setBounds(42, 165, 217, 16);
frmRegistro.getContentPane().add(lblFechaNacimiento);
JCheckBox chckbxQuiereRecibirNoticias = new JCheckBox("Recibir ofertas?");
chckbxQuiereRecibirNoticias.setSelected(true);
chckbxQuiereRecibirNoticias.setToolTipText("");
chckbxQuiereRecibirNoticias.setBounds(298, 109, 146, 46);
frmRegistro.getContentPane().add(chckbxQuiereRecibirNoticias);
JLabel lblHombreOMujer = new JLabel("Hombre o mujer?");
lblHombreOMujer.setBounds(298, 57, 146, 16);
frmRegistro.getContentPane().add(lblHombreOMujer);
JLabel label = new JLabel("*");
label.setBounds(265, 134, 46, 14);
frmRegistro.getContentPane().add(label);
|
frmRegistro = new JFrame(); frmRegistro.setTitle(STR); frmRegistro.setBounds(100, 100, 501, 508); frmRegistro.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frmRegistro.getContentPane().setLayout(null); txtUsuario = new JTextField(); txtUsuario.setText(STR); txtUsuario.setBounds(42, 39, 217, 20); frmRegistro.getContentPane().add(txtUsuario); txtUsuario.setColumns(10); txtContrasena = new JTextField(); txtContrasena.setText(STR); txtContrasena.setColumns(10); txtContrasena.setBounds(42, 70, 217, 20); frmRegistro.getContentPane().add(txtContrasena); txtemail = new JTextField(); txtemail.setText(STR); txtemail.setColumns(10); txtemail.setBounds(42, 101, 217, 20); frmRegistro.getContentPane().add(txtemail); txtNCompleto = new JTextField(); txtNCompleto.setText(STR); txtNCompleto.setColumns(10); txtNCompleto.setBounds(42, 132, 217, 20); frmRegistro.getContentPane().add(txtNCompleto); btnCompletarRegistro= new JButton(STR); btnCompletarRegistro.setBounds(42, 408, 170, 40); frmRegistro.getContentPane().add(btnCompletarRegistro); lblCamposObligatorios = new JLabel(STR); lblCamposObligatorios.setBounds(42, 14, 170, 14); frmRegistro.getContentPane().add(lblCamposObligatorios); label_1 = new JLabel("*"); label_1.setBounds(265, 70, 46, 14); frmRegistro.getContentPane().add(label_1); label_2 = new JLabel("*"); label_2.setBounds(265, 101, 46, 14); frmRegistro.getContentPane().add(label_2); label_3 = new JLabel("*"); label_3.setBounds(265, 39, 46, 14); frmRegistro.getContentPane().add(label_3); JRadioButton rdbtnH = new JRadioButton("H"); buttonGroup.add(rdbtnH); rdbtnH.setBounds(298, 82, 55, 25); frmRegistro.getContentPane().add(rdbtnH); JRadioButton rdbtnM = new JRadioButton("M"); buttonGroup.add(rdbtnM); rdbtnM.setBounds(357, 82, 61, 25); frmRegistro.getContentPane().add(rdbtnM); JCalendar calendar = new JCalendar(); calendar.setBounds(42, 194, 402, 155); frmRegistro.getContentPane().add(calendar); JLabel lblFechaNacimiento = new JLabel(STR); lblFechaNacimiento.setBounds(42, 
165, 217, 16); frmRegistro.getContentPane().add(lblFechaNacimiento); JCheckBox chckbxQuiereRecibirNoticias = new JCheckBox(STR); chckbxQuiereRecibirNoticias.setSelected(true); chckbxQuiereRecibirNoticias.setToolTipText(STRHombre o mujer?STR*"); label.setBounds(265, 134, 46, 14); frmRegistro.getContentPane().add(label);
|
/**
* Initialize the contents of the frame.
*/
|
Initialize the contents of the frame
|
initialize
|
{
"repo_name": "victorkc/Java",
"path": "Traductor/src/Registro.java",
"license": "gpl-3.0",
"size": 5720
}
|
[
"com.toedter.calendar.JCalendar",
"javax.swing.JButton",
"javax.swing.JCheckBox",
"javax.swing.JFrame",
"javax.swing.JLabel",
"javax.swing.JRadioButton",
"javax.swing.JTextField"
] |
import com.toedter.calendar.JCalendar; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JRadioButton; import javax.swing.JTextField;
|
import com.toedter.calendar.*; import javax.swing.*;
|
[
"com.toedter.calendar",
"javax.swing"
] |
com.toedter.calendar; javax.swing;
| 876,737
|
@Aspect(advice = org.support.project.ormapping.transaction.Transaction.class)
public void delete(Integer user, Long no) {
LikeCommentsEntity db = selectOnKey(no);
db.setDeleteFlag(1);
db.setUpdateUser(user);
db.setUpdateDatetime(new Timestamp(DateUtils.now().getTime()));
physicalUpdate(db);
}
|
@Aspect(advice = org.support.project.ormapping.transaction.Transaction.class) void function(Integer user, Long no) { LikeCommentsEntity db = selectOnKey(no); db.setDeleteFlag(1); db.setUpdateUser(user); db.setUpdateDatetime(new Timestamp(DateUtils.now().getTime())); physicalUpdate(db); }
|
/**
* Delete.
* if delete flag is exists, the data is logical delete.
* set saved user id.
* @param user saved userid
* @param no no
*/
|
Delete. if delete flag is exists, the data is logical delete. set saved user id
|
delete
|
{
"repo_name": "support-project/knowledge",
"path": "src/main/java/org/support/project/knowledge/dao/gen/GenLikeCommentsDao.java",
"license": "apache-2.0",
"size": 16777
}
|
[
"java.sql.Timestamp",
"org.support.project.aop.Aspect",
"org.support.project.common.util.DateUtils",
"org.support.project.knowledge.entity.LikeCommentsEntity"
] |
import java.sql.Timestamp; import org.support.project.aop.Aspect; import org.support.project.common.util.DateUtils; import org.support.project.knowledge.entity.LikeCommentsEntity;
|
import java.sql.*; import org.support.project.aop.*; import org.support.project.common.util.*; import org.support.project.knowledge.entity.*;
|
[
"java.sql",
"org.support.project"
] |
java.sql; org.support.project;
| 1,450,160
|
List<Path> userJars = jobGraph.getUserJars();
Collection<Tuple2<String, Path>> userArtifacts = jobGraph.getUserArtifacts().entrySet().stream()
.map(entry -> Tuple2.of(entry.getKey(), new Path(entry.getValue().filePath)))
.collect(Collectors.toList());
uploadJobGraphFiles(jobGraph, userJars, userArtifacts, clientSupplier);
}
|
List<Path> userJars = jobGraph.getUserJars(); Collection<Tuple2<String, Path>> userArtifacts = jobGraph.getUserArtifacts().entrySet().stream() .map(entry -> Tuple2.of(entry.getKey(), new Path(entry.getValue().filePath))) .collect(Collectors.toList()); uploadJobGraphFiles(jobGraph, userJars, userArtifacts, clientSupplier); }
|
/**
* Extracts all files required for the execution from the given {@link JobGraph} and uploads them using the {@link BlobClient}
* from the given {@link Supplier}.
*
* @param jobGraph jobgraph requiring files
* @param clientSupplier supplier of blob client to upload files with
* @throws FlinkException if the upload fails
*/
|
Extracts all files required for the execution from the given <code>JobGraph</code> and uploads them using the <code>BlobClient</code> from the given <code>Supplier</code>
|
extractAndUploadJobGraphFiles
|
{
"repo_name": "hequn8128/flink",
"path": "flink-runtime/src/main/java/org/apache/flink/runtime/client/ClientUtils.java",
"license": "apache-2.0",
"size": 6099
}
|
[
"java.util.Collection",
"java.util.List",
"java.util.stream.Collectors",
"org.apache.flink.api.java.tuple.Tuple2",
"org.apache.flink.core.fs.Path"
] |
import java.util.Collection; import java.util.List; import java.util.stream.Collectors; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.core.fs.Path;
|
import java.util.*; import java.util.stream.*; import org.apache.flink.api.java.tuple.*; import org.apache.flink.core.fs.*;
|
[
"java.util",
"org.apache.flink"
] |
java.util; org.apache.flink;
| 52,788
|
boolean removeProperty(String relPath) throws RepositoryException;
|
boolean removeProperty(String relPath) throws RepositoryException;
|
/**
* Removes the property with the given name.
*
* @param relPath Relative path (or name) of the property to be removed.
* @return true If the property at the specified relPath was successfully
* removed; false if no such property was present.
* @throws RepositoryException If an error occurs.
*/
|
Removes the property with the given name
|
removeProperty
|
{
"repo_name": "SylvesterAbreu/jackrabbit",
"path": "jackrabbit-api/src/main/java/org/apache/jackrabbit/api/security/user/Authorizable.java",
"license": "apache-2.0",
"size": 8062
}
|
[
"javax.jcr.RepositoryException"
] |
import javax.jcr.RepositoryException;
|
import javax.jcr.*;
|
[
"javax.jcr"
] |
javax.jcr;
| 1,769,801
|
@Test
public void testClearPropertyDirectNoAbstractConf() {
final Configuration wrapped = EasyMock.createMock(Configuration.class);
final String key = "test.property";
wrapped.clearProperty(key);
EasyMock.replay(wrapped);
conf = new DataConfiguration(wrapped);
conf.clearPropertyDirect(key);
EasyMock.verify(wrapped);
}
|
void function() { final Configuration wrapped = EasyMock.createMock(Configuration.class); final String key = STR; wrapped.clearProperty(key); EasyMock.replay(wrapped); conf = new DataConfiguration(wrapped); conf.clearPropertyDirect(key); EasyMock.verify(wrapped); }
|
/**
* Tests clearPropertyDirect() if the wrapped configuration does not extend AbstractConfiguration.
*/
|
Tests clearPropertyDirect() if the wrapped configuration does not extend AbstractConfiguration
|
testClearPropertyDirectNoAbstractConf
|
{
"repo_name": "apache/commons-configuration",
"path": "src/test/java/org/apache/commons/configuration2/TestDataConfiguration.java",
"license": "apache-2.0",
"size": 89202
}
|
[
"org.easymock.EasyMock"
] |
import org.easymock.EasyMock;
|
import org.easymock.*;
|
[
"org.easymock"
] |
org.easymock;
| 1,410,854
|
final void emittingChild(final EffectiveModelContext schema, final JsonWriter writer) throws IOException {
checkState(!inChild, "Duplicate child encountered");
emitMyself(schema, writer);
inChild = true;
}
|
final void emittingChild(final EffectiveModelContext schema, final JsonWriter writer) throws IOException { checkState(!inChild, STR); emitMyself(schema, writer); inChild = true; }
|
/**
* Invoked whenever a child node is being emitted. Checks whether this node has
* been emitted, and takes care of that if necessary. Also makes sure separator
* is emitted before a second and subsequent child.
*
* @param schema Schema context
* @param writer Output writer
* @throws IOException when writer reports it
*/
|
Invoked whenever a child node is being emitted. Checks whether this node has been emitted, and takes care of that if necessary. Also makes sure separator is emitted before a second and subsequent child
|
emittingChild
|
{
"repo_name": "opendaylight/yangtools",
"path": "codec/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONStreamWriterContext.java",
"license": "epl-1.0",
"size": 6296
}
|
[
"com.google.common.base.Preconditions",
"com.google.gson.stream.JsonWriter",
"java.io.IOException",
"org.opendaylight.yangtools.yang.model.api.EffectiveModelContext"
] |
import com.google.common.base.Preconditions; import com.google.gson.stream.JsonWriter; import java.io.IOException; import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
|
import com.google.common.base.*; import com.google.gson.stream.*; import java.io.*; import org.opendaylight.yangtools.yang.model.api.*;
|
[
"com.google.common",
"com.google.gson",
"java.io",
"org.opendaylight.yangtools"
] |
com.google.common; com.google.gson; java.io; org.opendaylight.yangtools;
| 1,791,785
|
public int DibFree(int dib) {
return Global.dibFree(dib);
}
|
int function(int dib) { return Global.dibFree(dib); }
|
/**
* free dib object.
*/
|
free dib object
|
DibFree
|
{
"repo_name": "gearit/RadaeePDF-B4A",
"path": "Source/Wrapper/RSPDFViewer/src/com/rootsoft/rspdfviewer/pdf/RSPDFGlobal.java",
"license": "apache-2.0",
"size": 9086
}
|
[
"com.radaee.pdf.Global"
] |
import com.radaee.pdf.Global;
|
import com.radaee.pdf.*;
|
[
"com.radaee.pdf"
] |
com.radaee.pdf;
| 2,002,185
|
public void setTranslation(final Point2D p) {
setTranslation(p.getX(), p.getY());
}
|
void function(final Point2D p) { setTranslation(p.getX(), p.getY()); }
|
/**
* Set the text translation offset to point p.
*
* @param p The translation offset.
*/
|
Set the text translation offset to point p
|
setTranslation
|
{
"repo_name": "tangentforks/piccolo2d.java",
"path": "swt/src/main/java/org/piccolo2d/extras/swt/PSWTText.java",
"license": "bsd-3-clause",
"size": 14898
}
|
[
"java.awt.geom.Point2D"
] |
import java.awt.geom.Point2D;
|
import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 1,951,040
|
@Override
protected void setLineInfo(Line line, LineInfo info) {
line.setSettingValue(yealinkConstants.USER_ID_V6X_SETTING, info.getUserId());
line.setSettingValue(yealinkConstants.DISPLAY_NAME_V6X_SETTING, info.getDisplayName());
line.setSettingValue(yealinkConstants.PASSWORD_V6X_SETTING, info.getPassword());
line.setSettingValue(yealinkConstants.REGISTRATION_SERVER_HOST_V6X_SETTING, info.getRegistrationServer());
line.setSettingValue(yealinkConstants.REGISTRATION_SERVER_PORT_V6X_SETTING, info.getRegistrationServerPort());
line.setSettingValue(yealinkConstants.VOICE_MAIL_NUMBER_V6X_SETTING, info.getVoiceMail());
}
|
void function(Line line, LineInfo info) { line.setSettingValue(yealinkConstants.USER_ID_V6X_SETTING, info.getUserId()); line.setSettingValue(yealinkConstants.DISPLAY_NAME_V6X_SETTING, info.getDisplayName()); line.setSettingValue(yealinkConstants.PASSWORD_V6X_SETTING, info.getPassword()); line.setSettingValue(yealinkConstants.REGISTRATION_SERVER_HOST_V6X_SETTING, info.getRegistrationServer()); line.setSettingValue(yealinkConstants.REGISTRATION_SERVER_PORT_V6X_SETTING, info.getRegistrationServerPort()); line.setSettingValue(yealinkConstants.VOICE_MAIL_NUMBER_V6X_SETTING, info.getVoiceMail()); }
|
/**
* Each subclass must decide how as much of this generic line information translates into its
* own setting model.
*/
|
Each subclass must decide how as much of this generic line information translates into its own setting model
|
setLineInfo
|
{
"repo_name": "siplabs/sipX3CXPhone",
"path": "src/org/sipfoundry/sipxconfig/phone/3cx/3cxPhone.java",
"license": "lgpl-3.0",
"size": 8121
}
|
[
"org.sipfoundry.sipxconfig.phone.Line",
"org.sipfoundry.sipxconfig.phone.LineInfo"
] |
import org.sipfoundry.sipxconfig.phone.Line; import org.sipfoundry.sipxconfig.phone.LineInfo;
|
import org.sipfoundry.sipxconfig.phone.*;
|
[
"org.sipfoundry.sipxconfig"
] |
org.sipfoundry.sipxconfig;
| 1,633,804
|
void setTimeSeries(String metric, Map<String, String> tags,
Aggregator function, boolean rate) throws NoSuchUniqueName;
|
void setTimeSeries(String metric, Map<String, String> tags, Aggregator function, boolean rate) throws NoSuchUniqueName;
|
/**
* Sets the time series to the query.
* @param metric The metric to retreive from the TSDB.
* @param tags The set of tags of interest.
* @param function The aggregation function to use.
* @param rate If true, the rate of the series will be used instead of the
* actual values.
* @throws NoSuchUniqueName if the name of a metric, or a tag name/value
* does not exist.
*/
|
Sets the time series to the query
|
setTimeSeries
|
{
"repo_name": "pepperdata/opentsdb",
"path": "opentsdb-core/src/main/java/net/opentsdb/core/Query.java",
"license": "lgpl-2.1",
"size": 8885
}
|
[
"java.util.Map",
"net.opentsdb.uid.NoSuchUniqueName"
] |
import java.util.Map; import net.opentsdb.uid.NoSuchUniqueName;
|
import java.util.*; import net.opentsdb.uid.*;
|
[
"java.util",
"net.opentsdb.uid"
] |
java.util; net.opentsdb.uid;
| 2,205,611
|
public void setSeriesShapesVisible(int series, boolean visible) {
setSeriesShapesVisible(series, BooleanUtilities.valueOf(visible));
}
|
void function(int series, boolean visible) { setSeriesShapesVisible(series, BooleanUtilities.valueOf(visible)); }
|
/**
* Sets the 'shapes visible' flag for a series and sends a
* {@link RendererChangeEvent} to all registered listeners.
*
* @param series the series index (zero-based).
* @param visible the flag.
*
* @see #getSeriesShapesVisible(int)
*/
|
Sets the 'shapes visible' flag for a series and sends a <code>RendererChangeEvent</code> to all registered listeners
|
setSeriesShapesVisible
|
{
"repo_name": "raincs13/phd",
"path": "source/org/jfree/chart/renderer/category/LineAndShapeRenderer.java",
"license": "lgpl-2.1",
"size": 38555
}
|
[
"org.jfree.util.BooleanUtilities"
] |
import org.jfree.util.BooleanUtilities;
|
import org.jfree.util.*;
|
[
"org.jfree.util"
] |
org.jfree.util;
| 2,300,914
|
static File createRandomDataFile(String dir, String fileName, long size)
throws IOException {
File tmpDir = new File(dir);
FileUtils.forceMkdir(tmpDir);
File tmpFile = new File(tmpDir, fileName);
try (FileOutputStream randFile = new FileOutputStream(tmpFile)) {
Random r = new Random();
for (int x = 0; x < size; x++) {
char c = (char) (r.nextInt(26) + 'a');
randFile.write(c);
}
} catch (IOException e) {
fail(e.getMessage());
}
return tmpFile;
}
|
static File createRandomDataFile(String dir, String fileName, long size) throws IOException { File tmpDir = new File(dir); FileUtils.forceMkdir(tmpDir); File tmpFile = new File(tmpDir, fileName); try (FileOutputStream randFile = new FileOutputStream(tmpFile)) { Random r = new Random(); for (int x = 0; x < size; x++) { char c = (char) (r.nextInt(26) + 'a'); randFile.write(c); } } catch (IOException e) { fail(e.getMessage()); } return tmpFile; }
|
/**
* Creates a file with Random Data.
*
* @return File.
*/
|
Creates a file with Random Data
|
createRandomDataFile
|
{
"repo_name": "szegedim/hadoop",
"path": "hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java",
"license": "apache-2.0",
"size": 27543
}
|
[
"java.io.File",
"java.io.FileOutputStream",
"java.io.IOException",
"java.util.Random",
"org.apache.commons.io.FileUtils",
"org.junit.Assert"
] |
import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.Random; import org.apache.commons.io.FileUtils; import org.junit.Assert;
|
import java.io.*; import java.util.*; import org.apache.commons.io.*; import org.junit.*;
|
[
"java.io",
"java.util",
"org.apache.commons",
"org.junit"
] |
java.io; java.util; org.apache.commons; org.junit;
| 1,732,322
|
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
|
List<Criteria> function() { return oredCriteria; }
|
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AUTHOR
*
* @mbggenerated
*/
|
This method was generated by MyBatis Generator. This method corresponds to the database table AUTHOR
|
getOredCriteria
|
{
"repo_name": "konum/mybatis-dynamicJoin",
"path": "src/main/java/model/AuthorExample.java",
"license": "gpl-2.0",
"size": 12322
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,657,262
|
public void close() {
if (!this.isConnected()) {
throw new IllegalStateException("XBee is not connected");
}
// shutdown parser thread
if (parser != null) {
parser.setDone(true);
// interrupts thread, if waiting. does not interrupt thread if blocking on read
// serial port close will be closed prior to thread exit
parser.interrupt();
}
try {
// xbeeConnection.getOutputStream().close();
xbeeConnection.close();
} catch (IOException e) {
log.warn("Failed to close connection", e);
}
this.type = null;
parser = null;
xbeeConnection = null;
}
|
void function() { if (!this.isConnected()) { throw new IllegalStateException(STR); } if (parser != null) { parser.setDone(true); parser.interrupt(); } try { xbeeConnection.close(); } catch (IOException e) { log.warn(STR, e); } this.type = null; parser = null; xbeeConnection = null; }
|
/**
* Shuts down RXTX and packet parser thread
*/
|
Shuts down RXTX and packet parser thread
|
close
|
{
"repo_name": "rakeshpatil1983/xbee-api",
"path": "src/com/rapplogic/xbee/api/XBee.java",
"license": "gpl-3.0",
"size": 19253
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 863,483
|
public void waitActive() throws IOException {
for (int index = 0; index < nameNodes.length; index++) {
int failedCount = 0;
while (true) {
try {
waitActive(index);
break;
} catch (IOException e) {
failedCount++;
// Cached RPC connection to namenode, if any, is expected to fail once
if (failedCount > 1) {
LOG.warn("Tried waitActive() " + failedCount
+ " time(s) and failed, giving up. "
+ StringUtils.stringifyException(e));
throw e;
}
}
}
}
LOG.info("Cluster is active");
}
|
void function() throws IOException { for (int index = 0; index < nameNodes.length; index++) { int failedCount = 0; while (true) { try { waitActive(index); break; } catch (IOException e) { failedCount++; if (failedCount > 1) { LOG.warn(STR + failedCount + STR + StringUtils.stringifyException(e)); throw e; } } } } LOG.info(STR); }
|
/**
* Wait until the cluster is active and running.
*/
|
Wait until the cluster is active and running
|
waitActive
|
{
"repo_name": "ict-carch/hadoop-plus",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 85900
}
|
[
"java.io.IOException",
"org.apache.hadoop.util.StringUtils"
] |
import java.io.IOException; import org.apache.hadoop.util.StringUtils;
|
import java.io.*; import org.apache.hadoop.util.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 73,701
|
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
Object... nodeOutputs) throws SemanticException {
TableScanOperator op = (TableScanOperator) nd;
GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
ctx.reset();
ParseContext parseCtx = ctx.getParseCtx();
Table table = op.getConf().getTableMetadata();
Class<? extends InputFormat> inputFormat = table.getInputFormatClass();
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
// create a dummy MapReduce task
MapredWork currWork = GenMapRedUtils.getMapRedWork(parseCtx);
MapRedTask currTask = (MapRedTask) TaskFactory.get(currWork);
ctx.setCurrTask(currTask);
ctx.setCurrTopOp(op);
for (String alias : parseCtx.getTopOps().keySet()) {
Operator<? extends OperatorDesc> currOp = parseCtx.getTopOps().get(alias);
if (currOp == op) {
String currAliasId = alias;
ctx.setCurrAliasId(currAliasId);
mapCurrCtx.put(op, new GenMapRedCtx(currTask, currAliasId));
if (parseCtx.getQueryProperties().isAnalyzeCommand()) {
boolean noScan = parseCtx.getQueryProperties().isNoScanAnalyzeCommand();
if (BasicStatsNoJobTask.canUseBasicStats(table, inputFormat)) {
// For ORC and Parquet, all the following statements are the same
// ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS
// ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS noscan;
// There will not be any MR or Tez job above this task
StatsWork statWork = new StatsWork(table, parseCtx.getConf());
statWork.setFooterScan();
// If partition is specified, get pruned partition list
Set<Partition> confirmedParts = GenMapRedUtils.getConfirmedPartitionsForScan(op);
if (confirmedParts.size() > 0) {
List<String> partCols = GenMapRedUtils.getPartitionColumns(op);
PrunedPartitionList partList = new PrunedPartitionList(table, confirmedParts, partCols, false);
statWork.addInputPartitions(partList.getPartitions());
}
Task<StatsWork> snjTask = TaskFactory.get(statWork);
ctx.setCurrTask(snjTask);
ctx.setCurrTopOp(null);
ctx.getRootTasks().clear();
ctx.getRootTasks().add(snjTask);
} else {
// ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS;
// The plan consists of a simple MapRedTask followed by a StatsTask.
// The MR task is just a simple TableScanOperator
BasicStatsWork statsWork = new BasicStatsWork(table.getTableSpec());
statsWork.setIsExplicitAnalyze(true);
statsWork.setNoScanAnalyzeCommand(noScan);
StatsWork columnStatsWork = new StatsWork(table, statsWork, parseCtx.getConf());
columnStatsWork.collectStatsFromAggregator(op.getConf());
columnStatsWork.setSourceTask(currTask);
Task<StatsWork> columnStatsTask = TaskFactory.get(columnStatsWork);
currTask.addDependentTask(columnStatsTask);
if (!ctx.getRootTasks().contains(currTask)) {
ctx.getRootTasks().add(currTask);
}
// ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS noscan;
// The plan consists of a StatsTask only.
if (noScan) {
columnStatsTask.setParentTasks(null);
ctx.getRootTasks().remove(currTask);
ctx.getRootTasks().add(columnStatsTask);
}
currWork.getMapWork().setGatheringStats(true);
if (currWork.getReduceWork() != null) {
currWork.getReduceWork().setGatheringStats(true);
}
// NOTE: here we should use the new partition predicate pushdown API to get a list of
// pruned list,
// and pass it to setTaskPlan as the last parameter
Set<Partition> confirmedPartns = GenMapRedUtils
.getConfirmedPartitionsForScan(op);
if (confirmedPartns.size() > 0) {
List<String> partCols = GenMapRedUtils.getPartitionColumns(op);
PrunedPartitionList partList = new PrunedPartitionList(table, confirmedPartns, partCols, false);
GenMapRedUtils.setTaskPlan(currAliasId, op, currTask, false, ctx, partList);
} else { // non-partitioned table
GenMapRedUtils.setTaskPlan(currAliasId, op, currTask, false, ctx);
}
}
}
return true;
}
}
assert false;
return null;
}
|
Object function(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException { TableScanOperator op = (TableScanOperator) nd; GenMRProcContext ctx = (GenMRProcContext) opProcCtx; ctx.reset(); ParseContext parseCtx = ctx.getParseCtx(); Table table = op.getConf().getTableMetadata(); Class<? extends InputFormat> inputFormat = table.getInputFormatClass(); Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx(); MapredWork currWork = GenMapRedUtils.getMapRedWork(parseCtx); MapRedTask currTask = (MapRedTask) TaskFactory.get(currWork); ctx.setCurrTask(currTask); ctx.setCurrTopOp(op); for (String alias : parseCtx.getTopOps().keySet()) { Operator<? extends OperatorDesc> currOp = parseCtx.getTopOps().get(alias); if (currOp == op) { String currAliasId = alias; ctx.setCurrAliasId(currAliasId); mapCurrCtx.put(op, new GenMapRedCtx(currTask, currAliasId)); if (parseCtx.getQueryProperties().isAnalyzeCommand()) { boolean noScan = parseCtx.getQueryProperties().isNoScanAnalyzeCommand(); if (BasicStatsNoJobTask.canUseBasicStats(table, inputFormat)) { StatsWork statWork = new StatsWork(table, parseCtx.getConf()); statWork.setFooterScan(); Set<Partition> confirmedParts = GenMapRedUtils.getConfirmedPartitionsForScan(op); if (confirmedParts.size() > 0) { List<String> partCols = GenMapRedUtils.getPartitionColumns(op); PrunedPartitionList partList = new PrunedPartitionList(table, confirmedParts, partCols, false); statWork.addInputPartitions(partList.getPartitions()); } Task<StatsWork> snjTask = TaskFactory.get(statWork); ctx.setCurrTask(snjTask); ctx.setCurrTopOp(null); ctx.getRootTasks().clear(); ctx.getRootTasks().add(snjTask); } else { BasicStatsWork statsWork = new BasicStatsWork(table.getTableSpec()); statsWork.setIsExplicitAnalyze(true); statsWork.setNoScanAnalyzeCommand(noScan); StatsWork columnStatsWork = new StatsWork(table, statsWork, parseCtx.getConf()); 
columnStatsWork.collectStatsFromAggregator(op.getConf()); columnStatsWork.setSourceTask(currTask); Task<StatsWork> columnStatsTask = TaskFactory.get(columnStatsWork); currTask.addDependentTask(columnStatsTask); if (!ctx.getRootTasks().contains(currTask)) { ctx.getRootTasks().add(currTask); } if (noScan) { columnStatsTask.setParentTasks(null); ctx.getRootTasks().remove(currTask); ctx.getRootTasks().add(columnStatsTask); } currWork.getMapWork().setGatheringStats(true); if (currWork.getReduceWork() != null) { currWork.getReduceWork().setGatheringStats(true); } Set<Partition> confirmedPartns = GenMapRedUtils .getConfirmedPartitionsForScan(op); if (confirmedPartns.size() > 0) { List<String> partCols = GenMapRedUtils.getPartitionColumns(op); PrunedPartitionList partList = new PrunedPartitionList(table, confirmedPartns, partCols, false); GenMapRedUtils.setTaskPlan(currAliasId, op, currTask, false, ctx, partList); } else { GenMapRedUtils.setTaskPlan(currAliasId, op, currTask, false, ctx); } } } return true; } } assert false; return null; }
|
/**
* Table Sink encountered.
* @param nd
* the table sink operator encountered
* @param opProcCtx
* context
*/
|
Table Sink encountered
|
process
|
{
"repo_name": "sankarh/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java",
"license": "apache-2.0",
"size": 6994
}
|
[
"java.util.List",
"java.util.Map",
"java.util.Set",
"java.util.Stack",
"org.apache.hadoop.hive.ql.exec.Operator",
"org.apache.hadoop.hive.ql.exec.TableScanOperator",
"org.apache.hadoop.hive.ql.exec.Task",
"org.apache.hadoop.hive.ql.exec.TaskFactory",
"org.apache.hadoop.hive.ql.exec.mr.MapRedTask",
"org.apache.hadoop.hive.ql.lib.Node",
"org.apache.hadoop.hive.ql.lib.NodeProcessorCtx",
"org.apache.hadoop.hive.ql.metadata.Partition",
"org.apache.hadoop.hive.ql.metadata.Table",
"org.apache.hadoop.hive.ql.optimizer.GenMRProcContext",
"org.apache.hadoop.hive.ql.parse.ParseContext",
"org.apache.hadoop.hive.ql.parse.PrunedPartitionList",
"org.apache.hadoop.hive.ql.parse.SemanticException",
"org.apache.hadoop.hive.ql.plan.BasicStatsWork",
"org.apache.hadoop.hive.ql.plan.MapredWork",
"org.apache.hadoop.hive.ql.plan.OperatorDesc",
"org.apache.hadoop.hive.ql.plan.StatsWork",
"org.apache.hadoop.hive.ql.stats.BasicStatsNoJobTask",
"org.apache.hadoop.mapred.InputFormat"
] |
import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.BasicStatsWork; import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.StatsWork; import org.apache.hadoop.hive.ql.stats.BasicStatsNoJobTask; import org.apache.hadoop.mapred.InputFormat;
|
import java.util.*; import org.apache.hadoop.hive.ql.exec.*; import org.apache.hadoop.hive.ql.exec.mr.*; import org.apache.hadoop.hive.ql.lib.*; import org.apache.hadoop.hive.ql.metadata.*; import org.apache.hadoop.hive.ql.optimizer.*; import org.apache.hadoop.hive.ql.parse.*; import org.apache.hadoop.hive.ql.plan.*; import org.apache.hadoop.hive.ql.stats.*; import org.apache.hadoop.mapred.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 27,589
|
@Test
public void test()
throws Exception {
buildSegment(SEGMENT_DIR_NAME, SEGMENT_NAME);
File indexDir = new File(SEGMENT_DIR_NAME, SEGMENT_NAME);
SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(indexDir);
ColumnMetadata columnMetadata = segmentMetadata.getColumnMetadataFor(COLUMN_NAME);
SegmentDirectory segmentDirectory = SegmentDirectory.createFromLocalFS(indexDir, segmentMetadata, ReadMode.mmap);
SegmentDirectory.Reader segmentReader = segmentDirectory.createReader();
ImmutableDictionaryReader offHeapDictionary =
ColumnIndexContainer.loadDictionary(columnMetadata, segmentReader, false);
ImmutableDictionaryReader onHeapDictionary =
ColumnIndexContainer.loadDictionary(columnMetadata, segmentReader, true);
int numElements = offHeapDictionary.length();
Assert.assertEquals(onHeapDictionary.length(), numElements, "Dictionary length mis-match");
for (int id = 0; id < numElements; id++) {
String expected = (String) offHeapDictionary.get(id);
Assert.assertEquals(onHeapDictionary.get(id), expected);
Assert.assertEquals(onHeapDictionary.getStringValue(id), offHeapDictionary.getStringValue(id));
Assert.assertEquals(onHeapDictionary.indexOf(expected), id);
}
Random random = new Random(System.nanoTime());
int batchSize = random.nextInt(onHeapDictionary.length());
int dictIds[] = new int[batchSize];
String[] actualValues = new String[batchSize];
String[] expectedValues = new String[batchSize];
for (int i = 0; i < 100; i++) {
for (int j = 0; j < batchSize; j++) {
dictIds[j] = random.nextInt(numElements);
}
onHeapDictionary.readStringValues(dictIds, 0, batchSize, actualValues, 0);
offHeapDictionary.readStringValues(dictIds, 0, batchSize, expectedValues, 0);
Assert.assertEquals(actualValues, expectedValues);
}
segmentReader.close();
}
|
void function() throws Exception { buildSegment(SEGMENT_DIR_NAME, SEGMENT_NAME); File indexDir = new File(SEGMENT_DIR_NAME, SEGMENT_NAME); SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(indexDir); ColumnMetadata columnMetadata = segmentMetadata.getColumnMetadataFor(COLUMN_NAME); SegmentDirectory segmentDirectory = SegmentDirectory.createFromLocalFS(indexDir, segmentMetadata, ReadMode.mmap); SegmentDirectory.Reader segmentReader = segmentDirectory.createReader(); ImmutableDictionaryReader offHeapDictionary = ColumnIndexContainer.loadDictionary(columnMetadata, segmentReader, false); ImmutableDictionaryReader onHeapDictionary = ColumnIndexContainer.loadDictionary(columnMetadata, segmentReader, true); int numElements = offHeapDictionary.length(); Assert.assertEquals(onHeapDictionary.length(), numElements, STR); for (int id = 0; id < numElements; id++) { String expected = (String) offHeapDictionary.get(id); Assert.assertEquals(onHeapDictionary.get(id), expected); Assert.assertEquals(onHeapDictionary.getStringValue(id), offHeapDictionary.getStringValue(id)); Assert.assertEquals(onHeapDictionary.indexOf(expected), id); } Random random = new Random(System.nanoTime()); int batchSize = random.nextInt(onHeapDictionary.length()); int dictIds[] = new int[batchSize]; String[] actualValues = new String[batchSize]; String[] expectedValues = new String[batchSize]; for (int i = 0; i < 100; i++) { for (int j = 0; j < batchSize; j++) { dictIds[j] = random.nextInt(numElements); } onHeapDictionary.readStringValues(dictIds, 0, batchSize, actualValues, 0); offHeapDictionary.readStringValues(dictIds, 0, batchSize, expectedValues, 0); Assert.assertEquals(actualValues, expectedValues); } segmentReader.close(); }
|
/**
* Builds a segment with one string column, and loads two version of its dictionaries, one with default
* off-heap {@link com.linkedin.pinot.core.segment.index.readers.StringDictionary} and another with
* {@link com.linkedin.pinot.core.segment.index.readers.OnHeapStringDictionary}.
*
* Tests all interfaces return the same result for the two dictionary implementations.
*
* @throws Exception
*/
|
Builds a segment with one string column, and loads two version of its dictionaries, one with default off-heap <code>com.linkedin.pinot.core.segment.index.readers.StringDictionary</code> and another with <code>com.linkedin.pinot.core.segment.index.readers.OnHeapStringDictionary</code>. Tests all interfaces return the same result for the two dictionary implementations
|
test
|
{
"repo_name": "sajavadi/pinot",
"path": "pinot-core/src/test/java/com/linkedin/pinot/segments/v1/creator/OnHeapStringDictionaryTest.java",
"license": "apache-2.0",
"size": 6507
}
|
[
"com.linkedin.pinot.common.segment.ReadMode",
"com.linkedin.pinot.core.segment.index.ColumnMetadata",
"com.linkedin.pinot.core.segment.index.SegmentMetadataImpl",
"com.linkedin.pinot.core.segment.index.column.ColumnIndexContainer",
"com.linkedin.pinot.core.segment.index.readers.ImmutableDictionaryReader",
"com.linkedin.pinot.core.segment.store.SegmentDirectory",
"java.io.File",
"java.util.Random",
"org.testng.Assert"
] |
import com.linkedin.pinot.common.segment.ReadMode; import com.linkedin.pinot.core.segment.index.ColumnMetadata; import com.linkedin.pinot.core.segment.index.SegmentMetadataImpl; import com.linkedin.pinot.core.segment.index.column.ColumnIndexContainer; import com.linkedin.pinot.core.segment.index.readers.ImmutableDictionaryReader; import com.linkedin.pinot.core.segment.store.SegmentDirectory; import java.io.File; import java.util.Random; import org.testng.Assert;
|
import com.linkedin.pinot.common.segment.*; import com.linkedin.pinot.core.segment.index.*; import com.linkedin.pinot.core.segment.index.column.*; import com.linkedin.pinot.core.segment.index.readers.*; import com.linkedin.pinot.core.segment.store.*; import java.io.*; import java.util.*; import org.testng.*;
|
[
"com.linkedin.pinot",
"java.io",
"java.util",
"org.testng"
] |
com.linkedin.pinot; java.io; java.util; org.testng;
| 99,448
|
public RouteTemplateDefinition templateBean(String name, Class<?> type, RouteTemplateContext.BeanSupplier<Object> bean) {
if (templateBeans == null) {
templateBeans = new ArrayList<>();
}
RouteTemplateBeanDefinition def = new RouteTemplateBeanDefinition();
def.setName(name);
def.setBeanType(type);
def.setBeanSupplier(bean);
templateBeans.add(def);
return this;
}
|
RouteTemplateDefinition function(String name, Class<?> type, RouteTemplateContext.BeanSupplier<Object> bean) { if (templateBeans == null) { templateBeans = new ArrayList<>(); } RouteTemplateBeanDefinition def = new RouteTemplateBeanDefinition(); def.setName(name); def.setBeanType(type); def.setBeanSupplier(bean); templateBeans.add(def); return this; }
|
/**
* Adds a local bean the route template uses
*
* @param name the name of the bean
* @param type the type of the bean to associate the binding
* @param bean a supplier for the bean
*/
|
Adds a local bean the route template uses
|
templateBean
|
{
"repo_name": "pax95/camel",
"path": "core/camel-core-model/src/main/java/org/apache/camel/model/RouteTemplateDefinition.java",
"license": "apache-2.0",
"size": 15828
}
|
[
"java.util.ArrayList",
"org.apache.camel.RouteTemplateContext"
] |
import java.util.ArrayList; import org.apache.camel.RouteTemplateContext;
|
import java.util.*; import org.apache.camel.*;
|
[
"java.util",
"org.apache.camel"
] |
java.util; org.apache.camel;
| 2,462,958
|
private boolean acquired(ValueStatus status) {
return status == ValueStatus.PENDING || status == ValueStatus.FLUSHED;
}
private static class StatefulValue<K, V> extends ReentrantReadWriteLock {
private static final long serialVersionUID = 0L;
@GridToStringInclude(sensitive = true)
private Entry<? extends K, ? extends V> val;
private StoreOperation storeOperation;
private ValueStatus valStatus;
private Condition flushCond = writeLock().newCondition();
private StatefulValue(Entry<? extends K, ? extends V> val, StoreOperation storeOperation) {
assert storeOperation == StoreOperation.PUT || storeOperation == StoreOperation.RMV;
this.val = val;
this.storeOperation = storeOperation;
valStatus = ValueStatus.NEW;
}
|
boolean function(ValueStatus status) { return status == ValueStatus.PENDING status == ValueStatus.FLUSHED; } private static class StatefulValue<K, V> extends ReentrantReadWriteLock { private static final long serialVersionUID = 0L; @GridToStringInclude(sensitive = true) private Entry<? extends K, ? extends V> val; private StoreOperation storeOperation; private ValueStatus valStatus; private Condition flushCond = writeLock().newCondition(); private StatefulValue(Entry<? extends K, ? extends V> val, StoreOperation storeOperation) { assert storeOperation == StoreOperation.PUT storeOperation == StoreOperation.RMV; this.val = val; this.storeOperation = storeOperation; valStatus = ValueStatus.NEW; }
|
/**
* Checks if given status indicates pending or complete flush operation.
*
* @param status Status to check.
* @return {@code true} if status indicates any pending or complete store update operation.
*/
|
Checks if given status indicates pending or complete flush operation
|
acquired
|
{
"repo_name": "ntikhonov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheWriteBehindStore.java",
"license": "apache-2.0",
"size": 48298
}
|
[
"java.util.concurrent.locks.Condition",
"java.util.concurrent.locks.ReentrantReadWriteLock",
"javax.cache.Cache",
"org.apache.ignite.internal.util.tostring.GridToStringInclude"
] |
import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.cache.Cache; import org.apache.ignite.internal.util.tostring.GridToStringInclude;
|
import java.util.concurrent.locks.*; import javax.cache.*; import org.apache.ignite.internal.util.tostring.*;
|
[
"java.util",
"javax.cache",
"org.apache.ignite"
] |
java.util; javax.cache; org.apache.ignite;
| 1,008,101
|
public static void removeHDFSFiles(String[] files) {
try {
FileSystem fs = FileSystem.get(conf);
for (String directory : files) {
Path dir = new Path(directory);
if (fs.exists(dir) && !fs.getFileStatus(dir).isDirectory()) {
fs.delete(dir, false);
}
}
} catch (IOException e) {
}
}
|
static void function(String[] files) { try { FileSystem fs = FileSystem.get(conf); for (String directory : files) { Path dir = new Path(directory); if (fs.exists(dir) && !fs.getFileStatus(dir).isDirectory()) { fs.delete(dir, false); } } } catch (IOException e) { } }
|
/**
* <p>
* Removes all the files specified in the array in HDFS
* </p>
*
* @param files
* files array
*/
|
Removes all the files specified in the array in HDFS
|
removeHDFSFiles
|
{
"repo_name": "Myasuka/systemml",
"path": "src/test/java/org/apache/sysml/test/utils/TestUtils.java",
"license": "apache-2.0",
"size": 65313
}
|
[
"java.io.IOException",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] |
import java.io.IOException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path;
|
import java.io.*; import org.apache.hadoop.fs.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,473,474
|
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
switch (id) {
case R.id.action_settings:
Intent settingsIntent = new Intent(this, SettingsActivity.class);
startActivityForResult(settingsIntent, server_settings);
return true;
case R.id.action_about:
Intent aboutIntent = new Intent(this, AboutActivity.class);
startActivityForResult(aboutIntent, about);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
|
boolean function(MenuItem item) { int id = item.getItemId(); switch (id) { case R.id.action_settings: Intent settingsIntent = new Intent(this, SettingsActivity.class); startActivityForResult(settingsIntent, server_settings); return true; case R.id.action_about: Intent aboutIntent = new Intent(this, AboutActivity.class); startActivityForResult(aboutIntent, about); return true; default: return super.onOptionsItemSelected(item); } }
|
/**
* Handels click events on the Buttons in the Action Bar.
*
* @param item MenuItem - the clicked menu item
* @return boolean
*/
|
Handels click events on the Buttons in the Action Bar
|
onOptionsItemSelected
|
{
"repo_name": "ageru/OwnCloud-Notes",
"path": "app/src/main/java/it/niedermann/owncloud/notes/android/activity/NotesListViewActivity.java",
"license": "gpl-3.0",
"size": 16847
}
|
[
"android.content.Intent",
"android.view.MenuItem"
] |
import android.content.Intent; import android.view.MenuItem;
|
import android.content.*; import android.view.*;
|
[
"android.content",
"android.view"
] |
android.content; android.view;
| 831,146
|
public static Storage.Builder
newStorageClient(GcsOptions options) {
String servicePath = options.getGcsEndpoint();
Storage.Builder storageBuilder = new Storage.Builder(getTransport(), getJsonFactory(),
chainHttpRequestInitializer(
options.getGcpCredential(),
// Do not log the code 404. Code up the stack will deal with 404's if needed, and
// logging it by default clutters the output during file staging.
new RetryHttpRequestInitializer(
ImmutableList.of(404), new UploadIdResponseInterceptor())))
.setApplicationName(options.getAppName())
.setGoogleClientRequestInitializer(options.getGoogleApiTrace());
if (servicePath != null) {
ApiComponents components = apiComponentsFromUrl(servicePath);
storageBuilder.setRootUrl(components.rootUrl);
storageBuilder.setServicePath(components.servicePath);
}
return storageBuilder;
}
|
static Storage.Builder function(GcsOptions options) { String servicePath = options.getGcsEndpoint(); Storage.Builder storageBuilder = new Storage.Builder(getTransport(), getJsonFactory(), chainHttpRequestInitializer( options.getGcpCredential(), new RetryHttpRequestInitializer( ImmutableList.of(404), new UploadIdResponseInterceptor()))) .setApplicationName(options.getAppName()) .setGoogleClientRequestInitializer(options.getGoogleApiTrace()); if (servicePath != null) { ApiComponents components = apiComponentsFromUrl(servicePath); storageBuilder.setRootUrl(components.rootUrl); storageBuilder.setServicePath(components.servicePath); } return storageBuilder; }
|
/**
* Returns a Cloud Storage client builder.
*
* <p>Note: this client's endpoint is <b>not</b> modified by the
* {@link DataflowPipelineDebugOptions#getApiRootUrl()} option.
*/
|
Returns a Cloud Storage client builder. Note: this client's endpoint is not modified by the <code>DataflowPipelineDebugOptions#getApiRootUrl()</code> option
|
newStorageClient
|
{
"repo_name": "shakamunyi/beam",
"path": "sdk/src/main/java/com/google/cloud/dataflow/sdk/util/Transport.java",
"license": "apache-2.0",
"size": 8012
}
|
[
"com.google.api.services.storage.Storage",
"com.google.cloud.dataflow.sdk.options.GcsOptions",
"com.google.common.collect.ImmutableList"
] |
import com.google.api.services.storage.Storage; import com.google.cloud.dataflow.sdk.options.GcsOptions; import com.google.common.collect.ImmutableList;
|
import com.google.api.services.storage.*; import com.google.cloud.dataflow.sdk.options.*; import com.google.common.collect.*;
|
[
"com.google.api",
"com.google.cloud",
"com.google.common"
] |
com.google.api; com.google.cloud; com.google.common;
| 2,600,941
|
protected IndexShard newShard(
ShardId shardId,
boolean primary,
String nodeId,
IndexMetadata indexMetadata,
@Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper,
Runnable globalCheckpointSyncer
) throws IOException {
ShardRouting shardRouting = TestShardRouting.newShardRouting(
shardId,
nodeId,
primary,
ShardRoutingState.INITIALIZING,
primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE
);
return newShard(
shardRouting,
indexMetadata,
readerWrapper,
new InternalEngineFactory(),
globalCheckpointSyncer,
RetentionLeaseSyncer.EMPTY
);
}
|
IndexShard function( ShardId shardId, boolean primary, String nodeId, IndexMetadata indexMetadata, @Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper, Runnable globalCheckpointSyncer ) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting( shardId, nodeId, primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ); return newShard( shardRouting, indexMetadata, readerWrapper, new InternalEngineFactory(), globalCheckpointSyncer, RetentionLeaseSyncer.EMPTY ); }
|
/**
* creates a new initializing shard. The shard will will be put in its proper path under the
* supplied node id.
*
* @param shardId the shard id to use
* @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica
* (ready to recover from another shard)
*/
|
creates a new initializing shard. The shard will will be put in its proper path under the supplied node id
|
newShard
|
{
"repo_name": "GlenRSmith/elasticsearch",
"path": "test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java",
"license": "apache-2.0",
"size": 45800
}
|
[
"java.io.IOException",
"org.apache.lucene.index.DirectoryReader",
"org.elasticsearch.cluster.metadata.IndexMetadata",
"org.elasticsearch.cluster.routing.RecoverySource",
"org.elasticsearch.cluster.routing.ShardRouting",
"org.elasticsearch.cluster.routing.ShardRoutingState",
"org.elasticsearch.cluster.routing.TestShardRouting",
"org.elasticsearch.core.CheckedFunction",
"org.elasticsearch.core.Nullable",
"org.elasticsearch.index.engine.InternalEngineFactory",
"org.elasticsearch.index.seqno.RetentionLeaseSyncer"
] |
import java.io.IOException; import org.apache.lucene.index.DirectoryReader; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.seqno.RetentionLeaseSyncer;
|
import java.io.*; import org.apache.lucene.index.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.core.*; import org.elasticsearch.index.engine.*; import org.elasticsearch.index.seqno.*;
|
[
"java.io",
"org.apache.lucene",
"org.elasticsearch.cluster",
"org.elasticsearch.core",
"org.elasticsearch.index"
] |
java.io; org.apache.lucene; org.elasticsearch.cluster; org.elasticsearch.core; org.elasticsearch.index;
| 52,682
|
List<InterfaceInfo> getSoftImplements() {
return Collections.unmodifiableList(this.getState().getSoftImplements());
}
|
List<InterfaceInfo> getSoftImplements() { return Collections.unmodifiableList(this.getState().getSoftImplements()); }
|
/**
* Get the soft implementations for this mixin
*/
|
Get the soft implementations for this mixin
|
getSoftImplements
|
{
"repo_name": "simon816/Mixin",
"path": "src/main/java/org/spongepowered/asm/mixin/transformer/MixinInfo.java",
"license": "mit",
"size": 40239
}
|
[
"java.util.Collections",
"java.util.List"
] |
import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,516,164
|
void reportWarning(NodeTraversal t, Node n, String name) {
compiler.report(t.makeError(n, IMPLICITLY_NULLABLE_JSDOC, name));
}
|
void reportWarning(NodeTraversal t, Node n, String name) { compiler.report(t.makeError(n, IMPLICITLY_NULLABLE_JSDOC, name)); }
|
/**
* Reports an implicitly nullable name in JSDoc warning.
*/
|
Reports an implicitly nullable name in JSDoc warning
|
reportWarning
|
{
"repo_name": "shantanusharma/closure-compiler",
"path": "src/com/google/javascript/jscomp/ImplicitNullabilityCheck.java",
"license": "apache-2.0",
"size": 4816
}
|
[
"com.google.javascript.rhino.Node"
] |
import com.google.javascript.rhino.Node;
|
import com.google.javascript.rhino.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 1,662,456
|
public int hashCode(Itemset itemsetObject){
int hashcode =0;
// for (int bit = bitset.nextSetBit(0); bit >= 0; bit = bitset.nextSetBit(bit+1)) {
// for each tid in the tidset
for (int tid=itemsetObject.tidset.nextSetBit(0); tid >= 0; tid = itemsetObject.tidset.nextSetBit(tid+1)) {
// make the sum
hashcode += tid;
}
// to fix the bug of overflowing the size of an integer
if(hashcode < 0){
hashcode = 0 - hashcode;
}
// make the modulo according to the size of the internal array
return (hashcode % size);
}
|
int function(Itemset itemsetObject){ int hashcode =0; for (int tid=itemsetObject.tidset.nextSetBit(0); tid >= 0; tid = itemsetObject.tidset.nextSetBit(tid+1)) { hashcode += tid; } if(hashcode < 0){ hashcode = 0 - hashcode; } return (hashcode % size); }
|
/**
* Calculate the hashcode of an itemset as the sum of the tids of its tids set,
* modulo the internal array length.
* @param itemsetObject an itemset.
* @return the hashcode (an integer)
*/
|
Calculate the hashcode of an itemset as the sum of the tids of its tids set, modulo the internal array length
|
hashCode
|
{
"repo_name": "Quanhua-Guan/spmf",
"path": "ca/pfv/spmf/algorithms/frequentpatterns/eclat_and_charm_bitset/HashTable.java",
"license": "gpl-3.0",
"size": 3716
}
|
[
"ca.pfv.spmf.patterns.itemset_set_integers_with_tids_bitset.Itemset"
] |
import ca.pfv.spmf.patterns.itemset_set_integers_with_tids_bitset.Itemset;
|
import ca.pfv.spmf.patterns.itemset_set_integers_with_tids_bitset.*;
|
[
"ca.pfv.spmf"
] |
ca.pfv.spmf;
| 1,400,044
|
protected static DriverService driverService() {
return driverService;
}
|
static DriverService function() { return driverService; }
|
/**
* Returns the currently bound driver service reference.
*
* @return driver service
*/
|
Returns the currently bound driver service reference
|
driverService
|
{
"repo_name": "Shashikanth-Huawei/bmp",
"path": "core/api/src/main/java/org/onosproject/net/AbstractProjectableModel.java",
"license": "apache-2.0",
"size": 6227
}
|
[
"org.onosproject.net.driver.DriverService"
] |
import org.onosproject.net.driver.DriverService;
|
import org.onosproject.net.driver.*;
|
[
"org.onosproject.net"
] |
org.onosproject.net;
| 2,151,249
|
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginReapply(String resourceGroupName, String vmName);
|
@ServiceMethod(returns = ReturnType.SINGLE) SyncPoller<PollResult<Void>, Void> beginReapply(String resourceGroupName, String vmName);
|
/**
* The operation to reapply a virtual machine's state.
*
* @param resourceGroupName The name of the resource group.
* @param vmName The name of the virtual machine.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
|
The operation to reapply a virtual machine's state
|
beginReapply
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/fluent/VirtualMachinesClient.java",
"license": "mit",
"size": 119505
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.polling.SyncPoller"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.polling.SyncPoller;
|
import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.polling.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 1,953,865
|
@Override
public Logger getParentLogger()
throws SQLFeatureNotSupportedException {
return dataSource.getParentLogger();
}
|
Logger function() throws SQLFeatureNotSupportedException { return dataSource.getParentLogger(); }
|
/**
* JAVADOC Method Level Comments
*
* @return JAVADOC.
* @throws SQLFeatureNotSupportedException JAVADOC.
*/
|
JAVADOC Method Level Comments
|
getParentLogger
|
{
"repo_name": "cucina/opencucina",
"path": "nosql/security/src/main/java/org/cucina/security/crypto/EncryptedCredentialsDataSource.java",
"license": "apache-2.0",
"size": 3957
}
|
[
"java.sql.SQLFeatureNotSupportedException",
"java.util.logging.Logger"
] |
import java.sql.SQLFeatureNotSupportedException; import java.util.logging.Logger;
|
import java.sql.*; import java.util.logging.*;
|
[
"java.sql",
"java.util"
] |
java.sql; java.util;
| 2,359,392
|
public BatchConfigurationInner withProperties(BatchConfigurationProperties properties) {
this.properties = properties;
return this;
}
|
BatchConfigurationInner function(BatchConfigurationProperties properties) { this.properties = properties; return this; }
|
/**
* Set the batch configuration properties.
*
* @param properties the properties value to set
* @return the BatchConfigurationInner object itself.
*/
|
Set the batch configuration properties
|
withProperties
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/logic/mgmt-v2018_07_01_preview/src/main/java/com/microsoft/azure/management/logic/v2018_07_01_preview/implementation/BatchConfigurationInner.java",
"license": "mit",
"size": 1394
}
|
[
"com.microsoft.azure.management.logic.v2018_07_01_preview.BatchConfigurationProperties"
] |
import com.microsoft.azure.management.logic.v2018_07_01_preview.BatchConfigurationProperties;
|
import com.microsoft.azure.management.logic.v2018_07_01_preview.*;
|
[
"com.microsoft.azure"
] |
com.microsoft.azure;
| 2,113,991
|
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<AppServiceEnvironmentResourceInner> createOrUpdateAsync(
String resourceGroupName, String name, AppServiceEnvironmentResourceInner hostingEnvironmentEnvelope) {
return beginCreateOrUpdateAsync(resourceGroupName, name, hostingEnvironmentEnvelope)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<AppServiceEnvironmentResourceInner> function( String resourceGroupName, String name, AppServiceEnvironmentResourceInner hostingEnvironmentEnvelope) { return beginCreateOrUpdateAsync(resourceGroupName, name, hostingEnvironmentEnvelope) .last() .flatMap(this.client::getLroFinalResultOrError); }
|
/**
* Create or update an App Service Environment.
*
* @param resourceGroupName Name of the resource group to which the resource belongs.
* @param name Name of the App Service Environment.
* @param hostingEnvironmentEnvelope Configuration details of the App Service Environment.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return app Service Environment ARM resource.
*/
|
Create or update an App Service Environment
|
createOrUpdateAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-appservice/src/main/java/com/azure/resourcemanager/appservice/implementation/AppServiceEnvironmentsClientImpl.java",
"license": "mit",
"size": 563770
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.appservice.fluent.models.AppServiceEnvironmentResourceInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.appservice.fluent.models.AppServiceEnvironmentResourceInner;
|
import com.azure.core.annotation.*; import com.azure.resourcemanager.appservice.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 2,642,832
|
CharSequence removeSuggestionSpans(CharSequence text) {
if (text instanceof Spanned) {
Spannable spannable;
if (text instanceof Spannable) {
spannable = (Spannable) text;
} else {
spannable = new SpannableString(text);
text = spannable;
}
SuggestionSpan[] spans = spannable.getSpans(0, text.length(), SuggestionSpan.class);
for (int i = 0; i < spans.length; i++) {
spannable.removeSpan(spans[i]);
}
}
return text;
}
/**
* Set the type of the content with a constant as defined for {@link EditorInfo#inputType}. This
* will take care of changing the key listener, by calling {@link #setKeyListener(KeyListener)},
* to match the given content type. If the given content type is {@link EditorInfo#TYPE_NULL}
|
CharSequence removeSuggestionSpans(CharSequence text) { if (text instanceof Spanned) { Spannable spannable; if (text instanceof Spannable) { spannable = (Spannable) text; } else { spannable = new SpannableString(text); text = spannable; } SuggestionSpan[] spans = spannable.getSpans(0, text.length(), SuggestionSpan.class); for (int i = 0; i < spans.length; i++) { spannable.removeSpan(spans[i]); } } return text; } /** * Set the type of the content with a constant as defined for {@link EditorInfo#inputType}. This * will take care of changing the key listener, by calling {@link #setKeyListener(KeyListener)}, * to match the given content type. If the given content type is {@link EditorInfo#TYPE_NULL}
|
/**
* Removes the suggestion spans.
*/
|
Removes the suggestion spans
|
removeSuggestionSpans
|
{
"repo_name": "OmniEvo/android_frameworks_base",
"path": "core/java/android/widget/TextView.java",
"license": "gpl-3.0",
"size": 380764
}
|
[
"android.text.Spannable",
"android.text.SpannableString",
"android.text.Spanned",
"android.text.method.KeyListener",
"android.text.style.SuggestionSpan",
"android.view.inputmethod.EditorInfo"
] |
import android.text.Spannable; import android.text.SpannableString; import android.text.Spanned; import android.text.method.KeyListener; import android.text.style.SuggestionSpan; import android.view.inputmethod.EditorInfo;
|
import android.text.*; import android.text.method.*; import android.text.style.*; import android.view.inputmethod.*;
|
[
"android.text",
"android.view"
] |
android.text; android.view;
| 2,627,220
|
public synchronized void write(byte b[], int off, int len)
throws IOException {
out.write(b, off, len);
incCount(len);
}
|
synchronized void function(byte b[], int off, int len) throws IOException { out.write(b, off, len); incCount(len); }
|
/**
* Writes <code>len</code> bytes from the specified byte array starting at
* offset <code>off</code> to the underlying output stream. If no exception
* is thrown, the counter <code>written</code> is incremented by
* <code>len</code>.
*
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
* @throws IOException if an I/O error occurs.
* @see FilterOutputStream#out
*/
|
Writes <code>len</code> bytes from the specified byte array starting at offset <code>off</code> to the underlying output stream. If no exception is thrown, the counter <code>written</code> is incremented by <code>len</code>
|
write
|
{
"repo_name": "m2049r/xmrwallet",
"path": "app/src/main/java/com/m2049r/levin/util/LittleEndianDataOutputStream.java",
"license": "apache-2.0",
"size": 14833
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,119,981
|
public void addRecipe(int timePerItem, ItemStack resource, ItemStack primary);
|
void function(int timePerItem, ItemStack resource, ItemStack primary);
|
/**
* Add a recipe to the centrifuge
*
* @param timePerItem
* Time to centrifugate one item of the given type
* @param resource
* ItemStack containing information on item id and damage. Stack size will be ignored.
* @param primary
* Primary product produced by centrifugating one item. Yield 100 %.
*/
|
Add a recipe to the centrifuge
|
addRecipe
|
{
"repo_name": "austinv11/DartCraft2",
"path": "src/api/java/forestry/api/recipes/ICentrifugeManager.java",
"license": "gpl-2.0",
"size": 3066
}
|
[
"net.minecraft.item.ItemStack"
] |
import net.minecraft.item.ItemStack;
|
import net.minecraft.item.*;
|
[
"net.minecraft.item"
] |
net.minecraft.item;
| 1,178,171
|
public void endReceiveHeader(WebResponse theResponse)
{
assertEquals("this is a response header",
theResponse.getConnection().getHeaderField("responseheader"));
}
//-------------------------------------------------------------------------
|
void function(WebResponse theResponse) { assertEquals(STR, theResponse.getConnection().getHeaderField(STR)); }
|
/**
* Verify that it is possible to send back a header and verify it on the
* client side.
*
* @param theResponse the response from the server side.
*/
|
Verify that it is possible to send back a header and verify it on the client side
|
endReceiveHeader
|
{
"repo_name": "simeshev/parabuild-ci",
"path": "3rdparty/cactus141j2ee13/sample-servlet/src/sample/org/apache/cactus/sample/TestSampleServlet.java",
"license": "lgpl-3.0",
"size": 16157
}
|
[
"org.apache.cactus.WebResponse"
] |
import org.apache.cactus.WebResponse;
|
import org.apache.cactus.*;
|
[
"org.apache.cactus"
] |
org.apache.cactus;
| 1,695,011
|
public DataWord getStorageValue(byte[] addr, DataWord key);
|
DataWord function(byte[] addr, DataWord key);
|
/**
* Retrieve storage value from an account for a given key
*
* @param addr of the account
* @param key associated with this value
* @return data in the form of a <code>DataWord</code>
*/
|
Retrieve storage value from an account for a given key
|
getStorageValue
|
{
"repo_name": "ethereumj/ethereumj",
"path": "ethereumj-core/src/main/java/org/ethereum/facade/Repository.java",
"license": "mit",
"size": 5371
}
|
[
"org.ethereum.vm.DataWord"
] |
import org.ethereum.vm.DataWord;
|
import org.ethereum.vm.*;
|
[
"org.ethereum.vm"
] |
org.ethereum.vm;
| 1,996,285
|
public static void putAudioScoreMax(ClusterSet clusterSet) {
logger.info("------ Use Audio ------");
boolean isCloseListCheck = parameter.getParameterNamedSpeaker().isCloseListCheck();
if (parameter.getParameterNamedSpeaker().isUseAudio()) {
double thr = parameter.getParameterNamedSpeaker().getThresholdAudio();
for (String name : clusterSet) {
Cluster cluster = clusterSet.getCluster(name);
ModelScores modelScores = cluster.getModelScores();
double max = -Double.MAX_VALUE;
String maxIdentity = "empty";
for (String identity : modelScores.keySet()) {
if (SpeakerNameUtils.checkSpeakerName(identity, isCloseListCheck, nameAndGenderMap, firstNameAndGenderMap) == true) {
if (checkGender(cluster, identity) == true) {
double score = modelScores.get(identity);
if ((score > thr) && (score > max)) {
max = score;
maxIdentity = identity;
}
}
}
}
if (max > -Double.MAX_VALUE) {
SpeakerName speakerName = cluster.getSpeakerName(SpeakerNameUtils.normalizeSpeakerName(maxIdentity));
speakerName.addScoreCluster(max);
speakerName.incrementScoreCluster(max);
logger.info("ACCEPT Audio MAX name max : " + cluster.getName() + ") --> " + maxIdentity + " = "
+ max + " (" + thr + ")");
}
}
}
}
|
static void function(ClusterSet clusterSet) { logger.info(STR); boolean isCloseListCheck = parameter.getParameterNamedSpeaker().isCloseListCheck(); if (parameter.getParameterNamedSpeaker().isUseAudio()) { double thr = parameter.getParameterNamedSpeaker().getThresholdAudio(); for (String name : clusterSet) { Cluster cluster = clusterSet.getCluster(name); ModelScores modelScores = cluster.getModelScores(); double max = -Double.MAX_VALUE; String maxIdentity = "empty"; for (String identity : modelScores.keySet()) { if (SpeakerNameUtils.checkSpeakerName(identity, isCloseListCheck, nameAndGenderMap, firstNameAndGenderMap) == true) { if (checkGender(cluster, identity) == true) { double score = modelScores.get(identity); if ((score > thr) && (score > max)) { max = score; maxIdentity = identity; } } } } if (max > -Double.MAX_VALUE) { SpeakerName speakerName = cluster.getSpeakerName(SpeakerNameUtils.normalizeSpeakerName(maxIdentity)); speakerName.addScoreCluster(max); speakerName.incrementScoreCluster(max); logger.info(STR + cluster.getName() + STR + maxIdentity + STR + max + STR + thr + ")"); } } } }
|
/**
* Put audio score max.
*
* @param clusterSet the cluster set
*/
|
Put audio score max
|
putAudioScoreMax
|
{
"repo_name": "Adirockzz95/GenderDetect",
"path": "src/src/fr/lium/experimental/spkDiarization/programs/SpeakerIdenificationDecision5.java",
"license": "gpl-3.0",
"size": 40631
}
|
[
"fr.lium.experimental.spkDiarization.libClusteringData.speakerName.SpeakerName",
"fr.lium.experimental.spkDiarization.libNamedSpeaker.SpeakerNameUtils",
"fr.lium.spkDiarization.libClusteringData.Cluster",
"fr.lium.spkDiarization.libClusteringData.ClusterSet",
"fr.lium.spkDiarization.libModel.ModelScores"
] |
import fr.lium.experimental.spkDiarization.libClusteringData.speakerName.SpeakerName; import fr.lium.experimental.spkDiarization.libNamedSpeaker.SpeakerNameUtils; import fr.lium.spkDiarization.libClusteringData.Cluster; import fr.lium.spkDiarization.libClusteringData.ClusterSet; import fr.lium.spkDiarization.libModel.ModelScores;
|
import fr.lium.*; import fr.lium.experimental.*;
|
[
"fr.lium",
"fr.lium.experimental"
] |
fr.lium; fr.lium.experimental;
| 1,867,523
|
protected void internalTestNonMultidexBuildStructure(String ruleLabel) throws Exception {
ConfiguredTarget binary = getConfiguredTarget(ruleLabel);
Set<Artifact> artifacts = actionsTestUtil().artifactClosureOf(getFilesToBuild(binary));
Artifact dexInput = getFirstArtifactEndingWith(artifacts, "classes.jar");
Artifact dexOutput = getFirstArtifactEndingWith(artifacts, "classes.dex.zip");
SpawnAction dexAction = getGeneratingSpawnAction(dexOutput);
assertThat(dexAction.getRemainingArguments())
.containsAllOf(
"--input",
dexInput.getExecPathString(),
"--output",
dexOutput.getExecPathString(),
"--multidex=off")
.inOrder();
}
|
void function(String ruleLabel) throws Exception { ConfiguredTarget binary = getConfiguredTarget(ruleLabel); Set<Artifact> artifacts = actionsTestUtil().artifactClosureOf(getFilesToBuild(binary)); Artifact dexInput = getFirstArtifactEndingWith(artifacts, STR); Artifact dexOutput = getFirstArtifactEndingWith(artifacts, STR); SpawnAction dexAction = getGeneratingSpawnAction(dexOutput); assertThat(dexAction.getRemainingArguments()) .containsAllOf( STR, dexInput.getExecPathString(), STR, dexOutput.getExecPathString(), STR) .inOrder(); }
|
/**
* Internal helper method: given an android_binary rule label, check that the dex merger
* runs is invoked with {@code --multidex=off}.
*/
|
Internal helper method: given an android_binary rule label, check that the dex merger runs is invoked with --multidex=off
|
internalTestNonMultidexBuildStructure
|
{
"repo_name": "ButterflyNetwork/bazel",
"path": "src/test/java/com/google/devtools/build/lib/rules/android/AndroidMultidexBaseTest.java",
"license": "apache-2.0",
"size": 6302
}
|
[
"com.google.common.truth.Truth",
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.actions.util.ActionsTestUtil",
"com.google.devtools.build.lib.analysis.ConfiguredTarget",
"com.google.devtools.build.lib.analysis.actions.SpawnAction",
"java.util.Set"
] |
import com.google.common.truth.Truth; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.actions.SpawnAction; import java.util.Set;
|
import com.google.common.truth.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.actions.util.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.analysis.actions.*; import java.util.*;
|
[
"com.google.common",
"com.google.devtools",
"java.util"
] |
com.google.common; com.google.devtools; java.util;
| 2,899,214
|
public DataType getRecordDataType(final RecordSchema childSchema) {
if (this != RECORD) {
return null;
}
return new RecordDataType(childSchema);
}
|
DataType function(final RecordSchema childSchema) { if (this != RECORD) { return null; } return new RecordDataType(childSchema); }
|
/**
* Returns a Data Type that represents a "RECORD" or "ARRAY" type with the given schema.
*
* @param childSchema the Schema for the Record or Array
* @return a DataType that represents a Record or Array with the given schema, or <code>null</code> if this RecordFieldType
* is not the RECORD or ARRAY type.
*/
|
Returns a Data Type that represents a "RECORD" or "ARRAY" type with the given schema
|
getRecordDataType
|
{
"repo_name": "MikeThomsen/nifi",
"path": "nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/RecordFieldType.java",
"license": "apache-2.0",
"size": 16159
}
|
[
"org.apache.nifi.serialization.record.type.RecordDataType"
] |
import org.apache.nifi.serialization.record.type.RecordDataType;
|
import org.apache.nifi.serialization.record.type.*;
|
[
"org.apache.nifi"
] |
org.apache.nifi;
| 2,567,346
|
@Test
public void testHandleActionResult_isRaiseErrorModalPanel_withActionMessageSize1() {
EngineFault testFault = new EngineFault();
ArrayList<String> translatedErrors = new ArrayList<>(Arrays.asList("Translated Message 1")); //$NON-NLS-1$
when(mockEventsHandler.isRaiseErrorModalPanel(VdcActionType.AddDisk, testFault)).thenReturn(true);
when(mockValidateErrorsTranslator.translateErrorText(any(ArrayList.class))).thenReturn(translatedErrors);
VdcActionParametersBase testParameters = new VdcActionParametersBase();
VdcReturnValueBase returnValue = new VdcReturnValueBase();
returnValue.setFault(testFault);
returnValue.setDescription("This is a description"); //$NON-NLS-1$
returnValue.getValidationMessages().add("Message 1"); //$NON-NLS-1$
returnValue.setValid(false); // Yes this is the default, but to make sure.
frontend.handleActionResult(VdcActionType.AddDisk, testParameters, returnValue, mockActionCallback,
testState, true);
verify(mockActionCallback).executed(callbackParam.capture());
assertEquals("Parameters should match", testParameters, callbackParam.getValue().getParameters()); //$NON-NLS-1$
assertEquals("Result should match", returnValue, callbackParam.getValue().getReturnValue()); //$NON-NLS-1$
assertEquals("States should match", testState, callbackParam.getValue().getState()); //$NON-NLS-1$
assertEquals("Action type should match", VdcActionType.AddDisk, //$NON-NLS-1$
callbackParam.getValue().getActionType());
ArgumentCaptor<FrontendFailureEventArgs> failureCaptor =
ArgumentCaptor.forClass(FrontendFailureEventArgs.class);
verify(mockFrontendFailureEvent).raise(eq(Frontend.class), failureCaptor.capture());
assertEquals("Descriptions should match", "This is a description", //$NON-NLS-1$ //$NON-NLS-2$
failureCaptor.getValue().getMessages().get(0).getDescription());
assertEquals("Text should match translation", "Translated Message 1", //$NON-NLS-1$ //$NON-NLS-2$
failureCaptor.getValue().getMessages().get(0).getText());
}
|
void function() { EngineFault testFault = new EngineFault(); ArrayList<String> translatedErrors = new ArrayList<>(Arrays.asList(STR)); when(mockEventsHandler.isRaiseErrorModalPanel(VdcActionType.AddDisk, testFault)).thenReturn(true); when(mockValidateErrorsTranslator.translateErrorText(any(ArrayList.class))).thenReturn(translatedErrors); VdcActionParametersBase testParameters = new VdcActionParametersBase(); VdcReturnValueBase returnValue = new VdcReturnValueBase(); returnValue.setFault(testFault); returnValue.setDescription(STR); returnValue.getValidationMessages().add(STR); returnValue.setValid(false); frontend.handleActionResult(VdcActionType.AddDisk, testParameters, returnValue, mockActionCallback, testState, true); verify(mockActionCallback).executed(callbackParam.capture()); assertEquals(STR, testParameters, callbackParam.getValue().getParameters()); assertEquals(STR, returnValue, callbackParam.getValue().getReturnValue()); assertEquals(STR, testState, callbackParam.getValue().getState()); assertEquals(STR, VdcActionType.AddDisk, callbackParam.getValue().getActionType()); ArgumentCaptor<FrontendFailureEventArgs> failureCaptor = ArgumentCaptor.forClass(FrontendFailureEventArgs.class); verify(mockFrontendFailureEvent).raise(eq(Frontend.class), failureCaptor.capture()); assertEquals(STR, STR, failureCaptor.getValue().getMessages().get(0).getDescription()); assertEquals(STR, STR, failureCaptor.getValue().getMessages().get(0).getText()); }
|
/**
* Run the following test case.
* <ol>
* <li>Run a single action</li>
* <li>Return logical failure, validate=false.</li>
* <li>isRaiseErrorModalPanel returns true.</li>
* <li>Check to make sure the failure event is fired</li>
* <li>Check to make sure the callback is called</li>
* </ol>
* Test just the handler method.
*/
|
Run the following test case. Run a single action Return logical failure, validate=false. isRaiseErrorModalPanel returns true. Check to make sure the failure event is fired Check to make sure the callback is called Test just the handler method
|
testHandleActionResult_isRaiseErrorModalPanel_withActionMessageSize1
|
{
"repo_name": "OpenUniversity/ovirt-engine",
"path": "frontend/webadmin/modules/frontend/src/test/java/org/ovirt/engine/ui/frontend/FrontendActionTest.java",
"license": "apache-2.0",
"size": 39847
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"org.junit.Assert",
"org.mockito.ArgumentCaptor",
"org.mockito.Mockito",
"org.ovirt.engine.core.common.action.VdcActionParametersBase",
"org.ovirt.engine.core.common.action.VdcActionType",
"org.ovirt.engine.core.common.action.VdcReturnValueBase",
"org.ovirt.engine.core.common.errors.EngineFault"
] |
import java.util.ArrayList; import java.util.Arrays; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.ovirt.engine.core.common.action.VdcActionParametersBase; import org.ovirt.engine.core.common.action.VdcActionType; import org.ovirt.engine.core.common.action.VdcReturnValueBase; import org.ovirt.engine.core.common.errors.EngineFault;
|
import java.util.*; import org.junit.*; import org.mockito.*; import org.ovirt.engine.core.common.action.*; import org.ovirt.engine.core.common.errors.*;
|
[
"java.util",
"org.junit",
"org.mockito",
"org.ovirt.engine"
] |
java.util; org.junit; org.mockito; org.ovirt.engine;
| 2,900,659
|
public ScriptNode getNodeFromString(String nodeRefString)
{
NodeRef nodeRef = new NodeRef(nodeRefString);
return (ScriptNode)new ValueConverter().convertValueForScript(this.services, getScope(), null, nodeRef);
}
|
ScriptNode function(String nodeRefString) { NodeRef nodeRef = new NodeRef(nodeRefString); return (ScriptNode)new ValueConverter().convertValueForScript(this.services, getScope(), null, nodeRef); }
|
/**
* Gets a JS node object from a string noderef
*
* @param nodeRefString string reference to a node
* @return a JS node object
*/
|
Gets a JS node object from a string noderef
|
getNodeFromString
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/repository/source/java/org/alfresco/repo/jscript/ScriptUtils.java",
"license": "lgpl-3.0",
"size": 8920
}
|
[
"org.alfresco.service.cmr.repository.NodeRef"
] |
import org.alfresco.service.cmr.repository.NodeRef;
|
import org.alfresco.service.cmr.repository.*;
|
[
"org.alfresco.service"
] |
org.alfresco.service;
| 2,610,211
|
@Test
public void printsNullSequenceAsNull() {
final YamlSequence seq = null;
final YamlMapping map = Yaml.createYamlMappingBuilder()
.add("key", "value1")
.add("seq", seq)
.add("anotherKey", "value2")
.build();
final StringBuilder expected = new StringBuilder();
expected
.append("key: value1").append(System.lineSeparator())
.append("seq: null").append(System.lineSeparator())
.append("anotherKey: value2");
MatcherAssert.assertThat(
map.toString(),
Matchers.equalTo(expected.toString())
);
}
|
void function() { final YamlSequence seq = null; final YamlMapping map = Yaml.createYamlMappingBuilder() .add("key", STR) .add("seq", seq) .add(STR, STR) .build(); final StringBuilder expected = new StringBuilder(); expected .append(STR).append(System.lineSeparator()) .append(STR).append(System.lineSeparator()) .append(STR); MatcherAssert.assertThat( map.toString(), Matchers.equalTo(expected.toString()) ); }
|
/**
* An null YamlSequence value is printed as null.
*/
|
An null YamlSequence value is printed as null
|
printsNullSequenceAsNull
|
{
"repo_name": "decorators-squad/camel",
"path": "src/test/java/com/amihaiemil/eoyaml/YamlMappingPrintTest.java",
"license": "bsd-3-clause",
"size": 14315
}
|
[
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] |
import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers;
|
import org.hamcrest.*;
|
[
"org.hamcrest"
] |
org.hamcrest;
| 1,523,681
|
// creates upload location field
uploadLocationField = new JTextField();
uploadLocationField.addFocusListener(new FocusListener()
{
@Override
public void focusGained(FocusEvent e)
{
if(uploadLocationField.getText().equals(UPLOAD_LOCATION_FIELD_INITIAL_TEXT))
{
uploadLocationField.setText("");
uploadButton.setEnabled(true);
}
}
@Override
public void focusLost(FocusEvent e) { }
|
uploadLocationField = new JTextField(); uploadLocationField.addFocusListener(new FocusListener() { void function(FocusEvent e) { if(uploadLocationField.getText().equals(UPLOAD_LOCATION_FIELD_INITIAL_TEXT)) { uploadLocationField.setText(""); uploadButton.setEnabled(true); } } public void focusLost(FocusEvent e) { }
|
/**
* Responds to user clicking the upload location field for the
* first time by deleting the initial text and making any new text
* typed black (rather than grey).
*/
|
Responds to user clicking the upload location field for the first time by deleting the initial text and making any new text typed black (rather than grey)
|
focusGained
|
{
"repo_name": "lakras/matlab-to-julia",
"path": "old_version_(perl_and_java)/TranslatorGUI.java",
"license": "mit",
"size": 19774
}
|
[
"java.awt.event.FocusEvent",
"java.awt.event.FocusListener",
"javax.swing.JTextField"
] |
import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import javax.swing.JTextField;
|
import java.awt.event.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 427,786
|
public static List<Segment> getValidSegments(List<LoadMetadataDetails> loadMetadataDetails) {
List<Segment> segments = new ArrayList<>();
for (LoadMetadataDetails segment : loadMetadataDetails) {
//check if this load is an already merged load.
if (null != segment.getMergedLoadName()) {
segments.add(Segment.toSegment(segment.getMergedLoadName(), null));
} else {
segments.add(Segment.toSegment(segment.getLoadName(), null));
}
}
return segments;
}
|
static List<Segment> function(List<LoadMetadataDetails> loadMetadataDetails) { List<Segment> segments = new ArrayList<>(); for (LoadMetadataDetails segment : loadMetadataDetails) { if (null != segment.getMergedLoadName()) { segments.add(Segment.toSegment(segment.getMergedLoadName(), null)); } else { segments.add(Segment.toSegment(segment.getLoadName(), null)); } } return segments; }
|
/**
* For getting the comma separated valid segments for merging.
*
* @param loadMetadataDetails
* @return
*/
|
For getting the comma separated valid segments for merging
|
getValidSegments
|
{
"repo_name": "jackylk/incubator-carbondata",
"path": "processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java",
"license": "apache-2.0",
"size": 56114
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.carbondata.core.datamap.Segment",
"org.apache.carbondata.core.statusmanager.LoadMetadataDetails"
] |
import java.util.ArrayList; import java.util.List; import org.apache.carbondata.core.datamap.Segment; import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
|
import java.util.*; import org.apache.carbondata.core.datamap.*; import org.apache.carbondata.core.statusmanager.*;
|
[
"java.util",
"org.apache.carbondata"
] |
java.util; org.apache.carbondata;
| 507,566
|
public List<CFGNode> getBreakNodes() {
return this.breakNodes;
}
|
List<CFGNode> function() { return this.breakNodes; }
|
/**
* Get the break nodes for this graph.
* @return The list of break points.
*/
|
Get the break nodes for this graph
|
getBreakNodes
|
{
"repo_name": "saltlab/Pangor",
"path": "core/src/ca/ubc/ece/salt/pangor/cfg/CFG.java",
"license": "apache-2.0",
"size": 5566
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,680,616
|
public void requestRemotePlaybackControl(MediaRouteController.MediaStateListener player) {
// Player should match currently remotely played item, but there
// can be a race between various
// ways that the a video can stop playing remotely. Check that the
// player is current, and ignore if not.
if (mCurrentRouteController == null) return;
if (mCurrentRouteController.getMediaStateListener() != player) return;
showMediaRouteControlDialog(ApplicationStatus.getLastTrackedFocusedActivity());
}
|
void function(MediaRouteController.MediaStateListener player) { if (mCurrentRouteController == null) return; if (mCurrentRouteController.getMediaStateListener() != player) return; showMediaRouteControlDialog(ApplicationStatus.getLastTrackedFocusedActivity()); }
|
/**
* Called when a lower layer requests control of a video that is being cast.
* @param player The player for which remote playback control is being requested.
*/
|
Called when a lower layer requests control of a video that is being cast
|
requestRemotePlaybackControl
|
{
"repo_name": "wuhengzhi/chromium-crosswalk",
"path": "chrome/android/java/src/org/chromium/chrome/browser/media/remote/RemoteMediaPlayerController.java",
"license": "bsd-3-clause",
"size": 12901
}
|
[
"org.chromium.base.ApplicationStatus",
"org.chromium.chrome.browser.media.remote.MediaRouteController"
] |
import org.chromium.base.ApplicationStatus; import org.chromium.chrome.browser.media.remote.MediaRouteController;
|
import org.chromium.base.*; import org.chromium.chrome.browser.media.remote.*;
|
[
"org.chromium.base",
"org.chromium.chrome"
] |
org.chromium.base; org.chromium.chrome;
| 565,792
|
static SqlNode renameColumns(SqlNodeList columnList, SqlNode query) {
if (columnList == null) {
return query;
}
final SqlParserPos p = query.getParserPosition();
final SqlNodeList selectList = SqlNodeList.SINGLETON_STAR;
final SqlCall from =
SqlStdOperatorTable.AS.createCall(p,
ImmutableList.<SqlNode>builder()
.add(query)
.add(new SqlIdentifier("_", p))
.addAll(columnList)
.build());
return new SqlSelect(p, null, selectList, from, null, null, null, null,
null, null, null, null);
}
|
static SqlNode renameColumns(SqlNodeList columnList, SqlNode query) { if (columnList == null) { return query; } final SqlParserPos p = query.getParserPosition(); final SqlNodeList selectList = SqlNodeList.SINGLETON_STAR; final SqlCall from = SqlStdOperatorTable.AS.createCall(p, ImmutableList.<SqlNode>builder() .add(query) .add(new SqlIdentifier("_", p)) .addAll(columnList) .build()); return new SqlSelect(p, null, selectList, from, null, null, null, null, null, null, null, null); }
|
/** Wraps a query to rename its columns. Used by CREATE VIEW and CREATE
* MATERIALIZED VIEW. */
|
Wraps a query to rename its columns. Used by CREATE VIEW and CREATE
|
renameColumns
|
{
"repo_name": "googleinterns/calcite",
"path": "core/src/main/java/org/apache/calcite/sql/SqlDdlNodes.java",
"license": "apache-2.0",
"size": 10492
}
|
[
"com.google.common.collect.ImmutableList",
"org.apache.calcite.sql.fun.SqlStdOperatorTable",
"org.apache.calcite.sql.parser.SqlParserPos"
] |
import com.google.common.collect.ImmutableList; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserPos;
|
import com.google.common.collect.*; import org.apache.calcite.sql.fun.*; import org.apache.calcite.sql.parser.*;
|
[
"com.google.common",
"org.apache.calcite"
] |
com.google.common; org.apache.calcite;
| 1,271,713
|
public Schema getTestSchema2()
{
Schema schema = new Schema();
schema.setNullValue(AbstractServiceTest.SCHEMA_NULL_VALUE_NULL_WORD);
schema.setDelimiter(AbstractServiceTest.SCHEMA_DELIMITER_COMMA);
schema.setEscapeCharacter(AbstractServiceTest.SCHEMA_ESCAPE_CHARACTER_TILDE);
schema.setPartitionKeyGroup(AbstractServiceTest.PARTITION_KEY_GROUP_2);
schema.setColumns(schemaColumnDaoTestHelper.getTestSchemaColumns(AbstractServiceTest.RANDOM_SUFFIX_2));
schema.setPartitions(schemaColumnDaoTestHelper.getTestPartitionColumns(AbstractServiceTest.RANDOM_SUFFIX_2));
return schema;
}
|
Schema function() { Schema schema = new Schema(); schema.setNullValue(AbstractServiceTest.SCHEMA_NULL_VALUE_NULL_WORD); schema.setDelimiter(AbstractServiceTest.SCHEMA_DELIMITER_COMMA); schema.setEscapeCharacter(AbstractServiceTest.SCHEMA_ESCAPE_CHARACTER_TILDE); schema.setPartitionKeyGroup(AbstractServiceTest.PARTITION_KEY_GROUP_2); schema.setColumns(schemaColumnDaoTestHelper.getTestSchemaColumns(AbstractServiceTest.RANDOM_SUFFIX_2)); schema.setPartitions(schemaColumnDaoTestHelper.getTestPartitionColumns(AbstractServiceTest.RANDOM_SUFFIX_2)); return schema; }
|
/**
* Returns a business object format schema that uses hard coded test values.
*
* @return the test business object format schema
*/
|
Returns a business object format schema that uses hard coded test values
|
getTestSchema2
|
{
"repo_name": "kusid/herd",
"path": "herd-code/herd-service/src/test/java/org/finra/herd/service/BusinessObjectFormatServiceTestHelper.java",
"license": "apache-2.0",
"size": 43893
}
|
[
"org.finra.herd.model.api.xml.Schema"
] |
import org.finra.herd.model.api.xml.Schema;
|
import org.finra.herd.model.api.xml.*;
|
[
"org.finra.herd"
] |
org.finra.herd;
| 595,366
|
/**
 * Cancels a pending or current download of a remote file.
 *
 * @param account ownCloud account where the remote file is stored.
 * @param file A file in the queue of pending downloads
 */
public void cancel(Account account, OCFile file) {
    Pair<DownloadFileOperation, String> removeResult = mPendingDownloads.remove(account, file.getRemotePath());
    // NOTE(review): remove(...) presumably returns null when nothing matched — guard before
    // dereferencing to avoid an NPE; confirm against the IndexedForest contract.
    DownloadFileOperation download = (removeResult != null) ? removeResult.first : null;
    if (download != null) {
        download.cancel();
    } else {
        // No queued download matched; cancel the running one if it belongs to the same account
        // and its remote path lies under the given file's path (covers cancelling a folder).
        if (mCurrentDownload != null && mCurrentAccount != null &&
                mCurrentDownload.getRemotePath().startsWith(file.getRemotePath()) &&
                account.name.equals(mCurrentAccount.name)) {
            mCurrentDownload.cancel();
        }
    }
}
|
void function(Account account, OCFile file) { Pair<DownloadFileOperation, String> removeResult = mPendingDownloads.remove(account, file.getRemotePath()); DownloadFileOperation download = removeResult.first; if (download != null) { download.cancel(); } else { if (mCurrentDownload != null && mCurrentAccount != null && mCurrentDownload.getRemotePath().startsWith(file.getRemotePath()) && account.name.equals(mCurrentAccount.name)) { mCurrentDownload.cancel(); } } }
|
/**
* Cancels a pending or current download of a remote file.
*
* @param account ownCloud account where the remote file is stored.
* @param file A file in the queue of pending downloads
*/
|
Cancels a pending or current download of a remote file
|
cancel
|
{
"repo_name": "ekeitho/android",
"path": "src/com/owncloud/android/files/services/FileDownloader.java",
"license": "gpl-2.0",
"size": 27274
}
|
[
"android.accounts.Account",
"android.util.Pair",
"com.owncloud.android.datamodel.OCFile",
"com.owncloud.android.operations.DownloadFileOperation"
] |
import android.accounts.Account; import android.util.Pair; import com.owncloud.android.datamodel.OCFile; import com.owncloud.android.operations.DownloadFileOperation;
|
import android.accounts.*; import android.util.*; import com.owncloud.android.datamodel.*; import com.owncloud.android.operations.*;
|
[
"android.accounts",
"android.util",
"com.owncloud.android"
] |
android.accounts; android.util; com.owncloud.android;
| 1,735,762
|
/**
 * Prints the given class to the standard output.
 *
 * <p>Command line arguments: [-debug] &lt;binary class name or class file name&gt;
 *
 * @param usage the help message to show when command line arguments are incorrect.
 * @param printer the printer to convert the class into text.
 * @param args the command line arguments.
 * @throws IOException if the class cannot be found, or if an IOException occurs.
 */
static void main(final String usage, final Printer printer, final String[] args)
    throws IOException {
  if (args.length < 1 || args.length > 2 || (args[0].equals("-debug") && args.length != 2)) {
    System.err.println(usage);
    return;
  }

  TraceClassVisitor traceClassVisitor =
      new TraceClassVisitor(null, printer, new PrintWriter(System.out));

  String className;
  int parsingOptions;
  if (args[0].equals("-debug")) {
    className = args[1];
    parsingOptions = ClassReader.SKIP_DEBUG;
  } else {
    className = args[0];
    parsingOptions = 0;
  }
  if (className.endsWith(".class")
      || className.indexOf('\\') != -1
      || className.indexOf('/') != -1) {
    // The argument looks like a file name: read the class bytes from disk.
    InputStream inputStream =
        new FileInputStream(className); // NOPMD(AvoidFileStream): can't fix for 1.5 compatibility
    try {
      new ClassReader(inputStream).accept(traceClassVisitor, parsingOptions);
    } finally {
      // The stream was previously leaked; try/finally (not try-with-resources) keeps
      // the 1.5 compatibility noted above.
      inputStream.close();
    }
  } else {
    new ClassReader(className).accept(traceClassVisitor, parsingOptions);
  }
}
|
static void main(final String usage, final Printer printer, final String[] args) throws IOException { if (args.length < 1 args.length > 2 (args[0].equals(STR) && args.length != 2)) { System.err.println(usage); return; } TraceClassVisitor traceClassVisitor = new TraceClassVisitor(null, printer, new PrintWriter(System.out)); String className; int parsingOptions; if (args[0].equals(STR)) { className = args[1]; parsingOptions = ClassReader.SKIP_DEBUG; } else { className = args[0]; parsingOptions = 0; } if (className.endsWith(STR) className.indexOf('\\') != -1 className.indexOf('/') != -1) { InputStream inputStream = new FileInputStream(className); new ClassReader(inputStream).accept(traceClassVisitor, parsingOptions); } else { new ClassReader(className).accept(traceClassVisitor, parsingOptions); } }
|
/**
 * Prints the given class to the standard output.
*
* <p>Command line arguments: [-debug] <binary class name or class file name >
*
* @param usage the help message to show when command line arguments are incorrect.
* @param printer the printer to convert the class into text.
* @param args the command line arguments.
* @throws IOException if the class cannot be found, or if an IOException occurs.
*/
|
Prints the given class to the standard output. Command line arguments: [-debug] <binary class name or class file name>
|
main
|
{
"repo_name": "benjholla/JReFrameworker",
"path": "plugin/org.objectweb.asm/src/org/objectweb/asm/util/Printer.java",
"license": "mit",
"size": 53254
}
|
[
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.PrintWriter",
"org.objectweb.asm.ClassReader"
] |
import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import org.objectweb.asm.ClassReader;
|
import java.io.*; import org.objectweb.asm.*;
|
[
"java.io",
"org.objectweb.asm"
] |
java.io; org.objectweb.asm;
| 746,068
|
/**
 * Return the value associated with the column: inventoryenddate.
 *
 * @return the inventory end date ({@code this.inventoryenddate})
 */
@Type(type="org.jadira.usertype.dateandtime.threeten.PersistentLocalDateTime")
public LocalDateTime getInventoryenddate() {
    return inventoryenddate;
}
|
@Type(type=STR) LocalDateTime function() { return this.inventoryenddate; }
|
/**
* Return the value associated with the column: inventoryenddate.
* @return A LocalDateTime object (this.inventoryenddate)
*/
|
Return the value associated with the column: inventoryenddate
|
getInventoryenddate
|
{
"repo_name": "servinglynk/hmis-lynk-open-source",
"path": "hmis-model-v2016/src/main/java/com/servinglynk/hmis/warehouse/model/v2016/Inventory.java",
"license": "mpl-2.0",
"size": 19463
}
|
[
"java.time.LocalDateTime",
"org.hibernate.annotations.Type"
] |
import java.time.LocalDateTime; import org.hibernate.annotations.Type;
|
import java.time.*; import org.hibernate.annotations.*;
|
[
"java.time",
"org.hibernate.annotations"
] |
java.time; org.hibernate.annotations;
| 1,309,832
|
/**
 * Returns the highest {@link SPObject} class that the
 * {@link SPObject#childPositionOffset(Class)} method looks at to determine the index.
 *
 * @return the super child class configured for this object
 */
public Class<? extends SPObject> getSuperChildClass() {
    return this.superChildClass;
}
|
Class<? extends SPObject> function() { return superChildClass; }
|
/**
* Returns the highest {@link SPObject} class that the
* {@link SPObject#childPositionOffset(Class)} method looks at to
* determine the index.
*/
|
Returns the highest <code>SPObject</code> class that the <code>SPObject#childPositionOffset(Class)</code> method looks at to determine the index
|
getSuperChildClass
|
{
"repo_name": "iyerdude/wabit",
"path": "src/main/java/ca/sqlpower/wabit/report/ContentBox.java",
"license": "gpl-3.0",
"size": 16548
}
|
[
"ca.sqlpower.object.SPObject"
] |
import ca.sqlpower.object.SPObject;
|
import ca.sqlpower.object.*;
|
[
"ca.sqlpower.object"
] |
ca.sqlpower.object;
| 884,861
|
/**
 * Creates a new adapter for the default case.
 * This default implementation returns null so that unrecognized objects
 * receive no adapter.
 *
 * @return the new adapter, always null here.
 * @generated
 */
public Adapter createEObjectAdapter()
{
return null;
}
|
Adapter function() { return null; }
|
/**
* Creates a new adapter for the default case.
* <!-- begin-user-doc -->
* This default implementation returns null.
* <!-- end-user-doc -->
* @return the new adapter.
* @generated
*/
|
Creates a new adapter for the default case. This default implementation returns null.
|
createEObjectAdapter
|
{
"repo_name": "peterkir/org.eclipse.oomph",
"path": "plugins/org.eclipse.oomph.p2/src/org/eclipse/oomph/p2/util/P2AdapterFactory.java",
"license": "epl-1.0",
"size": 7133
}
|
[
"org.eclipse.emf.common.notify.Adapter"
] |
import org.eclipse.emf.common.notify.Adapter;
|
import org.eclipse.emf.common.notify.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,523,148
|
/**
 * Pull (fetch and merge) changes from a remote repository into the local one
 * (sends the request over WebSocket).
 *
 * @param project project (root of the GIT repository)
 * @param refSpec refspec to fetch, e.g.
 *        "refs/heads/featured:refs/remotes/origin/featured", or simply a remote
 *        branch name such as "featured"
 * @param remote the remote repository's name
 * @param rebase use rebase instead of merge
 * @param credentials credentials to perform vcs authorization
 * @return a promise resolving to the pull response
 */
Promise<PullResponse> pull(
    Path project, String refSpec, String remote, boolean rebase, Credentials credentials);
|
Promise<PullResponse> pull( Path project, String refSpec, String remote, boolean rebase, Credentials credentials);
|
/**
* Pull (fetch and merge) changes from remote repository to local one (sends request over
* WebSocket).
*
* @param project project (root of GIT repository)
* @param refSpec list of refspec to fetch.
* <p>Expected form is:
* <ul>
* <li>refs/heads/featured:refs/remotes/origin/featured - branch 'featured' from remote
* repository will be fetched to 'refs/remotes/origin/featured'.
* <li>featured - remote branch name.
* </ul>
*
* @param remote remote remote repository's name
* @param rebase use rebase instead of merge
* @param credentials credentials to perform vcs authorization
*/
|
Pull (fetch and merge) changes from remote repository to local one (sends request over WebSocket)
|
pull
|
{
"repo_name": "sleshchenko/che",
"path": "plugins/plugin-git/che-plugin-git-ext-git/src/main/java/org/eclipse/che/ide/ext/git/client/GitServiceClient.java",
"license": "epl-1.0",
"size": 14444
}
|
[
"org.eclipse.che.api.git.shared.PullResponse",
"org.eclipse.che.api.promises.client.Promise",
"org.eclipse.che.ide.api.auth.Credentials",
"org.eclipse.che.ide.resource.Path"
] |
import org.eclipse.che.api.git.shared.PullResponse; import org.eclipse.che.api.promises.client.Promise; import org.eclipse.che.ide.api.auth.Credentials; import org.eclipse.che.ide.resource.Path;
|
import org.eclipse.che.api.git.shared.*; import org.eclipse.che.api.promises.client.*; import org.eclipse.che.ide.api.auth.*; import org.eclipse.che.ide.resource.*;
|
[
"org.eclipse.che"
] |
org.eclipse.che;
| 770,554
|
/**
 * Delete users API.
 *
 * @param userIds List of user identifiers (optional)
 * @param usernames List of usernames (optional)
 * @return Response object reporting the deletion result
 * @throws ServiceLayerException if the service layer fails
 * @throws AuthenticationException if the caller is not authenticated
 * @throws UserNotFoundException if a referenced user does not exist
 */
@DeleteMapping()
public ResponseBody deleteUser(
        @RequestParam(value = REQUEST_PARAM_ID, required = false) List<Long> userIds,
        @RequestParam(value = REQUEST_PARAM_USERNAME, required = false) List<String> usernames)
        throws ServiceLayerException, AuthenticationException, UserNotFoundException {
    // At least one of the two parameter lists must be provided.
    ValidationUtils.validateAnyListNonEmpty(userIds, usernames);

    // Normalize absent parameters to empty lists before delegating.
    List<Long> idsToDelete = (userIds == null) ? Collections.<Long>emptyList() : userIds;
    List<String> namesToDelete = (usernames == null) ? Collections.<String>emptyList() : usernames;
    userService.deleteUsers(idsToDelete, namesToDelete);

    Result result = new Result();
    result.setResponse(DELETED);
    ResponseBody responseBody = new ResponseBody();
    responseBody.setResult(result);
    return responseBody;
}
|
@DeleteMapping() ResponseBody function( @RequestParam(value = REQUEST_PARAM_ID, required = false) List<Long> userIds, @RequestParam(value = REQUEST_PARAM_USERNAME, required = false) List<String> usernames) throws ServiceLayerException, AuthenticationException, UserNotFoundException { ValidationUtils.validateAnyListNonEmpty(userIds, usernames); userService.deleteUsers(userIds != null? userIds : Collections.emptyList(), usernames != null? usernames : Collections.emptyList()); ResponseBody responseBody = new ResponseBody(); Result result = new Result(); result.setResponse(DELETED); responseBody.setResult(result); return responseBody; }
|
/**
* Delete users API
*
* @param userIds List of user identifiers
* @param usernames List of usernames
* @return Response object
*/
|
Delete users API
|
deleteUser
|
{
"repo_name": "craftercms/studio2",
"path": "src/main/java/org/craftercms/studio/controller/rest/v2/UsersController.java",
"license": "gpl-3.0",
"size": 23931
}
|
[
"java.util.Collections",
"java.util.List",
"org.craftercms.studio.api.v1.exception.ServiceLayerException",
"org.craftercms.studio.api.v1.exception.security.AuthenticationException",
"org.craftercms.studio.api.v1.exception.security.UserNotFoundException",
"org.craftercms.studio.model.rest.ResponseBody",
"org.craftercms.studio.model.rest.Result",
"org.springframework.web.bind.annotation.DeleteMapping",
"org.springframework.web.bind.annotation.RequestParam"
] |
import java.util.Collections; import java.util.List; import org.craftercms.studio.api.v1.exception.ServiceLayerException; import org.craftercms.studio.api.v1.exception.security.AuthenticationException; import org.craftercms.studio.api.v1.exception.security.UserNotFoundException; import org.craftercms.studio.model.rest.ResponseBody; import org.craftercms.studio.model.rest.Result; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.RequestParam;
|
import java.util.*; import org.craftercms.studio.api.v1.exception.*; import org.craftercms.studio.api.v1.exception.security.*; import org.craftercms.studio.model.rest.*; import org.springframework.web.bind.annotation.*;
|
[
"java.util",
"org.craftercms.studio",
"org.springframework.web"
] |
java.util; org.craftercms.studio; org.springframework.web;
| 1,414,226
|
/**
 * Create a List of DataAttributes out of the given JSON array. Each element is
 * parsed with {@link DataAttributeParser#parseDataAttribute}.
 *
 * @param attributeJsonArray JSON array of data attribute definitions
 * @return List of parsed DataAttributes
 */
public static List<DataAttribute> getDataAttributes(JSONArray attributeJsonArray) {
    int arraySize = attributeJsonArray.length();
    // Pre-size the list since the element count is known up front.
    List<DataAttribute> dataAttributes = new ArrayList<>(arraySize);
    for (int i = 0; i < arraySize; i++) {
        JSONObject dataAttributeJson = attributeJsonArray.getJSONObject(i);
        dataAttributes.add(DataAttributeParser.parseDataAttribute(dataAttributeJson));
    }
    // Validate name usage across the parsed attributes; NameValidation defines the exact rule.
    NameValidation.validateNameFrequency(dataAttributes);
    return dataAttributes;
}
|
static List<DataAttribute> function(JSONArray attributeJsonArray) { int arraySize = attributeJsonArray.length(); List<DataAttribute> dataAttributes = new ArrayList<>(); for (int i = 0; i < arraySize; i++) { JSONObject dataAttributeJson = attributeJsonArray.getJSONObject(i); DataAttribute dataAttribute = DataAttributeParser.parseDataAttribute(dataAttributeJson); dataAttributes.add(dataAttribute); } NameValidation.validateNameFrequency(dataAttributes); return dataAttributes; }
|
/**
* Create List of DataAttributes out of attributeJsonArray. Uses
* DataAttributeParser.
*
* @param attributeJsonArray
* @return List of DataAttributes
*/
|
Create List of DataAttributes out of attributeJsonArray. Uses DataAttributeParser
|
getDataAttributes
|
{
"repo_name": "bptlab/JEngine",
"path": "src/main/java/de/hpi/bpt/chimera/parser/datamodel/DataModelClassParser.java",
"license": "mit",
"size": 3233
}
|
[
"de.hpi.bpt.chimera.model.datamodel.DataAttribute",
"de.hpi.bpt.chimera.validation.NameValidation",
"java.util.ArrayList",
"java.util.List",
"org.json.JSONArray",
"org.json.JSONObject"
] |
import de.hpi.bpt.chimera.model.datamodel.DataAttribute; import de.hpi.bpt.chimera.validation.NameValidation; import java.util.ArrayList; import java.util.List; import org.json.JSONArray; import org.json.JSONObject;
|
import de.hpi.bpt.chimera.model.datamodel.*; import de.hpi.bpt.chimera.validation.*; import java.util.*; import org.json.*;
|
[
"de.hpi.bpt",
"java.util",
"org.json"
] |
de.hpi.bpt; java.util; org.json;
| 2,249,138
|
/**
 * Registers the server with the registry. Broken out separately so code that
 * restarts a dead registry can rebind the server.
 *
 * @param namingURL the RMI naming URL to bind the server under
 * @param server the remote server instance; must not be null
 * @throws RemoteException if the server has not been created yet, or if rebinding fails
 * @throws IllegalArgumentException if the naming URL is malformed
 */
protected static void registerServer( String namingURL, Remote server )
    throws RemoteException
{
    if ( server == null )
    {
        throw new RemoteException( "Cannot register the server until it is created." );
    }

    if ( log.isInfoEnabled() )
    {
        log.info( "Binding server to " + namingURL );
    }

    try
    {
        Naming.rebind( namingURL, server );
    }
    catch ( MalformedURLException ex )
    {
        // Considered an impossible case by the original author, but keep the original
        // exception as the cause instead of discarding it (constructor available since 1.5).
        throw new IllegalArgumentException( ex.getMessage() + "; url=" + namingURL, ex );
    }
}
|
static void function( String namingURL, Remote server ) throws RemoteException { if ( server == null ) { throw new RemoteException( STR ); } if ( log.isInfoEnabled() ) { log.info( STR + namingURL ); } try { Naming.rebind( namingURL, server ); } catch ( MalformedURLException ex ) { throw new IllegalArgumentException( ex.getMessage() + STR + namingURL ); } }
|
/**
* Registers the server with the registry. I broke this off because we might want to have code
* that will restart a dead registry. It will need to rebind the server.
* <p>
* @param namingURL
* @param server
* @throws RemoteException
*/
|
Registers the server with the registry. I broke this off because we might want to have code that will restart a dead registry. It will need to rebind the server.
|
registerServer
|
{
"repo_name": "mohanaraosv/commons-jcs",
"path": "commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java",
"license": "apache-2.0",
"size": 18352
}
|
[
"java.net.MalformedURLException",
"java.rmi.Naming",
"java.rmi.Remote",
"java.rmi.RemoteException"
] |
import java.net.MalformedURLException; import java.rmi.Naming; import java.rmi.Remote; import java.rmi.RemoteException;
|
import java.net.*; import java.rmi.*;
|
[
"java.net",
"java.rmi"
] |
java.net; java.rmi;
| 1,810,759
|
/**
 * Gets the HBase comparison operator corresponding to the given condition token.
 *
 * @param condition the condition token ("=", ">", "<", ">=" or "<=")
 * @param idPresent whether the row key (id) is present in the query
 * @param useFilter whether the comparison will be evaluated through an HBase filter
 * @return the matching CompareOp, or null when the condition is unrecognized and no filter is used
 * @throws UnsupportedOperationException when the condition is unsupported for a filter-based query
 */
public static CompareOp getOperator(String condition, boolean idPresent, boolean useFilter)
{
    if (condition.equals("="))
    {
        return CompareOp.EQUAL;
    }
    else if (condition.equals(">"))
    {
        return CompareOp.GREATER;
    }
    else if (condition.equals("<"))
    {
        return CompareOp.LESS;
    }
    else if (condition.equals(">="))
    {
        return CompareOp.GREATER_OR_EQUAL;
    }
    else if (condition.equals("<="))
    {
        return CompareOp.LESS_OR_EQUAL;
    }
    else if (useFilter)
    {
        // Unknown condition: only an error when filter evaluation was requested.
        // Message typo fixed: "suported" -> "supported".
        if (!idPresent)
        {
            throw new UnsupportedOperationException(" Condition " + condition + " is not supported in hbase!");
        }
        else
        {
            throw new UnsupportedOperationException(" Condition " + condition
                    + " is not supported for query on row key!");
        }
    }
    return null;
}
|
static CompareOp function(String condition, boolean idPresent, boolean useFilter) { if (condition.equals("=")) { return CompareOp.EQUAL; } else if (condition.equals(">")) { return CompareOp.GREATER; } else if (condition.equals("<")) { return CompareOp.LESS; } else if (condition.equals(">=")) { return CompareOp.GREATER_OR_EQUAL; } else if (condition.equals("<=")) { return CompareOp.LESS_OR_EQUAL; } else if (useFilter) { if (!idPresent) { throw new UnsupportedOperationException(STR + condition + STR); } else { throw new UnsupportedOperationException(STR + condition + STR); } } return null; }
|
/**
* Gets the operator.
*
* @param condition
* the condition
* @param idPresent
* the id present
* @return the operator
*/
|
Gets the operator
|
getOperator
|
{
"repo_name": "lgscofield/Kundera",
"path": "src/kundera-hbase/kundera-hbase/src/main/java/com/impetus/client/hbase/utils/HBaseUtils.java",
"license": "apache-2.0",
"size": 6025
}
|
[
"org.apache.hadoop.hbase.filter.CompareFilter"
] |
import org.apache.hadoop.hbase.filter.CompareFilter;
|
import org.apache.hadoop.hbase.filter.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 2,060,779
|
/**
 * Enter the last name into the edit-user form.
 *
 * @param text value to type into the last-name input field
 */
public void editLastName(String text)
{
    WebElement lastNameInput = drone.findAndWait(By.cssSelector(LASTNAME));
    lastNameInput.clear();
    lastNameInput.sendKeys(text);
}
|
void function(String text) { WebElement input = drone.findAndWait(By.cssSelector(LASTNAME)); input.clear(); input.sendKeys(text); }
|
/**
* Enter LastName.
*/
|
Enter LastName
|
editLastName
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/share-po/src/main/java/org/alfresco/po/share/EditUserPage.java",
"license": "lgpl-3.0",
"size": 11832
}
|
[
"org.openqa.selenium.By",
"org.openqa.selenium.WebElement"
] |
import org.openqa.selenium.By; import org.openqa.selenium.WebElement;
|
import org.openqa.selenium.*;
|
[
"org.openqa.selenium"
] |
org.openqa.selenium;
| 313,415
|
/**
 * Checks whether sufficient storage locations are available and, if not,
 * records a global action error on the request.
 *
 * @param request object of HttpServletRequest.
 * @param containerMap Map containing data for the container.
 * @param storableCount number of positions required.
 * @param objectName name of the object, used in the error message.
 */
private void checkForSufficientAvailablePositions(HttpServletRequest request, Map containerMap,
        int storableCount, String objectName)
{
    // Skip validation when the 'showRequest' parameter is present
    // (check re-enabled with the 'auto' functionality, Bug 14263).
    if (request.getParameter("showRequest") != null)
    {
        return;
    }
    int availableCount = 0;
    if (!containerMap.isEmpty())
    {
        availableCount = StorageContainerUtil.checkForLocation(containerMap, storableCount, availableCount);
    }
    if (availableCount < storableCount)
    {
        ActionErrors errors = getActionErrors(request);
        errors.add(ActionErrors.GLOBAL_ERROR, new ActionError(
                "errors.locations.notSufficient.shipmentReceive", objectName));
        saveErrors(request, errors);
    }
}
|
void function(HttpServletRequest request, Map containerMap, int storableCount, String objectName) { if(request.getParameter(STR)==null) { int counter = 0; if(!containerMap.isEmpty()) { counter = StorageContainerUtil.checkForLocation(containerMap, storableCount, counter); } if(counter < storableCount) { ActionErrors errors = getActionErrors(request); errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( STR,objectName)); saveErrors(request, errors); } } }
|
/**
 * This method checks whether sufficient storage locations are available.
* @param request object of HttpServletRequest.
* @param storableCount integer containing count.
* @param containerMap Map containing data for contaner.
* @param objectName string containing the name of the object.
*/
|
This method checks whether sufficient storage locations are available
|
checkForSufficientAvailablePositions
|
{
"repo_name": "NCIP/catissue-core",
"path": "software/caTissue/modules/core/src/main/java/edu/wustl/catissuecore/action/shippingtracking/ShowShipmentReceivingAction.java",
"license": "bsd-3-clause",
"size": 11142
}
|
[
"edu.wustl.catissuecore.util.StorageContainerUtil",
"java.util.Map",
"javax.servlet.http.HttpServletRequest",
"org.apache.struts.action.ActionError",
"org.apache.struts.action.ActionErrors"
] |
import edu.wustl.catissuecore.util.StorageContainerUtil; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors;
|
import edu.wustl.catissuecore.util.*; import java.util.*; import javax.servlet.http.*; import org.apache.struts.action.*;
|
[
"edu.wustl.catissuecore",
"java.util",
"javax.servlet",
"org.apache.struts"
] |
edu.wustl.catissuecore; java.util; javax.servlet; org.apache.struts;
| 1,673,640
|
/**
 * Gets the Context invocation factory to handle the call execution.
 *
 * <p>Resolution order: with no delegate factory configured, the shared plain-call
 * factory is returned; otherwise an adapter for the channel-wrapped response type is
 * preferred, then an adapter for the raw response type; if neither exists, the
 * delegate cannot satisfy the required return types and an exception is thrown.
 *
 * @param configuration the invocation configuration (not referenced by this implementation).
 * @param responseType the response type.
 * @param annotations the method annotations.
 * @param retrofit the Retrofit instance.
 * @return the invocation factory.
 * @throws IllegalArgumentException if the delegate factory supports none of the
 *         required return types.
 */
@NotNull
@SuppressWarnings("UnusedParameters")
protected ContextInvocationFactory<Call<Object>, Object> getFactory(
    @NotNull final InvocationConfiguration configuration, @NotNull final Type responseType,
    @NotNull final Annotation[] annotations, @NotNull final Retrofit retrofit) {
  final CallAdapter.Factory delegateFactory = mDelegateFactory;
  // No delegate factory configured: use the default call invocation factory.
  if (delegateFactory == null) {
    return sCallInvocationFactory;
  }
  // First try to obtain a delegate adapter for the channel-wrapped response type.
  @SuppressWarnings("unchecked") final CallAdapter<Channel<?, ?>> channelAdapter =
      (CallAdapter<Channel<?, ?>>) delegateFactory.get(getChannelType(responseType), annotations,
      retrofit);
  if (channelAdapter != null) {
    return new ChannelAdapterInvocationFactory(
        asArgs(delegateFactory, responseType, annotations, retrofit), channelAdapter);
  }
  // Fall back to an adapter for the plain response body type.
  final CallAdapter<?> bodyAdapter = delegateFactory.get(responseType, annotations, retrofit);
  if (bodyAdapter != null) {
    return new BodyAdapterInvocationFactory(
        asArgs(delegateFactory, responseType, annotations, retrofit), bodyAdapter);
  }
  throw new IllegalArgumentException(
      "The delegate factory does not support any of the required return types: " + delegateFactory
      .getClass()
      .getName());
}
private static class BodyAdapterInvocation
extends TemplateContextInvocation<Call<Object>, Object> {
private final CallAdapter<?> mCallAdapter;
private BodyAdapterInvocation(@NotNull final CallAdapter<?> callAdapter) {
mCallAdapter = callAdapter;
}
|
@SuppressWarnings(STR) ContextInvocationFactory<Call<Object>, Object> function( @NotNull final InvocationConfiguration configuration, @NotNull final Type responseType, @NotNull final Annotation[] annotations, @NotNull final Retrofit retrofit) { final CallAdapter.Factory delegateFactory = mDelegateFactory; if (delegateFactory == null) { return sCallInvocationFactory; } @SuppressWarnings(STR) final CallAdapter<Channel<?, ?>> channelAdapter = (CallAdapter<Channel<?, ?>>) delegateFactory.get(getChannelType(responseType), annotations, retrofit); if (channelAdapter != null) { return new ChannelAdapterInvocationFactory( asArgs(delegateFactory, responseType, annotations, retrofit), channelAdapter); } final CallAdapter<?> bodyAdapter = delegateFactory.get(responseType, annotations, retrofit); if (bodyAdapter != null) { return new BodyAdapterInvocationFactory( asArgs(delegateFactory, responseType, annotations, retrofit), bodyAdapter); } throw new IllegalArgumentException( STR + delegateFactory .getClass() .getName()); } private static class BodyAdapterInvocation extends TemplateContextInvocation<Call<Object>, Object> { private final CallAdapter<?> mCallAdapter; private BodyAdapterInvocation(@NotNull final CallAdapter<?> callAdapter) { mCallAdapter = callAdapter; }
|
/**
* Gets the Context invocation factory to handle the call execution.
*
* @param configuration the invocation configuration.
* @param responseType the response type.
* @param annotations the method annotations.
* @param retrofit the Retrofit instance.
* @return the invocation factory.
*/
|
Gets the Context invocation factory to handle the call execution
|
getFactory
|
{
"repo_name": "davide-maestroni/jroutine",
"path": "android-retrofit/src/main/java/com/github/dm/jrt/android/retrofit/ContextAdapterFactory.java",
"license": "apache-2.0",
"size": 7406
}
|
[
"com.github.dm.jrt.android.core.invocation.ContextInvocationFactory",
"com.github.dm.jrt.android.core.invocation.TemplateContextInvocation",
"com.github.dm.jrt.core.channel.Channel",
"com.github.dm.jrt.core.config.InvocationConfiguration",
"com.github.dm.jrt.core.util.Reflection",
"java.lang.annotation.Annotation",
"java.lang.reflect.Type",
"org.jetbrains.annotations.NotNull"
] |
import com.github.dm.jrt.android.core.invocation.ContextInvocationFactory; import com.github.dm.jrt.android.core.invocation.TemplateContextInvocation; import com.github.dm.jrt.core.channel.Channel; import com.github.dm.jrt.core.config.InvocationConfiguration; import com.github.dm.jrt.core.util.Reflection; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import org.jetbrains.annotations.NotNull;
|
import com.github.dm.jrt.android.core.invocation.*; import com.github.dm.jrt.core.channel.*; import com.github.dm.jrt.core.config.*; import com.github.dm.jrt.core.util.*; import java.lang.annotation.*; import java.lang.reflect.*; import org.jetbrains.annotations.*;
|
[
"com.github.dm",
"java.lang",
"org.jetbrains.annotations"
] |
com.github.dm; java.lang; org.jetbrains.annotations;
| 2,178,656
|
/**
 * Add code that compiles the argument expressions.
 *
 * <p>The byte code leaves the arguments on the stack in order of:
 * positional arguments, key word arguments, this FuncallExpression, Environment.
 * This is the order required by {@link #invokeObjectMethod} and
 * {@link BaseFunction#call(List, Map, FuncallExpression, Environment)}.
 *
 * @param scope the variable scope used to allocate fresh internal variables
 * @param debugInfo debug information used when compiling sub-expressions
 * @param debugAccessors accessors emitting loads of the AST node and location
 * @param code the list of byte code appenders the generated code is added to
 * @throws EvalException if compiling an argument expression fails
 */
private void compileArguments(
    VariableScope scope,
    DebugInfo debugInfo,
    AstAccessors debugAccessors,
    List<ByteCodeAppender> code)
    throws EvalException {
  // Builder collecting the positional arguments.
  InternalVariable positionalsBuilder = scope.freshVariable(ImmutableList.Builder.class);
  append(code, ByteCodeMethodCalls.BCImmutableList.builder);
  code.add(positionalsBuilder.store());
  // Mutable map collecting the keyword arguments.
  InternalVariable keyWordArgs = scope.freshVariable(Map.class);
  append(code, NewObject.fromConstructor(HashMap.class).arguments());
  code.add(keyWordArgs.store());
  // Builder collecting duplicate keyword names for the later duplicates check.
  InternalVariable duplicatesBuilder =
      scope.freshVariable(new TypeDescription.ForLoadedType(ImmutableList.Builder.class));
  append(code, ByteCodeMethodCalls.BCImmutableList.builder);
  code.add(duplicatesBuilder.store());
  // Adds the stack top to the builder and drops the returned builder reference.
  StackManipulation builderAdd =
      new StackManipulation.Compound(
          ByteCodeMethodCalls.BCImmutableList.Builder.add, Removal.SINGLE);
  // add an object the function is called on first
  if (obj != null) {
    append(code, positionalsBuilder.load());
    code.add(obj.compile(scope, debugInfo));
    append(code, builderAdd);
  }
  // add all arguments to their respective builder/map
  for (Argument.Passed arg : args) {
    ByteCodeAppender value = arg.getValue().compile(scope, debugInfo);
    if (arg.isPositional()) {
      append(code, positionalsBuilder.load());
      code.add(value);
      append(code, builderAdd);
    } else if (arg.isStar()) {
      // expand the starArg by adding all its elements to the builder
      append(code, positionalsBuilder.load());
      code.add(value);
      append(
          code,
          TypeCasting.to(new TypeDescription.ForLoadedType(Iterable.class)),
          ByteCodeMethodCalls.BCImmutableList.Builder.addAll,
          Removal.SINGLE);
    } else if (arg.isStarStar()) {
      // **kwargs: merge the whole map, tracking duplicates with location info.
      append(code, keyWordArgs.load());
      code.add(value);
      append(code, duplicatesBuilder.load(), debugAccessors.loadLocation, addKeywordArgs);
    } else {
      // Named argument: put name -> value into the map, tracking duplicates.
      append(code, keyWordArgs.load(), new TextConstant(arg.getName()));
      code.add(value);
      append(code, duplicatesBuilder.load(), addKeywordArg);
    }
  }
  append(
      code,
      // check for duplicates in the key word arguments
      duplicatesBuilder.load(),
      new TextConstant(func.getName()),
      debugAccessors.loadLocation,
      checkDuplicates,
      // load the arguments in the correct order for invokeObjectMethod and BaseFunction.call
      positionalsBuilder.load(),
      ByteCodeMethodCalls.BCImmutableList.Builder.build,
      keyWordArgs.load(),
      ByteCodeMethodCalls.BCImmutableMap.copyOf,
      debugAccessors.loadAstNode,
      TypeCasting.to(new TypeDescription.ForLoadedType(FuncallExpression.class)),
      scope.loadEnvironment());
}
|
void function( VariableScope scope, DebugInfo debugInfo, AstAccessors debugAccessors, List<ByteCodeAppender> code) throws EvalException { InternalVariable positionalsBuilder = scope.freshVariable(ImmutableList.Builder.class); append(code, ByteCodeMethodCalls.BCImmutableList.builder); code.add(positionalsBuilder.store()); InternalVariable keyWordArgs = scope.freshVariable(Map.class); append(code, NewObject.fromConstructor(HashMap.class).arguments()); code.add(keyWordArgs.store()); InternalVariable duplicatesBuilder = scope.freshVariable(new TypeDescription.ForLoadedType(ImmutableList.Builder.class)); append(code, ByteCodeMethodCalls.BCImmutableList.builder); code.add(duplicatesBuilder.store()); StackManipulation builderAdd = new StackManipulation.Compound( ByteCodeMethodCalls.BCImmutableList.Builder.add, Removal.SINGLE); if (obj != null) { append(code, positionalsBuilder.load()); code.add(obj.compile(scope, debugInfo)); append(code, builderAdd); } for (Argument.Passed arg : args) { ByteCodeAppender value = arg.getValue().compile(scope, debugInfo); if (arg.isPositional()) { append(code, positionalsBuilder.load()); code.add(value); append(code, builderAdd); } else if (arg.isStar()) { append(code, positionalsBuilder.load()); code.add(value); append( code, TypeCasting.to(new TypeDescription.ForLoadedType(Iterable.class)), ByteCodeMethodCalls.BCImmutableList.Builder.addAll, Removal.SINGLE); } else if (arg.isStarStar()) { append(code, keyWordArgs.load()); code.add(value); append(code, duplicatesBuilder.load(), debugAccessors.loadLocation, addKeywordArgs); } else { append(code, keyWordArgs.load(), new TextConstant(arg.getName())); code.add(value); append(code, duplicatesBuilder.load(), addKeywordArg); } } append( code, duplicatesBuilder.load(), new TextConstant(func.getName()), debugAccessors.loadLocation, checkDuplicates, positionalsBuilder.load(), ByteCodeMethodCalls.BCImmutableList.Builder.build, keyWordArgs.load(), ByteCodeMethodCalls.BCImmutableMap.copyOf, 
debugAccessors.loadAstNode, TypeCasting.to(new TypeDescription.ForLoadedType(FuncallExpression.class)), scope.loadEnvironment()); }
|
/**
* Add code that compiles the argument expressions.
*
* <p>The byte code leaves the arguments on the stack in order of:
* positional arguments, key word arguments, this FuncallExpression, Environment
* This is the order required by {@link #invokeObjectMethod} and
* {@link BaseFunction#call(List, Map, FuncallExpression, Environment)}.
*/
|
Add code that compiles the argument expressions. The byte code leaves the arguments on the stack in order of: positional arguments, key word arguments, this FuncallExpression, Environment This is the order required by <code>#invokeObjectMethod</code> and <code>BaseFunction#call(List, Map, FuncallExpression, Environment)</code>
|
compileArguments
|
{
"repo_name": "UrbanCompass/bazel",
"path": "src/main/java/com/google/devtools/build/lib/syntax/FuncallExpression.java",
"license": "apache-2.0",
"size": 34906
}
|
[
"com.google.common.collect.ImmutableList",
"com.google.devtools.build.lib.syntax.compiler.ByteCodeMethodCalls",
"com.google.devtools.build.lib.syntax.compiler.ByteCodeUtils",
"com.google.devtools.build.lib.syntax.compiler.DebugInfo",
"com.google.devtools.build.lib.syntax.compiler.NewObject",
"com.google.devtools.build.lib.syntax.compiler.Variable",
"com.google.devtools.build.lib.syntax.compiler.VariableScope",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"net.bytebuddy.description.type.TypeDescription",
"net.bytebuddy.implementation.bytecode.ByteCodeAppender",
"net.bytebuddy.implementation.bytecode.Removal",
"net.bytebuddy.implementation.bytecode.StackManipulation",
"net.bytebuddy.implementation.bytecode.assign.TypeCasting",
"net.bytebuddy.implementation.bytecode.constant.TextConstant"
] |
import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.syntax.compiler.ByteCodeMethodCalls; import com.google.devtools.build.lib.syntax.compiler.ByteCodeUtils; import com.google.devtools.build.lib.syntax.compiler.DebugInfo; import com.google.devtools.build.lib.syntax.compiler.NewObject; import com.google.devtools.build.lib.syntax.compiler.Variable; import com.google.devtools.build.lib.syntax.compiler.VariableScope; import java.util.HashMap; import java.util.List; import java.util.Map; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.implementation.bytecode.ByteCodeAppender; import net.bytebuddy.implementation.bytecode.Removal; import net.bytebuddy.implementation.bytecode.StackManipulation; import net.bytebuddy.implementation.bytecode.assign.TypeCasting; import net.bytebuddy.implementation.bytecode.constant.TextConstant;
|
import com.google.common.collect.*; import com.google.devtools.build.lib.syntax.compiler.*; import java.util.*; import net.bytebuddy.description.type.*; import net.bytebuddy.implementation.bytecode.*; import net.bytebuddy.implementation.bytecode.assign.*; import net.bytebuddy.implementation.bytecode.constant.*;
|
[
"com.google.common",
"com.google.devtools",
"java.util",
"net.bytebuddy.description",
"net.bytebuddy.implementation"
] |
com.google.common; com.google.devtools; java.util; net.bytebuddy.description; net.bytebuddy.implementation;
| 1,538,239
|
/**
 * Contains the location of the XSLT file.
 *
 * @return the location of the XSLT source, stored under the {@code LOCATION} property
 */
@Property(LOCATION)
public String getSourceLocation();
|
@Property(LOCATION) String function();
|
/**
* Contains the location of the XSLT file
*/
|
Contains the location of the XSLT file
|
getSourceLocation
|
{
"repo_name": "jsight/windup",
"path": "rules-xml/api/src/main/java/org/jboss/windup/rules/apps/xml/model/XsltTransformationModel.java",
"license": "epl-1.0",
"size": 2818
}
|
[
"org.jboss.windup.graph.Property"
] |
import org.jboss.windup.graph.Property;
|
import org.jboss.windup.graph.*;
|
[
"org.jboss.windup"
] |
org.jboss.windup;
| 2,899,401
|
/**
 * Authenticated resource that returns contacts the user has previously sent to
 * or received from. This is a paged resource; passing null for all paging and
 * filter parameters returns the first page with default settings.
 *
 * @return {@code CoinbaseContacts} the user has previously sent to or received from
 * @throws IOException if the underlying HTTP request fails
 */
public CoinbaseContacts getCoinbaseContacts() throws IOException {
  return getCoinbaseContacts(null, null, null);
}
|
CoinbaseContacts function() throws IOException { return getCoinbaseContacts(null, null, null); }
|
/**
* Authenticated resource that returns contacts the user has previously sent to or received from.
* This is a paged resource and will return the first page by default.
*
* @return {@code CoinbaseContacts} the user has previously sent to or received from.
* @throws IOException
* @see <a
* href="https://coinbase.com/api/doc/1.0/contacts/index.html">coinbase.com/api/doc/1.0/contacts/index.html</a>
*/
|
Authenticated resource that returns contacts the user has previously sent to or received from. This is a paged resource and will return the first page by default
|
getCoinbaseContacts
|
{
"repo_name": "andre77/XChange",
"path": "xchange-coinbase/src/main/java/org/knowm/xchange/coinbase/service/CoinbaseAccountServiceRaw.java",
"license": "mit",
"size": 28855
}
|
[
"java.io.IOException",
"org.knowm.xchange.coinbase.dto.account.CoinbaseContacts"
] |
import java.io.IOException; import org.knowm.xchange.coinbase.dto.account.CoinbaseContacts;
|
import java.io.*; import org.knowm.xchange.coinbase.dto.account.*;
|
[
"java.io",
"org.knowm.xchange"
] |
java.io; org.knowm.xchange;
| 2,117,231
|
/**
 * Stores a reporter-to-annotation mapping, normalizing the key so lookups
 * are insensitive to case and surrounding whitespace.
 *
 * @param reporterName the reporter's name (any case; may have padding)
 * @param annotation the annotation to associate with the reporter
 */
private void storeReporterMapping(String reporterName, ReporterAnnotation annotation) {
    // Normalize exactly as lookups expect: upper-case first, then trim.
    final String key = reporterName.toUpperCase().trim();
    reporterMap.put(key, annotation);
}
|
void function(String reporterName, ReporterAnnotation annotation) { String reporterNameUC = reporterName.toUpperCase().trim(); reporterMap.put(reporterNameUC, annotation); }
|
/**
* Method handles case issues when storing to the hash map
* @param reporterName
* @param annotation
*/
|
Method handles case issues when storing to the hash map
|
storeReporterMapping
|
{
"repo_name": "NCIP/i-spy",
"path": "src/gov/nih/nci/ispy/service/annotation/GeneExprFileBasedAnnotationService.java",
"license": "bsd-3-clause",
"size": 12119
}
|
[
"gov.nih.nci.caintegrator.application.service.annotation.ReporterAnnotation"
] |
import gov.nih.nci.caintegrator.application.service.annotation.ReporterAnnotation;
|
import gov.nih.nci.caintegrator.application.service.annotation.*;
|
[
"gov.nih.nci"
] |
gov.nih.nci;
| 1,133,561
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.