| method (string, lengths 13–441k) | clean_method (string, lengths 7–313k) | doc (string, lengths 17–17.3k) | comment (string, lengths 3–1.42k) | method_name (string, lengths 1–273) | extra (dict) | imports (list) | imports_info (string, lengths 19–34.8k) | cluster_imports_info (string, lengths 15–3.66k) | libraries (list) | libraries_info (string, lengths 6–661) | id (int64, 0–2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
public void storeGroupGrading() {
FacesContext facesContext = FacesContext.getCurrentInstance();
ResourceBundle bundle = ResourceBundle.getBundle("messages",
facesContext.getViewRoot().getLocale());
Exam exam = null;
Group group = null;
try {
group = gradingService.getGroup(course, formGroup);
if (group == null) {
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_FATAL, bundle.getString("common.error"),
bundle.getString("gradings.unknownGroup")));
return;
}
final Boolean overwrite = overwriting;
overwriting = false;
if (formExam == -1) {
log.debug("Storing pabo grading: " + formGrading);
PaboGrade paboGrade = PaboGrade.valueOf(formGrading);
gradingService.storePaboGrade(course, user, group,
paboGrade, formPrivateComment, formPublicComment,
overwrite);
} else {
exam = gradingService.getExam(course, formExam);
if (exam == null) {
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_FATAL, bundle.getString("common.error"),
bundle.getString("gradings.unknownExam")));
return;
}
BigDecimal grading = null;
if (formGrading != null) {
grading = new BigDecimal(formGrading.replace(',', '.'));
}
gradingService.storeGrade(course, user, exam, group,
grading, formPrivateComment, formPublicComment,
overwrite);
}
} catch (IllegalAccessException e) {
if (e.getMessage().equals("NOT_GRADABLE")) {
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_FATAL, bundle.getString("common.error"),
bundle.getString("common.notGradable")));
} else {
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_FATAL, bundle.getString("common.error"),
bundle.getString("common.noAccess")));
}
return;
} catch (InvalidGradeException | NumberFormatException e) {
String errorMessage = bundle.getString("gradings.invalidGrading");
if (exam != null) {
if (exam.hasGradeType(GradeType.Boolean)) {
errorMessage += bundle.getString(
"gradings.invalidGrading.boolean");
} else if (exam.hasGradeType(GradeType.Numeric)) {
errorMessage += bundle.getString(
"gradings.invalidGrading.numeric");
} else if (exam.hasGradeType(GradeType.Percent)) {
errorMessage += bundle.getString(
"gradings.invalidGrading.percent");
} else if (exam.hasGradeType(GradeType.Point)) {
errorMessage += bundle.getString(
"gradings.invalidGrading.point");
errorMessage = MessageFormat.format(errorMessage,
exam.getMaxPoints());
}
}
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_FATAL, bundle.getString("common.error"),
errorMessage));
return;
} catch (AlreadyGradedException e) {
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_WARN, bundle.getString("common.warning"),
bundle.getString("gradings.overwriting")));
overwriting = true;
return;
}
resetFormValues();
gradingController.resetExamGradings(group);
facesContext.addMessage(null, new FacesMessage(FacesMessage
.SEVERITY_INFO, bundle.getString("common.success"),
bundle.getString("gradings.stored")));
}
|
void function() { FacesContext facesContext = FacesContext.getCurrentInstance(); ResourceBundle bundle = ResourceBundle.getBundle(STR, facesContext.getViewRoot().getLocale()); Exam exam = null; Group group = null; try { group = gradingService.getGroup(course, formGroup); if (group == null) { facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_FATAL, bundle.getString(STR), bundle.getString(STR))); return; } final Boolean overwrite = overwriting; overwriting = false; if (formExam == -1) { log.debug(STR + formGrading); PaboGrade paboGrade = PaboGrade.valueOf(formGrading); gradingService.storePaboGrade(course, user, group, paboGrade, formPrivateComment, formPublicComment, overwrite); } else { exam = gradingService.getExam(course, formExam); if (exam == null) { facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_FATAL, bundle.getString(STR), bundle.getString(STR))); return; } BigDecimal grading = null; if (formGrading != null) { grading = new BigDecimal(formGrading.replace(',', '.')); } gradingService.storeGrade(course, user, exam, group, grading, formPrivateComment, formPublicComment, overwrite); } } catch (IllegalAccessException e) { if (e.getMessage().equals(STR)) { facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_FATAL, bundle.getString(STR), bundle.getString(STR))); } else { facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_FATAL, bundle.getString(STR), bundle.getString(STR))); } return; } catch (InvalidGradeException NumberFormatException e) { String errorMessage = bundle.getString(STR); if (exam != null) { if (exam.hasGradeType(GradeType.Boolean)) { errorMessage += bundle.getString( STR); } else if (exam.hasGradeType(GradeType.Numeric)) { errorMessage += bundle.getString( STR); } else if (exam.hasGradeType(GradeType.Percent)) { errorMessage += bundle.getString( STR); } else if (exam.hasGradeType(GradeType.Point)) { errorMessage += bundle.getString( STR); errorMessage = MessageFormat.format(errorMessage, exam.getMaxPoints()); } } facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_FATAL, bundle.getString(STR), errorMessage)); return; } catch (AlreadyGradedException e) { facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_WARN, bundle.getString(STR), bundle.getString(STR))); overwriting = true; return; } resetFormValues(); gradingController.resetExamGradings(group); facesContext.addMessage(null, new FacesMessage(FacesMessage .SEVERITY_INFO, bundle.getString(STR), bundle.getString(STR))); }
|
/**
* Stores the group grading for the given course.
*/
|
Stores the group grading for the given course
|
storeGroupGrading
|
{
"repo_name": "stefanoberdoerfer/exmatrikulator",
"path": "src/main/java/de/unibremen/opensores/controller/grading/GradingInsertController.java",
"license": "agpl-3.0",
"size": 17031
}
|
[
"de.unibremen.opensores.exception.AlreadyGradedException",
"de.unibremen.opensores.exception.InvalidGradeException",
"de.unibremen.opensores.model.Exam",
"de.unibremen.opensores.model.GradeType",
"de.unibremen.opensores.model.Group",
"de.unibremen.opensores.model.PaboGrade",
"java.math.BigDecimal",
"java.text.MessageFormat",
"java.util.ResourceBundle",
"javax.faces.application.FacesMessage",
"javax.faces.context.FacesContext"
] |
import de.unibremen.opensores.exception.AlreadyGradedException; import de.unibremen.opensores.exception.InvalidGradeException; import de.unibremen.opensores.model.Exam; import de.unibremen.opensores.model.GradeType; import de.unibremen.opensores.model.Group; import de.unibremen.opensores.model.PaboGrade; import java.math.BigDecimal; import java.text.MessageFormat; import java.util.ResourceBundle; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext;
|
import de.unibremen.opensores.exception.*; import de.unibremen.opensores.model.*; import java.math.*; import java.text.*; import java.util.*; import javax.faces.application.*; import javax.faces.context.*;
|
[
"de.unibremen.opensores",
"java.math",
"java.text",
"java.util",
"javax.faces"
] |
de.unibremen.opensores; java.math; java.text; java.util; javax.faces;
| 2,815,764
|
void onFileChange(final FileChangeObserver observer,final File file);
|
void onFileChange(final FileChangeObserver observer,final File file);
|
/**
* File changed Event.
*
* @param file The file changed
*/
|
File changed Event
|
onFileChange
|
{
"repo_name": "leapframework/framework",
"path": "base/lang/src/main/java/leap/lang/io/FileChangeListener.java",
"license": "apache-2.0",
"size": 2608
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,245,385
|
private static String buildWhere(Logger logger, String signature, PaymentSearchFilter filter,
List<String> paramNames, List<Object> paramValues) throws OPMException {
StringBuilder sb = new StringBuilder();
if (filter.getAmount() != null && filter.getAmountComparisonType() != null) {
Helper.appendCondition(sb,
"e.amount " + convertAmountComparisonType(logger, signature, filter.getAmountComparisonType())
+ " :amount", filter.getAmount(), "amount", paramNames, paramValues);
}
if (filter.getDepositDate() != null && filter.getDepositComparisonType() != null) {
Helper.appendCondition(sb,
"e.depositDate " + convertDepositComparisonType(logger, signature, filter.getDepositComparisonType())
+ " :depositDate", filter.getDepositDate(), "depositDate", paramNames, paramValues);
}
Helper.appendCondition(sb, "e.batchNumber LIKE :batchNumber", filter.getBatchNumber(), "batchNumber",
paramNames, paramValues);
Helper.appendCondition(sb, "e.blockNumber LIKE :blockNumber", filter.getBlockNumber(), "blockNumber",
paramNames, paramValues);
Helper.appendCondition(sb, "e.sequenceNumber LIKE :sequenceNumber", filter.getSequenceNumber(),
"sequenceNumber", paramNames, paramValues);
Helper.appendCondition(sb, "e.paymentStatus.name = :paymentStatus", filter.getPaymentStatus(), "paymentStatus",
paramNames, paramValues);
Boolean resolvedSuspense = filter.getResolvedSuspense();
if (resolvedSuspense != null) {
sb.append(Helper.AND);
sb.append("e.paymentType " + (resolvedSuspense ? " = " : "<>") + "'SUSPENDED_PAYMENT'");
}
return sb.toString();
}
|
static String function(Logger logger, String signature, PaymentSearchFilter filter, List<String> paramNames, List<Object> paramValues) throws OPMException { StringBuilder sb = new StringBuilder(); if (filter.getAmount() != null && filter.getAmountComparisonType() != null) { Helper.appendCondition(sb, STR + convertAmountComparisonType(logger, signature, filter.getAmountComparisonType()) + STR, filter.getAmount(), STR, paramNames, paramValues); } if (filter.getDepositDate() != null && filter.getDepositComparisonType() != null) { Helper.appendCondition(sb, STR + convertDepositComparisonType(logger, signature, filter.getDepositComparisonType()) + STR, filter.getDepositDate(), STR, paramNames, paramValues); } Helper.appendCondition(sb, STR, filter.getBatchNumber(), STR, paramNames, paramValues); Helper.appendCondition(sb, STR, filter.getBlockNumber(), STR, paramNames, paramValues); Helper.appendCondition(sb, STR, filter.getSequenceNumber(), STR, paramNames, paramValues); Helper.appendCondition(sb, STR, filter.getPaymentStatus(), STR, paramNames, paramValues); Boolean resolvedSuspense = filter.getResolvedSuspense(); if (resolvedSuspense != null) { sb.append(Helper.AND); sb.append(STR + (resolvedSuspense ? STR : "<>") + STR); } return sb.toString(); }
|
/**
* Builds the WHERE string.
*
* @param logger
* the logger object.
* @param signature
* the signature of the method to be logged.
* @param filter
* the filter
* @param paramNames
* the parameter name
* @param paramValues
* the parameter values
*
* @return the WHERE string.
*
* @throws OPMException
* if any error occurs
*/
|
Builds the WHERE string
|
buildWhere
|
{
"repo_name": "NASA-Tournament-Lab/CoECI-OPM-Service-Credit-Redeposit-Deposit-Application",
"path": "Code/Batch_Processing/src/java/ejb/gov/opm/scrd/services/impl/PaymentServiceImpl.java",
"license": "apache-2.0",
"size": 21638
}
|
[
"gov.opm.scrd.entities.application.PaymentSearchFilter",
"gov.opm.scrd.entities.common.Helper",
"gov.opm.scrd.services.OPMException",
"java.util.List",
"org.jboss.logging.Logger"
] |
import gov.opm.scrd.entities.application.PaymentSearchFilter; import gov.opm.scrd.entities.common.Helper; import gov.opm.scrd.services.OPMException; import java.util.List; import org.jboss.logging.Logger;
|
import gov.opm.scrd.entities.application.*; import gov.opm.scrd.entities.common.*; import gov.opm.scrd.services.*; import java.util.*; import org.jboss.logging.*;
|
[
"gov.opm.scrd",
"java.util",
"org.jboss.logging"
] |
gov.opm.scrd; java.util; org.jboss.logging;
| 2,017,647
|
public float getCurrentUnscaledWidth(){
return this.getReferenceComp().getWidthXY(TransformSpace.RELATIVE_TO_PARENT);
}
|
float function(){ return this.getReferenceComp().getWidthXY(TransformSpace.RELATIVE_TO_PARENT); }
|
/**
* Gets the current unscaled width.
*
* @return the current unscaled width
*/
|
Gets the current unscaled width
|
getCurrentUnscaledWidth
|
{
"repo_name": "Twelve-60/mt4j",
"path": "src/org/mt4j/input/gestureAction/DefaultButtonClickAction.java",
"license": "gpl-2.0",
"size": 6789
}
|
[
"org.mt4j.components.TransformSpace"
] |
import org.mt4j.components.TransformSpace;
|
import org.mt4j.components.*;
|
[
"org.mt4j.components"
] |
org.mt4j.components;
| 339,966
|
public void clearMessage() {
currentMessage = new MovingMessage();
}
|
void function() { currentMessage = new MovingMessage(); }
|
/**
* To destroy a half-constructed message.
*/
|
To destroy a half-constructed message
|
clearMessage
|
{
"repo_name": "greenmail-mail-test/greenmail",
"path": "greenmail-core/src/main/java/com/icegreen/greenmail/smtp/SmtpState.java",
"license": "apache-2.0",
"size": 681
}
|
[
"com.icegreen.greenmail.mail.MovingMessage"
] |
import com.icegreen.greenmail.mail.MovingMessage;
|
import com.icegreen.greenmail.mail.*;
|
[
"com.icegreen.greenmail"
] |
com.icegreen.greenmail;
| 559,792
|
TimePicker timePicker = new TimePicker(getContext());
timePicker.setIs24HourView(android.text.format.DateFormat.is24HourFormat(getContext()));
Calendar calendar = getTime();
timePicker.setCurrentHour(calendar.get(Calendar.HOUR_OF_DAY));
timePicker.setCurrentMinute(calendar.get(Calendar.MINUTE));
timePicker.setOnTimeChangedListener(this);
return timePicker;
}
|
TimePicker timePicker = new TimePicker(getContext()); timePicker.setIs24HourView(android.text.format.DateFormat.is24HourFormat(getContext())); Calendar calendar = getTime(); timePicker.setCurrentHour(calendar.get(Calendar.HOUR_OF_DAY)); timePicker.setCurrentMinute(calendar.get(Calendar.MINUTE)); timePicker.setOnTimeChangedListener(this); return timePicker; }
|
/**
* Produces a TimePicker set to the time produced by {@link #getTime()}. When overriding be sure to call the super.
*
* @return a DatePicker with the date set
*/
|
Produces a TimePicker set to the time produced by <code>#getTime()</code>. When overriding be sure to call the super
|
onCreateDialogView
|
{
"repo_name": "NACC-Aus/Photomon-Android",
"path": "AndroidStudio/NACC/app/src/main/java/com/appiphany/nacc/ui/controls/TimePreference.java",
"license": "gpl-3.0",
"size": 8277
}
|
[
"android.widget.TimePicker",
"java.text.DateFormat",
"java.util.Calendar"
] |
import android.widget.TimePicker; import java.text.DateFormat; import java.util.Calendar;
|
import android.widget.*; import java.text.*; import java.util.*;
|
[
"android.widget",
"java.text",
"java.util"
] |
android.widget; java.text; java.util;
| 2,531,671
|
@Pure
@SuppressWarnings("unchecked")
public static <T> T[] newInstance(Class<T> clazz, int size) {
if (size < 0) {
throw new IndexOutOfBoundsException(size + "<0"); //$NON-NLS-1$
}
return (T[]) Array.newInstance(clazz, size);
}
|
@SuppressWarnings(STR) static <T> T[] function(Class<T> clazz, int size) { if (size < 0) { throw new IndexOutOfBoundsException(size + "<0"); } return (T[]) Array.newInstance(clazz, size); }
|
/** Create an instance of array.
*
* @param <T> is the type of the elements.
* @param clazz is the type of the elements.
* @param size is the size of the new array.
* @return the new array.
* @throws IndexOutOfBoundsException if the {@code size} is negative.
*/
|
Create an instance of array
|
newInstance
|
{
"repo_name": "gallandarakhneorg/afc",
"path": "core/util/src/main/java/org/arakhne/afc/util/ArrayUtil.java",
"license": "apache-2.0",
"size": 37810
}
|
[
"java.lang.reflect.Array"
] |
import java.lang.reflect.Array;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 2,325,632
|
public double gamma(BondFuturesOptionMarginSecurity security, BlackBondFuturesProviderInterface black) {
ArgumentChecker.notNull(security, "security");
ArgumentChecker.notNull(black, "Black data");
double priceFutures = _methodFutures.price(security.getUnderlyingFuture(), black.getIssuerProvider());
return gammaFromUnderlyingPrice(security, black, priceFutures);
}
|
double function(BondFuturesOptionMarginSecurity security, BlackBondFuturesProviderInterface black) { ArgumentChecker.notNull(security, STR); ArgumentChecker.notNull(black, STR); double priceFutures = _methodFutures.price(security.getUnderlyingFuture(), black.getIssuerProvider()); return gammaFromUnderlyingPrice(security, black, priceFutures); }
|
/**
* The theoretical gamma in the Black model. The underlying futures price is computed from the curves
* @param security The future option security, not null
* @param black The curve and Black volatility data, not null
* @return The gamma.
*/
|
The theoretical gamma in the Black model. The underlying futures price is computed from the curves
|
gamma
|
{
"repo_name": "ChinaQuants/OG-Platform",
"path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/interestrate/future/provider/BondFutureOptionMarginSecurityBlackPriceMethod.java",
"license": "apache-2.0",
"size": 15826
}
|
[
"com.opengamma.analytics.financial.interestrate.future.derivative.BondFuturesOptionMarginSecurity",
"com.opengamma.analytics.financial.provider.description.interestrate.BlackBondFuturesProviderInterface",
"com.opengamma.util.ArgumentChecker"
] |
import com.opengamma.analytics.financial.interestrate.future.derivative.BondFuturesOptionMarginSecurity; import com.opengamma.analytics.financial.provider.description.interestrate.BlackBondFuturesProviderInterface; import com.opengamma.util.ArgumentChecker;
|
import com.opengamma.analytics.financial.interestrate.future.derivative.*; import com.opengamma.analytics.financial.provider.description.interestrate.*; import com.opengamma.util.*;
|
[
"com.opengamma.analytics",
"com.opengamma.util"
] |
com.opengamma.analytics; com.opengamma.util;
| 2,811,356
|
@Nullable RequestParameter cookieParameter(String name);
|
@Nullable RequestParameter cookieParameter(String name);
|
/**
* Get cookie parameter by name
*
* @param name Parameter name
* @return
*/
|
Get cookie parameter by name
|
cookieParameter
|
{
"repo_name": "mystdeim/vertx-web",
"path": "vertx-web-api-contract/src/main/java/io/vertx/ext/web/api/RequestParameters.java",
"license": "apache-2.0",
"size": 1761
}
|
[
"io.vertx.codegen.annotations.Nullable"
] |
import io.vertx.codegen.annotations.Nullable;
|
import io.vertx.codegen.annotations.*;
|
[
"io.vertx.codegen"
] |
io.vertx.codegen;
| 396,223
|
public static Part createPart(Element xml) {
Part part;
try {
part = new Part(xml);
} catch (Exception e) {
e.printStackTrace();
return null;
}
return part;
}
|
static Part function(Element xml) { Part part; try { part = new Part(xml); } catch (Exception e) { e.printStackTrace(); return null; } return part; }
|
/**
* part factory
* @param xml
* @return
*/
|
part factory
|
createPart
|
{
"repo_name": "cemfi/meico",
"path": "src/meico/mpm/elements/Part.java",
"license": "gpl-3.0",
"size": 9838
}
|
[
"nu.xom.Element"
] |
import nu.xom.Element;
|
import nu.xom.*;
|
[
"nu.xom"
] |
nu.xom;
| 778,844
|
static ParsedDateTime parse ( final CharSequence seq )
throws CalendarParseException
{
Preconditions.checkNotNull(seq, "value required");
String floating = seq.toString().toUpperCase();
boolean utc = false;
if ( floating.endsWith("Z") ) {
floating = floating.substring(0, floating.length() - 1);
utc = true;
}
try {
return new ParsedDateTime(
LocalDateTime.from(DATE_TIME_FORMATTER.parse(floating)), utc);
} catch ( DateTimeParseException e ) {
throw new CalendarParseException("invalid date-time: " + seq, e);
}
}
|
static ParsedDateTime parse ( final CharSequence seq ) throws CalendarParseException { Preconditions.checkNotNull(seq, STR); String floating = seq.toString().toUpperCase(); boolean utc = false; if ( floating.endsWith("Z") ) { floating = floating.substring(0, floating.length() - 1); utc = true; } try { return new ParsedDateTime( LocalDateTime.from(DATE_TIME_FORMATTER.parse(floating)), utc); } catch ( DateTimeParseException e ) { throw new CalendarParseException(STR + seq, e); } }
|
/**
* Parses a date-time. Does not support leap seconds, if a seconds field of 60
* is encountered, then an exception will be thrown.
*
* @param seq the value to parse. Must be non {@code null}.
* @return a date-time. Never {@code null}.
* @throws CalendarParseException if {@code seq} does not represent a valid
* date-time
*/
|
Parses a date-time. Does not support leap seconds, if a seconds field of 60 is encountered, then an exception will be thrown
|
parse
|
{
"repo_name": "calebrichardson/spiff",
"path": "src/main/java/com/outerspacecat/icalendar/ParsedDateTime.java",
"license": "apache-2.0",
"size": 4014
}
|
[
"com.google.common.base.Preconditions",
"java.time.LocalDateTime",
"java.time.format.DateTimeParseException"
] |
import com.google.common.base.Preconditions; import java.time.LocalDateTime; import java.time.format.DateTimeParseException;
|
import com.google.common.base.*; import java.time.*; import java.time.format.*;
|
[
"com.google.common",
"java.time"
] |
com.google.common; java.time;
| 949,683
|
@SuppressWarnings("unchecked")
private String handleIdomaarMessage(final String messageType, final String properties, final String entities) {
// write all data from the server to a file
// logger.info(messageType + "\t" + properties + "\t" + entities);
// create an jSON object from the String
final JSONObject jOP = (JSONObject) JSONValue.parse(properties);
final JSONObject jOE = (JSONObject) JSONValue.parse(entities);
// merge the different jsonObjects and correct missing itemIDs
jOP.putAll(jOE);
Object itemID = jOP.get("itemID");
if (itemID == null) {
jOP.put("itemID", 0);
}
// define a response object
String response = null;
if ("impression".equalsIgnoreCase(messageType) || "recommendation".equalsIgnoreCase(messageType)) {
// parse the type of the event
final RecommenderItem item = RecommenderItem.parseEventNotification(jOP.toJSONString());
final String eventNotificationType = messageType;
// impression refers to articles read by the user
if ("impression".equalsIgnoreCase(eventNotificationType) || "recommendation".equalsIgnoreCase(eventNotificationType)) {
// we mark this information in the article table
if (item.getItemID() != null) {
// new items shall be added to the list of items
recommenderItemTable.handleItemUpdate(item);
item.setNumberOfRequestedResults(6);
response = "handle impression eventNotification successful";
boolean recommendationExpected = false;
if (properties.contains("\"event_type\": \"recommendation_request\"")) {
recommendationExpected = true;
}
if (recommendationExpected) {
List<Long> suggestedItemIDs = recommenderItemTable.getLastItems(item);
response = "{" + "\"recs\": {" + "\"ints\": {" + "\"3\": " + suggestedItemIDs + "}" + "}}";
}
}
// click refers to recommendations clicked by the user
} else if ("click".equalsIgnoreCase(eventNotificationType)) {
// we mark this information in the article table
if (item.getItemID() != null) {
// new items shall be added to the list of items
recommenderItemTable.handleItemUpdate(item);
response = "handle impression eventNotification successful";
}
response = "handle click eventNotification successful";
} else {
System.out.println("unknown event-type: "
+ eventNotificationType + " (message ignored)");
}
} else if ("error_notification".equalsIgnoreCase(messageType)) {
System.out.println("error-notification: " + jOP.toString() + jOE.toJSONString());
} else {
System.out.println("unknown MessageType: " + messageType);
// Error handling
logger.info(jOP.toString() + jOE.toJSONString());
// this.contestRecommender.error(jObj.toString());
}
return response;
}
|
@SuppressWarnings(STR) String function(final String messageType, final String properties, final String entities) { final JSONObject jOP = (JSONObject) JSONValue.parse(properties); final JSONObject jOE = (JSONObject) JSONValue.parse(entities); jOP.putAll(jOE); Object itemID = jOP.get(STR); if (itemID == null) { jOP.put(STR, 0); } String response = null; if (STR.equalsIgnoreCase(messageType) STR.equalsIgnoreCase(messageType)) { final RecommenderItem item = RecommenderItem.parseEventNotification(jOP.toJSONString()); final String eventNotificationType = messageType; if (STR.equalsIgnoreCase(eventNotificationType) STR.equalsIgnoreCase(eventNotificationType)) { if (item.getItemID() != null) { recommenderItemTable.handleItemUpdate(item); item.setNumberOfRequestedResults(6); response = STR; boolean recommendationExpected = false; if (properties.contains("\"event_type\STRrecommendation_request\STR{STR\"recs\": {STR\"ints\": {STR\"3\": STR}STR}}STRclick".equalsIgnoreCase(eventNotificationType)) { if (item.getItemID() != null) { recommenderItemTable.handleItemUpdate(item); response = STR; } response = "handle click eventNotification successfulSTRunknown event-type: STR (message ignored)STRerror_notificationSTRerror-notification: STRunknown MessageType: " + messageType); logger.info(jOP.toString() + jOE.toJSONString()); } return response; }
|
/**
* Method to handle incoming messages from the server.
*
* @param messageType
* the messageType of the incoming contest server message.
* @param properties
*
* @param entities
* @return the response to the contest server
*/
|
Method to handle incoming messages from the server
|
handleIdomaarMessage
|
{
"repo_name": "jasjisdo/spark-newsreel-recommender",
"path": "epen-news-reel-template/src/main/java/de/dailab/plistacontest/client/ContestHandler.java",
"license": "apache-2.0",
"size": 12960
}
|
[
"org.json.simple.JSONObject",
"org.json.simple.JSONValue"
] |
import org.json.simple.JSONObject; import org.json.simple.JSONValue;
|
import org.json.simple.*;
|
[
"org.json.simple"
] |
org.json.simple;
| 1,866,827
|
boolean verifyPasswordResetToken(String username, String token) throws InvalidTokenException;
|
boolean verifyPasswordResetToken(String username, String token) throws InvalidTokenException;
|
/**
* Verifies if the password reset token of a user is valid. Removes the token if it is invalid.
*
* @param username the username of the user
* @param token the content of the token
* @return true if the token is
* @throws InvalidTokenException if the user does not have a token assigned
*/
|
Verifies if the password reset token of a user is valid. Removes the token if it is invalid
|
verifyPasswordResetToken
|
{
"repo_name": "Morbrolhc/kanbanboard",
"path": "kanbanboard/src/main/java/ch/fhnw/imvs/kanban/service/UserService.java",
"license": "mit",
"size": 3421
}
|
[
"ch.fhnw.imvs.kanban.exception.InvalidTokenException"
] |
import ch.fhnw.imvs.kanban.exception.InvalidTokenException;
|
import ch.fhnw.imvs.kanban.exception.*;
|
[
"ch.fhnw.imvs"
] |
ch.fhnw.imvs;
| 22,983
|
protected void validateCurrentPassword(UserVO user, String currentPassword) {
AccountVO userAccount = _accountDao.findById(user.getAccountId());
boolean currentPasswordMatchesDataBasePassword = false;
for (UserAuthenticator userAuthenticator : _userPasswordEncoders) {
Pair<Boolean, ActionOnFailedAuthentication> authenticationResult = userAuthenticator.authenticate(user.getUsername(), currentPassword, userAccount.getDomainId(), null);
if (authenticationResult == null) {
s_logger.trace(String.format("Authenticator [%s] is returning null for the authenticate mehtod.", userAuthenticator.getClass()));
continue;
}
if (BooleanUtils.toBoolean(authenticationResult.first())) {
s_logger.debug(String.format("User [id=%s] re-authenticated [authenticator=%s] during password update.", user.getUuid(), userAuthenticator.getName()));
currentPasswordMatchesDataBasePassword = true;
break;
}
}
if (!currentPasswordMatchesDataBasePassword) {
throw new InvalidParameterValueException("Current password is incorrect.");
}
}
|
void function(UserVO user, String currentPassword) { AccountVO userAccount = _accountDao.findById(user.getAccountId()); boolean currentPasswordMatchesDataBasePassword = false; for (UserAuthenticator userAuthenticator : _userPasswordEncoders) { Pair<Boolean, ActionOnFailedAuthentication> authenticationResult = userAuthenticator.authenticate(user.getUsername(), currentPassword, userAccount.getDomainId(), null); if (authenticationResult == null) { s_logger.trace(String.format(STR, userAuthenticator.getClass())); continue; } if (BooleanUtils.toBoolean(authenticationResult.first())) { s_logger.debug(String.format(STR, user.getUuid(), userAuthenticator.getName())); currentPasswordMatchesDataBasePassword = true; break; } } if (!currentPasswordMatchesDataBasePassword) { throw new InvalidParameterValueException(STR); } }
|
/**
* Iterates over all configured user authenticators and tries to authenticate the user using the current password.
* If the user is authenticated with success, we have nothing else to do here; otherwise, an {@link InvalidParameterValueException} is thrown.
*/
|
Iterates over all configured user authenticators and tries to authenticate the user using the current password. If the user is authenticated with success, we have nothing else to do here; otherwise, an <code>InvalidParameterValueException</code> is thrown
|
validateCurrentPassword
|
{
"repo_name": "wido/cloudstack",
"path": "server/src/main/java/com/cloud/user/AccountManagerImpl.java",
"license": "apache-2.0",
"size": 131438
}
|
[
"com.cloud.exception.InvalidParameterValueException",
"com.cloud.server.auth.UserAuthenticator",
"com.cloud.utils.Pair",
"org.apache.commons.lang3.BooleanUtils"
] |
import com.cloud.exception.InvalidParameterValueException; import com.cloud.server.auth.UserAuthenticator; import com.cloud.utils.Pair; import org.apache.commons.lang3.BooleanUtils;
|
import com.cloud.exception.*; import com.cloud.server.auth.*; import com.cloud.utils.*; import org.apache.commons.lang3.*;
|
[
"com.cloud.exception",
"com.cloud.server",
"com.cloud.utils",
"org.apache.commons"
] |
com.cloud.exception; com.cloud.server; com.cloud.utils; org.apache.commons;
| 26,498
|
public Cancellable updateDatafeedAsync(UpdateDatafeedRequest request, RequestOptions options,
ActionListener<PutDatafeedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::updateDatafeed,
options,
PutDatafeedResponse::fromXContent,
listener,
Collections.emptySet());
}
|
Cancellable function(UpdateDatafeedRequest request, RequestOptions options, ActionListener<PutDatafeedResponse> listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::updateDatafeed, options, PutDatafeedResponse::fromXContent, listener, Collections.emptySet()); }
|
/**
* Updates a Machine Learning Datafeed asynchronously and notifies listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html">
* ML Update datafeed documentation</a>
*
* @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
|
Updates a Machine Learning Datafeed asynchronously and notifies listener on completion For additional info see ML Update datafeed documentation
|
updateDatafeedAsync
|
{
"repo_name": "HonzaKral/elasticsearch",
"path": "client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java",
"license": "apache-2.0",
"size": 130306
}
|
[
"java.util.Collections",
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.client.ml.PutDatafeedResponse",
"org.elasticsearch.client.ml.UpdateDatafeedRequest"
] |
import java.util.Collections; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.UpdateDatafeedRequest;
|
import java.util.*; import org.elasticsearch.action.*; import org.elasticsearch.client.ml.*;
|
[
"java.util",
"org.elasticsearch.action",
"org.elasticsearch.client"
] |
java.util; org.elasticsearch.action; org.elasticsearch.client;
| 1,418,691
|
void checkLayout(Container parent) {
if (parent.getLayout() != this) {
throw new IllegalArgumentException("wrong parent for BetterCardLayout");
}
}
|
void checkLayout(Container parent) { if (parent.getLayout() != this) { throw new IllegalArgumentException(STR); } }
|
/**
* Make sure that the Container really has a BetterCardLayout installed.
* Otherwise havoc can ensue!
*/
|
Make sure that the Container really has a BetterCardLayout installed. Otherwise havoc can ensue
|
checkLayout
|
{
"repo_name": "jedwards1211/breakout",
"path": "andork-ui/src/org/andork/awt/layout/BetterCardLayout.java",
"license": "gpl-2.0",
"size": 17253
}
|
[
"java.awt.Container"
] |
import java.awt.Container;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 1,705,890
|
public LatLngBounds getBoundingBox() {
return mBoundingBox;
}
|
LatLngBounds function() { return mBoundingBox; }
|
/**
* Gets the LatLngBounds containing the coordinates of the bounding box for the
* FeatureCollection. If the FeatureCollection did not have a bounding box or if the GeoJSON
* file did not contain a FeatureCollection then null will be returned.
*
* @return LatLngBounds containing bounding box of FeatureCollection, null if no bounding box
*/
|
Gets the LatLngBounds containing the coordinates of the bounding box for the FeatureCollection. If the FeatureCollection did not have a bounding box or if the GeoJSON file did not contain a FeatureCollection then null will be returned
|
getBoundingBox
|
{
"repo_name": "davidmrtz/TrackingApp",
"path": "myMapa/library/src/com/google/maps/android/geojson/GeoJsonLayer.java",
"license": "apache-2.0",
"size": 7445
}
|
[
"com.google.android.gms.maps.model.LatLngBounds"
] |
import com.google.android.gms.maps.model.LatLngBounds;
|
import com.google.android.gms.maps.model.*;
|
[
"com.google.android"
] |
com.google.android;
| 1,964,968
|
public java.util.List<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI> getInput_finiteEnumerations_FiniteEnumerationHLAPI(){
java.util.List<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI> retour = new ArrayList<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI>();
for (Sort elemnt : getInput()) {
if(elemnt.getClass().equals(fr.lip6.move.pnml.symmetricnet.finiteEnumerations.impl.FiniteEnumerationImpl.class)){
retour.add(new fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI(
(fr.lip6.move.pnml.symmetricnet.finiteEnumerations.FiniteEnumeration)elemnt
));
}
}
return retour;
}
|
java.util.List<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI> function(){ java.util.List<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI> retour = new ArrayList<fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI>(); for (Sort elemnt : getInput()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.symmetricnet.finiteEnumerations.impl.FiniteEnumerationImpl.class)){ retour.add(new fr.lip6.move.pnml.symmetricnet.finiteEnumerations.hlapi.FiniteEnumerationHLAPI( (fr.lip6.move.pnml.symmetricnet.finiteEnumerations.FiniteEnumeration)elemnt )); } } return retour; }
|
/**
* This accessor return a list of encapsulated subelement, only of FiniteEnumerationHLAPI kind.
* WARNING : this method can creates a lot of new object in memory.
*/
|
This accessor return a list of encapsulated subelement, only of FiniteEnumerationHLAPI kind. WARNING : this method can creates a lot of new object in memory
|
getInput_finiteEnumerations_FiniteEnumerationHLAPI
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-SNNet/src/fr/lip6/move/pnml/symmetricnet/integers/hlapi/ModuloHLAPI.java",
"license": "epl-1.0",
"size": 89721
}
|
[
"fr.lip6.move.pnml.symmetricnet.terms.Sort",
"java.util.ArrayList",
"java.util.List"
] |
import fr.lip6.move.pnml.symmetricnet.terms.Sort; import java.util.ArrayList; import java.util.List;
|
import fr.lip6.move.pnml.symmetricnet.terms.*; import java.util.*;
|
[
"fr.lip6.move",
"java.util"
] |
fr.lip6.move; java.util;
| 1,424,627
|
public long getDuration() {
View view;
if ((view = mView.get()) != null) {
return IMPL.getDuration(this, view);
} else {
return 0;
}
}
|
long function() { View view; if ((view = mView.get()) != null) { return IMPL.getDuration(this, view); } else { return 0; } }
|
/**
* Returns the current duration of property animations. If the duration was set on this
* object, that value is returned. Otherwise, the default value of the underlying Animator
* is returned.
*
* <p>Prior to API 14, this method will return 0.</p>
*
* @see #setDuration(long)
* @return The duration of animations, in milliseconds.
*/
|
Returns the current duration of property animations. If the duration was set on this object, that value is returned. Otherwise, the default value of the underlying Animator is returned. Prior to API 14, this method will return 0
|
getDuration
|
{
"repo_name": "kingargyle/adt-leanback-support",
"path": "support-v4/src/main/java/android/support/v4/view/ViewPropertyAnimatorCompat.java",
"license": "apache-2.0",
"size": 46989
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,718,852
|
static InputStream openUrl(String url) throws IOException {
try {
URLConnection urlConnection = new URL(url).openConnection();
urlConnection.setConnectTimeout(TIMEOUT);
urlConnection.setReadTimeout(TIMEOUT);
urlConnection.setDoInput(true);
urlConnection.setDoOutput(false);
return urlConnection.getInputStream();
} catch (Exception e) {
IOException ioe = new IOException("Couldn't open " + url);
ioe.initCause(e);
throw ioe;
}
}
private static class EntityParser extends ExpatParser {
private int depth = 0;
private EntityParser(String encoding, ExpatReader xmlReader,
int pointer, String publicId, String systemId) {
super(encoding, xmlReader, pointer, publicId, systemId);
}
|
static InputStream openUrl(String url) throws IOException { try { URLConnection urlConnection = new URL(url).openConnection(); urlConnection.setConnectTimeout(TIMEOUT); urlConnection.setReadTimeout(TIMEOUT); urlConnection.setDoInput(true); urlConnection.setDoOutput(false); return urlConnection.getInputStream(); } catch (Exception e) { IOException ioe = new IOException(STR + url); ioe.initCause(e); throw ioe; } } private static class EntityParser extends ExpatParser { private int depth = 0; private EntityParser(String encoding, ExpatReader xmlReader, int pointer, String publicId, String systemId) { super(encoding, xmlReader, pointer, publicId, systemId); }
|
/**
* Opens an InputStream for the given URL.
*/
|
Opens an InputStream for the given URL
|
openUrl
|
{
"repo_name": "xdajog/samsung_sources_i927",
"path": "libcore/luni/src/main/java/org/apache/harmony/xml/ExpatParser.java",
"license": "gpl-2.0",
"size": 26443
}
|
[
"java.io.IOException",
"java.io.InputStream",
"java.net.URLConnection"
] |
import java.io.IOException; import java.io.InputStream; import java.net.URLConnection;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 2,375,448
|
Matrix inverse();
|
Matrix inverse();
|
/**
* Inverse matrix.
*
* @return
*/
|
Inverse matrix
|
inverse
|
{
"repo_name": "fernandoj92/mvca-parkinson",
"path": "ltm-analysis/src/main/java/org/la4j/inversion/MatrixInverter.java",
"license": "apache-2.0",
"size": 1228
}
|
[
"org.la4j.matrix.Matrix"
] |
import org.la4j.matrix.Matrix;
|
import org.la4j.matrix.*;
|
[
"org.la4j.matrix"
] |
org.la4j.matrix;
| 1,166,564
|
@Override
protected final void emit_unresolved_invokestatic(MethodReference methodRef) {
emitDynamicLinkingSequence(asm, S0, methodRef, true);
genParameterRegisterLoad(methodRef, false);
asm.emitCALL_RegDisp(S0, Magic.getTocPointer().toWord().toOffset());
genResultRegisterUnload(methodRef);
}
|
final void function(MethodReference methodRef) { emitDynamicLinkingSequence(asm, S0, methodRef, true); genParameterRegisterLoad(methodRef, false); asm.emitCALL_RegDisp(S0, Magic.getTocPointer().toWord().toOffset()); genResultRegisterUnload(methodRef); }
|
/**
* Emit code to implement a dynamically linked invokestatic
* @param methodRef the referenced method
*/
|
Emit code to implement a dynamically linked invokestatic
|
emit_unresolved_invokestatic
|
{
"repo_name": "ut-osa/laminar",
"path": "jikesrvm-3.0.0/rvm/src/org/jikesrvm/compilers/baseline/ia32/BaselineCompilerImpl.java",
"license": "bsd-3-clause",
"size": 160544
}
|
[
"org.jikesrvm.classloader.MethodReference",
"org.jikesrvm.runtime.Magic"
] |
import org.jikesrvm.classloader.MethodReference; import org.jikesrvm.runtime.Magic;
|
import org.jikesrvm.classloader.*; import org.jikesrvm.runtime.*;
|
[
"org.jikesrvm.classloader",
"org.jikesrvm.runtime"
] |
org.jikesrvm.classloader; org.jikesrvm.runtime;
| 230,971
|
public void insert_octet(byte _0)
throws TypeMismatch, InvalidValue
{
throw new MARSHAL(_DynAnyStub.NOT_APPLICABLE);
}
|
void function(byte _0) throws TypeMismatch, InvalidValue { throw new MARSHAL(_DynAnyStub.NOT_APPLICABLE); }
|
/**
* The remote call of DynAny methods is not possible.
*
* @throws MARSHAL, always.
*/
|
The remote call of DynAny methods is not possible
|
insert_octet
|
{
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/org/omg/DynamicAny/_DynUnionStub.java",
"license": "gpl-2.0",
"size": 16960
}
|
[
"org.omg.DynamicAny"
] |
import org.omg.DynamicAny;
|
import org.omg.*;
|
[
"org.omg"
] |
org.omg;
| 1,584,972
|
@Operation(desc = "List the message counters", impact = MBeanOperationInfo.INFO)
String listMessageCounter() throws Exception;
|
@Operation(desc = STR, impact = MBeanOperationInfo.INFO) String listMessageCounter() throws Exception;
|
/**
* Lists the message counter for this queue.
*/
|
Lists the message counter for this queue
|
listMessageCounter
|
{
"repo_name": "iweiss/activemq-artemis",
"path": "artemis-core-client/src/main/java/org/apache/activemq/artemis/api/core/management/QueueControl.java",
"license": "apache-2.0",
"size": 30161
}
|
[
"javax.management.MBeanOperationInfo"
] |
import javax.management.MBeanOperationInfo;
|
import javax.management.*;
|
[
"javax.management"
] |
javax.management;
| 2,243,446
|
public void setPercent (BigDecimal Percent);
|
void function (BigDecimal Percent);
|
/** Set Percent.
* Percentage
*/
|
Set Percent. Percentage
|
setPercent
|
{
"repo_name": "armenrz/adempiere",
"path": "base/src/org/compiere/model/I_GL_DistributionLine.java",
"license": "gpl-2.0",
"size": 15709
}
|
[
"java.math.BigDecimal"
] |
import java.math.BigDecimal;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 939,597
|
private static Map makeSynonyms() {
final Map result = new HashMap();
result.put("vertex", "node");
result.put("vertices", "node");
result.put("vertexes", "node");
result.put("atom", "node");
result.put("atoms", "node");
result.put("nodes", "node");
result.put("bond", "edge");
result.put("bonds", "edge");
result.put("edges", "edge");
result.put("faces", "face");
result.put("ring", "face");
result.put("rings", "face");
result.put("tiles", "tile");
result.put("body", "tile");
result.put("bodies", "tile");
result.put("spacegroup", "group");
result.put("space_group", "group");
result.put("id", "name");
result.put("edge_centers", "edge_center");
result.put("edge_centre", "edge_center");
result.put("edge_centres", "edge_center");
result.put("edgecenter", "edge_center");
result.put("edgecenters", "edge_center");
result.put("edgecentre", "edge_center");
result.put("edgecentres", "edge_center");
result.put("coordination_sequences", "coordination_sequence");
result.put("coordinationsequence", "coordination_sequence");
result.put("coordinationsequences", "coordination_sequence");
result.put("cs", "coordination_sequence");
return Collections.unmodifiableMap(result);
}
|
static Map function() { final Map result = new HashMap(); result.put(STR, "node"); result.put(STR, "node"); result.put(STR, "node"); result.put("atom", "node"); result.put("atoms", "node"); result.put("nodes", "node"); result.put("bond", "edge"); result.put("bonds", "edge"); result.put("edges", "edge"); result.put("faces", "face"); result.put("ring", "face"); result.put("rings", "face"); result.put("tiles", "tile"); result.put("body", "tile"); result.put(STR, "tile"); result.put(STR, "group"); result.put(STR, "group"); result.put("id", "name"); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put(STR, STR); result.put("cs", STR); return Collections.unmodifiableMap(result); }
|
/**
* Sets up a keyword map to be used by {@link GenericParser#parseDataBlock()}.
*
* @return the mapping of keywords.
*/
|
Sets up a keyword map to be used by <code>GenericParser#parseDataBlock()</code>
|
makeSynonyms
|
{
"repo_name": "BackupTheBerlios/gavrog",
"path": "src/org/gavrog/joss/pgraphs/io/NetParser.java",
"license": "apache-2.0",
"size": 71664
}
|
[
"java.util.Collections",
"java.util.HashMap",
"java.util.Map"
] |
import java.util.Collections; import java.util.HashMap; import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,820,860
|
@Test
public void test_getDeductionConversionFactor() {
BigDecimal value = new BigDecimal(1);
instance.setDeductionConversionFactor(value);
assertSame("'getDeductionConversionFactor' should be correct.",
value, instance.getDeductionConversionFactor());
}
|
void function() { BigDecimal value = new BigDecimal(1); instance.setDeductionConversionFactor(value); assertSame(STR, value, instance.getDeductionConversionFactor()); }
|
/**
* <p>
* Accuracy test for the method <code>getDeductionConversionFactor()</code>.<br>
* The value should be properly retrieved.
* </p>
*/
|
Accuracy test for the method <code>getDeductionConversionFactor()</code>. The value should be properly retrieved.
|
test_getDeductionConversionFactor
|
{
"repo_name": "NASA-Tournament-Lab/CoECI-OPM-Service-Credit-Redeposit-Deposit-Application",
"path": "Code/SCRD_BRE/src/java/tests/gov/opm/scrd/entities/application/DeductionRateUnitTests.java",
"license": "apache-2.0",
"size": 9812
}
|
[
"java.math.BigDecimal",
"org.junit.Assert"
] |
import java.math.BigDecimal; import org.junit.Assert;
|
import java.math.*; import org.junit.*;
|
[
"java.math",
"org.junit"
] |
java.math; org.junit;
| 1,591,907
|
File getSource();
|
File getSource();
|
/**
* The location on the file system which this mod came from
*/
|
The location on the file system which this mod came from
|
getSource
|
{
"repo_name": "jdpadrnos/MinecraftForge",
"path": "src/main/java/net/minecraftforge/fml/common/ModContainer.java",
"license": "lgpl-2.1",
"size": 4313
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,929,962
|
@SuppressWarnings("unchecked")
protected void rehash( final int newN ) {
int i = 0, pos;
final boolean used[] = this.used;
K k;
final K key[] = this.key;
final double value[] = this.value;
final int newMask = newN - 1;
final K newKey[] = (K[]) new Object[ newN ];
final double newValue[] = new double[newN];
final boolean newUsed[] = new boolean[ newN ];
for( int j = size; j-- != 0; ) {
while( ! used[ i ] ) i++;
k = key[ i ];
pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & newMask;
while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
newUsed[ pos ] = true;
newKey[ pos ] = k;
newValue[ pos ] = value[ i ];
i++;
}
n = newN;
mask = newMask;
maxFill = maxFill( n, f );
this.key = newKey;
this.value = newValue;
this.used = newUsed;
}
|
@SuppressWarnings(STR) void function( final int newN ) { int i = 0, pos; final boolean used[] = this.used; K k; final K key[] = this.key; final double value[] = this.value; final int newMask = newN - 1; final K newKey[] = (K[]) new Object[ newN ]; final double newValue[] = new double[newN]; final boolean newUsed[] = new boolean[ newN ]; for( int j = size; j-- != 0; ) { while( ! used[ i ] ) i++; k = key[ i ]; pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & newMask; while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask; newUsed[ pos ] = true; newKey[ pos ] = k; newValue[ pos ] = value[ i ]; i++; } n = newN; mask = newMask; maxFill = maxFill( n, f ); this.key = newKey; this.value = newValue; this.used = newUsed; }
|
/** Resizes the map.
*
* <P>This method implements the basic rehashing strategy, and may be
* overriden by subclasses implementing different rehashing strategies (e.g.,
* disk-based rehashing). However, you should not override this method
* unless you understand the internal workings of this class.
*
* @param newN the new size
*/
|
Resizes the map. This method implements the basic rehashing strategy, and may be overriden by subclasses implementing different rehashing strategies (e.g., disk-based rehashing). However, you should not override this method unless you understand the internal workings of this class
|
rehash
|
{
"repo_name": "karussell/fastutil",
"path": "src/it/unimi/dsi/fastutil/objects/Reference2DoubleOpenHashMap.java",
"license": "apache-2.0",
"size": 29721
}
|
[
"it.unimi.dsi.fastutil.HashCommon"
] |
import it.unimi.dsi.fastutil.HashCommon;
|
import it.unimi.dsi.fastutil.*;
|
[
"it.unimi.dsi"
] |
it.unimi.dsi;
| 253,320
|
public static synchronized void releaseSharedResources(long timeout, TimeUnit unit) {
if (EVENT_LOOP != null) {
try {
EVENT_LOOP.shutdownGracefully().await(timeout, unit);
} catch (InterruptedException e) {
LoggerFactory.getLogger(Modbus.class)
.warn("Interrupted awaiting event loop shutdown.", e);
}
EVENT_LOOP = null;
}
if (SCHEDULED_EXECUTOR_SERVICE != null) {
SCHEDULED_EXECUTOR_SERVICE.shutdown();
}
if (EXECUTOR_SERVICE != null) {
EXECUTOR_SERVICE.shutdown();
}
if (SCHEDULED_EXECUTOR_SERVICE != null) {
try {
SCHEDULED_EXECUTOR_SERVICE.awaitTermination(timeout, unit);
} catch (InterruptedException e) {
LoggerFactory.getLogger(Modbus.class)
.warn("Interrupted awaiting scheduled executor service shutdown.", e);
}
SCHEDULED_EXECUTOR_SERVICE = null;
}
if (EXECUTOR_SERVICE != null) {
try {
EXECUTOR_SERVICE.awaitTermination(timeout, unit);
} catch (InterruptedException e) {
LoggerFactory.getLogger(Modbus.class)
.warn("Interrupted awaiting executor service shutdown.", e);
}
EXECUTOR_SERVICE = null;
}
if (WHEEL_TIMER != null) {
WHEEL_TIMER.stop().forEach(Timeout::cancel);
WHEEL_TIMER = null;
}
}
|
static synchronized void function(long timeout, TimeUnit unit) { if (EVENT_LOOP != null) { try { EVENT_LOOP.shutdownGracefully().await(timeout, unit); } catch (InterruptedException e) { LoggerFactory.getLogger(Modbus.class) .warn(STR, e); } EVENT_LOOP = null; } if (SCHEDULED_EXECUTOR_SERVICE != null) { SCHEDULED_EXECUTOR_SERVICE.shutdown(); } if (EXECUTOR_SERVICE != null) { EXECUTOR_SERVICE.shutdown(); } if (SCHEDULED_EXECUTOR_SERVICE != null) { try { SCHEDULED_EXECUTOR_SERVICE.awaitTermination(timeout, unit); } catch (InterruptedException e) { LoggerFactory.getLogger(Modbus.class) .warn(STR, e); } SCHEDULED_EXECUTOR_SERVICE = null; } if (EXECUTOR_SERVICE != null) { try { EXECUTOR_SERVICE.awaitTermination(timeout, unit); } catch (InterruptedException e) { LoggerFactory.getLogger(Modbus.class) .warn(STR, e); } EXECUTOR_SERVICE = null; } if (WHEEL_TIMER != null) { WHEEL_TIMER.stop().forEach(Timeout::cancel); WHEEL_TIMER = null; } }
|
/**
* Release shared resources, waiting at most the specified timeout for the {@link NioEventLoopGroup} to shutdown
* gracefully.
*
* @param timeout the duration of the timeout.
* @param unit the unit of the timeout duration.
*/
|
Release shared resources, waiting at most the specified timeout for the <code>NioEventLoopGroup</code> to shutdown gracefully
|
releaseSharedResources
|
{
"repo_name": "digitalpetri/modbus",
"path": "modbus-codec/src/main/java/com/digitalpetri/modbus/codec/Modbus.java",
"license": "apache-2.0",
"size": 7307
}
|
[
"io.netty.util.Timeout",
"java.util.concurrent.TimeUnit",
"org.slf4j.LoggerFactory"
] |
import io.netty.util.Timeout; import java.util.concurrent.TimeUnit; import org.slf4j.LoggerFactory;
|
import io.netty.util.*; import java.util.concurrent.*; import org.slf4j.*;
|
[
"io.netty.util",
"java.util",
"org.slf4j"
] |
io.netty.util; java.util; org.slf4j;
| 1,061,456
|
@Override
public QName getName()
{
return new QName(null, attributeResult.getAttributeName());
}
|
QName function() { return new QName(null, attributeResult.getAttributeName()); }
|
/**
* Returns the name of this node. This is the attribute name.
*
* @return the name of this node
*/
|
Returns the name of this node. This is the attribute name
|
getName
|
{
"repo_name": "mohanaraosv/commons-configuration",
"path": "src/main/java/org/apache/commons/configuration2/tree/xpath/ConfigurationAttributePointer.java",
"license": "apache-2.0",
"size": 6143
}
|
[
"org.apache.commons.jxpath.ri.QName"
] |
import org.apache.commons.jxpath.ri.QName;
|
import org.apache.commons.jxpath.ri.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,648,777
|
void addMBeanServers(Set<MBeanServerConnection> pMBeanServers);
|
void addMBeanServers(Set<MBeanServerConnection> pMBeanServers);
|
/**
* Add server specific MBeanServers
*
* @param pMBeanServers set to add detected MBeanServers to
*/
|
Add server specific MBeanServers
|
addMBeanServers
|
{
"repo_name": "GabrielNicolasAvellaneda/jolokia",
"path": "agent/core/src/main/java/org/jolokia/detector/ServerDetector.java",
"license": "apache-2.0",
"size": 1783
}
|
[
"java.util.Set",
"javax.management.MBeanServerConnection"
] |
import java.util.Set; import javax.management.MBeanServerConnection;
|
import java.util.*; import javax.management.*;
|
[
"java.util",
"javax.management"
] |
java.util; javax.management;
| 328,311
|
public static void closeCustomShell() throws IOException {
Shell.closeCustomShell();
}
|
static void function() throws IOException { Shell.closeCustomShell(); }
|
/**
* This will close the custom shell that you opened.
*
* @throws IOException
*/
|
This will close the custom shell that you opened
|
closeCustomShell
|
{
"repo_name": "Morlunk/Mountie",
"path": "app/src/main/java/com/stericson/RootTools/RootTools.java",
"license": "gpl-3.0",
"size": 34186
}
|
[
"com.stericson.RootTools",
"java.io.IOException"
] |
import com.stericson.RootTools; import java.io.IOException;
|
import com.stericson.*; import java.io.*;
|
[
"com.stericson",
"java.io"
] |
com.stericson; java.io;
| 2,606,390
|
public MStyle convertStyle(DTableElementStyle dStyle) {
MStyle mStyle = null;
if (dStyle != null) {
mStyle = new MStyleImpl(null, dStyle.getLabelSize(), convertColor(dStyle.getForegroundColor()),
convertColor(dStyle.getBackgroundColor()), convertFontFormat(dStyle.getLabelFormat()));
}
return mStyle;
}
|
MStyle function(DTableElementStyle dStyle) { MStyle mStyle = null; if (dStyle != null) { mStyle = new MStyleImpl(null, dStyle.getLabelSize(), convertColor(dStyle.getForegroundColor()), convertColor(dStyle.getBackgroundColor()), convertFontFormat(dStyle.getLabelFormat())); } return mStyle; }
|
/**
* Converts a Sirius style to an m2doc style.
*
* @param dStyle
* the Sirius style.
* @return the converted style.
*/
|
Converts a Sirius style to an m2doc style
|
convertStyle
|
{
"repo_name": "ObeoNetwork/M2Doc",
"path": "plugins/org.obeonetwork.m2doc.sirius/src/org/obeonetwork/m2doc/sirius/util/DTable2MTableConverter.java",
"license": "epl-1.0",
"size": 11963
}
|
[
"org.eclipse.sirius.table.metamodel.table.DTableElementStyle",
"org.obeonetwork.m2doc.element.MStyle",
"org.obeonetwork.m2doc.element.impl.MStyleImpl"
] |
import org.eclipse.sirius.table.metamodel.table.DTableElementStyle; import org.obeonetwork.m2doc.element.MStyle; import org.obeonetwork.m2doc.element.impl.MStyleImpl;
|
import org.eclipse.sirius.table.metamodel.table.*; import org.obeonetwork.m2doc.element.*; import org.obeonetwork.m2doc.element.impl.*;
|
[
"org.eclipse.sirius",
"org.obeonetwork.m2doc"
] |
org.eclipse.sirius; org.obeonetwork.m2doc;
| 1,953,991
|
@Nullable
public String getAttribute(String attribute) {
checkNotNull(attribute, "attribute");
return attributes.get(attribute);
}
|
String function(String attribute) { checkNotNull(attribute, STR); return attributes.get(attribute); }
|
/**
* Returns the given attribute value. If not such attribute exists, returns null.
*/
|
Returns the given attribute value. If not such attribute exists, returns null
|
getAttribute
|
{
"repo_name": "xsm110/Apache-Beam",
"path": "sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java",
"license": "apache-2.0",
"size": 41978
}
|
[
"com.google.common.base.Preconditions"
] |
import com.google.common.base.Preconditions;
|
import com.google.common.base.*;
|
[
"com.google.common"
] |
com.google.common;
| 2,251,188
|
@Test
public void testValidateIndexesNoErrorEmptyCacheNameArg() {
injectTestSystemOut();
assertEquals(EXIT_CODE_OK, execute("--cache", "validate_indexes"));
assertContains(log, testOut.toString(), "no issues found");
}
|
void function() { injectTestSystemOut(); assertEquals(EXIT_CODE_OK, execute(STR, STR)); assertContains(log, testOut.toString(), STR); }
|
/**
* Test verifies that validate_indexes command finishes successfully when no cache names are specified.
*/
|
Test verifies that validate_indexes command finishes successfully when no cache names are specified
|
testValidateIndexesNoErrorEmptyCacheNameArg
|
{
"repo_name": "samaitra/ignite",
"path": "modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexingClusterByClassTest.java",
"license": "apache-2.0",
"size": 6771
}
|
[
"org.apache.ignite.testframework.GridTestUtils"
] |
import org.apache.ignite.testframework.GridTestUtils;
|
import org.apache.ignite.testframework.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 909,645
|
public void createLiterals(String timeIntervalValue, String timeValue)
{
if ((!"null".equals(timeIntervalValue)) && (!"null".equals(timeValue)))
{
//Creating the dateOffSet Literal
setTimeInterval(timeIntervalValue);
dateOffSetLiteral = QueryObjectFactory.createDateOffsetLiteral(timeValue, timeInterval);
}
else
{
checkAttributeType(timeValue);
}
}
|
void function(String timeIntervalValue, String timeValue) { if ((!"null".equals(timeIntervalValue)) && (!"null".equals(timeValue))) { setTimeInterval(timeIntervalValue); dateOffSetLiteral = QueryObjectFactory.createDateOffsetLiteral(timeValue, timeInterval); } else { checkAttributeType(timeValue); } }
|
/**
* This method creates either date Literal or dateOffset Literal depending on the time Interval values.
* @param timeIntervalValue timeIntervalValue
* @param timeValue timeValue
*/
|
This method creates either date Literal or dateOffset Literal depending on the time Interval values
|
createLiterals
|
{
"repo_name": "NCIP/catissue-advanced-query",
"path": "software/AdvancedQuery/src/main/java/edu/wustl/query/flex/dag/TwoNodesTemporalQuery.java",
"license": "bsd-3-clause",
"size": 15998
}
|
[
"edu.wustl.common.querysuite.factory.QueryObjectFactory"
] |
import edu.wustl.common.querysuite.factory.QueryObjectFactory;
|
import edu.wustl.common.querysuite.factory.*;
|
[
"edu.wustl.common"
] |
edu.wustl.common;
| 1,005,534
|
EndpointReferenceType mint(QName serviceName);
|
EndpointReferenceType mint(QName serviceName);
|
/**
* Walk the list of registered EndpointResolvers, so as to mint a new
* abstract EPR for a given service name.
*
* @param serviceName
* @return the newly minted EPR if appropriate, null otherwise
*/
|
Walk the list of registered EndpointResolvers, so as to mint a new abstract EPR for a given service name
|
mint
|
{
"repo_name": "zzsoszz/webservice_gzdx",
"path": "opensource_cxf/org/apache/cxf/endpoint/EndpointResolverRegistry.java",
"license": "apache-2.0",
"size": 3761
}
|
[
"javax.xml.namespace.QName",
"org.apache.cxf.ws.addressing.EndpointReferenceType"
] |
import javax.xml.namespace.QName; import org.apache.cxf.ws.addressing.EndpointReferenceType;
|
import javax.xml.namespace.*; import org.apache.cxf.ws.addressing.*;
|
[
"javax.xml",
"org.apache.cxf"
] |
javax.xml; org.apache.cxf;
| 1,503,360
|
public void concat(final Path trg, final Path [] psrcs) throws IOException {
throw new UnsupportedOperationException("Not implemented by the " +
getClass().getSimpleName() + " FileSystem implementation");
}
|
void function(final Path trg, final Path [] psrcs) throws IOException { throw new UnsupportedOperationException(STR + getClass().getSimpleName() + STR); }
|
/**
* Concat existing files together.
* @param trg the path to the target destination.
* @param psrcs the paths to the sources to use for the concatenation.
* @throws IOException
*/
|
Concat existing files together
|
concat
|
{
"repo_name": "Microsoft-CISL/hadoop-prototype",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java",
"license": "apache-2.0",
"size": 116772
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 215,827
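A hedged usage sketch for the concat record above: since the base FileSystem implementation throws UnsupportedOperationException, a caller can guard the call and fall back to a manual merge. The method name, target, and source paths are assumptions for illustration.

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch: call concat() only if the concrete FileSystem supports it.
static void mergeParts(FileSystem fs, Path target, Path[] sources) throws IOException {
    try {
        fs.concat(target, sources);
    } catch (UnsupportedOperationException e) {
        // this FileSystem has no native concat; fall back to a copy-based merge
    }
}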
|
public Operation reload(OperationOption... options) {
return compute.getOperation(operationId, options);
}
|
Operation function(OperationOption... options) { return compute.getOperation(operationId, options); }
|
/**
* Fetches current operation's latest information. Returns {@code null} if the operation does not
* exist.
*
* @param options operation options
* @return an {@code Operation} object with latest information or {@code null} if not found
* @throws ComputeException upon failure
*/
|
Fetches current operation's latest information. Returns null if the operation does not exist
|
reload
|
{
"repo_name": "jabubake/google-cloud-java",
"path": "google-cloud-compute/src/main/java/com/google/cloud/compute/Operation.java",
"license": "apache-2.0",
"size": 33565
}
|
[
"com.google.cloud.compute.Compute"
] |
import com.google.cloud.compute.Compute;
|
import com.google.cloud.compute.*;
|
[
"com.google.cloud"
] |
com.google.cloud;
| 787,867
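A minimal usage sketch for the reload record above, showing the null contract from the Javadoc; the pre-existing operation variable and what a caller does with the refreshed object are assumptions.

// Sketch: refresh an Operation and handle the case where it no longer exists.
Operation latest = operation.reload();
if (latest == null) {
    // the operation was not found on the server
} else {
    // inspect "latest" for completion status or errors
}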
|
private int getResponseCode(HttpURLConnection connection) {
try {
return connection.getResponseCode();
} catch (IOException e) {
throw new RequestException(e);
}
}
|
int function(HttpURLConnection connection) { try { return connection.getResponseCode(); } catch (IOException e) { throw new RequestException(e); } }
|
/**
* Get http connection response status code
*/
|
Get http connection response status code
|
getResponseCode
|
{
"repo_name": "jojoaddison/oneapi-java",
"path": "src/main/java/oneapi/client/impl/OneAPIBaseClientImpl.java",
"license": "apache-2.0",
"size": 17150
}
|
[
"java.io.IOException",
"java.net.HttpURLConnection"
] |
import java.io.IOException; import java.net.HttpURLConnection;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 1,651,048
|
@Test
public void testThreadIsolatedObserveKnownAsyncFailureWithNoFallback() {
testObserveKnownFailureWithNoFallback(ExecutionIsolationStrategy.THREAD, true);
}
|
void function() { testObserveKnownFailureWithNoFallback(ExecutionIsolationStrategy.THREAD, true); }
|
/**
 * Test a thread command execution that throws a HystrixException asynchronously and does not implement getFallback.
*/
|
Test a thread command execution that throws a HystrixException asynchronously and does not implement getFallback
|
testThreadIsolatedObserveKnownAsyncFailureWithNoFallback
|
{
"repo_name": "sasrin/Hystrix",
"path": "hystrix-core/src/test/java/com/netflix/hystrix/HystrixObservableCommandTest.java",
"license": "apache-2.0",
"size": 272384
}
|
[
"com.netflix.hystrix.HystrixCommandProperties"
] |
import com.netflix.hystrix.HystrixCommandProperties;
|
import com.netflix.hystrix.*;
|
[
"com.netflix.hystrix"
] |
com.netflix.hystrix;
| 2,069,726
|
private List<StorageUnitEntity> createStorageUnitEntitiesFromStorageUnits(List<StorageUnitCreateRequest> storageUnitCreateRequests,
BusinessObjectFormatEntity businessObjectFormatEntity, BusinessObjectDataEntity businessObjectDataEntity)
{
// Create the storage units for the data.
List<StorageUnitEntity> storageUnitEntities = new ArrayList<>();
// Get the storage unit status entity for the ENABLED status.
StorageUnitStatusEntity storageUnitStatusEntity = storageDaoHelper.getStorageUnitStatusEntity(StorageUnitStatusEntity.ENABLED);
for (StorageUnitCreateRequest storageUnit : storageUnitCreateRequests)
{
// Get the storage entity per request and verify that it exists.
StorageEntity storageEntity = storageDaoHelper.getStorageEntity(storageUnit.getStorageName());
// Set up flags which are used to make flow logic easier.
boolean isS3StoragePlatform = storageEntity.getStoragePlatform().getName().equals(StoragePlatformEntity.S3);
boolean isStorageDirectorySpecified = storageUnit.getStorageDirectory() != null;
boolean validatePathPrefix = storageDaoHelper
.getBooleanStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.S3_ATTRIBUTE_NAME_VALIDATE_PATH_PREFIX),
storageEntity, false, true);
boolean validateFileExistence = storageDaoHelper
.getBooleanStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.S3_ATTRIBUTE_NAME_VALIDATE_FILE_EXISTENCE),
storageEntity, false, true);
// If the storage has any validation configured, get the expected S3 key prefix.
String expectedS3KeyPrefix = null;
if ((validatePathPrefix || validateFileExistence) && isS3StoragePlatform)
{
// Build the expected S3 key prefix as per S3 naming convention.
expectedS3KeyPrefix = buildS3KeyPrefix(businessObjectFormatEntity, herdDaoHelper.getBusinessObjectDataKey(businessObjectDataEntity));
}
// Create the storage unit and associated storage files.
StorageUnitEntity storageUnitEntity = new StorageUnitEntity();
storageUnitEntities.add(storageUnitEntity);
storageUnitEntity.setStorage(storageEntity);
storageUnitEntity.setBusinessObjectData(businessObjectDataEntity);
storageUnitEntity.setStatus(storageUnitStatusEntity);
// Process storage directory path if it is specified.
String directoryPath = null;
if (isStorageDirectorySpecified)
{
// Get the specified directory path.
directoryPath = storageUnit.getStorageDirectory().getDirectoryPath();
// If the validate path prefix flag is configured for this storage, validate the directory path value.
if (validatePathPrefix && isS3StoragePlatform)
{
// Ensure the directory path adheres to the S3 naming convention.
Assert.isTrue(directoryPath.equals(expectedS3KeyPrefix),
String.format("Specified directory path \"%s\" does not match the expected S3 key prefix \"%s\".", directoryPath, expectedS3KeyPrefix));
// Ensure that the directory path is not already registered with another business object data instance.
StorageUnitEntity alreadyRegisteredStorageUnitEntity =
herdDao.getStorageUnitByStorageNameAndDirectoryPath(storageEntity.getName(), directoryPath);
if (alreadyRegisteredStorageUnitEntity != null)
{
throw new AlreadyExistsException(String
.format("Storage directory \"%s\" in \"%s\" storage is already registered by the business object data {%s}.", directoryPath,
storageEntity.getName(),
herdDaoHelper.businessObjectDataEntityAltKeyToString(alreadyRegisteredStorageUnitEntity.getBusinessObjectData())));
}
}
// Store the directory.
storageUnitEntity.setDirectoryPath(directoryPath);
}
// Discover storage files if storage file discovery is enabled. Otherwise, get the storage files specified in the request, if any.
List<StorageFile> storageFiles =
BooleanUtils.isTrue(storageUnit.isDiscoverStorageFiles()) ? discoverStorageFiles(storageEntity, directoryPath) : storageUnit.getStorageFiles();
// Create the storage file entities.
createStorageFileEntitiesFromStorageFiles(storageFiles, storageEntity, BooleanUtils.isTrue(storageUnit.isDiscoverStorageFiles()),
expectedS3KeyPrefix, storageUnitEntity, directoryPath, validatePathPrefix, validateFileExistence, isS3StoragePlatform);
}
return storageUnitEntities;
}
|
List<StorageUnitEntity> function(List<StorageUnitCreateRequest> storageUnitCreateRequests, BusinessObjectFormatEntity businessObjectFormatEntity, BusinessObjectDataEntity businessObjectDataEntity) { List<StorageUnitEntity> storageUnitEntities = new ArrayList<>(); StorageUnitStatusEntity storageUnitStatusEntity = storageDaoHelper.getStorageUnitStatusEntity(StorageUnitStatusEntity.ENABLED); for (StorageUnitCreateRequest storageUnit : storageUnitCreateRequests) { StorageEntity storageEntity = storageDaoHelper.getStorageEntity(storageUnit.getStorageName()); boolean isS3StoragePlatform = storageEntity.getStoragePlatform().getName().equals(StoragePlatformEntity.S3); boolean isStorageDirectorySpecified = storageUnit.getStorageDirectory() != null; boolean validatePathPrefix = storageDaoHelper .getBooleanStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.S3_ATTRIBUTE_NAME_VALIDATE_PATH_PREFIX), storageEntity, false, true); boolean validateFileExistence = storageDaoHelper .getBooleanStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.S3_ATTRIBUTE_NAME_VALIDATE_FILE_EXISTENCE), storageEntity, false, true); String expectedS3KeyPrefix = null; if ((validatePathPrefix || validateFileExistence) && isS3StoragePlatform) { expectedS3KeyPrefix = buildS3KeyPrefix(businessObjectFormatEntity, herdDaoHelper.getBusinessObjectDataKey(businessObjectDataEntity)); } StorageUnitEntity storageUnitEntity = new StorageUnitEntity(); storageUnitEntities.add(storageUnitEntity); storageUnitEntity.setStorage(storageEntity); storageUnitEntity.setBusinessObjectData(businessObjectDataEntity); storageUnitEntity.setStatus(storageUnitStatusEntity); String directoryPath = null; if (isStorageDirectorySpecified) { directoryPath = storageUnit.getStorageDirectory().getDirectoryPath(); if (validatePathPrefix && isS3StoragePlatform) { Assert.isTrue(directoryPath.equals(expectedS3KeyPrefix), String.format(STR, directoryPath, expectedS3KeyPrefix)); StorageUnitEntity alreadyRegisteredStorageUnitEntity = herdDao.getStorageUnitByStorageNameAndDirectoryPath(storageEntity.getName(), directoryPath); if (alreadyRegisteredStorageUnitEntity != null) { throw new AlreadyExistsException(String .format(STR, directoryPath, storageEntity.getName(), herdDaoHelper.businessObjectDataEntityAltKeyToString(alreadyRegisteredStorageUnitEntity.getBusinessObjectData()))); } } storageUnitEntity.setDirectoryPath(directoryPath); } List<StorageFile> storageFiles = BooleanUtils.isTrue(storageUnit.isDiscoverStorageFiles()) ? discoverStorageFiles(storageEntity, directoryPath) : storageUnit.getStorageFiles(); createStorageFileEntitiesFromStorageFiles(storageFiles, storageEntity, BooleanUtils.isTrue(storageUnit.isDiscoverStorageFiles()), expectedS3KeyPrefix, storageUnitEntity, directoryPath, validatePathPrefix, validateFileExistence, isS3StoragePlatform); } return storageUnitEntities; }
|
/**
* Creates a list of storage unit entities from a list of storage unit create requests.
*
* @param storageUnitCreateRequests the storage unit create requests.
* @param businessObjectFormatEntity the business object format entity.
* @param businessObjectDataEntity the business object data entity.
*
* @return the list of storage unit entities.
*/
|
Creates a list of storage unit entities from a list of storage unit create requests
|
createStorageUnitEntitiesFromStorageUnits
|
{
"repo_name": "seoj/herd",
"path": "herd-code/herd-service/src/main/java/org/finra/herd/service/helper/BusinessObjectDataHelper.java",
"license": "apache-2.0",
"size": 89946
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.commons.lang3.BooleanUtils",
"org.finra.herd.model.AlreadyExistsException",
"org.finra.herd.model.api.xml.StorageFile",
"org.finra.herd.model.api.xml.StorageUnitCreateRequest",
"org.finra.herd.model.dto.ConfigurationValue",
"org.finra.herd.model.jpa.BusinessObjectDataEntity",
"org.finra.herd.model.jpa.BusinessObjectFormatEntity",
"org.finra.herd.model.jpa.StorageEntity",
"org.finra.herd.model.jpa.StoragePlatformEntity",
"org.finra.herd.model.jpa.StorageUnitEntity",
"org.finra.herd.model.jpa.StorageUnitStatusEntity",
"org.springframework.util.Assert"
] |
import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.BooleanUtils; import org.finra.herd.model.AlreadyExistsException; import org.finra.herd.model.api.xml.StorageFile; import org.finra.herd.model.api.xml.StorageUnitCreateRequest; import org.finra.herd.model.dto.ConfigurationValue; import org.finra.herd.model.jpa.BusinessObjectDataEntity; import org.finra.herd.model.jpa.BusinessObjectFormatEntity; import org.finra.herd.model.jpa.StorageEntity; import org.finra.herd.model.jpa.StoragePlatformEntity; import org.finra.herd.model.jpa.StorageUnitEntity; import org.finra.herd.model.jpa.StorageUnitStatusEntity; import org.springframework.util.Assert;
|
import java.util.*; import org.apache.commons.lang3.*; import org.finra.herd.model.*; import org.finra.herd.model.api.xml.*; import org.finra.herd.model.dto.*; import org.finra.herd.model.jpa.*; import org.springframework.util.*;
|
[
"java.util",
"org.apache.commons",
"org.finra.herd",
"org.springframework.util"
] |
java.util; org.apache.commons; org.finra.herd; org.springframework.util;
| 1,602,492
|
public void editClaim(String claim, String claimDescription, Date start,Date end) {
//This will edit the claim
this.claimList.getClaims().get(claimListNumber).editClaim(claim, claimDescription, start, end);
//this will notify the listener
this.claimList.notifyListeners();
}
|
void function(String claim, String claimDescription, Date start,Date end) { this.claimList.getClaims().get(claimListNumber).editClaim(claim, claimDescription, start, end); this.claimList.notifyListeners(); }
|
/**
 * This will call the claim edit methods and edit the current claim
* @param claim
* @param claimDescription
* @param start
* @param end
*/
|
This will call the claim edit methods and edit the current claim
|
editClaim
|
{
"repo_name": "eorodrig/eorodrigTravelLogger",
"path": "TravelLogger/src/com/eorodrig/TravelLogger/ClaimController.java",
"license": "lgpl-3.0",
"size": 6159
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,773,786
|
private void fillGuiFromSensor (LineChartSensor config)
{
if (config == null)
{
return;
}
axisRangeModeAuto.setSelected (config.getAxisRangeMode () == LineChartSensor.MODE_AUTO);
axisRangeModeManuel.setSelected (config.getAxisRangeMode () == LineChartSensor.MODE_MANUAL);
axisRangeModeTakeFrom.setSelected (
config.getAxisRangeMode () == LineChartSensor.MODE_TAKEFROM);
axisRangeStart.setValue (new Double(config.getAxisRangeStart ()));
axisRangeStop.setValue (new Double(config.getAxisRangeStop ()));
axisTakeFrom.setModel (axisTakeFromModel = new DefaultComboBoxModel());
stationSensorSelector.setStationIdSensorId (config.getStationId (), config.getSensorId ());
warnMin.setSelected (config.isWarnMin ());
warnMax.setSelected (config.isWarnMax ());
warnMinValue.setValue (new Double(config.getWarnMinValue ()));
warnMaxValue.setValue (new Double(config.getWarnMaxValue ()));
checkWarningRange (config);
lineColorDisplay.setBackground (new Color(config.getColor ()));
}
|
void function (LineChartSensor config) { if (config == null) { return; } axisRangeModeAuto.setSelected (config.getAxisRangeMode () == LineChartSensor.MODE_AUTO); axisRangeModeManuel.setSelected (config.getAxisRangeMode () == LineChartSensor.MODE_MANUAL); axisRangeModeTakeFrom.setSelected ( config.getAxisRangeMode () == LineChartSensor.MODE_TAKEFROM); axisRangeStart.setValue (new Double(config.getAxisRangeStart ())); axisRangeStop.setValue (new Double(config.getAxisRangeStop ())); axisTakeFrom.setModel (axisTakeFromModel = new DefaultComboBoxModel()); stationSensorSelector.setStationIdSensorId (config.getStationId (), config.getSensorId ()); warnMin.setSelected (config.isWarnMin ()); warnMax.setSelected (config.isWarnMax ()); warnMinValue.setValue (new Double(config.getWarnMinValue ())); warnMaxValue.setValue (new Double(config.getWarnMaxValue ())); checkWarningRange (config); lineColorDisplay.setBackground (new Color(config.getColor ())); }
|
/**
* Fill the sensor gui components with sensor configuration attributes.
*
* @param config The sensor configuration from which to load the attributes.
*/
|
Fill the sensor gui components with sensor configuration attributes
|
fillGuiFromSensor
|
{
"repo_name": "grappendorf/openmetix",
"path": "plugin/metix-linechart/src/java/de/iritgo/openmetix/linechart/gui/LineChartConfigurator.java",
"license": "gpl-2.0",
"size": 27967
}
|
[
"de.iritgo.openmetix.linechart.LineChartSensor",
"java.awt.Color",
"javax.swing.DefaultComboBoxModel"
] |
import de.iritgo.openmetix.linechart.LineChartSensor; import java.awt.Color; import javax.swing.DefaultComboBoxModel;
|
import de.iritgo.openmetix.linechart.*; import java.awt.*; import javax.swing.*;
|
[
"de.iritgo.openmetix",
"java.awt",
"javax.swing"
] |
de.iritgo.openmetix; java.awt; javax.swing;
| 2,873,946
|
static Object decompress(RegionEntryContext context, Object value) {
if (isCompressible(context, value)) {
long time = context.getCachePerfStats().startDecompression();
value = EntryEventImpl.deserialize(context.getCompressor().decompress((byte[]) value));
context.getCachePerfStats().endDecompression(time);
}
return value;
}
|
static Object decompress(RegionEntryContext context, Object value) { if (isCompressible(context, value)) { long time = context.getCachePerfStats().startDecompression(); value = EntryEventImpl.deserialize(context.getCompressor().decompress((byte[]) value)); context.getCachePerfStats().endDecompression(time); } return value; }
|
/**
* This method determines if the value is in a compressed representation and decompresses it if it
* is.
*
* @param context the values context.
* @param value a region entry value.
*
* @return the decompressed form of the value parameter.
*/
|
This method determines if the value is in a compressed representation and decompresses it if it is
|
decompress
|
{
"repo_name": "PurelyApplied/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/entries/AbstractRegionEntry.java",
"license": "apache-2.0",
"size": 73811
}
|
[
"org.apache.geode.internal.cache.EntryEventImpl",
"org.apache.geode.internal.cache.RegionEntryContext"
] |
import org.apache.geode.internal.cache.EntryEventImpl; import org.apache.geode.internal.cache.RegionEntryContext;
|
import org.apache.geode.internal.cache.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 1,089,751
|
List<MapInfo> getMapInfo();
|
List<MapInfo> getMapInfo();
|
/**
* Returns information about all the consistent maps in the system.
*
* @return list of map information
*/
|
Returns information about all the consistent maps in the system
|
getMapInfo
|
{
"repo_name": "ravikumaran2015/ravikumaran201504",
"path": "core/api/src/main/java/org/onosproject/store/service/StorageAdminService.java",
"license": "apache-2.0",
"size": 1652
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,043,395
|
protected void updateFromFile() throws IOException {
if (m_lastModified != m_viewsDisplayFile.lastModified()) {
reload();
}
}
|
void function() throws IOException { if (m_lastModified != m_viewsDisplayFile.lastModified()) { reload(); } }
|
/**
* Reload the viewsdisplay.xml file if it has been changed since we last
* read it.
*
* @throws java.io.IOException if any.
*/
|
Reload the viewsdisplay.xml file if it has been changed since we last read it
|
updateFromFile
|
{
"repo_name": "jeffgdotorg/opennms",
"path": "opennms-config/src/main/java/org/opennms/netmgt/config/ViewsDisplayFactory.java",
"license": "gpl-2.0",
"size": 7330
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,513,037
|
private void prepareResult(ZNRecord znRecord) {
// The map fields are keyed on partition name to a pair of node and state, i.e. it
// indicates that the partition with given state is served by that node
//
// The list fields are also keyed on partition and list all the nodes serving that partition.
// This is useful to verify that there is no node serving multiple replicas of the same
// partition.
Map<String, List<String>> newPreferences = new TreeMap<String, List<String>>();
for (String partition : _partitions) {
znRecord.setMapField(partition, new TreeMap<String, String>());
znRecord.setListField(partition, new ArrayList<String>());
newPreferences.put(partition, new ArrayList<String>());
}
// for preference lists, the rough priority that we want is:
// [existing preferred, existing non-preferred, non-existing preferred, non-existing
// non-preferred]
for (Node node : _liveNodesList) {
for (Replica replica : node.preferred) {
if (node.newReplicas.contains(replica)) {
newPreferences.get(replica.partition).add(node.id);
} else {
znRecord.getListField(replica.partition).add(node.id);
}
}
}
for (Node node : _liveNodesList) {
for (Replica replica : node.nonPreferred) {
if (node.newReplicas.contains(replica)) {
newPreferences.get(replica.partition).add(node.id);
} else {
znRecord.getListField(replica.partition).add(node.id);
}
}
}
normalizePreferenceLists(znRecord.getListFields(), newPreferences);
// generate preference maps based on the preference lists
for (String partition : _partitions) {
List<String> preferenceList = znRecord.getListField(partition);
int i = 0;
for (String participant : preferenceList) {
znRecord.getMapField(partition).put(participant, _stateMap.get(i));
i++;
}
}
}
|
void function(ZNRecord znRecord) { Map<String, List<String>> newPreferences = new TreeMap<String, List<String>>(); for (String partition : _partitions) { znRecord.setMapField(partition, new TreeMap<String, String>()); znRecord.setListField(partition, new ArrayList<String>()); newPreferences.put(partition, new ArrayList<String>()); } for (Node node : _liveNodesList) { for (Replica replica : node.preferred) { if (node.newReplicas.contains(replica)) { newPreferences.get(replica.partition).add(node.id); } else { znRecord.getListField(replica.partition).add(node.id); } } } for (Node node : _liveNodesList) { for (Replica replica : node.nonPreferred) { if (node.newReplicas.contains(replica)) { newPreferences.get(replica.partition).add(node.id); } else { znRecord.getListField(replica.partition).add(node.id); } } } normalizePreferenceLists(znRecord.getListFields(), newPreferences); for (String partition : _partitions) { List<String> preferenceList = znRecord.getListField(partition); int i = 0; for (String participant : preferenceList) { znRecord.getMapField(partition).put(participant, _stateMap.get(i)); i++; } } }
|
/**
* Update a ZNRecord with the results of the rebalancing.
* @param znRecord
*/
|
Update a ZNRecord with the results of the rebalancing
|
prepareResult
|
{
"repo_name": "kishoreg/helix-actors",
"path": "helix-core/src/main/java/org/apache/helix/controller/strategy/AutoRebalanceStrategy.java",
"license": "apache-2.0",
"size": 27268
}
|
[
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"java.util.TreeMap",
"org.apache.helix.ZNRecord"
] |
import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.helix.ZNRecord;
|
import java.util.*; import org.apache.helix.*;
|
[
"java.util",
"org.apache.helix"
] |
java.util; org.apache.helix;
| 704,429
|
public List<String> findPackageIdsForTest(String testClassName);
|
List<String> function(String testClassName);
|
/**
* Attempt to find the package ids for a given test class name
*
* @param testClassName the test class name
* @return a {@link List} of package ids.
*/
|
Attempt to find the package ids for a given test class name
|
findPackageIdsForTest
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "cts/tools/tradefed-host/src/com/android/cts/tradefed/testtype/ITestPackageRepo.java",
"license": "gpl-3.0",
"size": 1835
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,237,115
|
public long getMemStoreFlushSize() {
byte [] value = getValue(MEMSTORE_FLUSHSIZE_KEY);
if (value != null) {
return Long.parseLong(Bytes.toString(value));
}
return -1;
}
|
long function() { byte [] value = getValue(MEMSTORE_FLUSHSIZE_KEY); if (value != null) { return Long.parseLong(Bytes.toString(value)); } return -1; }
|
/**
* Returns the size of the memstore after which a flush to filesystem is triggered.
*
* @return memory cache flush size for each hregion, -1 if not set.
*
* @see #setMemStoreFlushSize(long)
*/
|
Returns the size of the memstore after which a flush to filesystem is triggered
|
getMemStoreFlushSize
|
{
"repo_name": "intel-hadoop/hbase-rhino",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java",
"license": "apache-2.0",
"size": 50246
}
|
[
"org.apache.hadoop.hbase.util.Bytes"
] |
import org.apache.hadoop.hbase.util.Bytes;
|
import org.apache.hadoop.hbase.util.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 850,873
|
public OffsetDateTime getEndTime() {
return this.endTime;
}
|
OffsetDateTime function() { return this.endTime; }
|
/**
* Get the endTime property: The end time of this indexer execution, if the execution has already completed.
*
* @return the endTime value.
*/
|
Get the endTime property: The end time of this indexer execution, if the execution has already completed
|
getEndTime
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/models/IndexerExecutionResult.java",
"license": "mit",
"size": 6655
}
|
[
"java.time.OffsetDateTime"
] |
import java.time.OffsetDateTime;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 1,893,885
|
public String getDefaultDomain()
throws IOException;
|
String function() throws IOException;
|
/**
* Returns the default domain used for naming the MBean.
* The default domain name is used as the domain part in the ObjectName
* of MBeans if no domain is specified by the user.
*
* @return the default domain.
*
* @exception IOException A communication problem occurred when
* talking to the MBean server.
*/
|
Returns the default domain used for naming the MBean. The default domain name is used as the domain part in the ObjectName of MBeans if no domain is specified by the user
|
getDefaultDomain
|
{
"repo_name": "wangsongpeng/jdk-src",
"path": "src/main/java/javax/management/MBeanServerConnection.java",
"license": "apache-2.0",
"size": 47640
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,770,146
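A small sketch built on the getDefaultDomain record above: naming an MBean in whatever domain the server reports as its default. Only standard javax.management types are used; the helper method name and key-property string are assumptions.

import java.io.IOException;
import javax.management.MBeanServerConnection;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;

// Build an ObjectName in the connection's default domain.
static ObjectName nameInDefaultDomain(MBeanServerConnection connection, String keyProperties)
        throws IOException, MalformedObjectNameException {
    String domain = connection.getDefaultDomain();
    return new ObjectName(domain + ":" + keyProperties);
}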
|
void snapshotState(FunctionSnapshotContext context) throws Exception;
|
void snapshotState(FunctionSnapshotContext context) throws Exception;
|
/**
* This method is called when a snapshot for a checkpoint is requested. This acts as a hook to the function to
* ensure that all state is exposed by means previously offered through {@link FunctionInitializationContext} when
* the Function was initialized, or offered now by {@link FunctionSnapshotContext} itself.
*
* @param context the context for drawing a snapshot of the operator
* @throws Exception
*/
|
This method is called when a snapshot for a checkpoint is requested. This acts as a hook to the function to ensure that all state is exposed by means previously offered through <code>FunctionInitializationContext</code> when the Function was initialized, or offered now by <code>FunctionSnapshotContext</code> itself
|
snapshotState
|
{
"repo_name": "ueshin/apache-flink",
"path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/checkpoint/CheckpointedFunction.java",
"license": "apache-2.0",
"size": 7370
}
|
[
"org.apache.flink.runtime.state.FunctionSnapshotContext"
] |
import org.apache.flink.runtime.state.FunctionSnapshotContext;
|
import org.apache.flink.runtime.state.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 2,372,573
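To make the snapshotState contract above concrete, here is a minimal CheckpointedFunction sketch modelled on the usual buffering pattern; the element type, field names, and state descriptor name are assumptions, and initializeState is included because the interface pairs it with snapshotState.

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;

// Minimal sketch: expose an in-memory buffer through operator state at checkpoint time.
public class BufferingFunction implements CheckpointedFunction {
    private transient ListState<String> checkpointedState;   // element type assumed
    private final List<String> buffer = new ArrayList<>();

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        checkpointedState.clear();                 // expose the current buffer to the state backend
        for (String element : buffer) {
            checkpointedState.add(element);
        }
    }

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        checkpointedState = context.getOperatorStateStore()
                .getListState(new ListStateDescriptor<>("buffered-elements", String.class));
        for (String element : checkpointedState.get()) {      // restore after a failure
            buffer.add(element);
        }
    }
}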
|
public void getCumulativeMonitorReport(PeerID peerID, MonitorFilter monitorFilter, MonitorListener monitorListener, long timeout) throws MonitorException;
|
void function(PeerID peerID, MonitorFilter monitorFilter, MonitorListener monitorListener, long timeout) throws MonitorException;
|
/**
* Get a MonitorReport of total accumulated metrics from the ServiceMonitors (specified in the
* MonitorFilter) since they were created/reset for the specified remote Peer.
*
* @param peerID The PeerID of the Peer you wish information about
* @param monitorFilter The MonitorFilter containing the specific ServiceMonitors and types of Service Metrics desired
* @param monitorListener The Listener to obtain the report when it arrives (or timed out)
* @param timeout The timeout for reporting that the information has not arrived.
* @throws net.jxta.meter.MonitorException if a monitor error occurs
*/
|
Get a MonitorReport of total accumulated metrics from the ServiceMonitors (specified in the MonitorFilter) since they were created/reset for the specified remote Peer
|
getCumulativeMonitorReport
|
{
"repo_name": "johnjianfang/jxse",
"path": "src/main/java/net/jxta/peer/PeerInfoService.java",
"license": "apache-2.0",
"size": 15983
}
|
[
"net.jxta.meter.MonitorException",
"net.jxta.meter.MonitorFilter",
"net.jxta.meter.MonitorListener"
] |
import net.jxta.meter.MonitorException; import net.jxta.meter.MonitorFilter; import net.jxta.meter.MonitorListener;
|
import net.jxta.meter.*;
|
[
"net.jxta.meter"
] |
net.jxta.meter;
| 2,409,667
|
private String buildViewContext(SessionState state, Context context)
{
if (Log.getLogger("chef").isDebugEnabled())
{
Log.debug("chef", this + ".buildViewContext");
}
// get current user's id
String id = (String) state.getAttribute("user-id");
// get the user and put in state as "user"
try
{
User user = UserDirectoryService.getUser(id);
context.put("user", user);
// name the html form for user edit fields
context.put("form-name", "user-form");
state.setAttribute("mode", "view");
// make sure we can do an edit
try
{
UserEdit edit = UserDirectoryService.editUser(id);
UserDirectoryService.cancelEdit(edit);
context.put("enableEdit", "true");
}
catch (UserNotDefinedException e)
{
}
catch (UserPermissionException e)
{
}
catch (UserLockedException e)
{
}
// disable auto-updates while not in list mode
disableObservers(state);
}
catch (UserNotDefinedException e)
{
Log.warn("chef", "UsersAction.doEdit: user not found: " + id);
Object[] params = new Object[]{id};
addAlert(state, rb.getFormattedMessage("useact.use_notfou", params));
state.removeAttribute("mode");
// make sure auto-updates are enabled
enableObserver(state);
}
return "_view";
} // buildViewContext
|
String function(SessionState state, Context context) { if (Log.getLogger("chef").isDebugEnabled()) { Log.debug("chef", this + STR); } String id = (String) state.getAttribute(STR); try { User user = UserDirectoryService.getUser(id); context.put("user", user); context.put(STR, STR); state.setAttribute("mode", "view"); try { UserEdit edit = UserDirectoryService.editUser(id); UserDirectoryService.cancelEdit(edit); context.put(STR, "true"); } catch (UserNotDefinedException e) { } catch (UserPermissionException e) { } catch (UserLockedException e) { } disableObservers(state); } catch (UserNotDefinedException e) { Log.warn("chef", STR + id); Object[] params = new Object[]{id}; addAlert(state, rb.getFormattedMessage(STR, params)); state.removeAttribute("mode"); enableObserver(state); } return "_view"; }
|
/**
* Build the context for the view user mode.
*/
|
Build the context for the view user mode
|
buildViewContext
|
{
"repo_name": "eemirtekin/Sakai-10.6-TR",
"path": "user/user-tool/tool/src/java/org/sakaiproject/user/tool/UsersAction.java",
"license": "apache-2.0",
"size": 56071
}
|
[
"org.sakaiproject.cheftool.Context",
"org.sakaiproject.event.api.SessionState",
"org.sakaiproject.user.api.User",
"org.sakaiproject.user.api.UserEdit",
"org.sakaiproject.user.api.UserLockedException",
"org.sakaiproject.user.api.UserNotDefinedException",
"org.sakaiproject.user.api.UserPermissionException",
"org.sakaiproject.user.cover.UserDirectoryService"
] |
import org.sakaiproject.cheftool.Context; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserEdit; import org.sakaiproject.user.api.UserLockedException; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.user.api.UserPermissionException; import org.sakaiproject.user.cover.UserDirectoryService;
|
import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*; import org.sakaiproject.user.api.*; import org.sakaiproject.user.cover.*;
|
[
"org.sakaiproject.cheftool",
"org.sakaiproject.event",
"org.sakaiproject.user"
] |
org.sakaiproject.cheftool; org.sakaiproject.event; org.sakaiproject.user;
| 1,042,906
|
public Iterator getHTTPRequestProperties() {
return fHTTPRequestProperties.entrySet().iterator();
} // getHTTPRequestProperties():Iterator
|
Iterator function() { return fHTTPRequestProperties.entrySet().iterator(); }
|
/**
* Returns an iterator for the request properties this
* input source contains. Each object returned by the
* iterator is an instance of <code>java.util.Map.Entry</code>
* where each key and value are a pair of strings corresponding
* to the name and value of a request property.
*
* @return an iterator for the request properties this
* input source contains
*/
|
Returns an iterator for the request properties this input source contains. Each object returned by the iterator is an instance of <code>java.util.Map.Entry</code> where each key and value are a pair of strings corresponding to the name and value of a request property
|
getHTTPRequestProperties
|
{
"repo_name": "BIORIMP/biorimp",
"path": "BIO-RIMP/test_data/code/xerces/src/org/apache/xerces/util/HTTPInputSource.java",
"license": "gpl-2.0",
"size": 7502
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 506,614
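A usage sketch for the iterator described above; source is an assumed HTTPInputSource instance, and the entries are cast to String per the Javadoc since the API predates generics.

import java.util.Iterator;
import java.util.Map;

// Walk the request properties of an (assumed) HTTPInputSource "source".
Iterator properties = source.getHTTPRequestProperties();
while (properties.hasNext()) {
    Map.Entry entry = (Map.Entry) properties.next();
    String name = (String) entry.getKey();
    String value = (String) entry.getValue();
    System.out.println(name + ": " + value);
}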
|
public Observable<ServiceResponse<Page<ApplicationInner>>> listByClusterSinglePageAsync(final String resourceGroupName, final String clusterName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (clusterName == null) {
throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
|
Observable<ServiceResponse<Page<ApplicationInner>>> function(final String resourceGroupName, final String clusterName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (clusterName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
|
/**
* Lists all of the applications for the HDInsight cluster.
*
 * @param resourceGroupName The name of the resource group.
 * @param clusterName The name of the cluster.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<ApplicationInner> object wrapped in {@link ServiceResponse} if successful.
*/
|
Lists all of the applications for the HDInsight cluster
|
listByClusterSinglePageAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/hdinsight/mgmt-v2018_06_01_preview/src/main/java/com/microsoft/azure/management/hdinsight/v2018_06_01_preview/implementation/ApplicationsInner.java",
"license": "mit",
"size": 45428
}
|
[
"com.microsoft.azure.Page",
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.azure.*; import com.microsoft.rest.*;
|
[
"com.microsoft.azure",
"com.microsoft.rest"
] |
com.microsoft.azure; com.microsoft.rest;
| 2,111,874
|
boolean isNameOfEmailValid(PerunSessionImpl sess, String email);
|
boolean isNameOfEmailValid(PerunSessionImpl sess, String email);
|
/**
 * Checks the name of an email against the standard pattern and returns true if it is valid.
*
* @param sess
* @param email name of the email
*
* @return true if the name of email is valid
*/
|
Checks the name of an email against the standard pattern and returns true if it is valid
|
isNameOfEmailValid
|
{
"repo_name": "jirmauritz/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/bl/ModulesUtilsBl.java",
"license": "bsd-2-clause",
"size": 19388
}
|
[
"cz.metacentrum.perun.core.impl.PerunSessionImpl"
] |
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
|
import cz.metacentrum.perun.core.impl.*;
|
[
"cz.metacentrum.perun"
] |
cz.metacentrum.perun;
| 2,763,170
|
private void addValue(Attribute attribute) {
this.getAttributes().add(attribute);
}
|
void function(Attribute attribute) { this.getAttributes().add(attribute); }
|
/**
* Adds an attribute value to the list.
*
* @param attribute The attribute to add.
*/
|
Adds an attribute value to the list
|
addValue
|
{
"repo_name": "brettryan/tiles",
"path": "tiles-jsp/src/main/java/org/apache/tiles/jsp/taglib/PutListAttributeTag.java",
"license": "apache-2.0",
"size": 3156
}
|
[
"org.apache.tiles.Attribute"
] |
import org.apache.tiles.Attribute;
|
import org.apache.tiles.*;
|
[
"org.apache.tiles"
] |
org.apache.tiles;
| 166,592
|
void onItemClick(StaggeredGridView parent, View view, int position, long id);
}
|
void onItemClick(StaggeredGridView parent, View view, int position, long id); }
|
/**
* Callback method to be invoked when an item in this AdapterView has
* been clicked.
* <p>
* Implementers can call getItemAtPosition(position) if they need
* to access the data associated with the selected item.
*
* @param parent The AdapterView where the click happened.
* @param view The view within the AdapterView that was clicked (this
* will be a view provided by the adapter)
* @param position The position of the view in the adapter.
* @param id The row id of the item that was clicked.
*/
|
Callback method to be invoked when an item in this AdapterView has been clicked. Implementers can call getItemAtPosition(position) if they need to access the data associated with the selected item
|
onItemClick
|
{
"repo_name": "GMAndroidTeam/Android-PullToRefresh",
"path": "library/src/main/java/com/handmark/pulltorefresh/library/internal/StaggeredGridView.java",
"license": "apache-2.0",
"size": 92730
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,553,494
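A hedged sketch of implementing the callback above. The listener interface nesting and the setOnItemClickListener registration method are assumptions modelled on AdapterView; only the onItemClick signature comes from the record.

// Hypothetical registration of the item-click callback on a StaggeredGridView.
gridView.setOnItemClickListener(new StaggeredGridView.OnItemClickListener() {
    @Override
    public void onItemClick(StaggeredGridView parent, View view, int position, long id) {
        Object item = parent.getItemAtPosition(position);  // per the Javadoc above
        // handle the clicked item here
    }
});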
|
public Date getModifiedSinceConstraint() {
return modifiedSinceConstraint;
}
|
Date function() { return modifiedSinceConstraint; }
|
/**
* <p>
* Gets the optional modified constraint that restricts this
* request to executing only if the source object <b>has</b> been
* modified after the specified date.
* </p>
* <p>
* The modified constraint may be used with non-matching ETag
* constraints, but not with any other type of constraint.
* </p>
*
* @return The optional modified constraint that restricts this
* request to executing only if the source object <b>has</b>
* been modified after the specified date.
*/
|
Gets the optional modified constraint that restricts this request to executing only if the source object has been modified after the specified date. The modified constraint may be used with non-matching ETag constraints, but not with any other type of constraint.
|
getModifiedSinceConstraint
|
{
"repo_name": "mahaliachante/aws-sdk-java",
"path": "aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/CopyObjectRequest.java",
"license": "apache-2.0",
"size": 40571
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,547,491
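An illustrative pairing of the getter above with its setter on CopyObjectRequest; the setter name mirrors the getter but should be treated as an assumption, as should the bucket and key names.

import java.util.Date;
import com.amazonaws.services.s3.model.CopyObjectRequest;

// Sketch: only perform the copy if the source object changed within the last hour.
CopyObjectRequest request =
        new CopyObjectRequest("src-bucket", "src-key", "dst-bucket", "dst-key");
request.setModifiedSinceConstraint(new Date(System.currentTimeMillis() - 3_600_000L)); // setter assumed
Date constraint = request.getModifiedSinceConstraint();  // getter from the record above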
|
public static IntBuffer fill(IntBuffer buffer, int value, int count) {
if (count > buffer.remaining()) {
throw msg.bufferUnderflow();
}
if (buffer.hasArray()) {
final int offs = buffer.arrayOffset();
Arrays.fill(buffer.array(), offs + buffer.position(), offs + buffer.limit(), value);
skip(buffer, count);
} else {
for (int i = count; i > 0; i--) {
buffer.put(value);
}
}
return buffer;
}
|
static IntBuffer function(IntBuffer buffer, int value, int count) { if (count > buffer.remaining()) { throw msg.bufferUnderflow(); } if (buffer.hasArray()) { final int offs = buffer.arrayOffset(); Arrays.fill(buffer.array(), offs + buffer.position(), offs + buffer.limit(), value); skip(buffer, count); } else { for (int i = count; i > 0; i--) { buffer.put(value); } } return buffer; }
|
/**
* Fill a buffer with a repeated value.
*
* @param buffer the buffer to fill
* @param value the value to fill
* @param count the number of ints to fill
* @return the buffer instance
*/
|
Fill a buffer with a repeated value
|
fill
|
{
"repo_name": "stuartwdouglas/xnio",
"path": "api/src/main/java/org/xnio/Buffers.java",
"license": "apache-2.0",
"size": 86113
}
|
[
"java.nio.IntBuffer",
"java.util.Arrays"
] |
import java.nio.IntBuffer; import java.util.Arrays;
|
import java.nio.*; import java.util.*;
|
[
"java.nio",
"java.util"
] |
java.nio; java.util;
| 864,570
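A short usage sketch for the fill helper above; Buffers is the class from the record's repository and is assumed to be on the classpath.

import java.nio.IntBuffer;

// Fill the next 8 ints of a heap buffer with a sentinel value; the position advances by 8.
IntBuffer buffer = IntBuffer.allocate(16);
Buffers.fill(buffer, 0xDEADBEEF, 8);  // throws if fewer than 8 ints remain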
|
public void validateMessage(OAuthMessage message, OAuthAccessor accessor, OAuthToken requestToken)
throws OAuthException, IOException, URISyntaxException {
checkSingleParameters(message);
validateVersion(message);
validateSignature(message, accessor);
validateTimestampAndNonce(message, requestToken);
}
|
void function(OAuthMessage message, OAuthAccessor accessor, OAuthToken requestToken) throws OAuthException, IOException, URISyntaxException { checkSingleParameters(message); validateVersion(message); validateSignature(message, accessor); validateTimestampAndNonce(message, requestToken); }
|
/**
* Overridden to validate the timestamp and nonces last since they have side-effects of storing
* data about the message, so we have to make sure the message is valid before we do that.
*/
|
Overridden to validate the timestamp and nonces last since they have side-effects of storing data about the message, so we have to make sure the message is valid before we do that
|
validateMessage
|
{
"repo_name": "raphaelning/resteasy-client-android",
"path": "jaxrs/security/resteasy-oauth/src/main/java/org/jboss/resteasy/auth/oauth/OAuthValidator.java",
"license": "apache-2.0",
"size": 2680
}
|
[
"java.io.IOException",
"java.net.URISyntaxException",
"net.oauth.OAuthAccessor",
"net.oauth.OAuthException",
"net.oauth.OAuthMessage"
] |
import java.io.IOException; import java.net.URISyntaxException; import net.oauth.OAuthAccessor; import net.oauth.OAuthException; import net.oauth.OAuthMessage;
|
import java.io.*; import java.net.*; import net.oauth.*;
|
[
"java.io",
"java.net",
"net.oauth"
] |
java.io; java.net; net.oauth;
| 241,813
|
@Override public void exitTypeParameter(@NotNull JavaParser.TypeParameterContext ctx) { }
|
@Override public void exitTypeParameter(@NotNull JavaParser.TypeParameterContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
exitTypeParameter
|
{
"repo_name": "martinaguero/deep",
"path": "src/org/trimatek/deep/lexer/JavaBaseListener.java",
"license": "apache-2.0",
"size": 39286
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 1,030,637
|
private String getExternalDbNameAsPrefix() {
if(StringUtils.isNotBlank(externalDbName)) {
return externalDbName+".";
} else {
return "";
}
}
|
String function() { if(StringUtils.isNotBlank(externalDbName)) { return externalDbName+"."; } else { return ""; } }
|
/**
 * Helper to get the externalDbName as a prefix that can be used directly in queries; a trailing "." is appended.
 * If it is not configured, an empty string is returned so that whatever this returns can be used as-is.
* @return
*/
|
Helper to get the externalDbName as a prefix that can be used directly in queries; a trailing "." is appended. If it is not configured, an empty string is returned so that whatever this returns can be used as-is
|
getExternalDbNameAsPrefix
|
{
"repo_name": "pushyamig/sakai",
"path": "sitestats/sitestats-impl/src/java/org/sakaiproject/sitestats/impl/ServerWideReportManagerImpl.java",
"license": "apache-2.0",
"size": 59882
}
|
[
"org.apache.commons.lang.StringUtils"
] |
import org.apache.commons.lang.StringUtils;
|
import org.apache.commons.lang.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,344,528
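A sketch of the intended use per the Javadoc above: the returned prefix is concatenated straight into SQL, so it is either "externalDbName." or empty. The table name and query shape are assumptions.

// Hypothetical query construction using the prefix helper from the record above.
String prefix = getExternalDbNameAsPrefix();               // "" or "externalDbName."
String sql = "select count(*) from " + prefix + "SST_EVENTS where EVENT_DATE > ?";  // table name assumed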
|
private void resolve() throws PersistenceException {
this.permission = super.getPersistenceManager().find(this.permission);
}
|
void function() throws PersistenceException { this.permission = super.getPersistenceManager().find(this.permission); }
|
/**
* Resolve the permission.
*
* @throws PersistenceException
* when the permission is not persistent
*/
|
Resolve the permission
|
resolve
|
{
"repo_name": "NABUCCO/org.nabucco.framework.common.authorization",
"path": "org.nabucco.framework.common.authorization.impl.service/src/main/man/org/nabucco/framework/common/authorization/impl/service/resolve/ResolveAuthorizationPermissionServiceHandlerImpl.java",
"license": "epl-1.0",
"size": 5538
}
|
[
"org.nabucco.framework.base.facade.exception.persistence.PersistenceException"
] |
import org.nabucco.framework.base.facade.exception.persistence.PersistenceException;
|
import org.nabucco.framework.base.facade.exception.persistence.*;
|
[
"org.nabucco.framework"
] |
org.nabucco.framework;
| 1,132,404
|
private synchronized void enqueueSignerInfoRequest(final WaveletFederationProvider provider,
final ByteString signerId, final WaveletName waveletName,
ProtocolHashedVersion deltaEndVersion, SignerInfoPrefetchResultListener callback) {
final String domain = waveletName.waveletId.getDomain();
Multimap<String, SignerInfoPrefetchResultListener> domainCallbacks =
signerInfoRequests.get(signerId);
if (domainCallbacks == null) {
domainCallbacks = ArrayListMultimap.create();
signerInfoRequests.put(signerId, domainCallbacks);
}
// The thing is, we need to add multiple callbacks for the same domain, but we only want to
// have one outstanding request per domain
domainCallbacks.put(domain, callback);
|
synchronized void function(final WaveletFederationProvider provider, final ByteString signerId, final WaveletName waveletName, ProtocolHashedVersion deltaEndVersion, SignerInfoPrefetchResultListener callback) { final String domain = waveletName.waveletId.getDomain(); Multimap<String, SignerInfoPrefetchResultListener> domainCallbacks = signerInfoRequests.get(signerId); if (domainCallbacks == null) { domainCallbacks = ArrayListMultimap.create(); signerInfoRequests.put(signerId, domainCallbacks); } domainCallbacks.put(domain, callback);
|
/**
* Enqueue a signer info request for a signed delta on a given domain.
*/
|
Enqueue a signer info request for a signed delta on a given domain
|
enqueueSignerInfoRequest
|
{
"repo_name": "scrosby/fedone",
"path": "src/org/waveprotocol/wave/examples/fedone/waveserver/CertificateManagerImpl.java",
"license": "apache-2.0",
"size": 11896
}
|
[
"com.google.common.collect.ArrayListMultimap",
"com.google.common.collect.Multimap",
"com.google.protobuf.ByteString",
"org.waveprotocol.wave.federation.Proto",
"org.waveprotocol.wave.model.id.WaveletName",
"org.waveprotocol.wave.waveserver.WaveletFederationProvider"
] |
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.protobuf.ByteString; import org.waveprotocol.wave.federation.Proto; import org.waveprotocol.wave.model.id.WaveletName; import org.waveprotocol.wave.waveserver.WaveletFederationProvider;
|
import com.google.common.collect.*; import com.google.protobuf.*; import org.waveprotocol.wave.federation.*; import org.waveprotocol.wave.model.id.*; import org.waveprotocol.wave.waveserver.*;
|
[
"com.google.common",
"com.google.protobuf",
"org.waveprotocol.wave"
] |
com.google.common; com.google.protobuf; org.waveprotocol.wave;
| 2,673,953
|
@Test
public void testRandomSeeks() throws Throwable {
int limit = getContract().getLimit(TEST_RANDOM_SEEK_COUNT,
DEFAULT_RANDOM_SEEK_COUNT);
describe("Testing " + limit + " random seeks");
int filesize = 10 * 1024;
byte[] buf = dataset(filesize, 0, 255);
Path randomSeekFile = path("testrandomseeks.bin");
createFile(getFileSystem(), randomSeekFile, false, buf);
Random r = new Random();
// Record the sequence of seeks and reads which trigger a failure.
int[] seeks = new int[10];
int[] reads = new int[10];
try (FSDataInputStream stm = getFileSystem().open(randomSeekFile)) {
for (int i = 0; i < limit; i++) {
int seekOff = r.nextInt(buf.length);
int toRead = r.nextInt(Math.min(buf.length - seekOff, 32000));
seeks[i % seeks.length] = seekOff;
reads[i % reads.length] = toRead;
verifyRead(stm, buf, seekOff, toRead);
}
} catch (AssertionError afe) {
StringBuilder sb = new StringBuilder();
sb.append("Sequence of actions:\n");
for (int j = 0; j < seeks.length; j++) {
sb.append("seek @ ").append(seeks[j]).append(" ")
.append("read ").append(reads[j]).append("\n");
}
LOG.error(sb.toString());
throw afe;
}
}
|
void function() throws Throwable { int limit = getContract().getLimit(TEST_RANDOM_SEEK_COUNT, DEFAULT_RANDOM_SEEK_COUNT); describe(STR + limit + STR); int filesize = 10 * 1024; byte[] buf = dataset(filesize, 0, 255); Path randomSeekFile = path(STR); createFile(getFileSystem(), randomSeekFile, false, buf); Random r = new Random(); int[] seeks = new int[10]; int[] reads = new int[10]; try (FSDataInputStream stm = getFileSystem().open(randomSeekFile)) { for (int i = 0; i < limit; i++) { int seekOff = r.nextInt(buf.length); int toRead = r.nextInt(Math.min(buf.length - seekOff, 32000)); seeks[i % seeks.length] = seekOff; reads[i % reads.length] = toRead; verifyRead(stm, buf, seekOff, toRead); } } catch (AssertionError afe) { StringBuilder sb = new StringBuilder(); sb.append(STR); for (int j = 0; j < seeks.length; j++) { sb.append(STR).append(seeks[j]).append(" ") .append(STR).append(reads[j]).append("\n"); } LOG.error(sb.toString()); throw afe; } }
|
/**
* Lifted from TestLocalFileSystem:
* Regression test for HADOOP-9307: BufferedFSInputStream returning
* wrong results after certain sequences of seeks and reads.
*/
|
Lifted from TestLocalFileSystem: Regression test for HADOOP-9307: BufferedFSInputStream returning wrong results after certain sequences of seeks and reads
|
testRandomSeeks
|
{
"repo_name": "NJUJYB/disYarn",
"path": "hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java",
"license": "apache-2.0",
"size": 19850
}
|
[
"java.util.Random",
"org.apache.hadoop.fs.FSDataInputStream",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.fs.contract.ContractTestUtils"
] |
import java.util.Random; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils;
|
import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.contract.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 311,505
|
private void readObject(ObjectInputStream s)
throws IOException, ClassNotFoundException {
ObjectInputStream.GetField fields = s.readFields();
int[] tmp = (int[])fields.get("updateCounts", null);
long[] tmp2 = (long[])fields.get("longUpdateCounts", null);
if(tmp != null && tmp2 != null && tmp.length != tmp2.length)
throw new InvalidObjectException("update counts are not the expected size");
if (tmp != null)
updateCounts = tmp.clone();
if (tmp2 != null)
longUpdateCounts = tmp2.clone();
if(updateCounts == null && longUpdateCounts != null)
updateCounts = copyUpdateCount(longUpdateCounts);
if(longUpdateCounts == null && updateCounts != null)
longUpdateCounts = copyUpdateCount(updateCounts);
}
/**
* writeObject is called to save the state of the {@code BatchUpdateException}
|
void function(ObjectInputStream s) throws IOException, ClassNotFoundException { ObjectInputStream.GetField fields = s.readFields(); int[] tmp = (int[])fields.get(STR, null); long[] tmp2 = (long[])fields.get(STR, null); if(tmp != null && tmp2 != null && tmp.length != tmp2.length) throw new InvalidObjectException(STR); if (tmp != null) updateCounts = tmp.clone(); if (tmp2 != null) longUpdateCounts = tmp2.clone(); if(updateCounts == null && longUpdateCounts != null) updateCounts = copyUpdateCount(longUpdateCounts); if(longUpdateCounts == null && updateCounts != null) longUpdateCounts = copyUpdateCount(updateCounts); } /** * writeObject is called to save the state of the {@code BatchUpdateException}
|
/**
* readObject is called to restore the state of the
* {@code BatchUpdateException} from a stream.
*/
|
readObject is called to restore the state of the BatchUpdateException from a stream
|
readObject
|
{
"repo_name": "isaacl/openjdk-jdk",
"path": "src/share/classes/java/sql/BatchUpdateException.java",
"license": "gpl-2.0",
"size": 26351
}
|
[
"java.io.IOException",
"java.io.InvalidObjectException",
"java.io.ObjectInputStream"
] |
import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 400,571
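The readObject above keeps the int[] and long[] update-count fields consistent by converting whichever one was absent from the serialized form. The real copyUpdateCount helpers are only referenced by name in the record, so the sketch below is an assumption that simply widens or clamps values:

// Hedged sketch of the two conversion helpers the readObject relies on; the
// bodies are assumptions, not BatchUpdateException's actual implementation.
public final class UpdateCountCopy {
    // Widen an int[] of update counts into a long[].
    static long[] copyUpdateCount(int[] uc) {
        long[] copy = new long[uc.length];
        for (int i = 0; i < uc.length; i++) {
            copy[i] = uc[i];
        }
        return copy;
    }

    // Narrow a long[] of update counts into an int[]; out-of-range values are
    // clamped here purely for illustration.
    static int[] copyUpdateCount(long[] uc) {
        int[] copy = new int[uc.length];
        for (int i = 0; i < uc.length; i++) {
            long v = uc[i];
            copy[i] = (int) Math.max(Integer.MIN_VALUE, Math.min(Integer.MAX_VALUE, v));
        }
        return copy;
    }

    public static void main(String[] args) {
        long[] longCounts = {1L, 2L, 5_000_000_000L};
        int[] intCounts = copyUpdateCount(longCounts);
        System.out.println(java.util.Arrays.toString(intCounts)); // [1, 2, 2147483647]
        System.out.println(java.util.Arrays.toString(copyUpdateCount(intCounts)));
    }
}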
|
private PrefabGraphTypeDao createPrefabGraphType(String type,
Resource sourceResource) {
// Default to adding a callback
return this.createPrefabGraphType(type, sourceResource, true);
}
|
PrefabGraphTypeDao function(String type, Resource sourceResource) { return this.createPrefabGraphType(type, sourceResource, true); }
|
/**
* Wrapper around the createPrefabGraphType which takes a reloadable
     * argument, defaulting to reloadable = true, for simplicity in the
     * default/expected case.
*
* @param type
* @param sourceResource
* @return
*/
|
Wrapper around the createPrefabGraphType which takes a reloadable argument, defaulting to reloadable = true, for simplicity in the default/expected case
|
createPrefabGraphType
|
{
"repo_name": "bugcy013/opennms-tmp-tools",
"path": "opennms-dao/src/main/java/org/opennms/netmgt/dao/support/PropertiesGraphDao.java",
"license": "gpl-2.0",
"size": 45120
}
|
[
"org.springframework.core.io.Resource"
] |
import org.springframework.core.io.Resource;
|
import org.springframework.core.io.*;
|
[
"org.springframework.core"
] |
org.springframework.core;
| 1,006,550
|
public LatLong fromPixels(double x, double y) {
if (this.mapView.getWidth() <= 0 || this.mapView.getHeight() <= 0) {
return null;
}
// this uses the framebuffer position, the mapview position can be out of sync with
// what the user sees on the screen if an animation is in progress
MapPosition mapPosition = this.mapView.getModel().frameBufferModel.getMapPosition();
// this means somehow the mapview is not yet properly set up, see issue #308.
if (mapPosition == null) {
return null;
}
// calculate the pixel coordinates of the top left corner
LatLong latLong = mapPosition.latLong;
long mapSize = MercatorProjection.getMapSize(mapPosition.zoomLevel, this.mapView.getModel().displayModel.getTileSize());
double pixelX = MercatorProjection.longitudeToPixelX(latLong.longitude, mapSize);
double pixelY = MercatorProjection.latitudeToPixelY(latLong.latitude, mapSize);
pixelX -= this.mapView.getWidth() >> 1;
pixelY -= this.mapView.getHeight() >> 1;
// catch outer map limits
try {
// convert the pixel coordinates to a LatLong and return it
return new LatLong(MercatorProjection.pixelYToLatitude(pixelY + y, mapSize),
MercatorProjection.pixelXToLongitude(pixelX + x, mapSize));
} catch (Exception e) {
return null;
}
}
|
LatLong function(double x, double y) { if (this.mapView.getWidth() <= 0 || this.mapView.getHeight() <= 0) { return null; } MapPosition mapPosition = this.mapView.getModel().frameBufferModel.getMapPosition(); if (mapPosition == null) { return null; } LatLong latLong = mapPosition.latLong; long mapSize = MercatorProjection.getMapSize(mapPosition.zoomLevel, this.mapView.getModel().displayModel.getTileSize()); double pixelX = MercatorProjection.longitudeToPixelX(latLong.longitude, mapSize); double pixelY = MercatorProjection.latitudeToPixelY(latLong.latitude, mapSize); pixelX -= this.mapView.getWidth() >> 1; pixelY -= this.mapView.getHeight() >> 1; try { return new LatLong(MercatorProjection.pixelYToLatitude(pixelY + y, mapSize), MercatorProjection.pixelXToLongitude(pixelX + x, mapSize)); } catch (Exception e) { return null; } }
|
/**
* Computes the geographic coordinates of a screen point.
*
* @return the coordinates of the x/y point
*/
|
Computes the geographic coordinates of a screen point
|
fromPixels
|
{
"repo_name": "lincomatic/mapsforge",
"path": "mapsforge-map/src/main/java/org/mapsforge/map/util/MapViewProjection.java",
"license": "lgpl-3.0",
"size": 4814
}
|
[
"org.mapsforge.core.model.LatLong",
"org.mapsforge.core.model.MapPosition",
"org.mapsforge.core.util.MercatorProjection"
] |
import org.mapsforge.core.model.LatLong; import org.mapsforge.core.model.MapPosition; import org.mapsforge.core.util.MercatorProjection;
|
import org.mapsforge.core.model.*; import org.mapsforge.core.util.*;
|
[
"org.mapsforge.core"
] |
org.mapsforge.core;
| 531,208
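The conversion above leans on MercatorProjection for the pixel math. The standard Web Mercator formulas behind those calls can be sketched as follows; mapsforge's implementation may differ in details such as clamping and tile handling, so treat this as an approximation:

// Self-contained sketch of the textbook Web Mercator conversions.
public class MercatorSketch {
    static double longitudeToPixelX(double longitude, long mapSize) {
        return (longitude + 180.0) / 360.0 * mapSize;
    }

    static double latitudeToPixelY(double latitude, long mapSize) {
        double sinLat = Math.sin(Math.toRadians(latitude));
        return (0.5 - Math.log((1 + sinLat) / (1 - sinLat)) / (4 * Math.PI)) * mapSize;
    }

    static double pixelXToLongitude(double pixelX, long mapSize) {
        return 360.0 * (pixelX / mapSize - 0.5);
    }

    static double pixelYToLatitude(double pixelY, long mapSize) {
        double y = 0.5 - pixelY / mapSize;
        return 90.0 - 360.0 * Math.atan(Math.exp(-y * 2 * Math.PI)) / Math.PI;
    }

    public static void main(String[] args) {
        long mapSize = 256L << 10; // tile size 256 at zoom level 10
        double lon = 8.8017, lat = 53.0793; // example coordinates
        double px = longitudeToPixelX(lon, mapSize);
        double py = latitudeToPixelY(lat, mapSize);
        // Round-trip back to geographic coordinates.
        System.out.printf("lon=%.4f lat=%.4f%n",
            pixelXToLongitude(px, mapSize), pixelYToLatitude(py, mapSize));
    }
}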
|
protected boolean createCashDocumentForPurchase() {
return createCashDocumentBasedOnHoldingCost(EndowConstants.DocumentTypeNames.ENDOWMENT_ASSET_INCREASE, EndowParameterKeyConstants.PURCHASE_DESCRIPTION, EndowConstants.TRANSACTION_SECURITY_TYPE_TARGET, EndowParameterKeyConstants.PURCHASE_NO_ROUTE_IND, EndowConstants.IncomePrincipalIndicator.PRINCIPAL);
}
|
boolean function() { return createCashDocumentBasedOnHoldingCost(EndowConstants.DocumentTypeNames.ENDOWMENT_ASSET_INCREASE, EndowParameterKeyConstants.PURCHASE_DESCRIPTION, EndowConstants.TRANSACTION_SECURITY_TYPE_TARGET, EndowParameterKeyConstants.PURCHASE_NO_ROUTE_IND, EndowConstants.IncomePrincipalIndicator.PRINCIPAL); }
|
/**
* Creates an ECI or an ECDD eDoc according to the total amount of holding cost for EAI
*/
|
Creates an ECI or an ECDD eDoc according to the total amount of holding cost for EAI
|
createCashDocumentForPurchase
|
{
"repo_name": "ua-eas/ua-kfs-5.3",
"path": "work/src/org/kuali/kfs/module/endow/batch/service/impl/PooledFundControlTransactionsServiceImpl.java",
"license": "agpl-3.0",
"size": 39120
}
|
[
"org.kuali.kfs.module.endow.EndowConstants",
"org.kuali.kfs.module.endow.EndowParameterKeyConstants"
] |
import org.kuali.kfs.module.endow.EndowConstants; import org.kuali.kfs.module.endow.EndowParameterKeyConstants;
|
import org.kuali.kfs.module.endow.*;
|
[
"org.kuali.kfs"
] |
org.kuali.kfs;
| 1,356,218
|
Logger getLogger();
|
Logger getLogger();
|
/**
* Gets the logger.
*
* @return The logger.
*/
|
Gets the logger
|
getLogger
|
{
"repo_name": "UltimateGames/UltimateGames",
"path": "api/src/main/java/me/ampayne2/ultimategames/api/message/Messenger.java",
"license": "lgpl-3.0",
"size": 3697
}
|
[
"java.util.logging.Logger"
] |
import java.util.logging.Logger;
|
import java.util.logging.*;
|
[
"java.util"
] |
java.util;
| 2,545,951
|
private List _internalGetAsArray(final String path)
throws IllegalArgumentException {
final List tokens = new ResultTokenizer(path).tokenize(namespaceAliases);
if (tokens.isEmpty()) {
return tokens;
}
final StructuredContent obj = apply(root, tokens, 0);
if (obj == null) {
return new Vector();
}
String key = (String) tokens.get(tokens.size() - 1);
// if the last element of expression is a glob, handle it here
if ((tokens.size() > 1)
&& SELECT_GLOB.equals(tokens.get(tokens.size() - 2))) {
return obj.getDescendants(key);
}
// if the last element of expression is an attribute, handle it here
if (key.startsWith("@")) {
key = key.substring(1);
String v = obj.getAttribute(key);
List array = new Vector();
if (v != null) {
// this will allow caller to get parent of an attribute if
// needed
array.add(new MapContent(v, obj));
}
return array;
} else if (key.charAt(0) == Result.ARRAY_END && tokens.size() >= 4) {
// Handle path ending with a predicate instead of a key
//key = (String)tokens.get(tokens.size() - 4);
List array = new Vector();
//array.add(new MapContent(key, obj));
array.add(obj);
return array;
}
// otherwise, last element of expression selects a child node.
return obj.getChildren(key);
}
|
List function(final String path) throws IllegalArgumentException { final List tokens = new ResultTokenizer(path).tokenize(namespaceAliases); if (tokens.isEmpty()) { return tokens; } final StructuredContent obj = apply(root, tokens, 0); if (obj == null) { return new Vector(); } String key = (String) tokens.get(tokens.size() - 1); if ((tokens.size() > 1) && SELECT_GLOB.equals(tokens.get(tokens.size() - 2))) { return obj.getDescendants(key); } if (key.startsWith("@")) { key = key.substring(1); String v = obj.getAttribute(key); List array = new Vector(); if (v != null) { array.add(new MapContent(v, obj)); } return array; } else if (key.charAt(0) == Result.ARRAY_END && tokens.size() >= 4) { List array = new Vector(); array.add(obj); return array; } return obj.getChildren(key); }
|
/**
* Internal function, do not use. This version does not convert the
* structured content nodes in the array, so not to be called by end user.
*
* @param path
* @return
* @throws IllegalArgumentException
*/
|
Internal function, do not use. This version does not convert the structured content nodes in the array, so not to be called by end user
|
_internalGetAsArray
|
{
"repo_name": "saeder/CodenameOne",
"path": "CodenameOne/src/com/codename1/processing/Result.java",
"license": "gpl-2.0",
"size": 36091
}
|
[
"java.util.List",
"java.util.Vector"
] |
import java.util.List; import java.util.Vector;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,710,083
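The method above tokenizes a path expression and walks a structured-content tree, with special handling for a trailing attribute selector. A generic sketch of that idea over nested Maps follows; it is illustrative only and is not the Codename One Result API:

// Generic path evaluation over nested maps: tokenize, walk, and treat a
// trailing "@name" token as an attribute lookup under an "attributes" map.
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PathSketch {
    @SuppressWarnings("unchecked")
    static Object select(Map<String, Object> root, String path) {
        List<String> tokens = Arrays.asList(path.split("/"));
        Map<String, Object> node = root;
        for (int i = 0; i < tokens.size(); i++) {
            String token = tokens.get(i);
            boolean last = (i == tokens.size() - 1);
            if (last && token.startsWith("@")) {
                // Trailing attribute selector.
                Map<String, Object> attrs = (Map<String, Object>) node.get("attributes");
                return attrs == null ? null : attrs.get(token.substring(1));
            }
            Object child = node.get(token);
            if (last) {
                return child;
            }
            if (!(child instanceof Map)) {
                return null; // path walks off the tree
            }
            node = (Map<String, Object>) child;
        }
        return node;
    }

    public static void main(String[] args) {
        Map<String, Object> book = new HashMap<>();
        book.put("title", "Dune");
        book.put("attributes", Map.of("isbn", "978-0441172719"));
        Map<String, Object> root = new HashMap<>();
        root.put("book", book);
        System.out.println(select(root, "book/title"));  // Dune
        System.out.println(select(root, "book/@isbn"));  // 978-0441172719
    }
}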
|
private boolean wasDismissedBefore() {
return getDefaultSharedPreferences(getContext()).getBoolean(mAttributes.getKey().getValue(), false);
}
|
boolean function() { return getDefaultSharedPreferences(getContext()).getBoolean(mAttributes.getKey().getValue(), false); }
|
/**
* Checks if this view has been dismissed before.
*
* @return True if this view has been dismissed before, false otherwise.
*/
|
Checks if this view has been dismissed before
|
wasDismissedBefore
|
{
"repo_name": "joaocsousa/OneTimeHintView",
"path": "library/src/main/java/com/tinycoolthings/onetimehintview/OneTimeHintView.java",
"license": "apache-2.0",
"size": 19799
}
|
[
"android.preference.PreferenceManager"
] |
import android.preference.PreferenceManager;
|
import android.preference.*;
|
[
"android.preference"
] |
android.preference;
| 2,302,053
|
protected final boolean markObsolete0(GridCacheVersion ver, boolean clear, GridCacheObsoleteEntryExtras extras) {
assert Thread.holdsLock(this);
if (evictionDisabled()) {
assert !obsolete() : this;
return false;
}
GridCacheVersion obsoleteVer = obsoleteVersionExtras();
if (ver != null) {
// If already obsolete, then do nothing.
if (obsoleteVer != null)
return true;
GridCacheMvcc mvcc = mvccExtras();
if (mvcc == null || mvcc.isEmpty(ver)) {
obsoleteVer = ver;
obsoleteVersionExtras(obsoleteVer, extras);
if (clear)
value(null);
}
return obsoleteVer != null;
}
else
return obsoleteVer != null;
}
|
final boolean function(GridCacheVersion ver, boolean clear, GridCacheObsoleteEntryExtras extras) { assert Thread.holdsLock(this); if (evictionDisabled()) { assert !obsolete() : this; return false; } GridCacheVersion obsoleteVer = obsoleteVersionExtras(); if (ver != null) { if (obsoleteVer != null) return true; GridCacheMvcc mvcc = mvccExtras(); if (mvcc == null || mvcc.isEmpty(ver)) { obsoleteVer = ver; obsoleteVersionExtras(obsoleteVer, extras); if (clear) value(null); } return obsoleteVer != null; } else return obsoleteVer != null; }
|
/**
* <p>
* Note that {@link #onMarkedObsolete()} should always be called after this method
* returns {@code true}.
*
* @param ver Version.
* @param clear {@code True} to clear.
* @param extras Predefined extras.
* @return {@code True} if entry is obsolete, {@code false} if entry is still used by other threads or nodes.
*/
|
Note that <code>#onMarkedObsolete()</code> should always be called after this method returns true
|
markObsolete0
|
{
"repo_name": "ryanzz/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java",
"license": "apache-2.0",
"size": 148118
}
|
[
"org.apache.ignite.internal.processors.cache.extras.GridCacheObsoleteEntryExtras",
"org.apache.ignite.internal.processors.cache.version.GridCacheVersion"
] |
import org.apache.ignite.internal.processors.cache.extras.GridCacheObsoleteEntryExtras; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
|
import org.apache.ignite.internal.processors.cache.extras.*; import org.apache.ignite.internal.processors.cache.version.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 2,019,887
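markObsolete0 above marks the entry only once, and only while no MVCC candidates still reference the version. A simplified, lock-guarded sketch of that idempotent transition with stand-in fields (this is not Ignite's GridCacheMapEntry):

// Idempotent "mark obsolete" transition: skipped while other owners remain,
// a no-op once already marked, and the value is cleared on request.
public class ObsoleteSketch {
    private Object value;
    private Long obsoleteVer;       // null until marked obsolete
    private int activeOwners;       // stand-in for MVCC candidates

    synchronized boolean markObsolete(Long ver, boolean clear) {
        if (ver == null) {
            return obsoleteVer != null;     // query-only call
        }
        if (obsoleteVer != null) {
            return true;                    // already obsolete, nothing to do
        }
        if (activeOwners == 0) {            // no concurrent owners -> safe to mark
            obsoleteVer = ver;
            if (clear) {
                value = null;
            }
        }
        return obsoleteVer != null;
    }

    public static void main(String[] args) {
        ObsoleteSketch e = new ObsoleteSketch();
        e.value = "payload";
        e.activeOwners = 1;
        System.out.println(e.markObsolete(1L, true));  // false: still owned
        e.activeOwners = 0;
        System.out.println(e.markObsolete(1L, true));  // true: marked and cleared
        System.out.println(e.markObsolete(2L, true));  // true: idempotent
    }
}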
|
@JsonProperty("is-new-service")
boolean isNewService();
|
@JsonProperty(STR) boolean isNewService();
|
/**
* Set to true if this is a new stack. Otherwise, set to false.
*/
|
Set to true if this is a new stack. Otherwise, set to false
|
isNewService
|
{
"repo_name": "EvilMcJerkface/atlasdb",
"path": "timelock-agent/src/main/java/com/palantir/timelock/config/PaxosInstallConfiguration.java",
"license": "apache-2.0",
"size": 4333
}
|
[
"com.fasterxml.jackson.annotation.JsonProperty"
] |
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.*;
|
[
"com.fasterxml.jackson"
] |
com.fasterxml.jackson;
| 2,896,404
|
@Basic
@Column(name = "STATUS")
public String getStatus() {
return status;
}
|
@Column(name = STR) String function() { return status; }
|
/**
* Gets the status.
*
* @return the status
*/
|
Gets the status
|
getStatus
|
{
"repo_name": "Hack23/cia",
"path": "model.external.riksdagen.utskottsforslag.impl/src/main/java/com/hack23/cia/model/external/riksdagen/utskottsforslag/impl/CommitteeDocumentData.java",
"license": "apache-2.0",
"size": 16553
}
|
[
"javax.persistence.Column"
] |
import javax.persistence.Column;
|
import javax.persistence.*;
|
[
"javax.persistence"
] |
javax.persistence;
| 1,099,025
|
private void gatherActionsForFile(
ConfiguredTarget configuredTarget,
ActionFilter filter,
CommandEnvironment env,
ActionGraph actionGraph,
ActionKeyContext actionKeyContext)
throws CommandLineExpansionException {
NestedSet<Artifact> artifacts = OutputGroupInfo.get(configuredTarget)
.getOutputGroup(OutputGroupInfo.FILES_TO_COMPILE);
if (artifacts.isEmpty()) {
return;
}
for (Artifact artifact : artifacts) {
ActionAnalysisMetadata action = actionGraph.getGeneratingAction(artifact);
if (filter.shouldOutput(action, configuredTarget, env)) {
if (action instanceof Action) {
DetailedExtraActionInfo.Builder detail = DetailedExtraActionInfo.newBuilder();
detail.setAction(((Action) action).getExtraActionInfo(actionKeyContext));
summaryBuilder.addAction(detail);
}
}
}
}
|
void function( ConfiguredTarget configuredTarget, ActionFilter filter, CommandEnvironment env, ActionGraph actionGraph, ActionKeyContext actionKeyContext) throws CommandLineExpansionException { NestedSet<Artifact> artifacts = OutputGroupInfo.get(configuredTarget) .getOutputGroup(OutputGroupInfo.FILES_TO_COMPILE); if (artifacts.isEmpty()) { return; } for (Artifact artifact : artifacts) { ActionAnalysisMetadata action = actionGraph.getGeneratingAction(artifact); if (filter.shouldOutput(action, configuredTarget, env)) { if (action instanceof Action) { DetailedExtraActionInfo.Builder detail = DetailedExtraActionInfo.newBuilder(); detail.setAction(((Action) action).getExtraActionInfo(actionKeyContext)); summaryBuilder.addAction(detail); } } } }
|
/**
* Looks for files to compile in the given configured target and outputs the corresponding
* extra_action if the filter evaluates to {@code true}.
*/
|
Looks for files to compile in the given configured target and outputs the corresponding extra_action if the filter evaluates to true
|
gatherActionsForFile
|
{
"repo_name": "ButterflyNetwork/bazel",
"path": "src/main/java/com/google/devtools/build/lib/runtime/commands/PrintActionCommand.java",
"license": "apache-2.0",
"size": 17737
}
|
[
"com.google.devtools.build.lib.actions.Action",
"com.google.devtools.build.lib.actions.ActionAnalysisMetadata",
"com.google.devtools.build.lib.actions.ActionGraph",
"com.google.devtools.build.lib.actions.ActionKeyContext",
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.actions.CommandLineExpansionException",
"com.google.devtools.build.lib.actions.extra.DetailedExtraActionInfo",
"com.google.devtools.build.lib.analysis.ConfiguredTarget",
"com.google.devtools.build.lib.analysis.OutputGroupInfo",
"com.google.devtools.build.lib.collect.nestedset.NestedSet",
"com.google.devtools.build.lib.runtime.CommandEnvironment"
] |
import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.ActionGraph; import com.google.devtools.build.lib.actions.ActionKeyContext; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.extra.DetailedExtraActionInfo; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.OutputGroupInfo; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.runtime.CommandEnvironment;
|
import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.actions.extra.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.collect.nestedset.*; import com.google.devtools.build.lib.runtime.*;
|
[
"com.google.devtools"
] |
com.google.devtools;
| 774,563
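The Bazel method above walks output artifacts, looks up each generating action, applies a filter, and collects extra-action details. The same filter-and-collect walk, sketched with tiny stand-in types rather than Bazel's classes:

// Filter-and-collect over a minimal "action graph": artifact -> generating action.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

public class ActionGraphSketch {
    record Action(String mnemonic) {}
    record Artifact(String path) {}

    static List<String> gatherActions(List<Artifact> artifacts,
                                      Map<Artifact, Action> generatingActions,
                                      Predicate<Action> filter) {
        List<String> details = new ArrayList<>();
        for (Artifact artifact : artifacts) {
            Action action = generatingActions.get(artifact);
            if (action != null && filter.test(action)) {
                // In the record this is where the extra_action detail is built.
                details.add(action.mnemonic() + " -> " + artifact.path());
            }
        }
        return details;
    }

    public static void main(String[] args) {
        Artifact o = new Artifact("foo.o");
        Artifact jar = new Artifact("bar.jar");
        Map<Artifact, Action> graph = Map.of(
            o, new Action("CppCompile"),
            jar, new Action("JavaCompile"));
        System.out.println(gatherActions(List.of(o, jar), graph,
            a -> a.mnemonic().startsWith("Cpp")));
    }
}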
|
public void searchType(SearchType searchType) {
this.searchType = Objects.requireNonNull(searchType, "searchType must not be null");
}
|
void function(SearchType searchType) { this.searchType = Objects.requireNonNull(searchType, STR); }
|
/**
* The search type to execute, defaults to {@link SearchType#DEFAULT}.
*/
|
The search type to execute, defaults to <code>SearchType#DEFAULT</code>
|
searchType
|
{
"repo_name": "ern/elasticsearch",
"path": "modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java",
"license": "apache-2.0",
"size": 4923
}
|
[
"java.util.Objects",
"org.elasticsearch.action.search.SearchType"
] |
import java.util.Objects; import org.elasticsearch.action.search.SearchType;
|
import java.util.*; import org.elasticsearch.action.search.*;
|
[
"java.util",
"org.elasticsearch.action"
] |
java.util; org.elasticsearch.action;
| 2,034,501
|
public String getExecutingServer() {
if ( executingServer == null ) {
setExecutingServer( Const.getHostname() );
}
return executingServer;
}
|
String function() { if ( executingServer == null ) { setExecutingServer( Const.getHostname() ); } return executingServer; }
|
/**
* Gets the executing server.
*
* @return the executingServer
*/
|
Gets the executing server
|
getExecutingServer
|
{
"repo_name": "pedrofvteixeira/pentaho-kettle",
"path": "engine/src/main/java/org/pentaho/di/job/Job.java",
"license": "apache-2.0",
"size": 72377
}
|
[
"org.pentaho.di.core.Const"
] |
import org.pentaho.di.core.Const;
|
import org.pentaho.di.core.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 1,840,380
|
public Future<Channel> handshakeFuture() {
return handshakePromise;
}
|
Future<Channel> function() { return handshakePromise; }
|
/**
* Returns a {@link Future} that will get notified once the handshake completes.
*/
|
Returns a <code>Future</code> that will get notified once the handshake completes
|
handshakeFuture
|
{
"repo_name": "menacher/netty",
"path": "handler/src/main/java/io/netty/handler/ssl/SslHandler.java",
"license": "apache-2.0",
"size": 41264
}
|
[
"io.netty.channel.Channel",
"io.netty.util.concurrent.Future"
] |
import io.netty.channel.Channel; import io.netty.util.concurrent.Future;
|
import io.netty.channel.*; import io.netty.util.concurrent.*;
|
[
"io.netty.channel",
"io.netty.util"
] |
io.netty.channel; io.netty.util;
| 2,876,576
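A typical use of the future returned by handshakeFuture() is to defer application writes until TLS negotiation finishes. A hedged usage sketch follows; the pipeline setup and payload are illustrative, not taken from the record:

// Register a listener on the handshake future so data is written only after TLS
// is established; close the connection if the handshake fails.
import io.netty.buffer.Unpooled;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslHandler;
import io.netty.util.CharsetUtil;

public class HandshakeFutureUsage {
    static void addTls(SocketChannel ch, SslContext sslCtx) {
        SslHandler sslHandler = sslCtx.newHandler(ch.alloc());
        ch.pipeline().addFirst("ssl", sslHandler);

        sslHandler.handshakeFuture().addListener(future -> {
            if (future.isSuccess()) {
                // Safe to send application data now.
                ch.writeAndFlush(Unpooled.copiedBuffer("hello", CharsetUtil.UTF_8));
            } else {
                // Handshake failed; tear down the connection.
                ch.close();
            }
        });
    }
}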
|
public static int FSOUND_Stream_GetNumSubStreams(FSOUND_STREAM stream)
{
return FmodJNI.FSOUND_Stream_GetNumSubStreams(Pointer.getPointer(stream));
}
|
static int function(FSOUND_STREAM stream) { return FmodJNI.FSOUND_Stream_GetNumSubStreams(Pointer.getPointer(stream)); }
|
/**
* Returns the number of substreams inside a multi-stream FSB bank file.<br>
* <br><b>Remarks :</b><br>
* ___________________<br>
* Supported on the following platforms : Win32, WinCE, Linux, Macintosh, XBox, PlayStation 2, GameCube
* @param stream Pointer to the stream to query.
* @return On success, the number of FSB substreams is returned.<br>
* On failure, 0 is returned.
* @see Fmod#FSOUND_Stream_SetSubStream(FSOUND_STREAM, int)
* @see Fmod#FSOUND_Stream_SetSubStreamSentence(FSOUND_STREAM, IntBuffer, int)
*/
|
Returns the number of substreams inside a multi-stream FSB bank file. Remarks: Supported on the following platforms: Win32, WinCE, Linux, Macintosh, XBox, PlayStation 2, GameCube
|
FSOUND_Stream_GetNumSubStreams
|
{
"repo_name": "jerome-jouvie/NativeFmod",
"path": "src-java/org/jouvieje/Fmod/Fmod.java",
"license": "lgpl-2.1",
"size": 364285
}
|
[
"org.jouvieje.Fmod"
] |
import org.jouvieje.Fmod;
|
import org.jouvieje.*;
|
[
"org.jouvieje"
] |
org.jouvieje;
| 606,230
|
public void setIterations(int val) {
if ( iterations == null ) {
iterations = (SFInt32)getField( "iterations" );
}
iterations.setValue( val );
}
|
void function(int val) { if ( iterations == null ) { iterations = (SFInt32)getField( STR ); } iterations.setValue( val ); }
|
/** Set the iterations field.
* @param val The int to set. */
|
Set the iterations field
|
setIterations
|
{
"repo_name": "Norkart/NK-VirtualGlobe",
"path": "Xj3D/src/java/org/xj3d/sai/internal/node/rigidbodyphysics/SAIRigidBodyCollection.java",
"license": "gpl-2.0",
"size": 11399
}
|
[
"org.web3d.x3d.sai.SFInt32"
] |
import org.web3d.x3d.sai.SFInt32;
|
import org.web3d.x3d.sai.*;
|
[
"org.web3d.x3d"
] |
org.web3d.x3d;
| 2,159,976
|
public List<String> getAllUniqueMembersInGroup(int groupId, int voId);
|
List<String> function(int groupId, int voId);
|
/**
* Get all 'uniqueMember' values of group in LDAP.
*
* @param groupId group Id
* @param voId vo Id
* @return list of uniqueMember values
*/
|
Get all 'uniqueMember' values of group in LDAP
|
getAllUniqueMembersInGroup
|
{
"repo_name": "ondrejvelisek/perun",
"path": "perun-ldapc/src/main/java/cz/metacentrum/perun/ldapc/processor/LdapConnector.java",
"license": "bsd-2-clause",
"size": 8243
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,567,568
|
private Method findBestExceptionHandlerMethod(Object handler, final Exception thrownException) {
final Class<?> handlerType = handler.getClass();
final Class<? extends Throwable> thrownExceptionType = thrownException.getClass();
Method handlerMethod;
Map<Class<? extends Throwable>, Method> handlers = this.exceptionHandlerCache.get(handlerType);
if (handlers != null) {
handlerMethod = handlers.get(thrownExceptionType);
if (handlerMethod != null) {
return (handlerMethod == NO_METHOD_FOUND ? null : handlerMethod);
}
}
else {
handlers = new ConcurrentHashMap<Class<? extends Throwable>, Method>(16);
this.exceptionHandlerCache.put(handlerType, handlers);
}
final Map<Class<? extends Throwable>, Method> matchedHandlers = new HashMap<Class<? extends Throwable>, Method>();
|
Method function(Object handler, final Exception thrownException) { final Class<?> handlerType = handler.getClass(); final Class<? extends Throwable> thrownExceptionType = thrownException.getClass(); Method handlerMethod; Map<Class<? extends Throwable>, Method> handlers = this.exceptionHandlerCache.get(handlerType); if (handlers != null) { handlerMethod = handlers.get(thrownExceptionType); if (handlerMethod != null) { return (handlerMethod == NO_METHOD_FOUND ? null : handlerMethod); } } else { handlers = new ConcurrentHashMap<Class<? extends Throwable>, Method>(16); this.exceptionHandlerCache.put(handlerType, handlers); } final Map<Class<? extends Throwable>, Method> matchedHandlers = new HashMap<Class<? extends Throwable>, Method>();
|
/**
* Finds the handler method that matches the thrown exception best.
* @param handler the handler object
* @param thrownException the exception to be handled
* @return the best matching method; or {@code null} if none is found
*/
|
Finds the handler method that matches the thrown exception best
|
findBestExceptionHandlerMethod
|
{
"repo_name": "qobel/esoguproject",
"path": "spring-framework/spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/mvc/annotation/AnnotationMethodHandlerExceptionResolver.java",
"license": "apache-2.0",
"size": 15893
}
|
[
"java.lang.reflect.Method",
"java.util.HashMap",
"java.util.Map",
"java.util.concurrent.ConcurrentHashMap"
] |
import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap;
|
import java.lang.reflect.*; import java.util.*; import java.util.concurrent.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 129,205
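The resolver above caches the chosen handler method per (handler type, exception type) and otherwise searches for the best match. One common "best match" rule, choosing the candidate closest to the thrown exception in its class hierarchy, can be sketched as follows (this is not Spring's exact algorithm):

// Pick the candidate exception type with the smallest number of superclass hops
// from the thrown type; candidates that never match are skipped.
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class BestHandlerSketch {
    static Class<? extends Throwable> findBestMatch(
            Class<? extends Throwable> thrown,
            Iterable<Class<? extends Throwable>> candidates) {
        Class<? extends Throwable> best = null;
        int bestDepth = Integer.MAX_VALUE;
        for (Class<? extends Throwable> candidate : candidates) {
            int depth = 0;
            Class<?> c = thrown;
            while (c != null && !candidate.equals(c)) {
                c = c.getSuperclass();
                depth++;
            }
            if (c != null && depth < bestDepth) {
                best = candidate;
                bestDepth = depth;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        Map<Class<? extends Throwable>, String> handlers = new LinkedHashMap<>();
        handlers.put(Exception.class, "handleAnything");
        handlers.put(IOException.class, "handleIo");
        Class<? extends Throwable> best =
            findBestMatch(FileNotFoundException.class, handlers.keySet());
        // Prints: IOException -> handleIo (more specific than Exception)
        System.out.println(best.getSimpleName() + " -> " + handlers.get(best));
    }
}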
|
public IndexedFolder getFolder(JSONArray jsonArray) {
try {
return getFolder(jsonArray.getString(IndexedItem.FOLDER_UID));
}
catch (JSONException e) {
Log.e(Utils.tag(this), "hm?", e);
return null;
}
}
|
IndexedFolder function(JSONArray jsonArray) { try { return getFolder(jsonArray.getString(IndexedItem.FOLDER_UID)); } catch (JSONException e) { Log.e(Utils.tag(this), "hm?", e); return null; } }
|
/**
* Get a folder by its jsonArray
*
* @param jsonArray the jsonArray of the folder
* @return the folder if it was found. Otherwise null
*/
|
Get a folder by its jsonArray
|
getFolder
|
{
"repo_name": "droidstealth/droid-stealth",
"path": "DroidStealth/src/main/java/com/stealth/files/FileIndex.java",
"license": "gpl-2.0",
"size": 9514
}
|
[
"android.util.Log",
"com.stealth.utils.Utils",
"org.json.JSONArray",
"org.json.JSONException"
] |
import android.util.Log; import com.stealth.utils.Utils; import org.json.JSONArray; import org.json.JSONException;
|
import android.util.*; import com.stealth.utils.*; import org.json.*;
|
[
"android.util",
"com.stealth.utils",
"org.json"
] |
android.util; com.stealth.utils; org.json;
| 2,670,519
|
public void showMessageDialog(String windowTitle, String briefMessage, String longMessageTitle, String longMessage,
boolean wrapLines, int MessageType) {
JTextArea textArea = new JTextArea(longMessage);
textArea.setEditable(false);
textArea.setOpaque(false);
if (wrapLines) {
textArea.setLineWrap(true);
textArea.setWrapStyleWord(true);
}
JPanel messagePanel = new JPanel(new BorderLayout(15, 15));
JScrollPane scrollPanel = new JScrollPane(textArea);
scrollPanel.setBorder(BorderFactory.createTitledBorder(longMessageTitle));
messagePanel.add(scrollPanel, BorderLayout.CENTER);
messagePanel.add(new JLabel("<html>" + briefMessage + "</html>"), BorderLayout.NORTH);
messagePanel.setPreferredSize(new Dimension(400, 200));
JOptionPane.showMessageDialog(mainWindow, messagePanel, windowTitle, MessageType);
}
|
void function(String windowTitle, String briefMessage, String longMessageTitle, String longMessage, boolean wrapLines, int MessageType) { JTextArea textArea = new JTextArea(longMessage); textArea.setEditable(false); textArea.setOpaque(false); if (wrapLines) { textArea.setLineWrap(true); textArea.setWrapStyleWord(true); } JPanel messagePanel = new JPanel(new BorderLayout(15, 15)); JScrollPane scrollPanel = new JScrollPane(textArea); scrollPanel.setBorder(BorderFactory.createTitledBorder(longMessageTitle)); messagePanel.add(scrollPanel, BorderLayout.CENTER); messagePanel.add(new JLabel(STR + briefMessage + STR), BorderLayout.NORTH); messagePanel.setPreferredSize(new Dimension(400, 200)); JOptionPane.showMessageDialog(mainWindow, messagePanel, windowTitle, MessageType); }
|
/**
* Show a message dialog with a specific long text inside
* @param windowTitle the window title
* @param briefMessage the brief message to be shown
* @param longMessageTitle the title of the long message
* @param longMessage the long message to be shown
* @param wrapLines controls whether lines will be automatically wrapped (if true) or not (if false)
* @param MessageType the message type
*/
|
Show a message dialog with a specific long text inside
|
showMessageDialog
|
{
"repo_name": "gems-uff/dyevc",
"path": "src/main/java/br/uff/ic/dyevc/gui/utils/GUIManager.java",
"license": "mit",
"size": 7143
}
|
[
"java.awt.BorderLayout",
"java.awt.Dimension",
"javax.swing.BorderFactory",
"javax.swing.JLabel",
"javax.swing.JOptionPane",
"javax.swing.JPanel",
"javax.swing.JScrollPane",
"javax.swing.JTextArea"
] |
import java.awt.BorderLayout; import java.awt.Dimension; import javax.swing.BorderFactory; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextArea;
|
import java.awt.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 385,803
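The dialog helper above wraps a long message in a word-wrapped, scrollable text area before handing it to JOptionPane. A minimal runnable version of the same pattern, with placeholder titles and text:

// Brief label plus a scrollable, word-wrapped text area inside a JOptionPane.
import java.awt.BorderLayout;
import java.awt.Dimension;
import javax.swing.BorderFactory;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.SwingUtilities;

public class LongMessageDialogDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JTextArea textArea = new JTextArea("A long diagnostic message...\n".repeat(20));
            textArea.setEditable(false);
            textArea.setOpaque(false);
            textArea.setLineWrap(true);
            textArea.setWrapStyleWord(true);

            JPanel panel = new JPanel(new BorderLayout(15, 15));
            JScrollPane scroll = new JScrollPane(textArea);
            scroll.setBorder(BorderFactory.createTitledBorder("Details"));
            panel.add(scroll, BorderLayout.CENTER);
            panel.add(new JLabel("<html>Something went wrong.</html>"), BorderLayout.NORTH);
            panel.setPreferredSize(new Dimension(400, 200));

            JOptionPane.showMessageDialog(null, panel, "Error",
                JOptionPane.ERROR_MESSAGE);
        });
    }
}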
|
public void setChild3Url(String[] val) {
if ( child3Url == null ) {
child3Url = (MFString)getField( "child3Url" );
}
child3Url.setValue( val.length, val );
}
|
void function(String[] val) { if ( child3Url == null ) { child3Url = (MFString)getField( STR ); } child3Url.setValue( val.length, val ); }
|
/** Set the child3Url field.
* @param val The String[] to set. */
|
Set the child3Url field
|
setChild3Url
|
{
"repo_name": "Norkart/NK-VirtualGlobe",
"path": "Xj3D/src/java/org/xj3d/sai/external/node/geospatial/SAIGeoLOD.java",
"license": "gpl-2.0",
"size": 11112
}
|
[
"org.web3d.x3d.sai.MFString"
] |
import org.web3d.x3d.sai.MFString;
|
import org.web3d.x3d.sai.*;
|
[
"org.web3d.x3d"
] |
org.web3d.x3d;
| 331,327
|
TransportConfiguration adaptTransportConfiguration(TransportConfiguration tc);
|
TransportConfiguration adaptTransportConfiguration(TransportConfiguration tc);
|
/**
* Adapt the transport configuration passed in parameter and return an adapted one that is suitable to use with ClientProtocolManager
* created by this factory.
*
* @param tc the original TransportConfiguration
* @return the adapted TransportConfiguration
*/
|
Adapt the transport configuration passed in parameter and return an adapted one that is suitable to use with ClientProtocolManager created by this factory
|
adaptTransportConfiguration
|
{
"repo_name": "kjniemi/activemq-artemis",
"path": "artemis-core-client/src/main/java/org/apache/activemq/artemis/spi/core/remoting/ClientProtocolManagerFactory.java",
"license": "apache-2.0",
"size": 1537
}
|
[
"org.apache.activemq.artemis.api.core.TransportConfiguration"
] |
import org.apache.activemq.artemis.api.core.TransportConfiguration;
|
import org.apache.activemq.artemis.api.core.*;
|
[
"org.apache.activemq"
] |
org.apache.activemq;
| 2,418,633
|
public void setLocation(TLAbsFileLocation location) {
this.location = location;
}
|
void function(TLAbsFileLocation location) { this.location = location; }
|
/**
* Sets location.
*
* @param location the location
*/
|
Sets location
|
setLocation
|
{
"repo_name": "rubenlagus/TelegramApi",
"path": "src/main/java/org/telegram/api/photo/size/TLPhotoCachedSize.java",
"license": "mit",
"size": 3011
}
|
[
"org.telegram.api.file.location.TLAbsFileLocation"
] |
import org.telegram.api.file.location.TLAbsFileLocation;
|
import org.telegram.api.file.location.*;
|
[
"org.telegram.api"
] |
org.telegram.api;
| 132,628
|
public boolean processReport(final DatanodeID nodeID,
final DatanodeStorage storage,
final BlockListAsLongs newReport, BlockReportContext context,
boolean lastStorageInRpc) throws IOException {
namesystem.writeLock();
final long startTime = Time.monotonicNow(); //after acquiring write lock
final long endTime;
DatanodeDescriptor node;
Collection<Block> invalidatedBlocks = null;
try {
node = datanodeManager.getDatanode(nodeID);
if (node == null || !node.isAlive) {
throw new IOException(
"ProcessReport from dead or unregistered node: " + nodeID);
}
// To minimize startup time, we discard any second (or later) block reports
// that we receive while still in startup phase.
DatanodeStorageInfo storageInfo = node.getStorageInfo(storage.getStorageID());
if (storageInfo == null) {
// We handle this for backwards compatibility.
storageInfo = node.updateStorage(storage);
}
if (namesystem.isInStartupSafeMode()
&& storageInfo.getBlockReportCount() > 0) {
blockLog.info("BLOCK* processReport: "
+ "discarded non-initial block report from {}"
+ " because namenode still in startup phase", nodeID);
return !node.hasStaleStorages();
}
if (storageInfo.getBlockReportCount() == 0) {
// The first block report can be processed a lot more efficiently than
// ordinary block reports. This shortens restart times.
processFirstBlockReport(storageInfo, newReport);
} else {
invalidatedBlocks = processReport(storageInfo, newReport);
}
storageInfo.receivedBlockReport();
if (context != null) {
storageInfo.setLastBlockReportId(context.getReportId());
if (lastStorageInRpc) {
int rpcsSeen = node.updateBlockReportContext(context);
if (rpcsSeen >= context.getTotalRpcs()) {
List<DatanodeStorageInfo> zombies = node.removeZombieStorages();
if (zombies.isEmpty()) {
LOG.debug("processReport 0x{}: no zombie storages found.",
Long.toHexString(context.getReportId()));
} else {
for (DatanodeStorageInfo zombie : zombies) {
removeZombieReplicas(context, zombie);
}
}
node.clearBlockReportContext();
} else {
LOG.debug("processReport 0x{}: {} more RPCs remaining in this " +
"report.", Long.toHexString(context.getReportId()),
(context.getTotalRpcs() - rpcsSeen)
);
}
}
}
} finally {
endTime = Time.monotonicNow();
namesystem.writeUnlock();
}
if (invalidatedBlocks != null) {
for (Block b : invalidatedBlocks) {
blockLog.info("BLOCK* processReport: {} on node {} size {} does not " +
"belong to any file", b, node, b.getNumBytes());
}
}
// Log the block report processing stats from Namenode perspective
final NameNodeMetrics metrics = NameNode.getNameNodeMetrics();
if (metrics != null) {
metrics.addBlockReport((int) (endTime - startTime));
}
blockLog.info("BLOCK* processReport: from storage {} node {}, " +
"blocks: {}, hasStaleStorage: {}, processing time: {} msecs", storage
.getStorageID(), nodeID, newReport.getNumberOfBlocks(),
node.hasStaleStorages(), (endTime - startTime));
return !node.hasStaleStorages();
}
|
boolean function(final DatanodeID nodeID, final DatanodeStorage storage, final BlockListAsLongs newReport, BlockReportContext context, boolean lastStorageInRpc) throws IOException { namesystem.writeLock(); final long startTime = Time.monotonicNow(); final long endTime; DatanodeDescriptor node; Collection<Block> invalidatedBlocks = null; try { node = datanodeManager.getDatanode(nodeID); if (node == null || !node.isAlive) { throw new IOException( STR + nodeID); } DatanodeStorageInfo storageInfo = node.getStorageInfo(storage.getStorageID()); if (storageInfo == null) { storageInfo = node.updateStorage(storage); } if (namesystem.isInStartupSafeMode() && storageInfo.getBlockReportCount() > 0) { blockLog.info(STR + STR + STR, nodeID); return !node.hasStaleStorages(); } if (storageInfo.getBlockReportCount() == 0) { processFirstBlockReport(storageInfo, newReport); } else { invalidatedBlocks = processReport(storageInfo, newReport); } storageInfo.receivedBlockReport(); if (context != null) { storageInfo.setLastBlockReportId(context.getReportId()); if (lastStorageInRpc) { int rpcsSeen = node.updateBlockReportContext(context); if (rpcsSeen >= context.getTotalRpcs()) { List<DatanodeStorageInfo> zombies = node.removeZombieStorages(); if (zombies.isEmpty()) { LOG.debug(STR, Long.toHexString(context.getReportId())); } else { for (DatanodeStorageInfo zombie : zombies) { removeZombieReplicas(context, zombie); } } node.clearBlockReportContext(); } else { LOG.debug(STR + STR, Long.toHexString(context.getReportId()), (context.getTotalRpcs() - rpcsSeen) ); } } } } finally { endTime = Time.monotonicNow(); namesystem.writeUnlock(); } if (invalidatedBlocks != null) { for (Block b : invalidatedBlocks) { blockLog.info(STR + STR, b, node, b.getNumBytes()); } } final NameNodeMetrics metrics = NameNode.getNameNodeMetrics(); if (metrics != null) { metrics.addBlockReport((int) (endTime - startTime)); } blockLog.info(STR + STR, storage .getStorageID(), nodeID, newReport.getNumberOfBlocks(), node.hasStaleStorages(), (endTime - startTime)); return !node.hasStaleStorages(); }
|
/**
* The given storage is reporting all its blocks.
* Update the (storage-->block list) and (block-->storage list) maps.
*
* @return true if all known storages of the given DN have finished reporting.
* @throws IOException
*/
|
The given storage is reporting all its blocks. Update the (storage-->block list) and (block-->storage list) maps
|
processReport
|
{
"repo_name": "myeoje/PhillyYarn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java",
"license": "apache-2.0",
"size": 147809
}
|
[
"java.io.IOException",
"java.util.Collection",
"java.util.List",
"org.apache.hadoop.hdfs.protocol.Block",
"org.apache.hadoop.hdfs.protocol.BlockListAsLongs",
"org.apache.hadoop.hdfs.protocol.DatanodeID",
"org.apache.hadoop.hdfs.server.namenode.NameNode",
"org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics",
"org.apache.hadoop.hdfs.server.protocol.BlockReportContext",
"org.apache.hadoop.hdfs.server.protocol.DatanodeStorage",
"org.apache.hadoop.util.Time"
] |
import java.io.IOException; import java.util.Collection; import java.util.List; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.BlockListAsLongs; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.protocol.BlockReportContext; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage; import org.apache.hadoop.util.Time;
|
import java.io.*; import java.util.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.hdfs.server.namenode.metrics.*; import org.apache.hadoop.hdfs.server.protocol.*; import org.apache.hadoop.util.*;
|
[
"java.io",
"java.util",
"org.apache.hadoop"
] |
java.io; java.util; org.apache.hadoop;
| 136,955
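processReport above does all report handling under the namesystem write lock, discards non-initial reports during startup safe mode, takes a fast path for a storage's first report, and records timing metrics only after the lock is released. A skeleton of that lock-and-branch structure with stand-in types (not HDFS's BlockManager):

// Lock, branch on first-vs-incremental report, then publish timing after unlock.
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class ReportProcessingSketch {
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private boolean inStartupSafeMode = false;
    private int reportsSeen = 0;

    boolean processReport(long[] report) {
        lock.writeLock().lock();
        final long start = System.nanoTime();
        try {
            if (inStartupSafeMode && reportsSeen > 0) {
                // Non-initial reports during startup are discarded to speed startup.
                return false;
            }
            if (reportsSeen == 0) {
                processFirstReport(report);        // cheap fast path
            } else {
                diffAgainstExistingState(report);  // full incremental processing
            }
            reportsSeen++;
            return true;
        } finally {
            long end = System.nanoTime();
            lock.writeLock().unlock();
            // Timing is reported after releasing the lock.
            System.out.printf("processed %d blocks in %d us%n",
                report.length, (end - start) / 1_000);
        }
    }

    private void processFirstReport(long[] report) { /* add all blocks directly */ }
    private void diffAgainstExistingState(long[] report) { /* add/remove deltas */ }

    public static void main(String[] args) {
        ReportProcessingSketch s = new ReportProcessingSketch();
        System.out.println(s.processReport(new long[]{1, 2, 3}));
        System.out.println(s.processReport(new long[]{1, 2, 3, 4}));
    }
}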
|
@SuppressWarnings({ "unchecked" })
public static <K, T extends Persistent> DataStore<K, T> getDataStore(
String dataStoreClass, String keyClass, String persistentClass, Configuration conf)
throws GoraException {
try {
Class<? extends DataStore<K,T>> c
= (Class<? extends DataStore<K, T>>) Class.forName(dataStoreClass);
Class<K> k = (Class<K>) ClassLoadingUtils.loadClass(keyClass);
Class<T> p = (Class<T>) ClassLoadingUtils.loadClass(persistentClass);
return createDataStore(c, k, p, conf, createProps(), null);
} catch(GoraException ex) {
throw ex;
} catch (Exception ex) {
throw new GoraException(ex);
}
}
|
@SuppressWarnings({ STR }) static <K, T extends Persistent> DataStore<K, T> function( String dataStoreClass, String keyClass, String persistentClass, Configuration conf) throws GoraException { try { Class<? extends DataStore<K,T>> c = (Class<? extends DataStore<K, T>>) Class.forName(dataStoreClass); Class<K> k = (Class<K>) ClassLoadingUtils.loadClass(keyClass); Class<T> p = (Class<T>) ClassLoadingUtils.loadClass(persistentClass); return createDataStore(c, k, p, conf, createProps(), null); } catch(GoraException ex) { throw ex; } catch (Exception ex) { throw new GoraException(ex); } }
|
/**
* Instantiate a new {@link DataStore}. Uses default properties. Uses 'null' schema.
*
* @param dataStoreClass The datastore implementation class <i>as string</i>.
* @param keyClass The key class <i>as string</i>.
* @param persistentClass The value class <i>as string</i>.
* @param conf {@link Configuration} to be used be the store.
* @return A new store instance.
* @throws GoraException
*/
|
Instantiate a new <code>DataStore</code>. Uses default properties. Uses 'null' schema
|
getDataStore
|
{
"repo_name": "cguzel/gora",
"path": "gora-core/src/main/java/org/apache/gora/store/DataStoreFactory.java",
"license": "apache-2.0",
"size": 18590
}
|
[
"org.apache.gora.persistency.Persistent",
"org.apache.gora.util.ClassLoadingUtils",
"org.apache.gora.util.GoraException",
"org.apache.hadoop.conf.Configuration"
] |
import org.apache.gora.persistency.Persistent; import org.apache.gora.util.ClassLoadingUtils; import org.apache.gora.util.GoraException; import org.apache.hadoop.conf.Configuration;
|
import org.apache.gora.persistency.*; import org.apache.gora.util.*; import org.apache.hadoop.conf.*;
|
[
"org.apache.gora",
"org.apache.hadoop"
] |
org.apache.gora; org.apache.hadoop;
| 1,289,759
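getDataStore above loads the datastore, key, and persistent classes by name and then delegates to createDataStore. The reflective pattern can be sketched generically as below; names are illustrative and Gora's configuration and schema handling are omitted entirely:

// Load classes by name, then instantiate a generics-parameterized object.
import java.util.HashMap;
import java.util.Map;

public class ReflectiveFactorySketch {
    // A toy "store" parameterized by key and value types.
    public static class MapStore<K, T> {
        private final Map<K, T> data = new HashMap<>();
        public void put(K k, T v) { data.put(k, v); }
        public T get(K k) { return data.get(k); }
    }

    @SuppressWarnings("unchecked")
    static <K, T> MapStore<K, T> getStore(String storeClass,
                                          String keyClass,
                                          String valueClass) throws Exception {
        Class<? extends MapStore<K, T>> c =
            (Class<? extends MapStore<K, T>>) Class.forName(storeClass);
        // Key/value classes are loaded mainly to fail fast on bad names; generics
        // are erased at runtime, so they only inform the caller's static types.
        Class.forName(keyClass);
        Class.forName(valueClass);
        return c.getDeclaredConstructor().newInstance();
    }

    public static void main(String[] args) throws Exception {
        MapStore<String, Integer> store = getStore(
            "ReflectiveFactorySketch$MapStore",
            "java.lang.String", "java.lang.Integer");
        store.put("answer", 42);
        System.out.println(store.get("answer"));
    }
}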
|
protected void sequence_QuotedChars(EObject context, QuotedChars semanticObject) {
if(errorAcceptor != null) {
if(transientValues.isValueTransient(semanticObject, JsonPackage.Literals.JSON_SIMPLE_VALUE__VALUE) == ValueTransient.YES)
errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, JsonPackage.Literals.JSON_SIMPLE_VALUE__VALUE));
}
INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
feeder.accept(grammarAccess.getQuotedCharsAccess().getValueTERMINAL_QUOTED_CHARSTerminalRuleCall_0(), semanticObject.getValue());
feeder.finish();
}
|
void function(EObject context, QuotedChars semanticObject) { if(errorAcceptor != null) { if(transientValues.isValueTransient(semanticObject, JsonPackage.Literals.JSON_SIMPLE_VALUE__VALUE) == ValueTransient.YES) errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, JsonPackage.Literals.JSON_SIMPLE_VALUE__VALUE)); } INodesForEObjectProvider nodes = createNodeProvider(semanticObject); SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes); feeder.accept(grammarAccess.getQuotedCharsAccess().getValueTERMINAL_QUOTED_CHARSTerminalRuleCall_0(), semanticObject.getValue()); feeder.finish(); }
|
/**
* Constraint:
* value=TERMINAL_QUOTED_CHARS
*/
|
Constraint: value=TERMINAL_QUOTED_CHARS
|
sequence_QuotedChars
|
{
"repo_name": "xcoulon/jbosstools-json",
"path": "plugins/org.jboss.tools.json.core/src-gen/org/jboss/tools/json/serializer/JSONSemanticSequencer.java",
"license": "epl-1.0",
"size": 10220
}
|
[
"org.eclipse.emf.ecore.EObject",
"org.eclipse.xtext.serializer.acceptor.SequenceFeeder",
"org.eclipse.xtext.serializer.sequencer.ISemanticNodeProvider",
"org.eclipse.xtext.serializer.sequencer.ITransientValueService",
"org.jboss.tools.json.json.JsonPackage",
"org.jboss.tools.json.json.QuotedChars"
] |
import org.eclipse.emf.ecore.EObject; import org.eclipse.xtext.serializer.acceptor.SequenceFeeder; import org.eclipse.xtext.serializer.sequencer.ISemanticNodeProvider; import org.eclipse.xtext.serializer.sequencer.ITransientValueService; import org.jboss.tools.json.json.JsonPackage; import org.jboss.tools.json.json.QuotedChars;
|
import org.eclipse.emf.ecore.*; import org.eclipse.xtext.serializer.acceptor.*; import org.eclipse.xtext.serializer.sequencer.*; import org.jboss.tools.json.json.*;
|
[
"org.eclipse.emf",
"org.eclipse.xtext",
"org.jboss.tools"
] |
org.eclipse.emf; org.eclipse.xtext; org.jboss.tools;
| 1,352,981
|
public boolean processOutgoingAckRequestMessage (RMMsgContext ackRequestRMMsg) throws AxisFault {
if (log.isDebugEnabled())
log.debug("Enter: AckRequestedProcessor::processOutgoingAckRequestMessage");
setupOutMessage(ackRequestRMMsg);
AxisOperation ackOperation = SpecSpecificConstants.getWSRMOperation(
Sandesha2Constants.MessageTypes.ACK,
getRMVersion(),
getMsgContext().getAxisService());
getMsgContext().setAxisOperation(ackOperation);
ServiceContext serviceCtx = getMsgContext().getServiceContext();
OperationContext opcontext = OperationContextFactory.createOperationContext(ackOperation.getAxisSpecificMEPConstant(), ackOperation, serviceCtx);
opcontext.setParent(getMsgContext().getServiceContext());
getConfigurationContext().registerOperationContext(ackRequestRMMsg.getMessageId(), opcontext);
getMsgContext().setOperationContext(opcontext);
Iterator<AckRequested> iterator = ackRequestRMMsg.getAckRequests();
AckRequested ackRequested = null;
while (iterator.hasNext()) {
ackRequested = (AckRequested) iterator.next();
}
if (iterator.hasNext()) {
throw new SandeshaException (SandeshaMessageHelper.getMessage(SandeshaMessageKeys.ackRequestMultipleParts));
}
if (ackRequested==null) {
throw new SandeshaException (SandeshaMessageHelper.getMessage(SandeshaMessageKeys.noAckRequestPartFound));
}
ackRequestRMMsg.setWSAAction(SpecSpecificConstants.getAckRequestAction (getRMVersion()));
ackRequestRMMsg.setSOAPAction(SpecSpecificConstants.getAckRequestSOAPAction (getRMVersion()));
sendOutgoingMessage(ackRequestRMMsg, Sandesha2Constants.MessageTypes.ACK_REQUEST, 0, null);
// Pause the message context
ackRequestRMMsg.pause();
if (log.isDebugEnabled())
log.debug("Exit: AckRequestedProcessor::processOutgoingAckRequestMessage " + Boolean.TRUE);
return true;
}
|
boolean function (RMMsgContext ackRequestRMMsg) throws AxisFault { if (log.isDebugEnabled()) log.debug(STR); setupOutMessage(ackRequestRMMsg); AxisOperation ackOperation = SpecSpecificConstants.getWSRMOperation( Sandesha2Constants.MessageTypes.ACK, getRMVersion(), getMsgContext().getAxisService()); getMsgContext().setAxisOperation(ackOperation); ServiceContext serviceCtx = getMsgContext().getServiceContext(); OperationContext opcontext = OperationContextFactory.createOperationContext(ackOperation.getAxisSpecificMEPConstant(), ackOperation, serviceCtx); opcontext.setParent(getMsgContext().getServiceContext()); getConfigurationContext().registerOperationContext(ackRequestRMMsg.getMessageId(), opcontext); getMsgContext().setOperationContext(opcontext); Iterator<AckRequested> iterator = ackRequestRMMsg.getAckRequests(); AckRequested ackRequested = null; while (iterator.hasNext()) { ackRequested = (AckRequested) iterator.next(); } if (iterator.hasNext()) { throw new SandeshaException (SandeshaMessageHelper.getMessage(SandeshaMessageKeys.ackRequestMultipleParts)); } if (ackRequested==null) { throw new SandeshaException (SandeshaMessageHelper.getMessage(SandeshaMessageKeys.noAckRequestPartFound)); } ackRequestRMMsg.setWSAAction(SpecSpecificConstants.getAckRequestAction (getRMVersion())); ackRequestRMMsg.setSOAPAction(SpecSpecificConstants.getAckRequestSOAPAction (getRMVersion())); sendOutgoingMessage(ackRequestRMMsg, Sandesha2Constants.MessageTypes.ACK_REQUEST, 0, null); ackRequestRMMsg.pause(); if (log.isDebugEnabled()) log.debug(STR + Boolean.TRUE); return true; }
|
/**
 * This is used to capture AckRequest messages sent by the SandeshaClient.
* This will send that message using the Sandesha2 Sender.
*
* @param rmMsgContext
*/
|
This is used to capture AckRequest messages sent by the SandeshaClient. This will send that message using the Sandesha2 Sender
|
processOutgoingAckRequestMessage
|
{
"repo_name": "apache/sandesha",
"path": "modules/core/src/main/java/org/apache/sandesha2/msgprocessors/AckRequestedProcessor.java",
"license": "apache-2.0",
"size": 10907
}
|
[
"java.util.Iterator",
"org.apache.axis2.AxisFault",
"org.apache.axis2.context.OperationContext",
"org.apache.axis2.context.OperationContextFactory",
"org.apache.axis2.context.ServiceContext",
"org.apache.axis2.description.AxisOperation",
"org.apache.sandesha2.RMMsgContext",
"org.apache.sandesha2.Sandesha2Constants",
"org.apache.sandesha2.SandeshaException",
"org.apache.sandesha2.i18n.SandeshaMessageHelper",
"org.apache.sandesha2.i18n.SandeshaMessageKeys",
"org.apache.sandesha2.util.SpecSpecificConstants",
"org.apache.sandesha2.wsrm.AckRequested"
] |
import java.util.Iterator; import org.apache.axis2.AxisFault; import org.apache.axis2.context.OperationContext; import org.apache.axis2.context.OperationContextFactory; import org.apache.axis2.context.ServiceContext; import org.apache.axis2.description.AxisOperation; import org.apache.sandesha2.RMMsgContext; import org.apache.sandesha2.Sandesha2Constants; import org.apache.sandesha2.SandeshaException; import org.apache.sandesha2.i18n.SandeshaMessageHelper; import org.apache.sandesha2.i18n.SandeshaMessageKeys; import org.apache.sandesha2.util.SpecSpecificConstants; import org.apache.sandesha2.wsrm.AckRequested;
|
import java.util.*; import org.apache.axis2.*; import org.apache.axis2.context.*; import org.apache.axis2.description.*; import org.apache.sandesha2.*; import org.apache.sandesha2.i18n.*; import org.apache.sandesha2.util.*; import org.apache.sandesha2.wsrm.*;
|
[
"java.util",
"org.apache.axis2",
"org.apache.sandesha2"
] |
java.util; org.apache.axis2; org.apache.sandesha2;
| 1,510,506
|
public Iterator<QueryResult> iterator() {
return new QueryResultsIterator( this.results.iterator() );
}
// public Map<String, Declaration> getDeclarations() {
// return this.declarations;
// }
|
Iterator<QueryResult> function() { return new QueryResultsIterator( this.results.iterator() ); }
|
/**
* Returns an Iterator for the results.
*
* @return
*/
|
Returns an Iterator for the results
|
iterator
|
{
"repo_name": "pperboires/PocDrools",
"path": "drools-core/src/main/java/org/drools/QueryResults.java",
"license": "apache-2.0",
"size": 3864
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 846,680
|
public LuisApp withPatternAnyEntities(List<PatternAny> patternAnyEntities) {
this.patternAnyEntities = patternAnyEntities;
return this;
}
|
LuisApp function(List<PatternAny> patternAnyEntities) { this.patternAnyEntities = patternAnyEntities; return this; }
|
/**
* Set the patternAnyEntities value.
*
* @param patternAnyEntities the patternAnyEntities value to set
* @return the LuisApp object itself.
*/
|
Set the patternAnyEntities value
|
withPatternAnyEntities
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/cognitiveservices/ms-azure-cs-luis-authoring/src/main/java/com/microsoft/azure/cognitiveservices/language/luis/authoring/models/LuisApp.java",
"license": "mit",
"size": 10717
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,288,028
|
protected TAttributeOption copyInto(TAttributeOption copyObj, boolean deepcopy) throws TorqueException
{
copyObj.setObjectID(objectID);
copyObj.setAttributeID(attributeID);
copyObj.setParentOption(parentOption);
copyObj.setOptionName(optionName);
copyObj.setDeleted(deleted);
copyObj.setSortorder(sortorder);
copyObj.setUuid(uuid);
copyObj.setObjectID((Integer)null);
if (deepcopy)
{
List<TIssueAttributeValue> vTIssueAttributeValues = getTIssueAttributeValues();
if (vTIssueAttributeValues != null)
{
for (int i = 0; i < vTIssueAttributeValues.size(); i++)
{
TIssueAttributeValue obj = vTIssueAttributeValues.get(i);
copyObj.addTIssueAttributeValue(obj.copy());
}
}
else
{
copyObj.collTIssueAttributeValues = null;
}
List<TAttribute> vTAttributes = getTAttributes();
if (vTAttributes != null)
{
for (int i = 0; i < vTAttributes.size(); i++)
{
TAttribute obj = vTAttributes.get(i);
copyObj.addTAttribute(obj.copy());
}
}
else
{
copyObj.collTAttributes = null;
}
}
return copyObj;
}
|
TAttributeOption function(TAttributeOption copyObj, boolean deepcopy) throws TorqueException { copyObj.setObjectID(objectID); copyObj.setAttributeID(attributeID); copyObj.setParentOption(parentOption); copyObj.setOptionName(optionName); copyObj.setDeleted(deleted); copyObj.setSortorder(sortorder); copyObj.setUuid(uuid); copyObj.setObjectID((Integer)null); if (deepcopy) { List<TIssueAttributeValue> vTIssueAttributeValues = getTIssueAttributeValues(); if (vTIssueAttributeValues != null) { for (int i = 0; i < vTIssueAttributeValues.size(); i++) { TIssueAttributeValue obj = vTIssueAttributeValues.get(i); copyObj.addTIssueAttributeValue(obj.copy()); } } else { copyObj.collTIssueAttributeValues = null; } List<TAttribute> vTAttributes = getTAttributes(); if (vTAttributes != null) { for (int i = 0; i < vTAttributes.size(); i++) { TAttribute obj = vTAttributes.get(i); copyObj.addTAttribute(obj.copy()); } } else { copyObj.collTAttributes = null; } } return copyObj; }
|
/**
* Fills the copyObj with the contents of this object.
 * If deepcopy is true, the associated objects are also copied
* and treated as new objects.
*
* @param copyObj the object to fill.
* @param deepcopy whether the associated objects should be copied.
*/
|
Fills the copyObj with the contents of this object. If deepcopy is true, the associated objects are also copied and treated as new objects
|
copyInto
|
{
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/BaseTAttributeOption.java",
"license": "gpl-3.0",
"size": 56949
}
|
[
"com.aurel.track.persist.TAttribute",
"java.util.List",
"org.apache.torque.TorqueException"
] |
import com.aurel.track.persist.TAttribute; import java.util.List; import org.apache.torque.TorqueException;
|
import com.aurel.track.persist.*; import java.util.*; import org.apache.torque.*;
|
[
"com.aurel.track",
"java.util",
"org.apache.torque"
] |
com.aurel.track; java.util; org.apache.torque;
| 168,328
|
public double getDepth() {
return this.depth;
}
/**
* Returns the first {@link Body}.
* @return {@link Body}
|
double function() { return this.depth; } /** * Returns the first {@link Body}. * @return {@link Body}
|
/**
* Returns the depth.
* @return double
*/
|
Returns the depth
|
getDepth
|
{
"repo_name": "satishbabusee/dyn4j",
"path": "src/org/dyn4j/dynamics/contact/ContactPoint.java",
"license": "bsd-3-clause",
"size": 6189
}
|
[
"org.dyn4j.dynamics.Body"
] |
import org.dyn4j.dynamics.Body;
|
import org.dyn4j.dynamics.*;
|
[
"org.dyn4j.dynamics"
] |
org.dyn4j.dynamics;
| 612,178
|