method stringlengths 13 441k | clean_method stringlengths 7 313k | doc stringlengths 17 17.3k | comment stringlengths 3 1.42k | method_name stringlengths 1 273 | extra dict | imports list | imports_info stringlengths 19 34.8k | cluster_imports_info stringlengths 15 3.66k | libraries list | libraries_info stringlengths 6 661 | id int64 0 2.92M |
|---|---|---|---|---|---|---|---|---|---|---|---|
@Deployment(resources = { "org/flowable/engine/test/history/oneTaskProcess.bpmn20.xml" })
public void testQueryHistoricProcessInstanceIncludeBinaryVariable() throws Exception {
// Start process with a binary variable
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", Collections.singletonMap("binaryVariable", (Object) "It is I, le binary".getBytes()));
Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertNotNull(task);
// Complete task to end process
taskService.complete(task.getId());
// Query task, including processVariables
HistoricProcessInstance historicProcess = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).includeProcessVariables().singleResult();
assertNotNull(historicProcess);
assertNotNull(historicProcess.getProcessVariables());
byte[] bytes = (byte[]) historicProcess.getProcessVariables().get("binaryVariable");
assertEquals("It is I, le binary", new String(bytes));
} | @Deployment(resources = { STR }) void function() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey(STR, Collections.singletonMap(STR, (Object) STR.getBytes())); Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertNotNull(task); taskService.complete(task.getId()); HistoricProcessInstance historicProcess = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).includeProcessVariables().singleResult(); assertNotNull(historicProcess); assertNotNull(historicProcess.getProcessVariables()); byte[] bytes = (byte[]) historicProcess.getProcessVariables().get(STR); assertEquals(STR, new String(bytes)); } | /**
* Test confirming fix for ACT-1731
*/ | Test confirming fix for ACT-1731 | testQueryHistoricProcessInstanceIncludeBinaryVariable | {
"repo_name": "stephraleigh/flowable-engine",
"path": "modules/flowable-engine/src/test/java/org/flowable/standalone/history/FullHistoryTest.java",
"license": "apache-2.0",
"size": 82664
} | [
"java.util.Collections",
"org.flowable.engine.history.HistoricProcessInstance",
"org.flowable.engine.runtime.ProcessInstance",
"org.flowable.engine.task.Task",
"org.flowable.engine.test.Deployment"
] | import java.util.Collections; import org.flowable.engine.history.HistoricProcessInstance; import org.flowable.engine.runtime.ProcessInstance; import org.flowable.engine.task.Task; import org.flowable.engine.test.Deployment; | import java.util.*; import org.flowable.engine.history.*; import org.flowable.engine.runtime.*; import org.flowable.engine.task.*; import org.flowable.engine.test.*; | [
"java.util",
"org.flowable.engine"
] | java.util; org.flowable.engine; | 893,571 |
public void invokeAction(CPAction action, Vector optionalHeaderLines) throws InvokeActionException,
ActionFailedException
{
action.getCPService().invokeAction(action, optionalHeaderLines);
} | void function(CPAction action, Vector optionalHeaderLines) throws InvokeActionException, ActionFailedException { action.getCPService().invokeAction(action, optionalHeaderLines); } | /**
* Sends an action request to a remote service.
*
* @param action
* The action
* @param optionalHeaderLines
* A vector containing additional headers for the SOAP request
*
* @throws InvokeActionException
* if action could not be invoked
* @throws ActionFailedException
* if action produced an error at the server
*/ | Sends an action request to a remote service | invokeAction | {
"repo_name": "fraunhoferfokus/fokus-upnp",
"path": "upnp-core/src/main/java/de/fraunhofer/fokus/upnp/core/control_point/ControlPoint.java",
"license": "gpl-3.0",
"size": 43538
} | [
"de.fraunhofer.fokus.upnp.core.exceptions.InvokeActionException",
"de.fraunhofer.fokus.upnp.util.exceptions.ActionFailedException",
"java.util.Vector"
] | import de.fraunhofer.fokus.upnp.core.exceptions.InvokeActionException; import de.fraunhofer.fokus.upnp.util.exceptions.ActionFailedException; import java.util.Vector; | import de.fraunhofer.fokus.upnp.core.exceptions.*; import de.fraunhofer.fokus.upnp.util.exceptions.*; import java.util.*; | [
"de.fraunhofer.fokus",
"java.util"
] | de.fraunhofer.fokus; java.util; | 2,591,730 |
@Override
public boolean updateUserModel(UserModel model) {
return updateUserModel(model.username, model);
}
| boolean function(UserModel model) { return updateUserModel(model.username, model); } | /**
* Updates/writes a complete user object.
*
* @param model
* @return true if update is successful
*/ | Updates/writes a complete user object | updateUserModel | {
"repo_name": "pdinc-oss/gitblit",
"path": "src/com/gitblit/ConfigUserService.java",
"license": "apache-2.0",
"size": 25525
} | [
"com.gitblit.models.UserModel"
] | import com.gitblit.models.UserModel; | import com.gitblit.models.*; | [
"com.gitblit.models"
] | com.gitblit.models; | 1,675,325 |
public static native boolean isValid(Buffer header); | static native boolean function(Buffer header); | /**
* Check if a PKM header is correctly formatted.
* @param header native order direct buffer of the header.
*/ | Check if a PKM header is correctly formatted | isValid | {
"repo_name": "mateor/pdroid",
"path": "android-2.3.4_r1/tags/1.32/frameworks/base/opengl/java/android/opengl/ETC1.java",
"license": "gpl-3.0",
"size": 4895
} | [
"java.nio.Buffer"
] | import java.nio.Buffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 577,787 |
public void saveGates() {
Gates.save(null);
} | void function() { Gates.save(null); } | /**
* Saves all gate configurations.
*/ | Saves all gate configurations | saveGates | {
"repo_name": "InsomniaxGaming/TransporterReloaded",
"path": "src/com/frdfsnlght/transporter/api/API.java",
"license": "gpl-2.0",
"size": 7048
} | [
"com.frdfsnlght.transporter.Gates"
] | import com.frdfsnlght.transporter.Gates; | import com.frdfsnlght.transporter.*; | [
"com.frdfsnlght.transporter"
] | com.frdfsnlght.transporter; | 1,926,858 |
public static CandidateEntry findByG_P(long groupId, long wikiPageId)
throws com.liferay.micro.maintainance.candidate.exception.NoSuchEntryException {
return getPersistence().findByG_P(groupId, wikiPageId);
} | static CandidateEntry function(long groupId, long wikiPageId) throws com.liferay.micro.maintainance.candidate.exception.NoSuchEntryException { return getPersistence().findByG_P(groupId, wikiPageId); } | /**
* Returns the candidate entry where groupId = ? and wikiPageId = ? or throws a {@link NoSuchEntryException} if it could not be found.
*
* @param groupId the group ID
* @param wikiPageId the wiki page ID
* @return the matching candidate entry
* @throws NoSuchEntryException if a matching candidate entry could not be found
*/ | Returns the candidate entry where groupId = ? and wikiPageId = ? or throws a <code>NoSuchEntryException</code> if it could not be found | findByG_P | {
"repo_name": "moltam89/OWXP",
"path": "modules/micro-maintainance-candidate/micro-maintainance-candidate-api/src/main/java/com/liferay/micro/maintainance/candidate/service/persistence/CandidateEntryUtil.java",
"license": "gpl-3.0",
"size": 103522
} | [
"com.liferay.micro.maintainance.candidate.model.CandidateEntry"
] | import com.liferay.micro.maintainance.candidate.model.CandidateEntry; | import com.liferay.micro.maintainance.candidate.model.*; | [
"com.liferay.micro"
] | com.liferay.micro; | 1,684,294 |
void setDbcellPosition( int DbcellNumber, int DbOffset )
{
if( offsetStart == 0 )
{
offsetStart = getSheet().getMyBof().getOffset();
}
int insertOffset = DbOffset - offsetStart;
log.trace( "Setting DBBiffRec Position, offsetStart:" + offsetStart + " & InsertOffset = " + insertOffset );
offsetStart += insertOffset;
int insertloc = 16 + (DbcellNumber * 4);
byte[] off = ByteTools.cLongToLEBytes( insertOffset );
System.arraycopy( off, 0, data, insertloc, 4 );
}
class dbCellPointer implements Serializable
{
int cellloc = 0;
int datasiz = 0;
short s2;
short s3;
byte[] cdb = new byte[4];
private static final long serialVersionUID = -5132922970171084839L;
dbCellPointer( byte[] b )
{
cdb = b;
cellloc = ByteTools.readShort( b[0], b[1] );
datasiz = ByteTools.readShort( b[2], b[3] );
} | void setDbcellPosition( int DbcellNumber, int DbOffset ) { if( offsetStart == 0 ) { offsetStart = getSheet().getMyBof().getOffset(); } int insertOffset = DbOffset - offsetStart; log.trace( STR + offsetStart + STR + insertOffset ); offsetStart += insertOffset; int insertloc = 16 + (DbcellNumber * 4); byte[] off = ByteTools.cLongToLEBytes( insertOffset ); System.arraycopy( off, 0, data, insertloc, 4 ); } class dbCellPointer implements Serializable { int cellloc = 0; int datasiz = 0; short s2; short s3; byte[] cdb = new byte[4]; private static final long serialVersionUID = -5132922970171084839L; dbCellPointer( byte[] b ) { cdb = b; cellloc = ByteTools.readShort( b[0], b[1] ); datasiz = ByteTools.readShort( b[2], b[3] ); } | /**
* Called from streamer, this updates individual dbcell offset values.
* <p/>
* Will only run correctly if called sequentially, ie dboffset [0], [1], [2]
*
* @param DbcellNumber - which dbcell to update
* @param DbOffset - the pure offset from beginning of file
*/ | Called from streamer, this updates individual dbcell offset values. Will only run correctly if called sequentially, ie dboffset [0], [1], [2] | setDbcellPosition | {
"repo_name": "Maxels88/openxls",
"path": "src/main/java/org/openxls/formats/XLS/Index.java",
"license": "gpl-3.0",
"size": 10304
} | [
"java.io.Serializable",
"org.openxls.toolkit.ByteTools"
] | import java.io.Serializable; import org.openxls.toolkit.ByteTools; | import java.io.*; import org.openxls.toolkit.*; | [
"java.io",
"org.openxls.toolkit"
] | java.io; org.openxls.toolkit; | 230,155 |
public Map<TopicPartition, List<ConsumerRecord<K, V>>> fetchedRecords() {
if (this.subscriptions.partitionAssignmentNeeded()) {
return Collections.emptyMap();
} else {
Map<TopicPartition, List<ConsumerRecord<K, V>>> drained = new HashMap<TopicPartition, List<ConsumerRecord<K, V>>>();
for (PartitionRecords<K, V> part : this.records) {
Long consumed = subscriptions.consumed(part.partition);
if (this.subscriptions.assignedPartitions().contains(part.partition)
&& consumed != null && part.fetchOffset == consumed) {
List<ConsumerRecord<K, V>> records = drained.get(part.partition);
if (records == null) {
records = part.records;
drained.put(part.partition, records);
} else {
records.addAll(part.records);
}
subscriptions.consumed(part.partition, part.records.get(part.records.size() - 1).offset() + 1);
} else {
// these records aren't next in line based on the last consumed position, ignore them
// they must be from an obsolete request
log.debug("Ignoring fetched records for {} at offset {}", part.partition, part.fetchOffset);
}
}
this.records.clear();
return drained;
}
} | Map<TopicPartition, List<ConsumerRecord<K, V>>> function() { if (this.subscriptions.partitionAssignmentNeeded()) { return Collections.emptyMap(); } else { Map<TopicPartition, List<ConsumerRecord<K, V>>> drained = new HashMap<TopicPartition, List<ConsumerRecord<K, V>>>(); for (PartitionRecords<K, V> part : this.records) { Long consumed = subscriptions.consumed(part.partition); if (this.subscriptions.assignedPartitions().contains(part.partition) && consumed != null && part.fetchOffset == consumed) { List<ConsumerRecord<K, V>> records = drained.get(part.partition); if (records == null) { records = part.records; drained.put(part.partition, records); } else { records.addAll(part.records); } subscriptions.consumed(part.partition, part.records.get(part.records.size() - 1).offset() + 1); } else { log.debug(STR, part.partition, part.fetchOffset); } } this.records.clear(); return drained; } } | /**
* Return the fetched records, empty the record buffer and update the consumed position.
*
* @return The fetched records per partition
*/ | Return the fetched records, empty the record buffer and update the consumed position | fetchedRecords | {
"repo_name": "seancaffery/kafka",
"path": "clients/src/main/java/org/apache/kafka/clients/consumer/internals/Fetcher.java",
"license": "apache-2.0",
"size": 24234
} | [
"java.util.Collections",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.apache.kafka.clients.consumer.ConsumerRecord",
"org.apache.kafka.common.TopicPartition"
] | import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.TopicPartition; | import java.util.*; import org.apache.kafka.clients.consumer.*; import org.apache.kafka.common.*; | [
"java.util",
"org.apache.kafka"
] | java.util; org.apache.kafka; | 1,534,643 |
private CharacterDefinition parseCharacter(String line) {
CharacterDefinition characterDefinition= new CharacterDefinition();
StringTokenizer tokens = new StringTokenizer(line," =");
tokens.nextToken(); // char
tokens.nextToken(); // id
characterDefinition.id = Integer.parseInt(tokens.nextToken()); // id value
tokens.nextToken(); // x
characterDefinition.x = Integer.parseInt(tokens.nextToken()); // x value
tokens.nextToken(); // y
characterDefinition.y = Integer.parseInt(tokens.nextToken()); // y value
tokens.nextToken(); // width
characterDefinition.width = Integer.parseInt(tokens.nextToken()); // width value
tokens.nextToken(); // height
characterDefinition.height = Integer.parseInt(tokens.nextToken()); // height value
tokens.nextToken(); // x offset
characterDefinition.xOffset = Integer.parseInt(tokens.nextToken()); // xoffset value
tokens.nextToken(); // y offset
characterDefinition.yOffset = Integer.parseInt(tokens.nextToken()); // yoffset value
tokens.nextToken(); // xadvance
characterDefinition.xAdvance = Integer.parseInt(tokens.nextToken()); // xadvance
// line height
if (characterDefinition.id != ' ') {
lineHeight = Math.max(characterDefinition.height + characterDefinition.yOffset, lineHeight);
}
return characterDefinition;
} | CharacterDefinition function(String line) { CharacterDefinition characterDefinition= new CharacterDefinition(); StringTokenizer tokens = new StringTokenizer(line,STR); tokens.nextToken(); tokens.nextToken(); characterDefinition.id = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.x = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.y = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.width = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.height = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.xOffset = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.yOffset = Integer.parseInt(tokens.nextToken()); tokens.nextToken(); characterDefinition.xAdvance = Integer.parseInt(tokens.nextToken()); if (characterDefinition.id != ' ') { lineHeight = Math.max(characterDefinition.height + characterDefinition.yOffset, lineHeight); } return characterDefinition; } | /**
* Parse a single character line from the definition
*
* @param line The line to be parsed
* @return The character definition from the line
*/ | Parse a single character line from the definition | parseCharacter | {
"repo_name": "andreasdr/tdme",
"path": "src/net/drewke/tdme/gui/renderer/GUIFont.java",
"license": "mit",
"size": 13608
} | [
"java.util.StringTokenizer"
] | import java.util.StringTokenizer; | import java.util.*; | [
"java.util"
] | java.util; | 2,103,042 |
public void plot() throws Exception
{
boolean firstOneInLayer = true;
for(int i = 0; i < allPolygons.length; i++)
{
firstOneInLayer = true;
PolygonList pl = allPolygons[i];
for(int j = 0; j < pl.size(); j++)
{
plot(pl.polygon(j), firstOneInLayer);
firstOneInLayer = false;
}
}
}
| void function() throws Exception { boolean firstOneInLayer = true; for(int i = 0; i < allPolygons.length; i++) { firstOneInLayer = true; PolygonList pl = allPolygons[i]; for(int j = 0; j < pl.size(); j++) { plot(pl.polygon(j), firstOneInLayer); firstOneInLayer = false; } } } | /**
* Master plot function - draw everything. Supress border and/or hatch by
* setting borderPolygons and/or hatchedPolygons null
* @throws Exception
*/ | Master plot function - draw everything. Supress border and/or hatch by setting borderPolygons and/or hatchedPolygons null | plot | {
"repo_name": "reprap/host",
"path": "src/org/reprap/geometry/LayerProducer.java",
"license": "lgpl-2.1",
"size": 13500
} | [
"org.reprap.geometry.polygons.PolygonList"
] | import org.reprap.geometry.polygons.PolygonList; | import org.reprap.geometry.polygons.*; | [
"org.reprap.geometry"
] | org.reprap.geometry; | 830,007 |
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String login = req.getParameter("login");
String password = req.getParameter("password");
resp.setContentType("text/plain");
resp.setCharacterEncoding("UTF-8");
UserDaoImpl users = null;
try (Connection conn = daoFactory.getConnection()) {
users = new UserDaoImpl(conn);
if (users.isCredentional(login, password)) {
HttpSession session = req.getSession();
User user = users.getByLogin(login);
session.setAttribute("login", login);
if ("ADMIN".equals(user.getRole().getName())) {
resp.getWriter().write("./adminView.html");
} else if ("USER".equals(user.getRole().getName())) {
resp.getWriter().write("./userView.html");
} else if ("MANDATOR".equals(user.getRole().getName())) {
resp.getWriter().write("./mandatorView.html");
}
} else {
resp.getWriter().write("false");
}
} catch (SQLException e) {
e.printStackTrace();
}
} | void function(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String login = req.getParameter("login"); String password = req.getParameter(STR); resp.setContentType(STR); resp.setCharacterEncoding("UTF-8"); UserDaoImpl users = null; try (Connection conn = daoFactory.getConnection()) { users = new UserDaoImpl(conn); if (users.isCredentional(login, password)) { HttpSession session = req.getSession(); User user = users.getByLogin(login); session.setAttribute("login", login); if ("ADMIN".equals(user.getRole().getName())) { resp.getWriter().write(STR); } else if ("USER".equals(user.getRole().getName())) { resp.getWriter().write(STR); } else if (STR.equals(user.getRole().getName())) { resp.getWriter().write(STR); } } else { resp.getWriter().write("false"); } } catch (SQLException e) { e.printStackTrace(); } } | /**
* Overrides method doPost.
*
* @param req request.
* @param resp response.
* @throws ServletException ServletException.
* @throws IOException IOException.
*/ | Overrides method doPost | doPost | {
"repo_name": "amezgin/amezgin",
"path": "chapter_009/testtask/src/main/java/ru/job4j/web/SignInController.java",
"license": "apache-2.0",
"size": 2753
} | [
"java.io.IOException",
"java.sql.Connection",
"java.sql.SQLException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"javax.servlet.http.HttpSession",
"ru.job4j.dao.UserDaoImpl",
"ru.job4j.model.User"
] | import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import ru.job4j.dao.UserDaoImpl; import ru.job4j.model.User; | import java.io.*; import java.sql.*; import javax.servlet.*; import javax.servlet.http.*; import ru.job4j.dao.*; import ru.job4j.model.*; | [
"java.io",
"java.sql",
"javax.servlet",
"ru.job4j.dao",
"ru.job4j.model"
] | java.io; java.sql; javax.servlet; ru.job4j.dao; ru.job4j.model; | 131,177 |
EReference getProperties_Property(); | EReference getProperties_Property(); | /**
* Returns the meta object for the containment reference list '{@link acmm.Properties#getProperty <em>Property</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Property</em>'.
* @see acmm.Properties#getProperty()
* @see #getProperties()
* @generated
*/ | Returns the meta object for the containment reference list '<code>acmm.Properties#getProperty Property</code>'. | getProperties_Property | {
"repo_name": "acgtic211/COScore-Community",
"path": "cos/src/main/java/acmm/AcmmPackage.java",
"license": "gpl-3.0",
"size": 90938
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 418,856 |
public boolean mayBuyFromBM() {
StringTokenizer blockedFactions = new StringTokenizer(this.getConfig("BMNoBuy"), "$");
while (blockedFactions.hasMoreTokens()) {
if (getName().equals(blockedFactions.nextToken())) {
return false;
}
}
return true;
}
| boolean function() { StringTokenizer blockedFactions = new StringTokenizer(this.getConfig(STR), "$"); while (blockedFactions.hasMoreTokens()) { if (getName().equals(blockedFactions.nextToken())) { return false; } } return true; } | /**
* Simple method which determines whether a given SHouse (and its players)
* may access the market to BUY units. We check this loop continuously
* instead of saving a value in the SHouse (inefficient) b/c the config may
* change between checks.
*/ | Simple method which determines whether a given SHouse (and its players) may access the market to BUY units. We check this loop continuously instead of saving a value in the SHouse (inefficient) b/c the config may change between checks | mayBuyFromBM | {
"repo_name": "mekwars-legends/mekwars-upstream",
"path": "src/server/campaign/SHouse.java",
"license": "gpl-2.0",
"size": 146456
} | [
"java.util.StringTokenizer"
] | import java.util.StringTokenizer; | import java.util.*; | [
"java.util"
] | java.util; | 1,840 |
private void printHeader(Dataset data, String fileName) {
String cadena = "";
int i, j;
cadena += "@relation " + data.getRelacion() + "\n";
for (i = 0; i < data.getNVariables(); i++) {
cadena += "@attribute " + data.getAttributeIndex(i) + " ";
if (data.getAttributeTypeIndex(i).equalsIgnoreCase("nominal")) { //list
cadena += "{";
for (j = 0; j < data.getRange(i).size(); j++) {
cadena += (String) data.getRange(i).elementAt(j);
if (j < data.getRange(i).size() - 1) {
cadena += ", ";
}
}
cadena += "}\n";
} else if (data.getAttributeTypeIndex(i).equalsIgnoreCase("integer")) { //int
cadena += "integer" + " [" + data.getRangesInt(i, 0) + ", " +
data.getRangesInt(i, 1) + "]\n";
} else { //real
cadena += "real" + " [" + data.getRangesReal(i, 0) + ", " +
data.getRangesReal(i, 1) + "]\n";
}
}
cadena += "@inputs ";
boolean poner = false;
for (j = 0; j < data.getNInputs(); j++) {
if (!poner) {
cadena += (String) (data.getInputs().elementAt(j));
poner = true;
} else {
cadena += ", " + (String) (data.getInputs().elementAt(j));
}
}
cadena += "\n";
cadena += "@outputs ";
poner = false;
for (j = 0; j < data.getNOutputs(); j++) {
if (!poner) {
cadena += (String) (data.getOutputs().elementAt(j));
poner = true;
} else {
cadena += ", " + (String) (data.getOutputs().elementAt(j));
}
}
cadena += "\n";
cadena += "@data\n";
Files.writeFile(fileName, cadena);
}
| void function(Dataset data, String fileName) { String cadena = STR@relation STR\nSTR@attribute STR STRnominalSTR{STR, STR}\nSTRintegerSTRintegerSTR [STR, STR]\nSTRrealSTR [STR, STR]\nSTR@inputs STR, STR\nSTR@outputs STR, STR\nSTR@data\n"; Files.writeFile(fileName, cadena); } | /**
* <p>
* Writes the header of a dataset in given file
* </p>
* @param data Dataset to extract the header
* @param fileName Name of the output file
*/ | Writes the header of a dataset in given file | printHeader | {
"repo_name": "triguero/Keel3.0",
"path": "src/keel/GraphInterKeel/datacf/partitionData/PartitionGenerator.java",
"license": "gpl-3.0",
"size": 44607
} | [
"org.core.Files"
] | import org.core.Files; | import org.core.*; | [
"org.core"
] | org.core; | 259,700 |
public void xMinYMin() throws ParseException {
align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMINYMIN;
} | void function() throws ParseException { align = SVGPreserveAspectRatio.SVG_PRESERVEASPECTRATIO_XMINYMIN; } | /**
* Implements {@link PreserveAspectRatioHandler#xMinYMin()}.
*/ | Implements <code>PreserveAspectRatioHandler#xMinYMin()</code> | xMinYMin | {
"repo_name": "Uni-Sol/batik",
"path": "sources/org/apache/batik/bridge/SVGAnimationEngine.java",
"license": "apache-2.0",
"size": 54992
} | [
"org.apache.batik.parser.ParseException",
"org.w3c.dom.svg.SVGPreserveAspectRatio"
] | import org.apache.batik.parser.ParseException; import org.w3c.dom.svg.SVGPreserveAspectRatio; | import org.apache.batik.parser.*; import org.w3c.dom.svg.*; | [
"org.apache.batik",
"org.w3c.dom"
] | org.apache.batik; org.w3c.dom; | 1,402,986 |
public DetectorResponseInner getHostingEnvironmentDetectorResponse(String resourceGroupName, String name, String detectorName, DateTime startTime, DateTime endTime, String timeGrain) {
return getHostingEnvironmentDetectorResponseWithServiceResponseAsync(resourceGroupName, name, detectorName, startTime, endTime, timeGrain).toBlocking().single().body();
} | DetectorResponseInner function(String resourceGroupName, String name, String detectorName, DateTime startTime, DateTime endTime, String timeGrain) { return getHostingEnvironmentDetectorResponseWithServiceResponseAsync(resourceGroupName, name, detectorName, startTime, endTime, timeGrain).toBlocking().single().body(); } | /**
* Get Hosting Environment Detector Response.
* Get Hosting Environment Detector Response.
*
* @param resourceGroupName Name of the resource group to which the resource belongs.
* @param name App Service Environment Name
* @param detectorName Detector Resource Name
* @param startTime Start Time
* @param endTime End Time
* @param timeGrain Time Grain
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws DefaultErrorResponseException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the DetectorResponseInner object if successful.
*/ | Get Hosting Environment Detector Response. Get Hosting Environment Detector Response | getHostingEnvironmentDetectorResponse | {
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/appservice/mgmt-v2018_02_01/src/main/java/com/microsoft/azure/management/appservice/v2018_02_01/implementation/DiagnosticsInner.java",
"license": "mit",
"size": 295384
} | [
"org.joda.time.DateTime"
] | import org.joda.time.DateTime; | import org.joda.time.*; | [
"org.joda.time"
] | org.joda.time; | 1,904,746 |
ExecutionContext context = createMock(ExecutionContext.class);
replay(context);
File testIn = new File(tmpDir.getRoot(), "testIn");
ZipOutputStream zipOut = new ZipOutputStream(
new BufferedOutputStream(new FileOutputStream(testIn)));
try {
zipOut.putNextEntry(new ZipEntry("foobar"));
zipOut.write(new byte[] { 0 });
} finally {
zipOut.close();
}
File outputFile = tmpDir.newFile("out.dex");
Path outputHashFile = new File(tmpDir.getRoot(), "out.dex.hash").toPath();
Files.write("dummy", outputHashFile.toFile(), Charsets.UTF_8);
ProjectFilesystem filesystem = new ProjectFilesystem(tmpDir.getRoot().toPath());
Sha1HashCode actualHashCode = Sha1HashCode.of(Strings.repeat("a", 40));
DxPseudoRule rule = new DxPseudoRule(
filesystem,
ImmutableMap.of(testIn.toPath(), actualHashCode),
ImmutableSet.of(testIn.toPath()),
outputFile.toPath(),
outputHashFile,
EnumSet.of(DxStep.Option.NO_OPTIMIZE));
assertFalse("'dummy' is not a matching input hash", rule.checkIsCached());
// Write the real hash into the output hash file and ensure that checkIsCached now
// yields true.
String actualHash = rule.hashInputs();
assertFalse(actualHash.isEmpty());
Files.write(actualHash, outputHashFile.toFile(), Charsets.UTF_8);
assertTrue("Matching input hash should be considered cached", rule.checkIsCached());
} | ExecutionContext context = createMock(ExecutionContext.class); replay(context); File testIn = new File(tmpDir.getRoot(), STR); ZipOutputStream zipOut = new ZipOutputStream( new BufferedOutputStream(new FileOutputStream(testIn))); try { zipOut.putNextEntry(new ZipEntry(STR)); zipOut.write(new byte[] { 0 }); } finally { zipOut.close(); } File outputFile = tmpDir.newFile(STR); Path outputHashFile = new File(tmpDir.getRoot(), STR).toPath(); Files.write("dummy", outputHashFile.toFile(), Charsets.UTF_8); ProjectFilesystem filesystem = new ProjectFilesystem(tmpDir.getRoot().toPath()); Sha1HashCode actualHashCode = Sha1HashCode.of(Strings.repeat("a", 40)); DxPseudoRule rule = new DxPseudoRule( filesystem, ImmutableMap.of(testIn.toPath(), actualHashCode), ImmutableSet.of(testIn.toPath()), outputFile.toPath(), outputHashFile, EnumSet.of(DxStep.Option.NO_OPTIMIZE)); assertFalse(STR, rule.checkIsCached()); String actualHash = rule.hashInputs(); assertFalse(actualHash.isEmpty()); Files.write(actualHash, outputHashFile.toFile(), Charsets.UTF_8); assertTrue(STR, rule.checkIsCached()); } | /**
* Tests whether pseudo rule cache detection is working properly.
*/ | Tests whether pseudo rule cache detection is working properly | testDxPseudoRuleCaching | {
"repo_name": "MarkRunWu/buck",
"path": "test/com/facebook/buck/android/SmartDexingStepTest.java",
"license": "apache-2.0",
"size": 7964
} | [
"com.facebook.buck.android.SmartDexingStep",
"com.facebook.buck.io.ProjectFilesystem",
"com.facebook.buck.rules.Sha1HashCode",
"com.facebook.buck.step.ExecutionContext",
"com.google.common.base.Charsets",
"com.google.common.base.Strings",
"com.google.common.collect.ImmutableMap",
"com.google.common.co... | import com.facebook.buck.android.SmartDexingStep; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.rules.Sha1HashCode; import com.facebook.buck.step.ExecutionContext; import com.google.common.base.Charsets; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.io.Files; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.nio.file.Path; import java.util.EnumSet; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import org.easymock.EasyMock; import org.junit.Assert; | import com.facebook.buck.android.*; import com.facebook.buck.io.*; import com.facebook.buck.rules.*; import com.facebook.buck.step.*; import com.google.common.base.*; import com.google.common.collect.*; import com.google.common.io.*; import java.io.*; import java.nio.file.*; import java.util.*; import java.util.zip.*; import org.easymock.*; import org.junit.*; | [
"com.facebook.buck",
"com.google.common",
"java.io",
"java.nio",
"java.util",
"org.easymock",
"org.junit"
] | com.facebook.buck; com.google.common; java.io; java.nio; java.util; org.easymock; org.junit; | 1,075,838 |
@Nullable
public T3 get() throws ClientException {
return buildFromResponse(send());
} | T3 function() throws ClientException { return buildFromResponse(send()); } | /**
* Gets the collection of items
*
* @return the collection page
*/ | Gets the collection of items | get | {
"repo_name": "microsoftgraph/msgraph-sdk-java-core",
"path": "src/main/java/com/microsoft/graph/http/BaseEntityCollectionRequest.java",
"license": "mit",
"size": 4324
} | [
"com.microsoft.graph.core.ClientException"
] | import com.microsoft.graph.core.ClientException; | import com.microsoft.graph.core.*; | [
"com.microsoft.graph"
] | com.microsoft.graph; | 2,209,846 |
/**
 * Registers a tenant (tenant self-registration).
 *
 * Validation order: email, domain, first/last names, then captcha. Only
 * after all validations pass is the tenant persisted, claims added,
 * listeners notified, and (optionally) a billing subscription created.
 *
 * @param tenantInfoBean  the tenant information to register
 * @param captchaInfoBean the captcha answer to validate
 * @return a string UUID used to render the theme management page
 * @throws AxisFault  if any validation (email, domain, name, captcha) fails
 * @throws Exception  if persisting the tenant or notifying listeners fails
 */
public String registerTenant(TenantInfoBean tenantInfoBean, CaptchaInfoBean captchaInfoBean)
        throws Exception {
    // validate the email
    try {
        CommonUtil.validateEmail(tenantInfoBean.getEmail());
    } catch (Exception e) {
        String msg = "Invalid email is provided.";
        log.error(msg, e);
        throw new AxisFault(msg);
    }
    // validate the domain
    String domainName = tenantInfoBean.getTenantDomain();
    try {
        TenantMgtUtil.validateDomain(domainName);
    } catch (Exception e) {
        String msg = "Domain Validation Failed.";
        log.error(msg, e);
        throw new AxisFault(msg);
    }
    // validate the first/last names
    String firstname = tenantInfoBean.getFirstname();
    String lastname = tenantInfoBean.getLastname();
    try {
        CommonUtil.validateName(firstname, "First Name");
        CommonUtil.validateName(lastname, "Last Name");
    } catch (Exception e) {
        String msg = "First/Last Name Validation Failed.";
        log.error(msg, e);
        throw new AxisFault(msg);
    } // now validate the captcha
    try {
        CaptchaUtil.validateCaptcha(captchaInfoBean);
        if (log.isDebugEnabled()) {
            log.debug("Captcha Successfully Validated.");
        }
    } catch (Exception e) {
        String msg = CaptchaMgtConstants.CAPTCHA_ERROR_MSG;
        log.error(msg, e);
        throw new AxisFault(msg);
    } finally {
        // The captcha secret is single-use: clean it up whether or not
        // validation succeeded.
        try {
            CaptchaUtil.cleanCaptcha(captchaInfoBean.getSecretKey());
        } catch (Exception e) {
            String msg = "Error in cleaning captcha. ";
            log.error(msg, e);
            // not throwing the exception in finally more up.
        }
    }
    // persists the tenant.
    Tenant tenant = TenantMgtUtil.initializeTenant(tenantInfoBean);
    TenantPersistor persistor = TenantMgtServiceComponent.getTenantPersistor();
    int tenantId = persistor.persistTenant(tenant, true, tenantInfoBean.getSuccessKey(),
            tenantInfoBean.getOriginatedService());
    tenantInfoBean.setTenantId(tenantId);
    TenantMgtUtil.addClaimsToUserStoreManager(tenant);
    //Notify tenant addition
    try {
        TenantMgtUtil.triggerAddTenant(tenantInfoBean);
    } catch (StratosException e) {
        String msg = "Error in notifying tenant addition.";
        log.error(msg, e);
        throw new Exception(msg, e);
    }
    //adding the subscription entry
    // NOTE: a failure here is logged but deliberately NOT rethrown —
    // registration succeeds even if the billing subscription cannot be added.
    try {
        if (TenantMgtServiceComponent.getBillingService() != null) {
            TenantMgtServiceComponent.getBillingService().addUsagePlan(tenant,
                    tenantInfoBean.getUsagePlan());
            if (log.isDebugEnabled()) {
                log.debug("Subscription added successfully for the tenant: " +
                        tenantInfoBean.getTenantDomain());
            }
        }
    } catch (Exception e) {
        String msg = "Error occurred while adding the subscription for tenant: " + domainName;
        log.error(msg, e);
    }
    // If Email Validation is made optional, tenant will be activated now.
    if (CommonUtil.isTenantManagementEmailsDisabled() ||
            !CommonUtil.isEmailValidationMandatory()) {
        TenantMgtUtil.activateTenantInitially(tenantInfoBean, tenantId);
    }
    return TenantMgtUtil.prepareStringToShowThemeMgtPage(tenant.getId());
}
{ TenantMgtUtil.activateTenantInitially(tenantInfoBean, tenantId); } return TenantMgtUtil.prepareStringToShowThemeMgtPage(tenant.getId()); } | /**
* Registers a tenant - Tenant Self Registration
*
* @param tenantInfoBean - tenantInformation
* @param captchaInfoBean - captchaInformation
* @return String UUID
* @throws Exception if the tenant registration fails.
*/ | Registers a tenant - Tenant Self Registration | registerTenant | {
"repo_name": "panelion/incubator-stratos",
"path": "components/org.apache.stratos.tenant.mgt/src/main/java/org/apache/stratos/tenant/mgt/services/TenantSelfRegistrationService.java",
"license": "apache-2.0",
"size": 7859
} | [
"org.apache.axis2.AxisFault",
"org.apache.stratos.common.beans.TenantInfoBean",
"org.apache.stratos.common.exception.StratosException",
"org.apache.stratos.common.util.CommonUtil",
"org.apache.stratos.tenant.mgt.internal.TenantMgtServiceComponent",
"org.apache.stratos.tenant.mgt.util.TenantMgtUtil",
"or... | import org.apache.axis2.AxisFault; import org.apache.stratos.common.beans.TenantInfoBean; import org.apache.stratos.common.exception.StratosException; import org.apache.stratos.common.util.CommonUtil; import org.apache.stratos.tenant.mgt.internal.TenantMgtServiceComponent; import org.apache.stratos.tenant.mgt.util.TenantMgtUtil; import org.wso2.carbon.captcha.mgt.beans.CaptchaInfoBean; import org.wso2.carbon.captcha.mgt.constants.CaptchaMgtConstants; import org.wso2.carbon.captcha.mgt.util.CaptchaUtil; import org.wso2.carbon.core.multitenancy.persistence.TenantPersistor; import org.wso2.carbon.user.core.tenant.Tenant; | import org.apache.axis2.*; import org.apache.stratos.common.beans.*; import org.apache.stratos.common.exception.*; import org.apache.stratos.common.util.*; import org.apache.stratos.tenant.mgt.internal.*; import org.apache.stratos.tenant.mgt.util.*; import org.wso2.carbon.captcha.mgt.beans.*; import org.wso2.carbon.captcha.mgt.constants.*; import org.wso2.carbon.captcha.mgt.util.*; import org.wso2.carbon.core.multitenancy.persistence.*; import org.wso2.carbon.user.core.tenant.*; | [
"org.apache.axis2",
"org.apache.stratos",
"org.wso2.carbon"
] | org.apache.axis2; org.apache.stratos; org.wso2.carbon; | 2,561,757 |
public boolean collectCycle(List<JavaPackage> list)
{
if (list.contains(this))
{
list.add(this);
return true;
}
list.add(this);
for (JavaPackage efferent : getDependsUpon())
{
if (efferent.collectCycle(list))
{
return true;
}
}
list.remove(this);
return false;
} | boolean function(List<JavaPackage> list) { if (list.contains(this)) { list.add(this); return true; } list.add(this); for (JavaPackage efferent : getDependsUpon()) { if (efferent.collectCycle(list)) { return true; } } list.remove(this); return false; } | /**
* Collects the packages participating in the first package dependency cycle
* detected which originates from this package.
*
* @param list Collecting object to be populated with the list of
* JavaPackage instances in a cycle.
* @return <code>true</code> if a cycle exist; <code>false</code>
* otherwise.
*/ | Collects the packages participating in the first package dependency cycle detected which originates from this package | collectCycle | {
"repo_name": "timtiemens/secretshare",
"path": "src/test/java/com/tiemens/secretshare/jdeps/model/JavaPackage.java",
"license": "lgpl-2.1",
"size": 7789
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,613,822 |
@Override
public Response subscriptionsSubscriptionIdDelete(String subscriptionId, String ifMatch,
String ifUnmodifiedSince, Request request) throws
NotFoundException {
String username = RestApiUtil.getLoggedInUsername(request);
try {
APIStore apiStore = RestApiUtil.getConsumer(username);
String existingFingerprint = subscriptionsSubscriptionIdGetFingerprint(subscriptionId, null, null,
request);
if (!StringUtils.isEmpty(ifMatch) && !StringUtils.isEmpty(existingFingerprint) && !ifMatch
.contains(existingFingerprint)) {
return Response.status(Response.Status.PRECONDITION_FAILED).build();
}
apiStore.deleteAPISubscription(subscriptionId);
} catch (GatewayException e) {
String errorMessage = "Failed to remove subscription :" + subscriptionId + " from gateway";
log.error(errorMessage, e);
return Response.status(Response.Status.ACCEPTED).build();
} catch (APIManagementException e) {
String errorMessage = "Error while deleting subscription";
HashMap<String, String> paramList = new HashMap<String, String>();
paramList.put(APIMgtConstants.ExceptionsConstants.SUBSCRIPTION_ID, subscriptionId);
ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
log.error(errorMessage, e);
return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
}
return Response.ok().build();
} | Response function(String subscriptionId, String ifMatch, String ifUnmodifiedSince, Request request) throws NotFoundException { String username = RestApiUtil.getLoggedInUsername(request); try { APIStore apiStore = RestApiUtil.getConsumer(username); String existingFingerprint = subscriptionsSubscriptionIdGetFingerprint(subscriptionId, null, null, request); if (!StringUtils.isEmpty(ifMatch) && !StringUtils.isEmpty(existingFingerprint) && !ifMatch .contains(existingFingerprint)) { return Response.status(Response.Status.PRECONDITION_FAILED).build(); } apiStore.deleteAPISubscription(subscriptionId); } catch (GatewayException e) { String errorMessage = STR + subscriptionId + STR; log.error(errorMessage, e); return Response.status(Response.Status.ACCEPTED).build(); } catch (APIManagementException e) { String errorMessage = STR; HashMap<String, String> paramList = new HashMap<String, String>(); paramList.put(APIMgtConstants.ExceptionsConstants.SUBSCRIPTION_ID, subscriptionId); ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList); log.error(errorMessage, e); return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build(); } return Response.ok().build(); } | /**
* Delete a subscription
*
* @param subscriptionId Id of the subscription
* @param ifMatch If-Match header value
* @param ifUnmodifiedSince If-Unmodified-Since header value
* @param request msf4j request object
* @return 200 OK response if the deletion was successful
* @throws NotFoundException When the particular resource does not exist in the system
*/ | Delete a subscription | subscriptionsSubscriptionIdDelete | {
"repo_name": "lakmali/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.store/src/main/java/org/wso2/carbon/apimgt/rest/api/store/impl/SubscriptionsApiServiceImpl.java",
"license": "apache-2.0",
"size": 17608
} | [
"java.util.HashMap",
"javax.ws.rs.core.Response",
"org.apache.commons.lang3.StringUtils",
"org.wso2.carbon.apimgt.core.api.APIStore",
"org.wso2.carbon.apimgt.core.exception.APIManagementException",
"org.wso2.carbon.apimgt.core.exception.GatewayException",
"org.wso2.carbon.apimgt.core.util.APIMgtConstant... | import java.util.HashMap; import javax.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.wso2.carbon.apimgt.core.api.APIStore; import org.wso2.carbon.apimgt.core.exception.APIManagementException; import org.wso2.carbon.apimgt.core.exception.GatewayException; import org.wso2.carbon.apimgt.core.util.APIMgtConstants; import org.wso2.carbon.apimgt.rest.api.common.dto.ErrorDTO; import org.wso2.carbon.apimgt.rest.api.common.util.RestApiUtil; import org.wso2.carbon.apimgt.rest.api.store.NotFoundException; import org.wso2.msf4j.Request; | import java.util.*; import javax.ws.rs.core.*; import org.apache.commons.lang3.*; import org.wso2.carbon.apimgt.core.api.*; import org.wso2.carbon.apimgt.core.exception.*; import org.wso2.carbon.apimgt.core.util.*; import org.wso2.carbon.apimgt.rest.api.common.dto.*; import org.wso2.carbon.apimgt.rest.api.common.util.*; import org.wso2.carbon.apimgt.rest.api.store.*; import org.wso2.msf4j.*; | [
"java.util",
"javax.ws",
"org.apache.commons",
"org.wso2.carbon",
"org.wso2.msf4j"
] | java.util; javax.ws; org.apache.commons; org.wso2.carbon; org.wso2.msf4j; | 56,530 |
/**
 * Tests decoding of a mapping treatment JSON object: the fixture must
 * decode to exactly two instructions, each of type UNICAST or MULTICAST.
 */
@Test
public void testMappingTreatmentDecode() throws IOException {
    MappingTreatment treatment = getTreatment("MappingTreatment.json");
    List<MappingInstruction> instructions = treatment.instructions();
    assertThat(instructions.size(), is(2));
    ImmutableSet<String> expectedTypes = ImmutableSet.of("UNICAST", "MULTICAST");
    for (MappingInstruction instruction : instructions) {
        assertThat(expectedTypes.contains(instruction.type().name()), is(true));
    }
}
public static final class MappingTreatmentJsonMatcher
extends TypeSafeDiagnosingMatcher<JsonNode> {
private final MappingTreatment mappingTreatment;
private MappingTreatmentJsonMatcher(MappingTreatment mappingTreatment) {
this.mappingTreatment = mappingTreatment;
} | void function() throws IOException { MappingTreatment treatment = getTreatment(STR); List<MappingInstruction> insts = treatment.instructions(); assertThat(insts.size(), is(2)); ImmutableSet<String> types = ImmutableSet.of(STR, STR); assertThat(types.contains(insts.get(0).type().name()), is(true)); assertThat(types.contains(insts.get(1).type().name()), is(true)); } public static final class MappingTreatmentJsonMatcher extends TypeSafeDiagnosingMatcher<JsonNode> { private final MappingTreatment mappingTreatment; private MappingTreatmentJsonMatcher(MappingTreatment mappingTreatment) { this.mappingTreatment = mappingTreatment; } | /**
* Tests decoding of a mapping treatment JSON object.
*/ | Tests decoding of a mapping treatment JSON object | testMappingTreatmentDecode | {
"repo_name": "LorenzReinhart/ONOSnew",
"path": "apps/mappingmanagement/api/src/test/java/org/onosproject/mapping/codec/MappingTreatmentCodecTest.java",
"license": "apache-2.0",
"size": 8234
} | [
"com.fasterxml.jackson.databind.JsonNode",
"com.google.common.collect.ImmutableSet",
"java.io.IOException",
"java.util.List",
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers",
"org.hamcrest.TypeSafeDiagnosingMatcher",
"org.onosproject.mapping.MappingTreatment",
"org.onosproject.mapping.instruct... | import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableSet; import java.io.IOException; import java.util.List; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.hamcrest.TypeSafeDiagnosingMatcher; import org.onosproject.mapping.MappingTreatment; import org.onosproject.mapping.instructions.MappingInstruction; | import com.fasterxml.jackson.databind.*; import com.google.common.collect.*; import java.io.*; import java.util.*; import org.hamcrest.*; import org.onosproject.mapping.*; import org.onosproject.mapping.instructions.*; | [
"com.fasterxml.jackson",
"com.google.common",
"java.io",
"java.util",
"org.hamcrest",
"org.onosproject.mapping"
] | com.fasterxml.jackson; com.google.common; java.io; java.util; org.hamcrest; org.onosproject.mapping; | 1,981,627 |
/**
 * Gets the shading normal at the hit point. This may differ from the
 * geometric normal.
 *
 * @return the shading normal
 */
public final Vector3 getNormal() {
    return n;
}
| final Vector3 function() { return n; } | /**
* Get shading normal at the hit point. This may differ from the geometric
* normal
*
* @return shading normal
*/ | Get shading normal at the hit point. This may differ from the geometric normal | getNormal | {
"repo_name": "David-Carlson/BlackBody",
"path": "src/org/sunflow/core/ShadingState.java",
"license": "mit",
"size": 26052
} | [
"org.sunflow.math.Vector3"
] | import org.sunflow.math.Vector3; | import org.sunflow.math.*; | [
"org.sunflow.math"
] | org.sunflow.math; | 2,790,084 |
/**
 * Returns the UI elements holding data to import.
 *
 * @return the values of the internal element map
 */
Collection<ImporterUIElement> getImportElements()
{
	return uiElements.values();
}
* Returns the elements with data to import.
*
* @return See above.
*/ | Returns the elements with data to import | getImportElements | {
"repo_name": "dominikl/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/fsimporter/view/ImporterUI.java",
"license": "gpl-2.0",
"size": 21218
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,588,256 |
/**
 * Shares the base secret with the specified recipient. The contents are
 * re-encrypted under a freshly generated symmetric key and initialisation
 * vector, and that key is in turn encrypted with the recipient's public
 * encryption key. A new derived secret is created and returned.
 *
 * @param identityId  the authenticating identity id
 * @param recipientId the target identity id to share the base secret with
 * @param secretId    the base secret id
 * @return the newly created derived secret
 * @throws DeltaClientException  upon client-side exception
 * @throws DeltaServiceException upon service exception
 */
public DeltaSecret shareSecret(String identityId, String recipientId, String secretId)
        throws DeltaClientException, DeltaServiceException {
    checkId(identityId);
    checkId(recipientId);
    checkId(secretId);
    // Resolve the recipient (for their public key) and the base secret
    // (for its plaintext content) before any crypto work.
    DeltaIdentity recipient = getIdentity(identityId, recipientId);
    DeltaSecret secret = getSecret(identityId, secretId);
    // Fresh key material per share: never reuse the base secret's key/IV.
    SecretKey key = cryptoService.generateSecretKey();
    byte[] iv = cryptoService.generateInitialisationVector();
    String contentsBase64 = cryptoService.encrypt(secret.getContent(), key, iv);
    // Wrap the symmetric key with the recipient's RSA public key so only
    // the recipient can unwrap it.
    String encryptedKey = cryptoService.encryptKeyWithPublicKey(key,
            cryptoService.getPublicKey(recipient.getEncryptionPublicKeyBase64()));
    ShareSecretResponse response =
            apiClient.shareSecret(ShareSecretRequest.builder(identityId)
                    .withBaseSecret(secretId)
                    .withRsaKeyOwnerId(recipientId)
                    .withContent(contentsBase64)
                    .withEncryptionDetails(encryptedKey, iv)
                    .build());
    // Build the local model of the derived secret; derived=true links it
    // back to the base secret.
    return DeltaSecret.builder(this, cryptoService)
            .withId(response.getSecretId())
            .withRsaKeyOwnerId(recipientId)
            .withCreatedBy(identityId)
            .withSymmetricKey(encryptedKey)
            .withInitialisationVector(BaseEncoding.base64().encode(iv))
            .withBaseSecret(secretId)
            .withDerived(true)
            .build();
}
* Shares the base secret with the specified recipient. The
* contents will be encrypted with the public encryption key of the
* RSA key owner, and a new secret key and initialisation vector
* will be generated. This call will result in a new derived secret
* being created and returned.
*
* @param identityId the authenticating identity id
* @param recipientId the target identity id to share the base secret
* @param secretId the base secret id
* @return the derived secret
* @throws DeltaClientException upon client-side exception
* @throws DeltaServiceException upon service exception
*/ | Shares the base secret with the specified recipient. The contents will be encrypted with the public encryption key of the RSA key owner, and a new secret key and initialisation vector will be generated. This call will result in a new derived secret being created and returned | shareSecret | {
"repo_name": "Covata/delta-sdk-java",
"path": "sdk/src/main/java/com/covata/delta/sdk/DeltaClient.java",
"license": "apache-2.0",
"size": 36892
} | [
"com.covata.delta.sdk.api.request.ShareSecretRequest",
"com.covata.delta.sdk.api.response.ShareSecretResponse",
"com.covata.delta.sdk.exception.DeltaClientException",
"com.covata.delta.sdk.exception.DeltaServiceException",
"com.covata.delta.sdk.model.DeltaIdentity",
"com.covata.delta.sdk.model.DeltaSecret... | import com.covata.delta.sdk.api.request.ShareSecretRequest; import com.covata.delta.sdk.api.response.ShareSecretResponse; import com.covata.delta.sdk.exception.DeltaClientException; import com.covata.delta.sdk.exception.DeltaServiceException; import com.covata.delta.sdk.model.DeltaIdentity; import com.covata.delta.sdk.model.DeltaSecret; import com.google.common.io.BaseEncoding; import javax.crypto.SecretKey; | import com.covata.delta.sdk.api.request.*; import com.covata.delta.sdk.api.response.*; import com.covata.delta.sdk.exception.*; import com.covata.delta.sdk.model.*; import com.google.common.io.*; import javax.crypto.*; | [
"com.covata.delta",
"com.google.common",
"javax.crypto"
] | com.covata.delta; com.google.common; javax.crypto; | 996,754 |
private void applyToNodes(String[] nodes1, String[] nodes2, BiConsumer<MockTransportService, MockTransportService> consumer) {
for (String node1 : nodes1) {
if (disruptedLinks.nodes().contains(node1)) {
for (String node2 : nodes2) {
if (disruptedLinks.nodes().contains(node2)) {
if (node1.equals(node2) == false) {
if (disruptedLinks.disrupt(node1, node2)) {
consumer.accept(transport(node1), transport(node2));
}
}
}
}
}
}
} | void function(String[] nodes1, String[] nodes2, BiConsumer<MockTransportService, MockTransportService> consumer) { for (String node1 : nodes1) { if (disruptedLinks.nodes().contains(node1)) { for (String node2 : nodes2) { if (disruptedLinks.nodes().contains(node2)) { if (node1.equals(node2) == false) { if (disruptedLinks.disrupt(node1, node2)) { consumer.accept(transport(node1), transport(node2)); } } } } } } } | /**
* Applies action to all disrupted links between two sets of nodes.
*/ | Applies action to all disrupted links between two sets of nodes | applyToNodes | {
"repo_name": "jimczi/elasticsearch",
"path": "test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java",
"license": "apache-2.0",
"size": 18982
} | [
"java.util.function.BiConsumer",
"org.elasticsearch.test.transport.MockTransportService"
] | import java.util.function.BiConsumer; import org.elasticsearch.test.transport.MockTransportService; | import java.util.function.*; import org.elasticsearch.test.transport.*; | [
"java.util",
"org.elasticsearch.test"
] | java.util; org.elasticsearch.test; | 522,376 |
/**
 * Checks whether the hash table reduces the number of entries by at least
 * the minReductionHashAggr factor; if not, flushes it and switches the
 * operator to unsorted streaming mode.
 *
 * @throws HiveException if flushing the hash table fails
 */
private void checkHashModeEfficiency() throws HiveException {
    // Only re-evaluate after enough rows have been processed since the
    // previous check.
    if (lastModeCheckRowCount <= numRowsCompareHashAggr) {
        return;
    }
    lastModeCheckRowCount = 0;
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("checkHashModeEfficiency: HT:%d RC:%d MIN:%d",
            numEntriesHashTable, sumBatchSize, (long)(sumBatchSize * minReductionHashAggr)));
    }
    // Too many distinct keys relative to the input row count means hash
    // aggregation is not paying off; fall back to streaming.
    if (numEntriesHashTable > sumBatchSize * minReductionHashAggr) {
        flush(true);
        changeToUnsortedStreamingMode();
    }
}
}
private class ProcessingModeUnsortedStreaming extends ProcessingModeBase {
private VectorAggregationBufferRow currentStreamingAggregators;
private VectorHashKeyWrapper streamingKey;
private final VectorHashKeyWrapper[] keysToFlush =
new VectorHashKeyWrapper[VectorizedRowBatch.DEFAULT_SIZE];
private final VectorAggregationBufferRow[] rowsToFlush =
new VectorAggregationBufferRow[VectorizedRowBatch.DEFAULT_SIZE];
private VectorUtilBatchObjectPool<VectorAggregationBufferRow>
streamAggregationBufferRowPool; | void function() throws HiveException { if (lastModeCheckRowCount > numRowsCompareHashAggr) { lastModeCheckRowCount = 0; if (LOG.isDebugEnabled()) { LOG.debug(String.format(STR, numEntriesHashTable, sumBatchSize, (long)(sumBatchSize * minReductionHashAggr))); } if (numEntriesHashTable > sumBatchSize * minReductionHashAggr) { flush(true); changeToUnsortedStreamingMode(); } } } } private class ProcessingModeUnsortedStreaming extends ProcessingModeBase { private VectorAggregationBufferRow currentStreamingAggregators; private VectorHashKeyWrapper streamingKey; private final VectorHashKeyWrapper[] keysToFlush = new VectorHashKeyWrapper[VectorizedRowBatch.DEFAULT_SIZE]; private final VectorAggregationBufferRow[] rowsToFlush = new VectorAggregationBufferRow[VectorizedRowBatch.DEFAULT_SIZE]; private VectorUtilBatchObjectPool<VectorAggregationBufferRow> streamAggregationBufferRowPool; | /**
* Checks if the HT reduces the number of entries by at least minReductionHashAggr factor
* @throws HiveException
*/ | Checks if the HT reduces the number of entries by at least minReductionHashAggr factor | checkHashModeEfficiency | {
"repo_name": "WANdisco/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java",
"license": "apache-2.0",
"size": 34920
} | [
"org.apache.hadoop.hive.ql.metadata.HiveException"
] | import org.apache.hadoop.hive.ql.metadata.HiveException; | import org.apache.hadoop.hive.ql.metadata.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,700,531 |
/**
 * "Clones" this conversation, creating a new conversation object which
 * shares the same underlying physical link.
 *
 * @param receiveListener the receive listener to associate with the cloned
 *                        conversation
 * @return the "cloned" conversation
 * @throws SIResourceException if the clone cannot be created
 */
Conversation cloneConversation(ConversationReceiveListener receiveListener) // F173772
    throws SIResourceException;
* "Clones" this conversation, creating a new conversation object which
* shares the same underlying physical link.
* @param receiveListener The receive listener to associated with the cloned
* conversation.
* @return Conversation The "cloned" conversation.
*
* @throws SIResourceException
*/ | "Clones" this conversation, creating a new conversation object which shares the same underlying physical link | cloneConversation | {
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.messaging.comms.client/src/com/ibm/ws/sib/jfapchannel/Conversation.java",
"license": "epl-1.0",
"size": 22513
} | [
"com.ibm.websphere.sib.exception.SIResourceException"
] | import com.ibm.websphere.sib.exception.SIResourceException; | import com.ibm.websphere.sib.exception.*; | [
"com.ibm.websphere"
] | com.ibm.websphere; | 2,639,214 |
@Test
public void test232RunPropagationBeforeInterval() throws Exception {
final String TEST_NAME = "test235RunPropagationAfterInterval";
displayTestTitle(TEST_NAME);
// GIVEN
Task task = createTask(TEST_NAME);
OperationResult result = task.getResult();
// WHEN
displayWhen(TEST_NAME);
runPropagation();
// THEN
displayThen(TEST_NAME);
assertAccountWillAfterChangePasswordAndEnable(TEST_NAME);
} | void function() throws Exception { final String TEST_NAME = STR; displayTestTitle(TEST_NAME); Task task = createTask(TEST_NAME); OperationResult result = task.getResult(); displayWhen(TEST_NAME); runPropagation(); displayThen(TEST_NAME); assertAccountWillAfterChangePasswordAndEnable(TEST_NAME); } | /**
* Run propagation before the interval is over. Nothing should happen.
*/ | Run propagation before the interval is over. Nothing should happen | test232RunPropagationBeforeInterval | {
"repo_name": "bshp/midPoint",
"path": "model/model-intest/src/test/java/com/evolveum/midpoint/model/intest/manual/AbstractGroupingManualResourceTest.java",
"license": "apache-2.0",
"size": 52429
} | [
"com.evolveum.midpoint.schema.result.OperationResult",
"com.evolveum.midpoint.task.api.Task"
] | import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.task.api.Task; | import com.evolveum.midpoint.schema.result.*; import com.evolveum.midpoint.task.api.*; | [
"com.evolveum.midpoint"
] | com.evolveum.midpoint; | 33,879 |
@SuppressWarnings("unchecked")
private <Request extends TransportRequest,
Response extends TransportResponse> TransportAction<Request, Response> transportAction(ActionType<Response> action) {
if (actions == null) {
throw new IllegalStateException("NodeClient has not been initialized");
}
TransportAction<Request, Response> transportAction = actions.get(action);
if (transportAction == null) {
throw new IllegalStateException("failed to find action [" + action + "] to execute");
}
return transportAction;
} | @SuppressWarnings(STR) <Request extends TransportRequest, Response extends TransportResponse> TransportAction<Request, Response> function(ActionType<Response> action) { if (actions == null) { throw new IllegalStateException(STR); } TransportAction<Request, Response> transportAction = actions.get(action); if (transportAction == null) { throw new IllegalStateException(STR + action + STR); } return transportAction; } | /**
* Get the {@link TransportAction} for an {@link ActionType}, throwing exceptions if the action isn't available.
*/ | Get the <code>TransportAction</code> for an <code>ActionType</code>, throwing exceptions if the action isn't available | transportAction | {
"repo_name": "EvilMcJerkface/crate",
"path": "server/src/main/java/org/elasticsearch/client/node/NodeClient.java",
"license": "apache-2.0",
"size": 3733
} | [
"org.elasticsearch.action.ActionType",
"org.elasticsearch.action.support.TransportAction",
"org.elasticsearch.transport.TransportRequest",
"org.elasticsearch.transport.TransportResponse"
] | import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportResponse; | import org.elasticsearch.action.*; import org.elasticsearch.action.support.*; import org.elasticsearch.transport.*; | [
"org.elasticsearch.action",
"org.elasticsearch.transport"
] | org.elasticsearch.action; org.elasticsearch.transport; | 2,259,913 |
@Test
public void testECOWithFormB29() {
LocalTime eventStartTime = new LocalTime(16, 30, 0);
LocalTime eventStopTime = new LocalTime(17, 50, 0);
LocalTime formStartTime = new LocalTime(16, 35, 0);
LocalTime formStopTime = new LocalTime(17, 45, 0);
LocalTime absenceTime = new LocalTime(16, 28, 0);
absenceWithClassConflictFormHelper(eventStartTime, eventStopTime,
formStartTime, formStopTime, absenceTime,
Absence.Type.EarlyCheckOut, Absence.Status.Pending);
} | void function() { LocalTime eventStartTime = new LocalTime(16, 30, 0); LocalTime eventStopTime = new LocalTime(17, 50, 0); LocalTime formStartTime = new LocalTime(16, 35, 0); LocalTime formStopTime = new LocalTime(17, 45, 0); LocalTime absenceTime = new LocalTime(16, 28, 0); absenceWithClassConflictFormHelper(eventStartTime, eventStopTime, formStartTime, formStopTime, absenceTime, Absence.Type.EarlyCheckOut, Absence.Status.Pending); } | /**
* class times within event but buffer eclipses, tardy in buffer before
* event
*/ | class times within event but buffer eclipses, tardy in buffer before event | testECOWithFormB29 | {
"repo_name": "curtisullerich/attendance",
"path": "src/test/java/edu/iastate/music/marching/attendance/test/model/interact/FormClassConflictSimpleTest.java",
"license": "mit",
"size": 48344
} | [
"edu.iastate.music.marching.attendance.model.store.Absence",
"org.joda.time.LocalTime"
] | import edu.iastate.music.marching.attendance.model.store.Absence; import org.joda.time.LocalTime; | import edu.iastate.music.marching.attendance.model.store.*; import org.joda.time.*; | [
"edu.iastate.music",
"org.joda.time"
] | edu.iastate.music; org.joda.time; | 2,368,353 |
public Button getSelectionButton( Composite group )
{
if ( fButton == null )
{
assertCompositeNotNull( group );
fButton = new Button( group, fButtonStyle );
fButton.setFont( group.getFont( ) );
fButton.setText( fLabelText );
fButton.setEnabled( isEnabled( ) );
fButton.setSelection( fIsSelected );
fButton.addSelectionListener( new SelectionListener( ) { | Button function( Composite group ) { if ( fButton == null ) { assertCompositeNotNull( group ); fButton = new Button( group, fButtonStyle ); fButton.setFont( group.getFont( ) ); fButton.setText( fLabelText ); fButton.setEnabled( isEnabled( ) ); fButton.setSelection( fIsSelected ); fButton.addSelectionListener( new SelectionListener( ) { | /**
* Returns the selection button widget. When called the first time, the
* widget will be created.
*
* @param group
* The parent composite when called the first time, or
* <code>null</code> after.
*/ | Returns the selection button widget. When called the first time, the widget will be created | getSelectionButton | {
"repo_name": "Charling-Huang/birt",
"path": "UI/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/ui/preferences/SelectionButtonDialogField.java",
"license": "epl-1.0",
"size": 6210
} | [
"org.eclipse.swt.events.SelectionListener",
"org.eclipse.swt.widgets.Button",
"org.eclipse.swt.widgets.Composite"
] | import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; | import org.eclipse.swt.events.*; import org.eclipse.swt.widgets.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 570,389 |
public Enumeration<?> getAttributeNames() {
return table.keys();
} | Enumeration<?> function() { return table.keys(); } | /**
* Gets the names of the attributes in the set.
*
* @return the names as an <code>Enumeration</code>
*/ | Gets the names of the attributes in the set | getAttributeNames | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/javax/swing/text/SimpleAttributeSet.java",
"license": "apache-2.0",
"size": 10607
} | [
"java.util.Enumeration"
] | import java.util.Enumeration; | import java.util.*; | [
"java.util"
] | java.util; | 2,615,276 |
ServiceResponse<List<Integer>> getNull() throws ErrorException, IOException; | ServiceResponse<List<Integer>> getNull() throws ErrorException, IOException; | /**
* Get null array value.
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the List<Integer> object wrapped in {@link ServiceResponse} if successful.
*/ | Get null array value | getNull | {
"repo_name": "haocs/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodyarray/Arrays.java",
"license": "mit",
"size": 72234
} | [
"com.microsoft.rest.ServiceResponse",
"java.io.IOException",
"java.util.List"
] | import com.microsoft.rest.ServiceResponse; import java.io.IOException; import java.util.List; | import com.microsoft.rest.*; import java.io.*; import java.util.*; | [
"com.microsoft.rest",
"java.io",
"java.util"
] | com.microsoft.rest; java.io; java.util; | 2,501,102 |
public boolean isDeviceInUse(String deviceId) throws IOException, FedoraClientException {
String query = QUERY_FIND_DEVICE_REFERRERS.replace("${devicePid}", deviceId);
RiSearch search = buildSearch(query);
search.limit(1);
search.stream(true);
List<Item> result = consumeSearch(search.execute(fedora));
return !result.isEmpty();
} | boolean function(String deviceId) throws IOException, FedoraClientException { String query = QUERY_FIND_DEVICE_REFERRERS.replace(STR, deviceId); RiSearch search = buildSearch(query); search.limit(1); search.stream(true); List<Item> result = consumeSearch(search.execute(fedora)); return !result.isEmpty(); } | /**
* Is the device referred with {@code hasDevice} relation by any digital object?
* @param deviceId device PID
* @return {@code true} if it is connected
* @throws IOException
* @throws FedoraClientException
*/ | Is the device referred with hasDevice relation by any digital object | isDeviceInUse | {
"repo_name": "proarc/proarc",
"path": "proarc-common/src/main/java/cz/cas/lib/proarc/common/fedora/SearchView.java",
"license": "gpl-3.0",
"size": 51403
} | [
"com.yourmediashelf.fedora.client.FedoraClientException",
"com.yourmediashelf.fedora.client.request.RiSearch",
"java.io.IOException",
"java.util.List"
] | import com.yourmediashelf.fedora.client.FedoraClientException; import com.yourmediashelf.fedora.client.request.RiSearch; import java.io.IOException; import java.util.List; | import com.yourmediashelf.fedora.client.*; import com.yourmediashelf.fedora.client.request.*; import java.io.*; import java.util.*; | [
"com.yourmediashelf.fedora",
"java.io",
"java.util"
] | com.yourmediashelf.fedora; java.io; java.util; | 1,583,026 |
public void testMath341() throws MathException {
double[] f_coeff = { 3.0, 6.0, -2.0, 1.0 };
double[] g_coeff = { 6.0, -4.0, 3.0 };
double[] h_coeff = { -4.0, 6.0 };
PolynomialFunction f = new PolynomialFunction( f_coeff );
PolynomialFunction g = new PolynomialFunction( g_coeff );
PolynomialFunction h = new PolynomialFunction( h_coeff );
// compare f' = g
assertEquals( f.derivative().value(0.0), g.value(0.0), tolerance );
assertEquals( f.derivative().value(1.0), g.value(1.0), tolerance );
assertEquals( f.derivative().value(100.0), g.value(100.0), tolerance );
assertEquals( f.derivative().value(4.1), g.value(4.1), tolerance );
assertEquals( f.derivative().value(-3.25), g.value(-3.25), tolerance );
// compare g' = h
assertEquals( g.derivative().value(FastMath.PI), h.value(FastMath.PI), tolerance );
assertEquals( g.derivative().value(FastMath.E), h.value(FastMath.E), tolerance );
} | void function() throws MathException { double[] f_coeff = { 3.0, 6.0, -2.0, 1.0 }; double[] g_coeff = { 6.0, -4.0, 3.0 }; double[] h_coeff = { -4.0, 6.0 }; PolynomialFunction f = new PolynomialFunction( f_coeff ); PolynomialFunction g = new PolynomialFunction( g_coeff ); PolynomialFunction h = new PolynomialFunction( h_coeff ); assertEquals( f.derivative().value(0.0), g.value(0.0), tolerance ); assertEquals( f.derivative().value(1.0), g.value(1.0), tolerance ); assertEquals( f.derivative().value(100.0), g.value(100.0), tolerance ); assertEquals( f.derivative().value(4.1), g.value(4.1), tolerance ); assertEquals( f.derivative().value(-3.25), g.value(-3.25), tolerance ); assertEquals( g.derivative().value(FastMath.PI), h.value(FastMath.PI), tolerance ); assertEquals( g.derivative().value(FastMath.E), h.value(FastMath.E), tolerance ); } | /**
* tests the firstDerivative function by comparison
*
* <p>This will test the functions
* <tt>f(x) = x^3 - 2x^2 + 6x + 3, g(x) = 3x^2 - 4x + 6</tt>
* and <tt>h(x) = 6x - 4</tt>
*/ | tests the firstDerivative function by comparison This will test the functions f(x) = x^3 - 2x^2 + 6x + 3, g(x) = 3x^2 - 4x + 6 and h(x) = 6x - 4 | testMath341 | {
"repo_name": "SpoonLabs/astor",
"path": "examples/math_63/src/test/java/org/apache/commons/math/analysis/polynomials/PolynomialFunctionTest.java",
"license": "gpl-2.0",
"size": 10224
} | [
"org.apache.commons.math.MathException",
"org.apache.commons.math.util.FastMath"
] | import org.apache.commons.math.MathException; import org.apache.commons.math.util.FastMath; | import org.apache.commons.math.*; import org.apache.commons.math.util.*; | [
"org.apache.commons"
] | org.apache.commons; | 2,070,124 |
public LocalDateTime getReceivedTime() {
return receivedTime;
} | LocalDateTime function() { return receivedTime; } | /**
* Get time of this JDP packet was received.
*
* @return Received time.
*/ | Get time of this JDP packet was received | getReceivedTime | {
"repo_name": "HeapStats/heapstats",
"path": "analyzer/fx/src/main/java/jp/co/ntt/oss/heapstats/fx/plugin/builtin/jvmlive/jdp/JdpDecoder.java",
"license": "gpl-2.0",
"size": 11724
} | [
"java.time.LocalDateTime"
] | import java.time.LocalDateTime; | import java.time.*; | [
"java.time"
] | java.time; | 121,228 |
@Override
public Node<?> parse(InputStream stream, ConfigurationHandler parser) throws IOException {
return XMLParser.parse(stream);
} | Node<?> function(InputStream stream, ConfigurationHandler parser) throws IOException { return XMLParser.parse(stream); } | /**
* Parse the xml file.
* @param stream The stream to parse.
* @param parser The configuration handler that parses the stream.
* @return The node tree.
* @throws IOException If an I/O-Operation fails.
*/ | Parse the xml file | parse | {
"repo_name": "StuxSoftware/SimpleDev",
"path": "Configuration/src/main/java/net/stuxcrystal/simpledev/configuration/parser/generators/xml/XmlGenerator.java",
"license": "apache-2.0",
"size": 2429
} | [
"java.io.IOException",
"java.io.InputStream",
"net.stuxcrystal.simpledev.configuration.parser.ConfigurationHandler",
"net.stuxcrystal.simpledev.configuration.parser.node.Node"
] | import java.io.IOException; import java.io.InputStream; import net.stuxcrystal.simpledev.configuration.parser.ConfigurationHandler; import net.stuxcrystal.simpledev.configuration.parser.node.Node; | import java.io.*; import net.stuxcrystal.simpledev.configuration.parser.*; import net.stuxcrystal.simpledev.configuration.parser.node.*; | [
"java.io",
"net.stuxcrystal.simpledev"
] | java.io; net.stuxcrystal.simpledev; | 2,524,090 |
public ApplicationGatewayFrontendIpConfiguration withPrivateIpAddress(String privateIpAddress) {
if (this.innerProperties() == null) {
this.innerProperties = new ApplicationGatewayFrontendIpConfigurationPropertiesFormat();
}
this.innerProperties().withPrivateIpAddress(privateIpAddress);
return this;
} | ApplicationGatewayFrontendIpConfiguration function(String privateIpAddress) { if (this.innerProperties() == null) { this.innerProperties = new ApplicationGatewayFrontendIpConfigurationPropertiesFormat(); } this.innerProperties().withPrivateIpAddress(privateIpAddress); return this; } | /**
* Set the privateIpAddress property: PrivateIPAddress of the network interface IP Configuration.
*
* @param privateIpAddress the privateIpAddress value to set.
* @return the ApplicationGatewayFrontendIpConfiguration object itself.
*/ | Set the privateIpAddress property: PrivateIPAddress of the network interface IP Configuration | withPrivateIpAddress | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/models/ApplicationGatewayFrontendIpConfiguration.java",
"license": "mit",
"size": 8314
} | [
"com.azure.resourcemanager.network.fluent.models.ApplicationGatewayFrontendIpConfigurationPropertiesFormat"
] | import com.azure.resourcemanager.network.fluent.models.ApplicationGatewayFrontendIpConfigurationPropertiesFormat; | import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.resourcemanager"
] | com.azure.resourcemanager; | 1,608,451 |
public static ExpectedCondition<Boolean> urlMatches(final String regex) {
return new ExpectedCondition<Boolean>() {
private String currentUrl;
private Pattern pattern;
private Matcher matcher; | static ExpectedCondition<Boolean> function(final String regex) { return new ExpectedCondition<Boolean>() { private String currentUrl; private Pattern pattern; private Matcher matcher; | /**
* Expectation for the URL to match a specific regular expression
*
* @param regex the regular expression that the URL should match
* @return <code>true</code> if the URL matches the specified regular expression
*/ | Expectation for the URL to match a specific regular expression | urlMatches | {
"repo_name": "jabbrwcky/selenium",
"path": "java/client/src/org/openqa/selenium/support/ui/ExpectedConditions.java",
"license": "apache-2.0",
"size": 51640
} | [
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] | import java.util.regex.Matcher; import java.util.regex.Pattern; | import java.util.regex.*; | [
"java.util"
] | java.util; | 2,892,671 |
public static long fieldOffset(Class<?> cls, String fieldName) {
try {
return objectFieldOffset(cls.getDeclaredField(fieldName));
}
catch (NoSuchFieldException e) {
throw new IllegalStateException(e);
}
} | static long function(Class<?> cls, String fieldName) { try { return objectFieldOffset(cls.getDeclaredField(fieldName)); } catch (NoSuchFieldException e) { throw new IllegalStateException(e); } } | /**
* Gets object field offset.
*
* @param cls Object class.
* @param fieldName Field name.
* @return Field offset.
*/ | Gets object field offset | fieldOffset | {
"repo_name": "pperalta/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 314980
} | [
"org.apache.ignite.internal.util.GridUnsafe"
] | import org.apache.ignite.internal.util.GridUnsafe; | import org.apache.ignite.internal.util.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,151,883 |
public Dimension getPreferredSize(JComponent a) {
Dimension returnValue =
((ComponentUI) (uis.elementAt(0))).getPreferredSize(a);
for (int i = 1; i < uis.size(); i++) {
((ComponentUI) (uis.elementAt(i))).getPreferredSize(a);
}
return returnValue;
} | Dimension function(JComponent a) { Dimension returnValue = ((ComponentUI) (uis.elementAt(0))).getPreferredSize(a); for (int i = 1; i < uis.size(); i++) { ((ComponentUI) (uis.elementAt(i))).getPreferredSize(a); } return returnValue; } | /**
* Invokes the <code>getPreferredSize</code> method on each UI handled by this object.
*
* @return the value obtained from the first UI, which is
* the UI obtained from the default <code>LookAndFeel</code>
*/ | Invokes the <code>getPreferredSize</code> method on each UI handled by this object | getPreferredSize | {
"repo_name": "flyzsd/java-code-snippets",
"path": "ibm.jdk8/src/javax/swing/plaf/multi/MultiPanelUI.java",
"license": "mit",
"size": 6886
} | [
"java.awt.Dimension",
"javax.swing.JComponent",
"javax.swing.plaf.ComponentUI"
] | import java.awt.Dimension; import javax.swing.JComponent; import javax.swing.plaf.ComponentUI; | import java.awt.*; import javax.swing.*; import javax.swing.plaf.*; | [
"java.awt",
"javax.swing"
] | java.awt; javax.swing; | 423,054 |
TimeMeasurer timeMeasurer = new TimeMeasurer(1000, 1000);
try {
timeMeasurer.run(new NonLinearTimedRunnable());
fail();
} catch (UserException.DoesNotScaleLinearlyException e) {
}
}
private static class NonLinearTimedRunnable extends ConfiguredBenchmark
implements Supplier<ConfiguredBenchmark> {
private NonLinearTimedRunnable() {
super(new NoOpBenchmark());
} | TimeMeasurer timeMeasurer = new TimeMeasurer(1000, 1000); try { timeMeasurer.run(new NonLinearTimedRunnable()); fail(); } catch (UserException.DoesNotScaleLinearlyException e) { } } private static class NonLinearTimedRunnable extends ConfiguredBenchmark implements Supplier<ConfiguredBenchmark> { private NonLinearTimedRunnable() { super(new NoOpBenchmark()); } | /**
* Test we detect and fail when benchmarks don't scale properly.
* @throws Exception
*/ | Test we detect and fail when benchmarks don't scale properly | testBenchmarkScalesNonLinearly | {
"repo_name": "antigremlin/caliper-fork",
"path": "test/com/google/caliper/CaliperTest.java",
"license": "apache-2.0",
"size": 2516
} | [
"com.google.common.base.Supplier"
] | import com.google.common.base.Supplier; | import com.google.common.base.*; | [
"com.google.common"
] | com.google.common; | 1,180,322 |
public static void load(InputStream inputStream, Scene scene, TaskTracker taskTracker)
throws IOException, IllegalStateException {
int magicNumberLength = DUMP_FORMAT_MAGIC_NUMBER.length;
PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream, magicNumberLength);
byte[] magicNumber = new byte[magicNumberLength];
// If the file starts with the magic number, it is the new format containing a version number
if (magicNumberLength == pushbackInputStream.read(magicNumber, 0, magicNumberLength)
&& Arrays.equals(DUMP_FORMAT_MAGIC_NUMBER, magicNumber)) {
DataInputStream dataInputStream = new DataInputStream(pushbackInputStream);
int dumpVersion = dataInputStream.readInt();
getDumpFormatForVersion(dumpVersion).load(dataInputStream, scene, taskTracker);
} else {
// Old format that is a gzipped stream, the header needs to be pushed back
pushbackInputStream.unread(magicNumber, 0, 4);
DataInputStream dataInputStream = new DataInputStream(new GZIPInputStream(pushbackInputStream));
getDumpFormatForVersion(0).load(dataInputStream, scene, taskTracker);
}
} | static void function(InputStream inputStream, Scene scene, TaskTracker taskTracker) throws IOException, IllegalStateException { int magicNumberLength = DUMP_FORMAT_MAGIC_NUMBER.length; PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream, magicNumberLength); byte[] magicNumber = new byte[magicNumberLength]; if (magicNumberLength == pushbackInputStream.read(magicNumber, 0, magicNumberLength) && Arrays.equals(DUMP_FORMAT_MAGIC_NUMBER, magicNumber)) { DataInputStream dataInputStream = new DataInputStream(pushbackInputStream); int dumpVersion = dataInputStream.readInt(); getDumpFormatForVersion(dumpVersion).load(dataInputStream, scene, taskTracker); } else { pushbackInputStream.unread(magicNumber, 0, 4); DataInputStream dataInputStream = new DataInputStream(new GZIPInputStream(pushbackInputStream)); getDumpFormatForVersion(0).load(dataInputStream, scene, taskTracker); } } | /**
* Load a scene dump from the given file into the scene. This overwrites ssp, renderTime and samples in the scene.
*
* @throws IllegalStateException If the width or height of the scene do not match the width or height in the dump.
* @throws IOException If the dump format is unknown or file access fails
*/ | Load a scene dump from the given file into the scene. This overwrites ssp, renderTime and samples in the scene | load | {
"repo_name": "llbit/chunky",
"path": "chunky/src/java/se/llbit/chunky/renderer/renderdump/RenderDump.java",
"license": "gpl-3.0",
"size": 5317
} | [
"java.io.DataInputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.PushbackInputStream",
"java.util.Arrays",
"java.util.zip.GZIPInputStream",
"se.llbit.chunky.renderer.scene.Scene",
"se.llbit.util.TaskTracker"
] | import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.io.PushbackInputStream; import java.util.Arrays; import java.util.zip.GZIPInputStream; import se.llbit.chunky.renderer.scene.Scene; import se.llbit.util.TaskTracker; | import java.io.*; import java.util.*; import java.util.zip.*; import se.llbit.chunky.renderer.scene.*; import se.llbit.util.*; | [
"java.io",
"java.util",
"se.llbit.chunky",
"se.llbit.util"
] | java.io; java.util; se.llbit.chunky; se.llbit.util; | 279,551 |
public void setSelectedText(String selectedText)
{
if(!editable)
{
throw new InternalError("Text component"
+ " read only");
}
document.beginCompoundEdit();
try
{
if(rectSelect)
{
Element map = document.getDefaultRootElement();
int start = selectionStart - map.getElement(selectionStartLine)
.getStartOffset();
int end = selectionEnd - map.getElement(selectionEndLine)
.getStartOffset();
// Certain rectangles satisfy this condition...
if(end < start)
{
int tmp = end;
end = start;
start = tmp;
}
int lastNewline = 0;
int currNewline = 0;
for(int i = selectionStartLine; i <= selectionEndLine; i++)
{
Element lineElement = map.getElement(i);
int lineStart = lineElement.getStartOffset();
int lineEnd = lineElement.getEndOffset() - 1;
int rectStart = Math.min(lineEnd,lineStart + start);
document.remove(rectStart,Math.min(lineEnd - rectStart,
end - start));
if(selectedText == null)
continue;
currNewline = selectedText.indexOf('\n',lastNewline);
if(currNewline == -1)
currNewline = selectedText.length();
document.insertString(rectStart,selectedText
.substring(lastNewline,currNewline),null);
lastNewline = Math.min(selectedText.length(),
currNewline + 1);
}
if(selectedText != null &&
currNewline != selectedText.length())
{
int offset = map.getElement(selectionEndLine)
.getEndOffset() - 1;
document.insertString(offset,"\n",null);
document.insertString(offset + 1,selectedText
.substring(currNewline + 1),null);
}
}
else
{
document.remove(selectionStart,
selectionEnd - selectionStart);
if(selectedText != null)
{
document.insertString(selectionStart,
selectedText,null);
}
}
}
catch(BadLocationException bl)
{
bl.printStackTrace();
throw new InternalError("Cannot replace"
+ " selection");
}
// No matter what happends... stops us from leaving document
// in a bad state
finally
{
document.endCompoundEdit();
}
setCaretPosition(selectionEnd);
} | void function(String selectedText) { if(!editable) { throw new InternalError(STR + STR); } document.beginCompoundEdit(); try { if(rectSelect) { Element map = document.getDefaultRootElement(); int start = selectionStart - map.getElement(selectionStartLine) .getStartOffset(); int end = selectionEnd - map.getElement(selectionEndLine) .getStartOffset(); if(end < start) { int tmp = end; end = start; start = tmp; } int lastNewline = 0; int currNewline = 0; for(int i = selectionStartLine; i <= selectionEndLine; i++) { Element lineElement = map.getElement(i); int lineStart = lineElement.getStartOffset(); int lineEnd = lineElement.getEndOffset() - 1; int rectStart = Math.min(lineEnd,lineStart + start); document.remove(rectStart,Math.min(lineEnd - rectStart, end - start)); if(selectedText == null) continue; currNewline = selectedText.indexOf('\n',lastNewline); if(currNewline == -1) currNewline = selectedText.length(); document.insertString(rectStart,selectedText .substring(lastNewline,currNewline),null); lastNewline = Math.min(selectedText.length(), currNewline + 1); } if(selectedText != null && currNewline != selectedText.length()) { int offset = map.getElement(selectionEndLine) .getEndOffset() - 1; document.insertString(offset,"\n",null); document.insertString(offset + 1,selectedText .substring(currNewline + 1),null); } } else { document.remove(selectionStart, selectionEnd - selectionStart); if(selectedText != null) { document.insertString(selectionStart, selectedText,null); } } } catch(BadLocationException bl) { bl.printStackTrace(); throw new InternalError(STR + STR); } finally { document.endCompoundEdit(); } setCaretPosition(selectionEnd); } | /**
* Replaces the selection with the specified text.
* @param selectedText The replacement text for the selection
*/ | Replaces the selection with the specified text | setSelectedText | {
"repo_name": "natetrue/ReplicatorG",
"path": "src/replicatorg/app/syntax/JEditTextArea.java",
"license": "gpl-2.0",
"size": 60498
} | [
"javax.swing.text.BadLocationException",
"javax.swing.text.Element"
] | import javax.swing.text.BadLocationException; import javax.swing.text.Element; | import javax.swing.text.*; | [
"javax.swing"
] | javax.swing; | 1,317,109 |
@Override
public View getView(final int position, View convertView, ViewGroup parent) {
ViewHolder holder;
View v = convertView;
if (v == null || (((ViewHolder)v.getTag()).listmode != mListMode)) {
LayoutInflater inflater = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
if (!mListMode)
v = inflater.inflate(R.layout.video_grid_card, parent, false);
else
v = inflater.inflate(R.layout.video_list_card, parent, false);
holder = new ViewHolder();
holder.thumbnail = (ImageView) v.findViewById(R.id.ml_item_thumbnail);
holder.title = (TextView) v.findViewById(R.id.ml_item_title);
holder.time = (TextView) v.findViewById(R.id.ml_item_time);
holder.resolution = (TextView) v.findViewById(R.id.ml_item_resolution);
holder.progress = (ProgressBar) v.findViewById(R.id.ml_item_progress);
holder.more = (ImageView) v.findViewById(R.id.item_more);
holder.listmode = mListMode;
v.setTag(holder);
v.setLayoutParams(new GridView.LayoutParams(v.getLayoutParams().width, v.getLayoutParams().height));
} else {
holder = (ViewHolder) v.getTag();
}
if (position >= getCount() || position < 0)
return v; | View function(final int position, View convertView, ViewGroup parent) { ViewHolder holder; View v = convertView; if (v == null (((ViewHolder)v.getTag()).listmode != mListMode)) { LayoutInflater inflater = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE); if (!mListMode) v = inflater.inflate(R.layout.video_grid_card, parent, false); else v = inflater.inflate(R.layout.video_list_card, parent, false); holder = new ViewHolder(); holder.thumbnail = (ImageView) v.findViewById(R.id.ml_item_thumbnail); holder.title = (TextView) v.findViewById(R.id.ml_item_title); holder.time = (TextView) v.findViewById(R.id.ml_item_time); holder.resolution = (TextView) v.findViewById(R.id.ml_item_resolution); holder.progress = (ProgressBar) v.findViewById(R.id.ml_item_progress); holder.more = (ImageView) v.findViewById(R.id.item_more); holder.listmode = mListMode; v.setTag(holder); v.setLayoutParams(new GridView.LayoutParams(v.getLayoutParams().width, v.getLayoutParams().height)); } else { holder = (ViewHolder) v.getTag(); } if (position >= getCount() position < 0) return v; | /**
* Display the view of a file browser item.
*/ | Display the view of a file browser item | getView | {
"repo_name": "binsys/vlc-android",
"path": "vlc-android/src/org/videolan/vlc/gui/video/VideoListAdapter.java",
"license": "gpl-2.0",
"size": 10340
} | [
"android.content.Context",
"android.view.LayoutInflater",
"android.view.View",
"android.view.ViewGroup",
"android.widget.GridView",
"android.widget.ImageView",
"android.widget.ProgressBar",
"android.widget.TextView"
] | import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.GridView; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.TextView; | import android.content.*; import android.view.*; import android.widget.*; | [
"android.content",
"android.view",
"android.widget"
] | android.content; android.view; android.widget; | 1,776,310 |
@Deprecated
public SearchRequestBuilder addPartialField(String name, @Nullable String include, @Nullable String exclude) {
sourceBuilder().partialField(name, include, exclude);
return this;
} | SearchRequestBuilder function(String name, @Nullable String include, @Nullable String exclude) { sourceBuilder().partialField(name, include, exclude); return this; } | /**
* Adds a partial field based on _source, with an "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @deprecated since 1.0.0
* use {@link org.elasticsearch.action.search.SearchRequestBuilder#setFetchSource(String, String)} instead
*
* @param name The name of the field
* @param include An optional include (optionally wildcarded) pattern from _source
* @param exclude An optional exclude (optionally wildcarded) pattern from _source
*/ | Adds a partial field based on _source, with an "include" and/or "exclude" set which can include simple wildcard elements | addPartialField | {
"repo_name": "gmile/elasticsearch",
"path": "src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java",
"license": "apache-2.0",
"size": 38410
} | [
"org.elasticsearch.common.Nullable"
] | import org.elasticsearch.common.Nullable; | import org.elasticsearch.common.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 2,683,092 |
protected void insertContentSecurityPolicyHeader(final HttpServletResponse httpServletResponse,
final HttpServletRequest httpServletRequest,
final String contentSecurityPolicy) {
val uri = httpServletRequest.getRequestURI();
httpServletResponse.addHeader("Content-Security-Policy", contentSecurityPolicy);
LOGGER.trace("Adding Content-Security-Policy response header [{}] for [{}]", contentSecurityPolicy, uri);
} | void function(final HttpServletResponse httpServletResponse, final HttpServletRequest httpServletRequest, final String contentSecurityPolicy) { val uri = httpServletRequest.getRequestURI(); httpServletResponse.addHeader(STR, contentSecurityPolicy); LOGGER.trace(STR, contentSecurityPolicy, uri); } | /**
* Insert content security policy header.
*
* @param httpServletResponse the http servlet response
* @param httpServletRequest the http servlet request
* @param contentSecurityPolicy the content security policy
*/ | Insert content security policy header | insertContentSecurityPolicyHeader | {
"repo_name": "Jasig/cas",
"path": "core/cas-server-core-web-api/src/main/java/org/apereo/cas/web/support/filters/ResponseHeadersEnforcementFilter.java",
"license": "apache-2.0",
"size": 20172
} | [
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import javax.servlet.http.*; | [
"javax.servlet"
] | javax.servlet; | 534,209 |
public static void append(StringBuilder pSb, Object pArg) {
try {
append((Appendable) pSb, pArg);
} catch (IOException e) {
throw Exceptions.newUncheckedIOException(e);
}
}
| static void function(StringBuilder pSb, Object pArg) { try { append((Appendable) pSb, pArg); } catch (IOException e) { throw Exceptions.newUncheckedIOException(e); } } | /** Writes the given object as a string to the given {@link StringBuilder}.
* @param pSb The target stream.
* @param pArg The object being written.
* @throws UncheckedIOException The writing failed.
*/ | Writes the given object as a string to the given <code>StringBuilder</code> | append | {
"repo_name": "jochenw/afw",
"path": "afw-core/src/main/java/com/github/jochenw/afw/core/util/Strings.java",
"license": "apache-2.0",
"size": 27279
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,042,667 |
@NotNull
MapRecord readMap(@NotNull RecordId id); | MapRecord readMap(@NotNull RecordId id); | /**
* Read the map identified by {@code id}.
* @throws SegmentNotFoundException see class comment for exception semantics
*/ | Read the map identified by id | readMap | {
"repo_name": "trekawek/jackrabbit-oak",
"path": "oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentReader.java",
"license": "apache-2.0",
"size": 3184
} | [
"org.jetbrains.annotations.NotNull"
] | import org.jetbrains.annotations.NotNull; | import org.jetbrains.annotations.*; | [
"org.jetbrains.annotations"
] | org.jetbrains.annotations; | 1,073,803 |
WorkloadGroup create(Context context);
} | WorkloadGroup create(Context context); } | /**
* Executes the create request.
*
* @param context The context to associate with this operation.
* @return the created resource.
*/ | Executes the create request | create | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/synapse/azure-resourcemanager-synapse/src/main/java/com/azure/resourcemanager/synapse/models/WorkloadGroup.java",
"license": "mit",
"size": 12066
} | [
"com.azure.core.util.Context"
] | import com.azure.core.util.Context; | import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 1,494,844 |
@Override
public void reset() {
this.clear(false); // Clear all current batch holders and hash table (i.e. free their memory)
freeIndex = 0; // all batch holders are gone
// reallocate batch holders, and the hash table to the original size
batchHolders = new ArrayList<BatchHolder>();
prevIndexSize = 0;
currentIndexSize = 0;
totalIndexSize = 0;
startIndices = allocMetadataVector(originalTableSize, EMPTY_SLOT);
} | void function() { this.clear(false); freeIndex = 0; batchHolders = new ArrayList<BatchHolder>(); prevIndexSize = 0; currentIndexSize = 0; totalIndexSize = 0; startIndices = allocMetadataVector(originalTableSize, EMPTY_SLOT); } | /**
* Reinit the hash table to its original size, and clear up all its prior batch holder
*
*/ | Reinit the hash table to its original size, and clear up all its prior batch holder | reset | {
"repo_name": "Ben-Zvi/drill",
"path": "exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java",
"license": "apache-2.0",
"size": 39347
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 1,354,043 |
public DataNode setEndnote(IDataset endnote); | DataNode function(IDataset endnote); | /**
* Bibliographic reference data in EndNote format.
* <p>
* <b>Type:</b> NX_CHAR
* </p>
*
* @param endnote the endnote
*/ | Bibliographic reference data in EndNote format. Type: NX_CHAR | setEndnote | {
"repo_name": "xen-0/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXcite.java",
"license": "epl-1.0",
"size": 5339
} | [
"org.eclipse.dawnsci.analysis.api.tree.DataNode",
"org.eclipse.january.dataset.IDataset"
] | import org.eclipse.dawnsci.analysis.api.tree.DataNode; import org.eclipse.january.dataset.IDataset; | import org.eclipse.dawnsci.analysis.api.tree.*; import org.eclipse.january.dataset.*; | [
"org.eclipse.dawnsci",
"org.eclipse.january"
] | org.eclipse.dawnsci; org.eclipse.january; | 491,558 |
super.onCreate(savedInstanceState);
// Set the activity to full screen mode.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_credits);
// Calculate screen metrics
DisplayMetrics dm = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(dm);
mScreenWidth = dm.widthPixels;
mScreenHeight = dm.heightPixels;
// Attach the views to their corresponding resource ids.
mLlCreditsBody = (RelativeLayout) findViewById(R.id.llCreditsBody);
mSvCreditsBody = (ScrollView) findViewById(R.id.svCreditsBody);
// Attach the images to the LinearLayout that the ScrollView contains.
addImagesToView(); | super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); setContentView(R.layout.activity_credits); DisplayMetrics dm = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(dm); mScreenWidth = dm.widthPixels; mScreenHeight = dm.heightPixels; mLlCreditsBody = (RelativeLayout) findViewById(R.id.llCreditsBody); mSvCreditsBody = (ScrollView) findViewById(R.id.svCreditsBody); addImagesToView(); | /**
* Implements onCreate().
*/ | Implements onCreate() | onCreate | {
"repo_name": "byronsanchez/coloring-book-android",
"path": "src/net/globide/coloring_book_08/CreditsActivity.java",
"license": "gpl-2.0",
"size": 13942
} | [
"android.util.DisplayMetrics",
"android.view.Window",
"android.view.WindowManager",
"android.widget.RelativeLayout",
"android.widget.ScrollView"
] | import android.util.DisplayMetrics; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import android.widget.ScrollView; | import android.util.*; import android.view.*; import android.widget.*; | [
"android.util",
"android.view",
"android.widget"
] | android.util; android.view; android.widget; | 2,865,807 |
private ResourceInstance createResource(String repoId) {
final Map<Resource.Type, String> mapIds = new HashMap<Resource.Type, String>();
mapIds.putAll(parentKeyProperties);
mapIds.put(Resource.Type.Repository, repoId);
return createResource(Resource.Type.Repository, mapIds);
} | ResourceInstance function(String repoId) { final Map<Resource.Type, String> mapIds = new HashMap<Resource.Type, String>(); mapIds.putAll(parentKeyProperties); mapIds.put(Resource.Type.Repository, repoId); return createResource(Resource.Type.Repository, mapIds); } | /**
* Create a repository resource instance.
*
* @param repoId repository id
*
* @return a repository instance
*/ | Create a repository resource instance | createResource | {
"repo_name": "zouzhberk/ambaridemo",
"path": "demo-server/src/main/java/org/apache/ambari/server/api/services/RepositoryService.java",
"license": "apache-2.0",
"size": 5365
} | [
"java.util.HashMap",
"java.util.Map",
"org.apache.ambari.server.api.resources.ResourceInstance",
"org.apache.ambari.server.controller.spi.Resource"
] | import java.util.HashMap; import java.util.Map; import org.apache.ambari.server.api.resources.ResourceInstance; import org.apache.ambari.server.controller.spi.Resource; | import java.util.*; import org.apache.ambari.server.api.resources.*; import org.apache.ambari.server.controller.spi.*; | [
"java.util",
"org.apache.ambari"
] | java.util; org.apache.ambari; | 1,747,678 |
static native int getpgid(int pid) throws IOException; | static native int getpgid(int pid) throws IOException; | /**
* Invokes the linux syscall "getpgid"
*
* @param pid pid to query
* @return pgid of pid in question
* @throws IOException on error
*/ | Invokes the linux syscall "getpgid" | getpgid | {
"repo_name": "szpaddy/android-4.1.2_r2-core",
"path": "java/com/android/internal/os/ZygoteInit.java",
"license": "apache-2.0",
"size": 27453
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,673,581 |
@Path("forgot-credentials")
public Object forgotCredentialsPage() {
AuthorizationEndpoint endpoint = new AuthorizationEndpoint(realm, event);
ResteasyProviderFactory.getInstance().injectProperties(endpoint);
return endpoint.forgotCredentials();
} | @Path(STR) Object function() { AuthorizationEndpoint endpoint = new AuthorizationEndpoint(realm, event); ResteasyProviderFactory.getInstance().injectProperties(endpoint); return endpoint.forgotCredentials(); } | /**
* Forgot-Credentials endpoint
*/ | Forgot-Credentials endpoint | forgotCredentialsPage | {
"repo_name": "brat000012001/keycloak",
"path": "services/src/main/java/org/keycloak/protocol/oidc/OIDCLoginProtocolService.java",
"license": "apache-2.0",
"size": 10546
} | [
"javax.ws.rs.Path",
"org.jboss.resteasy.spi.ResteasyProviderFactory",
"org.keycloak.protocol.oidc.endpoints.AuthorizationEndpoint"
] | import javax.ws.rs.Path; import org.jboss.resteasy.spi.ResteasyProviderFactory; import org.keycloak.protocol.oidc.endpoints.AuthorizationEndpoint; | import javax.ws.rs.*; import org.jboss.resteasy.spi.*; import org.keycloak.protocol.oidc.endpoints.*; | [
"javax.ws",
"org.jboss.resteasy",
"org.keycloak.protocol"
] | javax.ws; org.jboss.resteasy; org.keycloak.protocol; | 2,314,608 |
public FrdRelatedFraudCase mergeFrdRelatedFraudCase(FrdRelatedFraudCase frdRelatedFraudCase); | FrdRelatedFraudCase function(FrdRelatedFraudCase frdRelatedFraudCase); | /**
* mergeFrdRelatedFraudCase - merges a FrdRelatedFraudCase
*
* @param frdRelatedFraudCase
* @return the merged FrdRelatedFraudCase
*/ | mergeFrdRelatedFraudCase - merges a FrdRelatedFraudCase | mergeFrdRelatedFraudCase | {
"repo_name": "yauritux/venice-legacy",
"path": "Venice/Venice-Interface-Model/src/main/java/com/gdn/venice/facade/FrdRelatedFraudCaseSessionEJBRemote.java",
"license": "apache-2.0",
"size": 2842
} | [
"com.gdn.venice.persistence.FrdRelatedFraudCase"
] | import com.gdn.venice.persistence.FrdRelatedFraudCase; | import com.gdn.venice.persistence.*; | [
"com.gdn.venice"
] | com.gdn.venice; | 1,473,228 |
public boolean isDone() {
for (AppliedPTransform<?, ?, ?> transform : graph.getPrimitiveTransforms()) {
if (!isDone(transform)) {
return false;
}
}
return true;
} | boolean function() { for (AppliedPTransform<?, ?, ?> transform : graph.getPrimitiveTransforms()) { if (!isDone(transform)) { return false; } } return true; } | /**
* Returns true if all steps are done.
*/ | Returns true if all steps are done | isDone | {
"repo_name": "chamikaramj/beam",
"path": "runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java",
"license": "apache-2.0",
"size": 17565
} | [
"org.apache.beam.sdk.transforms.AppliedPTransform"
] | import org.apache.beam.sdk.transforms.AppliedPTransform; | import org.apache.beam.sdk.transforms.*; | [
"org.apache.beam"
] | org.apache.beam; | 2,768,344 |
protected synchronized void initContext(ServletContext context) throws CmsInitException {
m_gwtServiceContexts = new HashMap<String, CmsGwtServiceContext>();
// automatic servlet container recognition and specific behavior:
CmsServletContainerSettings servletContainerSettings = new CmsServletContainerSettings(context);
getSystemInfo().init(servletContainerSettings);
// Collect the configurations
CmsParameterConfiguration configuration;
try {
configuration = new CmsParameterConfiguration(getSystemInfo().getConfigurationFileRfsPath());
} catch (Exception e) {
throw new CmsInitException(Messages.get().container(
Messages.ERR_CRITICAL_INIT_PROPFILE_1,
getSystemInfo().getConfigurationFileRfsPath()), e);
}
String throwException = configuration.getString("servlet.exception.enabled", "auto");
if (!throwException.equals("auto")) {
// set the parameter is not automatic, the rest of the servlet container dependent parameters
// will be set when reading the system configuration, if not set to auto
boolean throwExc = Boolean.valueOf(throwException).booleanValue();
getSystemInfo().getServletContainerSettings().setServletThrowsException(throwExc);
}
// check if the wizard is enabled, if so stop initialization
if (configuration.getBoolean("wizard.enabled", true)) {
throw new CmsInitException(Messages.get().container(Messages.ERR_CRITICAL_INIT_WIZARD_0));
}
// output startup message and copyright to STDERR
System.err.println(Messages.get().getBundle().key(
Messages.LOG_STARTUP_CONSOLE_NOTE_2,
OpenCms.getSystemInfo().getVersionNumber(),
getSystemInfo().getWebApplicationName()));
for (int i = 0; i < Messages.COPYRIGHT_BY_ALKACON.length; i++) {
System.err.println(Messages.COPYRIGHT_BY_ALKACON[i]);
}
System.err.println();
// output startup message to log file
if (CmsLog.INIT.isInfoEnabled()) {
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0));
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0));
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0));
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0));
for (int i = 0; i < Messages.COPYRIGHT_BY_ALKACON.length; i++) {
CmsLog.INIT.info(". " + Messages.COPYRIGHT_BY_ALKACON[i]);
}
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_LINE_0));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_STARTUP_TIME_1,
new Date(System.currentTimeMillis())));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_OPENCMS_VERSION_1,
OpenCms.getSystemInfo().getVersionNumber()));
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SERVLET_CONTAINER_1, context.getServerInfo()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_WEBAPP_NAME_1,
getSystemInfo().getWebApplicationName()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_SERVLET_PATH_1,
getSystemInfo().getServletPath()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_OPENCMS_CONTEXT_1,
getSystemInfo().getOpenCmsContext()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_WEBINF_PATH_1,
getSystemInfo().getWebInfRfsPath()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_PROPERTY_FILE_1,
getSystemInfo().getConfigurationFileRfsPath()));
CmsLog.INIT.info(Messages.get().getBundle().key(
Messages.INIT_LOG_FILE_1,
getSystemInfo().getLogFileRfsPath()));
}
// initialize the configuration
initConfiguration(configuration);
} | synchronized void function(ServletContext context) throws CmsInitException { m_gwtServiceContexts = new HashMap<String, CmsGwtServiceContext>(); CmsServletContainerSettings servletContainerSettings = new CmsServletContainerSettings(context); getSystemInfo().init(servletContainerSettings); CmsParameterConfiguration configuration; try { configuration = new CmsParameterConfiguration(getSystemInfo().getConfigurationFileRfsPath()); } catch (Exception e) { throw new CmsInitException(Messages.get().container( Messages.ERR_CRITICAL_INIT_PROPFILE_1, getSystemInfo().getConfigurationFileRfsPath()), e); } String throwException = configuration.getString(STR, "auto"); if (!throwException.equals("auto")) { boolean throwExc = Boolean.valueOf(throwException).booleanValue(); getSystemInfo().getServletContainerSettings().setServletThrowsException(throwExc); } if (configuration.getBoolean(STR, true)) { throw new CmsInitException(Messages.get().container(Messages.ERR_CRITICAL_INIT_WIZARD_0)); } System.err.println(Messages.get().getBundle().key( Messages.LOG_STARTUP_CONSOLE_NOTE_2, OpenCms.getSystemInfo().getVersionNumber(), getSystemInfo().getWebApplicationName())); for (int i = 0; i < Messages.COPYRIGHT_BY_ALKACON.length; i++) { System.err.println(Messages.COPYRIGHT_BY_ALKACON[i]); } System.err.println(); if (CmsLog.INIT.isInfoEnabled()) { CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0)); CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0)); CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0)); CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_DOT_0)); for (int i = 0; i < Messages.COPYRIGHT_BY_ALKACON.length; i++) { CmsLog.INIT.info(STR + Messages.COPYRIGHT_BY_ALKACON[i]); } CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_LINE_0)); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_STARTUP_TIME_1, new Date(System.currentTimeMillis()))); CmsLog.INIT.info(Messages.get().getBundle().key( 
Messages.INIT_OPENCMS_VERSION_1, OpenCms.getSystemInfo().getVersionNumber())); CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SERVLET_CONTAINER_1, context.getServerInfo())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_WEBAPP_NAME_1, getSystemInfo().getWebApplicationName())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_SERVLET_PATH_1, getSystemInfo().getServletPath())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_OPENCMS_CONTEXT_1, getSystemInfo().getOpenCmsContext())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_WEBINF_PATH_1, getSystemInfo().getWebInfRfsPath())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_PROPERTY_FILE_1, getSystemInfo().getConfigurationFileRfsPath())); CmsLog.INIT.info(Messages.get().getBundle().key( Messages.INIT_LOG_FILE_1, getSystemInfo().getLogFileRfsPath())); } initConfiguration(configuration); } | /**
* Initialization of the OpenCms runtime environment.<p>
*
* The connection information for the database is read
* from the <code>opencms.properties</code> configuration file and all
* driver manager are initialized via the initializer,
* which usually will be an instance of a <code>OpenCms</code> class.
*
* @param context configuration of OpenCms from <code>web.xml</code>
* @throws CmsInitException in case OpenCms can not be initialized
*/ | Initialization of the OpenCms runtime environment. The connection information for the database is read from the <code>opencms.properties</code> configuration file and all driver manager are initialized via the initializer, which usually will be an instance of a <code>OpenCms</code> class | initContext | {
"repo_name": "serrapos/opencms-core",
"path": "src/org/opencms/main/OpenCmsCore.java",
"license": "lgpl-2.1",
"size": 97900
} | [
"java.util.Date",
"java.util.HashMap",
"javax.servlet.ServletContext",
"org.opencms.configuration.CmsParameterConfiguration",
"org.opencms.gwt.CmsGwtServiceContext"
] | import java.util.Date; import java.util.HashMap; import javax.servlet.ServletContext; import org.opencms.configuration.CmsParameterConfiguration; import org.opencms.gwt.CmsGwtServiceContext; | import java.util.*; import javax.servlet.*; import org.opencms.configuration.*; import org.opencms.gwt.*; | [
"java.util",
"javax.servlet",
"org.opencms.configuration",
"org.opencms.gwt"
] | java.util; javax.servlet; org.opencms.configuration; org.opencms.gwt; | 2,780,672 |
EAttribute getStandardLoopCharacteristics_TestBefore(); | EAttribute getStandardLoopCharacteristics_TestBefore(); | /**
* Returns the meta object for the attribute '{@link org.eclipse.bpmn2.StandardLoopCharacteristics#isTestBefore <em>Test Before</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Test Before</em>'.
* @see org.eclipse.bpmn2.StandardLoopCharacteristics#isTestBefore()
* @see #getStandardLoopCharacteristics()
* @generated
*/ | Returns the meta object for the attribute '<code>org.eclipse.bpmn2.StandardLoopCharacteristics#isTestBefore Test Before</code>'. | getStandardLoopCharacteristics_TestBefore | {
"repo_name": "lqjack/fixflow",
"path": "modules/fixflow-core/src/main/java/org/eclipse/bpmn2/Bpmn2Package.java",
"license": "apache-2.0",
"size": 1014933
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,408,355 |
protected void addDonePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_StatusNode_done_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_StatusNode_done_feature", "_UI_StatusNode_type"),
MethodPackage.Literals.STATUS_NODE__DONE,
true,
false,
false,
ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE,
null,
null));
} | void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), MethodPackage.Literals.STATUS_NODE__DONE, true, false, false, ItemPropertyDescriptor.BOOLEAN_VALUE_IMAGE, null, null)); } | /**
* This adds a property descriptor for the Done feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Done feature. | addDonePropertyDescriptor | {
"repo_name": "CloudScale-Project/Environment",
"path": "plugins/eu.cloudscaleproject.env.method.editor/src/eu/cloudscaleproject/env/method/common/method/provider/StatusNodeItemProvider.java",
"license": "epl-1.0",
"size": 6836
} | [
"eu.cloudscaleproject.env.method.common.method.MethodPackage",
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor"
] | import eu.cloudscaleproject.env.method.common.method.MethodPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; | import eu.cloudscaleproject.env.method.common.method.*; import org.eclipse.emf.edit.provider.*; | [
"eu.cloudscaleproject.env",
"org.eclipse.emf"
] | eu.cloudscaleproject.env; org.eclipse.emf; | 1,105,171 |
public static void copyFromArrayToBuffer(ByteBuffer out, byte[] in, int inOffset, int length) {
if (out.hasArray()) {
System.arraycopy(in, inOffset, out.array(), out.arrayOffset() + out.position(), length);
// Move the position in out by length
out.position(out.position() + length);
} else if (UnsafeAccess.isAvailable()) {
UnsafeAccess.copy(in, inOffset, out, out.position(), length);
// Move the position in out by length
out.position(out.position() + length);
} else {
out.put(in, inOffset, length);
}
} | static void function(ByteBuffer out, byte[] in, int inOffset, int length) { if (out.hasArray()) { System.arraycopy(in, inOffset, out.array(), out.arrayOffset() + out.position(), length); out.position(out.position() + length); } else if (UnsafeAccess.isAvailable()) { UnsafeAccess.copy(in, inOffset, out, out.position(), length); out.position(out.position() + length); } else { out.put(in, inOffset, length); } } | /**
* Copies the bytes from given array's offset to length part into the given buffer. Puts the bytes
* to buffer's current position. This also advances the position in the 'out' buffer by 'length'
* @param out
* @param in
* @param inOffset
* @param length
*/ | Copies the bytes from given array's offset to length part into the given buffer. Puts the bytes to buffer's current position. This also advances the position in the 'out' buffer by 'length' | copyFromArrayToBuffer | {
"repo_name": "lshmouse/hbase",
"path": "hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java",
"license": "apache-2.0",
"size": 24392
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 2,010,377 |
public int read( final byte[] buffer )
throws IOException
{
return read( buffer, 0, buffer.length );
} | int function( final byte[] buffer ) throws IOException { return read( buffer, 0, buffer.length ); } | /**
* Reads bytes from the current tar archive entry. This method simply calls
* read( byte[], int, int ).
*
* @param buffer The buffer into which to place bytes read.
* @return The number of bytes read, or -1 at EOF.
* @exception IOException when an IO error causes operation to fail
*/ | Reads bytes from the current tar archive entry. This method simply calls read( byte[], int, int ) | read | {
"repo_name": "liuyq/bootchart",
"path": "lib/org/apache/commons/compress/tar/TarInputStream.java",
"license": "gpl-2.0",
"size": 13850
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,229,742 |
public static Parser<String> stringParser() {
String alpha = "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789()#@-_+-*&^%$!~?<>,";
Parser<Character> charParser = ParserBuilder.anyCharOf(alpha);
return Combinator.many(charParser).map(l -> l.foldLeft("", (acc, c) -> acc + String.valueOf(c)));
} | static Parser<String> function() { String alpha = STR; Parser<Character> charParser = ParserBuilder.anyCharOf(alpha); return Combinator.many(charParser).map(l -> l.foldLeft("", (acc, c) -> acc + String.valueOf(c))); } | /**
* Basic parser that will parse any input from the input stream
*
* @return parser that will parse any thing from the input stream
*/ | Basic parser that will parse any input from the input stream | stringParser | {
"repo_name": "tarek-nawara/ParserCombinators",
"path": "src/main/java/edu/parsec/parser/combinators/Combinator.java",
"license": "mit",
"size": 4825
} | [
"edu.parsec.parser.imp.Parser"
] | import edu.parsec.parser.imp.Parser; | import edu.parsec.parser.imp.*; | [
"edu.parsec.parser"
] | edu.parsec.parser; | 1,970,489 |
public void mouseClicked ( MouseEvent evt ) {
InputUnctContrib IUCmatch = null;
boolean match = false;
Iterator it = reducedZL.iterator();
while (it.hasNext() && match == false) {
InputUnctContrib IUCref = ((InputUnctContrib) it.next());
match =
IUCref.getSelectionBar().contains( evt.getX(), evt.getY() ) ? true : false;
if ( match == true ) {
IUCmatch = IUCref;
}
}
// if a bar was clicked
if ( IUCmatch != null ) {
infoPanel.update(//
IUCmatch.toStringCovNames(),
IUCmatch.getTrueUnct() / zoomLayer.getTotalUncertaintyContribs() );
selectedBar =
IUCmatch;
selectedBar.setSelected( true );
repaint();
} else {
// System.out.println("No bar clicked");
}
} | void function ( MouseEvent evt ) { InputUnctContrib IUCmatch = null; boolean match = false; Iterator it = reducedZL.iterator(); while (it.hasNext() && match == false) { InputUnctContrib IUCref = ((InputUnctContrib) it.next()); match = IUCref.getSelectionBar().contains( evt.getX(), evt.getY() ) ? true : false; if ( match == true ) { IUCmatch = IUCref; } } if ( IUCmatch != null ) { infoPanel.update( IUCmatch.getTrueUnct() / zoomLayer.getTotalUncertaintyContribs() ); selectedBar = IUCmatch; selectedBar.setSelected( true ); repaint(); } else { } } | /**
* method mouseClicked checks to see if one of the IUC bars were clicked
*
* @param evt
*/ | method mouseClicked checks to see if one of the IUC bars were clicked | mouseClicked | {
"repo_name": "johnzeringue/ET_Redux",
"path": "src/main/java/org/earthtime/UPb_Redux/fractions/UPbReduxFractions/fractionReduction/UncertaintyGraphPanel.java",
"license": "apache-2.0",
"size": 15653
} | [
"java.awt.event.MouseEvent",
"java.util.Iterator"
] | import java.awt.event.MouseEvent; import java.util.Iterator; | import java.awt.event.*; import java.util.*; | [
"java.awt",
"java.util"
] | java.awt; java.util; | 2,229,613 |
public TableLayoutBuilder cell(JComponent component, String attributes) {
Cell cc = cellInternal(component, attributes);
lastCC = cc;
items.add(cc);
return this;
} | TableLayoutBuilder function(JComponent component, String attributes) { Cell cc = cellInternal(component, attributes); lastCC = cc; items.add(cc); return this; } | /**
* Inserts a component at the current row/column. Attributes may be zero or
* more of rowSpec, columnSpec, colGrId, rowGrId, align and valign.
*/ | Inserts a component at the current row/column. Attributes may be zero or more of rowSpec, columnSpec, colGrId, rowGrId, align and valign | cell | {
"repo_name": "springrichclient/springrcp",
"path": "spring-richclient-core/src/main/java/org/springframework/richclient/layout/TableLayoutBuilder.java",
"license": "apache-2.0",
"size": 27078
} | [
"javax.swing.JComponent"
] | import javax.swing.JComponent; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 679,126 |
protected void quickSearch() {
if ((m_quickSearch != null) && CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_quickSearch.getFormValueAsString())) {
getTabHandler().setSearchQuery(m_quickSearch.getFormValueAsString());
getTabHandler().selectResultTab();
}
} | void function() { if ((m_quickSearch != null) && CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_quickSearch.getFormValueAsString())) { getTabHandler().setSearchQuery(m_quickSearch.getFormValueAsString()); getTabHandler().selectResultTab(); } } | /**
* Sets the search query an selects the result tab.<p>
*/ | Sets the search query an selects the result tab | quickSearch | {
"repo_name": "it-tavis/opencms-core",
"path": "src-gwt/org/opencms/ade/galleries/client/ui/A_CmsListTab.java",
"license": "lgpl-2.1",
"size": 22797
} | [
"org.opencms.util.CmsStringUtil"
] | import org.opencms.util.CmsStringUtil; | import org.opencms.util.*; | [
"org.opencms.util"
] | org.opencms.util; | 2,410,422 |
public Trade getTrade() {
return trade;
} | Trade function() { return trade; } | /**
* Getting payment handling information
*
* @return The payment handling class
*/ | Getting payment handling information | getTrade | {
"repo_name": "drtshock/Essentials",
"path": "Essentials/src/main/java/net/ess3/api/events/UserWarpEvent.java",
"license": "gpl-3.0",
"size": 1469
} | [
"com.earth2me.essentials.Trade"
] | import com.earth2me.essentials.Trade; | import com.earth2me.essentials.*; | [
"com.earth2me.essentials"
] | com.earth2me.essentials; | 2,596,161 |
private void parseBrief(String input, Map<String, String> result)
{
if (input == null)
{
return;
}
input = trimUntilLetter(input);
String authorList = null;
int nextPos = 0;
int et_al_pos = input.indexOf("et al.");
if (et_al_pos >= 0)
{
authorList = input.substring(0, et_al_pos + 6);
nextPos = et_al_pos + 6;
}
else
{
Matcher m = pAuthors.matcher(input);
StringBuilder authors = new StringBuilder();
while (m.find())
{
if (nextPos != m.start()) // assume that author names must be adjacent
break;
if (nextPos > 0)
authors.append(" , ");
authors.append(m.group(1));
nextPos = m.end();
}
authorList = authors.toString();
}
result.put(AUTHOR_LIST, authorList);
int beginTitle = nextPos;
nextPos = nextPos + MINIMAL_TITLE_LENGTH;
if (nextPos > input.length())
nextPos = input.length();
nextPos = skipCharsUntil(input, nextPos, ",.");
int endTitle = nextPos;
nextPos = skipChars(input, nextPos, ", .");
if (Character.isLowerCase(input.charAt(nextPos)) && !input.startsWith("in ", nextPos))
{
endTitle = skipCharsUntil(input, nextPos, ",.");
nextPos = skipChars(input, endTitle, ", .");
}
String title = input.substring(beginTitle, endTitle);
result.put(TITLE, trimUntilLetter(title).trim());
String other = input.substring(nextPos);
result.put(OTHER, trimUntilLetter(other).trim());
}
| void function(String input, Map<String, String> result) { if (input == null) { return; } input = trimUntilLetter(input); String authorList = null; int nextPos = 0; int et_al_pos = input.indexOf(STR); if (et_al_pos >= 0) { authorList = input.substring(0, et_al_pos + 6); nextPos = et_al_pos + 6; } else { Matcher m = pAuthors.matcher(input); StringBuilder authors = new StringBuilder(); while (m.find()) { if (nextPos != m.start()) break; if (nextPos > 0) authors.append(STR); authors.append(m.group(1)); nextPos = m.end(); } authorList = authors.toString(); } result.put(AUTHOR_LIST, authorList); int beginTitle = nextPos; nextPos = nextPos + MINIMAL_TITLE_LENGTH; if (nextPos > input.length()) nextPos = input.length(); nextPos = skipCharsUntil(input, nextPos, ",."); int endTitle = nextPos; nextPos = skipChars(input, nextPos, STR); if (Character.isLowerCase(input.charAt(nextPos)) && !input.startsWith(STR, nextPos)) { endTitle = skipCharsUntil(input, nextPos, ",."); nextPos = skipChars(input, endTitle, STR); } String title = input.substring(beginTitle, endTitle); result.put(TITLE, trimUntilLetter(title).trim()); String other = input.substring(nextPos); result.put(OTHER, trimUntilLetter(other).trim()); } | /**
* Parse ACM references in a non-standard, brief format.
*
* @param input
* @param result
*/ | Parse ACM references in a non-standard, brief format | parseBrief | {
"repo_name": "ecologylab/BigSemanticsJava",
"path": "BigSemanticsCore/src/ecologylab/bigsemantics/metametadata/fieldparsers/FieldParserForAcmReferences.java",
"license": "apache-2.0",
"size": 7228
} | [
"java.util.Map",
"java.util.regex.Matcher"
] | import java.util.Map; import java.util.regex.Matcher; | import java.util.*; import java.util.regex.*; | [
"java.util"
] | java.util; | 264,575 |
public final void cleanTemp() {
try {
File tmp = getTempDir();
if (tmp.exists()) {
File[] files = tmp.listFiles();
for (File f : files) {
if (f.isDirectory()) {
FileUtils.cleanDirectory(f);
FileUtils.deleteDirectory(f);
} else {
if (!f.getName().endsWith(".tsv")) { // avoid cleaning up files from TSVMeasurementCollector
f.delete();
}
}
}
}
} catch (IOException e) {
// don't care
}
} | final void function() { try { File tmp = getTempDir(); if (tmp.exists()) { File[] files = tmp.listFiles(); for (File f : files) { if (f.isDirectory()) { FileUtils.cleanDirectory(f); FileUtils.deleteDirectory(f); } else { if (!f.getName().endsWith(".tsv")) { f.delete(); } } } } } catch (IOException e) { } } | /**
* Cleans the temporary directory.
*/ | Cleans the temporary directory | cleanTemp | {
"repo_name": "SSEHUB/EASyProducer",
"path": "Plugins/Instantiation/de.uni_hildesheim.sse.vil.buildlang.tests/src/test/de/uni_hildesheim/sse/vil/buildlang/AbstractTest.java",
"license": "apache-2.0",
"size": 21827
} | [
"java.io.File",
"java.io.IOException",
"org.apache.commons.io.FileUtils"
] | import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; | import java.io.*; import org.apache.commons.io.*; | [
"java.io",
"org.apache.commons"
] | java.io; org.apache.commons; | 456,855 |
boolean showSurfaceRobustlyLocked() {
try {
if (mSurfaceControl != null) {
mSurfaceShown = true;
mSurfaceControl.show();
if (mWin.mTurnOnScreen) {
if (DEBUG_VISIBILITY) Slog.v(TAG,
"Show surface turning screen on: " + mWin);
mWin.mTurnOnScreen = false;
mAnimator.mBulkUpdateParams |= SET_TURN_ON_SCREEN;
}
}
return true;
} catch (RuntimeException e) {
Slog.w(TAG, "Failure showing surface " + mSurfaceControl + " in " + mWin, e);
}
mService.reclaimSomeSurfaceMemoryLocked(this, "show", true);
return false;
} | boolean showSurfaceRobustlyLocked() { try { if (mSurfaceControl != null) { mSurfaceShown = true; mSurfaceControl.show(); if (mWin.mTurnOnScreen) { if (DEBUG_VISIBILITY) Slog.v(TAG, STR + mWin); mWin.mTurnOnScreen = false; mAnimator.mBulkUpdateParams = SET_TURN_ON_SCREEN; } } return true; } catch (RuntimeException e) { Slog.w(TAG, STR + mSurfaceControl + STR + mWin, e); } mService.reclaimSomeSurfaceMemoryLocked(this, "show", true); return false; } | /**
* Have the surface flinger show a surface, robustly dealing with
* error conditions. In particular, if there is not enough memory
* to show the surface, then we will try to get rid of other surfaces
* in order to succeed.
*
* @return Returns true if the surface was successfully shown.
*/ | Have the surface flinger show a surface, robustly dealing with error conditions. In particular, if there is not enough memory to show the surface, then we will try to get rid of other surfaces in order to succeed | showSurfaceRobustlyLocked | {
"repo_name": "JuudeDemos/android-sdk-20",
"path": "src/com/android/server/wm/WindowStateAnimator.java",
"license": "apache-2.0",
"size": 75276
} | [
"android.util.Slog"
] | import android.util.Slog; | import android.util.*; | [
"android.util"
] | android.util; | 2,469,860 |
public void setTransformerFactoryChooser(TransformerFactoryChooser transformerFactoryChooser) {
ConstraintUtilities.ensureNotNull(transformerFactoryChooser, "transformerFactoryChooser");
this.transformerFactoryChooser = transformerFactoryChooser;
} | void function(TransformerFactoryChooser transformerFactoryChooser) { ConstraintUtilities.ensureNotNull(transformerFactoryChooser, STR); this.transformerFactoryChooser = transformerFactoryChooser; } | /**
* Sets the {@link TransformerFactoryChooser} for this {@link StylesheetManager}, which is
* used to initialise {@link TransformerFactory} instances.
*
* @param transformerFactoryChooser new {@link TransformerFactoryChooser}, which must not be null.
*/ | Sets the <code>TransformerFactoryChooser</code> for this <code>StylesheetManager</code>, which is used to initialise <code>TransformerFactory</code> instances | setTransformerFactoryChooser | {
"repo_name": "ktisha/snuggletex",
"path": "snuggletex-core/src/main/java/uk/ac/ed/ph/snuggletex/utilities/StylesheetManager.java",
"license": "bsd-3-clause",
"size": 15356
} | [
"uk.ac.ed.ph.snuggletex.internal.util.ConstraintUtilities"
] | import uk.ac.ed.ph.snuggletex.internal.util.ConstraintUtilities; | import uk.ac.ed.ph.snuggletex.internal.util.*; | [
"uk.ac.ed"
] | uk.ac.ed; | 1,070,952 |
@Test
public final void testThatSourcePathCanBeRetrieved_LocalFile() {
String expectedPath = getFormattedTestResourcePath("html/pageobjects/_image.png");
assertThat(page.image.getSourcePath(), is(expectedPath));
} | final void function() { String expectedPath = getFormattedTestResourcePath(STR); assertThat(page.image.getSourcePath(), is(expectedPath)); } | /**
* This test - in comparison to similar unit tests - checks if we are using
* the correct attribute to retrieve the image's source path for local
* files.
*/ | This test - in comparison to similar unit tests - checks if we are using the correct attribute to retrieve the image's source path for local files | testThatSourcePathCanBeRetrieved_LocalFile | {
"repo_name": "testIT-WebTester/webtester-core",
"path": "webtester-core/src/test/java/integration/pageobjects/ImageIntegrationTest.java",
"license": "apache-2.0",
"size": 2858
} | [
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] | import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; | import org.hamcrest.*; | [
"org.hamcrest"
] | org.hamcrest; | 244,670 |
private static Parcel unmarshall(byte[] bytes, int offset, int length) {
Parcel parcel = Parcel.obtain();
parcel.unmarshall(bytes, offset, length);
parcel.setDataPosition(0); // this is extremely important!
return parcel;
} | static Parcel function(byte[] bytes, int offset, int length) { Parcel parcel = Parcel.obtain(); parcel.unmarshall(bytes, offset, length); parcel.setDataPosition(0); return parcel; } | /**
* Unmarshall a parcel object from a byte array.
* @param bytes
* @return
*/ | Unmarshall a parcel object from a byte array | unmarshall | {
"repo_name": "Yndal/ArduPilot-SensorPlatform",
"path": "Tower_with_3drservices/AidlLib/src/com/o3dr/services/android/lib/util/ParcelableUtils.java",
"license": "mit",
"size": 2168
} | [
"android.os.Parcel"
] | import android.os.Parcel; | import android.os.*; | [
"android.os"
] | android.os; | 54,982 |
public void setCurrentDate(boolean bDisplayOption, int iChangeType)
{
boolean[] rgbEnabled = m_field.setEnableListeners(false);
Calendar calAfter = m_field.getCalendar();
Calendar calBefore = m_field.getCalendar();
m_field.setValue(DateTimeField.currentTime(), bDisplayOption, iChangeType); // File written or updated, set the update date
calAfter = m_field.getCalendar();
if (calBefore != null)
if (calAfter.before(calBefore))
calAfter = calBefore; // If this was set with a different computer (clock), make sure it always increases!
if (calAfter != null)
if (calAfter.equals(calBefore))
{
calAfter.add(Calendar.SECOND, 1); // Can't be the same as last time.
m_field.setCalendar(calAfter, bDisplayOption, iChangeType);
}
Utility.getLogger().info("Set date: " + m_field.toString());
m_field.setEnableListeners(rgbEnabled);
} | void function(boolean bDisplayOption, int iChangeType) { boolean[] rgbEnabled = m_field.setEnableListeners(false); Calendar calAfter = m_field.getCalendar(); Calendar calBefore = m_field.getCalendar(); m_field.setValue(DateTimeField.currentTime(), bDisplayOption, iChangeType); calAfter = m_field.getCalendar(); if (calBefore != null) if (calAfter.before(calBefore)) calAfter = calBefore; if (calAfter != null) if (calAfter.equals(calBefore)) { calAfter.add(Calendar.SECOND, 1); m_field.setCalendar(calAfter, bDisplayOption, iChangeType); } Utility.getLogger().info(STR + m_field.toString()); m_field.setEnableListeners(rgbEnabled); } | /**
* Set the date field to the current time.
* Also make sure the time is not the same as it is currently.
* @param bDisplayOption
* @param iChangeType
*/ | Set the date field to the current time. Also make sure the time is not the same as it is currently | setCurrentDate | {
"repo_name": "jbundle/jbundle",
"path": "base/base/src/main/java/org/jbundle/base/db/event/RecordChangedHandler.java",
"license": "gpl-3.0",
"size": 12384
} | [
"java.util.Calendar",
"org.jbundle.base.field.DateTimeField",
"org.jbundle.base.model.Utility"
] | import java.util.Calendar; import org.jbundle.base.field.DateTimeField; import org.jbundle.base.model.Utility; | import java.util.*; import org.jbundle.base.field.*; import org.jbundle.base.model.*; | [
"java.util",
"org.jbundle.base"
] | java.util; org.jbundle.base; | 2,448,513 |
@OAuthEndpoint(SCHEDULE_SCOPE)
@GET
@Path("person/calendar/evaluations")
public Response calendarEvaluation(@QueryParam("format") String format) {
validateFormat(format);
final Person person = getPerson();
if (!new ActiveStudentsGroup().isMember(person.getUser())) {
return Response.status(Status.OK).header(HttpHeaders.CONTENT_TYPE, JSON_UTF8).entity("{}").build();
}
if ("calendar".equals(format)) {
String evaluationCalendarICal = evaluationCalendarICal(person);
return Response.ok(evaluationCalendarICal, "text/calendar;charset=UTF-8").build();
} else {
return Response.status(Status.OK).header(HttpHeaders.CONTENT_TYPE, JSON_UTF8).entity(evaluationCalendarJson(person))
.build();
}
} | @OAuthEndpoint(SCHEDULE_SCOPE) @Path(STR) Response function(@QueryParam(STR) String format) { validateFormat(format); final Person person = getPerson(); if (!new ActiveStudentsGroup().isMember(person.getUser())) { return Response.status(Status.OK).header(HttpHeaders.CONTENT_TYPE, JSON_UTF8).entity("{}").build(); } if (STR.equals(format)) { String evaluationCalendarICal = evaluationCalendarICal(person); return Response.ok(evaluationCalendarICal, STR).build(); } else { return Response.status(Status.OK).header(HttpHeaders.CONTENT_TYPE, JSON_UTF8).entity(evaluationCalendarJson(person)) .build(); } } | /**
* calendar of all written evaluations (tests and exams). Available for
* students and teachers.
*
* @summary Evaluations calendar
* @param format
* ("calendar" or "json")
* @return If format is "calendar", returns iCal format. If not returns the
* following json.
* @servicetag SCHEDULE_SCOPE
*/ | calendar of all written evaluations (tests and exams). Available for students and teachers | calendarEvaluation | {
"repo_name": "Luis-Cruz/fenixedu-academic-api-example",
"path": "src/main/java/pt/ist/fenixedu/integration/api/FenixAPIv1.java",
"license": "lgpl-3.0",
"size": 69139
} | [
"com.google.common.net.HttpHeaders",
"javax.ws.rs.Path",
"javax.ws.rs.QueryParam",
"javax.ws.rs.core.Response",
"org.fenixedu.academic.domain.Person",
"org.fenixedu.academic.domain.accessControl.ActiveStudentsGroup",
"org.fenixedu.bennu.oauth.annotation.OAuthEndpoint"
] | import com.google.common.net.HttpHeaders; import javax.ws.rs.Path; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; import org.fenixedu.academic.domain.Person; import org.fenixedu.academic.domain.accessControl.ActiveStudentsGroup; import org.fenixedu.bennu.oauth.annotation.OAuthEndpoint; | import com.google.common.net.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.fenixedu.academic.domain.*; import org.fenixedu.bennu.oauth.annotation.*; | [
"com.google.common",
"javax.ws",
"org.fenixedu.academic",
"org.fenixedu.bennu"
] | com.google.common; javax.ws; org.fenixedu.academic; org.fenixedu.bennu; | 2,354,313 |
private void updateKeyspaceAccess()
{
if (user.get() == null || keyspace.get() == null)
// user is not logged in or keyspace is not set
keyspaceAccess.set(null);
else
// authorize the user for the current keyspace
keyspaceAccess.set(DatabaseDescriptor.getAuthority().authorize(user.get(), keyspace.get()));
} | void function() { if (user.get() == null keyspace.get() == null) keyspaceAccess.set(null); else keyspaceAccess.set(DatabaseDescriptor.getAuthority().authorize(user.get(), keyspace.get())); } | /**
* Called when the keyspace or user have changed.
*/ | Called when the keyspace or user have changed | updateKeyspaceAccess | {
"repo_name": "aguynamedben/cassandra-counters",
"path": "src/java/org/apache/cassandra/service/ClientState.java",
"license": "apache-2.0",
"size": 4366
} | [
"org.apache.cassandra.config.DatabaseDescriptor"
] | import org.apache.cassandra.config.DatabaseDescriptor; | import org.apache.cassandra.config.*; | [
"org.apache.cassandra"
] | org.apache.cassandra; | 2,448,792 |
public int eraseCachedBookCover(String uuid) {
CoversDbHelper db = getCoversDb();
if (db != null)
return db.eraseCachedBookCover(uuid);
else
return 0;
}
private static Calendar mCalendar = null;
private static SimpleDateFormat mMonthNameFormatter = null;
| int function(String uuid) { CoversDbHelper db = getCoversDb(); if (db != null) return db.eraseCachedBookCover(uuid); else return 0; } private static Calendar mCalendar = null; private static SimpleDateFormat mMonthNameFormatter = null; | /**
* Erase contents of covers cache
*/ | Erase contents of covers cache | eraseCachedBookCover | {
"repo_name": "Grunthos/Book-Catalogue",
"path": "src/com/eleybourn/bookcatalogue/utils/Utils.java",
"license": "gpl-3.0",
"size": 70195
} | [
"com.eleybourn.bookcatalogue.database.CoversDbHelper",
"java.text.SimpleDateFormat",
"java.util.Calendar"
] | import com.eleybourn.bookcatalogue.database.CoversDbHelper; import java.text.SimpleDateFormat; import java.util.Calendar; | import com.eleybourn.bookcatalogue.database.*; import java.text.*; import java.util.*; | [
"com.eleybourn.bookcatalogue",
"java.text",
"java.util"
] | com.eleybourn.bookcatalogue; java.text; java.util; | 2,883,158 |
public void write() throws IOException {
corruptPreUpgradeStorage(root);
write(getVersionFile());
} | void function() throws IOException { corruptPreUpgradeStorage(root); write(getVersionFile()); } | /**
* Write version file.
*
* @throws IOException
*/ | Write version file | write | {
"repo_name": "Shmuma/hadoop",
"path": "src/hdfs/org/apache/hadoop/hdfs/server/common/Storage.java",
"license": "apache-2.0",
"size": 29482
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 697,334 |
@Override
public Object prevItemId(Object itemId) {
return ((Container.Ordered) items).prevItemId(itemId);
} | Object function(Object itemId) { return ((Container.Ordered) items).prevItemId(itemId); } | /**
* Gets the ID of the Item preceding the Item that corresponds to the
* itemId.
*
* @see com.vaadin.data.Container.Ordered#prevItemId(java.lang.Object)
*/ | Gets the ID of the Item preceding the Item that corresponds to the itemId | prevItemId | {
"repo_name": "jdahlstrom/vaadin.react",
"path": "server/src/main/java/com/vaadin/ui/Table.java",
"license": "apache-2.0",
"size": 223299
} | [
"com.vaadin.data.Container"
] | import com.vaadin.data.Container; | import com.vaadin.data.*; | [
"com.vaadin.data"
] | com.vaadin.data; | 755,796 |
private void close() throws IOException {
final FileChannel localOutput = this.output;
// close the local file channel
if (localOutput != null) {
localOutput.close();
}
if (!completeFuture.isDone()) {
//noinspection ThrowableInstanceNeverThrown
completeFuture.failure(new IOException("Connection was closed"));
}
} | void function() throws IOException { final FileChannel localOutput = this.output; if (localOutput != null) { localOutput.close(); } if (!completeFuture.isDone()) { completeFuture.failure(new IOException(STR)); } } | /**
* Method closes the local file channel, and if download wasn't completed -
* notify {@link FutureImpl} about download failure.
*
* @throws IOException If failed to close <em>localOutput</em>.
*/ | Method closes the local file channel, and if download wasn't completed - notify <code>FutureImpl</code> about download failure | close | {
"repo_name": "teslarframwork/buntty",
"path": "server/src/main/java/org/egreen/teslar/core/server/filter/ClientDownloadFilter.java",
"license": "apache-2.0",
"size": 6760
} | [
"java.io.IOException",
"java.nio.channels.FileChannel"
] | import java.io.IOException; import java.nio.channels.FileChannel; | import java.io.*; import java.nio.channels.*; | [
"java.io",
"java.nio"
] | java.io; java.nio; | 1,609,249 |
private void regionOffline(final HRegionInfo regionInfo, final State state) {
regionStates.regionOffline(regionInfo, state);
removeClosedRegion(regionInfo);
// remove the region plan as well just in case.
clearRegionPlan(regionInfo);
balancer.regionOffline(regionInfo);
// Tell our listeners that a region was closed
sendRegionClosedNotification(regionInfo);
} | void function(final HRegionInfo regionInfo, final State state) { regionStates.regionOffline(regionInfo, state); removeClosedRegion(regionInfo); clearRegionPlan(regionInfo); balancer.regionOffline(regionInfo); sendRegionClosedNotification(regionInfo); } | /**
* A region is offline. The new state should be the specified one,
* if not null. If the specified state is null, the new state is Offline.
* The specified state can be Split/Merged/Offline/null only.
*/ | A region is offline. The new state should be the specified one, if not null. If the specified state is null, the new state is Offline. The specified state can be Split/Merged/Offline/null only | regionOffline | {
"repo_name": "Jackygq1982/hbase_src",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java",
"license": "apache-2.0",
"size": 167470
} | [
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.master.RegionState"
] | import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.master.RegionState; | import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.master.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,284,967 |
QueryFilter<SemanticDescriptor> byLocationUrl(String locationUrl); | QueryFilter<SemanticDescriptor> byLocationUrl(String locationUrl); | /**
* Filters the descriptors by their location.
*
* @param locationUrl
* location of semantic descriptor.
* @return produced filter.
*/ | Filters the descriptors by their location | byLocationUrl | {
"repo_name": "psnc-dl/darceo",
"path": "wrdz/wrdz-ru/dao/src/main/java/pl/psnc/synat/wrdz/ru/dao/services/descriptors/SemanticDescriptorFilterFactory.java",
"license": "gpl-3.0",
"size": 3016
} | [
"pl.psnc.synat.wrdz.common.dao.QueryFilter",
"pl.psnc.synat.wrdz.ru.entity.services.descriptors.SemanticDescriptor"
] | import pl.psnc.synat.wrdz.common.dao.QueryFilter; import pl.psnc.synat.wrdz.ru.entity.services.descriptors.SemanticDescriptor; | import pl.psnc.synat.wrdz.common.dao.*; import pl.psnc.synat.wrdz.ru.entity.services.descriptors.*; | [
"pl.psnc.synat"
] | pl.psnc.synat; | 1,292,888 |
@Override
public RDFFormat getRDFFormat() {
if (targetFormat.getRDFFormat() == null) {
return targetFormat.getRDFFormat();
} else {
return TargetFormats.defaultEnum.getRDFFormat();
}
} | RDFFormat function() { if (targetFormat.getRDFFormat() == null) { return targetFormat.getRDFFormat(); } else { return TargetFormats.defaultEnum.getRDFFormat(); } } | /**
* Returns the RDF format for this factory.
*/ | Returns the RDF format for this factory | getRDFFormat | {
"repo_name": "edmcouncil/rdf-serializer",
"path": "src/main/java/org/edmcouncil/rdf_toolkit/SesameSortedRDFWriterFactory.java",
"license": "mit",
"size": 10408
} | [
"org.eclipse.rdf4j.rio.RDFFormat"
] | import org.eclipse.rdf4j.rio.RDFFormat; | import org.eclipse.rdf4j.rio.*; | [
"org.eclipse.rdf4j"
] | org.eclipse.rdf4j; | 2,054,264 |
FoodValues getFoodValues(@Nonnull ItemStack food); | FoodValues getFoodValues(@Nonnull ItemStack food); | /**
* Get player-agnostic food values.
*
* @return The food values, or {@link ItemStack#EMPTY} if none were found.
*/ | Get player-agnostic food values | getFoodValues | {
"repo_name": "squeek502/AppleCore",
"path": "java/squeek/applecore/api/IAppleCoreAccessor.java",
"license": "unlicense",
"size": 3202
} | [
"javax.annotation.Nonnull",
"net.minecraft.item.ItemStack"
] | import javax.annotation.Nonnull; import net.minecraft.item.ItemStack; | import javax.annotation.*; import net.minecraft.item.*; | [
"javax.annotation",
"net.minecraft.item"
] | javax.annotation; net.minecraft.item; | 2,122,875 |
public MasterReport createReport()
{
final Band levelA1 = createBand("A1", Color.magenta, 0, 0, 100, 100);
levelA1.addElement(createBand("A1-B1", Color.blue, 0, 50, 50, 50));
levelA1.addElement(createBand("A1-B2", Color.yellow, 50, 0, 150, 50));
// x=55%, y=5%, width=40%, height=100%
final Band levelA2 = createBand("A2", Color.green, -50, 0, -50, -100);
// x=5%, y=55%, width=40%, height=40%
levelA2.addElement(createBand("A2-B1", Color.red, 0, -50, -50, -50));
// x=55%, y=5%, width=40%, height=40%
levelA2.addElement(createBand("A2-B2", Color.darkGray, -55, -5, -40, -40));
final ReportHeader header = new ReportHeader();
header.setName("Report-Header");
header.getStyle().setStyleProperty(ElementStyleKeys.MIN_WIDTH, new Float(-100));
header.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, new Float(100));
header.getStyle().setStyleProperty(ElementStyleKeys.MAX_WIDTH, new Float(Short.MAX_VALUE));
header.getStyle().setStyleProperty(ElementStyleKeys.MAX_HEIGHT, new Float(100));
header.getStyle().setStyleProperty(ElementStyleKeys.BACKGROUND_COLOR, Color.ORANGE);
header.addElement(levelA1);
header.addElement(levelA2);
final ContentFieldElementFactory cfef = new ContentFieldElementFactory();
cfef.setFieldname("CreateComponent");
cfef.setMinimumSize(new FloatDimension(400, 400));
cfef.setAbsolutePosition(new Point2D.Float(0, 0));
final ReportFooter footer = new ReportFooter();
footer.addElement(cfef.createElement());
final MasterReport report = new MasterReport();
report.setReportHeader(header);
report.setReportFooter(footer);
report.setName("Band in Band stacking");
report.addExpression(new ComplexComponentExpression("CreateComponent"));
return report;
} | MasterReport function() { final Band levelA1 = createBand("A1", Color.magenta, 0, 0, 100, 100); levelA1.addElement(createBand("A1-B1", Color.blue, 0, 50, 50, 50)); levelA1.addElement(createBand("A1-B2", Color.yellow, 50, 0, 150, 50)); final Band levelA2 = createBand("A2", Color.green, -50, 0, -50, -100); levelA2.addElement(createBand("A2-B1", Color.red, 0, -50, -50, -50)); levelA2.addElement(createBand("A2-B2", Color.darkGray, -55, -5, -40, -40)); final ReportHeader header = new ReportHeader(); header.setName(STR); header.getStyle().setStyleProperty(ElementStyleKeys.MIN_WIDTH, new Float(-100)); header.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, new Float(100)); header.getStyle().setStyleProperty(ElementStyleKeys.MAX_WIDTH, new Float(Short.MAX_VALUE)); header.getStyle().setStyleProperty(ElementStyleKeys.MAX_HEIGHT, new Float(100)); header.getStyle().setStyleProperty(ElementStyleKeys.BACKGROUND_COLOR, Color.ORANGE); header.addElement(levelA1); header.addElement(levelA2); final ContentFieldElementFactory cfef = new ContentFieldElementFactory(); cfef.setFieldname(STR); cfef.setMinimumSize(new FloatDimension(400, 400)); cfef.setAbsolutePosition(new Point2D.Float(0, 0)); final ReportFooter footer = new ReportFooter(); footer.addElement(cfef.createElement()); final MasterReport report = new MasterReport(); report.setReportHeader(header); report.setReportFooter(footer); report.setName(STR); report.addExpression(new ComplexComponentExpression(STR)); return report; } | /**
* Create a report with a single report header band. This band contains several sub bands.
*
* @return the created report.
*/ | Create a report with a single report header band. This band contains several sub bands | createReport | {
"repo_name": "EgorZhuk/pentaho-reporting",
"path": "engine/demo/src/main/java/org/pentaho/reporting/engine/classic/demo/ancient/demo/layouts/BandInBandStackingDemoHandler.java",
"license": "lgpl-2.1",
"size": 8416
} | [
"java.awt.Color",
"java.awt.geom.Point2D",
"org.pentaho.reporting.engine.classic.core.Band",
"org.pentaho.reporting.engine.classic.core.MasterReport",
"org.pentaho.reporting.engine.classic.core.ReportFooter",
"org.pentaho.reporting.engine.classic.core.ReportHeader",
"org.pentaho.reporting.engine.classic... | import java.awt.Color; import java.awt.geom.Point2D; import org.pentaho.reporting.engine.classic.core.Band; import org.pentaho.reporting.engine.classic.core.MasterReport; import org.pentaho.reporting.engine.classic.core.ReportFooter; import org.pentaho.reporting.engine.classic.core.ReportHeader; import org.pentaho.reporting.engine.classic.core.elementfactory.ContentFieldElementFactory; import org.pentaho.reporting.engine.classic.core.style.ElementStyleKeys; import org.pentaho.reporting.libraries.base.util.FloatDimension; | import java.awt.*; import java.awt.geom.*; import org.pentaho.reporting.engine.classic.core.*; import org.pentaho.reporting.engine.classic.core.elementfactory.*; import org.pentaho.reporting.engine.classic.core.style.*; import org.pentaho.reporting.libraries.base.util.*; | [
"java.awt",
"org.pentaho.reporting"
] | java.awt; org.pentaho.reporting; | 1,313,526 |
@Test
public void testRepairImplicitDirectory()
throws IOException, URISyntaxException {
GoogleHadoopFileSystemBase myghfs = (GoogleHadoopFileSystemBase) ghfs;
GoogleCloudStorageFileSystem gcsfs = myghfs.getGcsFs();
URI seedUri = GoogleCloudStorageFileSystemIntegrationTest.getTempFilePath();
Path parentPath = castAsHadoopPath(seedUri);
URI parentUri = myghfs.getGcsPath(parentPath);
// A subdir path that looks like gs://<bucket>/<generated-tempdir>/foo-subdir where
// neither the subdir nor gs://<bucket>/<generated-tempdir> exist yet.
Path subdirPath = new Path(parentPath, "foo-subdir");
URI subdirUri = myghfs.getGcsPath(subdirPath);
Path leafPath = new Path(subdirPath, "bar-subdir");
URI leafUri = myghfs.getGcsPath(leafPath);
gcsfs.mkdir(leafUri);
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertFalse("Expected to !exist: " + parentUri, gcsfs.exists(parentUri));
myghfs.listStatus(parentPath);
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertTrue("Expected to exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertTrue("Expected to exist: " + parentUri, gcsfs.exists(parentUri));
clearBucket(bucketName);
// Reset for globStatus.
gcsfs.mkdir(leafUri);
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertFalse("Expected to !exist: " + parentUri, gcsfs.exists(parentUri));
myghfs.globStatus(parentPath);
// Globbing the single directory only repairs that top-level directory; it is *not* the same
// as listStatus.
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertTrue("Expected to exist: " + parentUri, gcsfs.exists(parentUri));
clearBucket(bucketName);
// Reset for globStatus(path/*)
gcsfs.mkdir(leafUri);
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertFalse("Expected to !exist: " + parentUri, gcsfs.exists(parentUri));
// When globbing children, the parent will only be repaired if flat-globbing is not enabled.
Path globChildrenPath = new Path(parentPath.toString() + "/*");
myghfs.globStatus(globChildrenPath);
boolean expectParentRepair = !myghfs.shouldUseFlatGlob(globChildrenPath);
// This will internally call listStatus, so will have the same behavior of repairing both
// levels of subdirectories.
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
HadoopVersionInfo versionInfo = new HadoopVersionInfo();
if (versionInfo.isLessThan(2, 0) || versionInfo.isGreaterThan(2, 3)) {
Assert.assertTrue("Expected to exist: " + subdirUri, gcsfs.exists(subdirUri));
if (expectParentRepair) {
Assert.assertTrue("Expected to exist: " + parentUri, gcsfs.exists(parentUri));
} else {
Assert.assertFalse(
"Expected not to exist due to flat globbing: " + parentUri, gcsfs.exists(parentUri));
}
}
clearBucket(bucketName);
// Reset for globStatus(path*)
gcsfs.mkdir(leafUri);
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
Assert.assertFalse("Expected to !exist: " + parentUri, gcsfs.exists(parentUri));
// Globbing with a wildcard in the parentUri itself also only repairs one level, but for
// a different reason than globbing with no wildcard. Globbing with no wildcard requires
// catching 'null' in globStatus, whereas having the wildcard causes the repair to happen
// when listing parentOf(parentUri).
myghfs.globStatus(new Path(parentPath.toString() + "*"));
Assert.assertTrue("Expected to exist: " + leafUri, gcsfs.exists(leafUri));
Assert.assertFalse("Expected to !exist: " + subdirUri, gcsfs.exists(subdirUri));
if (versionInfo.isLessThan(2, 0) || versionInfo.isGreaterThan(2, 3)) {
Assert.assertTrue("Expected to exist: " + parentUri, gcsfs.exists(parentUri));
}
clearBucket(bucketName);
} | void function() throws IOException, URISyntaxException { GoogleHadoopFileSystemBase myghfs = (GoogleHadoopFileSystemBase) ghfs; GoogleCloudStorageFileSystem gcsfs = myghfs.getGcsFs(); URI seedUri = GoogleCloudStorageFileSystemIntegrationTest.getTempFilePath(); Path parentPath = castAsHadoopPath(seedUri); URI parentUri = myghfs.getGcsPath(parentPath); Path subdirPath = new Path(parentPath, STR); URI subdirUri = myghfs.getGcsPath(subdirPath); Path leafPath = new Path(subdirPath, STR); URI leafUri = myghfs.getGcsPath(leafPath); gcsfs.mkdir(leafUri); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertFalse(STR + parentUri, gcsfs.exists(parentUri)); myghfs.listStatus(parentPath); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertTrue(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertTrue(STR + parentUri, gcsfs.exists(parentUri)); clearBucket(bucketName); gcsfs.mkdir(leafUri); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertFalse(STR + parentUri, gcsfs.exists(parentUri)); myghfs.globStatus(parentPath); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertTrue(STR + parentUri, gcsfs.exists(parentUri)); clearBucket(bucketName); gcsfs.mkdir(leafUri); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertFalse(STR + parentUri, gcsfs.exists(parentUri)); Path globChildrenPath = new Path(parentPath.toString() + "/*"); myghfs.globStatus(globChildrenPath); boolean expectParentRepair = !myghfs.shouldUseFlatGlob(globChildrenPath); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); HadoopVersionInfo versionInfo = new HadoopVersionInfo(); if (versionInfo.isLessThan(2, 0) versionInfo.isGreaterThan(2, 3)) { Assert.assertTrue(STR + 
subdirUri, gcsfs.exists(subdirUri)); if (expectParentRepair) { Assert.assertTrue(STR + parentUri, gcsfs.exists(parentUri)); } else { Assert.assertFalse( STR + parentUri, gcsfs.exists(parentUri)); } } clearBucket(bucketName); gcsfs.mkdir(leafUri); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); Assert.assertFalse(STR + parentUri, gcsfs.exists(parentUri)); myghfs.globStatus(new Path(parentPath.toString() + "*")); Assert.assertTrue(STR + leafUri, gcsfs.exists(leafUri)); Assert.assertFalse(STR + subdirUri, gcsfs.exists(subdirUri)); if (versionInfo.isLessThan(2, 0) versionInfo.isGreaterThan(2, 3)) { Assert.assertTrue(STR + parentUri, gcsfs.exists(parentUri)); } clearBucket(bucketName); } | /**
* Makes listStatus and globStatus perform repairs by first creating an object directly without
* creating its parent directory object.
*/ | Makes listStatus and globStatus perform repairs by first creating an object directly without creating its parent directory object | testRepairImplicitDirectory | {
"repo_name": "ravwojdyla/bigdata-interop",
"path": "gcs/src/test/java/com/google/cloud/hadoop/fs/gcs/GoogleHadoopFileSystemTestBase.java",
"license": "apache-2.0",
"size": 23913
} | [
"com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem",
"com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystemIntegrationTest",
"com.google.cloud.hadoop.util.HadoopVersionInfo",
"java.io.IOException",
"java.net.URISyntaxException",
"org.apache.hadoop.fs.Path",
"org.junit.Assert"
] | import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem; import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystemIntegrationTest; import com.google.cloud.hadoop.util.HadoopVersionInfo; import java.io.IOException; import java.net.URISyntaxException; import org.apache.hadoop.fs.Path; import org.junit.Assert; | import com.google.cloud.hadoop.gcsio.*; import com.google.cloud.hadoop.util.*; import java.io.*; import java.net.*; import org.apache.hadoop.fs.*; import org.junit.*; | [
"com.google.cloud",
"java.io",
"java.net",
"org.apache.hadoop",
"org.junit"
] | com.google.cloud; java.io; java.net; org.apache.hadoop; org.junit; | 977,272 |
@Override
public synchronized boolean clearExpired(final Date date) {
if (date == null) {
return false;
}
boolean removed = false;
for (final Iterator<Cookie> it = cookies.iterator(); it.hasNext();) {
Cookie cookie = it.next();
if (nonExpiringDomains.contains(cookie.getDomain())) {
continue;
}
if (cookie.isExpired(date)) {
it.remove();
removed = true;
}
}
return removed;
} | synchronized boolean function(final Date date) { if (date == null) { return false; } boolean removed = false; for (final Iterator<Cookie> it = cookies.iterator(); it.hasNext();) { Cookie cookie = it.next(); if (nonExpiringDomains.contains(cookie.getDomain())) { continue; } if (cookie.isExpired(date)) { it.remove(); removed = true; } } return removed; } | /**
* Removes all of {@link Cookie cookies} in this HTTP state that have
* expired by the specified {@link java.util.Date date}.
*
* @return true if any cookies were purged.
*
* @see Cookie#isExpired(Date)
*/ | Removes all of <code>Cookie cookies</code> in this HTTP state that have expired by the specified <code>java.util.Date date</code> | clearExpired | {
"repo_name": "mozvip/hclient",
"path": "src/main/java/com/github/mozvip/hclient/cookies/CustomCookieStore.java",
"license": "apache-2.0",
"size": 3185
} | [
"java.util.Date",
"java.util.Iterator",
"org.apache.http.cookie.Cookie"
] | import java.util.Date; import java.util.Iterator; import org.apache.http.cookie.Cookie; | import java.util.*; import org.apache.http.cookie.*; | [
"java.util",
"org.apache.http"
] | java.util; org.apache.http; | 720,854 |
LineSegment line = (LineSegment) obj;
// check first coordinate
Coordinate first = line.p0;
Node n1 = retrieveNode(first);
if (n1 == null) {
n1 = createNode(first);
}
// check second coordinate
Coordinate last = line.p1;
Node n2 = retrieveNode(last);
if (n2 == null) {
n2 = createNode(last);
}
// build the edge setting underlying object to line
Edge e = getGraphBuilder().buildEdge(n1, n2);
getGraphBuilder().addEdge(e);
if (useTolerance()) {
line = alterLine(line, n1, n2);
}
setObject(e, line);
// return the created edge
return (e);
} | LineSegment line = (LineSegment) obj; Coordinate first = line.p0; Node n1 = retrieveNode(first); if (n1 == null) { n1 = createNode(first); } Coordinate last = line.p1; Node n2 = retrieveNode(last); if (n2 == null) { n2 = createNode(last); } Edge e = getGraphBuilder().buildEdge(n1, n2); getGraphBuilder().addEdge(e); if (useTolerance()) { line = alterLine(line, n1, n2); } setObject(e, line); return (e); } | /**
* Adds a line to the graph.
*
* @param obj An instance of LineSegment.
* @return A BasicEdge.
* @see LineSegment
* @see GraphGenerator#add(Object)
*/ | Adds a line to the graph | add | {
"repo_name": "geotools/geotools",
"path": "modules/extension/graph/src/main/java/org/geotools/graph/build/line/BasicLineGraphGenerator.java",
"license": "lgpl-2.1",
"size": 9081
} | [
"org.geotools.graph.structure.Edge",
"org.geotools.graph.structure.Node",
"org.locationtech.jts.geom.Coordinate",
"org.locationtech.jts.geom.LineSegment"
] | import org.geotools.graph.structure.Edge; import org.geotools.graph.structure.Node; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.LineSegment; | import org.geotools.graph.structure.*; import org.locationtech.jts.geom.*; | [
"org.geotools.graph",
"org.locationtech.jts"
] | org.geotools.graph; org.locationtech.jts; | 393,640 |
@WebMethod
@Path("/removeUser")
@Produces("text/plain")
@GET
public String removeUser(
@WebParam(name = "sessionid", partName = "sessionid") @QueryParam("sessionid") String sessionid,
@WebParam(name = "eid", partName = "eid") @QueryParam("eid") String eid) {
Session session = establishSession(sessionid);
try {
UserEdit userEdit = null;
String userid = userDirectoryService.getUserByEid(eid).getId();
userEdit = userDirectoryService.editUser(userid);
userDirectoryService.removeUser(userEdit);
} catch (Exception e) {
LOG.error("WS removeUser(): " + e.getClass().getName() + " : " + e.getMessage());
return e.getClass().getName() + " : " + e.getMessage();
}
return "success";
} | @Path(STR) @Produces(STR) String function( @WebParam(name = STR, partName = STR) @QueryParam(STR) String sessionid, @WebParam(name = "eid", partName = "eid") @QueryParam("eid") String eid) { Session session = establishSession(sessionid); try { UserEdit userEdit = null; String userid = userDirectoryService.getUserByEid(eid).getId(); userEdit = userDirectoryService.editUser(userid); userDirectoryService.removeUser(userEdit); } catch (Exception e) { LOG.error(STR + e.getClass().getName() + STR + e.getMessage()); return e.getClass().getName() + STR + e.getMessage(); } return STR; } | /**
* Remove a user account
*
* @param sessionid the id of a valid session
* @param eid the login username (ie jsmith26) of the user whose account you want to remove
* @return success or exception message
* @throws RuntimeException
*/ | Remove a user account | removeUser | {
"repo_name": "pushyamig/sakai",
"path": "webservices/cxf/src/java/org/sakaiproject/webservices/SakaiScript.java",
"license": "apache-2.0",
"size": 209455
} | [
"javax.jws.WebParam",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"javax.ws.rs.QueryParam",
"org.sakaiproject.tool.api.Session",
"org.sakaiproject.user.api.UserEdit"
] | import javax.jws.WebParam; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import org.sakaiproject.tool.api.Session; import org.sakaiproject.user.api.UserEdit; | import javax.jws.*; import javax.ws.rs.*; import org.sakaiproject.tool.api.*; import org.sakaiproject.user.api.*; | [
"javax.jws",
"javax.ws",
"org.sakaiproject.tool",
"org.sakaiproject.user"
] | javax.jws; javax.ws; org.sakaiproject.tool; org.sakaiproject.user; | 1,446,545 |
@SuppressWarnings("resource")
@Override
public void setup(OperatorContext context, OutputMutator outputMutator) throws ExecutionSetupException {
oContext = context;
// Note: DO NOT use managed buffers here. They remain in existence
// until the fragment is shut down. The buffers here are large.
// If we scan 1000 files, and allocate 1 MB for each, we end up
// holding onto 1 GB of memory in managed buffers.
// Instead, we allocate the buffers explicitly, and must free
// them.
// readBuffer = context.getManagedBuffer(READ_BUFFER);
// whitespaceBuffer = context.getManagedBuffer(WHITE_SPACE_BUFFER);
readBuffer = context.getAllocator().buffer(READ_BUFFER);
whitespaceBuffer = context.getAllocator().buffer(WHITE_SPACE_BUFFER);
// setup Output, Input, and Reader
try {
TextOutput output = null;
TextInput input = null;
InputStream stream = null;
// setup Output using OutputMutator
if (settings.isHeaderExtractionEnabled()){
//extract header and use that to setup a set of VarCharVectors
String [] fieldNames = extractHeader();
output = new FieldVarCharOutput(outputMutator, fieldNames, getColumns(), isStarQuery());
} else {
//simply use RepeatedVarCharVector
output = new RepeatedVarCharOutput(outputMutator, getColumns(), isStarQuery());
}
// setup Input using InputStream
logger.trace("Opening file {}", split.getPath());
stream = dfs.openPossiblyCompressedStream(split.getPath());
input = new TextInput(settings, stream, readBuffer, split.getStart(), split.getStart() + split.getLength());
// setup Reader using Input and Output
reader = new TextReader(settings, input, output, whitespaceBuffer);
reader.start();
} catch (SchemaChangeException | IOException e) {
throw new ExecutionSetupException(String.format("Failure while setting up text reader for file %s", split.getPath()), e);
} catch (IllegalArgumentException e) {
throw UserException.dataReadError(e).addContext("File Path", split.getPath().toString()).build(logger);
}
} | @SuppressWarnings(STR) void function(OperatorContext context, OutputMutator outputMutator) throws ExecutionSetupException { oContext = context; readBuffer = context.getAllocator().buffer(READ_BUFFER); whitespaceBuffer = context.getAllocator().buffer(WHITE_SPACE_BUFFER); try { TextOutput output = null; TextInput input = null; InputStream stream = null; if (settings.isHeaderExtractionEnabled()){ String [] fieldNames = extractHeader(); output = new FieldVarCharOutput(outputMutator, fieldNames, getColumns(), isStarQuery()); } else { output = new RepeatedVarCharOutput(outputMutator, getColumns(), isStarQuery()); } logger.trace(STR, split.getPath()); stream = dfs.openPossiblyCompressedStream(split.getPath()); input = new TextInput(settings, stream, readBuffer, split.getStart(), split.getStart() + split.getLength()); reader = new TextReader(settings, input, output, whitespaceBuffer); reader.start(); } catch (SchemaChangeException IOException e) { throw new ExecutionSetupException(String.format(STR, split.getPath()), e); } catch (IllegalArgumentException e) { throw UserException.dataReadError(e).addContext(STR, split.getPath().toString()).build(logger); } } | /**
* Performs the initial setup required for the record reader.
* Initializes the input stream, handling of the output record batch
* and the actual reader to be used.
* @param context operator context from which buffer's will be allocated and managed
* @param outputMutator Used to create the schema in the output record batch
* @throws ExecutionSetupException
*/ | Performs the initial setup required for the record reader. Initializes the input stream, handling of the output record batch and the actual reader to be used | setup | {
"repo_name": "pwong-mapr/incubator-drill",
"path": "exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java",
"license": "apache-2.0",
"size": 9440
} | [
"java.io.IOException",
"java.io.InputStream",
"org.apache.drill.common.exceptions.ExecutionSetupException",
"org.apache.drill.common.exceptions.UserException",
"org.apache.drill.exec.exception.SchemaChangeException",
"org.apache.drill.exec.ops.OperatorContext",
"org.apache.drill.exec.physical.impl.Outpu... | import java.io.IOException; import java.io.InputStream; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.exec.exception.SchemaChangeException; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.physical.impl.OutputMutator; | import java.io.*; import org.apache.drill.common.exceptions.*; import org.apache.drill.exec.exception.*; import org.apache.drill.exec.ops.*; import org.apache.drill.exec.physical.impl.*; | [
"java.io",
"org.apache.drill"
] | java.io; org.apache.drill; | 878,346 |
@SuppressWarnings("unused")
private void createGroundOverlay(final JSONArray args, final CallbackContext callbackContext) throws JSONException {
JSONObject opts = args.getJSONObject(1);
GroundOverlayOptions options = new GroundOverlayOptions();
if (opts.has("anchor")) {
JSONArray anchor = opts.getJSONArray("anchor");
options.anchor((float)anchor.getDouble(0), (float)anchor.getDouble(1));
}
if (opts.has("bearing")) {
options.bearing((float)opts.getDouble("bearing"));
}
if (opts.has("opacity")) {
options.transparency((float)opts.getDouble("opacity"));
}
if (opts.has("zIndex")) {
options.zIndex((float)opts.getDouble("zIndex"));
}
if (opts.has("visible")) {
options.visible(opts.getBoolean("visible"));
}
if (opts.has("bounds") == true) {
JSONArray points = opts.getJSONArray("bounds");
LatLngBounds bounds = PluginUtil.JSONArray2LatLngBounds(points);
options.positionFromBounds(bounds);
}
// Load a dummy image
options.image(this.dummyImg);
GroundOverlay groundOverlay = this.map.addGroundOverlay(options);
// Load image
String url = opts.getString("url");
if (url != null && url.length() > 0) {
if (url.indexOf("http") == 0) {
AsyncLoadImage task = new AsyncLoadImage(groundOverlay, "setImage");
task.execute(url);
} else {
groundOverlay.setImage(BitmapDescriptorFactory.fromAsset(url));
}
}
String id = "ground_" + groundOverlay.getId();
this.objects.put(id, groundOverlay);
JSONObject result = new JSONObject();
result.put("hashCode", groundOverlay.hashCode());
result.put("id", id);
callbackContext.success(result);
} | @SuppressWarnings(STR) void function(final JSONArray args, final CallbackContext callbackContext) throws JSONException { JSONObject opts = args.getJSONObject(1); GroundOverlayOptions options = new GroundOverlayOptions(); if (opts.has(STR)) { JSONArray anchor = opts.getJSONArray(STR); options.anchor((float)anchor.getDouble(0), (float)anchor.getDouble(1)); } if (opts.has(STR)) { options.bearing((float)opts.getDouble(STR)); } if (opts.has(STR)) { options.transparency((float)opts.getDouble(STR)); } if (opts.has(STR)) { options.zIndex((float)opts.getDouble(STR)); } if (opts.has(STR)) { options.visible(opts.getBoolean(STR)); } if (opts.has(STR) == true) { JSONArray points = opts.getJSONArray(STR); LatLngBounds bounds = PluginUtil.JSONArray2LatLngBounds(points); options.positionFromBounds(bounds); } options.image(this.dummyImg); GroundOverlay groundOverlay = this.map.addGroundOverlay(options); String url = opts.getString("url"); if (url != null && url.length() > 0) { if (url.indexOf("http") == 0) { AsyncLoadImage task = new AsyncLoadImage(groundOverlay, STR); task.execute(url); } else { groundOverlay.setImage(BitmapDescriptorFactory.fromAsset(url)); } } String id = STR + groundOverlay.getId(); this.objects.put(id, groundOverlay); JSONObject result = new JSONObject(); result.put(STR, groundOverlay.hashCode()); result.put("id", id); callbackContext.success(result); } | /**
* Create ground overlay
*
* @param args
* @param callbackContext
* @throws JSONException
*/ | Create ground overlay | createGroundOverlay | {
"repo_name": "smithimage/phonegap-googlemaps-plugin",
"path": "src/android/plugin/google/maps/PluginGroundOverlay.java",
"license": "apache-2.0",
"size": 3439
} | [
"com.google.android.gms.maps.model.BitmapDescriptorFactory",
"com.google.android.gms.maps.model.GroundOverlay",
"com.google.android.gms.maps.model.GroundOverlayOptions",
"com.google.android.gms.maps.model.LatLngBounds",
"org.apache.cordova.CallbackContext",
"org.json.JSONArray",
"org.json.JSONException"... | import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.GroundOverlay; import com.google.android.gms.maps.model.GroundOverlayOptions; import com.google.android.gms.maps.model.LatLngBounds; import org.apache.cordova.CallbackContext; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; | import com.google.android.gms.maps.model.*; import org.apache.cordova.*; import org.json.*; | [
"com.google.android",
"org.apache.cordova",
"org.json"
] | com.google.android; org.apache.cordova; org.json; | 112,760 |
public String getSharedStringProperty(String propertyName) {
final EMFStoreProperty property = sharedProperties.get(propertyName);
if (property == null || property.getValue() == null) {
return null;
}
return ((PropertyStringValue) property.getValue()).getValue();
} | String function(String propertyName) { final EMFStoreProperty property = sharedProperties.get(propertyName); if (property == null property.getValue() == null) { return null; } return ((PropertyStringValue) property.getValue()).getValue(); } | /**
* Retrieves a shared string property.
*
* @param propertyName
* of the shared property as String
* @return the string value if it exists, otherwise <code>null</code>
**/ | Retrieves a shared string property | getSharedStringProperty | {
"repo_name": "edgarmueller/emfstore-rest",
"path": "bundles/org.eclipse.emf.emfstore.client/src/org/eclipse/emf/emfstore/internal/client/properties/PropertyManager.java",
"license": "epl-1.0",
"size": 14870
} | [
"org.eclipse.emf.emfstore.internal.common.model.EMFStoreProperty",
"org.eclipse.emf.emfstore.internal.common.model.PropertyStringValue"
] | import org.eclipse.emf.emfstore.internal.common.model.EMFStoreProperty; import org.eclipse.emf.emfstore.internal.common.model.PropertyStringValue; | import org.eclipse.emf.emfstore.internal.common.model.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,083,278 |
public DrawerBuilder withHeader(@LayoutRes int headerViewRes) {
if (mActivity == null) {
throw new RuntimeException("please pass an activity first to use this call");
}
if (headerViewRes != -1) {
//i know there should be a root, bit i got none here
this.mHeaderView = mActivity.getLayoutInflater().inflate(headerViewRes, null, false);
}
return this;
} | DrawerBuilder function(@LayoutRes int headerViewRes) { if (mActivity == null) { throw new RuntimeException(STR); } if (headerViewRes != -1) { this.mHeaderView = mActivity.getLayoutInflater().inflate(headerViewRes, null, false); } return this; } | /**
* Add a header to the DrawerBuilder ListView defined by a resource.
*
* @param headerViewRes
* @return
*/ | Add a header to the DrawerBuilder ListView defined by a resource | withHeader | {
"repo_name": "democedes/MaterialDrawer",
"path": "library/src/main/java/com/mikepenz/materialdrawer/DrawerBuilder.java",
"license": "apache-2.0",
"size": 61635
} | [
"android.support.annotation.LayoutRes"
] | import android.support.annotation.LayoutRes; | import android.support.annotation.*; | [
"android.support"
] | android.support; | 875,783 |
public void offerService() throws InterruptedException, IOException {
taskScheduler.start();
// refresh the node list as the recovery manager might have added
// disallowed trackers
refreshHosts();
this.expireTrackersThread = new Thread(this.expireTrackers,
"expireTrackers");
this.expireTrackersThread.setDaemon(true);
this.expireTrackersThread.start();
this.retireJobsThread = new Thread(this.retireJobs, "retireJobs");
this.retireJobsThread.setDaemon(true);
this.retireJobsThread.start();
expireLaunchingTaskThread.setDaemon(true);
expireLaunchingTaskThread.start();
speculationUpdaterThread.setDaemon(true);
speculationUpdaterThread.start();
if (completedJobStatusStore.isActive()) {
completedJobsStoreThread = new Thread(completedJobStatusStore,
"completedjobsStore-housekeeper");
completedJobsStoreThread.start();
}
// start the inter-tracker server once the jt is ready
this.interTrackerServer.start();
synchronized (this) {
state = State.RUNNING;
}
LOG.info("Starting RUNNING");
this.interTrackerServer.join();
LOG.info("Stopped interTrackerServer");
} | void function() throws InterruptedException, IOException { taskScheduler.start(); refreshHosts(); this.expireTrackersThread = new Thread(this.expireTrackers, STR); this.expireTrackersThread.setDaemon(true); this.expireTrackersThread.start(); this.retireJobsThread = new Thread(this.retireJobs, STR); this.retireJobsThread.setDaemon(true); this.retireJobsThread.start(); expireLaunchingTaskThread.setDaemon(true); expireLaunchingTaskThread.start(); speculationUpdaterThread.setDaemon(true); speculationUpdaterThread.start(); if (completedJobStatusStore.isActive()) { completedJobsStoreThread = new Thread(completedJobStatusStore, STR); completedJobsStoreThread.start(); } this.interTrackerServer.start(); synchronized (this) { state = State.RUNNING; } LOG.info(STR); this.interTrackerServer.join(); LOG.info(STR); } | /**
* Run forever
*/ | Run forever | offerService | {
"repo_name": "jchen123/hadoop-20-warehouse-fix",
"path": "src/mapred/org/apache/hadoop/mapred/JobTracker.java",
"license": "apache-2.0",
"size": 153244
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,613,915 |
protected String getForLabelText(String forElementId) {
return findElement(By.cssSelector("label[for=" + forElementId + "]")).getText();
} | String function(String forElementId) { return findElement(By.cssSelector(STR + forElementId + "]")).getText(); } | /**
* Returns the label text of a label-for element
* <p>
* For usage with elements like this: <label for="some-element-id">The text of the Label</label>
* </p>
*
* @param forElementId the id of the element for which to find the label text
* @return label text
*/ | Returns the label text of a label-for element For usage with elements like this: The text of the Label | getForLabelText | {
"repo_name": "rojlarge/rice-kc",
"path": "rice-tools-test/src/main/java/org/kuali/rice/testtools/selenium/WebDriverAftBase.java",
"license": "apache-2.0",
"size": 75494
} | [
"org.openqa.selenium.By"
] | import org.openqa.selenium.By; | import org.openqa.selenium.*; | [
"org.openqa.selenium"
] | org.openqa.selenium; | 1,478,324 |
NetworkInfo info = NetworkUtil.getNetworkInfo(context);
if (info == null || !info.isConnected()) return false;
switch (info.getType()) {
case ConnectivityManager.TYPE_WIFI:
case ConnectivityManager.TYPE_BLUETOOTH:
case ConnectivityManager.TYPE_ETHERNET:
return true;
default:
return !MainApplication.getEnvironment(context).getUserPrefs().isDownloadOverWifiOnly() ||
NetworkUtil.isOnZeroRatedNetwork(context, config);
}
} | NetworkInfo info = NetworkUtil.getNetworkInfo(context); if (info == null !info.isConnected()) return false; switch (info.getType()) { case ConnectivityManager.TYPE_WIFI: case ConnectivityManager.TYPE_BLUETOOTH: case ConnectivityManager.TYPE_ETHERNET: return true; default: return !MainApplication.getEnvironment(context).getUserPrefs().isDownloadOverWifiOnly() NetworkUtil.isOnZeroRatedNetwork(context, config); } } | /**
* Returns true if media can be streamed on the active network
* without requiring user consent.
*/ | Returns true if media can be streamed on the active network without requiring user consent | canStreamMedia | {
"repo_name": "edx/edx-app-android",
"path": "OpenEdXMobile/src/main/java/org/edx/mobile/util/MediaConsentUtils.java",
"license": "apache-2.0",
"size": 3499
} | [
"android.net.ConnectivityManager",
"android.net.NetworkInfo",
"org.edx.mobile.base.MainApplication"
] | import android.net.ConnectivityManager; import android.net.NetworkInfo; import org.edx.mobile.base.MainApplication; | import android.net.*; import org.edx.mobile.base.*; | [
"android.net",
"org.edx.mobile"
] | android.net; org.edx.mobile; | 2,125,694 |
public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
mViewPagerPageChangeListener = listener;
} | void function(ViewPager.OnPageChangeListener listener) { mViewPagerPageChangeListener = listener; } | /**
* Set the {@link ViewPager.OnPageChangeListener}. When using {@link SlidingTabLayout} you are
* required to set any {@link ViewPager.OnPageChangeListener} through this method. This is so
* that the layout can update it's scroll position correctly.
*
* @see ViewPager#setOnPageChangeListener(ViewPager.OnPageChangeListener)
*/ | Set the <code>ViewPager.OnPageChangeListener</code>. When using <code>SlidingTabLayout</code> you are required to set any <code>ViewPager.OnPageChangeListener</code> through this method. This is so that the layout can update it's scroll position correctly | setOnPageChangeListener | {
"repo_name": "shivamsriva31093/MovieFinder",
"path": "app/src/main/java/task/application/com/colette/ui/utility/SlidingTabLayout.java",
"license": "apache-2.0",
"size": 11713
} | [
"android.support.v4.view.ViewPager"
] | import android.support.v4.view.ViewPager; | import android.support.v4.view.*; | [
"android.support"
] | android.support; | 2,466,470 |
public AttributeDefRefImpl makeAttributeDefRef(String name, Object value, Location location) {
AttributeDefRefImpl.Builder atBuilder = new AttributeDefRefImpl.Builder();
atBuilder.setDescriptor(DefDescriptorImpl.getInstance(name == null ? defaultAttributeName : name,
AttributeDef.class));
atBuilder.setLocation((location == null) ? getLocation() : location);
atBuilder.setValue((value == null) ? defaultAttributeValue : value);
return atBuilder.build();
} | AttributeDefRefImpl function(String name, Object value, Location location) { AttributeDefRefImpl.Builder atBuilder = new AttributeDefRefImpl.Builder(); atBuilder.setDescriptor(DefDescriptorImpl.getInstance(name == null ? defaultAttributeName : name, AttributeDef.class)); atBuilder.setLocation((location == null) ? getLocation() : location); atBuilder.setValue((value == null) ? defaultAttributeValue : value); return atBuilder.build(); } | /**
* A null parameter indicates you don't care what the value is, and thus it
* replaces the parameter with a default object. If you want null values for
* the parameter, you have to call the objects constructor directly.
*/ | A null parameter indicates you don't care what the value is, and thus it replaces the parameter with a default object. If you want null values for the parameter, you have to call the objects constructor directly | makeAttributeDefRef | {
"repo_name": "badlogicmanpreet/aura",
"path": "aura-impl/src/test/java/org/auraframework/impl/test/util/AuraImplUnitTestingUtil.java",
"license": "apache-2.0",
"size": 39548
} | [
"org.auraframework.def.AttributeDef",
"org.auraframework.impl.root.AttributeDefRefImpl",
"org.auraframework.impl.root.component.BaseComponentDefImpl",
"org.auraframework.impl.system.DefDescriptorImpl",
"org.auraframework.system.Location"
] | import org.auraframework.def.AttributeDef; import org.auraframework.impl.root.AttributeDefRefImpl; import org.auraframework.impl.root.component.BaseComponentDefImpl; import org.auraframework.impl.system.DefDescriptorImpl; import org.auraframework.system.Location; | import org.auraframework.def.*; import org.auraframework.impl.root.*; import org.auraframework.impl.root.component.*; import org.auraframework.impl.system.*; import org.auraframework.system.*; | [
"org.auraframework.def",
"org.auraframework.impl",
"org.auraframework.system"
] | org.auraframework.def; org.auraframework.impl; org.auraframework.system; | 2,275,752 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.