method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
public Rectangle2D getSensitiveBounds() {
if (source == null)
return null;
return source.getSensitiveBounds();
}
|
Rectangle2D function() { if (source == null) return null; return source.getSensitiveBounds(); }
|
/**
* Returns the bounds of the sensitive area covered by this node,
* This includes the stroked area but does not include the effects
* of clipping, masking or filtering.
*/
|
Returns the bounds of the sensitive area covered by this node, This includes the stroked area but does not include the effects of clipping, masking or filtering
|
getSensitiveBounds
|
{
"repo_name": "sflyphotobooks/crp-batik",
"path": "sources/org/apache/batik/gvt/ProxyGraphicsNode.java",
"license": "apache-2.0",
"size": 4832
}
|
[
"java.awt.geom.Rectangle2D"
] |
import java.awt.geom.Rectangle2D;
|
import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 582,314
|
private void warnAboutNamespaceRedefinition(Name nameObj, Ref ref) {
compiler.report(
JSError.make(ref.getSourceName(), ref.node,
NAMESPACE_REDEFINED_WARNING, nameObj.getFullName()));
}
/**
* Flattens all references to collapsible properties of a global name except
* their initial definitions. Recurses on subnames.
*
* @param n An object representing a global name
* @param alias The flattened name for {@code n}
|
void function(Name nameObj, Ref ref) { compiler.report( JSError.make(ref.getSourceName(), ref.node, NAMESPACE_REDEFINED_WARNING, nameObj.getFullName())); } /** * Flattens all references to collapsible properties of a global name except * their initial definitions. Recurses on subnames. * * @param n An object representing a global name * @param alias The flattened name for {@code n}
|
/**
* Reports a warning because a namespace was redefined.
*
* @param nameObj A namespace that is being redefined
* @param ref The reference that set the namespace
*/
|
Reports a warning because a namespace was redefined
|
warnAboutNamespaceRedefinition
|
{
"repo_name": "bramstein/closure-compiler-inline",
"path": "src/com/google/javascript/jscomp/CollapseProperties.java",
"license": "apache-2.0",
"size": 34769
}
|
[
"com.google.javascript.jscomp.GlobalNamespace"
] |
import com.google.javascript.jscomp.GlobalNamespace;
|
import com.google.javascript.jscomp.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 1,740,063
|
@Test
public void testModelCollectionCopy() throws Exception {
Logger.getLogger(getClass()).debug("TEST " + name.getMethodName());
DescriptorRelationship rel = new DescriptorRelationshipJpa();
ProxyTester tester = new ProxyTester(rel);
tester.proxy(Map.class, 1, map1);
rel = (DescriptorRelationship) tester.createObject(1);
ProxyTester tester2 = new ProxyTester(new DescriptorJpa());
tester.proxy(Map.class, 1, map1);
tester.proxy(Map.class, 2, map2);
Descriptor fromDescriptor = (Descriptor) tester2.createObject(1);
Descriptor toDescriptor = (Descriptor) tester2.createObject(2);
ProxyTester tester3 = new ProxyTester(new AttributeJpa());
Attribute att = (Attribute) tester3.createObject(1);
rel.setFrom(fromDescriptor);
rel.setTo(toDescriptor);
rel.getAttributes().add(att);
DescriptorRelationship rel2 = new DescriptorRelationshipJpa(rel, false);
assertEquals(0, rel2.getAttributes().size());
DescriptorRelationship rel3 = new DescriptorRelationshipJpa(rel, true);
assertEquals(1, rel3.getAttributes().size());
assertEquals(att, rel3.getAttributes().iterator().next());
}
|
void function() throws Exception { Logger.getLogger(getClass()).debug(STR + name.getMethodName()); DescriptorRelationship rel = new DescriptorRelationshipJpa(); ProxyTester tester = new ProxyTester(rel); tester.proxy(Map.class, 1, map1); rel = (DescriptorRelationship) tester.createObject(1); ProxyTester tester2 = new ProxyTester(new DescriptorJpa()); tester.proxy(Map.class, 1, map1); tester.proxy(Map.class, 2, map2); Descriptor fromDescriptor = (Descriptor) tester2.createObject(1); Descriptor toDescriptor = (Descriptor) tester2.createObject(2); ProxyTester tester3 = new ProxyTester(new AttributeJpa()); Attribute att = (Attribute) tester3.createObject(1); rel.setFrom(fromDescriptor); rel.setTo(toDescriptor); rel.getAttributes().add(att); DescriptorRelationship rel2 = new DescriptorRelationshipJpa(rel, false); assertEquals(0, rel2.getAttributes().size()); DescriptorRelationship rel3 = new DescriptorRelationshipJpa(rel, true); assertEquals(1, rel3.getAttributes().size()); assertEquals(att, rel3.getAttributes().iterator().next()); }
|
/**
* Test deep copy constructor.
*
* @throws Exception the exception
*/
|
Test deep copy constructor
|
testModelCollectionCopy
|
{
"repo_name": "WestCoastInformatics/UMLS-Terminology-Server",
"path": "jpa-model/src/test/java/com/wci/umls/server/jpa/test/content/DescriptorRelationshipJpaUnitTest.java",
"license": "apache-2.0",
"size": 8805
}
|
[
"com.wci.umls.server.helpers.ProxyTester",
"com.wci.umls.server.jpa.content.AttributeJpa",
"com.wci.umls.server.jpa.content.DescriptorJpa",
"com.wci.umls.server.jpa.content.DescriptorRelationshipJpa",
"com.wci.umls.server.model.content.Attribute",
"com.wci.umls.server.model.content.Descriptor",
"com.wci.umls.server.model.content.DescriptorRelationship",
"java.util.Map",
"org.apache.log4j.Logger",
"org.junit.Assert"
] |
import com.wci.umls.server.helpers.ProxyTester; import com.wci.umls.server.jpa.content.AttributeJpa; import com.wci.umls.server.jpa.content.DescriptorJpa; import com.wci.umls.server.jpa.content.DescriptorRelationshipJpa; import com.wci.umls.server.model.content.Attribute; import com.wci.umls.server.model.content.Descriptor; import com.wci.umls.server.model.content.DescriptorRelationship; import java.util.Map; import org.apache.log4j.Logger; import org.junit.Assert;
|
import com.wci.umls.server.helpers.*; import com.wci.umls.server.jpa.content.*; import com.wci.umls.server.model.content.*; import java.util.*; import org.apache.log4j.*; import org.junit.*;
|
[
"com.wci.umls",
"java.util",
"org.apache.log4j",
"org.junit"
] |
com.wci.umls; java.util; org.apache.log4j; org.junit;
| 1,976,556
|
public void setBasedir(final File dir)
{
this.baseDir = dir;
}
/**
* {@inheritDoc}
|
void function(final File dir) { this.baseDir = dir; } /** * {@inheritDoc}
|
/**
* Sets the base directory of the unit test build.
*
* @param dir the base directory of the unit test build.
*/
|
Sets the base directory of the unit test build
|
setBasedir
|
{
"repo_name": "pombredanne/buildmetadata-maven-plugin",
"path": "src/test/java/com/redhat/rcm/maven/plugin/buildmetadata/stub/BuildMetaDataProjectStub.java",
"license": "apache-2.0",
"size": 3648
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,137,691
|
public void add(Action a, String text, Icon icon) {
add(new JButton(a), text, icon);
}
|
void function(Action a, String text, Icon icon) { add(new JButton(a), text, icon); }
|
/**
* Add an Action that will be added as a button to the toolbar. If the icon
* is not null it will be placed on the button.
*
* @param a
* The Action which to add to the ToolBar
* @param text
* The text to set as the button's label.
* If text is null, ToolBar button will get name from <code>a</code>
* @param icon
* The icon that will be placed on the button. If this is null
* then the icon defined in the action will be used.
*/
|
Add an Action that will be added as a button to the toolbar. If the icon is not null it will be placed on the button
|
add
|
{
"repo_name": "iyerdude/wabit",
"path": "src/main/java/ca/sqlpower/wabit/swingui/WabitToolBarBuilder.java",
"license": "gpl-3.0",
"size": 14770
}
|
[
"javax.swing.Action",
"javax.swing.Icon",
"javax.swing.JButton"
] |
import javax.swing.Action; import javax.swing.Icon; import javax.swing.JButton;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 2,137,627
|
@Test
public void testSuccessfulRequestContextWithThreadIsolatedSynchronousObservable() {
RequestContextTestResults results = testRequestContextOnSuccess(ExecutionIsolationStrategy.THREAD, Schedulers.immediate());
assertTrue(results.isContextInitialized.get());
assertTrue(results.originThread.get().getName().startsWith("hystrix-OWNER_ONE")); // thread isolated on a HystrixThreadPool
assertTrue(results.isContextInitializedObserveOn.get());
assertTrue(results.observeOnThread.get().getName().startsWith("hystrix-OWNER_ONE"));
// thread isolated
assertTrue(results.command.isExecutedInThread());
}
|
void function() { RequestContextTestResults results = testRequestContextOnSuccess(ExecutionIsolationStrategy.THREAD, Schedulers.immediate()); assertTrue(results.isContextInitialized.get()); assertTrue(results.originThread.get().getName().startsWith(STR)); assertTrue(results.isContextInitializedObserveOn.get()); assertTrue(results.observeOnThread.get().getName().startsWith(STR)); assertTrue(results.command.isExecutedInThread()); }
|
/**
* Synchronous Observable and thread isolation. Work done on [hystrix-OWNER_ONE] thread and then observed on [RxComputation]
*/
|
Synchronous Observable and thread isolation. Work done on [hystrix-OWNER_ONE] thread and then observed on [RxComputation]
|
testSuccessfulRequestContextWithThreadIsolatedSynchronousObservable
|
{
"repo_name": "sasrin/Hystrix",
"path": "hystrix-core/src/test/java/com/netflix/hystrix/HystrixObservableCommandTest.java",
"license": "apache-2.0",
"size": 272384
}
|
[
"com.netflix.hystrix.HystrixCommandProperties",
"org.junit.Assert"
] |
import com.netflix.hystrix.HystrixCommandProperties; import org.junit.Assert;
|
import com.netflix.hystrix.*; import org.junit.*;
|
[
"com.netflix.hystrix",
"org.junit"
] |
com.netflix.hystrix; org.junit;
| 2,069,791
|
public void clearSnapshot(String tag, String... keyspaces) throws IOException
{
ssProxy.clearSnapshot(tag, keyspaces);
}
|
void function(String tag, String... keyspaces) throws IOException { ssProxy.clearSnapshot(tag, keyspaces); }
|
/**
* Remove all the existing snapshots.
*/
|
Remove all the existing snapshots
|
clearSnapshot
|
{
"repo_name": "ollie314/cassandra",
"path": "src/java/org/apache/cassandra/tools/NodeProbe.java",
"license": "apache-2.0",
"size": 55695
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 81,474
|
protected Node getNodeByFragment(String frag, Element ref) {
return ref.getOwnerDocument().getElementById(frag);
}
|
Node function(String frag, Element ref) { return ref.getOwnerDocument().getElementById(frag); }
|
/**
* Returns the node referenced by the given fragment identifier.
* This is called when the whole URI just contains a fragment identifier
* and there is no XML Base URI in effect.
* @param frag the URI fragment
* @param ref the context element from which to resolve the URI fragment
*/
|
Returns the node referenced by the given fragment identifier. This is called when the whole URI just contains a fragment identifier and there is no XML Base URI in effect
|
getNodeByFragment
|
{
"repo_name": "Uni-Sol/batik",
"path": "sources/org/apache/batik/bridge/URIResolver.java",
"license": "apache-2.0",
"size": 5070
}
|
[
"org.w3c.dom.Element",
"org.w3c.dom.Node"
] |
import org.w3c.dom.Element; import org.w3c.dom.Node;
|
import org.w3c.dom.*;
|
[
"org.w3c.dom"
] |
org.w3c.dom;
| 678,557
|
private void submitForm() {
if (!validateWorkTimePeriod()) {
Toast.makeText(thisContext.getApplicationContext(), "Invalid workTimePeriod valid (8:30)", Toast.LENGTH_SHORT).show();
return;
}
if (!validateLongitude()) {
Toast.makeText(thisContext.getApplicationContext(), "Invalid Longitude valid (8.32450)", Toast.LENGTH_SHORT).show();
return;
}
if (!validateLatitude()) {
Toast.makeText(thisContext.getApplicationContext(), "Invalid Latitude valid (8.32450)", Toast.LENGTH_SHORT).show();
return;
}
Toast.makeText(thisContext.getApplicationContext(), "Everything is valid and saved", Toast.LENGTH_SHORT).show();
}
|
void function() { if (!validateWorkTimePeriod()) { Toast.makeText(thisContext.getApplicationContext(), STR, Toast.LENGTH_SHORT).show(); return; } if (!validateLongitude()) { Toast.makeText(thisContext.getApplicationContext(), STR, Toast.LENGTH_SHORT).show(); return; } if (!validateLatitude()) { Toast.makeText(thisContext.getApplicationContext(), STR, Toast.LENGTH_SHORT).show(); return; } Toast.makeText(thisContext.getApplicationContext(), STR, Toast.LENGTH_SHORT).show(); }
|
/**
* Validating form
*/
|
Validating form
|
submitForm
|
{
"repo_name": "AdamLuptak/Time_MNA",
"path": "Workingtimemanager/app/src/main/java/com/adam/sk/workingtimemanager/Setup.java",
"license": "mit",
"size": 13990
}
|
[
"android.widget.Toast"
] |
import android.widget.Toast;
|
import android.widget.*;
|
[
"android.widget"
] |
android.widget;
| 2,461,494
|
@Test
public void listNoteOccurrencesTest() throws ApiException {
String name = null;
String filter = null;
Integer pageSize = null;
String pageToken = null;
ApiListNoteOccurrencesResponse response = api.listNoteOccurrences(name, filter, pageSize, pageToken);
// TODO: test validations
}
|
void function() throws ApiException { String name = null; String filter = null; Integer pageSize = null; String pageToken = null; ApiListNoteOccurrencesResponse response = api.listNoteOccurrences(name, filter, pageSize, pageToken); }
|
/**
* Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects.
*
*
*
* @throws ApiException
* if the Api call fails
*/
|
Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects
|
listNoteOccurrencesTest
|
{
"repo_name": "grafeas/client-java",
"path": "src/test/java/io/grafeas/api/GrafeasApiTest.java",
"license": "apache-2.0",
"size": 4644
}
|
[
"io.grafeas.ApiException",
"io.grafeas.model.ApiListNoteOccurrencesResponse"
] |
import io.grafeas.ApiException; import io.grafeas.model.ApiListNoteOccurrencesResponse;
|
import io.grafeas.*; import io.grafeas.model.*;
|
[
"io.grafeas",
"io.grafeas.model"
] |
io.grafeas; io.grafeas.model;
| 537,654
|
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<WorkbookInner> listByResourceGroup(String resourceGroupName, CategoryType category) {
final List<String> tags = null;
final String sourceId = null;
final Boolean canFetchContent = null;
return new PagedIterable<>(
listByResourceGroupAsync(resourceGroupName, category, tags, sourceId, canFetchContent));
}
|
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<WorkbookInner> function(String resourceGroupName, CategoryType category) { final List<String> tags = null; final String sourceId = null; final Boolean canFetchContent = null; return new PagedIterable<>( listByResourceGroupAsync(resourceGroupName, category, tags, sourceId, canFetchContent)); }
|
/**
* Get all Workbooks defined within a specified resource group and category.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param category Category of workbook to return.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws WorkbookErrorDefinitionException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all Workbooks defined within a specified resource group and category.
*/
|
Get all Workbooks defined within a specified resource group and category
|
listByResourceGroup
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/applicationinsights/azure-resourcemanager-applicationinsights/src/main/java/com/azure/resourcemanager/applicationinsights/implementation/WorkbooksClientImpl.java",
"license": "mit",
"size": 99702
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedIterable",
"com.azure.resourcemanager.applicationinsights.fluent.models.WorkbookInner",
"com.azure.resourcemanager.applicationinsights.models.CategoryType",
"java.util.List"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.resourcemanager.applicationinsights.fluent.models.WorkbookInner; import com.azure.resourcemanager.applicationinsights.models.CategoryType; import java.util.List;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.applicationinsights.fluent.models.*; import com.azure.resourcemanager.applicationinsights.models.*; import java.util.*;
|
[
"com.azure.core",
"com.azure.resourcemanager",
"java.util"
] |
com.azure.core; com.azure.resourcemanager; java.util;
| 2,385,204
|
private void getTriggeredIds (CallbackContext command) {
List<Integer> ids = getNotificationMgr().getIdsByType(
Notification.Type.TRIGGERED);
command.success(new JSONArray(ids));
}
|
void function (CallbackContext command) { List<Integer> ids = getNotificationMgr().getIdsByType( Notification.Type.TRIGGERED); command.success(new JSONArray(ids)); }
|
/**
* Set of IDs from all triggered notifications.
*
* @param command
* The callback context used when calling back into JavaScript.
*/
|
Set of IDs from all triggered notifications
|
getTriggeredIds
|
{
"repo_name": "sdharnasi/LocalNotification",
"path": "src/android/LocalNotification.java",
"license": "apache-2.0",
"size": 18280
}
|
[
"de.appplant.cordova.plugin.notification.Notification",
"java.util.List",
"org.apache.cordova.CallbackContext",
"org.json.JSONArray"
] |
import de.appplant.cordova.plugin.notification.Notification; import java.util.List; import org.apache.cordova.CallbackContext; import org.json.JSONArray;
|
import de.appplant.cordova.plugin.notification.*; import java.util.*; import org.apache.cordova.*; import org.json.*;
|
[
"de.appplant.cordova",
"java.util",
"org.apache.cordova",
"org.json"
] |
de.appplant.cordova; java.util; org.apache.cordova; org.json;
| 1,695,525
|
public Any get_slot(int slot_id) throws InvalidSlot, BAD_INV_ORDER
{
try
{
return get_slots() [ slot_id ];
}
catch (ArrayIndexOutOfBoundsException e)
{
throw new InvalidSlot("Slot " + slot_id);
}
}
|
Any function(int slot_id) throws InvalidSlot, BAD_INV_ORDER { try { return get_slots() [ slot_id ]; } catch (ArrayIndexOutOfBoundsException e) { throw new InvalidSlot(STR + slot_id); } }
|
/**
* Get value for the slot with the given id. If the array of Currents has not
* been yet allocated for the current thread, it is allocated during the
* invocation of this method.
*/
|
Get value for the slot with the given id. If the array of Currents has not been yet allocated for the current thread, it is allocated during the invocation of this method
|
get_slot
|
{
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/gnu/CORBA/Interceptor/gnuIcCurrent.java",
"license": "bsd-3-clause",
"size": 6974
}
|
[
"org.omg.CORBA",
"org.omg.PortableInterceptor"
] |
import org.omg.CORBA; import org.omg.PortableInterceptor;
|
import org.omg.*;
|
[
"org.omg"
] |
org.omg;
| 2,356,074
|
private Node parseUnionTypeWithAlternate(JsDocToken token, Node alternate) {
Node union = newNode(Token.PIPE);
if (alternate != null) {
union.addChildToBack(alternate);
}
Node expr = null;
do {
if (expr != null) {
skipEOLs();
token = next();
Preconditions.checkState(token == JsDocToken.PIPE);
skipEOLs();
token = next();
}
expr = parseTypeExpression(token);
if (expr == null) {
return null;
}
union.addChildToBack(expr);
} while (match(JsDocToken.PIPE));
if (alternate == null) {
skipEOLs();
if (!match(JsDocToken.RIGHT_PAREN)) {
return reportTypeSyntaxWarning("msg.jsdoc.missing.rp");
}
next();
}
if (union.getChildCount() == 1) {
Node firstChild = union.getFirstChild();
union.removeChild(firstChild);
return firstChild;
}
return union;
}
|
Node function(JsDocToken token, Node alternate) { Node union = newNode(Token.PIPE); if (alternate != null) { union.addChildToBack(alternate); } Node expr = null; do { if (expr != null) { skipEOLs(); token = next(); Preconditions.checkState(token == JsDocToken.PIPE); skipEOLs(); token = next(); } expr = parseTypeExpression(token); if (expr == null) { return null; } union.addChildToBack(expr); } while (match(JsDocToken.PIPE)); if (alternate == null) { skipEOLs(); if (!match(JsDocToken.RIGHT_PAREN)) { return reportTypeSyntaxWarning(STR); } next(); } if (union.getChildCount() == 1) { Node firstChild = union.getFirstChild(); union.removeChild(firstChild); return firstChild; } return union; }
|
/**
* Create a new union type, with an alternate that has already been
* parsed. The alternate may be null.
*/
|
Create a new union type, with an alternate that has already been parsed. The alternate may be null
|
parseUnionTypeWithAlternate
|
{
"repo_name": "Medium/closure-compiler",
"path": "src/com/google/javascript/jscomp/parsing/JsDocInfoParser.java",
"license": "apache-2.0",
"size": 85063
}
|
[
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.Token"
] |
import com.google.common.base.Preconditions; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token;
|
import com.google.common.base.*; import com.google.javascript.rhino.*;
|
[
"com.google.common",
"com.google.javascript"
] |
com.google.common; com.google.javascript;
| 2,253,444
|
Revision commit(CommitParams params) throws GitException;
/**
* Show diff between commits.
*
* @param params diff params
* @return diff page. Diff info can be serialized to stream by using method {@link
* DiffPage#writeTo(java.io.OutputStream)}
|
Revision commit(CommitParams params) throws GitException; /** * Show diff between commits. * * @param params diff params * @return diff page. Diff info can be serialized to stream by using method { * DiffPage#writeTo(java.io.OutputStream)}
|
/**
* Commit current state of index in new commit.
*
* @param params commit params
* @return new commit
* @throws GitException if any error occurs
* @see CommitParams
*/
|
Commit current state of index in new commit
|
commit
|
{
"repo_name": "TypeFox/che",
"path": "wsagent/che-core-api-git/src/main/java/org/eclipse/che/api/git/GitConnection.java",
"license": "epl-1.0",
"size": 13456
}
|
[
"org.eclipse.che.api.git.exception.GitException",
"org.eclipse.che.api.git.params.CommitParams",
"org.eclipse.che.api.git.shared.Revision"
] |
import org.eclipse.che.api.git.exception.GitException; import org.eclipse.che.api.git.params.CommitParams; import org.eclipse.che.api.git.shared.Revision;
|
import org.eclipse.che.api.git.exception.*; import org.eclipse.che.api.git.params.*; import org.eclipse.che.api.git.shared.*;
|
[
"org.eclipse.che"
] |
org.eclipse.che;
| 1,420,767
|
protected void genAlt(Alternative alt, AlternativeBlock blk) {
// Save the AST generation state, and set it to that of the alt
boolean savegenAST = genAST;
genAST = genAST && alt.getAutoGen();
boolean oldsaveTest = saveText;
saveText = saveText && alt.getAutoGen();
// Reset the variable name map for the alternative
Hashtable saveMap = treeVariableMap;
treeVariableMap = new Hashtable();
// Generate try block around the alt for error handling
if (alt.exceptionSpec != null) {
println("try:");
tabs++;
}
println("pass"); // make sure that always something gets generated ..
AlternativeElement elem = alt.head;
while (!(elem instanceof BlockEndElement)) {
elem.generate(); // alt can begin with anything. Ask target to gen.
elem = elem.next;
}
if (genAST) {
if (blk instanceof RuleBlock) {
// Set the AST return value for the rule
RuleBlock rblk = (RuleBlock)blk;
if (grammar.hasSyntacticPredicate) {
}
println(rblk.getRuleName() + "_AST = currentAST.root");
if (grammar.hasSyntacticPredicate) {
}
}
else if (blk.getLabel() != null) {
antlrTool.warning(
"Labeled subrules not yet supported",
grammar.getFilename(), blk.getLine(), blk.getColumn());
}
}
if (alt.exceptionSpec != null) {
tabs--;
genErrorHandler(alt.exceptionSpec);
}
genAST = savegenAST;
saveText = oldsaveTest;
treeVariableMap = saveMap;
}
|
void function(Alternative alt, AlternativeBlock blk) { boolean savegenAST = genAST; genAST = genAST && alt.getAutoGen(); boolean oldsaveTest = saveText; saveText = saveText && alt.getAutoGen(); Hashtable saveMap = treeVariableMap; treeVariableMap = new Hashtable(); if (alt.exceptionSpec != null) { println("try:"); tabs++; } println("pass"); AlternativeElement elem = alt.head; while (!(elem instanceof BlockEndElement)) { elem.generate(); elem = elem.next; } if (genAST) { if (blk instanceof RuleBlock) { RuleBlock rblk = (RuleBlock)blk; if (grammar.hasSyntacticPredicate) { } println(rblk.getRuleName() + STR); if (grammar.hasSyntacticPredicate) { } } else if (blk.getLabel() != null) { antlrTool.warning( STR, grammar.getFilename(), blk.getLine(), blk.getColumn()); } } if (alt.exceptionSpec != null) { tabs--; genErrorHandler(alt.exceptionSpec); } genAST = savegenAST; saveText = oldsaveTest; treeVariableMap = saveMap; }
|
/** Generate an alternative.
* @param alt The alternative to generate
* @param blk The block to which the alternative belongs
*/
|
Generate an alternative
|
genAlt
|
{
"repo_name": "bamboo/boo",
"path": "lib/antlr-2.7.5/antlr/PythonCodeGenerator.java",
"license": "bsd-3-clause",
"size": 115511
}
|
[
"java.util.Hashtable"
] |
import java.util.Hashtable;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,707,835
|
public WebServer getWebServer() {
return this.webServer;
}
public static class ExistingWebApplicationScopes {
private static final Set<String> SCOPES;
static {
Set<String> scopes = new LinkedHashSet<>();
scopes.add(WebApplicationContext.SCOPE_REQUEST);
scopes.add(WebApplicationContext.SCOPE_SESSION);
SCOPES = Collections.unmodifiableSet(scopes);
}
private final ConfigurableListableBeanFactory beanFactory;
private final Map<String, Scope> scopes = new HashMap<>();
public ExistingWebApplicationScopes(ConfigurableListableBeanFactory beanFactory) {
this.beanFactory = beanFactory;
for (String scopeName : SCOPES) {
Scope scope = beanFactory.getRegisteredScope(scopeName);
if (scope != null) {
this.scopes.put(scopeName, scope);
}
}
}
|
WebServer function() { return this.webServer; } public static class ExistingWebApplicationScopes { private static final Set<String> SCOPES; static { Set<String> scopes = new LinkedHashSet<>(); scopes.add(WebApplicationContext.SCOPE_REQUEST); scopes.add(WebApplicationContext.SCOPE_SESSION); SCOPES = Collections.unmodifiableSet(scopes); } private final ConfigurableListableBeanFactory beanFactory; private final Map<String, Scope> scopes = new HashMap<>(); public ExistingWebApplicationScopes(ConfigurableListableBeanFactory beanFactory) { this.beanFactory = beanFactory; for (String scopeName : SCOPES) { Scope scope = beanFactory.getRegisteredScope(scopeName); if (scope != null) { this.scopes.put(scopeName, scope); } } }
|
/**
* Returns the {@link WebServer} that was created by the context or {@code null} if
* the server has not yet been created.
* @return the embedded web server
*/
|
Returns the <code>WebServer</code> that was created by the context or null if the server has not yet been created
|
getWebServer
|
{
"repo_name": "ihoneymon/spring-boot",
"path": "spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/context/ServletWebServerApplicationContext.java",
"license": "apache-2.0",
"size": 13842
}
|
[
"java.util.Collections",
"java.util.HashMap",
"java.util.LinkedHashSet",
"java.util.Map",
"java.util.Set",
"org.springframework.beans.factory.config.ConfigurableListableBeanFactory",
"org.springframework.beans.factory.config.Scope",
"org.springframework.boot.web.server.WebServer",
"org.springframework.web.context.WebApplicationContext"
] |
import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.Scope; import org.springframework.boot.web.server.WebServer; import org.springframework.web.context.WebApplicationContext;
|
import java.util.*; import org.springframework.beans.factory.config.*; import org.springframework.boot.web.server.*; import org.springframework.web.context.*;
|
[
"java.util",
"org.springframework.beans",
"org.springframework.boot",
"org.springframework.web"
] |
java.util; org.springframework.beans; org.springframework.boot; org.springframework.web;
| 2,022,150
|
public void putAll(Map<? extends K, ? extends V> m) {
for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
put(e.getKey(), e.getValue());
}
|
void function(Map<? extends K, ? extends V> m) { for (Map.Entry<? extends K, ? extends V> e : m.entrySet()) put(e.getKey(), e.getValue()); }
|
/**
* Copies all of the mappings from the specified map to this one.
* These mappings replace any mappings that this map had for any of the
* keys currently in the specified map.
*
* @param m mappings to be stored in this map
*/
|
Copies all of the mappings from the specified map to this one. These mappings replace any mappings that this map had for any of the keys currently in the specified map
|
putAll
|
{
"repo_name": "tootedom/concurrent-reflect-proxy",
"path": "src/main/java/org/greencheek/concurrent/ConcurrentWeakHashMap.java",
"license": "gpl-3.0",
"size": 53928
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,620,354
|
public Model1SvcResource getAddressedResource() throws Exception {
Model1SvcResource thisResource;
thisResource = (Model1SvcResource) ResourceContext.getResourceContext().getResource();
return thisResource;
}
|
Model1SvcResource function() throws Exception { Model1SvcResource thisResource; thisResource = (Model1SvcResource) ResourceContext.getResourceContext().getResource(); return thisResource; }
|
/**
* Get the resouce that is being addressed in this current context
*/
|
Get the resouce that is being addressed in this current context
|
getAddressedResource
|
{
"repo_name": "NCIP/cagrid",
"path": "cagrid/Software/portal/cagrid-portal/exp/model1-svc/src/model1/service/globus/resource/Model1SvcResourceHome.java",
"license": "bsd-3-clause",
"size": 2943
}
|
[
"org.globus.wsrf.ResourceContext"
] |
import org.globus.wsrf.ResourceContext;
|
import org.globus.wsrf.*;
|
[
"org.globus.wsrf"
] |
org.globus.wsrf;
| 2,305,036
|
public static void makeColumnFamilyDirs(FileSystem fs, Path tabledir,
final HRegionInfo hri, byte [] colFamily)
throws IOException {
Path dir = Store.getStoreHomedir(tabledir, hri.getEncodedName(), colFamily);
if (!fs.mkdirs(dir)) {
LOG.warn("Failed to create " + dir);
}
}
|
static void function(FileSystem fs, Path tabledir, final HRegionInfo hri, byte [] colFamily) throws IOException { Path dir = Store.getStoreHomedir(tabledir, hri.getEncodedName(), colFamily); if (!fs.mkdirs(dir)) { LOG.warn(STR + dir); } }
|
/**
* Make the directories for a specific column family
*
* @param fs the file system
* @param tabledir base directory where region will live (usually the table dir)
* @param hri
* @param colFamily the column family
* @throws IOException
*/
|
Make the directories for a specific column family
|
makeColumnFamilyDirs
|
{
"repo_name": "ay65535/hbase-0.94.0",
"path": "src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java",
"license": "apache-2.0",
"size": 178035
}
|
[
"java.io.IOException",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.HRegionInfo"
] |
import java.io.IOException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo;
|
import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 153,007
|
@Override
public void tearDown() throws Exception {
m_config.shutDown();
for (final Client c : m_clients) {
c.close();
}
synchronized (m_clientChannels) {
for (final SocketChannel sc : m_clientChannels) {
try {
ConnectionUtil.closeConnection(sc);
} catch (final IOException e) {
e.printStackTrace();
}
}
m_clientChannels.clear();
}
m_clients.clear();
}
|
void function() throws Exception { m_config.shutDown(); for (final Client c : m_clients) { c.close(); } synchronized (m_clientChannels) { for (final SocketChannel sc : m_clientChannels) { try { ConnectionUtil.closeConnection(sc); } catch (final IOException e) { e.printStackTrace(); } } m_clientChannels.clear(); } m_clients.clear(); }
|
/**
* JUnit special method called to shutdown the test. This instance will
* stop the VoltDB server using the VoltServerConfig instance provided.
*/
|
JUnit special method called to shutdown the test. This instance will stop the VoltDB server using the VoltServerConfig instance provided
|
tearDown
|
{
"repo_name": "ifcharming/original2.0",
"path": "tests/frontend/org/voltdb/regressionsuites/RegressionSuite.java",
"license": "gpl-3.0",
"size": 6921
}
|
[
"java.io.IOException",
"java.nio.channels.SocketChannel",
"org.voltdb.client.Client",
"org.voltdb.client.ConnectionUtil"
] |
import java.io.IOException; import java.nio.channels.SocketChannel; import org.voltdb.client.Client; import org.voltdb.client.ConnectionUtil;
|
import java.io.*; import java.nio.channels.*; import org.voltdb.client.*;
|
[
"java.io",
"java.nio",
"org.voltdb.client"
] |
java.io; java.nio; org.voltdb.client;
| 2,265,238
|
public void gettingImportance(Map<URI, Importance> uris);
}
|
void function(Map<URI, Importance> uris); }
|
/**
* This method is called whenever the importance of data is requested.
*
* @param uris
* contains the determined {@link Importance}s. May be
* altered if wanted.
*/
|
This method is called whenever the importance of data is requested
|
gettingImportance
|
{
"repo_name": "bkahlert/api-usability-analyzer",
"path": "de.fu_berlin.imp.apiua.core/src/de/fu_berlin/imp/apiua/core/services/IImportanceService.java",
"license": "mit",
"size": 2728
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,351,220
|
public Histogram<Double, int[]> createHistogram ()
{
// create the histogram map
Histogram<Double, int[]> histogram = new Histogram<> ();
histogram.setAcceptableRange (0.0, 1.0e50);
// fill the histogram map
for (IntArray arrParams : m_mapCachedExecutions.keySet ())
{
// convert to actual parameters
int[] rgActualParams = new int[arrParams.length ()];
for (int i = 0; i < arrParams.length (); i++)
rgActualParams[i] = getParameterSets ()[i][arrParams.get (i)];
double fVal = m_mapCachedExecutions.get (arrParams).getExecutionTime ();
if (fVal != Double.MAX_VALUE)
histogram.addSample (fVal, rgActualParams);
}
return histogram;
}
|
Histogram<Double, int[]> function () { Histogram<Double, int[]> histogram = new Histogram<> (); histogram.setAcceptableRange (0.0, 1.0e50); for (IntArray arrParams : m_mapCachedExecutions.keySet ()) { int[] rgActualParams = new int[arrParams.length ()]; for (int i = 0; i < arrParams.length (); i++) rgActualParams[i] = getParameterSets ()[i][arrParams.get (i)]; double fVal = m_mapCachedExecutions.get (arrParams).getExecutionTime (); if (fVal != Double.MAX_VALUE) histogram.addSample (fVal, rgActualParams); } return histogram; }
|
/**
* Returns an execution time histogram as a map data structure.
* @return
*/
|
Returns an execution time histogram as a map data structure
|
createHistogram
|
{
"repo_name": "intersense/patus-gw",
"path": "src/ch/unibas/cs/hpwc/patus/autotuner/AbstractRunExecutable.java",
"license": "lgpl-2.1",
"size": 7469
}
|
[
"ch.unibas.cs.hpwc.patus.util.IntArray"
] |
import ch.unibas.cs.hpwc.patus.util.IntArray;
|
import ch.unibas.cs.hpwc.patus.util.*;
|
[
"ch.unibas.cs"
] |
ch.unibas.cs;
| 128,727
|
protected void save(ModuleHandle moduleHandle) throws IOException {
os = new ByteArrayOutputStream();
if (moduleHandle != null)
moduleHandle.serialize(os);
os.close();
}
|
void function(ModuleHandle moduleHandle) throws IOException { os = new ByteArrayOutputStream(); if (moduleHandle != null) moduleHandle.serialize(os); os.close(); }
|
/**
* Eventually, this method will call
* {@link ReportDesignHandle#serialize(java.io.OutputStream)}to save the
* output file of some unit test.
*
* @param moduleHandle
* the module to save, either a report design or a library
* @throws IOException
* if error occurs while saving the file.
*/
|
Eventually, this method will call <code>ReportDesignHandle#serialize(java.io.OutputStream)</code>to save the output file of some unit test
|
save
|
{
"repo_name": "Charling-Huang/birt",
"path": "testsuites/org.eclipse.birt.report.tests.model/src/org/eclipse/birt/report/tests/model/BaseTestCase.java",
"license": "epl-1.0",
"size": 41475
}
|
[
"java.io.ByteArrayOutputStream",
"java.io.IOException",
"org.eclipse.birt.report.model.api.ModuleHandle"
] |
import java.io.ByteArrayOutputStream; import java.io.IOException; import org.eclipse.birt.report.model.api.ModuleHandle;
|
import java.io.*; import org.eclipse.birt.report.model.api.*;
|
[
"java.io",
"org.eclipse.birt"
] |
java.io; org.eclipse.birt;
| 220,336
|
protected void doExplicitGlyphLayout() {
this.gv.performDefaultLayout();
float baselineAscent
= vertical ?
(float) gv.getLogicalBounds().getWidth() :
(metrics.getAscent() + Math.abs(metrics.getDescent()));
int numGlyphs = gv.getNumGlyphs();
// System.out.println("NumGlyphs: " + numGlyphs);
float[] gp = gv.getGlyphPositions(0, numGlyphs+1, null);
float verticalFirstOffset = 0f;
float horizontalFirstOffset = 0f;
boolean glyphOrientationAuto = isGlyphOrientationAuto();
int glyphOrientationAngle = 0;
if (!glyphOrientationAuto) {
glyphOrientationAngle = getGlyphOrientationAngle();
}
int i=0;
int aciStart = aci.getBeginIndex();
int aciIndex = 0;
char ch = aci.first();
int runLimit = aciIndex+aciStart;
Float x=null, y=null, dx=null, dy=null, rotation=null;
Object baseline=null;
float shift_x_pos = 0;
float shift_y_pos = 0;
float curr_x_pos = (float)offset.getX();
float curr_y_pos = (float)offset.getY();
Point2D.Float pos = new Point2D.Float();
boolean hasArabicTransparent = false;
while (i < numGlyphs) {
//System.out.println("limit: " + runLimit + ", " + aciIndex);
if (aciIndex+aciStart >= runLimit) {
runLimit = aci.getRunLimit(runAtts);
x = (Float) aci.getAttribute(X);
y = (Float) aci.getAttribute(Y);
dx = (Float) aci.getAttribute(DX);
dy = (Float) aci.getAttribute(DY);
rotation = (Float) aci.getAttribute(ROTATION);
baseline = aci.getAttribute(BASELINE_SHIFT);
}
GVTGlyphMetrics gm = gv.getGlyphMetrics(i);
if (i==0) {
if (isVertical()) {
if (glyphOrientationAuto) {
if (isLatinChar(ch)) {
// it will be rotated 90
verticalFirstOffset = 0f;
} else {
// it won't be rotated
float advY = gm.getVerticalAdvance();
float asc = metrics.getAscent();
float dsc = metrics.getDescent();
verticalFirstOffset = asc+(advY-(asc+dsc))/2;
}
} else {
if (glyphOrientationAngle == 0) {
float advY = gm.getVerticalAdvance();
float asc = metrics.getAscent();
float dsc = metrics.getDescent();
verticalFirstOffset = asc+(advY-(asc+dsc))/2;
} else {
// 90, 180, 270
verticalFirstOffset = 0f;
}
}
} else { // not vertical
if ((glyphOrientationAngle == 270)) {
horizontalFirstOffset =
(float)gm.getBounds2D().getHeight();
} else {
// 0, 90, 180
horizontalFirstOffset = 0;
}
}
} else { // not the first char
if (glyphOrientationAuto &&
(verticalFirstOffset == 0f) && !isLatinChar(ch)) {
float advY = gm.getVerticalAdvance();
float asc = metrics.getAscent();
float dsc = metrics.getDescent();
verticalFirstOffset = asc + (advY - (asc+dsc))/2;
}
}
// ox and oy are origin adjustments for each glyph,
// computed on the basis of baseline-shifts, etc.
float ox = 0f;
float oy = 0f;
float glyphOrientationRotation = 0f;
float glyphRotation = 0f;
if (ch != CharacterIterator.DONE) {
if (vertical) {
if (glyphOrientationAuto) {
if (isLatinChar(ch)) {
// If character is Latin, then rotate by
// 90 degrees
glyphOrientationRotation = (float) (Math.PI / 2f);
} else {
glyphOrientationRotation = 0f;
}
} else {
glyphOrientationRotation = (float)Math.toRadians(glyphOrientationAngle);
}
if (textPath != null) {
// if vertical and on a path, any x's are ignored
x = null;
}
} else {
glyphOrientationRotation = (float)Math.toRadians(glyphOrientationAngle);
if (textPath != null) {
// if horizontal and on a path, any y's are ignored
y = null;
}
}
// calculate the total rotation for this glyph
if (rotation == null || rotation.isNaN()) {
glyphRotation = glyphOrientationRotation;
} else {
glyphRotation = (rotation.floatValue() +
glyphOrientationRotation);
}
if ((x != null) && !x.isNaN()) {
if (i == 0)
shift_x_pos = (float)(x.floatValue()-offset.getX());
curr_x_pos = x.floatValue()-shift_x_pos;
}
if (dx != null && !dx.isNaN()) {
curr_x_pos += dx.floatValue();
}
if ((y != null) && !y.isNaN()) {
if (i == 0)
shift_y_pos = (float)(y.floatValue()-offset.getY());
curr_y_pos = y.floatValue()-shift_y_pos;
}
if (dy != null && !dy.isNaN()) {
curr_y_pos += dy.floatValue();
} else if (i > 0) {
curr_y_pos += gp[i*2 + 1]-gp[i*2 - 1];
}
float baselineAdjust = 0f;
if (baseline != null) {
if (baseline instanceof Integer) {
if (baseline==TextAttribute.SUPERSCRIPT_SUPER) {
baselineAdjust = baselineAscent*0.5f;
} else if (baseline==TextAttribute.SUPERSCRIPT_SUB) {
baselineAdjust = -baselineAscent*0.5f;
}
} else if (baseline instanceof Float) {
baselineAdjust = ((Float) baseline).floatValue();
}
if (vertical) {
ox = baselineAdjust;
} else {
oy = -baselineAdjust;
}
}
if (vertical) {
// offset due to rotation of first character
oy += verticalFirstOffset;
if (glyphOrientationAuto) {
if (isLatinChar(ch)) {
ox += metrics.getStrikethroughOffset();
} else {
Rectangle2D glyphBounds
= gv.getGlyphVisualBounds(i).getBounds2D();
ox -= (float)((glyphBounds.getMaxX() - gp[2*i]) -
glyphBounds.getWidth()/2);
}
} else {
// center the character if it's not auto orient
Rectangle2D glyphBounds
= gv.getGlyphVisualBounds(i).getBounds2D();
if (glyphOrientationAngle == 0) {
ox -= (float)((glyphBounds.getMaxX() - gp[2*i]) -
glyphBounds.getWidth()/2);
} else if (glyphOrientationAngle == 180) {
ox += (float)((glyphBounds.getMaxX() - gp[2*i]) -
glyphBounds.getWidth()/2);
} else if (glyphOrientationAngle == 90) {
ox += metrics.getStrikethroughOffset();
} else { // 270
ox -= metrics.getStrikethroughOffset();
}
}
} else {
ox += horizontalFirstOffset;
if (glyphOrientationAngle == 90) {
oy -= gm.getHorizontalAdvance();
} else if (glyphOrientationAngle == 180) {
oy -= metrics.getAscent();
}
}
}
// set the new glyph position
pos.x = curr_x_pos+ox;
pos.y = curr_y_pos+oy;
gv.setGlyphPosition(i, pos);
// calculate the position of the next glyph
if (ArabicTextHandler.arabicCharTransparent(ch)) {
hasArabicTransparent = true;
} else {
// Apply the advance if the current char is not transparent
if (vertical) {
float advanceY = 0;
if (glyphOrientationAuto) {
if (isLatinChar(ch)) {
advanceY = gm.getHorizontalAdvance();
} else {
advanceY = gm.getVerticalAdvance();
}
} else {
if ((glyphOrientationAngle == 0) ||
(glyphOrientationAngle == 180)) {
advanceY = gm.getVerticalAdvance();
} else if (glyphOrientationAngle == 90) {
advanceY = gm.getHorizontalAdvance();
} else { // 270
advanceY = gm.getHorizontalAdvance();
// need to translate so that the spacing
// between chars is correct
gv.setGlyphTransform
(i, AffineTransform.getTranslateInstance
(0, advanceY));
}
}
curr_y_pos += advanceY;
} else {
float advanceX = 0;
if (glyphOrientationAngle == 0) {
advanceX = gm.getHorizontalAdvance();
} else if (glyphOrientationAngle == 180) {
advanceX = gm.getHorizontalAdvance();
// need to translate so that the spacing
// between chars is correct
gv.setGlyphTransform
(i, AffineTransform.getTranslateInstance
(advanceX, 0));
} else {
// 90, 270
advanceX = gm.getVerticalAdvance();
}
curr_x_pos += advanceX;
}
}
// rotate the glyph
if (!epsEQ(glyphRotation,0)) {
AffineTransform glyphTransform = gv.getGlyphTransform(i);
if (glyphTransform == null) {
glyphTransform = new AffineTransform();
}
AffineTransform rotAt;
// Make the 90Deg rotations slightly 'snap to'.
// Also use explicit matrix to avoid round-off.
if (epsEQ(glyphRotation, Math.PI/2)) {
rotAt = new AffineTransform(0, 1, -1, 0, 0, 0);
} else if (epsEQ(glyphRotation, Math.PI)) {
rotAt = new AffineTransform(-1, 0, 0, -1, 0, 0);
} else if (epsEQ(glyphRotation, 3*Math.PI/2)) {
rotAt = new AffineTransform(0, -1, 1, 0, 0, 0);
} else {
rotAt = AffineTransform.getRotateInstance(glyphRotation);
}
glyphTransform.concatenate(rotAt);
gv.setGlyphTransform(i, glyphTransform);
}
aciIndex += gv.getCharacterCount(i,i);
if (aciIndex >= charMap.length)
aciIndex = charMap.length-1;
ch = aci.setIndex(aciIndex+aciStart);
i++;
}
// Update last glyph pos
pos.x = curr_x_pos;
pos.y = curr_y_pos;
gv.setGlyphPosition(i, pos);
advance = new Point2D.Float((float)(curr_x_pos - offset.getX()),
(float)(curr_y_pos - offset.getY()));
// Do a last pass positioning the transparent/mark glyphs on the
// base glyphs.
if (hasArabicTransparent) {
ch = aci.first();
aciIndex = 0;
i=0;
int transparentStart = -1;
while (i < numGlyphs) {
if (ArabicTextHandler.arabicCharTransparent(ch)) {
if (transparentStart == -1) transparentStart = i;
} else {
if (transparentStart != -1) {
Point2D loc = gv.getGlyphPosition(i);
GVTGlyphMetrics gm = gv.getGlyphMetrics(i);
int tyS=0, txS=0; // these never changed ?? todo
float advX=0, advY=0;
if (vertical) {
if (glyphOrientationAuto ||
(glyphOrientationAngle == 90))
advY = gm.getHorizontalAdvance();
else if (glyphOrientationAngle == 270)
advY = 0;
else if (glyphOrientationAngle == 0)
advX = gm.getHorizontalAdvance();
else // 180
advX = -gm.getHorizontalAdvance();
} else {
if (glyphOrientationAngle == 0)
advX = gm.getHorizontalAdvance();
else if (glyphOrientationAngle == 90)
advY = gm.getHorizontalAdvance();
else if (glyphOrientationAngle == 180)
advX = 0;
else // 270
advY = -gm.getHorizontalAdvance();
}
float baseX = (float)(loc.getX()+advX);
float baseY = (float)(loc.getY()+advY);
for (int j=transparentStart; j<i; j++) {
Point2D locT = gv.getGlyphPosition(j);
GVTGlyphMetrics gmT = gv.getGlyphMetrics(j);
float locX = (float)locT.getX();
float locY = (float)locT.getY();
float tx=0, ty=0;
float advT = gmT.getHorizontalAdvance();
if (vertical) {
if (glyphOrientationAuto ||
(glyphOrientationAngle == 90))
locY = baseY-advT;
else if (glyphOrientationAngle == 270)
locY = baseY+advT;
else if (glyphOrientationAngle == 0)
locX = baseX-advT;
else // 180deg
locX = baseX+advT;
} else {
if (glyphOrientationAngle == 0)
locX = baseX-advT;
else if (glyphOrientationAngle == 90)
locY = baseY-advT;
else if (glyphOrientationAngle == 180)
locX = baseX+advT;
else // 270
locY = baseY+advT;
}
locT = new Point2D.Double(locX, locY);
gv.setGlyphPosition(j, locT);
if ((txS != 0) || (tyS != 0)) { // because txS, tyS are set to 0 and not
AffineTransform at; // changed, this path is never used todo
at = AffineTransform.getTranslateInstance
(tx,ty);
at.concatenate(gv.getGlyphTransform(i));
gv.setGlyphTransform(i, at);
}
}
transparentStart = -1;
}
}
aciIndex += gv.getCharacterCount(i,i);
if (aciIndex >= charMap.length)
aciIndex = charMap.length-1;
ch = aci.setIndex(aciIndex+aciStart);
i++;
}
}
layoutApplied = true;
spacingApplied = false;
glyphAdvances = null;
pathApplied = false;
}
|
void function() { this.gv.performDefaultLayout(); float baselineAscent = vertical ? (float) gv.getLogicalBounds().getWidth() : (metrics.getAscent() + Math.abs(metrics.getDescent())); int numGlyphs = gv.getNumGlyphs(); float[] gp = gv.getGlyphPositions(0, numGlyphs+1, null); float verticalFirstOffset = 0f; float horizontalFirstOffset = 0f; boolean glyphOrientationAuto = isGlyphOrientationAuto(); int glyphOrientationAngle = 0; if (!glyphOrientationAuto) { glyphOrientationAngle = getGlyphOrientationAngle(); } int i=0; int aciStart = aci.getBeginIndex(); int aciIndex = 0; char ch = aci.first(); int runLimit = aciIndex+aciStart; Float x=null, y=null, dx=null, dy=null, rotation=null; Object baseline=null; float shift_x_pos = 0; float shift_y_pos = 0; float curr_x_pos = (float)offset.getX(); float curr_y_pos = (float)offset.getY(); Point2D.Float pos = new Point2D.Float(); boolean hasArabicTransparent = false; while (i < numGlyphs) { if (aciIndex+aciStart >= runLimit) { runLimit = aci.getRunLimit(runAtts); x = (Float) aci.getAttribute(X); y = (Float) aci.getAttribute(Y); dx = (Float) aci.getAttribute(DX); dy = (Float) aci.getAttribute(DY); rotation = (Float) aci.getAttribute(ROTATION); baseline = aci.getAttribute(BASELINE_SHIFT); } GVTGlyphMetrics gm = gv.getGlyphMetrics(i); if (i==0) { if (isVertical()) { if (glyphOrientationAuto) { if (isLatinChar(ch)) { verticalFirstOffset = 0f; } else { float advY = gm.getVerticalAdvance(); float asc = metrics.getAscent(); float dsc = metrics.getDescent(); verticalFirstOffset = asc+(advY-(asc+dsc))/2; } } else { if (glyphOrientationAngle == 0) { float advY = gm.getVerticalAdvance(); float asc = metrics.getAscent(); float dsc = metrics.getDescent(); verticalFirstOffset = asc+(advY-(asc+dsc))/2; } else { verticalFirstOffset = 0f; } } } else { if ((glyphOrientationAngle == 270)) { horizontalFirstOffset = (float)gm.getBounds2D().getHeight(); } else { horizontalFirstOffset = 0; } } } else { if (glyphOrientationAuto && (verticalFirstOffset 
== 0f) && !isLatinChar(ch)) { float advY = gm.getVerticalAdvance(); float asc = metrics.getAscent(); float dsc = metrics.getDescent(); verticalFirstOffset = asc + (advY - (asc+dsc))/2; } } float ox = 0f; float oy = 0f; float glyphOrientationRotation = 0f; float glyphRotation = 0f; if (ch != CharacterIterator.DONE) { if (vertical) { if (glyphOrientationAuto) { if (isLatinChar(ch)) { glyphOrientationRotation = (float) (Math.PI / 2f); } else { glyphOrientationRotation = 0f; } } else { glyphOrientationRotation = (float)Math.toRadians(glyphOrientationAngle); } if (textPath != null) { x = null; } } else { glyphOrientationRotation = (float)Math.toRadians(glyphOrientationAngle); if (textPath != null) { y = null; } } if (rotation == null rotation.isNaN()) { glyphRotation = glyphOrientationRotation; } else { glyphRotation = (rotation.floatValue() + glyphOrientationRotation); } if ((x != null) && !x.isNaN()) { if (i == 0) shift_x_pos = (float)(x.floatValue()-offset.getX()); curr_x_pos = x.floatValue()-shift_x_pos; } if (dx != null && !dx.isNaN()) { curr_x_pos += dx.floatValue(); } if ((y != null) && !y.isNaN()) { if (i == 0) shift_y_pos = (float)(y.floatValue()-offset.getY()); curr_y_pos = y.floatValue()-shift_y_pos; } if (dy != null && !dy.isNaN()) { curr_y_pos += dy.floatValue(); } else if (i > 0) { curr_y_pos += gp[i*2 + 1]-gp[i*2 - 1]; } float baselineAdjust = 0f; if (baseline != null) { if (baseline instanceof Integer) { if (baseline==TextAttribute.SUPERSCRIPT_SUPER) { baselineAdjust = baselineAscent*0.5f; } else if (baseline==TextAttribute.SUPERSCRIPT_SUB) { baselineAdjust = -baselineAscent*0.5f; } } else if (baseline instanceof Float) { baselineAdjust = ((Float) baseline).floatValue(); } if (vertical) { ox = baselineAdjust; } else { oy = -baselineAdjust; } } if (vertical) { oy += verticalFirstOffset; if (glyphOrientationAuto) { if (isLatinChar(ch)) { ox += metrics.getStrikethroughOffset(); } else { Rectangle2D glyphBounds = gv.getGlyphVisualBounds(i).getBounds2D(); ox 
-= (float)((glyphBounds.getMaxX() - gp[2*i]) - glyphBounds.getWidth()/2); } } else { Rectangle2D glyphBounds = gv.getGlyphVisualBounds(i).getBounds2D(); if (glyphOrientationAngle == 0) { ox -= (float)((glyphBounds.getMaxX() - gp[2*i]) - glyphBounds.getWidth()/2); } else if (glyphOrientationAngle == 180) { ox += (float)((glyphBounds.getMaxX() - gp[2*i]) - glyphBounds.getWidth()/2); } else if (glyphOrientationAngle == 90) { ox += metrics.getStrikethroughOffset(); } else { ox -= metrics.getStrikethroughOffset(); } } } else { ox += horizontalFirstOffset; if (glyphOrientationAngle == 90) { oy -= gm.getHorizontalAdvance(); } else if (glyphOrientationAngle == 180) { oy -= metrics.getAscent(); } } } pos.x = curr_x_pos+ox; pos.y = curr_y_pos+oy; gv.setGlyphPosition(i, pos); if (ArabicTextHandler.arabicCharTransparent(ch)) { hasArabicTransparent = true; } else { if (vertical) { float advanceY = 0; if (glyphOrientationAuto) { if (isLatinChar(ch)) { advanceY = gm.getHorizontalAdvance(); } else { advanceY = gm.getVerticalAdvance(); } } else { if ((glyphOrientationAngle == 0) (glyphOrientationAngle == 180)) { advanceY = gm.getVerticalAdvance(); } else if (glyphOrientationAngle == 90) { advanceY = gm.getHorizontalAdvance(); } else { advanceY = gm.getHorizontalAdvance(); gv.setGlyphTransform (i, AffineTransform.getTranslateInstance (0, advanceY)); } } curr_y_pos += advanceY; } else { float advanceX = 0; if (glyphOrientationAngle == 0) { advanceX = gm.getHorizontalAdvance(); } else if (glyphOrientationAngle == 180) { advanceX = gm.getHorizontalAdvance(); gv.setGlyphTransform (i, AffineTransform.getTranslateInstance (advanceX, 0)); } else { advanceX = gm.getVerticalAdvance(); } curr_x_pos += advanceX; } } if (!epsEQ(glyphRotation,0)) { AffineTransform glyphTransform = gv.getGlyphTransform(i); if (glyphTransform == null) { glyphTransform = new AffineTransform(); } AffineTransform rotAt; if (epsEQ(glyphRotation, Math.PI/2)) { rotAt = new AffineTransform(0, 1, -1, 0, 0, 0); } else if 
(epsEQ(glyphRotation, Math.PI)) { rotAt = new AffineTransform(-1, 0, 0, -1, 0, 0); } else if (epsEQ(glyphRotation, 3*Math.PI/2)) { rotAt = new AffineTransform(0, -1, 1, 0, 0, 0); } else { rotAt = AffineTransform.getRotateInstance(glyphRotation); } glyphTransform.concatenate(rotAt); gv.setGlyphTransform(i, glyphTransform); } aciIndex += gv.getCharacterCount(i,i); if (aciIndex >= charMap.length) aciIndex = charMap.length-1; ch = aci.setIndex(aciIndex+aciStart); i++; } pos.x = curr_x_pos; pos.y = curr_y_pos; gv.setGlyphPosition(i, pos); advance = new Point2D.Float((float)(curr_x_pos - offset.getX()), (float)(curr_y_pos - offset.getY())); if (hasArabicTransparent) { ch = aci.first(); aciIndex = 0; i=0; int transparentStart = -1; while (i < numGlyphs) { if (ArabicTextHandler.arabicCharTransparent(ch)) { if (transparentStart == -1) transparentStart = i; } else { if (transparentStart != -1) { Point2D loc = gv.getGlyphPosition(i); GVTGlyphMetrics gm = gv.getGlyphMetrics(i); int tyS=0, txS=0; float advX=0, advY=0; if (vertical) { if (glyphOrientationAuto (glyphOrientationAngle == 90)) advY = gm.getHorizontalAdvance(); else if (glyphOrientationAngle == 270) advY = 0; else if (glyphOrientationAngle == 0) advX = gm.getHorizontalAdvance(); else advX = -gm.getHorizontalAdvance(); } else { if (glyphOrientationAngle == 0) advX = gm.getHorizontalAdvance(); else if (glyphOrientationAngle == 90) advY = gm.getHorizontalAdvance(); else if (glyphOrientationAngle == 180) advX = 0; else advY = -gm.getHorizontalAdvance(); } float baseX = (float)(loc.getX()+advX); float baseY = (float)(loc.getY()+advY); for (int j=transparentStart; j<i; j++) { Point2D locT = gv.getGlyphPosition(j); GVTGlyphMetrics gmT = gv.getGlyphMetrics(j); float locX = (float)locT.getX(); float locY = (float)locT.getY(); float tx=0, ty=0; float advT = gmT.getHorizontalAdvance(); if (vertical) { if (glyphOrientationAuto (glyphOrientationAngle == 90)) locY = baseY-advT; else if (glyphOrientationAngle == 270) locY = 
baseY+advT; else if (glyphOrientationAngle == 0) locX = baseX-advT; else locX = baseX+advT; } else { if (glyphOrientationAngle == 0) locX = baseX-advT; else if (glyphOrientationAngle == 90) locY = baseY-advT; else if (glyphOrientationAngle == 180) locX = baseX+advT; else locY = baseY+advT; } locT = new Point2D.Double(locX, locY); gv.setGlyphPosition(j, locT); if ((txS != 0) (tyS != 0)) { AffineTransform at; at = AffineTransform.getTranslateInstance (tx,ty); at.concatenate(gv.getGlyphTransform(i)); gv.setGlyphTransform(i, at); } } transparentStart = -1; } } aciIndex += gv.getCharacterCount(i,i); if (aciIndex >= charMap.length) aciIndex = charMap.length-1; ch = aci.setIndex(aciIndex+aciStart); i++; } } layoutApplied = true; spacingApplied = false; glyphAdvances = null; pathApplied = false; }
|
/**
* Explicitly lays out each of the glyphs in the glyph
* vector. This will handle any glyph position adjustments such as
* dx, dy and baseline offsets. It will also handle vertical
* layouts.
*/
|
Explicitly lays out each of the glyphs in the glyph vector. This will handle any glyph position adjustments such as dx, dy and baseline offsets. It will also handle vertical layouts
|
doExplicitGlyphLayout
|
{
"repo_name": "Squeegee/batik",
"path": "sources/org/apache/batik/gvt/text/GlyphLayout.java",
"license": "apache-2.0",
"size": 79479
}
|
[
"java.awt.font.TextAttribute",
"java.awt.geom.AffineTransform",
"java.awt.geom.Point2D",
"java.awt.geom.Rectangle2D",
"java.text.CharacterIterator",
"org.apache.batik.gvt.font.GVTGlyphMetrics"
] |
import java.awt.font.TextAttribute; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.text.CharacterIterator; import org.apache.batik.gvt.font.GVTGlyphMetrics;
|
import java.awt.font.*; import java.awt.geom.*; import java.text.*; import org.apache.batik.gvt.font.*;
|
[
"java.awt",
"java.text",
"org.apache.batik"
] |
java.awt; java.text; org.apache.batik;
| 1,892,784
|
Page<Politico> search(String query, Pageable pageable);
|
Page<Politico> search(String query, Pageable pageable);
|
/**
* Search for the politico corresponding to the query.
*
* @param query the query of the search
*
* @param pageable the pagination information
* @return the list of entities
*/
|
Search for the politico corresponding to the query
|
search
|
{
"repo_name": "lucasa/Politicos",
"path": "site/src/main/java/org/politicos/service/PoliticoService.java",
"license": "mit",
"size": 1194
}
|
[
"org.politicos.domain.Politico",
"org.springframework.data.domain.Page",
"org.springframework.data.domain.Pageable"
] |
import org.politicos.domain.Politico; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable;
|
import org.politicos.domain.*; import org.springframework.data.domain.*;
|
[
"org.politicos.domain",
"org.springframework.data"
] |
org.politicos.domain; org.springframework.data;
| 1,671,481
|
public static boolean hasInternet(Activity a) {
boolean hasConnectedWifi = false;
boolean hasConnectedMobile = false;
ConnectivityManager cm = (ConnectivityManager) a
.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo[] netInfo = cm.getAllNetworkInfo();
for (NetworkInfo ni : netInfo) {
if (ni.getTypeName().equalsIgnoreCase("wifi"))
if (ni.isConnected())
hasConnectedWifi = true;
if (ni.getTypeName().equalsIgnoreCase("mobile"))
if (ni.isConnected())
hasConnectedMobile = true;
}
return hasConnectedWifi || hasConnectedMobile;
}
|
static boolean function(Activity a) { boolean hasConnectedWifi = false; boolean hasConnectedMobile = false; ConnectivityManager cm = (ConnectivityManager) a .getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo[] netInfo = cm.getAllNetworkInfo(); for (NetworkInfo ni : netInfo) { if (ni.getTypeName().equalsIgnoreCase("wifi")) if (ni.isConnected()) hasConnectedWifi = true; if (ni.getTypeName().equalsIgnoreCase(STR)) if (ni.isConnected()) hasConnectedMobile = true; } return hasConnectedWifi hasConnectedMobile; }
|
/**
* metodo que vaida que el telefono tenga internet
*
* @param a
* @return
*/
|
metodo que vaida que el telefono tenga internet
|
hasInternet
|
{
"repo_name": "citydevs/safebus",
"path": "SafeBus/src/com/bm/savebus/utilerias/Utils.java",
"license": "apache-2.0",
"size": 12951
}
|
[
"android.app.Activity",
"android.content.Context",
"android.net.ConnectivityManager",
"android.net.NetworkInfo"
] |
import android.app.Activity; import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo;
|
import android.app.*; import android.content.*; import android.net.*;
|
[
"android.app",
"android.content",
"android.net"
] |
android.app; android.content; android.net;
| 1,718,215
|
try {
StringTokenizer tokenizer = new StringTokenizer(identifier, ":");
tokenizer.nextToken();
tokenizer.nextToken();
return tokenizer.nextToken();
} catch (Exception e) {
return null;
}
}
|
try { StringTokenizer tokenizer = new StringTokenizer(identifier, ":"); tokenizer.nextToken(); tokenizer.nextToken(); return tokenizer.nextToken(); } catch (Exception e) { return null; } }
|
/**
* Utility method to parse the 'local identifier' from the OAI identifier
*
* @param identifier OAI identifier (e.g. oai:oaicat.oclc.org:ID/12345)
* @return local identifier (e.g. ID/12345).
*/
|
Utility method to parse the 'local identifier' from the OAI identifier
|
fromOAIIdentifier
|
{
"repo_name": "openpreserve/oaicat",
"path": "src/main/java/ORG/oclc/oai/server/catalog/FileRecordFactory.java",
"license": "apache-2.0",
"size": 5425
}
|
[
"java.util.StringTokenizer"
] |
import java.util.StringTokenizer;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 84,052
|
public void setPriorVersion(Ontology value) {
Base.set(this.model, this.getResource(), PRIORVERSION, value);
}
|
void function(Ontology value) { Base.set(this.model, this.getResource(), PRIORVERSION, value); }
|
/**
* Sets a value of property PriorVersion from an instance of Ontology
* First, all existing values are removed, then this value is added.
* Cardinality constraints are not checked, but this method exists only for properties with
* no minCardinality or minCardinality == 1.
* @param value the value to be added
*
* [Generated from RDFReactor template rule #set4dynamic]
*/
|
Sets a value of property PriorVersion from an instance of Ontology First, all existing values are removed, then this value is added. Cardinality constraints are not checked, but this method exists only for properties with no minCardinality or minCardinality == 1
|
setPriorVersion
|
{
"repo_name": "josectoledo/semweb4j",
"path": "org.semweb4j.rdfreactor.runtime/src/main/java/org/ontoware/rdfreactor/schema/owl/Ontology.java",
"license": "bsd-2-clause",
"size": 50761
}
|
[
"org.ontoware.rdfreactor.runtime.Base"
] |
import org.ontoware.rdfreactor.runtime.Base;
|
import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdfreactor"
] |
org.ontoware.rdfreactor;
| 84,581
|
public void parseVGWMsg(InputStream stream) { //throws IOException {
String pVgwId = "";
Vector<EnabledNodesListItem> confirmedEnabledNodesVec = new Vector<EnabledNodesListItem>();
long timestamp = 0;
long currentTimestamp = (new Date()).getTime();
XMLStreamReader2 sr = null;
try{
WstxInputFactory f = null;
SMInputCursor inputRootElement = null;
f = new WstxInputFactory();
f.configureForConvenience();
// Let's configure factory 'optimally'...
f.setProperty(XMLInputFactory.IS_COALESCING, Boolean.FALSE);
f.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.FALSE);
sr = (XMLStreamReader2)f.createXMLStreamReader(stream);
inputRootElement = SMInputFactory.rootElementCursor(sr);
// If we needed to store some information about preceding siblings,
// we should enable tracking. (we need it for mygetElementValueStaxMultiple method)
inputRootElement.setElementTracking(SMInputCursor.Tracking.PARENTS);
inputRootElement.getNext();
SMInputCursor childInElement = inputRootElement.childCursor();
while (childInElement.getNext() != null) {
if(!childInElement.getCurrEvent().hasText() ) {
if( childInElement.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_MSGTYPE) == 0 ) {
SMInputCursor childInElement2 = childInElement.childMixedCursor();
String tmpMessageTypeValue = "";
while (childInElement2.getNext() != null)
{
if(childInElement2.getCurrEvent().hasText())
{
tmpMessageTypeValue = childInElement2.getText();
logger.debug(tmpMessageTypeValue);
break;
}
}
if(tmpMessageTypeValue.compareToIgnoreCase(DisabledNodesVGWSynch.fromVGWMsgType) != 0 )
{
logger.error("This is not the expected type of message (DisabledNodesVGWSynch Response)");
//throw new IOException(); // (++++) maybe throw some other kind of exception
return;
}
//Model3dStylesList.getListofStyleEntriesVec().add(new Model3dStylesEntry(childInElement));
}
else if( childInElement.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_VGW_ID) == 0 )
{
SMInputCursor childInElement2 = childInElement.childMixedCursor();
while (childInElement2.getNext() != null)
{
if(childInElement2.getCurrEvent().hasText())
{
pVgwId = childInElement2.getText();
DBRegisteredGateway tmpDbRGw = AbstractGatewayManager.getInstance().getDBRegisteredGatewayByName(pVgwId);
if(tmpDbRGw == null) {
logger.error("Equiv list en/dis message received from invalid GW id");
// TODO: deal with this
}
logger.debug(pVgwId);
break;
}
}
}
else if( childInElement.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_TIMESTAMP) == 0 ) {
SMInputCursor childInElement2 = childInElement.childMixedCursor();
while (childInElement2.getNext() != null)
{
if(childInElement2.getCurrEvent().hasText())
{
try {
timestamp = Long.valueOf( childInElement2.getText());
}
catch (Exception efrmt) {
logger.error("timestamp format exception");
timestamp = 0;
}
logger.debug(Long.toString(timestamp));
break;
}
}
}
else if (childInElement.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_CONFIRMED_LIST ) == 0) {
SMInputCursor childInElement2 = childInElement.childCursor();
while (childInElement2.getNext() != null) {
if(!childInElement2.getCurrEvent().hasText() && childInElement2.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_CONFIRMED_ITEM) ==0 ) {
EnabledNodesListItem candNode = new EnabledNodesListItem();
SMInputCursor childInElement3 = childInElement2.childCursor();
while(childInElement3.getNext() != null) {
if(!childInElement3.getCurrEvent().hasText() ) {
if( childInElement3.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_NODE_ID) == 0 ) {
SMInputCursor childInElement4 = childInElement3.childMixedCursor();
while (childInElement4.getNext() != null)
{
if(childInElement4.getCurrEvent().hasText())
{
candNode.setNodeId(childInElement4.getText());
break;
}
}
} else if(childInElement3.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_NODE_STATUS) == 0 ) {
SMInputCursor childInElement4 = childInElement3.childMixedCursor();
while (childInElement4.getNext() != null)
{
if(childInElement4.getCurrEvent().hasText())
{
if (childInElement4.getText().trim().compareToIgnoreCase("enabled") == 0) {
candNode.setStatus(true);
} else if(childInElement4.getText().trim().compareToIgnoreCase("disabled") == 0) {
candNode.setStatus(false);
}
break;
}
}
} else if(childInElement3.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_REMOTE_TS) == 0 ) {
SMInputCursor childInElement4 = childInElement3.childMixedCursor();
while (childInElement4.getNext() != null)
{
if(childInElement4.getCurrEvent().hasText())
{
long candTs = 0; // the VGW will send messages with 0 remoteTimestamp when initiated (and also probably when something went wrong)
try {
candTs = Long.valueOf( childInElement4.getText());
}
catch (Exception efrmt) {
logger.error("timestamp format exception");
}
if(candTs == 0) {
candTs = currentTimestamp;
}
candNode.setOfRemoteTimestamp(candTs);
break;
}
}
} else if(childInElement3.getLocalName().compareToIgnoreCase(DisabledNodesVGWSynch.TAG_GW_INITIATED) == 0 ) {
SMInputCursor childInElement4 = childInElement3.childMixedCursor();
while (childInElement4.getNext() != null)
{
if(childInElement4.getCurrEvent().hasText())
{
boolean gwInitiatedTheStatus = false;
if(childInElement4.getText().compareToIgnoreCase("1") == 0) {
gwInitiatedTheStatus = true;
}
candNode.setGwInitFlag(gwInitiatedTheStatus);
break;
}
}
}
}
}
if(candNode.isValid() ) {
confirmedEnabledNodesVec.addElement(candNode);
}
}
}
}
}
}
logger.debug("Parsed en/dis message from VGW: " +pVgwId+" successfully!!");
if(!cacheOfTimestampsForMessagesFromVGW.containsKey(pVgwId)) {
cacheOfTimestampsForMessagesFromVGW.put(pVgwId, currentTimestamp);
}
Vector<EnabledNodesListItem> confirmed_AND_FOUND_EnabledNodesVec = new Vector<EnabledNodesListItem>();
if(!confirmedEnabledNodesVec.isEmpty() && UserNode.getUserNode() !=null ) {
HashMap<String, GatewayWithSmartNodes> gatewaysToSmartDevsHM = UserNode.getUserNode().getGatewaysToSmartDevsHM();
if(gatewaysToSmartDevsHM.containsKey(pVgwId)) {
GatewayWithSmartNodes tmpGWWithNodes = gatewaysToSmartDevsHM.get(pVgwId);
if(tmpGWWithNodes!= null && tmpGWWithNodes.getSmartNodesVec()!=null){
Iterator<SmartNode> smVecIt = tmpGWWithNodes.getSmartNodesVec().iterator();
while(smVecIt.hasNext()) {
SmartNode nodeInMemTmp = smVecIt.next() ;
EnabledNodesListItem nodeItMatched = null;
boolean foundAndHandledMatch = false;
for(EnabledNodesListItem nodeIt: confirmedEnabledNodesVec) {
//check if it exists in memory and change its properties!
if(nodeIt.getNodeId().compareToIgnoreCase(nodeInMemTmp.getId()) == 0) {
//match with timestamp for confirmation
logger.debug("Match for node: " + nodeInMemTmp.getId() + " with timestamp: "+ nodeInMemTmp.getRegistryProperties().getTimeStampEnabledStatusRemotelySynch());
if(nodeInMemTmp.getRegistryProperties().getTimeStampEnabledStatusRemotelySynch() == nodeIt.getOfRemoteTimestamp() ||
(nodeInMemTmp.getRegistryProperties().getTimeStampEnabledStatusRemotelySynch() == 0 ) ){
// sets the confirmation date to the current local time of the VSP (time of message reception)
nodeInMemTmp.getRegistryProperties().setTimeStampEnabledStatusSynch(currentTimestamp);
nodeInMemTmp.getRegistryProperties().setEnabled(nodeIt.getStatus());
// TODO: if the status was initiated by the VGW we could manage it here to set the status anew!
// BUT SET THE STATUS ANYWAY!
// TODO: if something special is done here (for initiated by VGW), the 2nd level cache should also be updated
// TODO: so update the confirmedEnabledNodesVec item (nodeIt) as well
nodeInMemTmp.getRegistryProperties().setEnabledStatusWasInitiatedByVGW(nodeIt.isGwInitFlag()); // this just logs the update from vgw, but TODO: it should need more code to handle this case
nodeItMatched = nodeIt;
foundAndHandledMatch = true;
}
// +++++++++++++++++++++
break;
}
}
if(foundAndHandledMatch) {
confirmed_AND_FOUND_EnabledNodesVec.addElement(nodeItMatched);
}
}
}
}
}
//update the 2nd level cache
// TODO: check if here we should keep only the found nodes (in mem) or all...
cacheOfEnabledNodeItems.put(pVgwId, confirmed_AND_FOUND_EnabledNodesVec);
logger.debug("Updated memory based on en/dis message from VGW: " +pVgwId+" successfully!!");
}
catch (Exception ex)
{
logger.error("Error parsing the en/dis message from vgw");
ex.printStackTrace();
}
finally {
if(sr != null){
try {
sr.closeCompletely();
}catch (XMLStreamException ex2)
{
logger.error("Error while trying to close up XML reader");
}
}
}
}
private class EnabledNodesListItem {
private String nodeId;
private boolean status;
private long ofRemoteTimestamp;
private boolean gwInitFlag;
EnabledNodesListItem() {
setNodeId("");
setStatus(false);
setOfRemoteTimestamp(0);
setGwInitFlag(false);
}
|
void function(InputStream stream) { String pVgwId = STRSTRThis is not the expected type of message (DisabledNodesVGWSynch Response)STREquiv list en/dis message received from invalid GW idSTRtimestamp format exceptionSTRenabledSTRdisabledSTRtimestamp format exceptionSTR1STRParsed en/dis message from VGW: STR successfully!!STRMatch for node: STR with timestamp: STRUpdated memory based on en/dis message from VGW: STR successfully!!STRError parsing the en/dis message from vgwSTRError while trying to close up XML readerSTR"); setStatus(false); setOfRemoteTimestamp(0); setGwInitFlag(false); }
|
/**
*
* parse the incoming message for nodes list confirmation
* update the in-mem table when needed!
* @param stream the InputStream source of the message from the VGW
*/
|
parse the incoming message for nodes list confirmation update the in-mem table when needed
|
parseVGWMsg
|
{
"repo_name": "vitrofp7/vitro",
"path": "source/trunk/Demo/vitroUI/vspEngine/src/main/java/vitro/vspEngine/service/query/DisabledNodesVGWSynch.java",
"license": "lgpl-3.0",
"size": 33408
}
|
[
"java.io.InputStream"
] |
import java.io.InputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,852,072
|
public void testPresentEmpty() throws Throwable {
GlobalAttrDefaultT testDoc =
GlobalAttrDefaultDocDocument.Factory.parse("<pre:GlobalAttrDefaultDoc" +
" xmlns:pre=\"http://xbean/scomp/attribute/GlobalAttrDefault\" " +
" pre:testattribute=\"\"/>").getGlobalAttrDefaultDoc();
assertEquals("", testDoc.getTestattribute());
try {
assertTrue(testDoc.validate(validateOptions));
}
catch (Throwable t) {
showErrors();
throw t;
}
}
|
void function() throws Throwable { GlobalAttrDefaultT testDoc = GlobalAttrDefaultDocDocument.Factory.parse(STR + STRhttp: STR\"/>").getGlobalAttrDefaultDoc(); assertEquals("", testDoc.getTestattribute()); try { assertTrue(testDoc.validate(validateOptions)); } catch (Throwable t) { showErrors(); throw t; } }
|
/**
* Test empty string: should be preserved
*/
|
Test empty string: should be preserved
|
testPresentEmpty
|
{
"repo_name": "crow-misia/xmlbeans",
"path": "test/src/scomp/attributes/detailed/GlobalAttrDefault.java",
"license": "apache-2.0",
"size": 2963
}
|
[
"xbean.scomp.attribute.globalAttrDefault.GlobalAttrDefaultDocDocument",
"xbean.scomp.attribute.globalAttrDefault.GlobalAttrDefaultT"
] |
import xbean.scomp.attribute.globalAttrDefault.GlobalAttrDefaultDocDocument; import xbean.scomp.attribute.globalAttrDefault.GlobalAttrDefaultT;
|
import xbean.scomp.attribute.*;
|
[
"xbean.scomp.attribute"
] |
xbean.scomp.attribute;
| 1,243,432
|
@Test
public void testDescribeInstancesBasic() throws Exception {
DescribeInstancesResponseMessage result = null;
DescribeInstancesRequestMessage.Builder builder =
DescribeInstancesRequestMessage.newBuilder();
builder.setTypeId(true);
builder.setCallerAccessKey(getCreds().getAWSAccessKeyId());
builder.setRequestId("test");
builder.setInstanceDescribeDepth(InstanceDescribeDepth.BASIC_ONLY);
result = describeInstancesWorker.doWork(builder.build());
assertNotNull(result);
logger.debug("Got results:" +result.getReservationsList());
assertTrue("Expect some instances to be running.",
result.getReservationsList().size() > 0);
}
|
void function() throws Exception { DescribeInstancesResponseMessage result = null; DescribeInstancesRequestMessage.Builder builder = DescribeInstancesRequestMessage.newBuilder(); builder.setTypeId(true); builder.setCallerAccessKey(getCreds().getAWSAccessKeyId()); builder.setRequestId("test"); builder.setInstanceDescribeDepth(InstanceDescribeDepth.BASIC_ONLY); result = describeInstancesWorker.doWork(builder.build()); assertNotNull(result); logger.debug(STR +result.getReservationsList()); assertTrue(STR, result.getReservationsList().size() > 0); }
|
/**
* This test assumes there's always some instances running.
*/
|
This test assumes there's always some instances running
|
testDescribeInstancesBasic
|
{
"repo_name": "TranscendComputing/TopStackCompute",
"path": "test/java/com/msi/compute/actions/DescribeInstancesLocalTest.java",
"license": "apache-2.0",
"size": 4872
}
|
[
"com.transcend.compute.message.DescribeInstancesMessage",
"org.junit.Assert"
] |
import com.transcend.compute.message.DescribeInstancesMessage; import org.junit.Assert;
|
import com.transcend.compute.message.*; import org.junit.*;
|
[
"com.transcend.compute",
"org.junit"
] |
com.transcend.compute; org.junit;
| 2,292,520
|
public static ReadsKey hashKeyForPassthroughRead(final GATKRead read) {
return new KeyForFragment(read.getName().hashCode()) ;
}
|
static ReadsKey function(final GATKRead read) { return new KeyForFragment(read.getName().hashCode()) ; }
|
/**
* Makes a hash key for the read.
*/
|
Makes a hash key for the read
|
hashKeyForPassthroughRead
|
{
"repo_name": "ksthesis/gatk",
"path": "src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/ReadsKey.java",
"license": "bsd-3-clause",
"size": 5446
}
|
[
"org.broadinstitute.hellbender.utils.read.GATKRead"
] |
import org.broadinstitute.hellbender.utils.read.GATKRead;
|
import org.broadinstitute.hellbender.utils.read.*;
|
[
"org.broadinstitute.hellbender"
] |
org.broadinstitute.hellbender;
| 249,161
|
public void unmarkAsToBeDeleted(Transaction transaction) throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "unmarkAsToBeDeleted", transaction);
toBeDeleted = Boolean.FALSE;
try
{
this.requestUpdate(transaction);
}
catch (MessageStoreException e)
{
// MessageStoreException shouldn't occur so FFDC.
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.store.itemstreams.PtoPMessageItemStream.unmarkAsToBeDeleted",
"1:336:1.93.1.14",
this);
SibTr.exception(tc, e);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "unmarkAsToBeDeleted", e);
throw new SIResourceException(e);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "unmarkAsToBeDeleted");
return;
}
|
void function(Transaction transaction) throws SIResourceException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, STR, transaction); toBeDeleted = Boolean.FALSE; try { this.requestUpdate(transaction); } catch (MessageStoreException e) { FFDCFilter.processException( e, STR, STR, this); SibTr.exception(tc, e); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, STR, e); throw new SIResourceException(e); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, STR); return; }
|
/**
* Mark this itemstream as awaiting deletion and harden the indicator
* @throws SIStoreException
*/
|
Mark this itemstream as awaiting deletion and harden the indicator
|
unmarkAsToBeDeleted
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.messaging.runtime/src/com/ibm/ws/sib/processor/impl/store/itemstreams/PtoPMessageItemStream.java",
"license": "epl-1.0",
"size": 33148
}
|
[
"com.ibm.websphere.ras.TraceComponent",
"com.ibm.websphere.sib.exception.SIResourceException",
"com.ibm.ws.ffdc.FFDCFilter",
"com.ibm.ws.sib.msgstore.MessageStoreException",
"com.ibm.ws.sib.msgstore.transactions.Transaction",
"com.ibm.ws.sib.utils.ras.SibTr"
] |
import com.ibm.websphere.ras.TraceComponent; import com.ibm.websphere.sib.exception.SIResourceException; import com.ibm.ws.ffdc.FFDCFilter; import com.ibm.ws.sib.msgstore.MessageStoreException; import com.ibm.ws.sib.msgstore.transactions.Transaction; import com.ibm.ws.sib.utils.ras.SibTr;
|
import com.ibm.websphere.ras.*; import com.ibm.websphere.sib.exception.*; import com.ibm.ws.ffdc.*; import com.ibm.ws.sib.msgstore.*; import com.ibm.ws.sib.msgstore.transactions.*; import com.ibm.ws.sib.utils.ras.*;
|
[
"com.ibm.websphere",
"com.ibm.ws"
] |
com.ibm.websphere; com.ibm.ws;
| 1,763,438
|
public void list(CloudQuery query, CloudCallbackHandler<List<CloudEntity>> handler) {
// register the query as continuous query
if (query.isContinuous()) {
CloudQuery ncq = new CloudQuery(query);
ncq.setScope(Scope.PAST);
ContinuousQueryHandler cqh = new ContinuousQueryHandler(handler, ncq, getCredential());
continuousQueries.put(query.getQueryId(), cqh);
}
// execute the query
_list(query, handler, new Handler());
}
|
void function(CloudQuery query, CloudCallbackHandler<List<CloudEntity>> handler) { if (query.isContinuous()) { CloudQuery ncq = new CloudQuery(query); ncq.setScope(Scope.PAST); ContinuousQueryHandler cqh = new ContinuousQueryHandler(handler, ncq, getCredential()); continuousQueries.put(query.getQueryId(), cqh); } _list(query, handler, new Handler()); }
|
/**
* Executes a query with specified {@link CloudQuery}.
*
* @param query
* {@link CloudQuery} to execute.
* @param handler
* {@link CloudCallbackHandler} that handles the response.
*/
|
Executes a query with specified <code>CloudQuery</code>
|
list
|
{
"repo_name": "eonoe/mobileBackendStarterAndroid",
"path": "AndroidClient/src/com/google/cloud/backend/android/CloudBackendAsync.java",
"license": "apache-2.0",
"size": 16481
}
|
[
"android.os.Handler",
"com.google.cloud.backend.android.CloudQuery",
"java.util.List"
] |
import android.os.Handler; import com.google.cloud.backend.android.CloudQuery; import java.util.List;
|
import android.os.*; import com.google.cloud.backend.android.*; import java.util.*;
|
[
"android.os",
"com.google.cloud",
"java.util"
] |
android.os; com.google.cloud; java.util;
| 2,354,908
|
public static void splitFilters(
ImmutableBitSet childBitmap,
RexNode predicate,
List<RexNode> pushable,
List<RexNode> notPushable) {
// for each filter, if the filter only references the child inputs,
// then it can be pushed
for (RexNode filter : conjunctions(predicate)) {
ImmutableBitSet filterRefs = InputFinder.bits(filter);
if (childBitmap.contains(filterRefs)) {
pushable.add(filter);
} else {
notPushable.add(filter);
}
}
}
|
static void function( ImmutableBitSet childBitmap, RexNode predicate, List<RexNode> pushable, List<RexNode> notPushable) { for (RexNode filter : conjunctions(predicate)) { ImmutableBitSet filterRefs = InputFinder.bits(filter); if (childBitmap.contains(filterRefs)) { pushable.add(filter); } else { notPushable.add(filter); } } }
|
/**
* Splits a filter into two lists, depending on whether or not the filter
* only references its child input
*
* @param childBitmap Fields in the child
* @param predicate filters that will be split
* @param pushable returns the list of filters that can be pushed to the
* child input
* @param notPushable returns the list of filters that cannot be pushed to
* the child input
*/
|
Splits a filter into two lists, depending on whether or not the filter only references its child input
|
splitFilters
|
{
"repo_name": "minji-kim/calcite",
"path": "core/src/main/java/org/apache/calcite/plan/RelOptUtil.java",
"license": "apache-2.0",
"size": 131877
}
|
[
"java.util.List",
"org.apache.calcite.rex.RexNode",
"org.apache.calcite.util.ImmutableBitSet"
] |
import java.util.List; import org.apache.calcite.rex.RexNode; import org.apache.calcite.util.ImmutableBitSet;
|
import java.util.*; import org.apache.calcite.rex.*; import org.apache.calcite.util.*;
|
[
"java.util",
"org.apache.calcite"
] |
java.util; org.apache.calcite;
| 2,539,165
|
InternalCache getCache();
|
InternalCache getCache();
|
/**
* The returned cache will be null if the cache does not yet exist. Note that the returned cache
* may be one that is already closed. Callers of GemFireCacheImpl.getInstance() should try to use
* this method.
*/
|
The returned cache will be null if the cache does not yet exist. Note that the returned cache may be one that is already closed. Callers of GemFireCacheImpl.getInstance() should try to use this method
|
getCache
|
{
"repo_name": "masaki-yamakawa/geode",
"path": "geode-core/src/main/java/org/apache/geode/distributed/internal/DistributionManager.java",
"license": "apache-2.0",
"size": 15041
}
|
[
"org.apache.geode.internal.cache.InternalCache"
] |
import org.apache.geode.internal.cache.InternalCache;
|
import org.apache.geode.internal.cache.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 1,920,396
|
public com.mozu.api.contracts.commerceruntime.returns.ReturnCollection performReturnActions(com.mozu.api.contracts.commerceruntime.returns.ReturnAction action, String responseFields) throws Exception
{
MozuClient<com.mozu.api.contracts.commerceruntime.returns.ReturnCollection> client = com.mozu.api.clients.commerce.ReturnClient.performReturnActionsClient( action, responseFields);
client.setContext(_apiContext);
client.executeRequest();
return client.getResult();
}
|
com.mozu.api.contracts.commerceruntime.returns.ReturnCollection function(com.mozu.api.contracts.commerceruntime.returns.ReturnAction action, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.commerceruntime.returns.ReturnCollection> client = com.mozu.api.clients.commerce.ReturnClient.performReturnActionsClient( action, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); }
|
/**
* Updates the return by performing the action specified in the request.
* <p><pre><code>
* Return return = new Return();
* ReturnCollection returnCollection = return.performReturnActions( action, responseFields);
* </code></pre></p>
* @param responseFields Use this field to include those fields which are not included by default.
* @param action Properties of an action a user can perform for a return.
* @return com.mozu.api.contracts.commerceruntime.returns.ReturnCollection
* @see com.mozu.api.contracts.commerceruntime.returns.ReturnCollection
* @see com.mozu.api.contracts.commerceruntime.returns.ReturnAction
*/
|
Updates the return by performing the action specified in the request. <code><code> Return return = new Return(); ReturnCollection returnCollection = return.performReturnActions( action, responseFields); </code></code>
|
performReturnActions
|
{
"repo_name": "johngatti/mozu-java",
"path": "mozu-java-core/src/main/java/com/mozu/api/resources/commerce/ReturnResource.java",
"license": "mit",
"size": 28030
}
|
[
"com.mozu.api.MozuClient"
] |
import com.mozu.api.MozuClient;
|
import com.mozu.api.*;
|
[
"com.mozu.api"
] |
com.mozu.api;
| 2,315,398
|
public static InvalidationP convertToInvalidationProto(Invalidation invalidation) {
Preconditions.checkNotNull(invalidation);
ObjectIdP objectId = convertToObjectIdProto(invalidation.getObjectId());
return CommonProtos2.newInvalidationP(objectId, invalidation.getVersion(),
invalidation.getPayload() == null ? null : ByteString.copyFrom(invalidation.getPayload()),
null);
}
private ProtoConverter() { // To prevent instantiation.
}
|
static InvalidationP function(Invalidation invalidation) { Preconditions.checkNotNull(invalidation); ObjectIdP objectId = convertToObjectIdProto(invalidation.getObjectId()); return CommonProtos2.newInvalidationP(objectId, invalidation.getVersion(), invalidation.getPayload() == null ? null : ByteString.copyFrom(invalidation.getPayload()), null); } private ProtoConverter() { }
|
/**
* Converts an invalidation {@code invalidation} to the corresponding protocol
* buffer and returns it.
*/
|
Converts an invalidation invalidation to the corresponding protocol buffer and returns it
|
convertToInvalidationProto
|
{
"repo_name": "leighpauls/k2cro4",
"path": "third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/ProtoConverter.java",
"license": "bsd-3-clause",
"size": 3988
}
|
[
"com.google.common.base.Preconditions",
"com.google.ipc.invalidation.common.CommonProtos2",
"com.google.ipc.invalidation.external.client.types.Invalidation",
"com.google.protobuf.ByteString",
"com.google.protos.ipc.invalidation.ClientProtocol"
] |
import com.google.common.base.Preconditions; import com.google.ipc.invalidation.common.CommonProtos2; import com.google.ipc.invalidation.external.client.types.Invalidation; import com.google.protobuf.ByteString; import com.google.protos.ipc.invalidation.ClientProtocol;
|
import com.google.common.base.*; import com.google.ipc.invalidation.common.*; import com.google.ipc.invalidation.external.client.types.*; import com.google.protobuf.*; import com.google.protos.ipc.invalidation.*;
|
[
"com.google.common",
"com.google.ipc",
"com.google.protobuf",
"com.google.protos"
] |
com.google.common; com.google.ipc; com.google.protobuf; com.google.protos;
| 1,704,748
|
@Test
public void testWrappingOfRemoteErrorMessage() throws Exception {
EmbeddedChannel ch = createEmbeddedChannel();
PartitionRequestClientHandler handler = getClientHandler(ch);
// Create input channels
RemoteInputChannel[] rich = new RemoteInputChannel[] {
createRemoteInputChannel(), createRemoteInputChannel()};
for (RemoteInputChannel r : rich) {
when(r.getInputChannelId()).thenReturn(new InputChannelID());
handler.addInputChannel(r);
}
// Error msg for channel[0]
ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse(
new RuntimeException("Expected test exception"),
rich[0].getInputChannelId()));
try {
// Exception should not reach end of pipeline...
ch.checkException();
}
catch (Exception e) {
fail("The exception reached the end of the pipeline and "
+ "was not handled correctly by the last handler.");
}
verify(rich[0], times(1)).onError(isA(RemoteTransportException.class));
verify(rich[1], never()).onError(any(Throwable.class));
// Fatal error for all channels
ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse(
new RuntimeException("Expected test exception")));
try {
// Exception should not reach end of pipeline...
ch.checkException();
}
catch (Exception e) {
fail("The exception reached the end of the pipeline and "
+ "was not handled correctly by the last handler.");
}
verify(rich[0], times(2)).onError(isA(RemoteTransportException.class));
verify(rich[1], times(1)).onError(isA(RemoteTransportException.class));
}
|
void function() throws Exception { EmbeddedChannel ch = createEmbeddedChannel(); PartitionRequestClientHandler handler = getClientHandler(ch); RemoteInputChannel[] rich = new RemoteInputChannel[] { createRemoteInputChannel(), createRemoteInputChannel()}; for (RemoteInputChannel r : rich) { when(r.getInputChannelId()).thenReturn(new InputChannelID()); handler.addInputChannel(r); } ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse( new RuntimeException(STR), rich[0].getInputChannelId())); try { ch.checkException(); } catch (Exception e) { fail(STR + STR); } verify(rich[0], times(1)).onError(isA(RemoteTransportException.class)); verify(rich[1], never()).onError(any(Throwable.class)); ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse( new RuntimeException(STR))); try { ch.checkException(); } catch (Exception e) { fail(STR + STR); } verify(rich[0], times(2)).onError(isA(RemoteTransportException.class)); verify(rich[1], times(1)).onError(isA(RemoteTransportException.class)); }
|
/**
* Verifies that {@link NettyMessage.ErrorResponse} messages are correctly wrapped in
* {@link RemoteTransportException} instances.
*/
|
Verifies that <code>NettyMessage.ErrorResponse</code> messages are correctly wrapped in <code>RemoteTransportException</code> instances
|
testWrappingOfRemoteErrorMessage
|
{
"repo_name": "WangTaoTheTonic/flink",
"path": "flink-runtime/src/test/java/org/apache/flink/runtime/io/network/netty/ClientTransportErrorHandlingTest.java",
"license": "apache-2.0",
"size": 13750
}
|
[
"io.netty.channel.embedded.EmbeddedChannel",
"org.apache.flink.runtime.io.network.netty.exception.RemoteTransportException",
"org.apache.flink.runtime.io.network.partition.consumer.InputChannelID",
"org.apache.flink.runtime.io.network.partition.consumer.RemoteInputChannel",
"org.junit.Assert",
"org.mockito.Mockito"
] |
import io.netty.channel.embedded.EmbeddedChannel; import org.apache.flink.runtime.io.network.netty.exception.RemoteTransportException; import org.apache.flink.runtime.io.network.partition.consumer.InputChannelID; import org.apache.flink.runtime.io.network.partition.consumer.RemoteInputChannel; import org.junit.Assert; import org.mockito.Mockito;
|
import io.netty.channel.embedded.*; import org.apache.flink.runtime.io.network.netty.exception.*; import org.apache.flink.runtime.io.network.partition.consumer.*; import org.junit.*; import org.mockito.*;
|
[
"io.netty.channel",
"org.apache.flink",
"org.junit",
"org.mockito"
] |
io.netty.channel; org.apache.flink; org.junit; org.mockito;
| 610,110
|
protected org.eclipse.bpel4chor.model.pbd.MessageExchange xml2MessageExchange(Element messageExchangeElement) {
if (!messageExchangeElement.getLocalName().equals("messageExchange"))
return null;
org.eclipse.bpel4chor.model.pbd.MessageExchange messageExchange = PbdFactory.eINSTANCE
.createMessageExchange();
// Set name
if (messageExchangeElement.hasAttribute("name"))
messageExchange
.setName(messageExchangeElement.getAttribute("name"));
//xml2ExtensibleElement(messageExchange, messageExchangeElement);
return messageExchange;
}
|
org.eclipse.bpel4chor.model.pbd.MessageExchange function(Element messageExchangeElement) { if (!messageExchangeElement.getLocalName().equals(STR)) return null; org.eclipse.bpel4chor.model.pbd.MessageExchange messageExchange = PbdFactory.eINSTANCE .createMessageExchange(); if (messageExchangeElement.hasAttribute("name")) messageExchange .setName(messageExchangeElement.getAttribute("name")); return messageExchange; }
|
/**
* Converts an XML messageExchange element to a BPEL MessageExchange object.
*/
|
Converts an XML messageExchange element to a BPEL MessageExchange object
|
xml2MessageExchange
|
{
"repo_name": "chorsystem/middleware",
"path": "chorDataModel/src/main/java/org/eclipse/bpel/model/resource/BPELReader2.java",
"license": "mit",
"size": 57396
}
|
[
"org.eclipse.bpel4chor.model.pbd.PbdFactory",
"org.w3c.dom.Element"
] |
import org.eclipse.bpel4chor.model.pbd.PbdFactory; import org.w3c.dom.Element;
|
import org.eclipse.bpel4chor.model.pbd.*; import org.w3c.dom.*;
|
[
"org.eclipse.bpel4chor",
"org.w3c.dom"
] |
org.eclipse.bpel4chor; org.w3c.dom;
| 1,097,699
|
public java.util.List<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI> getInput_booleans_BoolHLAPI(){
java.util.List<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI>();
for (Sort elemnt : getInput()) {
if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.booleans.impl.BoolImpl.class)){
retour.add(new fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI(
(fr.lip6.move.pnml.hlpn.booleans.Bool)elemnt
));
}
}
return retour;
}
|
java.util.List<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI>(); for (Sort elemnt : getInput()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.booleans.impl.BoolImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.booleans.hlapi.BoolHLAPI( (fr.lip6.move.pnml.hlpn.booleans.Bool)elemnt )); } } return retour; }
|
/**
* This accessor return a list of encapsulated subelement, only of BoolHLAPI kind.
* WARNING : this method can creates a lot of new object in memory.
*/
|
This accessor return a list of encapsulated subelement, only of BoolHLAPI kind. WARNING : this method can creates a lot of new object in memory
|
getInput_booleans_BoolHLAPI
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/strings/hlapi/GreaterThanHLAPI.java",
"license": "epl-1.0",
"size": 108533
}
|
[
"fr.lip6.move.pnml.hlpn.terms.Sort",
"java.util.ArrayList",
"java.util.List"
] |
import fr.lip6.move.pnml.hlpn.terms.Sort; import java.util.ArrayList; import java.util.List;
|
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
|
[
"fr.lip6.move",
"java.util"
] |
fr.lip6.move; java.util;
| 1,394,614
|
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY )
@org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE})
@Basic( optional = true )
@JoinColumn(name = "enrollmentid", nullable = true )
public Enrollment getEnrollmentid() {
return this.enrollmentid;
}
|
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY ) @org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE}) @Basic( optional = true ) @JoinColumn(name = STR, nullable = true ) Enrollment function() { return this.enrollmentid; }
|
/**
* Return the value associated with the column: enrollmentid.
* @return A Enrollment object (this.enrollmentid)
*/
|
Return the value associated with the column: enrollmentid
|
getEnrollmentid
|
{
"repo_name": "servinglynk/servinglynk-hmis",
"path": "hmis-model-v2014/src/main/java/com/servinglynk/hmis/warehouse/model/v2014/Schoolstatus.java",
"license": "mpl-2.0",
"size": 9262
}
|
[
"javax.persistence.Basic",
"javax.persistence.CascadeType",
"javax.persistence.FetchType",
"javax.persistence.JoinColumn",
"javax.persistence.ManyToOne"
] |
import javax.persistence.Basic; import javax.persistence.CascadeType; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne;
|
import javax.persistence.*;
|
[
"javax.persistence"
] |
javax.persistence;
| 1,157,573
|
RestTemplate client = new RestTemplate(ClientHttpRequestFactorySelector.getRequestFactory());
OAuth1RequestInterceptor interceptor = new OAuth1RequestInterceptor(credentials);
List<ClientHttpRequestInterceptor> interceptors = new LinkedList<ClientHttpRequestInterceptor>();
interceptors.add(interceptor);
client.setInterceptors(interceptors);
return client;
}
|
RestTemplate client = new RestTemplate(ClientHttpRequestFactorySelector.getRequestFactory()); OAuth1RequestInterceptor interceptor = new OAuth1RequestInterceptor(credentials); List<ClientHttpRequestInterceptor> interceptors = new LinkedList<ClientHttpRequestInterceptor>(); interceptors.add(interceptor); client.setInterceptors(interceptors); return client; }
|
/**
* Constructs a RestTemplate that adds the OAuth1 Authorization header to each request before it is executed.
*/
|
Constructs a RestTemplate that adds the OAuth1 Authorization header to each request before it is executed
|
create
|
{
"repo_name": "codeconsole/spring-social",
"path": "spring-social-core/src/main/java/org/springframework/social/oauth1/ProtectedResourceClientFactory.java",
"license": "apache-2.0",
"size": 2737
}
|
[
"java.util.LinkedList",
"java.util.List",
"org.springframework.http.client.ClientHttpRequestInterceptor",
"org.springframework.social.support.ClientHttpRequestFactorySelector",
"org.springframework.web.client.RestTemplate"
] |
import java.util.LinkedList; import java.util.List; import org.springframework.http.client.ClientHttpRequestInterceptor; import org.springframework.social.support.ClientHttpRequestFactorySelector; import org.springframework.web.client.RestTemplate;
|
import java.util.*; import org.springframework.http.client.*; import org.springframework.social.support.*; import org.springframework.web.client.*;
|
[
"java.util",
"org.springframework.http",
"org.springframework.social",
"org.springframework.web"
] |
java.util; org.springframework.http; org.springframework.social; org.springframework.web;
| 1,852,579
|
@Override
public Set<Type> getTypes()
{
return _bean.getTypes();
}
|
Set<Type> function() { return _bean.getTypes(); }
|
/**
* Returns the types that the bean implements
*/
|
Returns the types that the bean implements
|
getTypes
|
{
"repo_name": "CleverCloud/Quercus",
"path": "resin/src/main/java/com/caucho/config/inject/DecoratorBean.java",
"license": "gpl-2.0",
"size": 11154
}
|
[
"java.lang.reflect.Type",
"java.util.Set"
] |
import java.lang.reflect.Type; import java.util.Set;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 454,074
|
@Exported(name="hash")
public @NonNull String getHashString() {
return Util.toHexString(md5sum);
}
|
@Exported(name="hash") @NonNull String function() { return Util.toHexString(md5sum); }
|
/**
* Gets the MD5 hash string.
*/
|
Gets the MD5 hash string
|
getHashString
|
{
"repo_name": "DanielWeber/jenkins",
"path": "core/src/main/java/hudson/model/Fingerprint.java",
"license": "mit",
"size": 51963
}
|
[
"edu.umd.cs.findbugs.annotations.NonNull",
"org.kohsuke.stapler.export.Exported"
] |
import edu.umd.cs.findbugs.annotations.NonNull; import org.kohsuke.stapler.export.Exported;
|
import edu.umd.cs.findbugs.annotations.*; import org.kohsuke.stapler.export.*;
|
[
"edu.umd.cs",
"org.kohsuke.stapler"
] |
edu.umd.cs; org.kohsuke.stapler;
| 717,415
|
public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets);
|
void function(Map<TopicPartition, OffsetAndMetadata> offsets);
|
/**
* This is called when offsets get committed.
* <p>
* Any exception thrown by this method will be ignored by the caller.
*
* @param offsets A map of offsets by partition with associated metadata
*/
|
This is called when offsets get committed. Any exception thrown by this method will be ignored by the caller
|
onCommit
|
{
"repo_name": "KevinLiLu/kafka",
"path": "clients/src/main/java/org/apache/kafka/clients/consumer/ConsumerInterceptor.java",
"license": "apache-2.0",
"size": 4712
}
|
[
"java.util.Map",
"org.apache.kafka.common.TopicPartition"
] |
import java.util.Map; import org.apache.kafka.common.TopicPartition;
|
import java.util.*; import org.apache.kafka.common.*;
|
[
"java.util",
"org.apache.kafka"
] |
java.util; org.apache.kafka;
| 1,780,004
|
public static synchronized void addSharedToMachine(String diskName, String mountpoint, Resource host) {
Machine resource = MACHINE_TO_SHARED.get(host);
if (resource != null) {
resource.addSharedDisk(diskName, mountpoint);
List<Resource> machines = SHARED_TO_MACHINES.get(diskName);
if (machines == null) {
machines = new LinkedList<>();
SHARED_TO_MACHINES.put(diskName, machines);
}
synchronized (machines) {
machines.add(host);
}
} else {
ErrorManager.warn("Host " + host.getName() + " not registered as machine in the Shared disk manager.");
}
}
|
static synchronized void function(String diskName, String mountpoint, Resource host) { Machine resource = MACHINE_TO_SHARED.get(host); if (resource != null) { resource.addSharedDisk(diskName, mountpoint); List<Resource> machines = SHARED_TO_MACHINES.get(diskName); if (machines == null) { machines = new LinkedList<>(); SHARED_TO_MACHINES.put(diskName, machines); } synchronized (machines) { machines.add(host); } } else { ErrorManager.warn(STR + host.getName() + STR); } }
|
/**
* Links a shared disk with a resource.
*
* @param diskName shared disk identifier
* @param mountpoint path where the shared disk is mounted
* @param host containing resource
*/
|
Links a shared disk with a resource
|
addSharedToMachine
|
{
"repo_name": "mF2C/COMPSs",
"path": "compss/runtime/adaptors/engine/src/main/java/es/bsc/compss/util/SharedDiskManager.java",
"license": "apache-2.0",
"size": 9432
}
|
[
"es.bsc.compss.types.resources.Resource",
"java.util.LinkedList",
"java.util.List"
] |
import es.bsc.compss.types.resources.Resource; import java.util.LinkedList; import java.util.List;
|
import es.bsc.compss.types.resources.*; import java.util.*;
|
[
"es.bsc.compss",
"java.util"
] |
es.bsc.compss; java.util;
| 1,631,992
|
int executeBatch() throws SQLException;
|
int executeBatch() throws SQLException;
|
/**
* Executes (flushes) all statements currently batched.
*
* @return the number of rows updated in the batch
* @throws java.sql.SQLException If the batch could not be executed or if any of the statements
* fails.
*/
|
Executes (flushes) all statements currently batched
|
executeBatch
|
{
"repo_name": "cavajtennis/ibatis",
"path": "java/mapper/mapper2/src/com/ibatis/sqlmap/client/SqlMapExecutor.java",
"license": "apache-2.0",
"size": 9902
}
|
[
"java.sql.SQLException"
] |
import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 1,260,654
|
public void endEntity(String name, Augmentations augs) throws IOException, XNIException {
super.endEntity(name, augs);
if(name.equals("[xml]")){
//if fMarkupDepth has reached 0.
//and driver is fTrailingMiscDriver (which
//handles end of document in normal case)
//set the scanner state of SCANNER_STATE_TERMINATED
if(fMarkupDepth == 0 && fDriver == fTrailingMiscDriver){
//set the scanner set to SCANNER_STATE_TERMINATED
setScannerState(SCANNER_STATE_TERMINATED) ;
} else{
//else we have reached the end of document prematurely
//so throw EOFException.
throw new java.io.EOFException();
}
//this is taken care in wrapper which generates XNI callbacks, There are no next events
//if (fDocumentHandler != null) {
//fDocumentHandler.endDocument(null);
//}
}
} // endEntity(String)
|
void function(String name, Augmentations augs) throws IOException, XNIException { super.endEntity(name, augs); if(name.equals("[xml]")){ if(fMarkupDepth == 0 && fDriver == fTrailingMiscDriver){ setScannerState(SCANNER_STATE_TERMINATED) ; } else{ throw new java.io.EOFException(); } } }
|
/**
* This method notifies the end of an entity. The DTD has the pseudo-name
* of "[dtd]" parameter entity names start with '%'; and general entities
* are just specified by their name.
*
* @param name The name of the entity.
*
* @throws XNIException Thrown by handler to signal an error.
*/
|
This method notifies the end of an entity. The DTD has the pseudo-name of "[dtd]" parameter entity names start with '%'; and general entities are just specified by their name
|
endEntity
|
{
"repo_name": "openjdk-mirror/jdk7u-jaxp",
"path": "src/com/sun/org/apache/xerces/internal/impl/XMLDocumentScannerImpl.java",
"license": "gpl-2.0",
"size": 59182
}
|
[
"com.sun.org.apache.xerces.internal.xni.Augmentations",
"com.sun.org.apache.xerces.internal.xni.XNIException",
"java.io.EOFException",
"java.io.IOException"
] |
import com.sun.org.apache.xerces.internal.xni.Augmentations; import com.sun.org.apache.xerces.internal.xni.XNIException; import java.io.EOFException; import java.io.IOException;
|
import com.sun.org.apache.xerces.internal.xni.*; import java.io.*;
|
[
"com.sun.org",
"java.io"
] |
com.sun.org; java.io;
| 306,477
|
//establish and open a CacheLoaderServer to handle cache
//cache loading requests from a CacheLoaderClient
cacheLoaderServer = new CacheLoaderServer<String, String>(10000);
cacheLoaderServer.open();
//establish the CacheManager for the tests
cacheManager = Caching.getCachingProvider().getCacheManager();
//establish a CacheLoaderClient that a Cache can use for loading entries
//(via the CacheLoaderServer)
CacheLoaderClient<String, String> cacheLoader =
new CacheLoaderClient<>(cacheLoaderServer.getInetAddress(), cacheLoaderServer.getPort());
//establish a Cache Configuration that uses a CacheLoader (no Read-Through)
MutableConfiguration<String, String> configuration = new MutableConfiguration<>();
configuration.setTypes(String.class, String.class);
configuration.setCacheLoaderFactory(FactoryBuilder.factoryOf(cacheLoader));
configuration.setReadThrough(false);
//configure the cache
cacheManager.createCache("cache-loader-test", configuration);
cache = cacheManager.getCache("cache-loader-test", String.class, String.class);
}
|
cacheLoaderServer = new CacheLoaderServer<String, String>(10000); cacheLoaderServer.open(); cacheManager = Caching.getCachingProvider().getCacheManager(); CacheLoaderClient<String, String> cacheLoader = new CacheLoaderClient<>(cacheLoaderServer.getInetAddress(), cacheLoaderServer.getPort()); MutableConfiguration<String, String> configuration = new MutableConfiguration<>(); configuration.setTypes(String.class, String.class); configuration.setCacheLoaderFactory(FactoryBuilder.factoryOf(cacheLoader)); configuration.setReadThrough(false); cacheManager.createCache(STR, configuration); cache = cacheManager.getCache(STR, String.class, String.class); }
|
/**
* Establish the {@link javax.cache.CacheManager} and {@link javax.cache.Cache} for a test.
*/
|
Establish the <code>javax.cache.CacheManager</code> and <code>javax.cache.Cache</code> for a test
|
onBeforeEachTest
|
{
"repo_name": "headissue/cache2k",
"path": "cache2k-jcache-tests/src/test/java/org/jsr107/tck/integration/CacheLoaderWithoutReadThroughTest.java",
"license": "gpl-3.0",
"size": 27230
}
|
[
"javax.cache.Caching",
"javax.cache.configuration.FactoryBuilder",
"javax.cache.configuration.MutableConfiguration"
] |
import javax.cache.Caching; import javax.cache.configuration.FactoryBuilder; import javax.cache.configuration.MutableConfiguration;
|
import javax.cache.*; import javax.cache.configuration.*;
|
[
"javax.cache"
] |
javax.cache;
| 918,125
|
public DataBuilder add(final Cmd... cmds) {
final List<Cmd> cmdsList = Arrays.asList(Preconditions.checkNotNull(cmds));
if (Iterables.any(cmdsList, Predicates.<Cmd>isNull())) {
throw new DataBuilderException("cmds has a null cmd: " + cmdsList);
}
LOGGER.debug("Add cmds: {}.", cmdsList);
store().commands.addAll(cmdsList);
LOGGER.debug("All cmds are now: {}.", store().commands);
return this;
}
|
DataBuilder function(final Cmd... cmds) { final List<Cmd> cmdsList = Arrays.asList(Preconditions.checkNotNull(cmds)); if (Iterables.any(cmdsList, Predicates.<Cmd>isNull())) { throw new DataBuilderException(STR + cmdsList); } LOGGER.debug(STR, cmdsList); store().commands.addAll(cmdsList); LOGGER.debug(STR, store().commands); return this; }
|
/**
* Queue some {@link Cmd}s that are to be stored in {@link DataBuilderStore#commands}.
*
* @param cmds Cmds to later build on the {@link DataBuilder#build()}.
* @return This instance, for fluent api.
*/
|
Queue some <code>Cmd</code>s that are to be stored in <code>DataBuilderStore#commands</code>
|
add
|
{
"repo_name": "adben002/test-data-builder",
"path": "core/src/main/java/com/adben/testdatabuilder/core/DataBuilder.java",
"license": "mit",
"size": 5981
}
|
[
"com.adben.testdatabuilder.core.cmds.Cmd",
"com.adben.testdatabuilder.core.exception.DataBuilderException",
"com.adben.testdatabuilder.core.store.DataBuilderStore",
"com.google.common.base.Preconditions",
"com.google.common.base.Predicates",
"com.google.common.collect.Iterables",
"java.util.Arrays",
"java.util.List"
] |
import com.adben.testdatabuilder.core.cmds.Cmd; import com.adben.testdatabuilder.core.exception.DataBuilderException; import com.adben.testdatabuilder.core.store.DataBuilderStore; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import java.util.Arrays; import java.util.List;
|
import com.adben.testdatabuilder.core.cmds.*; import com.adben.testdatabuilder.core.exception.*; import com.adben.testdatabuilder.core.store.*; import com.google.common.base.*; import com.google.common.collect.*; import java.util.*;
|
[
"com.adben.testdatabuilder",
"com.google.common",
"java.util"
] |
com.adben.testdatabuilder; com.google.common; java.util;
| 2,380,409
|
public void setEndpointReference(SOAPOverUDPEndpointReferenceType endpointReference) {
this.endpointReference = endpointReference;
}
|
void function(SOAPOverUDPEndpointReferenceType endpointReference) { this.endpointReference = endpointReference; }
|
/**
* Sets the endpoint reference.
* @param endpointReference WS-Discovery endpoint reference.
*/
|
Sets the endpoint reference
|
setEndpointReference
|
{
"repo_name": "nateridderman/java-ws-discovery",
"path": "wsdiscovery-lib/src/main/java/com/ms/wsdiscovery/servicedirectory/WsDiscoveryService.java",
"license": "lgpl-3.0",
"size": 12415
}
|
[
"com.skjegstad.soapoverudp.datatypes.SOAPOverUDPEndpointReferenceType"
] |
import com.skjegstad.soapoverudp.datatypes.SOAPOverUDPEndpointReferenceType;
|
import com.skjegstad.soapoverudp.datatypes.*;
|
[
"com.skjegstad.soapoverudp"
] |
com.skjegstad.soapoverudp;
| 1,854,704
|
private static String toString(Loggable value) {
if (value == null) {
return null;
} else {
return value.toJSONString();
}
}
|
static String function(Loggable value) { if (value == null) { return null; } else { return value.toJSONString(); } }
|
/**
* Converts the param value (<code>Loggable</code>) value to a string.
*
* @param value the param value
* @return the string
*/
|
Converts the param value (<code>Loggable</code>) value to a string
|
toString
|
{
"repo_name": "Small-Bodies-Node/ntl_archive_db_demo",
"path": "import_and_persistence/src/java/main/gov/nasa/pds/services/impl/Helper.java",
"license": "bsd-3-clause",
"size": 13010
}
|
[
"gov.nasa.pds.entities.Loggable"
] |
import gov.nasa.pds.entities.Loggable;
|
import gov.nasa.pds.entities.*;
|
[
"gov.nasa.pds"
] |
gov.nasa.pds;
| 1,356,983
|
public void saveOrUpdate(com.floreantpos.model.ZipCodeVsDeliveryCharge zipCodeVsDeliveryCharge, Session s)
throws org.hibernate.HibernateException {
saveOrUpdate((Object) zipCodeVsDeliveryCharge, s);
}
|
void function(com.floreantpos.model.ZipCodeVsDeliveryCharge zipCodeVsDeliveryCharge, Session s) throws org.hibernate.HibernateException { saveOrUpdate((Object) zipCodeVsDeliveryCharge, s); }
|
/**
* Either save() or update() the given instance, depending upon the value of its identifier property. By default the
* instance is always saved. This behaviour may be adjusted by specifying an unsaved-value attribute of the identifier
* property mapping.
* Use the Session given.
* @param zipCodeVsDeliveryCharge a transient instance containing new or updated state.
* @param s the Session.
*/
|
Either save() or update() the given instance, depending upon the value of its identifier property. By default the instance is always saved. This behaviour may be adjusted by specifying an unsaved-value attribute of the identifier property mapping. Use the Session given
|
saveOrUpdate
|
{
"repo_name": "meyerdg/floreant",
"path": "src/com/floreantpos/model/dao/BaseZipCodeVsDeliveryChargeDAO.java",
"license": "gpl-2.0",
"size": 8977
}
|
[
"org.hibernate.Session"
] |
import org.hibernate.Session;
|
import org.hibernate.*;
|
[
"org.hibernate"
] |
org.hibernate;
| 2,814,346
|
private LockResult transformCurrent(LockResult result) throws IOException {
final Cursor c = mSource;
final byte[] key = c.key();
if (key == null) {
mKey = null;
mValue = null;
return LockResult.UNOWNED;
}
byte[] tkey = mTransformer.transformKey(c);
mKey = tkey;
if (c.value() == null) {
mValue = null;
if (tkey != null) {
// Retain the position and lock when value doesn't exist.
return result;
}
} else {
if (tkey != null) {
byte[] tvalue = mTransformer.transformValue(c, tkey);
if (tvalue != null) {
mValue = tvalue;
return result;
}
}
mValue = null;
}
// This point is reached when the entry was filtered out and the cursor must move.
if (result == LockResult.ACQUIRED) {
// Release the lock when filtered out, but maintain the cursor position.
c.link().unlock();
}
return null;
}
|
LockResult function(LockResult result) throws IOException { final Cursor c = mSource; final byte[] key = c.key(); if (key == null) { mKey = null; mValue = null; return LockResult.UNOWNED; } byte[] tkey = mTransformer.transformKey(c); mKey = tkey; if (c.value() == null) { mValue = null; if (tkey != null) { return result; } } else { if (tkey != null) { byte[] tvalue = mTransformer.transformValue(c, tkey); if (tvalue != null) { mValue = tvalue; return result; } } mValue = null; } if (result == LockResult.ACQUIRED) { c.link().unlock(); } return null; }
|
/**
* Method returns null if entry was filtered out and cursor must be moved. As a
* side-effect, the mKey and mValue fields are set to null when filtered out.
*
* @param result must not be null
* @return null if cursor must be moved
*/
|
Method returns null if entry was filtered out and cursor must be moved. As a side-effect, the mKey and mValue fields are set to null when filtered out
|
transformCurrent
|
{
"repo_name": "cojen/Tupl",
"path": "src/main/java/org/cojen/tupl/views/TransformedCursor.java",
"license": "agpl-3.0",
"size": 20406
}
|
[
"java.io.IOException",
"org.cojen.tupl.Cursor",
"org.cojen.tupl.LockResult"
] |
import java.io.IOException; import org.cojen.tupl.Cursor; import org.cojen.tupl.LockResult;
|
import java.io.*; import org.cojen.tupl.*;
|
[
"java.io",
"org.cojen.tupl"
] |
java.io; org.cojen.tupl;
| 228,189
|
public static RenderType getRenderType (BlockState state) {
return RENDER_TYPES.computeIfAbsent(state.getBlock(), k -> findRenderType(state));
}
|
static RenderType function (BlockState state) { return RENDER_TYPES.computeIfAbsent(state.getBlock(), k -> findRenderType(state)); }
|
/**
* Gets a RenderType for a given block.
*
* @param state The block to get the type of.
* @return The RenderType for the block.
*/
|
Gets a RenderType for a given block
|
getRenderType
|
{
"repo_name": "Darkhax-Minecraft/Bookshelf",
"path": "src/main/java/net/darkhax/bookshelf/util/RenderUtils.java",
"license": "lgpl-2.1",
"size": 26872
}
|
[
"net.minecraft.block.BlockState",
"net.minecraft.client.renderer.RenderType"
] |
import net.minecraft.block.BlockState; import net.minecraft.client.renderer.RenderType;
|
import net.minecraft.block.*; import net.minecraft.client.renderer.*;
|
[
"net.minecraft.block",
"net.minecraft.client"
] |
net.minecraft.block; net.minecraft.client;
| 1,465,406
|
@Override
protected long doGetLastModifiedTime() throws Exception {
final DavProperty property = getProperty((URLFileName) getName(),
DavConstants.PROPERTY_GETLASTMODIFIED);
if (property != null) {
final String value = (String) property.getValue();
return DateUtil.parseDate(value).getTime();
}
return 0;
}
|
long function() throws Exception { final DavProperty property = getProperty((URLFileName) getName(), DavConstants.PROPERTY_GETLASTMODIFIED); if (property != null) { final String value = (String) property.getValue(); return DateUtil.parseDate(value).getTime(); } return 0; }
|
/**
* Returns the last modified time of this file. Is only called if
* {@link #doGetType} does not return {@link FileType#IMAGINARY}.
*/
|
Returns the last modified time of this file. Is only called if <code>#doGetType</code> does not return <code>FileType#IMAGINARY</code>
|
doGetLastModifiedTime
|
{
"repo_name": "kichenko/apache-vfs2-fix",
"path": "core/src/main/java/org/apache/commons/vfs2/provider/webdav/WebdavFileObject.java",
"license": "apache-2.0",
"size": 27030
}
|
[
"org.apache.commons.httpclient.util.DateUtil",
"org.apache.commons.vfs2.provider.URLFileName",
"org.apache.jackrabbit.webdav.DavConstants",
"org.apache.jackrabbit.webdav.property.DavProperty"
] |
import org.apache.commons.httpclient.util.DateUtil; import org.apache.commons.vfs2.provider.URLFileName; import org.apache.jackrabbit.webdav.DavConstants; import org.apache.jackrabbit.webdav.property.DavProperty;
|
import org.apache.commons.httpclient.util.*; import org.apache.commons.vfs2.provider.*; import org.apache.jackrabbit.webdav.*; import org.apache.jackrabbit.webdav.property.*;
|
[
"org.apache.commons",
"org.apache.jackrabbit"
] |
org.apache.commons; org.apache.jackrabbit;
| 2,525,706
|
public ServiceCall<Void> getBooleanNullAsync(final ServiceCallback<Void> serviceCallback) {
return ServiceCall.fromResponse(getBooleanNullWithServiceResponseAsync(), serviceCallback);
}
|
ServiceCall<Void> function(final ServiceCallback<Void> serviceCallback) { return ServiceCall.fromResponse(getBooleanNullWithServiceResponseAsync(), serviceCallback); }
|
/**
* Get null Boolean value on query (query string should be absent).
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/
|
Get null Boolean value on query (query string should be absent)
|
getBooleanNullAsync
|
{
"repo_name": "matthchr/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/url/implementation/QueriesImpl.java",
"license": "mit",
"size": 139435
}
|
[
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback"
] |
import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 1,679,384
|
public void loadData(ChunkChopper chopper)
{
chopper.pushData(chopper.getID(), new Boolean(true));
}
|
void function(ChunkChopper chopper) { chopper.pushData(chopper.getID(), new Boolean(true)); }
|
/**
* If this method is being called then
* a boolean true will be set on the chunk chopper
* with a key that is the id of this chunk.
*
* @param chopper the chopper on which the boolean true data is to be set
*
*/
|
If this method is being called then a boolean true will be set on the chunk chopper with a key that is the id of this chunk
|
loadData
|
{
"repo_name": "windybell/jME3-3dsmax-plugins",
"path": "jME3-Max3dsPlugin/src/com/jme3/asset/max3ds/chunks/BooleanChunk.java",
"license": "gpl-2.0",
"size": 778
}
|
[
"com.jme3.asset.max3ds.ChunkChopper"
] |
import com.jme3.asset.max3ds.ChunkChopper;
|
import com.jme3.asset.max3ds.*;
|
[
"com.jme3.asset"
] |
com.jme3.asset;
| 2,482,693
|
public Map getPolicyMap() {
return policyMap;
}
|
Map function() { return policyMap; }
|
/**
* Gets the policy map.
*
* @return Returns Map.
*/
|
Gets the policy map
|
getPolicyMap
|
{
"repo_name": "apache/axis2-java",
"path": "modules/codegen/src/org/apache/axis2/wsdl/codegen/CodeGenConfiguration.java",
"license": "apache-2.0",
"size": 23545
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,562,689
|
@Override
int getMessageCount(String clientId, String subscriberName) throws IOException;
|
int getMessageCount(String clientId, String subscriberName) throws IOException;
|
/**
* Get the number of messages ready to deliver from the store to a durable
* subscriber
*
* @param clientId
* @param subscriberName
*
* @return the outstanding message count
*
* @throws IOException
*/
|
Get the number of messages ready to deliver from the store to a durable subscriber
|
getMessageCount
|
{
"repo_name": "chirino/activemq",
"path": "activemq-broker/src/main/java/org/apache/activemq/store/TopicReferenceStore.java",
"license": "apache-2.0",
"size": 5039
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 982,222
|
public MediaRouter.RouteInfo getRoute() {
return mRoute;
}
|
MediaRouter.RouteInfo function() { return mRoute; }
|
/**
* Gets the route that this dialog is controlling.
*/
|
Gets the route that this dialog is controlling
|
getRoute
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "frameworks/support/v7/mediarouter/src/android/support/v7/app/MediaRouteControllerDialog.java",
"license": "gpl-3.0",
"size": 15984
}
|
[
"android.support.v7.media.MediaRouter"
] |
import android.support.v7.media.MediaRouter;
|
import android.support.v7.media.*;
|
[
"android.support"
] |
android.support;
| 2,677,428
|
void onDeleted(T item) throws IOException;
|
void onDeleted(T item) throws IOException;
|
/**
* Internal method. Called by {@link Item}s when they are deleted by users.
*/
|
Internal method. Called by <code>Item</code>s when they are deleted by users
|
onDeleted
|
{
"repo_name": "DanielWeber/jenkins",
"path": "core/src/main/java/hudson/model/ItemGroup.java",
"license": "mit",
"size": 6178
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 479,635
|
public AbstractZyGraph<NodeType, EdgeType> getGraph() {
return m_graph;
}
|
AbstractZyGraph<NodeType, EdgeType> function() { return m_graph; }
|
/**
* Returns the graph the exited node belongs to.
*
* @return The graph the exited node belongs to.
*/
|
Returns the graph the exited node belongs to
|
getGraph
|
{
"repo_name": "mayl8822/binnavi",
"path": "src/main/java/com/google/security/zynamics/zylib/yfileswrap/gui/zygraph/editmode/states/CNodeExitState.java",
"license": "apache-2.0",
"size": 4085
}
|
[
"com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.AbstractZyGraph"
] |
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.AbstractZyGraph;
|
import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.*;
|
[
"com.google.security"
] |
com.google.security;
| 1,555,679
|
public ServiceCall getByteValidAsync(final ServiceCallback<Map<String, byte[]>> serviceCallback) throws IllegalArgumentException {
if (serviceCallback == null) {
throw new IllegalArgumentException("ServiceCallback is required for async calls.");
}
|
ServiceCall function(final ServiceCallback<Map<String, byte[]>> serviceCallback) throws IllegalArgumentException { if (serviceCallback == null) { throw new IllegalArgumentException(STR); }
|
/**
* Get byte dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03), "2": hex (25, 29, 43)} with each item encoded in base64.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if callback is null
* @return the {@link Call} object
*/
|
Get byte dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03), "2": hex (25, 29, 43)} with each item encoded in base64
|
getByteValidAsync
|
{
"repo_name": "John-Hart/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodydictionary/implementation/DictionarysImpl.java",
"license": "mit",
"size": 172079
}
|
[
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback",
"java.util.Map"
] |
import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import java.util.Map;
|
import com.microsoft.rest.*; import java.util.*;
|
[
"com.microsoft.rest",
"java.util"
] |
com.microsoft.rest; java.util;
| 2,322,902
|
private void process(Iterator<Document> docs) throws IOException {
long numTokens = 0;
while (docs.hasNext()) {
Document doc = docs.next();
BufferedReader br = doc.reader();
String header = br.readLine();
String[] pieces = header.split("\\s+");
int index = Integer.parseInt(pieces[3]);
DependencyTreeNode[] nodes = extractor.readNextTree(br);
foundTokens.add(nodes[index].word().toLowerCase());
for (int i = index+1; i < index+10 && i < nodes.length; ++i)
foundTokens.add(nodes[i].word().toLowerCase());
for (int i = Math.max(0, index-10); i < index; ++i)
foundTokens.add(nodes[i].word().toLowerCase());
}
}
|
void function(Iterator<Document> docs) throws IOException { long numTokens = 0; while (docs.hasNext()) { Document doc = docs.next(); BufferedReader br = doc.reader(); String header = br.readLine(); String[] pieces = header.split("\\s+"); int index = Integer.parseInt(pieces[3]); DependencyTreeNode[] nodes = extractor.readNextTree(br); foundTokens.add(nodes[index].word().toLowerCase()); for (int i = index+1; i < index+10 && i < nodes.length; ++i) foundTokens.add(nodes[i].word().toLowerCase()); for (int i = Math.max(0, index-10); i < index; ++i) foundTokens.add(nodes[i].word().toLowerCase()); } }
|
/**
* Counts all of the tokens in the iterator
*/
|
Counts all of the tokens in the iterator
|
process
|
{
"repo_name": "fozziethebeat/S-Space",
"path": "src/main/java/edu/ucla/sspace/tools/DepPsdTokenCounter.java",
"license": "gpl-2.0",
"size": 5285
}
|
[
"edu.ucla.sspace.dependency.DependencyTreeNode",
"edu.ucla.sspace.text.Document",
"java.io.BufferedReader",
"java.io.IOException",
"java.util.Iterator"
] |
import edu.ucla.sspace.dependency.DependencyTreeNode; import edu.ucla.sspace.text.Document; import java.io.BufferedReader; import java.io.IOException; import java.util.Iterator;
|
import edu.ucla.sspace.dependency.*; import edu.ucla.sspace.text.*; import java.io.*; import java.util.*;
|
[
"edu.ucla.sspace",
"java.io",
"java.util"
] |
edu.ucla.sspace; java.io; java.util;
| 2,441,459
|
public void setNickname(org.ontoware.rdf2go.model.node.Node value) {
Base.set(this.model, this.getResource(), NICKNAME, value);
}
|
void function(org.ontoware.rdf2go.model.node.Node value) { Base.set(this.model, this.getResource(), NICKNAME, value); }
|
/**
* Sets a value of property Nickname from an RDF2Go node. First, all
* existing values are removed, then this value is added. Cardinality
* constraints are not checked, but this method exists only for properties
* with no minCardinality or minCardinality == 1.
*
* @param value the value to be added [Generated from RDFReactor template
* rule #set1dynamic]
*/
|
Sets a value of property Nickname from an RDF2Go node. First, all existing values are removed, then this value is added. Cardinality constraints are not checked, but this method exists only for properties with no minCardinality or minCardinality == 1
|
setNickname
|
{
"repo_name": "m0ep/master-thesis",
"path": "source/apis/rdf2go/rdf2go-foaf/src/main/java/com/xmlns/foaf/Thing.java",
"license": "mit",
"size": 274766
}
|
[
"org.ontoware.rdfreactor.runtime.Base"
] |
import org.ontoware.rdfreactor.runtime.Base;
|
import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdfreactor"
] |
org.ontoware.rdfreactor;
| 2,810,013
|
/**
 * Closes the currently selected rows via the POS filtering service and
 * refreshes the lines afterwards so the UI reflects the change.
 */
public final void doCloseLines()
{
    final IPOSFiltering posService = getService();
    // Close whatever the user has selected, then force a reload of the lines.
    posService.closeRows(getRowsSelected());
    refreshLines(true);
}
|
final void function() { final IPOSFiltering service = getService(); final Set<IPOSTableRow> selectedRows = getRowsSelected(); service.closeRows(selectedRows); refreshLines(true); }
|
/**
* Closes selected lines
*/
|
Closes selected lines
|
doCloseLines
|
{
"repo_name": "klst-com/metasfresh",
"path": "de.metas.handlingunits.client/src/main/java/de/metas/handlingunits/client/terminal/select/model/AbstractHUSelectModel.java",
"license": "gpl-2.0",
"size": 23023
}
|
[
"de.metas.handlingunits.client.terminal.select.api.IPOSFiltering",
"de.metas.handlingunits.client.terminal.select.api.IPOSTableRow",
"java.util.Set"
] |
import de.metas.handlingunits.client.terminal.select.api.IPOSFiltering; import de.metas.handlingunits.client.terminal.select.api.IPOSTableRow; import java.util.Set;
|
import de.metas.handlingunits.client.terminal.select.api.*; import java.util.*;
|
[
"de.metas.handlingunits",
"java.util"
] |
de.metas.handlingunits; java.util;
| 1,322,515
|
/**
 * Validates that the given flow statistics object carries all mandatory
 * values (packet count, byte count, and a duration with second/nanosecond).
 *
 * @param fstats the flow statistics to check; may be {@code null}
 * @return {@code null} if valid, otherwise a message describing the first
 *         missing piece of data
 */
public static String check(GenericStatistics fstats) {
    if (fstats == null) {
        return "flow statistics is null.";
    }
    if (fstats.getPacketCount() == null) {
        return "No packet count.";
    }
    if (fstats.getByteCount() == null) {
        return "No byte count.";
    }
    Duration duration = fstats.getDuration();
    if (duration == null) {
        return "No duration.";
    }
    if (duration.getSecond() == null) {
        return "No second in duration.";
    }
    if (duration.getNanosecond() == null) {
        return "No nanosecond in duration.";
    }
    return null;
}
|
static String function(GenericStatistics fstats) { String msg = null; if (fstats == null) { msg = STR; } else if (fstats.getPacketCount() == null) { msg = STR; } else if (fstats.getByteCount() == null) { msg = STR; } else { Duration duration = fstats.getDuration(); if (duration == null) { msg = STR; } else if (duration.getSecond() == null) { msg = STR; } else if (duration.getNanosecond() == null) { msg = STR; } } return msg; }
|
/**
* Ensure that the given flow statistics contains valid values.
*
* @param fstats A flow statistics to be checked.
* @return {@code null} if the given flow statistics is valid.
* An error message if the given flow statistics is invalid.
*/
|
Ensure that the given flow statistics contains valid values
|
check
|
{
"repo_name": "opendaylight/vtn",
"path": "manager/implementation/src/main/java/org/opendaylight/vtn/manager/internal/util/flow/FlowStatsUtils.java",
"license": "epl-1.0",
"size": 13897
}
|
[
"org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.GenericStatistics",
"org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.duration.Duration"
] |
import org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.GenericStatistics; import org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.duration.Duration;
|
import org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.*; import org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.duration.*;
|
[
"org.opendaylight.yang"
] |
org.opendaylight.yang;
| 1,249,467
|
/**
 * Creates a new QName from its namespace, optional revision and local name.
 *
 * @param namespace namespace of the QName
 * @param revision optional revision of the namespace
 * @param localName local name part of the QName; must not be null
 * @return a QName instance
 */
public static @NonNull QName create(final XMLNamespace namespace, final Optional<Revision> revision,
        final String localName) {
    // Fold namespace + revision into a module identifier, then delegate to the
    // module-based factory.
    final QNameModule module = QNameModule.create(namespace, revision);
    return create(module, localName);
}
|
static @NonNull QName function(final XMLNamespace namespace, final Optional<Revision> revision, final String localName) { return create(QNameModule.create(namespace, revision), localName); }
|
/**
* Creates new QName.
*
* @param namespace Namespace of QName or null if namespace is undefined.
* @param revision Revision of namespace.
* @param localName Local name part of QName. MUST NOT BE null.
* @return Instance of QName
*/
|
Creates new QName
|
create
|
{
"repo_name": "opendaylight/yangtools",
"path": "common/yang-common/src/main/java/org/opendaylight/yangtools/yang/common/QName.java",
"license": "epl-1.0",
"size": 15163
}
|
[
"java.util.Optional",
"org.eclipse.jdt.annotation.NonNull"
] |
import java.util.Optional; import org.eclipse.jdt.annotation.NonNull;
|
import java.util.*; import org.eclipse.jdt.annotation.*;
|
[
"java.util",
"org.eclipse.jdt"
] |
java.util; org.eclipse.jdt;
| 1,528,764
|
/**
 * Generates a scroll gesture (DOWN followed by two MOVE events) and verifies
 * that the resulting GESTURE_SCROLL_BY event reports both the absolute pointer
 * position and the relative scroll delta of the last move.
 */
@SmallTest
@Feature({"Gestures"})
public void testScrollUpdateCoordinates() {
    final int deltaX = 16;
    final int deltaY = 84;
    final long downTime = SystemClock.uptimeMillis();
    MotionEvent event = motionEvent(MotionEvent.ACTION_DOWN, downTime, downTime);
    assertTrue(mGestureHandler.onTouchEvent(event));
    // Move twice so that we get two GESTURE_SCROLL_BY events and can compare
    // the relative and absolute coordinates.
    event = MotionEvent.obtain(
            downTime, downTime + 5, MotionEvent.ACTION_MOVE,
            FAKE_COORD_X - deltaX / 2, FAKE_COORD_Y - deltaY / 2, 0);
    assertTrue(mGestureHandler.onTouchEvent(event));
    event = MotionEvent.obtain(
            downTime, downTime + 10, MotionEvent.ACTION_MOVE,
            FAKE_COORD_X - deltaX, FAKE_COORD_Y - deltaY, 0);
    assertTrue(mGestureHandler.onTouchEvent(event));
    // Make sure the reported gesture event has all the expected data.
    MockMotionEventDelegate.GestureEvent gestureEvent =
            mMockMotionEventDelegate.mMostRecentGestureEvent;
    assertNotNull(gestureEvent);
    assertEquals(GestureEventType.SCROLL_BY, gestureEvent.getType());
    assertEquals(downTime + 10, gestureEvent.getTimeMs());
    assertEquals(FAKE_COORD_X - deltaX, gestureEvent.getX());
    assertEquals(FAKE_COORD_Y - deltaY, gestureEvent.getY());
    Bundle extraParams = gestureEvent.getExtraParams();
    assertNotNull(extraParams);
    // No horizontal delta because of snapping.
    assertEquals(0, extraParams.getInt(ContentViewGestureHandler.DISTANCE_X));
    // Vertical delta is the relative movement of the second MOVE only.
    assertEquals(deltaY / 2, extraParams.getInt(ContentViewGestureHandler.DISTANCE_Y));
}
|
@Feature({STR}) void function() { final int deltaX = 16; final int deltaY = 84; final long downTime = SystemClock.uptimeMillis(); MotionEvent event = motionEvent(MotionEvent.ACTION_DOWN, downTime, downTime); assertTrue(mGestureHandler.onTouchEvent(event)); event = MotionEvent.obtain( downTime, downTime + 5, MotionEvent.ACTION_MOVE, FAKE_COORD_X - deltaX / 2, FAKE_COORD_Y - deltaY / 2, 0); assertTrue(mGestureHandler.onTouchEvent(event)); event = MotionEvent.obtain( downTime, downTime + 10, MotionEvent.ACTION_MOVE, FAKE_COORD_X - deltaX, FAKE_COORD_Y - deltaY, 0); assertTrue(mGestureHandler.onTouchEvent(event)); MockMotionEventDelegate.GestureEvent gestureEvent = mMockMotionEventDelegate.mMostRecentGestureEvent; assertNotNull(gestureEvent); assertEquals(GestureEventType.SCROLL_BY, gestureEvent.getType()); assertEquals(downTime + 10, gestureEvent.getTimeMs()); assertEquals(FAKE_COORD_X - deltaX, gestureEvent.getX()); assertEquals(FAKE_COORD_Y - deltaY, gestureEvent.getY()); Bundle extraParams = gestureEvent.getExtraParams(); assertNotNull(extraParams); assertEquals(0, extraParams.getInt(ContentViewGestureHandler.DISTANCE_X)); assertEquals(deltaY / 2, extraParams.getInt(ContentViewGestureHandler.DISTANCE_Y)); }
|
/**
* Generate a scroll gesture and verify that the resulting scroll motion event has both absolute
* and relative position information.
*/
|
Generate a scroll gesture and verify that the resulting scroll motion event has both absolute and relative position information
|
testScrollUpdateCoordinates
|
{
"repo_name": "ChromiumWebApps/chromium",
"path": "content/public/android/javatests/src/org/chromium/content/browser/ContentViewGestureHandlerTest.java",
"license": "bsd-3-clause",
"size": 80785
}
|
[
"android.os.Bundle",
"android.os.SystemClock",
"android.view.MotionEvent",
"org.chromium.base.test.util.Feature"
] |
import android.os.Bundle; import android.os.SystemClock; import android.view.MotionEvent; import org.chromium.base.test.util.Feature;
|
import android.os.*; import android.view.*; import org.chromium.base.test.util.*;
|
[
"android.os",
"android.view",
"org.chromium.base"
] |
android.os; android.view; org.chromium.base;
| 2,343,502
|
/**
 * Returns the IAM URL the user must be redirected to for authentication.
 *
 * @param companyId the company id
 * @param returnRequestUri URI the user is redirected back to after
 *        authentication
 * @param scopes the list of requested scopes
 * @param isRefreshTokenRequested {@code true} if a refresh token should also
 *        be requested for the associated user
 * @return the redirect URL
 * @throws Exception if there is a configuration problem
 */
String getLoginRedirect(long companyId, String returnRequestUri,
    List<String> scopes, boolean isRefreshTokenRequested)
    throws Exception;
|
String getLoginRedirect(long companyId, String returnRequestUri, List<String> scopes, boolean isRefreshTokenRequested) throws Exception;
|
/**
* Get the IAM URL where the user has to be re-direct
* for the authentication.
*
* @param companyId
* Company Id
* @param returnRequestUri
* URI where the user is redirect after the authentication
* @param scopes
* The list of requested scopes
* @param isRefreshTokenRequested
* True if the associated user has not a refresh token, false
* otherwise
* @return The URL
* @throws Exception
* If there is a configuration problem
*/
|
Get the IAM URL where the user has to be re-direct for the authentication
|
getLoginRedirect
|
{
"repo_name": "FutureGateway/LiferayIAM",
"path": "portal-security-sso-iam/src/main/java/com/liferay/portal/security/sso/iam/IAM.java",
"license": "apache-2.0",
"size": 5677
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,604,738
|
/**
 * Resolves the parent {@link hudson.model.ItemGroup} of the item addressed by
 * the given path, relative to the seed job's context.
 *
 * @param seedJob the seed job whose context anchors relative paths
 * @param path path to the item whose parent should be looked up
 * @return the parent item group, or {@code null} if the addressed parent is
 *         not an {@link ItemGroup}
 */
public ItemGroup getParent(Item seedJob, String path) {
    Jenkins jenkins = Jenkins.getInstance();
    int slash = path.lastIndexOf('/');
    if (slash == -1) {
        // No separator: the item lives directly in the seed job's context.
        return getContext(seedJob);
    }
    if (slash == 0) {
        // Leading slash only: the parent is the Jenkins root itself.
        return jenkins;
    }
    Item parent = jenkins.getItem(path.substring(0, slash), getContext(seedJob));
    return (parent instanceof ItemGroup) ? (ItemGroup) parent : null;
}
|
ItemGroup function(Item seedJob, String path) { Jenkins jenkins = Jenkins.getInstance(); int i = path.lastIndexOf('/'); switch (i) { case -1: return getContext(seedJob); case 0: return jenkins; default: Item item = jenkins.getItem(path.substring(0, i), getContext(seedJob)); return item instanceof ItemGroup ? (ItemGroup) item : null; } }
|
/**
* Get the parent {@link hudson.model.ItemGroup} of the item addressed by the given path.
*
* @param seedJob the seed job
* @param path path to the item for which the parent should be looked up
* @return parent {@link hudson.model.ItemGroup} of the item with the given path
*/
|
Get the parent <code>hudson.model.ItemGroup</code> of the item addressed by the given path
|
getParent
|
{
"repo_name": "JeremyMarshall/job-dsl-plugin",
"path": "job-dsl-plugin/src/main/groovy/javaposse/jobdsl/plugin/LookupStrategy.java",
"license": "apache-2.0",
"size": 3040
}
|
[
"hudson.model.Item",
"hudson.model.ItemGroup"
] |
import hudson.model.Item; import hudson.model.ItemGroup;
|
import hudson.model.*;
|
[
"hudson.model"
] |
hudson.model;
| 550,231
|
/**
 * Visits every method of the class that has a body, running the detector's
 * analysis on each. Unprofitable methods are silently skipped; CFG/dataflow
 * analysis failures are logged via the bug reporter rather than aborting the
 * whole class.
 *
 * @see edu.umd.cs.findbugs.Detector#visitClassContext(edu.umd.cs.findbugs.ba.ClassContext)
 */
@Override
public void visitClassContext(ClassContext classContext) {
    JavaClass javaClass = classContext.getJavaClass();
    for (Method method : javaClass.getMethods()) {
        if (method.getCode() == null) {
            // Abstract/native methods have no code to analyze.
            continue;
        }
        try {
            analyzeMethod(classContext, method);
        } catch (MethodUnprofitableException e) {
            assert true; // move along; nothing to see
        } catch (CFGBuilderException | DataflowAnalysisException e) {
            // Both failure modes are reported identically; multi-catch avoids
            // duplicating the message construction.
            String msg = "Detector " + this.getClass().getName() + " caught exception while analyzing "
                    + javaClass.getClassName() + "." + method.getName() + " : " + method.getSignature();
            bugReporter.logError(msg, e);
        }
    }
}
|
void function(ClassContext classContext) { JavaClass javaClass = classContext.getJavaClass(); Method[] methodList = javaClass.getMethods(); for (Method method : methodList) { if (method.getCode() == null) { continue; } try { analyzeMethod(classContext, method); } catch (MethodUnprofitableException e) { assert true; } catch (CFGBuilderException e) { String msg = STR + this.getClass().getName() + STR + javaClass.getClassName() + "." + method.getName() + STR + method.getSignature(); bugReporter.logError(msg, e); } catch (DataflowAnalysisException e) { String msg = STR + this.getClass().getName() + STR + javaClass.getClassName() + "." + method.getName() + STR + method.getSignature(); bugReporter.logError(msg, e); } } }
|
/**
* Visit the class context
*
* @see edu.umd.cs.findbugs.Detector#visitClassContext(edu.umd.cs.findbugs.ba.ClassContext)
*/
|
Visit the class context
|
visitClassContext
|
{
"repo_name": "KengoTODA/spotbugs",
"path": "spotbugs/src/main/java/edu/umd/cs/findbugs/detect/FindUnrelatedTypesInGenericContainer.java",
"license": "lgpl-2.1",
"size": 41485
}
|
[
"edu.umd.cs.findbugs.ba.CFGBuilderException",
"edu.umd.cs.findbugs.ba.ClassContext",
"edu.umd.cs.findbugs.ba.DataflowAnalysisException",
"edu.umd.cs.findbugs.ba.MethodUnprofitableException",
"org.apache.bcel.classfile.JavaClass",
"org.apache.bcel.classfile.Method"
] |
import edu.umd.cs.findbugs.ba.CFGBuilderException; import edu.umd.cs.findbugs.ba.ClassContext; import edu.umd.cs.findbugs.ba.DataflowAnalysisException; import edu.umd.cs.findbugs.ba.MethodUnprofitableException; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.Method;
|
import edu.umd.cs.findbugs.ba.*; import org.apache.bcel.classfile.*;
|
[
"edu.umd.cs",
"org.apache.bcel"
] |
edu.umd.cs; org.apache.bcel;
| 1,810,291
|
/**
 * Returns the instruction stored at the given index and remembers it as the
 * current instruction. Logs an error and returns {@code null} when no
 * instruction is stored there.
 *
 * @param index index of the instruction
 * @return the instruction, or {@code null} if absent
 */
public Instruction getInstruction(int index) {
    Instruction instruction = this.instructions.get(index);
    if (instruction == null) {
        Log.e("InstructionManager", "Could not get instruction at index "
                + index);
        return null;
    }
    // Only a successful lookup updates the current-instruction marker.
    this.currentInstruction = index;
    return instruction;
}
|
Instruction function(int index) { if (this.instructions.get(index) != null) { this.currentInstruction = index; return this.instructions.get(index); } else { Log.e(STR, STR + index); return null; } }
|
/**
* Get the instruction at the desired index
*
* @param index
* Index of the instruction
* @return The instruction
*/
|
Get the instruction at the desired index
|
getInstruction
|
{
"repo_name": "mrunde/Bachelor-Thesis",
"path": "src/de/mrunde/bachelorthesis/instructions/InstructionManager.java",
"license": "mit",
"size": 30635
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 1,955,766
|
/**
 * Returns a multimap of all existing table snapshot entries recorded in the
 * quota table, keyed by table name.
 *
 * @param conn connection to re-use
 * @throws IOException if scanning the quota table fails
 */
public static Multimap<TableName, String> getTableSnapshots(Connection conn) throws IOException {
    Multimap<TableName, String> snapshotsByTable = HashMultimap.create();
    try (Table quotaTable = conn.getTable(QUOTA_TABLE_NAME);
            ResultScanner scanner = quotaTable.getScanner(createScanForSpaceSnapshotSizes())) {
        for (Result result : scanner) {
            CellScanner cells = result.cellScanner();
            while (cells.advance()) {
                Cell cell = cells.current();
                // Row key encodes the table, the cell encodes the snapshot name.
                String snapshot = extractSnapshotNameFromSizeCell(cell);
                snapshotsByTable.put(getTableFromRowKey(result.getRow()), snapshot);
            }
        }
    }
    return snapshotsByTable;
}
|
static Multimap<TableName, String> function(Connection conn) throws IOException { try (Table quotaTable = conn.getTable(QUOTA_TABLE_NAME); ResultScanner rs = quotaTable.getScanner(createScanForSpaceSnapshotSizes())) { Multimap<TableName, String> snapshots = HashMultimap.create(); for (Result r : rs) { CellScanner cs = r.cellScanner(); while (cs.advance()) { Cell c = cs.current(); final String snapshot = extractSnapshotNameFromSizeCell(c); snapshots.put(getTableFromRowKey(r.getRow()), snapshot); } } return snapshots; } }
|
/**
* Returns a multimap for all existing table snapshot entries.
* @param conn connection to re-use
*/
|
Returns a multimap for all existing table snapshot entries
|
getTableSnapshots
|
{
"repo_name": "ultratendency/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java",
"license": "apache-2.0",
"size": 39872
}
|
[
"java.io.IOException",
"org.apache.hadoop.hbase.Cell",
"org.apache.hadoop.hbase.CellScanner",
"org.apache.hadoop.hbase.TableName",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.client.Result",
"org.apache.hadoop.hbase.client.ResultScanner",
"org.apache.hadoop.hbase.client.Table",
"org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap",
"org.apache.hbase.thirdparty.com.google.common.collect.Multimap"
] |
import java.io.IOException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Table; import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap; import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
|
import java.io.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hbase.thirdparty.com.google.common.collect.*;
|
[
"java.io",
"org.apache.hadoop",
"org.apache.hbase"
] |
java.io; org.apache.hadoop; org.apache.hbase;
| 1,364,593
|
/**
 * A sensible definition of {@code descendingKeySet} as the
 * {@code navigableKeySet} of {@code descendingMap}. Override
 * {@code descendingKeySet} to forward to this implementation if you have
 * overridden {@code descendingMap}.
 */
@Beta
protected NavigableSet<K> standardDescendingKeySet() {
    return descendingMap().navigableKeySet();
}
|
NavigableSet<K> function() { return descendingMap().navigableKeySet(); }
|
/**
* A sensible definition of {@link #descendingKeySet} as the {@code navigableKeySet} of
* {@link #descendingMap}. (The {@link StandardDescendingMap} implementation implements
* {@code navigableKeySet} on its own, so as not to cause an infinite loop.) If you override
* {@code descendingMap}, you may wish to override {@code descendingKeySet} to forward to this
* implementation.
*/
|
A sensible definition of <code>#descendingKeySet</code> as the navigableKeySet of <code>#descendingMap</code>. (The <code>StandardDescendingMap</code> implementation implements navigableKeySet on its own, so as not to cause an infinite loop.) If you override descendingMap, you may wish to override descendingKeySet to forward to this implementation
|
standardDescendingKeySet
|
{
"repo_name": "mike10004/appengine-imaging",
"path": "gaecompat-awt-imaging/src/common/com/gaecompat/repackaged/com/google/common/collect/ForwardingNavigableMap.java",
"license": "apache-2.0",
"size": 13306
}
|
[
"java.util.NavigableSet"
] |
import java.util.NavigableSet;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,684,639
|
/**
 * Removes all keys (and their pointers) matching the given query from the
 * subtree rooted at this node, reporting each removed entry to the callback
 * and writing a WAL record per removal when the store is transactional.
 *
 * When a selective query is given, BRANCH nodes recurse into the child range
 * selected by the operator, while LEAF nodes delete matching entries in
 * place. With no query (or ANY / TRUNC_LEFT), the tree is simply walked.
 *
 * @param transaction current transaction, or null for non-transactional use
 * @param query index query selecting the keys to remove; may be null
 * @param callback receives (key, pointer) for every removed entry; may be null
 * @throws BTreeException if the page header reports an unknown page type
 */
private void remove(Txn transaction, IndexQuery query, BTreeCallback callback)
    throws IOException, BTreeException, TerminatedException {
    if (query != null
        && query.getOperator() != IndexQuery.ANY
        && query.getOperator() != IndexQuery.TRUNC_LEFT) {
        Value[] qvals = query.getValues();
        // searchKey returns the insertion point encoded as -(idx+1) when the
        // key is not present; the branches below decode that as needed.
        int leftIdx = searchKey(qvals[0]);
        int rightIdx =
            qvals.length > 1
                ? searchKey(qvals[qvals.length - 1])
                : leftIdx;
        // Negative operator codes denote the negated form of the query.
        boolean pos = query.getOperator() >= 0;
        switch (ph.getStatus()) {
            case BRANCH :
                // Convert search results into child-pointer indices.
                leftIdx = leftIdx < 0 ? - (leftIdx + 1) : leftIdx + 1;
                rightIdx = rightIdx < 0 ? - (rightIdx + 1) : rightIdx + 1;
                switch (query.getOperator()) {
                    case IndexQuery.BWX :
                    case IndexQuery.NBWX :
                    case IndexQuery.BW :
                    case IndexQuery.NBW :
                    case IndexQuery.IN :
                    case IndexQuery.NIN :
                    case IndexQuery.TRUNC_RIGHT :
                        // Recurse into children inside (or outside, when
                        // negated) the [leftIdx, rightIdx] window.
                        for (int i = 0; i < nPtrs; i++) {
                            if ((i >= leftIdx && i <= rightIdx) == pos) {
                                getChildNode(i).remove(transaction, query, callback);
                                if (query.getOperator() == IndexQuery.TRUNC_RIGHT)
                                    break;
                            }
                        }
                        break;
                    case IndexQuery.EQ :
                    case IndexQuery.NEQ :
                        for (int i = 0; i < nPtrs; i++)
                            if (!pos || i == leftIdx)
                                getChildNode(i).remove(transaction, query, callback);
                        // NOTE(review): no break here — EQ/NEQ falls through
                        // into the LT/GEQ loop below and recurses into the
                        // children a second time. Looks unintentional; confirm
                        // against upstream history before changing.
                    case IndexQuery.LT :
                    case IndexQuery.GEQ :
                        for (int i = 0; i < nPtrs; i++)
                            if ((pos && (i <= leftIdx)) || (!pos && (i >= leftIdx)))
                                getChildNode(i).remove(transaction, query, callback);
                        break;
                    case IndexQuery.GT :
                    case IndexQuery.LEQ :
                        for (int i = 0; i < nPtrs; i++)
                            if ((pos && (i >= leftIdx)) || (!pos && (i <= leftIdx)))
                                getChildNode(i).remove(transaction, query, callback);
                        break;
                    default :
                        // If it's not implemented, we walk the tree
                        for (int i = 0; i < nPtrs; i++)
                            getChildNode(i).remove(transaction, query, callback);
                        break;
                }
                break;
            case LEAF :
                try {
                    // Pin this page in the cache while entries are deleted.
                    allowUnload = false;
                    switch (query.getOperator()) {
                        case IndexQuery.EQ :
                            // Exact match: at most one entry to remove.
                            if (leftIdx >= 0) {
                                if (isTransactional && transaction != null) {
                                    RemoveValueLoggable log =
                                        new RemoveValueLoggable(transaction, fileId, page.getPageNum(), leftIdx,
                                            keys[leftIdx], ptrs[leftIdx]);
                                    writeToLog(log, this);
                                }
                                if (callback != null)
                                    callback.indexInfo(keys[leftIdx], ptrs[leftIdx]);
                                removeKey(leftIdx);
                                removePointer(leftIdx);
                                recalculateDataLen();
                            }
                            break;
                        case IndexQuery.NEQ :
                            // Remove everything except the matching entry.
                            for (int i = 0; i < nPtrs; i++)
                                if (i != leftIdx) {
                                    if (isTransactional && transaction != null) {
                                        RemoveValueLoggable log =
                                            new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                keys[i], ptrs[i]);
                                        writeToLog(log, this);
                                    }
                                    if (callback != null)
                                        callback.indexInfo(keys[i], ptrs[i]);
                                    removeKey(i);
                                    removePointer(i);
                                    recalculateDataLen();
                                }
                            break;
                        case IndexQuery.BWX :
                        case IndexQuery.NBWX :
                        case IndexQuery.BW :
                        case IndexQuery.NBW :
                            // Decode insertion points for range endpoints.
                            if (leftIdx < 0)
                                leftIdx = - (leftIdx + 1);
                            if (rightIdx < 0)
                                rightIdx = - (rightIdx + 1);
                            for (int i = 0; i < nPtrs; i++)
                                if ((pos && (i >= leftIdx && i <= rightIdx))
                                    || (!pos && (i <= leftIdx || i >= rightIdx))) {
                                    if (query.testValue(keys[i])) {
                                        if (isTransactional && transaction != null) {
                                            RemoveValueLoggable log =
                                                new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                    keys[i], ptrs[i]);
                                            writeToLog(log, this);
                                        }
                                        if (callback != null)
                                            callback.indexInfo(keys[i], ptrs[i]);
                                        removeKey(i);
                                        removePointer(i);
                                        recalculateDataLen();
                                        // Entries shift left after removal;
                                        // re-examine the same slot.
                                        --i;
                                    }
                                }
                            break;
                        case IndexQuery.TRUNC_RIGHT :
                            if (leftIdx < 0)
                                leftIdx = - (leftIdx + 1);
                            if (rightIdx < 0)
                                rightIdx = - (rightIdx + 1);
                            for (int i = leftIdx; i < rightIdx && i < nPtrs; i++) {
                                if (query.testValue(keys[i])) {
                                    if (isTransactional && transaction != null) {
                                        RemoveValueLoggable log =
                                            new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                keys[i], ptrs[i]);
                                        writeToLog(log, this);
                                    }
                                    if (callback != null)
                                        callback.indexInfo(keys[i], ptrs[i]);
                                    removeKey(i);
                                    removePointer(i);
                                    recalculateDataLen();
                                    --i;
                                }
                            }
                            // Matching prefix range may continue on the next
                            // leaf page; continue removal sequentially there.
                            if (rightIdx >= nPtrs) removeSequential(transaction, this, query, callback);
                            break;
                        case IndexQuery.IN :
                        case IndexQuery.NIN :
                            if (leftIdx < 0)
                                leftIdx = - (leftIdx + 1);
                            if (rightIdx < 0)
                                rightIdx = - (rightIdx + 1);
                            for (int i = 0; i < nPtrs; i++)
                                if (!pos || (i >= leftIdx && i <= rightIdx))
                                    if (query.testValue(keys[i])) {
                                        if (isTransactional && transaction != null) {
                                            RemoveValueLoggable log =
                                                new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                    keys[i], ptrs[i]);
                                            writeToLog(log, this);
                                        }
                                        if (callback != null)
                                            callback.indexInfo(keys[i], ptrs[i]);
                                        removeKey(i);
                                        removePointer(i);
                                        recalculateDataLen();
                                        --i;
                                    }
                            break;
                        case IndexQuery.LT :
                        case IndexQuery.GEQ :
                            if (leftIdx < 0)
                                leftIdx = - (leftIdx + 1);
                            for (int i = 0; i < nPtrs; i++)
                                if ((pos && (i <= leftIdx)) || (!pos && (i >= leftIdx)))
                                    if (query.testValue(keys[i])) {
                                        if (isTransactional && transaction != null) {
                                            RemoveValueLoggable log =
                                                new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                    keys[i], ptrs[i]);
                                            writeToLog(log, this);
                                        }
                                        if (callback != null)
                                            callback.indexInfo(keys[i], ptrs[i]);
                                        removeKey(i);
                                        removePointer(i);
                                        recalculateDataLen();
                                        --i;
                                    }
                            break;
                        case IndexQuery.GT :
                        case IndexQuery.LEQ :
                            if (leftIdx < 0)
                                leftIdx = - (leftIdx + 1);
                            for (int i = 0; i < nPtrs; i++)
                                if ((pos && (i >= leftIdx)) || (!pos && (i <= leftIdx))) {
                                    if (query.testValue(keys[i])) {
                                        if (isTransactional && transaction != null) {
                                            RemoveValueLoggable log =
                                                new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                    keys[i], ptrs[i]);
                                            writeToLog(log, this);
                                        }
                                        if (callback != null)
                                            callback.indexInfo(keys[i], ptrs[i]);
                                        removeKey(i);
                                        removePointer(i);
                                        recalculateDataLen();
                                        --i;
                                    // NOTE(review): operator is GT or LEQ in
                                    // this case, so this TRUNC_RIGHT check can
                                    // never be true — apparently dead code.
                                    } else if (query.getOperator() == IndexQuery.TRUNC_RIGHT)
                                        break;
                                }
                            break;
                        default :
                            // If it's not implemented, it falls right through
                            for (int i = 0; i < nPtrs; i++)
                                if (query.testValue(keys[i])) {
                                    if (isTransactional && transaction != null) {
                                        RemoveValueLoggable log =
                                            new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                                keys[i], ptrs[i]);
                                        writeToLog(log, this);
                                    }
                                    if (callback != null)
                                        callback.indexInfo(keys[i], ptrs[i]);
                                    removeKey(i);
                                    removePointer(i);
                                    recalculateDataLen();
                                    --i;
                                }
                            break;
                    }
                } finally {
                    allowUnload = true;
                }
                break;
            default :
                throw new BTreeException("Invalid Page Type In query");
        }
    } else
        // No Query - Just Walk The Tree
        switch (ph.getStatus()) {
            case BRANCH :
                // NOTE(review): this branch removes keys/pointers of the
                // BRANCH node itself instead of recursing into children as the
                // query path does — verify this is the intended semantics.
                for (int i = 0; i < nPtrs; i++) {
                    if (isTransactional && transaction != null) {
                        RemoveValueLoggable log =
                            new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                keys[i], ptrs[i]);
                        writeToLog(log, this);
                    }
                    if (callback != null)
                        callback.indexInfo(keys[i], ptrs[i]);
                    removeKey(i);
                    removePointer(i);
                    recalculateDataLen();
                    --i;
                }
                break;
            case LEAF :
                try {
                    allowUnload = false;
                    for (int i = 0; i < nKeys; i++)
                        // NOTE(review): this branch is also reached when
                        // query == null, in which case query.getOperator()
                        // below throws NPE — confirm callers never pass null
                        // here, or add a null guard.
                        if (query.getOperator() != IndexQuery.TRUNC_LEFT
                            || query.testValue(keys[i])) {
                            if (isTransactional && transaction != null) {
                                RemoveValueLoggable log =
                                    new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i,
                                        keys[i], ptrs[i]);
                                writeToLog(log, this);
                            }
                            if (callback != null)
                                callback.indexInfo(keys[i], ptrs[i]);
                            removeKey(i);
                            removePointer(i);
                            recalculateDataLen();
                            --i;
                        }
                } finally {
                    allowUnload = true;
                }
                break;
            default :
                throw new BTreeException("Invalid Page Type In query");
        }
}
|
void function(Txn transaction, IndexQuery query, BTreeCallback callback) throws IOException, BTreeException, TerminatedException { if (query != null && query.getOperator() != IndexQuery.ANY && query.getOperator() != IndexQuery.TRUNC_LEFT) { Value[] qvals = query.getValues(); int leftIdx = searchKey(qvals[0]); int rightIdx = qvals.length > 1 ? searchKey(qvals[qvals.length - 1]) : leftIdx; boolean pos = query.getOperator() >= 0; switch (ph.getStatus()) { case BRANCH : leftIdx = leftIdx < 0 ? - (leftIdx + 1) : leftIdx + 1; rightIdx = rightIdx < 0 ? - (rightIdx + 1) : rightIdx + 1; switch (query.getOperator()) { case IndexQuery.BWX : case IndexQuery.NBWX : case IndexQuery.BW : case IndexQuery.NBW : case IndexQuery.IN : case IndexQuery.NIN : case IndexQuery.TRUNC_RIGHT : for (int i = 0; i < nPtrs; i++) { if ((i >= leftIdx && i <= rightIdx) == pos) { getChildNode(i).remove(transaction, query, callback); if (query.getOperator() == IndexQuery.TRUNC_RIGHT) break; } } break; case IndexQuery.EQ : case IndexQuery.NEQ : for (int i = 0; i < nPtrs; i++) if (!pos i == leftIdx) getChildNode(i).remove(transaction, query, callback); case IndexQuery.LT : case IndexQuery.GEQ : for (int i = 0; i < nPtrs; i++) if ((pos && (i <= leftIdx)) (!pos && (i >= leftIdx))) getChildNode(i).remove(transaction, query, callback); break; case IndexQuery.GT : case IndexQuery.LEQ : for (int i = 0; i < nPtrs; i++) if ((pos && (i >= leftIdx)) (!pos && (i <= leftIdx))) getChildNode(i).remove(transaction, query, callback); break; default : for (int i = 0; i < nPtrs; i++) getChildNode(i).remove(transaction, query, callback); break; } break; case LEAF : try { allowUnload = false; switch (query.getOperator()) { case IndexQuery.EQ : if (leftIdx >= 0) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), leftIdx, keys[leftIdx], ptrs[leftIdx]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[leftIdx], 
ptrs[leftIdx]); removeKey(leftIdx); removePointer(leftIdx); recalculateDataLen(); } break; case IndexQuery.NEQ : for (int i = 0; i < nPtrs; i++) if (i != leftIdx) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); } break; case IndexQuery.BWX : case IndexQuery.NBWX : case IndexQuery.BW : case IndexQuery.NBW : if (leftIdx < 0) leftIdx = - (leftIdx + 1); if (rightIdx < 0) rightIdx = - (rightIdx + 1); for (int i = 0; i < nPtrs; i++) if ((pos && (i >= leftIdx && i <= rightIdx)) (!pos && (i <= leftIdx i >= rightIdx))) { if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } } break; case IndexQuery.TRUNC_RIGHT : if (leftIdx < 0) leftIdx = - (leftIdx + 1); if (rightIdx < 0) rightIdx = - (rightIdx + 1); for (int i = leftIdx; i < rightIdx && i < nPtrs; i++) { if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } } if (rightIdx >= nPtrs) removeSequential(transaction, this, query, callback); break; case IndexQuery.IN : case IndexQuery.NIN : if (leftIdx < 0) leftIdx = - (leftIdx + 1); if (rightIdx < 0) rightIdx = - (rightIdx + 1); for (int i = 0; i < nPtrs; i++) if (!pos (i >= leftIdx && i <= rightIdx)) if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { 
RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } break; case IndexQuery.LT : case IndexQuery.GEQ : if (leftIdx < 0) leftIdx = - (leftIdx + 1); for (int i = 0; i < nPtrs; i++) if ((pos && (i <= leftIdx)) (!pos && (i >= leftIdx))) if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } break; case IndexQuery.GT : case IndexQuery.LEQ : if (leftIdx < 0) leftIdx = - (leftIdx + 1); for (int i = 0; i < nPtrs; i++) if ((pos && (i >= leftIdx)) (!pos && (i <= leftIdx))) { if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } else if (query.getOperator() == IndexQuery.TRUNC_RIGHT) break; } break; default : for (int i = 0; i < nPtrs; i++) if (query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } break; } } finally { allowUnload = true; } break; default : throw new BTreeException(STR); } } else switch (ph.getStatus()) { case BRANCH : for (int i = 0; i < nPtrs; i++) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new 
RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } break; case LEAF : try { allowUnload = false; for (int i = 0; i < nKeys; i++) if (query.getOperator() != IndexQuery.TRUNC_LEFT query.testValue(keys[i])) { if (isTransactional && transaction != null) { RemoveValueLoggable log = new RemoveValueLoggable(transaction, fileId, page.getPageNum(), i, keys[i], ptrs[i]); writeToLog(log, this); } if (callback != null) callback.indexInfo(keys[i], ptrs[i]); removeKey(i); removePointer(i); recalculateDataLen(); --i; } } finally { allowUnload = true; } break; default : throw new BTreeException(STR); } }
|
/**
* Search for keys matching the given {@link IndexQuery} and
* remove them from the node. Every match is reported
* to the specified {@link BTreeCallback}.
*
* @param query
* @param callback
* @throws IOException
* @throws BTreeException
* @throws TerminatedException
*/
|
Search for keys matching the given <code>IndexQuery</code> and remove them from the node. Every match is reported to the specified <code>BTreeCallback</code>
|
remove
|
{
"repo_name": "kingargyle/exist-1.4.x",
"path": "src/org/exist/storage/btree/BTree.java",
"license": "lgpl-2.1",
"size": 95097
}
|
[
"java.io.IOException",
"org.exist.storage.txn.Txn",
"org.exist.xquery.TerminatedException"
] |
import java.io.IOException; import org.exist.storage.txn.Txn; import org.exist.xquery.TerminatedException;
|
import java.io.*; import org.exist.storage.txn.*; import org.exist.xquery.*;
|
[
"java.io",
"org.exist.storage",
"org.exist.xquery"
] |
java.io; org.exist.storage; org.exist.xquery;
| 2,486,163
|
/**
 * Resizes an icon drawable to the standard launcher icon size by setting
 * its bounds to sIconWidth x sIconHeight.
 *
 * NOTE(review): assumes the static sIconWidth/sIconHeight fields have been
 * initialized elsewhere before this is called -- TODO confirm.
 *
 * @param icon the drawable whose bounds are resized in place
 */
static void resizeIconDrawable(Drawable icon) {
    icon.setBounds(0, 0, sIconWidth, sIconHeight);
}
|
static void resizeIconDrawable(Drawable icon) { icon.setBounds(0, 0, sIconWidth, sIconHeight); }
|
/**
* Resizes an icon drawable to the correct icon size.
*/
|
Resizes an icon drawable to the correct icon size
|
resizeIconDrawable
|
{
"repo_name": "mkodekar/LB-Launcher",
"path": "app/src/main/java/com/lb/launcher/Utilities.java",
"license": "apache-2.0",
"size": 21435
}
|
[
"android.graphics.drawable.Drawable"
] |
import android.graphics.drawable.Drawable;
|
import android.graphics.drawable.*;
|
[
"android.graphics"
] |
android.graphics;
| 368,425
|
/**
 * Computes the LanManager response to the challenge carried by the given
 * Type-2 message, using the supplied password.
 *
 * @param tc context to use
 * @param type2 the Type-2 message, may be null
 * @param password the password, may be null
 * @return the LanManager response bytes, or null if either the message or
 *         the password is null
 * @throws GeneralSecurityException if the underlying computation fails
 */
public static byte[] getLMResponse ( CIFSContext tc, Type2Message type2, String password ) throws GeneralSecurityException {
    // Only compute a response when both inputs are present.
    if ( type2 != null && password != null ) {
        return NtlmUtil.getPreNTLMResponse(tc, password, type2.getChallenge());
    }
    return null;
}
|
static byte[] function ( CIFSContext tc, Type2Message type2, String password ) throws GeneralSecurityException { if ( type2 == null || password == null ) return null; return NtlmUtil.getPreNTLMResponse(tc, password, type2.getChallenge()); }
|
/**
* Constructs the LanManager response to the given Type-2 message using
* the supplied password.
*
* @param tc
* context to use
* @param type2
* The Type-2 message.
* @param password
* The password.
* @return A <code>byte[]</code> containing the LanManager response.
* @throws GeneralSecurityException
*/
|
Constructs the LanManager response to the given Type-2 message using the supplied password
|
getLMResponse
|
{
"repo_name": "codelibs/jcifs",
"path": "src/main/java/jcifs/ntlmssp/Type3Message.java",
"license": "lgpl-2.1",
"size": 31310
}
|
[
"java.security.GeneralSecurityException",
"jcifs.smb.NtlmUtil"
] |
import java.security.GeneralSecurityException; import jcifs.smb.NtlmUtil;
|
import java.security.*; import jcifs.smb.*;
|
[
"java.security",
"jcifs.smb"
] |
java.security; jcifs.smb;
| 1,852,035
|
private IEditorPart openTileEditor(final IEditorInput editorInput, final String name) {
final IEditorPart[] part = new IEditorPart[1];
PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() {
|
IEditorPart function(final IEditorInput editorInput, final String name) { final IEditorPart[] part = new IEditorPart[1]; PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() {
|
/**
* Open an XML Editor with the tile content.
*/
|
Open an XML Editor with the tile content
|
openTileEditor
|
{
"repo_name": "GEBIT/maven-tiles-m2e",
"path": "io.repaint.maven.tiles.m2e.plugin/src/main/java/io/repaint/maven/tiles/m2e/TilesHyperlinkDetector.java",
"license": "apache-2.0",
"size": 13676
}
|
[
"org.eclipse.ui.IEditorInput",
"org.eclipse.ui.IEditorPart",
"org.eclipse.ui.PlatformUI"
] |
import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.PlatformUI;
|
import org.eclipse.ui.*;
|
[
"org.eclipse.ui"
] |
org.eclipse.ui;
| 2,601,221
|
/**
 * Encodes the info strings of the given rooms into a single ROOM_INFO
 * command string. Rooms with a null or empty info string are skipped.
 *
 * @param rooms the rooms whose info should be encoded
 * @return the ROOM_INFO command, with the encoded properties appended when
 *         any room contributed an entry
 */
public static String encodeRoomsInfo(Node[] rooms) {
    final Properties props = new Properties();
    for (Node room : rooms) {
        final String info = room.getInfo();
        if (info != null && info.length() > 0) {
            props.setProperty(room.getId(), info);
        }
    }
    final String encoded = new PropertiesEncoder(props).getStringValue();
    return encoded == null ? ROOM_INFO : ROOM_INFO + encoded;
}
|
static String function(Node[] rooms) { Properties p = new Properties(); for (int i = 0; i < rooms.length; ++i) { if (rooms[i].getInfo() != null && rooms[i].getInfo().length() > 0) p.setProperty(rooms[i].getId(), rooms[i].getInfo()); } String value = new PropertiesEncoder(p).getStringValue(); return value == null ? ROOM_INFO : ROOM_INFO + value; }
|
/**
* Sent when a room's info changes, or to update all rooms' info at once
*
* @param rooms
* @return
*/
|
Sent when a room's info changes, or to update all rooms' info at once
|
encodeRoomsInfo
|
{
"repo_name": "caiusb/vassal",
"path": "src/VASSAL/chat/node/Protocol.java",
"license": "lgpl-2.1",
"size": 10019
}
|
[
"java.util.Properties"
] |
import java.util.Properties;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,660,711
|
/**
 * Called when the parser enters a NotificationType parse tree node.
 * The default implementation does nothing; subclasses override this to
 * analyze the node.
 *
 * @param node the token node being entered
 * @throws ParseException if the node analysis discovers errors
 */
protected void enterNotificationType(Token node)
    throws ParseException {
}
|
void function(Token node) throws ParseException { }
|
/**
* Called when entering a parse tree node.
*
* @param node the node being entered
*
* @throws ParseException if the node analysis discovered errors
*/
|
Called when entering a parse tree node
|
enterNotificationType
|
{
"repo_name": "richb-hanover/mibble-2.9.2",
"path": "src/java/net/percederberg/mibble/asn1/Asn1Analyzer.java",
"license": "gpl-2.0",
"size": 275483
}
|
[
"net.percederberg.grammatica.parser.ParseException",
"net.percederberg.grammatica.parser.Token"
] |
import net.percederberg.grammatica.parser.ParseException; import net.percederberg.grammatica.parser.Token;
|
import net.percederberg.grammatica.parser.*;
|
[
"net.percederberg.grammatica"
] |
net.percederberg.grammatica;
| 447,429
|
/**
 * Deletes all columns of the specified family whose timestamp equals the
 * specified timestamp, by appending a DeleteFamilyVersion marker cell.
 *
 * @param family family name
 * @param timestamp version timestamp
 * @return this, for invocation chaining
 */
public Delete addFamilyVersion(final byte [] family, final long timestamp) {
    final KeyValue marker =
        new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamilyVersion);
    getCellList(family).add(marker);
    return this;
}
|
Delete function(final byte [] family, final long timestamp) { List<Cell> list = getCellList(family); list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamilyVersion)); return this; }
|
/**
* Delete all columns of the specified family with a timestamp equal to
* the specified timestamp.
* @param family family name
* @param timestamp version timestamp
* @return this for invocation chaining
*/
|
Delete all columns of the specified family with a timestamp equal to the specified timestamp
|
addFamilyVersion
|
{
"repo_name": "JingchengDu/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java",
"license": "apache-2.0",
"size": 12757
}
|
[
"java.util.List",
"org.apache.hadoop.hbase.Cell",
"org.apache.hadoop.hbase.KeyValue"
] |
import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue;
|
import java.util.*; import org.apache.hadoop.hbase.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 1,255,136
|
/**
 * Returns the science app finder held by this service implementation.
 *
 * @return the science app finder
 */
public ScienceAppFinder getScienceAppFinder() {
    return scienceAppFinder;
}
|
ScienceAppFinder function() { return scienceAppFinder; }
|
/**
* Returns the science app finder.
*
* @return the science app finder
*/
|
Returns the science app finder
|
getScienceAppFinder
|
{
"repo_name": "queza85/edison",
"path": "edison-portal-framework/edison-appstore-2016-portlet/docroot/WEB-INF/src/org/kisti/edison/science/service/base/AppTestHistoryLocalServiceBaseImpl.java",
"license": "gpl-3.0",
"size": 53177
}
|
[
"org.kisti.edison.science.service.persistence.ScienceAppFinder"
] |
import org.kisti.edison.science.service.persistence.ScienceAppFinder;
|
import org.kisti.edison.science.service.persistence.*;
|
[
"org.kisti.edison"
] |
org.kisti.edison;
| 948,808
|
/**
 * Gets outputMin, the computed min output of the op.
 *
 * @return the outputMin tensor handle
 */
public Output<TFloat32> outputMin() {
    return outputMin;
}
|
Output<TFloat32> function() { return outputMin; }
|
/**
* Gets outputMin.
* The computed min output.
* @return outputMin.
*/
|
Gets outputMin. The computed min output
|
outputMin
|
{
"repo_name": "tensorflow/java",
"path": "tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/quantization/RequantizationRange.java",
"license": "apache-2.0",
"size": 4528
}
|
[
"org.tensorflow.Output",
"org.tensorflow.types.TFloat32"
] |
import org.tensorflow.Output; import org.tensorflow.types.TFloat32;
|
import org.tensorflow.*; import org.tensorflow.types.*;
|
[
"org.tensorflow",
"org.tensorflow.types"
] |
org.tensorflow; org.tensorflow.types;
| 1,599,920
|
/**
 * Converts a fuzzy logic "random event" to a tri-state value:
 * Boolean.FALSE when the fuzz level is at most 1/3 (unlikely),
 * Boolean.TRUE when it is at least 2/3 (likely), and null otherwise
 * (undecided).
 *
 * @param source the fuzzy source queried once for its fuzz level
 * @return TRUE, FALSE, or null as described above
 */
public static Boolean triState(final FuzzySource source) {
    final float level = source.fuzzLevel();
    if (level <= 1.0f / 3.0f) {
        return Boolean.FALSE;
    }
    if (level >= 2.0f / 3.0f) {
        return Boolean.TRUE;
    }
    return null;
}
|
static Boolean function(final FuzzySource source) { final float v = source.fuzzLevel(); return (v <= 1.0f / 3.0f) ? Boolean.FALSE : (v >= 2.0f / 3.0f ? Boolean.TRUE : null); }
|
/**
* Converts a fuzzy logic "random event" to a "tri-state" value
* (TRUE/likely, null/undecided, FALSE/unlikely)
*
* Not @Inline, because it would not actually save a method call.
*/
|
Converts a fuzzy logic "random event" to a "tri-state" value (TRUE/likely, null/undecided, FALSE/unlikely) Not @Inline, because it would not actually save a method call
|
triState
|
{
"repo_name": "skunkiferous/Util",
"path": "xtend/src/main/java/com/blockwithme/util/xtend/FuzzyExtension.java",
"license": "apache-2.0",
"size": 29001
}
|
[
"com.blockwithme.util.shared.FuzzySource"
] |
import com.blockwithme.util.shared.FuzzySource;
|
import com.blockwithme.util.shared.*;
|
[
"com.blockwithme.util"
] |
com.blockwithme.util;
| 685,683
|
/**
 * Merges the cells of the given columns in this row.
 *
 * @param propertyIds the property ids of the columns to merge; at least
 *        two must be supplied
 * @return the remaining visible cell after the merge
 */
public CELLTYPE join(Object... propertyIds) {
    assert propertyIds.length > 1 : "You need to merge at least 2 properties";
    final Set<CELLTYPE> cells = new HashSet<CELLTYPE>();
    for (Object propertyId : propertyIds) {
        cells.add(getCell(propertyId));
    }
    return join(cells);
}
|
CELLTYPE function(Object... propertyIds) { assert propertyIds.length > 1 : STR; Set<CELLTYPE> cells = new HashSet<CELLTYPE>(); for (int i = 0; i < propertyIds.length; ++i) { cells.add(getCell(propertyIds[i])); } return join(cells); }
|
/**
* Merges columns cells in a row
*
* @param propertyIds
* The property ids of columns to merge
* @return The remaining visible cell after the merge
*/
|
Merges columns cells in a row
|
join
|
{
"repo_name": "synes/vaadin",
"path": "server/src/com/vaadin/ui/Grid.java",
"license": "apache-2.0",
"size": 239096
}
|
[
"java.util.HashSet",
"java.util.Set"
] |
import java.util.HashSet; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,974,284
|
/**
 * Checks that each line item that was trafficked is now ACTIVE. If a
 * trafficked line item is not active, this method attempts to activate it
 * (if it is INACTIVE) or resume it (if it is PAUSED). Progress and
 * failures are reported on stdout; when an action fails it must be
 * completed manually.
 *
 * @param dfpServices the services factory used to obtain the LineItemService
 * @param session the authenticated DFP session
 * @param LIDs the line item ids as a bracketed list, e.g. "[123, 456]"
 *        -- assumed to come from a List#toString() -- TODO confirm
 * @throws Exception if the paused-line-item paging fails
 */
public static void activateLineItems(
    DfpServices dfpServices, DfpSession session, String LIDs) throws Exception {
  // Turn "[1, 2]" into "(1, 2)" so it can be used as a PQL IN clause.
  String idInClause = LIDs.replace("]", ")").replace("[", "(");
  // Get the LineItemService.
  LineItemServiceInterface lineItemService =
      dfpServices.get(session, LineItemServiceInterface.class);
  // Create a statement to select the INACTIVE line items among the ids.
  StatementBuilder inactiveLineItemStatement = new StatementBuilder()
      .where("status = :status AND id IN " + idInClause)
      .orderBy("id ASC")
      .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT)
      .withBindVariableValue("status", "INACTIVE");
  // Default for total result set size.
  int totalResultSetSize = 0;
  do {
    try {
      // Page through the inactive line items, reporting each one found.
      LineItemPage page =
          lineItemService.getLineItemsByStatement(inactiveLineItemStatement.toStatement());
      if (page.getResults() != null) {
        totalResultSetSize = page.getTotalResultSetSize();
        int i = page.getStartIndex() + 1; // 1-based numbering for display
        for (LineItem lineItem : page.getResults()) {
          System.out.printf(
              "%d) Line item with ID %d will be activated.%n", i++, lineItem.getId());
        }
      }
    } catch (Exception e) {
      System.out.println("Something went wrong assembling the query to activate Line Items.");
      System.out.println("This task may need to be done manually.");
    }
    inactiveLineItemStatement.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
  } while (inactiveLineItemStatement.getOffset() < totalResultSetSize);
  System.out.printf("Number of line items to be activated: %d%n", totalResultSetSize);
  if (totalResultSetSize > 0) {
    // Remove limit and offset so the action applies to every match.
    inactiveLineItemStatement.removeLimitAndOffset();
    // Create action.
    com.google.api.ads.dfp.axis.v201702.ActivateLineItems activateAction =
        new com.google.api.ads.dfp.axis.v201702.ActivateLineItems();
    try {
      // Perform action.
      UpdateResult activateResult =
          lineItemService.performLineItemAction(activateAction, inactiveLineItemStatement.toStatement());
      if (activateResult != null && activateResult.getNumChanges() > 0) {
        System.out.printf("Number of inactive line items activated: %d%n", activateResult.getNumChanges());
      } else {
        System.out.println("No inactive line items were activated.");
      }
    } catch (Exception e) {
      System.out.println("Something went wrong when activating your inactive line items.");
      System.out.println("This may need to be done manually");
    }
  }
  // Separate statement for PAUSED line items, which require a Resume action.
  int pausedTotalResultSetSize = 0;
  StatementBuilder pausedLineItemStatement = new StatementBuilder()
      .where("status = :status AND id IN " + idInClause)
      .orderBy("id ASC")
      .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT)
      .withBindVariableValue("status", "PAUSED");
  do {
    // Get paused line items by statement.
    LineItemPage pausedPage =
        lineItemService.getLineItemsByStatement(pausedLineItemStatement.toStatement());
    if (pausedPage.getResults() != null) {
      pausedTotalResultSetSize = pausedPage.getTotalResultSetSize();
      // Fix: start at getStartIndex() + 1 so the displayed numbering is
      // 1-based, consistent with the activation loop above (was 0-based).
      int i = pausedPage.getStartIndex() + 1;
      for (LineItem lineItem : pausedPage.getResults()) {
        System.out.printf(
            "%d) Line item with ID %d will be resumed.%n", i++, lineItem.getId());
      }
    }
    pausedLineItemStatement.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
  } while (pausedLineItemStatement.getOffset() < pausedTotalResultSetSize);
  System.out.printf("Number of line items to be resumed: %d%n", pausedTotalResultSetSize);
  if (pausedTotalResultSetSize > 0) {
    // Remove limit and offset from statement.
    pausedLineItemStatement.removeLimitAndOffset();
    // Create action.
    com.google.api.ads.dfp.axis.v201702.ResumeLineItems resumeAction =
        new com.google.api.ads.dfp.axis.v201702.ResumeLineItems();
    try {
      // Perform action.
      UpdateResult result =
          lineItemService.performLineItemAction(resumeAction, pausedLineItemStatement.toStatement());
      if (result != null && result.getNumChanges() > 0) {
        System.out.printf("Number of paused line items resumed: %d%n", result.getNumChanges());
      } else {
        System.out.println("No paused line items were resumed.");
      }
    } catch (Exception e) {
      System.out.println("Something went wrong when resuming your paused line items.");
      System.out.println("This may need to be done manually");
    }
  }
}
|
static void function( DfpServices dfpServices, DfpSession session, String LIDs) throws Exception { String LIDQuery = LIDs.replace("]", ")").replace("[", "("); LineItemServiceInterface lineItemService = dfpServices.get(session, LineItemServiceInterface.class); StatementBuilder inactiveLineItemStatement = new StatementBuilder() .where(STR + LIDQuery) .orderBy(STR) .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT) .withBindVariableValue(STR, STR); int totalResultSetSize = 0; do { try { LineItemPage page = lineItemService.getLineItemsByStatement(inactiveLineItemStatement.toStatement()); if (page.getResults() != null) { totalResultSetSize = page.getTotalResultSetSize(); int i = page.getStartIndex() + 1; for (LineItem lineItem : page.getResults()) { System.out.printf( STR, i++, lineItem.getId()); } } } catch (Exception e) { System.out.println(STR); System.out.println(STR); } inactiveLineItemStatement.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT); } while (inactiveLineItemStatement.getOffset() < totalResultSetSize); System.out.printf(STR, totalResultSetSize); if (totalResultSetSize > 0) { inactiveLineItemStatement.removeLimitAndOffset(); com.google.api.ads.dfp.axis.v201702.ActivateLineItems activateAction = new com.google.api.ads.dfp.axis.v201702.ActivateLineItems(); try{ UpdateResult activateResult = lineItemService.performLineItemAction(activateAction, inactiveLineItemStatement.toStatement()); if (activateResult != null && activateResult.getNumChanges() > 0) { System.out.printf(STR, activateResult.getNumChanges()); } else { System.out.println(STR); } } catch (Exception e) { System.out.println(STR); System.out.println(STR); } } int pausedTotalResultSetSize = 0; StatementBuilder pausedLineItemStatement = new StatementBuilder() .where(STR + LIDQuery) .orderBy(STR) .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT) .withBindVariableValue(STR, STR); do { LineItemPage pausedPage = lineItemService.getLineItemsByStatement(pausedLineItemStatement.toStatement()); if 
(pausedPage.getResults() != null) { pausedTotalResultSetSize = pausedPage.getTotalResultSetSize(); int i = pausedPage.getStartIndex(); for (LineItem lineItem : pausedPage.getResults()) { System.out.printf( STR, i++, lineItem.getId()); } } pausedLineItemStatement.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT); } while (pausedLineItemStatement.getOffset() < pausedTotalResultSetSize); System.out.printf(STR, pausedTotalResultSetSize); if (pausedTotalResultSetSize > 0) { pausedLineItemStatement.removeLimitAndOffset(); com.google.api.ads.dfp.axis.v201702.ResumeLineItems resumeAction = new com.google.api.ads.dfp.axis.v201702.ResumeLineItems(); try{ UpdateResult result = lineItemService.performLineItemAction(resumeAction, pausedLineItemStatement.toStatement()); if (result != null && result.getNumChanges() > 0) { System.out.printf(STR, result.getNumChanges()); } else { System.out.println(STR); } } catch (Exception e) { System.out.println(STR); System.out.println(STR); } } }
|
/**Checks that each line item that was trafficked is now ACTIVE.
* If a trafficked line item is not active, this method attempts to
 * resume the line (if it is PAUSED) or activate the line (if it is INACTIVE).
*
* @param dfpServices
* @param session
* @param LIDs
* @throws Exception
*/
|
Checks that each line item that was trafficked is now ACTIVE. If a trafficked line item is not active, this method attempts to resume the line (if it is PAUSED) or activate the line (if it is INACTIVE)
|
activateLineItems
|
{
"repo_name": "aspic2/DFP-CreativeShare",
"path": "src/main/java/dfpAPI/project/DFPMethods.java",
"license": "apache-2.0",
"size": 22398
}
|
[
"com.google.api.ads.dfp.axis.factory.DfpServices",
"com.google.api.ads.dfp.axis.utils.v201702.StatementBuilder",
"com.google.api.ads.dfp.axis.v201702.LineItem",
"com.google.api.ads.dfp.axis.v201702.LineItemPage",
"com.google.api.ads.dfp.axis.v201702.LineItemServiceInterface",
"com.google.api.ads.dfp.axis.v201702.UpdateResult",
"com.google.api.ads.dfp.lib.client.DfpSession"
] |
import com.google.api.ads.dfp.axis.factory.DfpServices; import com.google.api.ads.dfp.axis.utils.v201702.StatementBuilder; import com.google.api.ads.dfp.axis.v201702.LineItem; import com.google.api.ads.dfp.axis.v201702.LineItemPage; import com.google.api.ads.dfp.axis.v201702.LineItemServiceInterface; import com.google.api.ads.dfp.axis.v201702.UpdateResult; import com.google.api.ads.dfp.lib.client.DfpSession;
|
import com.google.api.ads.dfp.axis.factory.*; import com.google.api.ads.dfp.axis.utils.v201702.*; import com.google.api.ads.dfp.axis.v201702.*; import com.google.api.ads.dfp.lib.client.*;
|
[
"com.google.api"
] |
com.google.api;
| 1,512,186
|
FileInputStream fis;
ChessBoard board = new ChessBoard();
try {
fis = new FileInputStream(fileName);
//Construct BufferedReader from InputStreamReader
BufferedReader br = new BufferedReader(new InputStreamReader(fis));
String line;
while ((line = br.readLine()) != null) {
Piece piece = parsePiece(line);
if(piece!=null) {
board.addPiece(piece);
}
}
br.close();
} catch (IOException ex) {
throw new RuntimeException("Can not read boar", ex);
}
return board;
}
|
FileInputStream fis; ChessBoard board = new ChessBoard(); try { fis = new FileInputStream(fileName); BufferedReader br = new BufferedReader(new InputStreamReader(fis)); String line; while ((line = br.readLine()) != null) { Piece piece = parsePiece(line); if(piece!=null) { board.addPiece(piece); } } br.close(); } catch (IOException ex) { throw new RuntimeException(STR, ex); } return board; }
|
/**
* Build board from text file.
* @param fileName file name
* @return board
*/
|
Build board from text file
|
readBoard
|
{
"repo_name": "adamzimny/Chess4f",
"path": "src/chess4f/file/FileReader.java",
"license": "gpl-3.0",
"size": 3484
}
|
[
"java.io.BufferedReader",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStreamReader"
] |
import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,198,140
|
/**
 * Retrieves extended data from the intent.
 *
 * @param name the name of the desired item
 * @return the ArrayList&lt;CharSequence&gt; previously added with
 *         putCharSequenceArrayListExtra(), or null if the intent has no
 *         extras or no such item
 */
public ArrayList<CharSequence> getCharSequenceArrayListExtra(String name) {
    if (mExtras == null) {
        return null;
    }
    return mExtras.getCharSequenceArrayList(name);
}
|
ArrayList<CharSequence> function(String name) { return mExtras == null ? null : mExtras.getCharSequenceArrayList(name); }
|
/**
* Retrieve extended data from the intent.
*
* @param name The name of the desired item.
*
* @return the value of an item that previously added with putExtra()
* or null if no ArrayList<CharSequence> value was found.
*
* @see #putCharSequenceArrayListExtra(String, ArrayList)
*/
|
Retrieve extended data from the intent
|
getCharSequenceArrayListExtra
|
{
"repo_name": "indashnet/InDashNet.Open.UN2000",
"path": "android/frameworks/base/core/java/android/content/Intent.java",
"license": "apache-2.0",
"size": 299722
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 981,320
|
/**
 * Sets the output transformer class; the class is wrapped in a supplier
 * and delegated to the supplier-based overload.
 *
 * @param newValue the output transformer class
 * @return this
 */
public Builder withOutputTransformer(Class<? extends UnaryOperator<CharSequence>> newValue) {
    return withOutputTransformer(asSupplier(newValue));
}
|
Builder function(Class<? extends UnaryOperator<CharSequence>> newValue) { return withOutputTransformer(asSupplier(newValue)); }
|
/**
* Sets the output transformer class.
* @param newValue the output transformer class
* @return this
*/
|
Sets the output transformer class
|
withOutputTransformer
|
{
"repo_name": "cocoatomo/asakusafw",
"path": "core-project/asakusa-runtime/src/main/java/com/asakusafw/runtime/io/text/csv/CsvTextFormat.java",
"license": "apache-2.0",
"size": 10179
}
|
[
"java.util.function.UnaryOperator"
] |
import java.util.function.UnaryOperator;
|
import java.util.function.*;
|
[
"java.util"
] |
java.util;
| 2,525,780
|
/**
 * Gets the list of discovered Bluetooth devices' information.
 *
 * @return the list of BleDeviceInfo
 */
List<BleDeviceInfo> getDeviceInfoList() {
    return mDeviceInfoList;
}
|
List<BleDeviceInfo> getDeviceInfoList() { return mDeviceInfoList; }
|
/**
* Get for list of Bluetooth devices information
* @return List of BleDeviceInfo
*/
|
Get for list of Bluetooth devices information
|
getDeviceInfoList
|
{
"repo_name": "foxtrot94/DotProd",
"path": "app/src/main/java/me/dotteam/dotprod/hw/SensorTagConnector.java",
"license": "apache-2.0",
"size": 15943
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 72,448
|
/**
 * Verifies that if an element for a closed session window ends up being
 * merged into other still-open session windows, the resulting session
 * window is not 'poisoned': the merged window still fires its own pane.
 */
@Test
public void testMergingWithClosedDoesNotPoison() throws Exception {
  // Session windows with a 10ms gap, discarding fired panes, 50ms
  // allowed lateness, firing non-empty panes on close.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(
          Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES,
          Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // 1 element, force its trigger to close.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.injectElements(TimestampedValue.of(2, new Instant(2)));
  // 3 elements, one already closed.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)),
      TimestampedValue.of(3, new Instant(3)));
  // Advance past the end of all windows so the on-time pane fires.
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  // Expect two panes: the early/final pane of the closed session [2, 12),
  // and the on-time pane of the merged session [1, 13).
  assertThat(output.size(), equalTo(2));
  assertThat(
      output.get(0),
      isSingleWindowedValue(
          containsInAnyOrder(2),
          2, // timestamp
          2, // window start
          12)); // window end
  assertThat(
      output.get(0).getPane(), equalTo(PaneInfo.createPane(true, true, Timing.EARLY, 0, 0)));
  assertThat(
      output.get(1),
      isSingleWindowedValue(
          containsInAnyOrder(1, 2, 3),
          1, // timestamp
          1, // window start
          13)); // window end
  assertThat(
      output.get(1).getPane(), equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0)));
}
|
void function() throws Exception { ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester = ReduceFnTester.nonCombining( Sessions.withGapDuration(Duration.millis(10)), mockTriggerStateMachine, AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50), ClosingBehavior.FIRE_IF_NON_EMPTY); when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true); triggerShouldFinish(mockTriggerStateMachine); tester.injectElements(TimestampedValue.of(2, new Instant(2))); when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false); tester.injectElements( TimestampedValue.of(1, new Instant(1)), TimestampedValue.of(2, new Instant(2)), TimestampedValue.of(3, new Instant(3))); tester.advanceInputWatermark(new Instant(100)); List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput(); assertThat(output.size(), equalTo(2)); assertThat( output.get(0), isSingleWindowedValue( containsInAnyOrder(2), 2, 2, 12)); assertThat( output.get(0).getPane(), equalTo(PaneInfo.createPane(true, true, Timing.EARLY, 0, 0))); assertThat( output.get(1), isSingleWindowedValue( containsInAnyOrder(1, 2, 3), 1, 1, 13)); assertThat( output.get(1).getPane(), equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0))); }
|
/**
* If an element for a closed session window ends up being merged into other still-open session
* windows, the resulting session window is not 'poisoned'.
*/
|
If an element for a closed session window ends up being merged into other still-open session windows, the resulting session window is not 'poisoned'
|
testMergingWithClosedDoesNotPoison
|
{
"repo_name": "rangadi/incubator-beam",
"path": "runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnRunnerTest.java",
"license": "apache-2.0",
"size": 98675
}
|
[
"java.util.List",
"org.apache.beam.runners.core.WindowMatchers",
"org.apache.beam.sdk.transforms.windowing.IntervalWindow",
"org.apache.beam.sdk.transforms.windowing.PaneInfo",
"org.apache.beam.sdk.transforms.windowing.Sessions",
"org.apache.beam.sdk.transforms.windowing.Window",
"org.apache.beam.sdk.util.WindowedValue",
"org.apache.beam.sdk.values.TimestampedValue",
"org.apache.beam.sdk.values.WindowingStrategy",
"org.hamcrest.Matchers",
"org.joda.time.Duration",
"org.joda.time.Instant",
"org.junit.Assert",
"org.mockito.Mockito"
] |
import java.util.List; import org.apache.beam.runners.core.WindowMatchers; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.Sessions; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.WindowingStrategy; import org.hamcrest.Matchers; import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Assert; import org.mockito.Mockito;
|
import java.util.*; import org.apache.beam.runners.core.*; import org.apache.beam.sdk.transforms.windowing.*; import org.apache.beam.sdk.util.*; import org.apache.beam.sdk.values.*; import org.hamcrest.*; import org.joda.time.*; import org.junit.*; import org.mockito.*;
|
[
"java.util",
"org.apache.beam",
"org.hamcrest",
"org.joda.time",
"org.junit",
"org.mockito"
] |
java.util; org.apache.beam; org.hamcrest; org.joda.time; org.junit; org.mockito;
| 448,288
|
private static File getFileFromUri(Context context, Uri uri) {
// first try by direct path
File file = new File(uri.getPath());
if (file.exists()) {
return file;
}
// try reading real path from content resolver (gallery images)
Cursor cursor = null;
try {
String[] proj = {MediaStore.Images.Media.DATA};
cursor = context.getContentResolver().query(uri, proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
String realPath = cursor.getString(column_index);
file = new File(realPath);
} catch (Exception ignored) {
} finally {
if (cursor != null) {
cursor.close();
}
}
return file;
}
|
static File function(Context context, Uri uri) { File file = new File(uri.getPath()); if (file.exists()) { return file; } Cursor cursor = null; try { String[] proj = {MediaStore.Images.Media.DATA}; cursor = context.getContentResolver().query(uri, proj, null, null, null); int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA); cursor.moveToFirst(); String realPath = cursor.getString(column_index); file = new File(realPath); } catch (Exception ignored) { } finally { if (cursor != null) { cursor.close(); } } return file; }
|
/**
* Get {@link File} object for the given Android URI.<br>
* Use content resolver to get real path if direct path doesn't return valid file.
*/
|
Get <code>File</code> object for the given Android URI. Use content resolver to get real path if direct path doesn't return valid file
|
getFileFromUri
|
{
"repo_name": "Minitour/crofis-android-uikit",
"path": "ui/src/main/java/net/crofis/ui/custom/cropper/BitmapUtils.java",
"license": "apache-2.0",
"size": 21597
}
|
[
"android.content.Context",
"android.database.Cursor",
"android.net.Uri",
"android.provider.MediaStore",
"java.io.File"
] |
import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.provider.MediaStore; import java.io.File;
|
import android.content.*; import android.database.*; import android.net.*; import android.provider.*; import java.io.*;
|
[
"android.content",
"android.database",
"android.net",
"android.provider",
"java.io"
] |
android.content; android.database; android.net; android.provider; java.io;
| 275,969
|
private static void parseTimeZoneOffset(String dateStr, ParserState state, String zoneStr) throws CalendarParserException {
int place = PLACE_HOUR;
final boolean isNegative = (zoneStr.charAt(0) == '-');
if (!isNegative && zoneStr.charAt(0) != '+') {
throw new CalendarParserException("Bad time zone offset \"" +
zoneStr + "\" in date \"" +
dateStr + "\"");
}
int hour = UNSET;
int minute = UNSET;
String[] tList = zoneStr.substring(1).split(":");
for (int i = 0; i < tList.length; i++) {
String token = tList[i];
if (DEBUG) {
System.err.println("TZ_HOUR " +
(hour != UNSET ?
Integer.toString(hour) : "UNSET") +
", TZ_MINUTE " +
(minute != UNSET ?
Integer.toString(minute) : "UNSET") +
", TOKEN=\"" + token + "\"");
}
final int val;
try {
val = Integer.parseInt(token);
} catch (NumberFormatException nfe) {
throw new CalendarParserException("Bad time zone " +
getTimePlaceString(place) +
" offset \"" + token +
"\" in \"" + dateStr + "\"");
}
switch (place) {
case PLACE_HOUR:
hour = val;
if (DEBUG) {
System.err.println("Set time zone offset hour to " + val);
}
place = PLACE_MINUTE;
break;
case PLACE_MINUTE:
minute = val;
if (DEBUG) {
System.err.println("Set time zone offset minute to " +
val);
}
place = PLACE_UNKNOWN;
break;
default:
throw new CalendarParserException("Unexpected place value " +
place);
}
}
String customID = "GMT" + (isNegative ? "-" : "+") + hour + ":" +
(minute < 10 ? "0" : "") + minute;
state.setTimeZone(TimeZone.getTimeZone(customID));
}
|
static void function(String dateStr, ParserState state, String zoneStr) throws CalendarParserException { int place = PLACE_HOUR; final boolean isNegative = (zoneStr.charAt(0) == '-'); if (!isNegative && zoneStr.charAt(0) != '+') { throw new CalendarParserException(STRSTR\STRSTR\STR:STRTZ_HOUR STRUNSETSTR, TZ_MINUTE STRUNSETSTR, TOKEN=\STR\STRBad time zone STR offset \STR\STRSTR\STRSet time zone offset hour to STRSet time zone offset minute to STRUnexpected place value STRGMTSTR-STR+STR:STR0STR") + minute; state.setTimeZone(TimeZone.getTimeZone(customID)); }
|
/**
* Parse a time zone offset string.
*
* @param dateStr full date string
* @param state parser state
* @param zoneStr string containing colon-separated time zone offset
*
* @throws CalendarParserException if there is a problem with the time
*/
|
Parse a time zone offset string
|
parseTimeZoneOffset
|
{
"repo_name": "smhoekstra/iaf",
"path": "JavaSource/nl/nn/adapterframework/util/CalendarParser.java",
"license": "apache-2.0",
"size": 64203
}
|
[
"java.util.TimeZone"
] |
import java.util.TimeZone;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 432,915
|
private TaskData doInBackGroundRepositionField(TaskData... params){
Timber.d("doInBackgroundRepositionField");
Object[] objects = params[0].getObjArray();
JSONObject model = (JSONObject) objects[0];
JSONObject field = (JSONObject) objects[1];
int index = (Integer) objects[2];
Collection col = CollectionHelper.getInstance().getCol(mContext);
try {
col.getModels().moveField(model, field, index);
col.save();
} catch (ConfirmModSchemaException e) {
//Should never be reached
return new TaskData(false);
}
return new TaskData(true);
}
|
TaskData function(TaskData... params){ Timber.d(STR); Object[] objects = params[0].getObjArray(); JSONObject model = (JSONObject) objects[0]; JSONObject field = (JSONObject) objects[1]; int index = (Integer) objects[2]; Collection col = CollectionHelper.getInstance().getCol(mContext); try { col.getModels().moveField(model, field, index); col.save(); } catch (ConfirmModSchemaException e) { return new TaskData(false); } return new TaskData(true); }
|
/**
* Repositions the given field in the given model
*/
|
Repositions the given field in the given model
|
doInBackGroundRepositionField
|
{
"repo_name": "timrae/Anki-Android",
"path": "AnkiDroid/src/main/java/com/ichi2/async/DeckTask.java",
"license": "gpl-3.0",
"size": 55212
}
|
[
"com.ichi2.anki.CollectionHelper",
"com.ichi2.anki.exception.ConfirmModSchemaException",
"com.ichi2.libanki.Collection",
"org.json.JSONObject"
] |
import com.ichi2.anki.CollectionHelper; import com.ichi2.anki.exception.ConfirmModSchemaException; import com.ichi2.libanki.Collection; import org.json.JSONObject;
|
import com.ichi2.anki.*; import com.ichi2.anki.exception.*; import com.ichi2.libanki.*; import org.json.*;
|
[
"com.ichi2.anki",
"com.ichi2.libanki",
"org.json"
] |
com.ichi2.anki; com.ichi2.libanki; org.json;
| 1,881,814
|
@Deprecated
public static TikaInputStream get(File file) throws FileNotFoundException {
return get(file, new Metadata());
}
|
static TikaInputStream function(File file) throws FileNotFoundException { return get(file, new Metadata()); }
|
/**
* Creates a TikaInputStream from the given file.
* <p>
* Note that you must always explicitly close the returned stream to
* prevent leaking open file handles.
*
* @param file input file
* @return a TikaInputStream instance
* @throws FileNotFoundException if the file does not exist
* @deprecated use {@link #get(Path)}. In Tika 2.0, this will be removed
* or modified to throw an IOException.
*/
|
Creates a TikaInputStream from the given file. Note that you must always explicitly close the returned stream to prevent leaking open file handles
|
get
|
{
"repo_name": "zamattiac/tika",
"path": "tika-core/src/main/java/org/apache/tika/io/TikaInputStream.java",
"license": "apache-2.0",
"size": 25019
}
|
[
"java.io.File",
"java.io.FileNotFoundException",
"org.apache.tika.metadata.Metadata"
] |
import java.io.File; import java.io.FileNotFoundException; import org.apache.tika.metadata.Metadata;
|
import java.io.*; import org.apache.tika.metadata.*;
|
[
"java.io",
"org.apache.tika"
] |
java.io; org.apache.tika;
| 1,441,882
|
private static native void doReleaseTunerActions(long o);
private AbstractGrabber fg;
private DeviceInfo deviceInfo;
private ControlList controls;
private TunerList tuners;
private String deviceFile;
private State state;
private boolean supportJPEG, supportRGB24, supportBGR24, supportYUV420, supportYVU420;
private final long v4l4jObject;
private ThreadFactory threadFactory;
public VideoDevice(String dev) throws V4L4JException {
if (dev == null)
throw new NullPointerException("The device file cannot be null");
if (!(new File(dev).canRead()))
throw new V4L4JException("The device file (" + dev + ") is not readable");
this.threadFactory = Executors.defaultThreadFactory();
this.state = new State();
this.deviceFile = dev;
this.v4l4jObject = doInit(deviceFile);
try {
initDeviceInfo();
} catch (V4L4JException e) {
// error getting DeviceInfo
// keep going so v4l4j can be used with drivers which supports
// multiple simultaneous open() calls.
// However, set things accordingly
deviceInfo = null;
supportJPEG = false;
supportRGB24 = false;
supportBGR24 = false;
supportYUV420 = false;
supportYVU420 = false;
}
}
|
static native void function(long o); private AbstractGrabber fg; private DeviceInfo deviceInfo; private ControlList controls; private TunerList tuners; private String deviceFile; private State state; private boolean supportJPEG, supportRGB24, supportBGR24, supportYUV420, supportYVU420; private final long v4l4jObject; private ThreadFactory threadFactory; public VideoDevice(String dev) throws V4L4JException { if (dev == null) throw new NullPointerException(STR); if (!(new File(dev).canRead())) throw new V4L4JException(STR + dev + STR); this.threadFactory = Executors.defaultThreadFactory(); this.state = new State(); this.deviceFile = dev; this.v4l4jObject = doInit(deviceFile); try { initDeviceInfo(); } catch (V4L4JException e) { deviceInfo = null; supportJPEG = false; supportRGB24 = false; supportBGR24 = false; supportYUV420 = false; supportYVU420 = false; } }
|
/**
* This JNI method releases the tuner interface
*
* @param o
* A C pointer to a struct v4l4j_device
* @throws ReleaseException
* if this device is sill in use, and has not been released.
*/
|
This JNI method releases the tuner interface
|
doReleaseTunerActions
|
{
"repo_name": "mailmindlin/v4l4j",
"path": "src/au/edu/jcu/v4l4j/VideoDevice.java",
"license": "gpl-3.0",
"size": 72177
}
|
[
"au.edu.jcu.v4l4j.exceptions.V4L4JException",
"java.io.File",
"java.util.concurrent.Executors",
"java.util.concurrent.ThreadFactory"
] |
import au.edu.jcu.v4l4j.exceptions.V4L4JException; import java.io.File; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory;
|
import au.edu.jcu.v4l4j.exceptions.*; import java.io.*; import java.util.concurrent.*;
|
[
"au.edu.jcu",
"java.io",
"java.util"
] |
au.edu.jcu; java.io; java.util;
| 2,103,588
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.