method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Computes the relative path from this path to {@code other}: the path that,
 * when resolved against this path, yields {@code other}.
 *
 * @param other the target path to relativize against this path
 * @return a RelativePath representing the relative path between this path and {@code other}
 * @throws IllegalArgumentException if {@code other} does not start with this path
 */
public RelativePath relativize(RelativePath other) {
    if (isEmpty()) {
        return other;
    }
    List<String> base = normalize().elements;
    List<String> target = other.normalize().elements;
    // The target may not be shorter than the base and must begin with it;
    // short-circuit evaluation keeps the subList call in range.
    if (base.size() > target.size() || !target.subList(0, base.size()).equals(base)) {
        throw new IllegalArgumentException("Cannot relativize " + other.getAbsolutePath() + " to " + getAbsolutePath());
    }
    return new RelativePath(separator, target.subList(base.size(), target.size()));
}
|
RelativePath function(RelativePath other) { if (isEmpty()) { return other; } List<String> normalized = normalize().elements; List<String> normalizedOther = other.normalize().elements; if (normalized.size() > normalizedOther.size()) { throw new IllegalArgumentException(STR + other.getAbsolutePath() + STR + getAbsolutePath()); } if (!normalizedOther.subList(0, normalized.size()).equals(normalized)) { throw new IllegalArgumentException(STR + other.getAbsolutePath() + STR + getAbsolutePath()); } return new RelativePath(separator, normalizedOther.subList(normalized.size(), normalizedOther.size())); }
|
/**
* Create a relative RelativePath between the given RelativePath and this RelativePath.
*
 * Relativization is the inverse of resolving. This method returns a RelativePath that, when resolved against this RelativePath,
* results in the given RelativePath <code>other</code>.
*
* @param other
* the RelativePath to relativize.
*
* @return a RelativePath representing a relative path between the given path and this path.
*
* @throws IllegalArgumentException
* If the path can not be relativized to this path.
*/
|
Create a relative RelativePath between the given RelativePath and this RelativePath. Relativization is the inverse of resolving. This method returns a RelativePath that, when resolved against this RelativePath, results in the given RelativePath <code>other</code>
|
relativize
|
{
"repo_name": "benvanwerkhoven/Xenon",
"path": "src/main/java/nl/esciencecenter/xenon/files/RelativePath.java",
"license": "apache-2.0",
"size": 19354
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 88,142
|
/**
 * Removes the given column from the set of columns this table displays.
 *
 * @param column the column to remove from the table's column model
 */
public void removeColumn(TableColumn column)
{
    this.columnModel.removeColumn(column);
}
|
void function(TableColumn column) { columnModel.removeColumn(column); }
|
/**
* Removes specified column from displayable columns of this table.
*
 * @param column the column to be removed
*/
|
Removes specified column from displayable columns of this table
|
removeColumn
|
{
"repo_name": "unofficial-opensource-apple/gcc_40",
"path": "libjava/javax/swing/JTable.java",
"license": "gpl-2.0",
"size": 50166
}
|
[
"javax.swing.table.TableColumn"
] |
import javax.swing.table.TableColumn;
|
import javax.swing.table.*;
|
[
"javax.swing"
] |
javax.swing;
| 2,126,111
|
/**
 * Streams a long sequence of versions arriving from the right and verifies
 * that the versioning interest manager emits the predicted number of
 * interest packets and records every exclusion.
 *
 * @throws Exception on any test failure
 */
@Test
public void testLongStreamFromRight() throws Exception {
    System.out.println("********** testLongStreamFromRight");
    ContentName streamName = ContentName.fromNative(prefix, String.format("content_%016X", _rnd.nextLong()));
    TestListener versionListener = new TestListener();
    TestVIM manager = new TestVIM(sinkhandle, streamName, null, VersionNumber.getMinimumVersion(), versionListener);
    manager.setSendInterest(true);
    manager.start();

    // Exactly one interest should be pending once the manager starts.
    Assert.assertTrue( sinkhandle.count.waitForValue(1, TIMEOUT) );

    long startMillis = CCNTime.now().getTime();
    int exclusionCount = VersioningInterestManager.MAX_FILL * LONG_SEND_MULTIPLE;

    // Predict the interest packet count: each time an interest fills up to
    // MAX_FILL a new packet starts, carrying over MIN_FILL exclusions.
    int expectedInterests = 1;
    int fill = 0;
    for (int i = 0; i < exclusionCount; i++) {
        if (fill >= VersioningInterestManager.MAX_FILL) {
            expectedInterests++;
            fill = VersioningInterestManager.MIN_FILL;
        }
        fill++;
    }
    System.out.println(String.format("Sending %d exclusions should result in %d interest packets", exclusionCount, expectedInterests));

    System.out.println("***** Sending stream 1 *****");
    TreeSet<CCNTime> sentVersions = sendStreamRight(sinkhandle, manager, streamName, startMillis, exclusionCount);

    // One interest per exclusion, plus one per outstanding packet.
    boolean countReached = sinkhandle.total_count.waitForValue(exclusionCount + expectedInterests, TIMEOUT);
    Assert.assertTrue("sinkhandle incorrect count: " + sinkhandle.total_count.getValue(), countReached);

    Assert.assertEquals(expectedInterests, manager.getInterestDataTree().size());
    Assert.assertEquals(sentVersions.size(), manager.getExclusions().size());
    manager.stop();
}
|
void function() throws Exception { System.out.println(STR); ContentName basename = ContentName.fromNative(prefix, String.format(STR, _rnd.nextLong())); TestListener listener = new TestListener(); TestVIM vim = new TestVIM(sinkhandle, basename, null, VersionNumber.getMinimumVersion(), listener); vim.setSendInterest(true); vim.start(); Assert.assertTrue( sinkhandle.count.waitForValue(1, TIMEOUT) ); CCNTime now = CCNTime.now(); long t = now.getTime(); int tosend = VersioningInterestManager.MAX_FILL * LONG_SEND_MULTIPLE; int packets = 1; int occupancy = 0; for(int i = 0; i < tosend; i++) { if( occupancy >= VersioningInterestManager.MAX_FILL ) { packets++; occupancy = VersioningInterestManager.MIN_FILL; } occupancy++; } System.out.println(String.format(STR, tosend, packets)); System.out.println(STR); TreeSet<CCNTime> sent1 = sendStreamRight(sinkhandle, vim, basename, t, tosend); boolean b = sinkhandle.total_count.waitForValue(tosend + packets, TIMEOUT); Assert.assertTrue(STR + sinkhandle.total_count.getValue(), b); Assert.assertEquals(packets, vim.getInterestDataTree().size()); Assert.assertEquals(sent1.size(), vim.getExclusions().size()); vim.stop(); }
|
/**
* Send a very long stream from the right
* @throws Exception
*/
|
Send a very long stream from the right
|
testLongStreamFromRight
|
{
"repo_name": "yyhpys/ccnx-trace-interest",
"path": "javasrc/src/org/ccnx/ccn/test/profiles/versioning/VersioningInterestManagerTestRepo.java",
"license": "lgpl-2.1",
"size": 17246
}
|
[
"java.util.TreeSet",
"junit.framework.Assert",
"org.ccnx.ccn.profiles.versioning.VersionNumber",
"org.ccnx.ccn.profiles.versioning.VersioningInterestManager",
"org.ccnx.ccn.protocol.CCNTime",
"org.ccnx.ccn.protocol.ContentName",
"org.ccnx.ccn.test.profiles.versioning.VersioningHelper"
] |
import java.util.TreeSet; import junit.framework.Assert; import org.ccnx.ccn.profiles.versioning.VersionNumber; import org.ccnx.ccn.profiles.versioning.VersioningInterestManager; import org.ccnx.ccn.protocol.CCNTime; import org.ccnx.ccn.protocol.ContentName; import org.ccnx.ccn.test.profiles.versioning.VersioningHelper;
|
import java.util.*; import junit.framework.*; import org.ccnx.ccn.profiles.versioning.*; import org.ccnx.ccn.protocol.*; import org.ccnx.ccn.test.profiles.versioning.*;
|
[
"java.util",
"junit.framework",
"org.ccnx.ccn"
] |
java.util; junit.framework; org.ccnx.ccn;
| 1,696,394
|
/**
 * Verifies that a store with no preceding ORDER BY carries a null SortInfo.
 *
 * @throws Exception on any test failure
 */
@Test
public void testSortInfoNoOrderBy1() throws Exception {
    String query =
            "a = load 'bla' as (i:int, n:chararray, d:double);" +
            "b = filter a by i > 10;" +
            "store b into 'foo';";
    PhysicalPlan physicalPlan = Util.buildPp(pigServer, query);
    POStore store = (POStore) physicalPlan.getLeaves().get(0);
    assertEquals(null, store.getSortInfo());
}
|
void function() throws Exception { String query = STR + STR + STR; PhysicalPlan pp = Util.buildPp( pigServer, query ); SortInfo si = ((POStore)(pp.getLeaves().get(0))).getSortInfo(); assertEquals(null, si); }
|
/**
* tests that sortInfo is null when there is no order by
* before the store
* @throws Exception
*/
|
tests that sortInfo is null when there is no order by before the store
|
testSortInfoNoOrderBy1
|
{
"repo_name": "miyakawataku/piggybank-ltsv",
"path": "test/org/apache/pig/test/TestLogToPhyCompiler.java",
"license": "apache-2.0",
"size": 19966
}
|
[
"org.apache.pig.SortInfo",
"org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan",
"org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore",
"org.junit.Assert"
] |
import org.apache.pig.SortInfo; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore; import org.junit.Assert;
|
import org.apache.pig.*; import org.apache.pig.backend.hadoop.executionengine.*; import org.junit.*;
|
[
"org.apache.pig",
"org.junit"
] |
org.apache.pig; org.junit;
| 2,525,735
|
/**
 * Creates an ExpressionFactory instance from the given implementation class name.
 *
 * <p>If {@code properties} is non-null and the implementation class declares a
 * constructor taking a {@link Properties} argument, that constructor is used;
 * otherwise the no-argument constructor is used.</p>
 *
 * @param properties
 *            Properties passed to the constructor of the implementation, may be null.
 * @param className
 *            The name of the ExpressionFactory class.
 * @param classLoader
 *            The class loader to be used to load the class.
 * @return An instance of ExpressionFactory.
 * @throws ELException
 *             if the class could not be found, is not a subclass of
 *             ExpressionFactory, or could not be instantiated.
 */
private static ExpressionFactory newInstance(Properties properties, String className, ClassLoader classLoader) {
    Class<?> clazz;
    try {
        clazz = classLoader.loadClass(className.trim());
        if (!ExpressionFactory.class.isAssignableFrom(clazz)) {
            throw new ELException("Invalid expression factory class: " + clazz.getName());
        }
    } catch (ClassNotFoundException e) {
        throw new ELException("Could not find expression factory class", e);
    }
    try {
        if (properties != null) {
            Constructor<?> constructor = null;
            try {
                constructor = clazz.getConstructor(Properties.class);
            } catch (Exception ignored) {
                // No Properties constructor available (or not accessible);
                // fall through to the no-argument constructor below.
            }
            if (constructor != null) {
                return (ExpressionFactory) constructor.newInstance(properties);
            }
        }
        // Class.newInstance() is deprecated and propagates undeclared checked
        // exceptions thrown by the constructor; Constructor.newInstance()
        // wraps them so every failure is rethrown consistently below.
        return (ExpressionFactory) clazz.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        throw new ELException("Could not create expression factory instance", e);
    }
}
|
static ExpressionFactory function(Properties properties, String className, ClassLoader classLoader) { Class<?> clazz = null; try { clazz = classLoader.loadClass(className.trim()); if (!ExpressionFactory.class.isAssignableFrom(clazz)) { throw new ELException(STR + clazz.getName()); } } catch (ClassNotFoundException e) { throw new ELException(STR, e); } try { if (properties != null) { Constructor<?> constructor = null; try { constructor = clazz.getConstructor(Properties.class); } catch (Exception e) { } if (constructor != null) { return (ExpressionFactory) constructor.newInstance(properties); } } return (ExpressionFactory) clazz.newInstance(); } catch (Exception e) { throw new ELException(STR, e); } }
|
/**
* Create an ExpressionFactory instance.
*
* @param properties
* Properties passed to the constructor of the implementation.
* @return an instance of ExpressionFactory
* @param className
* The name of the ExpressionFactory class.
* @param classLoader
* The class loader to be used to load the class.
* @return An instance of ExpressionFactory.
* @throws ELException
* if the class could not be found or if it is not a subclass of ExpressionFactory
* or if the class could not be instantiated.
*/
|
Create an ExpressionFactory instance
|
newInstance
|
{
"repo_name": "yvoswillens/flowable-engine",
"path": "modules/flowable-engine-common/src/main/java/org/flowable/common/engine/impl/javax/el/ExpressionFactory.java",
"license": "apache-2.0",
"size": 15389
}
|
[
"java.lang.reflect.Constructor",
"java.util.Properties"
] |
import java.lang.reflect.Constructor; import java.util.Properties;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 142,311
|
/**
 * Takes this pack's currency amount, multiplied by {@code amount}, from the
 * user's balance (see parent for the general contract).
 *
 * @param amount the number of packs to take
 * @param notify whether to notify listeners of the balance change
 * @return the balance after the taking process, or 0 if the associated
 *         currency item does not exist
 */
@Override
public int take(int amount, boolean notify) {
    final VirtualCurrency currency;
    try {
        currency = (VirtualCurrency) StoreInfo.getVirtualItem(mCurrencyItemId);
    } catch (VirtualItemNotFoundException e) {
        StoreUtils.LogError(TAG, "VirtualCurrency with itemId: " + mCurrencyItemId + " doesn't exist! Can't take this pack.");
        return 0;
    }
    return StorageManager.getVirtualCurrencyStorage().remove(currency, mCurrencyAmount * amount, notify);
}
|
int function(int amount, boolean notify) { VirtualCurrency currency = null; try { currency = (VirtualCurrency)StoreInfo.getVirtualItem(mCurrencyItemId); } catch (VirtualItemNotFoundException e) { StoreUtils.LogError(TAG, STR + mCurrencyItemId + STR); return 0; } return StorageManager.getVirtualCurrencyStorage().remove(currency, mCurrencyAmount * amount, notify); }
|
/**
* see parent
* @param amount the amount of the specific item to be taken.
* @return balance after the taking process
*/
|
see parent
|
take
|
{
"repo_name": "NatWeiss/RGP",
"path": "src/cocos2dx-store/submodules/android-store/SoomlaAndroidStore/src/com/soomla/store/domain/virtualCurrencies/VirtualCurrencyPack.java",
"license": "mit",
"size": 5532
}
|
[
"com.soomla.store.StoreUtils",
"com.soomla.store.data.StorageManager",
"com.soomla.store.data.StoreInfo",
"com.soomla.store.exceptions.VirtualItemNotFoundException"
] |
import com.soomla.store.StoreUtils; import com.soomla.store.data.StorageManager; import com.soomla.store.data.StoreInfo; import com.soomla.store.exceptions.VirtualItemNotFoundException;
|
import com.soomla.store.*; import com.soomla.store.data.*; import com.soomla.store.exceptions.*;
|
[
"com.soomla.store"
] |
com.soomla.store;
| 2,159,688
|
public static <K, V, R> R executeRemotely(IgniteCacheProcessProxy<K, V> cache,
final TestCacheCallable<K, V, R> job) {
IgniteProcessProxy proxy = (IgniteProcessProxy)cache.unwrap(Ignite.class);
final UUID id = proxy.getId();
final String cacheName = cache.getName();
return proxy.remoteCompute().call(new IgniteCallable<R>() {
private static final long serialVersionUID = -3868429485920845137L;
|
static <K, V, R> R function(IgniteCacheProcessProxy<K, V> cache, final TestCacheCallable<K, V, R> job) { IgniteProcessProxy proxy = (IgniteProcessProxy)cache.unwrap(Ignite.class); final UUID id = proxy.getId(); final String cacheName = cache.getName(); return proxy.remoteCompute().call(new IgniteCallable<R>() { private static final long serialVersionUID = -3868429485920845137L;
|
/**
* Runs job on remote JVM.
*
* @param cache Cache.
* @param job Job.
*/
|
Runs job on remote JVM
|
executeRemotely
|
{
"repo_name": "ilantukh/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java",
"license": "apache-2.0",
"size": 84789
}
|
[
"org.apache.ignite.Ignite",
"org.apache.ignite.lang.IgniteCallable",
"org.apache.ignite.testframework.junits.multijvm.IgniteCacheProcessProxy",
"org.apache.ignite.testframework.junits.multijvm.IgniteProcessProxy"
] |
import org.apache.ignite.Ignite; import org.apache.ignite.lang.IgniteCallable; import org.apache.ignite.testframework.junits.multijvm.IgniteCacheProcessProxy; import org.apache.ignite.testframework.junits.multijvm.IgniteProcessProxy;
|
import org.apache.ignite.*; import org.apache.ignite.lang.*; import org.apache.ignite.testframework.junits.multijvm.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 2,309,815
|
/**
 * Copies every field of this record into a Map, formatting each value with
 * the field length and scale length declared in this entity's metadata.
 *
 * NOTE(review): StringUtils.toString appears to be overloaded per field type;
 * keep each call's argument as the concrete field so overload resolution is
 * preserved.
 */
public Map toMap() {
Map map = new HashMap();
map.put("segNo",StringUtils.toString(segNo, eiMetadata.getMeta("segNo").getFieldLength(), eiMetadata.getMeta("segNo").getScaleLength()));
map.put("settleId",StringUtils.toString(settleId, eiMetadata.getMeta("settleId").getFieldLength(), eiMetadata.getMeta("settleId").getScaleLength()));
map.put("settleSubid",StringUtils.toString(settleSubid, eiMetadata.getMeta("settleSubid").getFieldLength(), eiMetadata.getMeta("settleSubid").getScaleLength()));
map.put("instructionId",StringUtils.toString(instructionId, eiMetadata.getMeta("instructionId").getFieldLength(), eiMetadata.getMeta("instructionId").getScaleLength()));
map.put("instructionSubid",StringUtils.toString(instructionSubid, eiMetadata.getMeta("instructionSubid").getFieldLength(), eiMetadata.getMeta("instructionSubid").getScaleLength()));
map.put("fmProductId",StringUtils.toString(fmProductId, eiMetadata.getMeta("fmProductId").getFieldLength(), eiMetadata.getMeta("fmProductId").getScaleLength()));
map.put("fmSettleWeight",StringUtils.toString(fmSettleWeight, eiMetadata.getMeta("fmSettleWeight").getFieldLength(), eiMetadata.getMeta("fmSettleWeight").getScaleLength()));
map.put("fmSettleQuantity",StringUtils.toString(fmSettleQuantity, eiMetadata.getMeta("fmSettleQuantity").getFieldLength(), eiMetadata.getMeta("fmSettleQuantity").getScaleLength()));
map.put("fmWeightUnit",StringUtils.toString(fmWeightUnit, eiMetadata.getMeta("fmWeightUnit").getFieldLength(), eiMetadata.getMeta("fmWeightUnit").getScaleLength()));
map.put("fmQuantityUnit",StringUtils.toString(fmQuantityUnit, eiMetadata.getMeta("fmQuantityUnit").getFieldLength(), eiMetadata.getMeta("fmQuantityUnit").getScaleLength()));
map.put("fmUnitConversion",StringUtils.toString(fmUnitConversion, eiMetadata.getMeta("fmUnitConversion").getFieldLength(), eiMetadata.getMeta("fmUnitConversion").getScaleLength()));
map.put("fmUnitWeight",StringUtils.toString(fmUnitWeight, eiMetadata.getMeta("fmUnitWeight").getFieldLength(), eiMetadata.getMeta("fmUnitWeight").getScaleLength()));
map.put("rmlistId",StringUtils.toString(rmlistId, eiMetadata.getMeta("rmlistId").getFieldLength(), eiMetadata.getMeta("rmlistId").getScaleLength()));
map.put("rmProductId",StringUtils.toString(rmProductId, eiMetadata.getMeta("rmProductId").getFieldLength(), eiMetadata.getMeta("rmProductId").getScaleLength()));
map.put("actRmWeight",StringUtils.toString(actRmWeight, eiMetadata.getMeta("actRmWeight").getFieldLength(), eiMetadata.getMeta("actRmWeight").getScaleLength()));
map.put("actRmQuantity",StringUtils.toString(actRmQuantity, eiMetadata.getMeta("actRmQuantity").getFieldLength(), eiMetadata.getMeta("actRmQuantity").getScaleLength()));
map.put("rmWeightUnit",StringUtils.toString(rmWeightUnit, eiMetadata.getMeta("rmWeightUnit").getFieldLength(), eiMetadata.getMeta("rmWeightUnit").getScaleLength()));
map.put("rmQuantityUnit",StringUtils.toString(rmQuantityUnit, eiMetadata.getMeta("rmQuantityUnit").getFieldLength(), eiMetadata.getMeta("rmQuantityUnit").getScaleLength()));
map.put("rmUnitConversion",StringUtils.toString(rmUnitConversion, eiMetadata.getMeta("rmUnitConversion").getFieldLength(), eiMetadata.getMeta("rmUnitConversion").getScaleLength()));
map.put("rmUnitWeight",StringUtils.toString(rmUnitWeight, eiMetadata.getMeta("rmUnitWeight").getFieldLength(), eiMetadata.getMeta("rmUnitWeight").getScaleLength()));
map.put("actUsefulRate",StringUtils.toString(actUsefulRate, eiMetadata.getMeta("actUsefulRate").getFieldLength(), eiMetadata.getMeta("actUsefulRate").getScaleLength()));
map.put("machPrice",StringUtils.toString(machPrice, eiMetadata.getMeta("machPrice").getFieldLength(), eiMetadata.getMeta("machPrice").getScaleLength()));
map.put("settleAmount",StringUtils.toString(settleAmount, eiMetadata.getMeta("settleAmount").getFieldLength(), eiMetadata.getMeta("settleAmount").getScaleLength()));
map.put("otherAmount",StringUtils.toString(otherAmount, eiMetadata.getMeta("otherAmount").getFieldLength(), eiMetadata.getMeta("otherAmount").getScaleLength()));
map.put("taxRate",StringUtils.toString(taxRate, eiMetadata.getMeta("taxRate").getFieldLength(), eiMetadata.getMeta("taxRate").getScaleLength()));
map.put("taxAmount",StringUtils.toString(taxAmount, eiMetadata.getMeta("taxAmount").getFieldLength(), eiMetadata.getMeta("taxAmount").getScaleLength()));
map.put("totalAmount",StringUtils.toString(totalAmount, eiMetadata.getMeta("totalAmount").getFieldLength(), eiMetadata.getMeta("totalAmount").getScaleLength()));
map.put("weightMethod",StringUtils.toString(weightMethod, eiMetadata.getMeta("weightMethod").getFieldLength(), eiMetadata.getMeta("weightMethod").getScaleLength()));
map.put("valuationType",StringUtils.toString(valuationType, eiMetadata.getMeta("valuationType").getFieldLength(), eiMetadata.getMeta("valuationType").getScaleLength()));
map.put("modiPerson",StringUtils.toString(modiPerson, eiMetadata.getMeta("modiPerson").getFieldLength(), eiMetadata.getMeta("modiPerson").getScaleLength()));
map.put("modiDate",StringUtils.toString(modiDate, eiMetadata.getMeta("modiDate").getFieldLength(), eiMetadata.getMeta("modiDate").getScaleLength()));
map.put("settleAmountAt",StringUtils.toString(settleAmountAt, eiMetadata.getMeta("settleAmountAt").getFieldLength(), eiMetadata.getMeta("settleAmountAt").getScaleLength()));
map.put("otherPrice",StringUtils.toString(otherPrice, eiMetadata.getMeta("otherPrice").getFieldLength(), eiMetadata.getMeta("otherPrice").getScaleLength()));
map.put("transPriceAt",StringUtils.toString(transPriceAt, eiMetadata.getMeta("transPriceAt").getFieldLength(), eiMetadata.getMeta("transPriceAt").getScaleLength()));
map.put("goodId",StringUtils.toString(goodId, eiMetadata.getMeta("goodId").getFieldLength(), eiMetadata.getMeta("goodId").getScaleLength()));
map.put("techStandard",StringUtils.toString(techStandard, eiMetadata.getMeta("techStandard").getFieldLength(), eiMetadata.getMeta("techStandard").getScaleLength()));
map.put("shopsign",StringUtils.toString(shopsign, eiMetadata.getMeta("shopsign").getFieldLength(), eiMetadata.getMeta("shopsign").getScaleLength()));
map.put("spec",StringUtils.toString(spec, eiMetadata.getMeta("spec").getFieldLength(), eiMetadata.getMeta("spec").getScaleLength()));
map.put("prodTypeDesc",StringUtils.toString(prodTypeDesc, eiMetadata.getMeta("prodTypeDesc").getFieldLength(), eiMetadata.getMeta("prodTypeDesc").getScaleLength()));
map.put("qualityGradeName",StringUtils.toString(qualityGradeName, eiMetadata.getMeta("qualityGradeName").getFieldLength(), eiMetadata.getMeta("qualityGradeName").getScaleLength()));
map.put("producingAreaName",StringUtils.toString(producingAreaName, eiMetadata.getMeta("producingAreaName").getFieldLength(), eiMetadata.getMeta("producingAreaName").getScaleLength()));
return map;
}
|
Map function() { Map map = new HashMap(); map.put("segNo",StringUtils.toString(segNo, eiMetadata.getMeta("segNo").getFieldLength(), eiMetadata.getMeta("segNo").getScaleLength())); map.put(STR,StringUtils.toString(settleId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(settleSubid, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(instructionId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(instructionSubid, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmProductId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmSettleWeight, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmSettleQuantity, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmWeightUnit, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmQuantityUnit, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmUnitConversion, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(fmUnitWeight, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmlistId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmProductId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(actRmWeight, 
eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(actRmQuantity, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmWeightUnit, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmQuantityUnit, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmUnitConversion, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(rmUnitWeight, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(actUsefulRate, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(machPrice, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(settleAmount, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(otherAmount, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(taxRate, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(taxAmount, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(totalAmount, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(weightMethod, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(valuationType, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(modiPerson, 
eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(modiDate, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(settleAmountAt, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(otherPrice, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(transPriceAt, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(goodId, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(techStandard, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(shopsign, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put("spec",StringUtils.toString(spec, eiMetadata.getMeta("spec").getFieldLength(), eiMetadata.getMeta("spec").getScaleLength())); map.put(STR,StringUtils.toString(prodTypeDesc, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(qualityGradeName, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); map.put(STR,StringUtils.toString(producingAreaName, eiMetadata.getMeta(STR).getFieldLength(), eiMetadata.getMeta(STR).getScaleLength())); return map; }
|
/**
* set the value to Map
*/
|
set the value to Map
|
toMap
|
{
"repo_name": "stserp/erp1",
"path": "source/src/com/baosight/sts/st/jg/domain/STJG0402.java",
"license": "apache-2.0",
"size": 36668
}
|
[
"com.baosight.iplat4j.util.StringUtils",
"java.util.HashMap",
"java.util.Map"
] |
import com.baosight.iplat4j.util.StringUtils; import java.util.HashMap; import java.util.Map;
|
import com.baosight.iplat4j.util.*; import java.util.*;
|
[
"com.baosight.iplat4j",
"java.util"
] |
com.baosight.iplat4j; java.util;
| 2,523,596
|
/**
 * Schedules a local notification described by the given JSON options.
 *
 * @param options JSON object with the set of notification options
 * @param receiver receiver class that handles the trigger event
 * @return the scheduled notification
 */
public Notification schedule (JSONObject options, Class<?> receiver) {
    Options parsed = new Options(context).parse(options);
    return schedule(parsed, receiver);
}
|
Notification function (JSONObject options, Class<?> receiver) { return schedule(new Options(context).parse(options), receiver); }
|
/**
* Schedule local notification specified by JSON object.
*
* @param options
* JSON object with set of options
* @param receiver
* Receiver to handle the trigger event
*/
|
Schedule local notification specified by JSON object
|
schedule
|
{
"repo_name": "sysfolko/sysfolko-de.appplant.cordova.plugin.local-notification-sysfo-fix",
"path": "src/android/notification/Manager.java",
"license": "apache-2.0",
"size": 12408
}
|
[
"org.json.JSONObject"
] |
import org.json.JSONObject;
|
import org.json.*;
|
[
"org.json"
] |
org.json;
| 1,916,249
|
/**
 * Registers a listener that is invoked whenever the primary clip changes.
 *
 * @param listener the clipboard-changed listener to register
 */
public static void addChangedListener(final ClipboardManager.OnPrimaryClipChangedListener listener) {
    ClipboardManager clipboard =
            (ClipboardManager) Utils.getApp().getSystemService(Context.CLIPBOARD_SERVICE);
    //noinspection ConstantConditions
    clipboard.addPrimaryClipChangedListener(listener);
}
|
static void function(final ClipboardManager.OnPrimaryClipChangedListener listener) { ClipboardManager cm = (ClipboardManager) Utils.getApp().getSystemService(Context.CLIPBOARD_SERVICE); cm.addPrimaryClipChangedListener(listener); }
|
/**
* Add the clipboard changed listener.
*/
|
Add the clipboard changed listener
|
addChangedListener
|
{
"repo_name": "Blankj/AndroidUtilCode",
"path": "lib/utilcode/src/main/java/com/blankj/utilcode/util/ClipboardUtils.java",
"license": "apache-2.0",
"size": 3539
}
|
[
"android.content.ClipboardManager",
"android.content.Context"
] |
import android.content.ClipboardManager; import android.content.Context;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 2,762,507
|
/**
 * Initializes the components this class requires to function. Native apps
 * using the Salesforce Mobile SDK should call this before anything else.
 *
 * @param context application context
 * @param keyImpl implementation of KeyInterface
 * @param mainActivity activity launched after the login flow completes
 * @param loginActivity login activity
 */
public static void initNative(Context context, KeyInterface keyImpl,
        Class<? extends Activity> mainActivity, Class<? extends Activity> loginActivity) {
    SalesforceSDKManager.init(context, keyImpl, mainActivity, loginActivity);
}
|
static void function(Context context, KeyInterface keyImpl, Class<? extends Activity> mainActivity, Class<? extends Activity> loginActivity) { SalesforceSDKManager.init(context, keyImpl, mainActivity, loginActivity); }
|
/**
* Initializes components required for this class
* to properly function. This method should be called
* by native apps using the Salesforce Mobile SDK.
*
* @param context Application context.
* @param keyImpl Implementation of KeyInterface.
* @param mainActivity Activity that should be launched after the login flow.
* @param loginActivity Login activity.
*/
|
Initializes components required for this class to properly function. This method should be called by native apps using the Salesforce Mobile SDK
|
initNative
|
{
"repo_name": "seethaa/force_analytics_example",
"path": "native/SalesforceSDK/src/com/salesforce/androidsdk/app/SalesforceSDKManager.java",
"license": "apache-2.0",
"size": 38136
}
|
[
"android.app.Activity",
"android.content.Context"
] |
import android.app.Activity; import android.content.Context;
|
import android.app.*; import android.content.*;
|
[
"android.app",
"android.content"
] |
android.app; android.content;
| 244,313
|
/**
 * Processes the beginning of this element: locates the attribute pair that
 * names a bean property and its value, then sets that property on the object
 * at the top of the digester stack.
 *
 * @param namespace the namespace URI of the matching element, or an
 *   empty string if the parser is not namespace aware or the element has
 *   no namespace
 * @param theName the local name if the parser is namespace aware, or just
 *   the element name otherwise
 * @param attributes The attribute list for this element
 *
 * @exception NoSuchMethodException if the bean does not
 *  have a writable property of the specified name
 */
@Override
public void begin(String namespace, String theName, Attributes attributes)
        throws Exception {

    // Scan the attributes for the configured name/value attribute names
    // (held in the "name" and "value" fields) to find the property to set.
    String actualName = null;
    String actualValue = null;
    for (int i = 0; i < attributes.getLength(); i++) {
        String attrName = attributes.getLocalName(i);
        if ("".equals(attrName)) {
            attrName = attributes.getQName(i);
        }
        String attrValue = attributes.getValue(i);
        if (attrName.equals(this.name)) {
            actualName = attrValue;
        } else if (attrName.equals(this.value)) {
            actualValue = attrValue;
        }
    }

    // The object whose property is being set.
    Object top = digester.peek();

    if (digester.log.isDebugEnabled()) {
        digester.log.debug("[SetPropertyRule]{" + digester.match +
                "} Set " + top.getClass().getName() + " property " +
                actualName + " to " + actualValue);
    }

    // Set the property (with conversion as necessary); warn when rules
    // validation is enabled and no matching writable property was found.
    if (!digester.isFakeAttribute(top, actualName)
            && !IntrospectionUtils.setProperty(top, actualName, actualValue)
            && digester.getRulesValidation()) {
        digester.log.warn("[SetPropertyRule]{" + digester.match +
                "} Setting property '" + name + "' to '" +
                value + "' did not find a matching property.");
    }
}
|
void function(String namespace, String theName, Attributes attributes) throws Exception { String actualName = null; String actualValue = null; for (int i = 0; i < attributes.getLength(); i++) { String name = attributes.getLocalName(i); if (STR[SetPropertyRule]{STR} Set STR property STR to STR[SetPropertyRule]{STR} Setting property 'STR' to 'STR' did not find a matching property."); } }
|
/**
* Process the beginning of this element.
*
* @param namespace the namespace URI of the matching element, or an
* empty string if the parser is not namespace aware or the element has
* no namespace
* @param theName the local name if the parser is namespace aware, or just
* the element name otherwise
* @param attributes The attribute list for this element
*
* @exception NoSuchMethodException if the bean does not
* have a writable property of the specified name
*/
|
Process the beginning of this element
|
begin
|
{
"repo_name": "plumer/codana",
"path": "tomcat_files/7.0.0/SetPropertyRule.java",
"license": "mit",
"size": 5271
}
|
[
"org.xml.sax.Attributes"
] |
import org.xml.sax.Attributes;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 641,648
|
/**
 * Handles a matched "revision" grammar rule: validates the listener's
 * parse state and pushes a new {@link YangRevision} node, carrying the
 * validated revision date, onto the parsed-data stack.
 *
 * @param listener listener object carrying the parsed-data stack
 * @param ctx      parser context for the revision statement
 */
public static void processRevisionEntry(TreeWalkListener listener,
        GeneratedYangParser.RevisionStatementContext ctx) {
    String dateText = ctx.dateArgumentString().getText();
    // The stack must already hold a parent node to attach this revision to.
    checkStackIsNotEmpty(listener, MISSING_HOLDER, REVISION_DATA, dateText, ENTRY);
    Date revisionDate = getValidDateFromString(dateText, ctx);
    YangRevision revision = new YangRevision();
    revision.setRevDate(revisionDate);
    listener.getParsedDataStack().push(revision);
}
|
static void function(TreeWalkListener listener, GeneratedYangParser.RevisionStatementContext ctx) { checkStackIsNotEmpty(listener, MISSING_HOLDER, REVISION_DATA, ctx.dateArgumentString().getText(), ENTRY); Date date = getValidDateFromString(ctx.dateArgumentString().getText(), ctx); YangRevision revisionNode = new YangRevision(); revisionNode.setRevDate(date); listener.getParsedDataStack().push(revisionNode); }
|
/**
* It is called when parser receives an input matching the grammar rule
 * (revision), perform validations and update the data model tree.
*
* @param listener Listener's object
* @param ctx context object of the grammar rule
*/
|
It is called when the parser receives an input matching the grammar rule (revision); it performs validations and updates the data model tree
|
processRevisionEntry
|
{
"repo_name": "VinodKumarS-Huawei/ietf96yang",
"path": "utils/yangutils/plugin/src/main/java/org/onosproject/yangutils/parser/impl/listeners/RevisionListener.java",
"license": "apache-2.0",
"size": 7467
}
|
[
"java.util.Date",
"org.onosproject.yangutils.datamodel.YangRevision",
"org.onosproject.yangutils.parser.antlrgencode.GeneratedYangParser",
"org.onosproject.yangutils.parser.impl.TreeWalkListener",
"org.onosproject.yangutils.parser.impl.parserutils.ListenerUtil",
"org.onosproject.yangutils.parser.impl.parserutils.ListenerValidation"
] |
import java.util.Date; import org.onosproject.yangutils.datamodel.YangRevision; import org.onosproject.yangutils.parser.antlrgencode.GeneratedYangParser; import org.onosproject.yangutils.parser.impl.TreeWalkListener; import org.onosproject.yangutils.parser.impl.parserutils.ListenerUtil; import org.onosproject.yangutils.parser.impl.parserutils.ListenerValidation;
|
import java.util.*; import org.onosproject.yangutils.datamodel.*; import org.onosproject.yangutils.parser.antlrgencode.*; import org.onosproject.yangutils.parser.impl.*; import org.onosproject.yangutils.parser.impl.parserutils.*;
|
[
"java.util",
"org.onosproject.yangutils"
] |
java.util; org.onosproject.yangutils;
| 2,423,620
|
/**
 * Run the void createUser(User) method test: a duplicate user and a
 * duplicate email address must both be rejected, while a genuinely new
 * user must be created successfully.
 */
public void testCreateUser() throws Exception {
    String testName = "CreateUser";
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("\n\t\tRunning Test: " + testName);
    }
    UserServiceImpl fixture = getFixture();

    // 1. test do not create existing user:
    User user = getStoredUser();
    ExecutionResult result = fixture.createUser(user);
    assertTrue("user already exists.", !result.isSuccessful());

    // 2. test create a new user with the same email address:
    User user2 = getNewUser();
    ExecutionResult result2 = fixture.createUser(user2);
    assertTrue("user with the same email address exists.", !result2.isSuccessful());

    // 3. test create a new user (distinct email makes it valid):
    User user3 = getNewUser();
    user3.setEmailAddressString("newEmail454545");
    ExecutionResult result3 = fixture.createUser(user3);
    assertTrue("user created.", result3.isSuccessful());

    if (LOGGER.isInfoEnabled()) {
        LOGGER.info(testName + " - end "); //$NON-NLS-1$
    }
}
|
void function() throws Exception { String testName = STR; if (LOGGER.isInfoEnabled()) { LOGGER.info(STR + testName); } UserServiceImpl fixture = getFixture(); User user = getStoredUser(); ExecutionResult result = fixture.createUser(user); assertTrue(STR, result.isSuccessful() == false); User user2 = getNewUser(); ExecutionResult result2 = fixture.createUser(user2); assertTrue(STR, result2.isSuccessful() == false); User user3 = getNewUser(); user3.setEmailAddressString(STR); ExecutionResult result3 = fixture.createUser(user3); assertTrue(STR, result3.isSuccessful()); if (LOGGER.isInfoEnabled()) { LOGGER.info(testName + STR); } }
|
/**
* Run the void createUser(User) method test.
*
* @generatedBy CodePro at 10/13/05 4:18 PM
*/
|
Run the void createUser(User) method test
|
testCreateUser
|
{
"repo_name": "TreeBASE/treebasetest",
"path": "treebase-core/src/test/java/org/cipres/treebase/service/admin/UserServiceImplTest.java",
"license": "bsd-3-clause",
"size": 5868
}
|
[
"org.cipres.treebase.domain.admin.User",
"org.cipres.treebase.framework.ExecutionResult"
] |
import org.cipres.treebase.domain.admin.User; import org.cipres.treebase.framework.ExecutionResult;
|
import org.cipres.treebase.domain.admin.*; import org.cipres.treebase.framework.*;
|
[
"org.cipres.treebase"
] |
org.cipres.treebase;
| 298,635
|
/**
 * Setter for property owner.
 *
 * @param owner new value of property owner
 */
public void setOwner(Property owner) {
    this.owner = owner;
}
|
void function(Property owner) { this.owner = owner; }
|
/**
* Setter for property owner.
*
* @param owner new value of property owner
*/
|
Setter for property owner
|
setOwner
|
{
"repo_name": "accesstest3/cfunambol",
"path": "common/pim-framework/src/main/java/com/funambol/common/pim/calendar/Task.java",
"license": "agpl-3.0",
"size": 18189
}
|
[
"com.funambol.common.pim.common.Property"
] |
import com.funambol.common.pim.common.Property;
|
import com.funambol.common.pim.common.*;
|
[
"com.funambol.common"
] |
com.funambol.common;
| 422,149
|
/**
 * Batch-puts the given rows into every configured storage provider.
 *
 * @param tableRows rows to persist
 * @throws Exception if any provider fails to store the batch
 */
public void batchPut(JSONArray tableRows) throws Exception {
    // Fan the same batch out to each backing provider in turn.
    for (IProvider target : this.providers) {
        target.batchPut(tableRows);
    }
}
|
void function(JSONArray tableRows) throws Exception { for(IProvider provider : this.providers) { provider.batchPut(tableRows); } }
|
/**
* batch put rows
* @param tableRows
* @throws Exception
*/
|
batch put rows
|
batchPut
|
{
"repo_name": "mychaelstyle/mydatastore",
"path": "src/main/java/com/mychaelstyle/mydatastore/MyDataStore.java",
"license": "apache-2.0",
"size": 4350
}
|
[
"org.json.JSONArray"
] |
import org.json.JSONArray;
|
import org.json.*;
|
[
"org.json"
] |
org.json;
| 2,462,318
|
void write(ObjectOutputStream os, int[] indexes) throws IOException;
|
void write(ObjectOutputStream os, int[] indexes) throws IOException;
|
/**
* Writes content for this array to the output stream
* @param os the output stream to write to
* @param indexes the indexes of records to write
* @throws IOException if there is an I/O exception
*/
|
Writes content for this array to the output stream
|
write
|
{
"repo_name": "zavtech/morpheus-core",
"path": "src/main/java/com/zavtech/morpheus/array/Array.java",
"license": "apache-2.0",
"size": 34601
}
|
[
"java.io.IOException",
"java.io.ObjectOutputStream"
] |
import java.io.IOException; import java.io.ObjectOutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,219,145
|
/**
 * This is the [world](/modules/World) in which this entity is going to join.
 *
 * @return the world reported by the wrapped event delegate
 */
@LuaProperty
public World getWorld() {
    return delegate.getWorld();
}
}
|
World function() { return delegate.getWorld(); } }
|
/**
* This is the [world](/modules/World) in which this entity is going to join.
*/
|
This is the [world](/modules/World) in which this entity is going to join
|
getWorld
|
{
"repo_name": "wizards-of-lua/wizards-of-lua",
"path": "src/main/java/net/wizardsoflua/lua/classes/event/EntityJoinWorldEventClass.java",
"license": "gpl-3.0",
"size": 2505
}
|
[
"net.minecraft.world.World"
] |
import net.minecraft.world.World;
|
import net.minecraft.world.*;
|
[
"net.minecraft.world"
] |
net.minecraft.world;
| 570,153
|
/**
 * Loads a subset of the grid coverage represented by this resource.
 * All selected variables must share one data type; their values are read
 * (under {@code lock}) into NIO buffers and wrapped into a raster.
 *
 * @param domain desired grid extent and resolution, or {@code null} for reading the whole domain.
 * @param range 0-based indices of sample dimensions to read, or {@code null} or an empty sequence for reading them all.
 * @return the grid coverage for the specified domain and range.
 * @throws DataStoreException if an error occurred while reading the grid coverage data.
 */
@Override
public GridCoverage read(final GridGeometry domain, final int... range) throws DataStoreException {
    final long startTime = System.nanoTime();
    final RangeArgument rangeIndices = validateRangeArgument(ranges.length, range);
    final Variable first = data[bandDimension >= 0 ? 0 : rangeIndices.getFirstSpecified()];
    final DataType dataType = first.getDataType();
    if (bandDimension < 0) {
        // All selected variables must share one data type to fit in a single raster.
        for (int i=0; i<rangeIndices.getNumBands(); i++) {
            final Variable variable = data[rangeIndices.getSourceIndex(i)];
            if (!dataType.equals(variable.getDataType())) {
                throw new DataStoreContentException(Resources.forLocale(getLocale()).getString(
                        Resources.Keys.MismatchedVariableType_3, getFilename(), first.getName(), variable.getName()));
            }
        }
    }
    final GridGeometry targetDomain;
    final DataBuffer imageBuffer;
    final SampleDimension[] bands = new SampleDimension[rangeIndices.getNumBands()];
    int[] bandOffsets = null;                                   // By default, all bands start at index 0.
    try {
        final GridDerivation targetGeometry = gridGeometry.derive()
                .rounding(GridRoundingMode.ENCLOSING)
                .subgrid((domain != null) ? domain : gridGeometry);
        GridExtent areaOfInterest = targetGeometry.getIntersection();   // Pixel indices of data to read.
        int[] subsampling = targetGeometry.getSubsampling();            // Slice to read or subsampling to apply.
        int numBuffers = bands.length;                                  // By default, one variable per band.
        targetDomain = targetGeometry.build();                          // Adjust user-specified domain to data geometry.
        if (bandDimension >= 0) {
            // Bands are stored along one dimension of a single variable;
            // extend the read region/subsampling to cover that dimension.
            areaOfInterest = rangeIndices.insertBandDimension(areaOfInterest, bandDimension);
            subsampling = rangeIndices.insertSubsampling (subsampling, bandDimension);
            if (bandDimension == 0) {
                bandOffsets = new int[numBuffers];          // Will be set to non-zero values later.
            }
            numBuffers = 1;                                 // One variable for all bands.
        }
        Buffer[] sampleValues = new Buffer[numBuffers];
        synchronized (lock) {
            // Read each selected variable; the lock guards shared decoder state.
            for (int i=0; i<bands.length; i++) {
                int indexInResource = rangeIndices.getSourceIndex(i);   // In strictly increasing order.
                int indexInRaster = rangeIndices.getTargetIndex(i);
                Variable variable = getVariable(indexInResource);
                SampleDimension b = ranges[indexInResource];
                if (b == null) {
                    // Lazily created and cached for subsequent reads.
                    ranges[indexInResource] = b = createSampleDimension(rangeIndices.builder(), variable, i);
                }
                bands[indexInRaster] = b;
                if (bandOffsets != null) {
                    bandOffsets[indexInRaster] = i;
                    indexInRaster = 0;          // Pixels interleaved in one bank: sampleValues.length = 1.
                }
                if (i < numBuffers) try {
                    // Optional.orElseThrow() below should never fail since Variable.read(…) wraps primitive array.
                    sampleValues[indexInRaster] = variable.read(areaOfInterest, subsampling).buffer().get();
                } catch (ArithmeticException e) {
                    throw variable.canNotComputePosition(e);
                }
            }
        }
        if (bandDimension > 0) {        // Really > 0, not >= 0.
            // Split the single interleaved buffer into per-band views of equal stride.
            final int stride = Math.toIntExact(data[0].getBandStride());
            Buffer values = sampleValues[0].limit(stride);
            sampleValues = new Buffer[bands.length];
            for (int i=0; i<sampleValues.length; i++) {
                if (i != 0) {
                    values = JDK9.duplicate(values);
                    final int p = values.limit();
                    values.position(p).limit(Math.addExact(p, stride));
                }
                sampleValues[i] = values;
            }
        }
        imageBuffer = RasterFactory.wrap(dataType.rasterDataType, sampleValues);
    } catch (IOException | RuntimeException e) {
        throw canNotRead(getFilename(), domain, e);
    }
    if (imageBuffer == null) {
        // RasterFactory.wrap(…) returned nothing for this data type.
        throw new DataStoreContentException(Errors.getResources(getLocale()).getString(Errors.Keys.UnsupportedType_1, dataType.name()));
    }
    final Variable main = data[visibleBand];
    final Raster raster = new Raster(targetDomain, UnmodifiableArrayList.wrap(bands), imageBuffer,
            String.valueOf(identifier), rangeIndices.getPixelStride(), bandOffsets, visibleBand,
            main.decoder.convention().getColors(main));
    logReadOperation(location, targetDomain, startTime);
    return raster;
}
|
GridCoverage function(final GridGeometry domain, final int... range) throws DataStoreException { final long startTime = System.nanoTime(); final RangeArgument rangeIndices = validateRangeArgument(ranges.length, range); final Variable first = data[bandDimension >= 0 ? 0 : rangeIndices.getFirstSpecified()]; final DataType dataType = first.getDataType(); if (bandDimension < 0) { for (int i=0; i<rangeIndices.getNumBands(); i++) { final Variable variable = data[rangeIndices.getSourceIndex(i)]; if (!dataType.equals(variable.getDataType())) { throw new DataStoreContentException(Resources.forLocale(getLocale()).getString( Resources.Keys.MismatchedVariableType_3, getFilename(), first.getName(), variable.getName())); } } } final GridGeometry targetDomain; final DataBuffer imageBuffer; final SampleDimension[] bands = new SampleDimension[rangeIndices.getNumBands()]; int[] bandOffsets = null; try { final GridDerivation targetGeometry = gridGeometry.derive() .rounding(GridRoundingMode.ENCLOSING) .subgrid((domain != null) ? 
domain : gridGeometry); GridExtent areaOfInterest = targetGeometry.getIntersection(); int[] subsampling = targetGeometry.getSubsampling(); int numBuffers = bands.length; targetDomain = targetGeometry.build(); if (bandDimension >= 0) { areaOfInterest = rangeIndices.insertBandDimension(areaOfInterest, bandDimension); subsampling = rangeIndices.insertSubsampling (subsampling, bandDimension); if (bandDimension == 0) { bandOffsets = new int[numBuffers]; } numBuffers = 1; } Buffer[] sampleValues = new Buffer[numBuffers]; synchronized (lock) { for (int i=0; i<bands.length; i++) { int indexInResource = rangeIndices.getSourceIndex(i); int indexInRaster = rangeIndices.getTargetIndex(i); Variable variable = getVariable(indexInResource); SampleDimension b = ranges[indexInResource]; if (b == null) { ranges[indexInResource] = b = createSampleDimension(rangeIndices.builder(), variable, i); } bands[indexInRaster] = b; if (bandOffsets != null) { bandOffsets[indexInRaster] = i; indexInRaster = 0; } if (i < numBuffers) try { sampleValues[indexInRaster] = variable.read(areaOfInterest, subsampling).buffer().get(); } catch (ArithmeticException e) { throw variable.canNotComputePosition(e); } } } if (bandDimension > 0) { final int stride = Math.toIntExact(data[0].getBandStride()); Buffer values = sampleValues[0].limit(stride); sampleValues = new Buffer[bands.length]; for (int i=0; i<sampleValues.length; i++) { if (i != 0) { values = JDK9.duplicate(values); final int p = values.limit(); values.position(p).limit(Math.addExact(p, stride)); } sampleValues[i] = values; } } imageBuffer = RasterFactory.wrap(dataType.rasterDataType, sampleValues); } catch (IOException RuntimeException e) { throw canNotRead(getFilename(), domain, e); } if (imageBuffer == null) { throw new DataStoreContentException(Errors.getResources(getLocale()).getString(Errors.Keys.UnsupportedType_1, dataType.name())); } final Variable main = data[visibleBand]; final Raster raster = new Raster(targetDomain, 
UnmodifiableArrayList.wrap(bands), imageBuffer, String.valueOf(identifier), rangeIndices.getPixelStride(), bandOffsets, visibleBand, main.decoder.convention().getColors(main)); logReadOperation(location, targetDomain, startTime); return raster; }
|
/**
* Loads a subset of the grid coverage represented by this resource.
*
* @param domain desired grid extent and resolution, or {@code null} for reading the whole domain.
* @param range 0-based indices of sample dimensions to read, or {@code null} or an empty sequence for reading them all.
* @return the grid coverage for the specified domain and range.
* @throws DataStoreException if an error occurred while reading the grid coverage data.
*/
|
Loads a subset of the grid coverage represented by this resource
|
read
|
{
"repo_name": "apache/sis",
"path": "storage/sis-netcdf/src/main/java/org/apache/sis/internal/netcdf/RasterResource.java",
"license": "apache-2.0",
"size": 39569
}
|
[
"java.awt.image.DataBuffer",
"java.io.IOException",
"java.nio.Buffer",
"org.apache.sis.coverage.SampleDimension",
"org.apache.sis.coverage.grid.GridCoverage",
"org.apache.sis.coverage.grid.GridDerivation",
"org.apache.sis.coverage.grid.GridExtent",
"org.apache.sis.coverage.grid.GridGeometry",
"org.apache.sis.coverage.grid.GridRoundingMode",
"org.apache.sis.internal.coverage.j2d.RasterFactory",
"org.apache.sis.internal.util.UnmodifiableArrayList",
"org.apache.sis.storage.DataStoreContentException",
"org.apache.sis.storage.DataStoreException",
"org.apache.sis.util.resources.Errors"
] |
import java.awt.image.DataBuffer; import java.io.IOException; import java.nio.Buffer; import org.apache.sis.coverage.SampleDimension; import org.apache.sis.coverage.grid.GridCoverage; import org.apache.sis.coverage.grid.GridDerivation; import org.apache.sis.coverage.grid.GridExtent; import org.apache.sis.coverage.grid.GridGeometry; import org.apache.sis.coverage.grid.GridRoundingMode; import org.apache.sis.internal.coverage.j2d.RasterFactory; import org.apache.sis.internal.util.UnmodifiableArrayList; import org.apache.sis.storage.DataStoreContentException; import org.apache.sis.storage.DataStoreException; import org.apache.sis.util.resources.Errors;
|
import java.awt.image.*; import java.io.*; import java.nio.*; import org.apache.sis.coverage.*; import org.apache.sis.coverage.grid.*; import org.apache.sis.internal.coverage.j2d.*; import org.apache.sis.internal.util.*; import org.apache.sis.storage.*; import org.apache.sis.util.resources.*;
|
[
"java.awt",
"java.io",
"java.nio",
"org.apache.sis"
] |
java.awt; java.io; java.nio; org.apache.sis;
| 1,791,057
|
return (DecimalFormat) getFormatter();
}
|
return (DecimalFormat) getFormatter(); }
|
/**
* Returns the format for the filter. The DecimalFormatParser has only DecimalFormat objects assigned.
*
* @return the formatter.
*/
|
Returns the format for the filter. The DecimalFormatParser has only DecimalFormat objects assigned
|
getDecimalFormat
|
{
"repo_name": "EgorZhuk/pentaho-reporting",
"path": "engine/core/src/main/java/org/pentaho/reporting/engine/classic/core/filter/DecimalFormatParser.java",
"license": "lgpl-2.1",
"size": 6235
}
|
[
"java.text.DecimalFormat"
] |
import java.text.DecimalFormat;
|
import java.text.*;
|
[
"java.text"
] |
java.text;
| 1,656,807
|
/**
 * Dump name and signature index to file stream in binary format:
 * one tag byte followed by the two 16-bit constant-pool indexes.
 *
 * @param file output file stream
 * @throws IOException if writing to the stream fails
 */
public final void dump(DataOutputStream file) throws IOException {
    file.writeByte(tag);
    file.writeShort(name_index);
    file.writeShort(signature_index);
}
|
final void function(DataOutputStream file) throws IOException { file.writeByte(tag); file.writeShort(name_index); file.writeShort(signature_index); }
|
/**
* Dump name and signature index to file stream in binary format.
*
* @param file Output file stream
* @throws IOException
*/
|
Dump name and signature index to file stream in binary format
|
dump
|
{
"repo_name": "tempbottle/CJBE",
"path": "src/org/apache/bcel/classfile/ConstantNameAndType.java",
"license": "gpl-3.0",
"size": 6268
}
|
[
"java.io.DataOutputStream",
"java.io.IOException"
] |
import java.io.DataOutputStream; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 191,593
|
/**
 * Returns the HTTP header field for the given connection; this method is
 * only guaranteed to work when invoked from the readHeaders method.
 *
 * @param connection the connection to the network
 * @param header the name of the header
 * @return the value of the header
 * @throws java.io.IOException thrown on failure
 */
protected String getHeader(Object connection, String header) throws IOException {
    // Delegates the lookup to the platform implementation layer.
    return Util.getImplementation().getHeaderField(header, connection);
}
|
String function(Object connection, String header) throws IOException { return Util.getImplementation().getHeaderField(header, connection); }
|
/**
* Returns the HTTP header field for the given connection, this method is only guaranteed to work
* when invoked from the readHeaders method.
*
* @param connection the connection to the network
* @param header the name of the header
* @return the value of the header
* @throws java.io.IOException thrown on failure
*/
|
Returns the HTTP header field for the given connection, this method is only guaranteed to work when invoked from the readHeaders method
|
getHeader
|
{
"repo_name": "codenameone/CodenameOne",
"path": "CodenameOne/src/com/codename1/io/ConnectionRequest.java",
"license": "gpl-2.0",
"size": 108017
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 937,675
|
QueryFilter<DataFile> byContentVersionNo(Long id, boolean isMainFile);
|
QueryFilter<DataFile> byContentVersionNo(Long id, boolean isMainFile);
|
/**
* Filters the entities by the number of the version they belong to.
*
* @param id
* identifier (primary key value) of the content version.
* @param isMainFile
* if <code>true</code> then files being main files for the version will be searched, otherwise files
* belonging to the version will be returned.
* @return current representations of filters set
*/
|
Filters the entities by the number of the version they belong to
|
byContentVersionNo
|
{
"repo_name": "psnc-dl/darceo",
"path": "wrdz/wrdz-zmd/dao/src/main/java/pl/psnc/synat/wrdz/zmd/dao/object/content/DataFileFilterFactory.java",
"license": "gpl-3.0",
"size": 4177
}
|
[
"pl.psnc.synat.wrdz.common.dao.QueryFilter",
"pl.psnc.synat.wrdz.zmd.entity.object.content.DataFile"
] |
import pl.psnc.synat.wrdz.common.dao.QueryFilter; import pl.psnc.synat.wrdz.zmd.entity.object.content.DataFile;
|
import pl.psnc.synat.wrdz.common.dao.*; import pl.psnc.synat.wrdz.zmd.entity.object.content.*;
|
[
"pl.psnc.synat"
] |
pl.psnc.synat;
| 1,717,413
|
/**
 * Damages armor in each slot by the specified amount. The raw amount is
 * quartered (minimum 1) before being applied; a piece whose stack size
 * drops to 0 is removed from its slot.
 *
 * @param par1 raw damage to apply across the armor slots
 */
public void damageArmor(float par1) {
    float amount = par1 / 4.0F;
    if (amount < 1.0F) {
        amount = 1.0F;
    }
    for (int slot = 0; slot < this.armorInventory.length; ++slot) {
        // Skip empty slots and items that are not armor.
        if (this.armorInventory[slot] == null
                || !(this.armorInventory[slot].getItem() instanceof ItemArmor)) {
            continue;
        }
        this.armorInventory[slot].damageItem((int) amount, this.player);
        if (this.armorInventory[slot].stackSize == 0) {
            this.armorInventory[slot] = null;
        }
    }
}
|
void function(float par1) { par1 /= 4.0F; if (par1 < 1.0F) { par1 = 1.0F; } for (int var2 = 0; var2 < this.armorInventory.length; ++var2) { if (this.armorInventory[var2] != null && this.armorInventory[var2].getItem() instanceof ItemArmor) { this.armorInventory[var2].damageItem((int) par1, this.player); if (this.armorInventory[var2].stackSize == 0) { this.armorInventory[var2] = null; } } } }
|
/**
* Damages armor in each slot by the specified amount.
*/
|
Damages armor in each slot by the specified amount
|
damageArmor
|
{
"repo_name": "DirectCodeGraveyard/Minetweak",
"path": "src/main/java/net/minecraft/inventory/InventoryPlayer.java",
"license": "lgpl-3.0",
"size": 20067
}
|
[
"net.minecraft.item.ItemArmor"
] |
import net.minecraft.item.ItemArmor;
|
import net.minecraft.item.*;
|
[
"net.minecraft.item"
] |
net.minecraft.item;
| 879,180
|
/**
 * Update the verification tables for the IDs contained in this object:
 * the object is recorded in the unverified list keyed by SOP instance UID,
 * mapped to its study, and the study is entered into the date and
 * patient-ID indexes. Failures are logged and never propagate.
 *
 * @param fileObject the object to process.
 * @return the same FileObject (returned even when table updates fail).
 */
@Override
public FileObject process(FileObject fileObject) {
    lastFileIn = new File(fileObject.getFile().getAbsolutePath());
    lastTimeIn = System.currentTimeMillis();

    //Now process the object.
    try {
        String date = StringUtil.getDate("");
        String ptID = fileObject.getPatientID();
        String ptName = fileObject.getPatientName();
        String siUID = fileObject.getStudyInstanceUID();
        String sopiUID = fileObject.getSOPInstanceUID();
        synchronized (unverifiedList) {
            try {
                //Record this instance as unverified, with its digest for later matching.
                unverifiedList.insert(sopiUID,
                        new UnverifiedObject(
                                System.currentTimeMillis(),
                                fileObject.getDigest()),
                        true);
                sopiIndex.put(sopiUID, siUID);
                StudyObject sob = (StudyObject)studyTable.get(siUID);
                if (sob == null) {
                    //The study object does not exist.
                    //We haven't seen this study before.
                    //Create the StudyObject.
                    sob = new StudyObject(date, siUID, ptID, ptName);
                    //Enter the study into the index of studies by date
                    HashSet<String> studies = (HashSet<String>)dateIndex.find(date);
                    if (studies == null) {
                        studies = new HashSet<String>();
                    }
                    studies.add(siUID);
                    dateIndex.insert(date, studies, true);
                    //Enter the study into the index of studies by ptid
                    studies = (HashSet<String>)ptidIndex.find(ptID);
                    if (studies == null) {
                        studies = new HashSet<String>();
                    }
                    studies.add(siUID);
                    ptidIndex.insert(ptID, studies, true);
                }
                sob.putSubmitDate(sopiUID);
                sob.putEntryDate(sopiUID, "");
                studyTable.put(siUID, sob);
            }
            catch (Exception ignore) {
                //Best effort: log and continue so the pipeline is not blocked.
                logger.warn("Unable to update the verification tables for:");
                logger.warn("   sopiUID = "+sopiUID);
                logger.warn("   siUID = "+siUID);
                logger.warn("   ",ignore);
            }
            //Now commit everything
            recman.commit();
        }
    }
    catch (Exception skip) {
        logger.debug("Unable to process "+fileObject.getFile());
    }
    lastFileOut = new File(fileObject.getFile().getAbsolutePath());
    lastTimeOut = System.currentTimeMillis();
    return fileObject;
}
|
FileObject function(FileObject fileObject) { lastFileIn = new File(fileObject.getFile().getAbsolutePath()); lastTimeIn = System.currentTimeMillis(); try { String date = StringUtil.getDate(STRSTRUnable to update the verification tables for:STR sopiUID = STR siUID = STR STRUnable to process "+fileObject.getFile()); } lastFileOut = new File(fileObject.getFile().getAbsolutePath()); lastTimeOut = System.currentTimeMillis(); return fileObject; }
|
/**
* Update the tables for the IDs contained in this object.
* @param fileObject the object to process.
* @return the same FileObject if the result is true; otherwise null.
*/
|
Update the tables for the IDs contained in this object
|
process
|
{
"repo_name": "blezek/Notion",
"path": "src/main/java/org/rsna/ctp/stdstages/DatabaseVerifier.java",
"license": "bsd-3-clause",
"size": 10701
}
|
[
"java.io.File",
"org.rsna.ctp.objects.FileObject",
"org.rsna.util.StringUtil"
] |
import java.io.File; import org.rsna.ctp.objects.FileObject; import org.rsna.util.StringUtil;
|
import java.io.*; import org.rsna.ctp.objects.*; import org.rsna.util.*;
|
[
"java.io",
"org.rsna.ctp",
"org.rsna.util"
] |
java.io; org.rsna.ctp; org.rsna.util;
| 1,389,082
|
/**
 * Enables/Disables the side menu bar swipe, defaults to true.
 *
 * @param aEnableSideMenuSwipe the enableSideMenuSwipe to set
 */
public static void setEnableSideMenuSwipe(boolean aEnableSideMenuSwipe) {
    enableSideMenuSwipe = aEnableSideMenuSwipe;
}
// Title component and the side-menu controller backing this toolbar.
private Component titleComponent;
private ToolbarSideMenu sideMenu;
// Commands routed to the overflow menu, and the button that opens it.
private Vector<Command> overflowCommands;
private Button menuButton;
// Commands that open the left/right side menus.
private Command leftSideMenuCommand;
private Command rightSideMenuCommand;
// Listeners and motion state used to hide/show the toolbar on scroll.
private ScrollListener scrollListener;
private ActionListener releasedListener;
private boolean scrollOff = false;
private int initialY;
private int actualPaneInitialY;
private int actualPaneInitialH;
private Motion hideShowMotion;
private boolean showing;
private boolean layered = false;
private boolean initialized = false;
// Global (static) side-menu behavior flags shared by all toolbars.
private static boolean permanentSideMenu;
private static boolean onTopSideMenu = true;
// Dialogs implementing the on-top left/right side menus.
private InteractionDialog sidemenuDialog;
private InteractionDialog rightSidemenuDialog;
// Tracks which pointer listeners have already been registered.
private boolean isPointerReleasedListenerAdded = false;
private boolean isPointerPressedListenerAdded = false;
private boolean isPointerDraggedListenerAdded = false;
private boolean rightSideMenuCmdsAlignedToLeft = false;
// Containers used when the side menu is permanently visible.
private Container permanentSideMenuContainer;
private Container permanentRightSideMenuContainer;
private static boolean globalToolbar;
private static boolean centeredDefault = true;
// Search command and the components pinned to the south of the side menus.
private Command searchCommand;
private Component sidemenuSouthComponent;
private Component rightSidemenuSouthComponent;
private float searchIconSize;
/**
 * Creates a toolbar using a border layout, applies the themed UIID
 * ("Toolbar", with the "ToolbarLandscape" variant when the theme constant
 * "landscapeTitleUiidBool" is set) and centers the title when the theme's
 * "Title" style alignment is CENTER.
 */
public Toolbar() {
    setLayout(new BorderLayout());
    if (UIManager.getInstance().isThemeConstant("landscapeTitleUiidBool", false)) {
        setUIID("Toolbar", "ToolbarLandscape");
    } else {
        setUIID("Toolbar");
    }
    sideMenu = new ToolbarSideMenu();
    if (centeredDefault
            && UIManager.getInstance().getComponentStyle("Title").getAlignment() == CENTER) {
        setTitleCentered(true);
    }
}
|
static void function(boolean aEnableSideMenuSwipe) { enableSideMenuSwipe = aEnableSideMenuSwipe; } private Component titleComponent; private ToolbarSideMenu sideMenu; private Vector<Command> overflowCommands; private Button menuButton; private Command leftSideMenuCommand; private Command rightSideMenuCommand; private ScrollListener scrollListener; private ActionListener releasedListener; private boolean scrollOff = false; private int initialY; private int actualPaneInitialY; private int actualPaneInitialH; private Motion hideShowMotion; private boolean showing; private boolean layered = false; private boolean initialized = false; private static boolean permanentSideMenu; private static boolean onTopSideMenu = true; private InteractionDialog sidemenuDialog; private InteractionDialog rightSidemenuDialog; private boolean isPointerReleasedListenerAdded = false; private boolean isPointerPressedListenerAdded = false; private boolean isPointerDraggedListenerAdded = false; private boolean rightSideMenuCmdsAlignedToLeft = false; private Container permanentSideMenuContainer; private Container permanentRightSideMenuContainer; private static boolean globalToolbar; private static boolean centeredDefault = true; private Command searchCommand; private Component sidemenuSouthComponent; private Component rightSidemenuSouthComponent; private float searchIconSize; public Toolbar() { setLayout(new BorderLayout()); if (UIManager.getInstance().isThemeConstant(STR, false)) { setUIID(STR, STR); } else { setUIID(STR); } sideMenu = new ToolbarSideMenu(); if (centeredDefault && UIManager.getInstance().getComponentStyle("Title").getAlignment() == CENTER) { setTitleCentered(true); } }
|
/**
* Enables/Disables the side menu bar swipe, defaults to true
* @param aEnableSideMenuSwipe the enableSideMenuSwipe to set
*/
|
Enables/Disables the side menu bar swipe, defaults to true
|
setEnableSideMenuSwipe
|
{
"repo_name": "codenameone/CodenameOne",
"path": "CodenameOne/src/com/codename1/ui/Toolbar.java",
"license": "gpl-2.0",
"size": 114065
}
|
[
"com.codename1.components.InteractionDialog",
"com.codename1.ui.animations.Motion",
"com.codename1.ui.events.ActionListener",
"com.codename1.ui.events.ScrollListener",
"com.codename1.ui.layouts.BorderLayout",
"com.codename1.ui.plaf.UIManager",
"java.util.Vector"
] |
import com.codename1.components.InteractionDialog; import com.codename1.ui.animations.Motion; import com.codename1.ui.events.ActionListener; import com.codename1.ui.events.ScrollListener; import com.codename1.ui.layouts.BorderLayout; import com.codename1.ui.plaf.UIManager; import java.util.Vector;
|
import com.codename1.components.*; import com.codename1.ui.animations.*; import com.codename1.ui.events.*; import com.codename1.ui.layouts.*; import com.codename1.ui.plaf.*; import java.util.*;
|
[
"com.codename1.components",
"com.codename1.ui",
"java.util"
] |
com.codename1.components; com.codename1.ui; java.util;
| 1,465,152
|
/**
 * Expert: get the collection mode.
 *
 * @return the configured sub-aggregation collection mode
 */
public SubAggCollectionMode collectMode() {
    return collectMode;
}
|
SubAggCollectionMode function() { return collectMode; }
|
/**
* Expert: get the collection mode.
*/
|
Expert: get the collection mode
|
collectMode
|
{
"repo_name": "coding0011/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java",
"license": "apache-2.0",
"size": 16225
}
|
[
"org.elasticsearch.search.aggregations.Aggregator"
] |
import org.elasticsearch.search.aggregations.Aggregator;
|
import org.elasticsearch.search.aggregations.*;
|
[
"org.elasticsearch.search"
] |
org.elasticsearch.search;
| 1,761,151
|
/**
 * Read the status line from the given HttpConnection, tolerating up to
 * STATUS_LINE_GARBAGE_LIMIT junk lines before the real "HTTP…" line, and
 * record the protocol version the server reported. A missing version
 * token ("HTTP" alone) falls back to HTTP/1.0 unless the
 * UNAMBIGUOUS_STATUS_LINE parameter forbids it.
 *
 * @param state the HTTP state (not read by this method)
 * @param conn the open connection to read the status line from
 * @throws IOException if reading from the connection fails
 * @throws HttpException if no valid HTTP status line is found
 */
protected void readStatusLine(HttpState state, HttpConnection conn)
        throws IOException, HttpException {
    LOG.trace("enter HttpMethodBase.readStatusLine(HttpState, HttpConnection)");

    final int maxGarbageLines = getParams().
        getIntParameter(HttpMethodParams.STATUS_LINE_GARBAGE_LIMIT, Integer.MAX_VALUE);

    //read out the HTTP status string, skipping leading garbage lines
    int count = 0;
    String s;
    do {
        s = conn.readLine(getParams().getHttpElementCharset());
        if (s == null && count == 0) {
            // The server just dropped connection on us
            throw new NoHttpResponseException("The server " + conn.getHost() +
                " failed to respond");
        }
        if (s != null && StatusLine.startsWithHTTP(s)) {
            // Got one
            break;
        } else if (s == null || count >= maxGarbageLines) {
            // Giving up
            throw new ProtocolException("The server " + conn.getHost() +
                " failed to respond with a valid HTTP response");
        }
        count++;
    } while(true);

    //create the status line from the status string
    statusLine = new StatusLine(s);

    //check for a valid HTTP-Version
    String versionStr = statusLine.getHttpVersion();
    if (getParams().isParameterFalse(HttpMethodParams.UNAMBIGUOUS_STATUS_LINE)
        && versionStr.equals("HTTP")) {
        // Version token missing entirely: assume HTTP/1.0 and warn.
        getParams().setVersion(HttpVersion.HTTP_1_0);
        if (LOG.isWarnEnabled()) {
            LOG.warn("Ambiguous status line (HTTP protocol version missing):" +
                statusLine.toString());
        }
    } else {
        this.effectiveVersion = HttpVersion.parse(versionStr);
    }
}
// ------------------------------------------------------ Protected Methods
|
void function(HttpState state, HttpConnection conn) throws IOException, HttpException { LOG.trace(STR); final int maxGarbageLines = getParams(). getIntParameter(HttpMethodParams.STATUS_LINE_GARBAGE_LIMIT, Integer.MAX_VALUE); int count = 0; String s; do { s = conn.readLine(getParams().getHttpElementCharset()); if (s == null && count == 0) { throw new NoHttpResponseException(STR + conn.getHost() + STR); } if (s != null && StatusLine.startsWithHTTP(s)) { break; } else if (s == null count >= maxGarbageLines) { throw new ProtocolException(STR + conn.getHost() + STR); } count++; } while(true); statusLine = new StatusLine(s); String versionStr = statusLine.getHttpVersion(); if (getParams().isParameterFalse(HttpMethodParams.UNAMBIGUOUS_STATUS_LINE) && versionStr.equals("HTTP")) { getParams().setVersion(HttpVersion.HTTP_1_0); if (LOG.isWarnEnabled()) { LOG.warn(STR + statusLine.toString()); } } else { this.effectiveVersion = HttpVersion.parse(versionStr); } }
|
/**
* Read the status line from the given {@link HttpConnection}, setting my
* {@link #getStatusCode status code} and {@link #getStatusText status
* text}.
*
* <p>
* Subclasses may want to override this method to to customize the
* processing.
* </p>
*
* @param state the {@link HttpState state} information associated with this method
* @param conn the {@link HttpConnection connection} used to execute
* this HTTP method
*
* @throws IOException if an I/O (transport) error occurs. Some transport exceptions
* can be recovered from.
* @throws HttpException if a protocol exception occurs. Usually protocol exceptions
* cannot be recovered from.
*
* @see StatusLine
*/
|
Read the status line from the given <code>HttpConnection</code>, setting my <code>#getStatusCode status code</code> and <code>#getStatusText status text</code>. Subclasses may want to override this method to to customize the processing.
|
readStatusLine
|
{
"repo_name": "j4nnis/bproxy",
"path": "src/org/apache/commons/httpclient/HttpMethodBase.java",
"license": "apache-2.0",
"size": 98546
}
|
[
"java.io.IOException",
"org.apache.commons.httpclient.params.HttpMethodParams"
] |
import java.io.IOException; import org.apache.commons.httpclient.params.HttpMethodParams;
|
import java.io.*; import org.apache.commons.httpclient.params.*;
|
[
"java.io",
"org.apache.commons"
] |
java.io; org.apache.commons;
| 650,900
|
@Nullable
Release getPreviousRelease() throws RepositoryException;
|
Release getPreviousRelease() throws RepositoryException;
|
/**
* Returns the release from which this release was created.
*/
|
Returns the release from which this release was created
|
getPreviousRelease
|
{
"repo_name": "ist-dresden/composum-platform",
"path": "services/staging/src/main/java/com/composum/sling/platform/staging/Release.java",
"license": "mit",
"size": 4249
}
|
[
"javax.jcr.RepositoryException"
] |
import javax.jcr.RepositoryException;
|
import javax.jcr.*;
|
[
"javax.jcr"
] |
javax.jcr;
| 2,177,685
|
Set<Cell<R, C, V>> cellSet();
|
Set<Cell<R, C, V>> cellSet();
|
/**
* Returns a set of all row key / column key / value triplets. Changes to the
* returned set will update the underlying table, and vice versa. The cell set
* does not support the {@code add} or {@code addAll} methods.
*
* @return set of table cells consisting of row key / column key / value
* triplets
*/
|
Returns a set of all row key / column key / value triplets. Changes to the returned set will update the underlying table, and vice versa. The cell set does not support the add or addAll methods
|
cellSet
|
{
"repo_name": "Allive1/pinpoint",
"path": "thirdparty/google-guava/src/main/java/com/google/common/collect/Table.java",
"license": "apache-2.0",
"size": 9890
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,946,575
|
public static <T extends ModifierReviewable.OfByteCodeElement> ElementMatcher.Junction<T> isPackagePrivate() {
return not(isPublic().or(isProtected()).or(isPrivate()));
}
|
static <T extends ModifierReviewable.OfByteCodeElement> ElementMatcher.Junction<T> function() { return not(isPublic().or(isProtected()).or(isPrivate())); }
|
/**
* Matches a {@link ModifierReviewable} that is package-private.
*
* @param <T> The type of the matched object.
* @return A matcher for a package-private modifier reviewable.
*/
|
Matches a <code>ModifierReviewable</code> that is package-private
|
isPackagePrivate
|
{
"repo_name": "DALDEI/byte-buddy",
"path": "byte-buddy-dep/src/main/java/net/bytebuddy/matcher/ElementMatchers.java",
"license": "apache-2.0",
"size": 103880
}
|
[
"net.bytebuddy.description.ModifierReviewable"
] |
import net.bytebuddy.description.ModifierReviewable;
|
import net.bytebuddy.description.*;
|
[
"net.bytebuddy.description"
] |
net.bytebuddy.description;
| 1,827,791
|
private final boolean explosible( int col, int row )
{
if( col <= 0 || col >= this.engine.colNumber ||
row <= 0 || row >= this.engine.rowsNumber )
return false;
byte content = this.engine.getCell( col, row );
if( content != GameEngine.SOFT_WALL_PLACE &&
content != GameEngine.HARD_WALL_PLACE )
return true;
else
return false;
}
|
final boolean function( int col, int row ) { if( col <= 0 col >= this.engine.colNumber row <= 0 row >= this.engine.rowsNumber ) return false; byte content = this.engine.getCell( col, row ); if( content != GameEngine.SOFT_WALL_PLACE && content != GameEngine.HARD_WALL_PLACE ) return true; else return false; }
|
/**
* Returns <code>true</code> if specified cell can be destructed.
* @param col the column of cell.
* @param row the row of cell.
* @return <code>true</code> if this cell can be blowed up by bomb explosion.
*/
|
Returns <code>true</code> if specified cell can be destructed
|
explosible
|
{
"repo_name": "yaricom/bombman-RL-AI-J2ME",
"path": "src/common/ng/games/bombman/sprites/BombSprite.java",
"license": "mit",
"size": 17094
}
|
[
"ng.games.bombman.GameEngine"
] |
import ng.games.bombman.GameEngine;
|
import ng.games.bombman.*;
|
[
"ng.games.bombman"
] |
ng.games.bombman;
| 1,640,064
|
public final void destroy() throws PluginException {
LOG.info("Destroying PreviousEntriesPlugin.");
}
|
final void function() throws PluginException { LOG.info(STR); }
|
/**
* Writes plugin destroy message.
* @throws PluginException when there's an error in finalising this plugin
*/
|
Writes plugin destroy message
|
destroy
|
{
"repo_name": "cliffano/bloojm",
"path": "previousentriesplugin/src/main/java/com/mbledug/blojsom/plugin/previousentries/PreviousEntriesPlugin.java",
"license": "bsd-3-clause",
"size": 5857
}
|
[
"org.blojsom.plugin.PluginException"
] |
import org.blojsom.plugin.PluginException;
|
import org.blojsom.plugin.*;
|
[
"org.blojsom.plugin"
] |
org.blojsom.plugin;
| 2,562,511
|
Set<Integer> getObjectsId(int tx, int ty);
|
Set<Integer> getObjectsId(int tx, int ty);
|
/**
* Get objects ID at this location.
*
* @param tx The horizontal tile index.
* @param ty The vertical tile index.
* @return The objects ID found.
*/
|
Get objects ID at this location
|
getObjectsId
|
{
"repo_name": "b3dgs/lionengine",
"path": "lionengine-game/src/main/java/com/b3dgs/lionengine/game/feature/tile/map/pathfinding/MapTilePath.java",
"license": "gpl-3.0",
"size": 6471
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 700,423
|
@Override
public void stopExecutionId(long executionId) throws UnknownExecutionIdException {
@Nullable ActiveExecution execution = getActiveExecution(executionId);
if (execution == null) {
throw new UnknownExecutionIdException(executionId);
}
execution.workflowExecution.cancel();
}
|
void function(long executionId) throws UnknownExecutionIdException { @Nullable ActiveExecution execution = getActiveExecution(executionId); if (execution == null) { throw new UnknownExecutionIdException(executionId); } execution.workflowExecution.cancel(); }
|
/**
* Stops the execution with the given execution id.
*
* @param executionId execution id
* @throws UnknownExecutionIdException if the given execution id does not belong to an active workflow execution
*/
|
Stops the execution with the given execution id
|
stopExecutionId
|
{
"repo_name": "cloudkeeper-project/cloudkeeper-all-inclusive",
"path": "workflow-service/src/main/java/com/svbio/workflow/service/WorkflowServiceImpl.java",
"license": "apache-2.0",
"size": 18697
}
|
[
"com.svbio.workflow.api.UnknownExecutionIdException",
"javax.annotation.Nullable"
] |
import com.svbio.workflow.api.UnknownExecutionIdException; import javax.annotation.Nullable;
|
import com.svbio.workflow.api.*; import javax.annotation.*;
|
[
"com.svbio.workflow",
"javax.annotation"
] |
com.svbio.workflow; javax.annotation;
| 848,107
|
protected void createCache( ) {
_cache = new SoftReference<T>( _data );
}
|
void function( ) { _cache = new SoftReference<T>( _data ); }
|
/**
* Creates a new cache soft reference to the currently
* referenced cache block.
*/
|
Creates a new cache soft reference to the currently referenced cache block
|
createCache
|
{
"repo_name": "asurve/arvind-sysml",
"path": "src/main/java/org/apache/sysml/runtime/controlprogram/caching/CacheableData.java",
"license": "apache-2.0",
"size": 42801
}
|
[
"java.lang.ref.SoftReference"
] |
import java.lang.ref.SoftReference;
|
import java.lang.ref.*;
|
[
"java.lang"
] |
java.lang;
| 1,222,010
|
public static Set getDirectDependsOnTables(IDatabaseConnection connection,
String tableName) throws SearchException
{
logger.debug("getDirectDependsOnTables(connection={}, tableName={}) - start",
connection, tableName);
ExportedKeysSearchCallback callback = new ExportedKeysSearchCallback(connection);
// Do a depthFirstSearch with a recursion depth of 1
DepthFirstSearch search = new DepthFirstSearch(1);
Set tables = search.search( new String[]{tableName}, callback );
return tables;
}
|
static Set function(IDatabaseConnection connection, String tableName) throws SearchException { logger.debug(STR, connection, tableName); ExportedKeysSearchCallback callback = new ExportedKeysSearchCallback(connection); DepthFirstSearch search = new DepthFirstSearch(1); Set tables = search.search( new String[]{tableName}, callback ); return tables; }
|
/**
* Returns a set of tables on which the given table directly depends on.
* @param connection The connection to be used for the database lookup.
* @param tableName
* @return a set of tables on which the given table directly depends on.
* @throws SearchException
* @since 2.4
*/
|
Returns a set of tables on which the given table directly depends on
|
getDirectDependsOnTables
|
{
"repo_name": "wbstr/dbunit",
"path": "src/main/java/org/dbunit/database/search/TablesDependencyHelper.java",
"license": "lgpl-2.1",
"size": 11588
}
|
[
"java.util.Set",
"org.dbunit.database.IDatabaseConnection",
"org.dbunit.util.search.DepthFirstSearch",
"org.dbunit.util.search.SearchException"
] |
import java.util.Set; import org.dbunit.database.IDatabaseConnection; import org.dbunit.util.search.DepthFirstSearch; import org.dbunit.util.search.SearchException;
|
import java.util.*; import org.dbunit.database.*; import org.dbunit.util.search.*;
|
[
"java.util",
"org.dbunit.database",
"org.dbunit.util"
] |
java.util; org.dbunit.database; org.dbunit.util;
| 2,342,669
|
public Collection<String> getUrlPatterns() {
return this.urlPatterns;
}
|
Collection<String> function() { return this.urlPatterns; }
|
/**
* Return a mutable collection of URL patterns that the filter will be registered
* against.
* @return the URL patterns
*/
|
Return a mutable collection of URL patterns that the filter will be registered against
|
getUrlPatterns
|
{
"repo_name": "habuma/spring-boot",
"path": "spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/servlet/AbstractFilterRegistrationBean.java",
"license": "apache-2.0",
"size": 9139
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,182,131
|
private void init()
{
// Get the context handler
_ctxtHandler = (ContextHandlerCollection) _server.getChildHandlerByClass(ContextHandlerCollection.class);
// get a deployerManager
List<DeploymentManager> deployers = _server.getBeans(DeploymentManager.class);
if (deployers != null && !deployers.isEmpty())
{
_deploymentManager = deployers.get(0);
for (AppProvider provider : _deploymentManager.getAppProviders())
{
if (provider instanceof OSGiAppProvider)
{
_provider = (OSGiAppProvider) provider;
break;
}
}
if (_provider == null)
{
// create it on the fly with reasonable default values.
try
{
_provider = new OSGiAppProvider();
_provider.setMonitoredDirResource(Resource.newResource(getDefaultOSGiContextsHome(new File(System.getProperty("jetty.home"))).toURI()));
}
catch (IOException e)
{
LOG.warn(e);
}
_deploymentManager.addAppProvider(_provider);
}
}
if (_ctxtHandler == null || _provider == null) throw new IllegalStateException("ERROR: No ContextHandlerCollection or OSGiAppProvider configured");
}
|
void function() { _ctxtHandler = (ContextHandlerCollection) _server.getChildHandlerByClass(ContextHandlerCollection.class); List<DeploymentManager> deployers = _server.getBeans(DeploymentManager.class); if (deployers != null && !deployers.isEmpty()) { _deploymentManager = deployers.get(0); for (AppProvider provider : _deploymentManager.getAppProviders()) { if (provider instanceof OSGiAppProvider) { _provider = (OSGiAppProvider) provider; break; } } if (_provider == null) { try { _provider = new OSGiAppProvider(); _provider.setMonitoredDirResource(Resource.newResource(getDefaultOSGiContextsHome(new File(System.getProperty(STR))).toURI())); } catch (IOException e) { LOG.warn(e); } _deploymentManager.addAppProvider(_provider); } } if (_ctxtHandler == null _provider == null) throw new IllegalStateException(STR); }
|
/**
* Must be called after the server is configured.
*
* Locate the actual instance of the ContextDeployer and WebAppDeployer that
* was created when configuring the server through jetty.xml. If there is no
* such thing it won't be possible to deploy webapps from a context and we
* throw IllegalStateExceptions.
*/
|
Must be called after the server is configured. Locate the actual instance of the ContextDeployer and WebAppDeployer that was created when configuring the server through jetty.xml. If there is no such thing it won't be possible to deploy webapps from a context and we throw IllegalStateExceptions
|
init
|
{
"repo_name": "thomasbecker/jetty-spdy",
"path": "jetty-osgi/jetty-osgi-boot/src/main/java/org/eclipse/jetty/osgi/boot/internal/serverfactory/ServerInstanceWrapper.java",
"license": "apache-2.0",
"size": 15518
}
|
[
"java.io.File",
"java.io.IOException",
"java.util.List",
"org.eclipse.jetty.deploy.AppProvider",
"org.eclipse.jetty.deploy.DeploymentManager",
"org.eclipse.jetty.osgi.boot.OSGiAppProvider",
"org.eclipse.jetty.server.handler.ContextHandlerCollection",
"org.eclipse.jetty.util.resource.Resource"
] |
import java.io.File; import java.io.IOException; import java.util.List; import org.eclipse.jetty.deploy.AppProvider; import org.eclipse.jetty.deploy.DeploymentManager; import org.eclipse.jetty.osgi.boot.OSGiAppProvider; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.util.resource.Resource;
|
import java.io.*; import java.util.*; import org.eclipse.jetty.deploy.*; import org.eclipse.jetty.osgi.boot.*; import org.eclipse.jetty.server.handler.*; import org.eclipse.jetty.util.resource.*;
|
[
"java.io",
"java.util",
"org.eclipse.jetty"
] |
java.io; java.util; org.eclipse.jetty;
| 2,537,040
|
public UnaryCallSettings.Builder<
MutateConversionGoalCampaignConfigsRequest, MutateConversionGoalCampaignConfigsResponse>
mutateConversionGoalCampaignConfigsSettings() {
return getStubSettingsBuilder().mutateConversionGoalCampaignConfigsSettings();
}
|
UnaryCallSettings.Builder< MutateConversionGoalCampaignConfigsRequest, MutateConversionGoalCampaignConfigsResponse> function() { return getStubSettingsBuilder().mutateConversionGoalCampaignConfigsSettings(); }
|
/**
* Returns the builder for the settings used for calls to mutateConversionGoalCampaignConfigs.
*/
|
Returns the builder for the settings used for calls to mutateConversionGoalCampaignConfigs
|
mutateConversionGoalCampaignConfigsSettings
|
{
"repo_name": "googleads/google-ads-java",
"path": "google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/services/ConversionGoalCampaignConfigServiceSettings.java",
"license": "apache-2.0",
"size": 8115
}
|
[
"com.google.api.gax.rpc.UnaryCallSettings"
] |
import com.google.api.gax.rpc.UnaryCallSettings;
|
import com.google.api.gax.rpc.*;
|
[
"com.google.api"
] |
com.google.api;
| 709,649
|
public void setCreateDate(Date createDate) { this.createDate = DateUtils.cloneDate(createDate); }
|
public void setCreateDate(Date createDate) { this.createDate = DateUtils.cloneDate(createDate); }
|
/**
* Creation date.
*/
|
Creation date
|
getCreateDate
|
{
"repo_name": "shred/cilla",
"path": "cilla-core/src/main/java/org/shredzone/cilla/core/model/Picture.java",
"license": "agpl-3.0",
"size": 6776
}
|
[
"java.util.Date",
"org.shredzone.cilla.core.util.DateUtils"
] |
import java.util.Date; import org.shredzone.cilla.core.util.DateUtils;
|
import java.util.*; import org.shredzone.cilla.core.util.*;
|
[
"java.util",
"org.shredzone.cilla"
] |
java.util; org.shredzone.cilla;
| 1,328,050
|
public static <T> T extractWrite(final DeviceAttribute da, final AttrDataFormat format,
final AttrWriteType writeType, final Class<T> type) throws DevFailed {
if (da == null) {
throw DevFailedUtils.newDevFailed(ERROR_MSG_DA);
}
return TypeConversionUtil.castToType(type, extractWrite(da, writeType, format));
}
|
static <T> T function(final DeviceAttribute da, final AttrDataFormat format, final AttrWriteType writeType, final Class<T> type) throws DevFailed { if (da == null) { throw DevFailedUtils.newDevFailed(ERROR_MSG_DA); } return TypeConversionUtil.castToType(type, extractWrite(da, writeType, format)); }
|
/**
* Extract write part values to an object for SCALAR, SPECTRUM and IMAGE to the requested type
*
* @param <T>
* @param da
* @param type
* the output type (e.g. double.class for SCALAR, double[].class for SPECTRUM, double[][].class for
* IMAGE)
* @return single value for SCALAR, array of primitives for SPECTRUM, 2D array of primitives for IMAGE
* @throws DevFailed
*/
|
Extract write part values to an object for SCALAR, SPECTRUM and IMAGE to the requested type
|
extractWrite
|
{
"repo_name": "tango-controls/JTango",
"path": "client/src/main/java/fr/soleil/tango/clientapi/InsertExtractUtils.java",
"license": "lgpl-3.0",
"size": 8344
}
|
[
"fr.esrf.Tango",
"fr.esrf.TangoApi",
"fr.soleil.tango.clientapi.util.TypeConversionUtil",
"org.tango.utils.DevFailedUtils"
] |
import fr.esrf.Tango; import fr.esrf.TangoApi; import fr.soleil.tango.clientapi.util.TypeConversionUtil; import org.tango.utils.DevFailedUtils;
|
import fr.esrf.*; import fr.soleil.tango.clientapi.util.*; import org.tango.utils.*;
|
[
"fr.esrf",
"fr.soleil.tango",
"org.tango.utils"
] |
fr.esrf; fr.soleil.tango; org.tango.utils;
| 1,942,351
|
private void discoverAttributeFields(Map<String, JmxAttributeFieldInfo> attributeFieldInfoMap,
Set<String> attributeNameSet, List<MBeanAttributeInfo> attributes) {
for (Class<?> clazz = target.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) {
discoverAttributeFields(attributeFieldInfoMap, attributeNameSet, attributes, clazz);
}
}
|
void function(Map<String, JmxAttributeFieldInfo> attributeFieldInfoMap, Set<String> attributeNameSet, List<MBeanAttributeInfo> attributes) { for (Class<?> clazz = target.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) { discoverAttributeFields(attributeFieldInfoMap, attributeNameSet, attributes, clazz); } }
|
/**
* Find attribute methods from our object that will be exposed via JMX.
*/
|
Find attribute methods from our object that will be exposed via JMX
|
discoverAttributeFields
|
{
"repo_name": "j256/simplejmx",
"path": "src/main/java/com/j256/simplejmx/server/ReflectionMbean.java",
"license": "isc",
"size": 22412
}
|
[
"com.j256.simplejmx.common.JmxAttributeFieldInfo",
"java.util.List",
"java.util.Map",
"java.util.Set",
"javax.management.MBeanAttributeInfo"
] |
import com.j256.simplejmx.common.JmxAttributeFieldInfo; import java.util.List; import java.util.Map; import java.util.Set; import javax.management.MBeanAttributeInfo;
|
import com.j256.simplejmx.common.*; import java.util.*; import javax.management.*;
|
[
"com.j256.simplejmx",
"java.util",
"javax.management"
] |
com.j256.simplejmx; java.util; javax.management;
| 2,561,165
|
void removeAllHeaders();
/**
* Returns all headers previously added with {@link #addHeader(String name, String value)}.
*
* @return {@code Map<header name, header value>}
|
void removeAllHeaders(); /** * Returns all headers previously added with {@link #addHeader(String name, String value)}. * * @return {@code Map<header name, header value>}
|
/**
* Removes all headers previously added with {@link #addHeader(String name, String value)}.
*/
|
Removes all headers previously added with <code>#addHeader(String name, String value)</code>
|
removeAllHeaders
|
{
"repo_name": "bltb/browsermob-proxy",
"path": "browsermob-core/src/main/java/net/lightbody/bmp/BrowserMobProxy.java",
"license": "apache-2.0",
"size": 30419
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,228,953
|
public IWsDiscoveryServiceDirectory getLocalServices();
|
IWsDiscoveryServiceDirectory function();
|
/**
* Get local services.
* @return Service directory containing local services.
*/
|
Get local services
|
getLocalServices
|
{
"repo_name": "nateridderman/java-ws-discovery",
"path": "wsdiscovery-lib/src/main/java/com/ms/wsdiscovery/interfaces/IWsDiscoveryServer.java",
"license": "lgpl-3.0",
"size": 8903
}
|
[
"com.ms.wsdiscovery.servicedirectory.interfaces.IWsDiscoveryServiceDirectory"
] |
import com.ms.wsdiscovery.servicedirectory.interfaces.IWsDiscoveryServiceDirectory;
|
import com.ms.wsdiscovery.servicedirectory.interfaces.*;
|
[
"com.ms.wsdiscovery"
] |
com.ms.wsdiscovery;
| 2,155,766
|
private void setCatalinaBase() {
if (System.getProperty(Globals.CATALINA_BASE_PROP) != null)
return;
if (System.getProperty(Globals.CATALINA_HOME_PROP) != null)
System.setProperty(Globals.CATALINA_BASE_PROP,
System.getProperty(Globals.CATALINA_HOME_PROP));
else
System.setProperty(Globals.CATALINA_BASE_PROP,
System.getProperty("user.dir"));
}
|
void function() { if (System.getProperty(Globals.CATALINA_BASE_PROP) != null) return; if (System.getProperty(Globals.CATALINA_HOME_PROP) != null) System.setProperty(Globals.CATALINA_BASE_PROP, System.getProperty(Globals.CATALINA_HOME_PROP)); else System.setProperty(Globals.CATALINA_BASE_PROP, System.getProperty(STR)); }
|
/**
* Set the <code>catalina.base</code> System property to the current
* working directory if it has not been set.
*/
|
Set the <code>catalina.base</code> System property to the current working directory if it has not been set
|
setCatalinaBase
|
{
"repo_name": "mayonghui2112/helloWorld",
"path": "sourceCode/apache-tomcat-7.0.82-src/java/org/apache/catalina/startup/Bootstrap.java",
"license": "apache-2.0",
"size": 16682
}
|
[
"org.apache.catalina.Globals"
] |
import org.apache.catalina.Globals;
|
import org.apache.catalina.*;
|
[
"org.apache.catalina"
] |
org.apache.catalina;
| 1,085,884
|
private void updateSlider() {
// update slider
VideoClip clip = getVideoClip();
slider.setMinimum(Math.max(0, -clip.getFrameShift()));
slider.setMaximum(slider.getMinimum()+clip.getFrameCount()-1);
sliderLabels.clear();
sliderLabels.put(new Integer(clip.getStartFrameNumber()), inLabel);
sliderLabels.put(new Integer(clip.getEndFrameNumber()), outLabel);
slider.repaint();
}
protected class PlayerButton extends JButton {
Icon onIcon, offIcon;
|
void function() { VideoClip clip = getVideoClip(); slider.setMinimum(Math.max(0, -clip.getFrameShift())); slider.setMaximum(slider.getMinimum()+clip.getFrameCount()-1); sliderLabels.clear(); sliderLabels.put(new Integer(clip.getStartFrameNumber()), inLabel); sliderLabels.put(new Integer(clip.getEndFrameNumber()), outLabel); slider.repaint(); } protected class PlayerButton extends JButton { Icon onIcon, offIcon;
|
/**
* Updates the slider based on the current in and out points.
*/
|
Updates the slider based on the current in and out points
|
updateSlider
|
{
"repo_name": "dobrown/tracker-mvn",
"path": "src/main/java/org/opensourcephysics/media/core/VideoPlayer.java",
"license": "gpl-3.0",
"size": 55090
}
|
[
"javax.swing.Icon",
"javax.swing.JButton"
] |
import javax.swing.Icon; import javax.swing.JButton;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 881,222
|
public void setAttackTimerValue(int attackTimerValue) throws JNCException {
setAttackTimerValue(new YangUInt16(attackTimerValue));
}
|
void function(int attackTimerValue) throws JNCException { setAttackTimerValue(new YangUInt16(attackTimerValue)); }
|
/**
* Sets the value for child leaf "attack-timer",
* using Java primitive values.
* @param attackTimerValue used during instantiation.
*/
|
Sets the value for child leaf "attack-timer", using Java primitive values
|
setAttackTimerValue
|
{
"repo_name": "jnpr-shinma/yangfile",
"path": "hitel/src/hctaEpc/mmeSgsn/interface_/ss7/MmeSccpProfile.java",
"license": "apache-2.0",
"size": 19551
}
|
[
"com.tailf.jnc.YangUInt16"
] |
import com.tailf.jnc.YangUInt16;
|
import com.tailf.jnc.*;
|
[
"com.tailf.jnc"
] |
com.tailf.jnc;
| 1,663,699
|
public static void setEnchantments(Map par0Map, ItemStack par1ItemStack)
{
NBTTagList nbttaglist = new NBTTagList();
Iterator iterator = par0Map.keySet().iterator();
while (iterator.hasNext())
{
int i = ((Integer)iterator.next()).intValue();
NBTTagCompound nbttagcompound = new NBTTagCompound();
nbttagcompound.setShort("id", (short)i);
nbttagcompound.setShort("lvl", (short)((Integer)par0Map.get(Integer.valueOf(i))).intValue());
nbttaglist.appendTag(nbttagcompound);
if (par1ItemStack.itemID == Item.enchantedBook.itemID)
{
Item.enchantedBook.addEnchantment(par1ItemStack, new EnchantmentData(i, ((Integer)par0Map.get(Integer.valueOf(i))).intValue()));
}
}
if (nbttaglist.tagCount() > 0)
{
if (par1ItemStack.itemID != Item.enchantedBook.itemID)
{
par1ItemStack.setTagInfo("ench", nbttaglist);
}
}
else if (par1ItemStack.hasTagCompound())
{
par1ItemStack.getTagCompound().removeTag("ench");
}
}
|
static void function(Map par0Map, ItemStack par1ItemStack) { NBTTagList nbttaglist = new NBTTagList(); Iterator iterator = par0Map.keySet().iterator(); while (iterator.hasNext()) { int i = ((Integer)iterator.next()).intValue(); NBTTagCompound nbttagcompound = new NBTTagCompound(); nbttagcompound.setShort("id", (short)i); nbttagcompound.setShort("lvl", (short)((Integer)par0Map.get(Integer.valueOf(i))).intValue()); nbttaglist.appendTag(nbttagcompound); if (par1ItemStack.itemID == Item.enchantedBook.itemID) { Item.enchantedBook.addEnchantment(par1ItemStack, new EnchantmentData(i, ((Integer)par0Map.get(Integer.valueOf(i))).intValue())); } } if (nbttaglist.tagCount() > 0) { if (par1ItemStack.itemID != Item.enchantedBook.itemID) { par1ItemStack.setTagInfo("ench", nbttaglist); } } else if (par1ItemStack.hasTagCompound()) { par1ItemStack.getTagCompound().removeTag("ench"); } }
|
/**
* Set the enchantments for the specified stack.
*/
|
Set the enchantments for the specified stack
|
setEnchantments
|
{
"repo_name": "HATB0T/RuneCraftery",
"path": "forge/mcp/src/minecraft/net/minecraft/enchantment/EnchantmentHelper.java",
"license": "lgpl-3.0",
"size": 16957
}
|
[
"java.util.Iterator",
"java.util.Map",
"net.minecraft.item.Item",
"net.minecraft.item.ItemStack",
"net.minecraft.nbt.NBTTagCompound",
"net.minecraft.nbt.NBTTagList"
] |
import java.util.Iterator; import java.util.Map; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList;
|
import java.util.*; import net.minecraft.item.*; import net.minecraft.nbt.*;
|
[
"java.util",
"net.minecraft.item",
"net.minecraft.nbt"
] |
java.util; net.minecraft.item; net.minecraft.nbt;
| 368,237
|
public CompareResult<InetAddress> compareDnses(LinkProperties target) {
CompareResult<InetAddress> result = new CompareResult<InetAddress>();
result.removed = new ArrayList<InetAddress>(mDnses);
result.added.clear();
if (target != null) {
for (InetAddress newAddress : target.getDnses()) {
if (!result.removed.remove(newAddress)) {
result.added.add(newAddress);
}
}
}
return result;
}
|
CompareResult<InetAddress> function(LinkProperties target) { CompareResult<InetAddress> result = new CompareResult<InetAddress>(); result.removed = new ArrayList<InetAddress>(mDnses); result.added.clear(); if (target != null) { for (InetAddress newAddress : target.getDnses()) { if (!result.removed.remove(newAddress)) { result.added.add(newAddress); } } } return result; }
|
/**
* Return two lists, a list of dns addresses that would be removed from
* mDnses and a list of addresses that would be added to
* mDnses which would then result in target and mDnses
* being the same list.
*
* @param target is a LinkProperties with the new list of dns addresses
* @return the removed and added lists.
*/
|
Return two lists, a list of dns addresses that would be removed from mDnses and a list of addresses that would be added to mDnses which would then result in target and mDnses being the same list
|
compareDnses
|
{
"repo_name": "AdeebNqo/Thula",
"path": "Thula/src/main/java/android/net/LinkProperties.java",
"license": "gpl-3.0",
"size": 15611
}
|
[
"java.net.InetAddress",
"java.util.ArrayList"
] |
import java.net.InetAddress; import java.util.ArrayList;
|
import java.net.*; import java.util.*;
|
[
"java.net",
"java.util"
] |
java.net; java.util;
| 1,251,590
|
public void testCreateInvitation() throws Exception
{
String shortName = GUID.generate();
createSite("myPreset", shortName, "myTitle", "myDescription", SiteVisibility.PUBLIC, 200);
String inviteComments = "Please sir, let me in";
String userName = USER_TWO;
String roleName = SiteModel.SITE_CONSUMER;
String inviteId = null;
{
JSONObject newInvitation = new JSONObject();
newInvitation.put("invitationType", "Grundge");
newInvitation.put("inviteeRoleName", roleName);
newInvitation.put("inviteeComments", inviteComments);
newInvitation.put("inviteeUserName", userName);
sendRequest(new PostRequest(URL_SITES + "/" + shortName + "/invitations", newInvitation.toString(), "application/json"), Status.STATUS_BAD_REQUEST);
}
{
JSONObject newInvitation = new JSONObject();
newInvitation.put("inviteeRoleName", roleName);
newInvitation.put("inviteeComments", inviteComments);
newInvitation.put("inviteeUserName", userName);
sendRequest(new PostRequest(URL_SITES + "/" + shortName + "/invitations", newInvitation.toString(), "application/json"), Status.STATUS_BAD_REQUEST);
}
{
JSONObject newInvitation = new JSONObject();
newInvitation.put("invitationType", "MODERATED");
newInvitation.put("inviteeRoleName", "");
newInvitation.put("inviteeComments", inviteComments);
newInvitation.put("inviteeUserName", userName);
sendRequest(new PostRequest(URL_SITES + "/" + shortName + "/invitations", newInvitation.toString(), "application/json"), Status.STATUS_BAD_REQUEST);
}
JSONObject newInvitation = new JSONObject();
{
newInvitation.put("invitationType", "MODERATED");
newInvitation.put("inviteeRoleName", roleName);
newInvitation.put("inviteeComments", inviteComments);
newInvitation.put("inviteeUserName", userName);
Response response = sendRequest(new PostRequest(URL_SITES + "/" + shortName + "/invitations", newInvitation.toString(), "application/json"), Status.STATUS_CREATED);
JSONObject top = new JSONObject(response.getContentAsString());
JSONObject data = top.getJSONObject("data");
inviteId = data.getString("inviteId");
assertEquals("invitationType", "MODERATED", data.getString("invitationType"));
assertEquals("inviteeUserName is not set", userName, data.getString("inviteeUserName"));
assertEquals("resourceName is not correct", shortName, data.getString("resourceName"));
assertEquals("resourceType is not correct", "WEB_SITE", data.getString("resourceType"));
}
assertNotNull("inviteId is null", inviteId);
assertTrue("inviteId is too small", inviteId.length() > 0);
}
|
void function() throws Exception { String shortName = GUID.generate(); createSite(STR, shortName, STR, STR, SiteVisibility.PUBLIC, 200); String inviteComments = STR; String userName = USER_TWO; String roleName = SiteModel.SITE_CONSUMER; String inviteId = null; { JSONObject newInvitation = new JSONObject(); newInvitation.put(STR, STR); newInvitation.put(STR, roleName); newInvitation.put(STR, inviteComments); newInvitation.put(STR, userName); sendRequest(new PostRequest(URL_SITES + "/" + shortName + STR, newInvitation.toString(), STR), Status.STATUS_BAD_REQUEST); } { JSONObject newInvitation = new JSONObject(); newInvitation.put(STR, roleName); newInvitation.put(STR, inviteComments); newInvitation.put(STR, userName); sendRequest(new PostRequest(URL_SITES + "/" + shortName + STR, newInvitation.toString(), STR), Status.STATUS_BAD_REQUEST); } { JSONObject newInvitation = new JSONObject(); newInvitation.put(STR, STR); newInvitation.put(STR, ""); newInvitation.put(STR, inviteComments); newInvitation.put(STR, userName); sendRequest(new PostRequest(URL_SITES + "/" + shortName + STR, newInvitation.toString(), STR), Status.STATUS_BAD_REQUEST); } JSONObject newInvitation = new JSONObject(); { newInvitation.put(STR, STR); newInvitation.put(STR, roleName); newInvitation.put(STR, inviteComments); newInvitation.put(STR, userName); Response response = sendRequest(new PostRequest(URL_SITES + "/" + shortName + STR, newInvitation.toString(), STR), Status.STATUS_CREATED); JSONObject top = new JSONObject(response.getContentAsString()); JSONObject data = top.getJSONObject("dataSTRinviteId"); assertEquals(STR, STR, data.getString(STR)); assertEquals("inviteeUserName is not set", userName, data.getString(STR)); assertEquals("resourceName is not correctSTRresourceNameSTRresourceType is not correctSTRWEB_SITESTRresourceTypeSTRinviteId is nullSTRinviteId is too small", inviteId.length() > 0); }
|
/**
* Detailed test of Create Invitation Web Script
*
* Create Nominated Invitation
*
* Create Moderated Invitation
*
* @throws Exception
*/
|
Detailed test of Create Invitation Web Script Create Nominated Invitation Create Moderated Invitation
|
testCreateInvitation
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/remote-api/source/test-java/org/alfresco/repo/web/scripts/site/SiteServiceTest.java",
"license": "lgpl-3.0",
"size": 69469
}
|
[
"org.alfresco.repo.site.SiteModel",
"org.alfresco.service.cmr.site.SiteVisibility",
"org.alfresco.util.GUID",
"org.json.JSONObject",
"org.springframework.extensions.webscripts.Status",
"org.springframework.extensions.webscripts.TestWebScriptServer"
] |
import org.alfresco.repo.site.SiteModel; import org.alfresco.service.cmr.site.SiteVisibility; import org.alfresco.util.GUID; import org.json.JSONObject; import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.TestWebScriptServer;
|
import org.alfresco.repo.site.*; import org.alfresco.service.cmr.site.*; import org.alfresco.util.*; import org.json.*; import org.springframework.extensions.webscripts.*;
|
[
"org.alfresco.repo",
"org.alfresco.service",
"org.alfresco.util",
"org.json",
"org.springframework.extensions"
] |
org.alfresco.repo; org.alfresco.service; org.alfresco.util; org.json; org.springframework.extensions;
| 487,483
|
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
|
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
|
/**
* Handles the HTTP <code>GET</code> method.
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
|
Handles the HTTP <code>GET</code> method
|
doGet
|
{
"repo_name": "NicolasEYSSERIC/Silverpeas-Core",
"path": "war-core/src/main/java/com/silverpeas/attachment/servlets/LaunchWebdavEdition.java",
"license": "agpl-3.0",
"size": 9022
}
|
[
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] |
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
|
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
|
[
"java.io",
"javax.servlet"
] |
java.io; javax.servlet;
| 81,318
|
synchronized public void logDataObjectTransition(DataObject dataObjectInstance, ObjectLifecycleState newObjectLifecycleState) {
DataObjectLog dataObjectLog = new DataObjectLog(dataObjectInstance, dataObjectInstance.getObjectLifecycleState(), newObjectLifecycleState);
dataObjectLogs.add(dataObjectLog);
// TODO: think about necessary
sortLogs(dataObjectLogs);
}
|
synchronized void function(DataObject dataObjectInstance, ObjectLifecycleState newObjectLifecycleState) { DataObjectLog dataObjectLog = new DataObjectLog(dataObjectInstance, dataObjectInstance.getObjectLifecycleState(), newObjectLifecycleState); dataObjectLogs.add(dataObjectLog); sortLogs(dataObjectLogs); }
|
/**
* Log the transition of an {@link ObjectLifecycleState} of an
* {@link DataObject}.
*
* @param dataObjectInstance
* - that shall be logged
* @param newObjectLifecycleState
* - of the DataObject
*/
|
Log the transition of an <code>ObjectLifecycleState</code> of an <code>DataObject</code>
|
logDataObjectTransition
|
{
"repo_name": "bptlab/JEngine",
"path": "src/main/java/de/hpi/bpt/chimera/execution/CaseExecutioner.java",
"license": "mit",
"size": 22339
}
|
[
"de.hpi.bpt.chimera.execution.data.DataObject",
"de.hpi.bpt.chimera.model.datamodel.ObjectLifecycleState",
"de.hpi.bpt.chimera.rest.beans.history.DataObjectLog"
] |
import de.hpi.bpt.chimera.execution.data.DataObject; import de.hpi.bpt.chimera.model.datamodel.ObjectLifecycleState; import de.hpi.bpt.chimera.rest.beans.history.DataObjectLog;
|
import de.hpi.bpt.chimera.execution.data.*; import de.hpi.bpt.chimera.model.datamodel.*; import de.hpi.bpt.chimera.rest.beans.history.*;
|
[
"de.hpi.bpt"
] |
de.hpi.bpt;
| 2,131,391
|
@Test
public void testGetAllForStoragePoolWithInvalidPool() {
List<StorageDomainStatic> result = dao
.getAllForStoragePool(Guid.newGuid());
assertNotNull(result);
assertTrue(result.isEmpty());
}
|
void function() { List<StorageDomainStatic> result = dao .getAllForStoragePool(Guid.newGuid()); assertNotNull(result); assertTrue(result.isEmpty()); }
|
/**
* Ensures an empty collection is returned.
*/
|
Ensures an empty collection is returned
|
testGetAllForStoragePoolWithInvalidPool
|
{
"repo_name": "halober/ovirt-engine",
"path": "backend/manager/modules/dal/src/test/java/org/ovirt/engine/core/dao/StorageDomainStaticDAOTest.java",
"license": "apache-2.0",
"size": 6120
}
|
[
"java.util.List",
"org.junit.Assert",
"org.ovirt.engine.core.common.businessentities.StorageDomainStatic",
"org.ovirt.engine.core.compat.Guid"
] |
import java.util.List; import org.junit.Assert; import org.ovirt.engine.core.common.businessentities.StorageDomainStatic; import org.ovirt.engine.core.compat.Guid;
|
import java.util.*; import org.junit.*; import org.ovirt.engine.core.common.businessentities.*; import org.ovirt.engine.core.compat.*;
|
[
"java.util",
"org.junit",
"org.ovirt.engine"
] |
java.util; org.junit; org.ovirt.engine;
| 343,886
|
public void generateLayoutsForNodes(Set<DataSet> datasets, ImageNodeLayout layout ,TaskMonitor monitor) throws TooManyItemsException, ContainerUnplaceableExcpetion, DimensionMismatchException, WrongDatasetTypeException
{
//create a new layout
//AutomaticNodeLayout newLayout = new AutomaticNodeLayout();
//produce the Layout, this is where errors are likely to raise.
//newLayout.generateLayoutForDataSets(datasets);
//get all Nodes, that need to be updated
layout.doLayout();
//keep track of the layouts.
if(!layoutCounts.containsKey(layout))
{
layoutCounts.put(layout, 0);
}
Set<String> NodeIDs = new HashSet<String>();
for(DataSet set : datasets)
{
NodeIDs.addAll(set.getNodeIDs());
}
//add the new layout to all those nodes.
for(String id : NodeIDs)
{
PrintFDebugger.Debugging(this, "Assigning layout to node " + id);
//remove an active layout from its association.
if(activeLayouts.containsKey(id))
{
ImageNodeLayout oldlayout = activeLayouts.get(id);
Integer oldcount = layoutCounts.get(oldlayout) -1;
if(oldcount == 0)
{
layoutCounts.remove(oldlayout);
dsm.removeDataSetAboutToBeChangedListener(oldlayout);
}
else
{
layoutCounts.put(oldlayout, oldcount);
}
}
activeLayouts.put(id,layout);
layoutCounts.put(layout, layoutCounts.get(layout) + 1);
}
monitor.setProgress(0.2);
monitor.setStatusMessage("Updating Image Nodes");
// if everything went fine, register the layout.
dsm.addDataSetAboutToBeChangedListener(layout);
fireNodesChanged(NodeIDs);
}
|
void function(Set<DataSet> datasets, ImageNodeLayout layout ,TaskMonitor monitor) throws TooManyItemsException, ContainerUnplaceableExcpetion, DimensionMismatchException, WrongDatasetTypeException { layout.doLayout(); if(!layoutCounts.containsKey(layout)) { layoutCounts.put(layout, 0); } Set<String> NodeIDs = new HashSet<String>(); for(DataSet set : datasets) { NodeIDs.addAll(set.getNodeIDs()); } for(String id : NodeIDs) { PrintFDebugger.Debugging(this, STR + id); if(activeLayouts.containsKey(id)) { ImageNodeLayout oldlayout = activeLayouts.get(id); Integer oldcount = layoutCounts.get(oldlayout) -1; if(oldcount == 0) { layoutCounts.remove(oldlayout); dsm.removeDataSetAboutToBeChangedListener(oldlayout); } else { layoutCounts.put(oldlayout, oldcount); } } activeLayouts.put(id,layout); layoutCounts.put(layout, layoutCounts.get(layout) + 1); } monitor.setProgress(0.2); monitor.setStatusMessage(STR); dsm.addDataSetAboutToBeChangedListener(layout); fireNodesChanged(NodeIDs); }
|
/**
* Generate Layouts for All nodes in the {@link DataSet}s present in the {@link DataSetLayoutInfoBundle}s provided.
* This version is to be used if the created nodes are created by a {@link Task}.
* @param datasets The datasets to create nodes for
* @param layout the layout to use for the nodes
* @param monitor The TaskMonitor that keeps track of progress.
* @throws TooManyItemsException if there are too many items
* @throws ContainerUnplaceableExcpetion if a container cannot be placed
* @throws DimensionMismatchException if the dimensions dont fit.
* @throws WrongDatasetTypeException if a dataset cannot be layout out with the layout.
*/
|
Generate Layouts for All nodes in the <code>DataSet</code>s present in the <code>DataSetLayoutInfoBundle</code>s provided. This version is to be used if the created nodes are created by a <code>Task</code>
|
generateLayoutsForNodes
|
{
"repo_name": "sysbiolux/IDARE",
"path": "METANODE-CREATOR/src/main/java/idare/imagenode/internal/DataManagement/NodeManager.java",
"license": "lgpl-3.0",
"size": 20797
}
|
[
"java.util.HashSet",
"java.util.Set",
"org.cytoscape.work.TaskMonitor"
] |
import java.util.HashSet; import java.util.Set; import org.cytoscape.work.TaskMonitor;
|
import java.util.*; import org.cytoscape.work.*;
|
[
"java.util",
"org.cytoscape.work"
] |
java.util; org.cytoscape.work;
| 2,254,441
|
protected void addEObjectPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_Mapping_eObject_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_Mapping_eObject_feature", "_UI_Mapping_type"),
PickerbatchPackage.Literals.MAPPING__EOBJECT,
true,
false,
true,
null,
null,
null));
}
|
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), PickerbatchPackage.Literals.MAPPING__EOBJECT, true, false, true, null, null, null)); }
|
/**
* This adds a property descriptor for the EObject feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds a property descriptor for the EObject feature.
|
addEObjectPropertyDescriptor
|
{
"repo_name": "TristanFAURE/pickerExplorer",
"path": "plugins/org.topcased.pickerexplorer.batch/src/org/topcased/pickerexplorer/batch/pickerbatch/provider/MappingItemProvider.java",
"license": "epl-1.0",
"size": 6132
}
|
[
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.topcased.pickerexplorer.batch.pickerbatch.PickerbatchPackage"
] |
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.topcased.pickerexplorer.batch.pickerbatch.PickerbatchPackage;
|
import org.eclipse.emf.edit.provider.*; import org.topcased.pickerexplorer.batch.pickerbatch.*;
|
[
"org.eclipse.emf",
"org.topcased.pickerexplorer"
] |
org.eclipse.emf; org.topcased.pickerexplorer;
| 2,028,180
|
private void initColumnMaps() {
this.groupColumnMap = new HashMap<Integer,Integer>();
Cursor folderCursor = folderCursorHelper.getCursor();
this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.FOLDER_NAME), R.id.row_foldername);
this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.SUM_POS), R.id.row_foldersumpos);
this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.SUM_NEUT), R.id.row_foldersumneu);
this.blogColumnMap = new HashMap<Integer,Integer>();
Cursor blogCursor = blogCursorHelper.getCursor();
this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_TITLE), R.id.row_socialfeed_name);
this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_ICON), R.id.row_socialfeed_icon);
this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_NEUTRAL_COUNT), R.id.row_socialsumneu);
this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_POSITIVE_COUNT), R.id.row_socialsumpos);
// child cursors are lazily initialized. temporarily try to init the first one and use it, as
// all of them have the same column layout. If there is not first folder, there is nothing we
// can do yet. Leave the map null and we'll lazily init it later when the DB is up and going.
if (folderCursor.moveToPosition(0)) {
this.childColumnMap = new HashMap<Integer,Integer>();
Cursor childCursor = getChildrenCursor(folderCursor);
this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_TITLE), R.id.row_feedname);
this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_FAVICON_URL), R.id.row_feedfavicon);
this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_NEUTRAL_COUNT), R.id.row_feedneutral);
this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_POSITIVE_COUNT), R.id.row_feedpositive);
// close the temp cursor
childCursor.close();
} else {
Log.w(this.getClass().getName(), "deferring init. of column mappings for child views");
}
}
|
void function() { this.groupColumnMap = new HashMap<Integer,Integer>(); Cursor folderCursor = folderCursorHelper.getCursor(); this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.FOLDER_NAME), R.id.row_foldername); this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.SUM_POS), R.id.row_foldersumpos); this.groupColumnMap.put(folderCursor.getColumnIndexOrThrow(DatabaseConstants.SUM_NEUT), R.id.row_foldersumneu); this.blogColumnMap = new HashMap<Integer,Integer>(); Cursor blogCursor = blogCursorHelper.getCursor(); this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_TITLE), R.id.row_socialfeed_name); this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_ICON), R.id.row_socialfeed_icon); this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_NEUTRAL_COUNT), R.id.row_socialsumneu); this.blogColumnMap.put(blogCursor.getColumnIndexOrThrow(DatabaseConstants.SOCIAL_FEED_POSITIVE_COUNT), R.id.row_socialsumpos); if (folderCursor.moveToPosition(0)) { this.childColumnMap = new HashMap<Integer,Integer>(); Cursor childCursor = getChildrenCursor(folderCursor); this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_TITLE), R.id.row_feedname); this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_FAVICON_URL), R.id.row_feedfavicon); this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_NEUTRAL_COUNT), R.id.row_feedneutral); this.childColumnMap.put(childCursor.getColumnIndexOrThrow(DatabaseConstants.FEED_POSITIVE_COUNT), R.id.row_feedpositive); childCursor.close(); } else { Log.w(this.getClass().getName(), STR); } }
|
/**
* Load and store mappings from runtime DB column indicies to resource IDs needed by this class.
*
* TODO: this whole business with the mappings has a smell to it - figure out why.
*/
|
Load and store mappings from runtime DB column indicies to resource IDs needed by this class
|
initColumnMaps
|
{
"repo_name": "huihoo/reader",
"path": "media/android/NewsBlur/src/com/newsblur/database/MixedExpandableListAdapter.java",
"license": "mit",
"size": 18925
}
|
[
"android.database.Cursor",
"android.util.Log",
"java.util.HashMap"
] |
import android.database.Cursor; import android.util.Log; import java.util.HashMap;
|
import android.database.*; import android.util.*; import java.util.*;
|
[
"android.database",
"android.util",
"java.util"
] |
android.database; android.util; java.util;
| 2,368,260
|
void addIdEqualsToQuery(Parameters parameters, Object id, String prefix, boolean equals);
|
void addIdEqualsToQuery(Parameters parameters, Object id, String prefix, boolean equals);
|
/**
* Adds query statements, which contains restrictions, which express the property that the id of the entity
* with alias prefix, is equal to the given object.
* @param parameters Parameters, to which to add the statements.
* @param id Value of id.
* @param prefix Prefix to add to the properties (may be null).
* @param equals Should this query express the "=" relation or the "<>" relation.
*/
|
Adds query statements, which contains restrictions, which express the property that the id of the entity with alias prefix, is equal to the given object
|
addIdEqualsToQuery
|
{
"repo_name": "ControlSystemStudio/cs-studio",
"path": "thirdparty/plugins/org.csstudio.platform.libs.hibernate/project/envers/src/main/java/org/hibernate/envers/entities/mapper/id/IdMapper.java",
"license": "epl-1.0",
"size": 4675
}
|
[
"org.hibernate.envers.tools.query.Parameters"
] |
import org.hibernate.envers.tools.query.Parameters;
|
import org.hibernate.envers.tools.query.*;
|
[
"org.hibernate.envers"
] |
org.hibernate.envers;
| 980,719
|
@Generated
@Selector("toolTip")
public native String toolTip();
|
@Selector(STR) native String function();
|
/**
* Assigning a value to this property causes the tool tip to be displayed for the view. Setting the property to nil cancels the display of the tool tip for the view.
*/
|
Assigning a value to this property causes the tool tip to be displayed for the view. Setting the property to nil cancels the display of the tool tip for the view
|
toolTip
|
{
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/uikit/UIControl.java",
"license": "apache-2.0",
"size": 32314
}
|
[
"org.moe.natj.objc.ann.Selector"
] |
import org.moe.natj.objc.ann.Selector;
|
import org.moe.natj.objc.ann.*;
|
[
"org.moe.natj"
] |
org.moe.natj;
| 972,422
|
public void testVelociMacroCallMax()
throws Exception
{
VelocityContext context = new VelocityContext();
StringWriter writer = new StringWriter();
try
{
Velocity.evaluate(context, writer, "vm_chain2", template2);
fail("Did not exceed max macro call depth as expected");
}
catch (MacroOverflowException e)
{
assertEquals("Max calling depth of 5 was exceeded in macro 'bar'"+
" with Call Stack:bar->bar->bar->bar->bar at vm_chain2[line 1, column 15]",
e.getMessage());
}
try
{
Velocity.evaluate(context, writer, "vm_chain3", template3);
fail("Did not exceed max macro call depth as expected");
}
catch (MacroOverflowException e)
{
assertEquals("Max calling depth of 5 was exceeded in macro 'inner'"+
" with Call Stack:baz->inner->baz->inner->baz at vm_chain3[line 1, column 64]",
e.getMessage());
}
try
{
Velocity.evaluate(context, writer, "vm_chain4", template4);
fail("Did not exceed max macro call depth as expected");
}
catch (MacroOverflowException e)
{
assertEquals("Max calling depth of 5 was exceeded in macro 'loop'"+
" with Call Stack:bad->inside->loop->bad->inside at vm_chain4[line 1, column 94]",
e.getMessage());
}
}
|
void function() throws Exception { VelocityContext context = new VelocityContext(); StringWriter writer = new StringWriter(); try { Velocity.evaluate(context, writer, STR, template2); fail(STR); } catch (MacroOverflowException e) { assertEquals(STR+ STR, e.getMessage()); } try { Velocity.evaluate(context, writer, STR, template3); fail(STR); } catch (MacroOverflowException e) { assertEquals(STR+ STR, e.getMessage()); } try { Velocity.evaluate(context, writer, STR, template4); fail(STR); } catch (MacroOverflowException e) { assertEquals(STR+ STR, e.getMessage()); } }
|
/**
* Test case for evaluating max calling depths of macros
*/
|
Test case for evaluating max calling depths of macros
|
testVelociMacroCallMax
|
{
"repo_name": "VISTALL/apache.velocity-engine",
"path": "velocity-engine-core/src/test/java/org/apache/velocity/test/VelocimacroTestCase.java",
"license": "apache-2.0",
"size": 4819
}
|
[
"java.io.StringWriter",
"org.apache.velocity.VelocityContext",
"org.apache.velocity.app.Velocity",
"org.apache.velocity.exception.MacroOverflowException"
] |
import java.io.StringWriter; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.Velocity; import org.apache.velocity.exception.MacroOverflowException;
|
import java.io.*; import org.apache.velocity.*; import org.apache.velocity.app.*; import org.apache.velocity.exception.*;
|
[
"java.io",
"org.apache.velocity"
] |
java.io; org.apache.velocity;
| 1,207,748
|
protected static List<String> tokenize(String str) {
if (str == null) {
throw new NullPointerException("Cannot pass null arguments to tokenize");
}
List<String> l = new ArrayList<String>();
StringTokenizer st = new StringTokenizer(str);
while (st.hasMoreTokens()) {
l.add(st.nextToken());
}
return l;
}
|
static List<String> function(String str) { if (str == null) { throw new NullPointerException(STR); } List<String> l = new ArrayList<String>(); StringTokenizer st = new StringTokenizer(str); while (st.hasMoreTokens()) { l.add(st.nextToken()); } return l; }
|
/**
* Returns a list of tokens delimited by whitespace in String str (useful when handling XML)
*
* @param str the String to tokenize
* @return the String tokens
* @throws NullPointerException if str is null
*/
|
Returns a list of tokens delimited by whitespace in String str (useful when handling XML)
|
tokenize
|
{
"repo_name": "julie-sullivan/phytomine",
"path": "intermine/model/main/src/org/intermine/metadata/Util.java",
"license": "lgpl-2.1",
"size": 23995
}
|
[
"java.util.ArrayList",
"java.util.List",
"java.util.StringTokenizer"
] |
import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 659,052
|
public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
this.headerFilterStrategy = headerFilterStrategy;
}
|
void function(HeaderFilterStrategy headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; }
|
/**
* A custom org.apache.camel.spi.HeaderFilterStrategy to filter header to and from Camel message.
*/
|
A custom org.apache.camel.spi.HeaderFilterStrategy to filter header to and from Camel message
|
setHeaderFilterStrategy
|
{
"repo_name": "nikhilvibhav/camel",
"path": "components/camel-vertx/camel-vertx-http/src/main/java/org/apache/camel/component/vertx/http/VertxHttpConfiguration.java",
"license": "apache-2.0",
"size": 11067
}
|
[
"org.apache.camel.spi.HeaderFilterStrategy"
] |
import org.apache.camel.spi.HeaderFilterStrategy;
|
import org.apache.camel.spi.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 1,319,556
|
public ChargeResponse updateCharge(String chargeId, String description, Map<String, Object> metadata){
MultivaluedMapImpl request = new MultivaluedMapImpl();
if(description != null){
request.add("description", description);
}
if(metadata != null){
request.putAll(mapToRequest(metadata, "metadata"));
}
return client.post(buildUrl(chargeId), ChargeResponse.class, request);
}
|
ChargeResponse function(String chargeId, String description, Map<String, Object> metadata){ MultivaluedMapImpl request = new MultivaluedMapImpl(); if(description != null){ request.add(STR, description); } if(metadata != null){ request.putAll(mapToRequest(metadata, STR)); } return client.post(buildUrl(chargeId), ChargeResponse.class, request); }
|
/**
* Update a charge
* <a href="https://stripe.com/docs/api#update_charge">https://stripe.com/docs/api#update_charge</a>
* @param chargeId the id of the charge to update
* @param description optional updated description
* @param metadata optional updated metadata
*/
|
Update a charge HREF
|
updateCharge
|
{
"repo_name": "jlinn/stripe-api-java",
"path": "src/main/java/net/joelinn/stripe/api/Charges.java",
"license": "mit",
"size": 5093
}
|
[
"com.sun.jersey.core.util.MultivaluedMapImpl",
"java.util.Map",
"net.joelinn.stripe.response.charges.ChargeResponse"
] |
import com.sun.jersey.core.util.MultivaluedMapImpl; import java.util.Map; import net.joelinn.stripe.response.charges.ChargeResponse;
|
import com.sun.jersey.core.util.*; import java.util.*; import net.joelinn.stripe.response.charges.*;
|
[
"com.sun.jersey",
"java.util",
"net.joelinn.stripe"
] |
com.sun.jersey; java.util; net.joelinn.stripe;
| 1,506,622
|
public void unpauseVid()
{
if (atEnd)
{
restart();
}
else if (!playing)
{
mediaPlayer.setPause(false);
playing = true;
}
}
}
public static final String CACHABLE_TYPE = "CACHABLE_VIDEO";
public static ArrayList<SimpleMediaPlayer> mediaPlayers = new ArrayList<SimpleMediaPlayer>();
private static final int HEIGHT = 240;
private static final Logger log = Logger.getLogger(SimpleMediaPlayer.class.getName());
private static Format TEXTURE_FORMAT = Format.RGBA16;
private static float vidHeight = 7;
private static float vidWidth = 12;
private static final int WIDTH = 320;
static
{
boolean found = false;
String vlcLib = ManagementFactory.getRuntimeMXBean().getSystemProperties().get("vlc");
if (vlcLib != null)
{
found = true;
}
if (!found)
{
found = new NativeDiscovery().discover();
}
if (!found)
{
log.warning("Cannot play videos. VLC is either not installed or located in an unexpected directory. " + "If VLC is installed in an unexpected directory you can provide the path to its library " + "location with the argument: '-Dvlc=\"...\"");
}
}
public DirectMediaPlayer mediaPlayer;
private IActionOnVideoEndListener actionOnVideoEndListener = null;
private boolean atEnd = false;
private boolean autostart = false;
private boolean firstClick = false;
private boolean hasStarted = false;
private SimpleMediaPlayer instance;
private Material mat;
private MediaPlayerFactory mediaPlayerFactory;
private CenteredQuad quad;
private Geometry quadGeometry;
private boolean repeat = false;
private Image videoImage;
private Texture2D videoTexture;
private String videoURL = null;
private VidThread vidThread = null;
public SimpleMediaPlayer(String name, UUID uuid)
{
super(name, uuid);
}
|
void function() { if (atEnd) { restart(); } else if (!playing) { mediaPlayer.setPause(false); playing = true; } } } public static final String CACHABLE_TYPE = STR; public static ArrayList<SimpleMediaPlayer> mediaPlayers = new ArrayList<SimpleMediaPlayer>(); private static final int HEIGHT = 240; private static final Logger log = Logger.getLogger(SimpleMediaPlayer.class.getName()); private static Format TEXTURE_FORMAT = Format.RGBA16; private static float vidHeight = 7; private static float vidWidth = 12; private static final int WIDTH = 320; static { boolean found = false; String vlcLib = ManagementFactory.getRuntimeMXBean().getSystemProperties().get("vlc"); if (vlcLib != null) { found = true; } if (!found) { found = new NativeDiscovery().discover(); } if (!found) { log.warning(STR + STR + STR...\""); } } public DirectMediaPlayer mediaPlayer; private IActionOnVideoEndListener actionOnVideoEndListener = null; private boolean atEnd = false; private boolean autostart = false; private boolean firstClick = false; private boolean hasStarted = false; private SimpleMediaPlayer instance; private Material mat; private MediaPlayerFactory mediaPlayerFactory; private CenteredQuad quad; private Geometry quadGeometry; private boolean repeat = false; private Image videoImage; private Texture2D videoTexture; private String videoURL = null; private VidThread vidThread = null; public SimpleMediaPlayer(String name, UUID uuid) { super(name, uuid); }
|
/**
* Unpause vid.
*/
|
Unpause vid
|
unpauseVid
|
{
"repo_name": "synergynet/synergynet3.1",
"path": "synergynet3.1-parent/synergynet3-appsystem-core/src/main/java/synergynet3/additionalitems/jme/SimpleMediaPlayer.java",
"license": "bsd-3-clause",
"size": 16663
}
|
[
"com.jme3.material.Material",
"com.jme3.scene.Geometry",
"com.jme3.texture.Image",
"com.jme3.texture.Texture2D",
"java.lang.management.ManagementFactory",
"java.util.ArrayList",
"java.util.logging.Logger",
"uk.co.caprica.vlcj.discovery.NativeDiscovery",
"uk.co.caprica.vlcj.player.MediaPlayerFactory",
"uk.co.caprica.vlcj.player.direct.DirectMediaPlayer"
] |
import com.jme3.material.Material; import com.jme3.scene.Geometry; import com.jme3.texture.Image; import com.jme3.texture.Texture2D; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.logging.Logger; import uk.co.caprica.vlcj.discovery.NativeDiscovery; import uk.co.caprica.vlcj.player.MediaPlayerFactory; import uk.co.caprica.vlcj.player.direct.DirectMediaPlayer;
|
import com.jme3.material.*; import com.jme3.scene.*; import com.jme3.texture.*; import java.lang.management.*; import java.util.*; import java.util.logging.*; import uk.co.caprica.vlcj.discovery.*; import uk.co.caprica.vlcj.player.*; import uk.co.caprica.vlcj.player.direct.*;
|
[
"com.jme3.material",
"com.jme3.scene",
"com.jme3.texture",
"java.lang",
"java.util",
"uk.co.caprica"
] |
com.jme3.material; com.jme3.scene; com.jme3.texture; java.lang; java.util; uk.co.caprica;
| 1,977,082
|
public static URI getDefaultUserDatabase( ILoaderAieon loader){
return getDefaultUserDatabase(loader, S_DEFAULT_EXTENSION );
}
|
static URI function( ILoaderAieon loader){ return getDefaultUserDatabase(loader, S_DEFAULT_EXTENSION ); }
|
/**
* Return the default user directory. This is '%system-user%\<folder>\<name>.sqlite'
* @param aieon
* @return
*/
|
Return the default user directory. This is '%system-user%\\.sqlite'
|
getDefaultUserDatabase
|
{
"repo_name": "condast/AieonF",
"path": "Workspace/org.aieonf.concept/src/org/aieonf/concept/file/ProjectFolderUtils.java",
"license": "apache-2.0",
"size": 6682
}
|
[
"org.aieonf.concept.loader.ILoaderAieon"
] |
import org.aieonf.concept.loader.ILoaderAieon;
|
import org.aieonf.concept.loader.*;
|
[
"org.aieonf.concept"
] |
org.aieonf.concept;
| 2,130,321
|
public int getNumPages()
{
TypedQuery<E> query = em.createNamedQuery(findAllQuery, clazz);
double size = query.getResultList().size();
return (int) Math.ceil(size / RECORDS_PER_PAGE);
}
|
int function() { TypedQuery<E> query = em.createNamedQuery(findAllQuery, clazz); double size = query.getResultList().size(); return (int) Math.ceil(size / RECORDS_PER_PAGE); }
|
/**
 * Returns the number of pages available. This is calculated by taking the ceiling of the size of the result list
* divided by {@code RECORDS_PER_PAGE}, where {@code RECORDS_PER_PAGE} is currently set to {@value
* #RECORDS_PER_PAGE}.
*
* @return the number of pages available
*/
|
Returns the number of pages available. This is calculated by taking the ceiling of the size of the result list divided by RECORDS_PER_PAGE, where RECORDS_PER_PAGE is currently set to #RECORDS_PER_PAGE
|
getNumPages
|
{
"repo_name": "hendrixjoseph/FamilyTree",
"path": "src/main/java/edu/wright/hendrix11/familyTree/dataBean/DataBean.java",
"license": "mit",
"size": 7964
}
|
[
"javax.persistence.TypedQuery"
] |
import javax.persistence.TypedQuery;
|
import javax.persistence.*;
|
[
"javax.persistence"
] |
javax.persistence;
| 1,546,867
|
/**
 * A request for a maven-metadata.xml file that exists in the managed
 * repository and on two remote proxied repositories.
 *
 * Expected result: the remote copies are downloaded and merged into the
 * existing managed repository copy of the maven-metadata.xml file.
 *
 * @throws Exception on unexpected test error
 */
public void testGetReleaseMetadataProxiedOnLocalMultipleRemote()
throws Exception
{
String requestedResource = "org/apache/maven/test/get-on-multiple-repos/1.0/maven-metadata.xml";
setupTestableManagedRepository( requestedResource );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false);
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false);
// Before the fetch: managed copy exists, neither proxied copy has been pulled yet.
assertReleaseMetadataContents( requestedResource );
assertNoRepoMetadata( ID_PROXIED1, requestedResource );
assertNoRepoMetadata( ID_PROXIED2, requestedResource );
assertFetchVersioned( requestedResource );
// After the fetch: managed copy still valid, and both remote copies were retrieved.
assertReleaseMetadataContents( requestedResource );
assertRepoReleaseMetadataContents( ID_PROXIED1, requestedResource );
assertRepoReleaseMetadataContents( ID_PROXIED2, requestedResource );
}
|
void function() throws Exception { String requestedResource = STR; setupTestableManagedRepository( requestedResource ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false); assertReleaseMetadataContents( requestedResource ); assertNoRepoMetadata( ID_PROXIED1, requestedResource ); assertNoRepoMetadata( ID_PROXIED2, requestedResource ); assertFetchVersioned( requestedResource ); assertReleaseMetadataContents( requestedResource ); assertRepoReleaseMetadataContents( ID_PROXIED1, requestedResource ); assertRepoReleaseMetadataContents( ID_PROXIED2, requestedResource ); }
|
/**
* A request for a maven-metadata.xml file that exists in the managed repository, and on multiple
* remote repositories.
*
* Expected result: the maven-metadata.xml file on the remote proxied repository is downloaded
* and merged into the contents of the existing managed repository copy of
* the maven-metadata.xml file.
*/
|
A request for a maven-metadata.xml file that exists in the managed repository, and on multiple remote repositories. Expected result: the maven-metadata.xml file on the remote proxied repository is downloaded and merged into the contents of the existing managed repository copy of the maven-metadata.xml file
|
testGetReleaseMetadataProxiedOnLocalMultipleRemote
|
{
"repo_name": "hiredman/archiva",
"path": "archiva-modules/archiva-base/archiva-proxy/src/test/java/org/apache/maven/archiva/proxy/MetadataTransferTest.java",
"license": "apache-2.0",
"size": 63158
}
|
[
"org.apache.maven.archiva.policies.CachedFailuresPolicy",
"org.apache.maven.archiva.policies.ChecksumPolicy",
"org.apache.maven.archiva.policies.ReleasesPolicy",
"org.apache.maven.archiva.policies.SnapshotsPolicy"
] |
import org.apache.maven.archiva.policies.CachedFailuresPolicy; import org.apache.maven.archiva.policies.ChecksumPolicy; import org.apache.maven.archiva.policies.ReleasesPolicy; import org.apache.maven.archiva.policies.SnapshotsPolicy;
|
import org.apache.maven.archiva.policies.*;
|
[
"org.apache.maven"
] |
org.apache.maven;
| 2,651,034
|
/**
 * Maps a GL pixel-type constant to the corresponding {@link JCGLPixelFormat}.
 *
 * @param glType The GL pixel-type constant.
 *
 * @return The equivalent pixel format value.
 */
public static JCGLPixelFormat pixelTypeFromGL(
  final int glType)
{
  switch (glType) {
    /* Per-component types. */
    case GL.GL_BYTE:
      return JCGLPixelFormat.PIXEL_COMPONENT_BYTE;
    case GL.GL_UNSIGNED_BYTE:
      return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_BYTE;
    case GL.GL_SHORT:
      return JCGLPixelFormat.PIXEL_COMPONENT_SHORT;
    case GL.GL_UNSIGNED_SHORT:
      return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_SHORT;
    case GL2ES2.GL_INT:
      return JCGLPixelFormat.PIXEL_COMPONENT_INT;
    case GL.GL_UNSIGNED_INT:
      return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_INT;
    case GL.GL_HALF_FLOAT:
      return JCGLPixelFormat.PIXEL_COMPONENT_HALF_FLOAT;
    case GL.GL_FLOAT:
      return JCGLPixelFormat.PIXEL_COMPONENT_FLOAT;

    /* Packed types. */
    case GL.GL_UNSIGNED_INT_24_8:
      return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_INT_24_8;
    case GL2ES2.GL_UNSIGNED_INT_10_10_10_2:
      return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_INT_1010102;
    case GL.GL_UNSIGNED_SHORT_5_6_5:
      return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_565;
    case GL.GL_UNSIGNED_SHORT_5_5_5_1:
      return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_5551;
    case GL.GL_UNSIGNED_SHORT_4_4_4_4:
      return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_4444;

    default:
      throw new UnreachableCodeException();
  }
}
|
static JCGLPixelFormat function( final int e) { switch (e) { case GL.GL_UNSIGNED_INT_24_8: return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_INT_24_8; case GL.GL_UNSIGNED_SHORT_5_6_5: return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_565; case GL.GL_UNSIGNED_SHORT_5_5_5_1: return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_5551; case GL.GL_UNSIGNED_SHORT_4_4_4_4: return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_SHORT_4444; case GL2ES2.GL_UNSIGNED_INT_10_10_10_2: return JCGLPixelFormat.PIXEL_PACKED_UNSIGNED_INT_1010102; case GL.GL_UNSIGNED_SHORT: return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_SHORT; case GL.GL_UNSIGNED_INT: return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_INT; case GL.GL_UNSIGNED_BYTE: return JCGLPixelFormat.PIXEL_COMPONENT_UNSIGNED_BYTE; case GL.GL_SHORT: return JCGLPixelFormat.PIXEL_COMPONENT_SHORT; case GL2ES2.GL_INT: return JCGLPixelFormat.PIXEL_COMPONENT_INT; case GL.GL_FLOAT: return JCGLPixelFormat.PIXEL_COMPONENT_FLOAT; case GL.GL_BYTE: return JCGLPixelFormat.PIXEL_COMPONENT_BYTE; case GL.GL_HALF_FLOAT: return JCGLPixelFormat.PIXEL_COMPONENT_HALF_FLOAT; default: throw new UnreachableCodeException(); } }
|
/**
* Convert pixel types from GL constants.
*
* @param e The GL constant.
*
* @return The value.
*/
|
Convert pixel types from GL constants
|
pixelTypeFromGL
|
{
"repo_name": "io7m/jcanephora",
"path": "com.io7m.jcanephora.jogl/src/main/java/com/io7m/jcanephora/jogl/JOGLTypeConversions.java",
"license": "isc",
"size": 42369
}
|
[
"com.io7m.jcanephora.core.JCGLPixelFormat",
"com.io7m.junreachable.UnreachableCodeException"
] |
import com.io7m.jcanephora.core.JCGLPixelFormat; import com.io7m.junreachable.UnreachableCodeException;
|
import com.io7m.jcanephora.core.*; import com.io7m.junreachable.*;
|
[
"com.io7m.jcanephora",
"com.io7m.junreachable"
] |
com.io7m.jcanephora; com.io7m.junreachable;
| 1,399,587
|
/**
 * Clears the stack by discarding the backing list.
 */
public void clear(){
stack = new ArrayList();
}

/**
 * Returns a hash code derived from the stack contents, so that equal
 * stacks hash equally.
 */
@Override
public int hashCode() { return stack.hashCode(); }
|
void function(){ stack = new ArrayList(); } public int hashCode() { return stack.hashCode(); }
|
/**
* Clears the stack.
*/
|
Clears the stack
|
clear
|
{
"repo_name": "Xyene/JBL",
"path": "src/test/java/benchmark/bcel/verifier/structurals/OperandStack.java",
"license": "lgpl-3.0",
"size": 7826
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 647,147
|
/**
 * Instantly moves the entity to the specified coordinates.
 * @param destX The x-coordinate of the destination
 * @param destY The y-coordinate of the destination
 * @param destZ The z-coordinate of the destination
 */
public void setCoords (int destX, int destY, int destZ) {
setCoords(new CoordGrid(destX, destY, destZ));
}
/**
* Instantly moves the entity to the specified coordinates
* @param destination The destination {@link CoordGrid}
|
void function (int destX, int destY, int destZ) { setCoords(new CoordGrid(destX, destY, destZ)); } /** * Instantly moves the entity to the specified coordinates * @param destination The destination {@link CoordGrid}
|
/**
* Instantly moves the entity to the specified coordinates
* @param destX The x-coordinate of the destination
* @param destY The y-coordinate of the destination
* @param destZ The z-coordinate of the destination
*/
|
Instantly moves the entity to the specified coordinates
|
setCoords
|
{
"repo_name": "Sundays211/VirtueRS3",
"path": "src/main/java/org/virtue/game/map/movement/Movement.java",
"license": "mit",
"size": 19907
}
|
[
"org.virtue.game.map.CoordGrid"
] |
import org.virtue.game.map.CoordGrid;
|
import org.virtue.game.map.*;
|
[
"org.virtue.game"
] |
org.virtue.game;
| 1,524,432
|
/**
 * Get the identifiers of all featureOfInterest entities.
 *
 * @param session
 *            Hibernate session
 * @return distinct featureOfInterest identifiers
 */
@SuppressWarnings("unchecked")
public List<String> getFeatureOfInterestIdentifiers(Session session) {
    Criteria criteria = session.createCriteria(FeatureOfInterest.class);
    criteria.setProjection(Projections.distinct(Projections.property(FeatureOfInterest.IDENTIFIER)));
    LOGGER.debug("QUERY getFeatureOfInterestIdentifiers(): {}", HibernateHelper.getSqlString(criteria));
    return criteria.list();
}
|
@SuppressWarnings(STR) List<String> function(Session session) { Criteria criteria = session.createCriteria(FeatureOfInterest.class).setProjection( Projections.distinct(Projections.property(FeatureOfInterest.IDENTIFIER))); LOGGER.debug(STR, HibernateHelper.getSqlString(criteria)); return criteria.list(); }
|
/**
* Get all featureOfInterest identifiers
*
* @param session
* Hibernate session
* @return FeatureOfInterest identifiers
*/
|
Get all featureOfInterest identifiers
|
getFeatureOfInterestIdentifiers
|
{
"repo_name": "shane-axiom/SOS",
"path": "hibernate/common/src/main/java/org/n52/sos/ds/hibernate/dao/FeatureOfInterestDAO.java",
"license": "gpl-2.0",
"size": 18200
}
|
[
"java.util.List",
"org.hibernate.Criteria",
"org.hibernate.Session",
"org.hibernate.criterion.Projections",
"org.n52.sos.ds.hibernate.entities.FeatureOfInterest",
"org.n52.sos.ds.hibernate.util.HibernateHelper"
] |
import java.util.List; import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.criterion.Projections; import org.n52.sos.ds.hibernate.entities.FeatureOfInterest; import org.n52.sos.ds.hibernate.util.HibernateHelper;
|
import java.util.*; import org.hibernate.*; import org.hibernate.criterion.*; import org.n52.sos.ds.hibernate.entities.*; import org.n52.sos.ds.hibernate.util.*;
|
[
"java.util",
"org.hibernate",
"org.hibernate.criterion",
"org.n52.sos"
] |
java.util; org.hibernate; org.hibernate.criterion; org.n52.sos;
| 2,460,805
|
/**
 * Gets the source indicating how user dependencies are supplied.
 *
 * @param config the job configuration; expected to be a {@code JobConfig}
 * @return int source of the user-supplied classpath
 */
public static int getUserClassPathSource(Config config) {
    return ((JobConfig) config).getClasspath().getUserSupplied().getSource();
}
|
static int function(Config config) { JobConfig jobConfig= (JobConfig)config; return jobConfig.getClasspath().getUserSupplied().getSource(); }
|
/**
* <p>
* Gets the source, how user dependencies are supplied
* </p>
*
* @param config
* Config
* @return int source
*/
|
Gets the source, how user dependencies are supplied
|
getUserClassPathSource
|
{
"repo_name": "impetus-opensource/jumbune",
"path": "debugger/src/main/java/org/jumbune/debugger/instrumentation/utils/FileUtil.java",
"license": "lgpl-3.0",
"size": 5433
}
|
[
"org.jumbune.common.job.Config",
"org.jumbune.common.job.JobConfig"
] |
import org.jumbune.common.job.Config; import org.jumbune.common.job.JobConfig;
|
import org.jumbune.common.job.*;
|
[
"org.jumbune.common"
] |
org.jumbune.common;
| 923,842
|
/**
 * Get this Entity's EnumCreatureAttribute: this entity is always
 * classified as an arthropod.
 */
public EnumCreatureAttribute getCreatureAttribute()
{
return EnumCreatureAttribute.ARTHROPOD;
}
|
EnumCreatureAttribute function() { return EnumCreatureAttribute.ARTHROPOD; }
|
/**
* Get this Entity's EnumCreatureAttribute
*/
|
Get this Entity's EnumCreatureAttribute
|
getCreatureAttribute
|
{
"repo_name": "jtrent238/jtrent238FoodMod",
"path": "src/main/java/com/jtrent238/foodmod/EntityCandySpider.java",
"license": "lgpl-2.1",
"size": 8277
}
|
[
"net.minecraft.entity.EnumCreatureAttribute"
] |
import net.minecraft.entity.EnumCreatureAttribute;
|
import net.minecraft.entity.*;
|
[
"net.minecraft.entity"
] |
net.minecraft.entity;
| 2,648,200
|
/**
 * Find all maximum independent sets for the given directed graph represented
 * by adjMap.
 * The method internally builds the complement graph of G, then computes a
 * maximum matching over it via the Hungarian algorithm, and aggregates the
 * matched vertex pairs into independent sets of maximum size.
 *
 * @param adjMap directed graph G as an adjacency map
 * @param nV the number of vertexes
 * @return a list of all maximum independent sets; note that a
 * complete graph will return an empty list.
 */
public static List<TIntSet> findAllMaximum2(TIntObjectMap<TIntSet> adjMap, int nV) {
// === build a cost matrix with all inf's excepting the complement graph's
// edges which are set to 1 ======
// --- runtime complexity of graph complement is O(|V|^2) ---
Set<PairInt> comp = Complement.graphComplement(adjMap);
float[][] matrix = new float[nV][];
int i;
for (i = 0; i < nV; ++i) {
matrix[i] = new float[nV];
Arrays.fill(matrix[i], Float.POSITIVE_INFINITY);
}
for (PairInt p : comp) {
// skip self-loops; a vertex cannot be matched with itself
if (p.getX() == p.getY()) {
continue;
}
matrix[p.getX()][p.getY()] = 1;
}
HungarianAlgorithm ha = new HungarianAlgorithm();
int[][] matched = ha.computeAssignments(matrix);
// === keep each matched pair that is independent in G (no edge between the
// endpoints in either direction) and collect it into the set s ======
Set<TIntSet> s = new HashSet<TIntSet>();
for (i = 0; i < matched.length; ++i) {
// negative entries mark unassigned vertexes
if (matched[i][0] < 0 || matched[i][1] < 0) {
continue;
}
if (!(adjMap.containsKey(matched[i][0]) && adjMap.get(matched[i][0]).contains(matched[i][1]))
&& !(adjMap.containsKey(matched[i][1]) && adjMap.get(matched[i][1]).contains(matched[i][0]))){
s.add(new TIntHashSet(matched[i]));
}
}
// -- runtime complexity is O(n) for forming mis --
return findAllMaximum(s, adjMap);
}
|
static List<TIntSet> function(TIntObjectMap<TIntSet> adjMap, int nV) { Set<PairInt> comp = Complement.graphComplement(adjMap); float[][] matrix = new float[nV][]; int i; int j; for (i = 0; i < nV; ++i) { matrix[i] = new float[nV]; Arrays.fill(matrix[i], Float.POSITIVE_INFINITY); } for (PairInt p : comp) { if (p.getX() == p.getY()) { continue; } matrix[p.getX()][p.getY()] = 1; } HungarianAlgorithm ha = new HungarianAlgorithm(); int[][] matched = ha.computeAssignments(matrix); Set<TIntSet> s = new HashSet<TIntSet>(); for (i = 0; i < matched.length; ++i) { System.out.printf(STR, Arrays.toString(matched[i])); if (matched[i][0] < 0 matched[i][1] < 0) { continue; } if (!(adjMap.containsKey(matched[i][0]) && adjMap.get(matched[i][0]).contains(matched[i][1])) && !(adjMap.containsKey(matched[i][1]) && adjMap.get(matched[i][1]).contains(matched[i][0]))){ s.add(new TIntHashSet(matched[i])); } } return findAllMaximum(s, adjMap); }
|
/**
* find all maximum independent sets for the given directed graph represented
* by adjMap.
* The method internally builds the complement graph of G, then
* the maximum matching bipartite graph and then aggregates the
* matching sets of vertices into independent sets of maximum size.
* runtime complexity is:
* @param adjMap directed graph G as an adjacency map
* @param nV the number of vertexes
* @return returns a list of all maximum independent sets. note that a
* complete graph will return an empty list.
*/
|
find all maximum independent sets for the given directed graph represented by adjMap. The method internally builds the complement graph of G, then the maximum matching bipartite graph and then aggregates the matching sets of vertices into independent sets of maximum size. runtime complexity is:
|
findAllMaximum2
|
{
"repo_name": "nking/shared",
"path": "src/main/java/algorithms/graphs/MaximalIndependentSets.java",
"license": "mit",
"size": 27246
}
|
[
"gnu.trove.map.TIntObjectMap",
"gnu.trove.set.TIntSet",
"gnu.trove.set.hash.TIntHashSet",
"java.util.Arrays",
"java.util.HashSet",
"java.util.List",
"java.util.Set"
] |
import gnu.trove.map.TIntObjectMap; import gnu.trove.set.TIntSet; import gnu.trove.set.hash.TIntHashSet; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set;
|
import gnu.trove.map.*; import gnu.trove.set.*; import gnu.trove.set.hash.*; import java.util.*;
|
[
"gnu.trove.map",
"gnu.trove.set",
"java.util"
] |
gnu.trove.map; gnu.trove.set; java.util;
| 1,299,193
|
/**
 * Return the value associated with the column: enrollmentid.
 * The association is fetched lazily and cascades JPA persist/merge plus
 * Hibernate save-update.
 * @return A Enrollment object (this.enrollmentid)
 */
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY )
@org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE})
@Basic( optional = true )
@JoinColumn(name = "enrollmentid", nullable = true )
public Enrollment getEnrollmentid() {
return this.enrollmentid;
}
|
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY ) @org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE}) @Basic( optional = true ) @JoinColumn(name = STR, nullable = true ) Enrollment function() { return this.enrollmentid; }
|
/**
* Return the value associated with the column: enrollmentid.
* @return A Enrollment object (this.enrollmentid)
*/
|
Return the value associated with the column: enrollmentid
|
getEnrollmentid
|
{
"repo_name": "servinglynk/servinglynk-hmis",
"path": "hmis-model-v2016/src/main/java/com/servinglynk/hmis/warehouse/model/v2016/Entryssvf.java",
"license": "mpl-2.0",
"size": 25761
}
|
[
"javax.persistence.Basic",
"javax.persistence.CascadeType",
"javax.persistence.FetchType",
"javax.persistence.JoinColumn",
"javax.persistence.ManyToOne"
] |
import javax.persistence.Basic; import javax.persistence.CascadeType; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne;
|
import javax.persistence.*;
|
[
"javax.persistence"
] |
javax.persistence;
| 1,339,147
|
/**
 * Update QDM terms: collapses duplicate QualityDataSets attached to the
 * given list object, remapping QDM terms onto one copy and deleting the
 * redundant one.
 * <p>
 * NOTE(review): duplicates are only detected between adjacent list entries,
 * so this assumes getByListObject() returns rows grouped by measure id —
 * confirm against the query's ordering.
 *
 * @param newLOID
 *            the new loid
 */
public void updateQDMTerms(String newLOID) {
List<QualityDataSet> qdss = getByListObject(newLOID);
// Pairs of QDS ids: key is kept, value is remapped away and deleted.
HashMap<String, String> dqdss = new HashMap<String, String>();
for(int i = 0; i < qdss.size()-1; i++){
QualityDataSet qds1 = qdss.get(i);
QualityDataSet qds2 = qdss.get(i+1);
String mid1 = qds1.getMeasureId().getId();
String mid2 = qds2.getMeasureId().getId();
if(mid1.equalsIgnoreCase(mid2)){
// check dataType, could be multiple dataTypes with the same category
String dt1 = qds1.getDataType().getId();
String dt2 = qds2.getDataType().getId();
if(dt1.equalsIgnoreCase(dt2)){
String occ1 = qds1.getOccurrence();
String occ2 = qds2.getOccurrence();
if(occ1 == null && occ2 == null){
// duplicate: same measure, same dataType, no occurrences
dqdss.put(qds2.getId(), qds1.getId());
}else if(occ1 != null && occ2 != null && occ1.equalsIgnoreCase(occ2)){
// duplicate: same measure, same dataType, same occurrence
dqdss.put(qds2.getId(), qds1.getId());
}
}
}
}
for(String newID : dqdss.keySet()){
String oldID = dqdss.get(newID);
// update the QDMTerm to use the latest QDM
updateQDMTerm(newID, oldID);
// delete the duplicate QDM
deleteOldQDM(oldID);
}
}
|
void function(String newLOID) { List<QualityDataSet> qdss = getByListObject(newLOID); HashMap<String, String> dqdss = new HashMap<String, String>(); for(int i = 0; i < qdss.size()-1; i++){ QualityDataSet qds1 = qdss.get(i); QualityDataSet qds2 = qdss.get(i+1); String mid1 = qds1.getMeasureId().getId(); String mid2 = qds2.getMeasureId().getId(); if(mid1.equalsIgnoreCase(mid2)){ String dt1 = qds1.getDataType().getId(); String dt2 = qds2.getDataType().getId(); if(dt1.equalsIgnoreCase(dt2)){ String occ1 = qds1.getOccurrence(); String occ2 = qds2.getOccurrence(); if(occ1 == null && occ2 == null){ dqdss.put(qds2.getId(), qds1.getId()); }else if(occ1 != null && occ2 != null){ if(occ1.equalsIgnoreCase(occ2)){ dqdss.put(qds2.getId(), qds1.getId()); } } } } } for(String key : dqdss.keySet()){ String newID = key; String oldID = dqdss.get(key); updateQDMTerm(newID, oldID); deleteOldQDM(oldID); } }
|
/**
* Update qdm terms.
*
* @param newLOID
* the new loid
*/
|
Update qdm terms
|
updateQDMTerms
|
{
"repo_name": "JaLandry/MeasureAuthoringTool_LatestSprint",
"path": "mat/src/mat/dao/impl/QualityDataSetDAO.java",
"license": "apache-2.0",
"size": 11518
}
|
[
"java.util.HashMap",
"java.util.List"
] |
import java.util.HashMap; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 483,467
|
/**
 * Test a response for a specific LINK header: asserts that exactly one
 * LINK header with the given URI and rel argument is present.
 *
 * @param response the HTTP response
 * @param uri the URI expected in the LINK header
 * @param rel the rel argument to check for
 */
protected static void checkForLinkHeader(final HttpResponse response, final String uri, final String rel) {
assertEquals(1, countLinkHeader(response, uri, rel));
}
|
static void function(final HttpResponse response, final String uri, final String rel) { assertEquals(1, countLinkHeader(response, uri, rel)); }
|
/**
* Test a response for a specific LINK header
*
* @param response the HTTP response
* @param uri the URI expected in the LINK header
* @param rel the rel argument to check for
*/
|
Test a response for a specific LINK header
|
checkForLinkHeader
|
{
"repo_name": "whikloj/fcrepo4",
"path": "fcrepo-http-api/src/test/java/org/fcrepo/integration/http/api/AbstractResourceIT.java",
"license": "apache-2.0",
"size": 28411
}
|
[
"org.apache.http.HttpResponse",
"org.junit.Assert"
] |
import org.apache.http.HttpResponse; import org.junit.Assert;
|
import org.apache.http.*; import org.junit.*;
|
[
"org.apache.http",
"org.junit"
] |
org.apache.http; org.junit;
| 2,765,416
|
/**
 * Create a new testcase judge with an empty name, using compiler id 1
 * (presumably C++ — verify against the service's compiler list).
 *
 * @param source source code of the judge
 * @throws ClientException for API-level errors (e.g. invalid access token
 *         or empty source code)
 * @throws ConnectionException for connection problems
 * @return API response
 */
public JsonObject createJudge(String source) throws ClientException, ConnectionException
{
return createJudge(source, 1, "testcase", "");
}
|
JsonObject function(String source) throws ClientException, ConnectionException { return createJudge(source, 1, STR, ""); }
|
/**
* Create a new C++ testcase judge with empty name
*
* @param {string} source - source code
* @throws NotAuthorizedException for invalid access token
* @throws BadRequestException for empty source code
* @throws ClientException
* @throws ConnectionException
* @return API response
*/
|
Create a new C++ testcase judge with empty name
|
createJudge
|
{
"repo_name": "sphere-engine/java-client",
"path": "src/com/SphereEngine/Api/ProblemsClientV3.java",
"license": "apache-2.0",
"size": 46337
}
|
[
"com.google.gson.JsonObject"
] |
import com.google.gson.JsonObject;
|
import com.google.gson.*;
|
[
"com.google.gson"
] |
com.google.gson;
| 2,243,612
|
View entry;
if (convertView != null)
entry = convertView;
else
entry = mInflater.inflate(R.layout.item_wifi_p2p_peer, null, false);
OpportunisticPeer peer = getItem(position);
TextView textDeviceStatus = (TextView) entry.findViewById(R.id.text_peer_status);
TextView textDeviceGroup = (TextView) entry.findViewById(R.id.text_peer_group);
TextView textDeviceUuid = (TextView) entry.findViewById(R.id.text_peer_uuid);
TextView textDeviceMacAddress = (TextView) entry.findViewById(R.id.text_peer_mac_address);
if(peer != null) {
textDeviceStatus.setText(peer.getStatus().getSymbol());
if(peer.isGroupOwner())
textDeviceGroup.setText(R.string.groupOwner);
else if(peer.hasGroupOwnerField())
if(peer.hasGroupOwner())
textDeviceGroup.setText(R.string.groupClient);
else textDeviceGroup.setText(" ");
else
textDeviceGroup.setText(R.string.missingGroup);
textDeviceUuid.setText(peer.getUuid().substring(24));
textDeviceMacAddress.setText(peer.getMacAddress());
} else {
textDeviceStatus.setText(R.string.missingStatus);
textDeviceGroup.setText(R.string.missingGroup);
textDeviceUuid.setText(R.string.missingUuid);
textDeviceMacAddress.setText(R.string.missingMac);
}
return entry;
}
|
View entry; if (convertView != null) entry = convertView; else entry = mInflater.inflate(R.layout.item_wifi_p2p_peer, null, false); OpportunisticPeer peer = getItem(position); TextView textDeviceStatus = (TextView) entry.findViewById(R.id.text_peer_status); TextView textDeviceGroup = (TextView) entry.findViewById(R.id.text_peer_group); TextView textDeviceUuid = (TextView) entry.findViewById(R.id.text_peer_uuid); TextView textDeviceMacAddress = (TextView) entry.findViewById(R.id.text_peer_mac_address); if(peer != null) { textDeviceStatus.setText(peer.getStatus().getSymbol()); if(peer.isGroupOwner()) textDeviceGroup.setText(R.string.groupOwner); else if(peer.hasGroupOwnerField()) if(peer.hasGroupOwner()) textDeviceGroup.setText(R.string.groupClient); else textDeviceGroup.setText(" "); else textDeviceGroup.setText(R.string.missingGroup); textDeviceUuid.setText(peer.getUuid().substring(24)); textDeviceMacAddress.setText(peer.getMacAddress()); } else { textDeviceStatus.setText(R.string.missingStatus); textDeviceGroup.setText(R.string.missingGroup); textDeviceUuid.setText(R.string.missingUuid); textDeviceMacAddress.setText(R.string.missingMac); } return entry; }
|
/** Used by Android to retrieve the View corresponding to a certain item in the list of WifiP2pPeers.
* @param position position of the WifiP2pPeer for which the View is requested
* @param convertView available View that can be recycled by filling it with the WifiP2pPeer details
* @param parent parent View in the hierarchy
* @return the View to be used
*/
|
Used by Android to retrieve the View corresponding to a certain item in the list of WifiP2pPeers
|
getView
|
{
"repo_name": "COPELABS-SITI/nfd-android",
"path": "app/src/main/java/pt/ulusofona/copelabs/ndn/android/ui/adapter/WifiP2pPeerAdapter.java",
"license": "lgpl-3.0",
"size": 3181
}
|
[
"android.view.View",
"android.widget.TextView",
"pt.ulusofona.copelabs.ndn.android.umobile.wifip2p.OpportunisticPeer"
] |
import android.view.View; import android.widget.TextView; import pt.ulusofona.copelabs.ndn.android.umobile.wifip2p.OpportunisticPeer;
|
import android.view.*; import android.widget.*; import pt.ulusofona.copelabs.ndn.android.umobile.wifip2p.*;
|
[
"android.view",
"android.widget",
"pt.ulusofona.copelabs"
] |
android.view; android.widget; pt.ulusofona.copelabs;
| 1,645,500
|
/**
 * Copies this file to the specified target, wrapping any I/O failure in an
 * exception that names both endpoints.
 */
public void copyTo(FilePath target) throws IOException, InterruptedException {
    // The catch also covers exceptions thrown while opening the target stream.
    try (OutputStream destination = target.write()) {
        copyTo(destination);
    } catch (IOException e) {
        throw new IOException("Failed to copy "+this+" to "+target,e);
    }
}
|
void function(FilePath target) throws IOException, InterruptedException { try { try (OutputStream out = target.write()) { copyTo(out); } } catch (IOException e) { throw new IOException(STR+this+STR+target,e); } }
|
/**
* Copies this file to the specified target.
*/
|
Copies this file to the specified target
|
copyTo
|
{
"repo_name": "batmat/jenkins",
"path": "core/src/main/java/hudson/FilePath.java",
"license": "mit",
"size": 132867
}
|
[
"java.io.IOException",
"java.io.OutputStream"
] |
import java.io.IOException; import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 558,626
|
/**
 * Live has notified this server that it is going to stop. A {@code null}
 * message is treated as a replication failure, causing the quorum to exit;
 * otherwise the fail-over is delegated to the quorum.
 */
public void failOver(final ReplicationLiveIsStoppingMessage.LiveStopping finalMessage) {
    if (finalMessage == null) {
        backupQuorum.causeExit(FAILURE_REPLICATING);
        return;
    }
    backupQuorum.failOver(finalMessage);
}
|
void function(final ReplicationLiveIsStoppingMessage.LiveStopping finalMessage) { if (finalMessage == null) { backupQuorum.causeExit(FAILURE_REPLICATING); } else { backupQuorum.failOver(finalMessage); } }
|
/**
* Live has notified this server that it is going to stop.
*/
|
Live has notified this server that it is going to stop
|
failOver
|
{
"repo_name": "wildfly/activemq-artemis",
"path": "artemis-server/src/main/java/org/apache/activemq/artemis/core/server/impl/SharedNothingBackupActivation.java",
"license": "apache-2.0",
"size": 19196
}
|
[
"org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ReplicationLiveIsStoppingMessage"
] |
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ReplicationLiveIsStoppingMessage;
|
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.*;
|
[
"org.apache.activemq"
] |
org.apache.activemq;
| 2,100,331
|
/**
 * Closes the GaianResult. All thread resources and structures are cleared down.
 * Timing statistics are finalised the first time the scan completes, then the
 * fetch threads are told to quiesce and both row buffers are drained so that
 * threads blocked on them can exit.
 *
 * @exception SQLException on unexpected JDBC error
 */
public void close() throws SQLException {
logDerbyThreadInfo("Entering GaianResult.close()");
// First close of an incomplete scan: record query/fetch durations now.
if ( !isScanCompleted ) {
isScanCompleted = true;
long timeNow = System.currentTimeMillis();
queryTime = timeNow - queryStartTime;
fetchTime = timeNow - queryFetchStartTime;
if ( !gaianStatementNode.isAwaitingReFetch() ) queryTimeIncludingRefetch = queryTime;
} else if ( -1 == queryTimeIncludingRefetch && !gaianStatementNode.isAwaitingReFetch() )
queryTimeIncludingRefetch = System.currentTimeMillis() - queryStartTime;
// Stop all threads that are fetching rows from the data sources and populating the resultRowsBufer.
// All of them will be recycled at the end of execution as per normal.
// The last of these will write a poison pill to the buffer.
numBufferedRowsBuilt = 0; // we are about to clear the recyled rows, so make sure we don't wait for one.
isQuiesceQuery = true;
// Release the execution threads blocked on the blocking buffers by clearing the buffers. (these should never be null really)
if ( null != resultRowsBuffer ) clearAndrecycleBufferedCells( resultRowsBuffer ); // resultRowsBuffer.clear();
if ( null != recycledRowsBuffer ) clearAndrecycleBufferedCells( recycledRowsBuffer ); // recycledRowsBuffer.clear();
executingDataSourceWrappers.clear(); // just to be sure
// if ( null != resultRowsBuffer ) resultRowsBuffer.clear();
// if ( null != recycledRowsBuffer ) recycledRowsBuffer.clear();
// rowsBatch and dataSources should not be tampered with while the threads flush themselves out.
// In any case they have little impact in that they are small and don't hold threads back.
// if ( null != rowsBatch ) {
// for ( int i=0; i < rowsBatch.length; i++ ) {
// DataValueDescriptor[] dvdr = rowsBatch[i];
// for ( int j=0; j < dvdr.length; j++ ) dvdr[j] = null;
// rowsBatch[i] = null;
// }
// rowsBatch = null;
// }
// if ( null != dataSources ) dataSources.clear();
}
|
void function() throws SQLException { logDerbyThreadInfo(STR); if ( !isScanCompleted ) { isScanCompleted = true; long timeNow = System.currentTimeMillis(); queryTime = timeNow - queryStartTime; fetchTime = timeNow - queryFetchStartTime; if ( !gaianStatementNode.isAwaitingReFetch() ) queryTimeIncludingRefetch = queryTime; } else if ( -1 == queryTimeIncludingRefetch && !gaianStatementNode.isAwaitingReFetch() ) queryTimeIncludingRefetch = System.currentTimeMillis() - queryStartTime; numBufferedRowsBuilt = 0; isQuiesceQuery = true; if ( null != resultRowsBuffer ) clearAndrecycleBufferedCells( resultRowsBuffer ); if ( null != recycledRowsBuffer ) clearAndrecycleBufferedCells( recycledRowsBuffer ); executingDataSourceWrappers.clear(); }
|
/**
* Closes the GaianResult. All thread resources and structures are cleared down
*
* @exception SQLException on unexpected JDBC error
*/
|
Closes the GaianResult. All thread resources and structures are cleared down
|
close
|
{
"repo_name": "gaiandb/gaiandb",
"path": "java/Asset/Engine/com/ibm/gaiandb/GaianResult.java",
"license": "epl-1.0",
"size": 59554
}
|
[
"java.sql.SQLException"
] |
import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 259,254
|
/**
 * Gets a global output stream on a file in the current working directory. This stream is first
 * opened if necessary. The name of the file is
 * {@code "compilations-" + System.currentTimeMillis() + ".cfg"}.
 *
 * @return the global output stream or {@code null} if there was an error opening the file for
 *         writing
 */
public static synchronized OutputStream globalOut() {
if (globalOut == null) {
File file = new File("compilations-" + System.currentTimeMillis() + ".cfg");
try {
globalOut = new FileOutputStream(file);
} catch (FileNotFoundException e) {
// Best-effort: warn and fall through, leaving globalOut null.
TTY.println("WARNING: Could not open " + file.getAbsolutePath());
}
}
return globalOut;
}

// Log stream wrapping the underlying output stream supplied at construction.
protected final LogStream out;

/**
 * Creates a printer that writes to the given output stream.
 */
public CompilationPrinter(OutputStream os) {
out = new LogStream(os);
}
|
static synchronized OutputStream function() { if (globalOut == null) { File file = new File(STR + System.currentTimeMillis() + ".cfg"); try { globalOut = new FileOutputStream(file); } catch (FileNotFoundException e) { TTY.println(STR + file.getAbsolutePath()); } } return globalOut; } protected final LogStream out; public CompilationPrinter(OutputStream os) { out = new LogStream(os); }
|
/**
* Gets a global output stream on a file in the current working directory. This stream is first
* opened if necessary. The name of the file is
* {@code "compilations-" + System.currentTimeMillis() + ".cfg"}.
*
* @return the global output stream or {@code null} if there was an error opening the file for
* writing
*/
|
Gets a global output stream on a file in the current working directory. This stream is first opened if necessary. The name of the file is "compilations-" + System.currentTimeMillis() + ".cfg"
|
globalOut
|
{
"repo_name": "md-5/jdk10",
"path": "src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.printer/src/org/graalvm/compiler/printer/CompilationPrinter.java",
"license": "gpl-2.0",
"size": 8789
}
|
[
"java.io.File",
"java.io.FileNotFoundException",
"java.io.FileOutputStream",
"java.io.OutputStream",
"org.graalvm.compiler.debug.LogStream",
"org.graalvm.compiler.debug.TTY"
] |
import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.OutputStream; import org.graalvm.compiler.debug.LogStream; import org.graalvm.compiler.debug.TTY;
|
import java.io.*; import org.graalvm.compiler.debug.*;
|
[
"java.io",
"org.graalvm.compiler"
] |
java.io; org.graalvm.compiler;
| 2,250,939
|
private static void evaluateExpression(Operator operator,
Stack<AnyLicenseInfo> operandStack) throws InvalidSPDXAnalysisException {
if (operator == Operator.OR_LATER) {
// unary operator
AnyLicenseInfo license = operandStack.pop();
if (!(license instanceof SimpleLicensingInfo)) {
throw(new LicenseParserException("Missing license for the '+' or later operator"));
}
operandStack.push(new OrLaterOperator((SimpleLicensingInfo)license));
} else {
// binary operator
AnyLicenseInfo operand2 = operandStack.pop();
AnyLicenseInfo operand1 = operandStack.pop();
if (operand1 == null || operand2 == null) {
throw(new LicenseParserException("Missing operands for the "+operator.toString()+" operator"));
}
operandStack.push(evaluateBinary(operator, operand1, operand2));
}
}
|
static void function(Operator operator, Stack<AnyLicenseInfo> operandStack) throws InvalidSPDXAnalysisException { if (operator == Operator.OR_LATER) { AnyLicenseInfo license = operandStack.pop(); if (!(license instanceof SimpleLicensingInfo)) { throw(new LicenseParserException(STR)); } operandStack.push(new OrLaterOperator((SimpleLicensingInfo)license)); } else { AnyLicenseInfo operand2 = operandStack.pop(); AnyLicenseInfo operand1 = operandStack.pop(); if (operand1 == null operand2 == null) { throw(new LicenseParserException(STR+operator.toString()+STR)); } operandStack.push(evaluateBinary(operator, operand1, operand2)); } }
|
/**
* Evaluate the given operator using paramaeters in the parameter stack
* @param operator
* @param operandStack
* @throws InvalidSPDXAnalysisException
*/
|
Evaluate the given operator using paramaeters in the parameter stack
|
evaluateExpression
|
{
"repo_name": "goneall/SPDX-Tools",
"path": "src/org/spdx/rdfparser/license/LicenseExpressionParser.java",
"license": "apache-2.0",
"size": 10865
}
|
[
"java.util.Stack",
"org.spdx.rdfparser.InvalidSPDXAnalysisException"
] |
import java.util.Stack; import org.spdx.rdfparser.InvalidSPDXAnalysisException;
|
import java.util.*; import org.spdx.rdfparser.*;
|
[
"java.util",
"org.spdx.rdfparser"
] |
java.util; org.spdx.rdfparser;
| 953,916
|
public Route addRoute(IPAddress destination, IPAddress mask, IPAddress gateway, Interface intface) {
Route route = new Route(destination, gateway, mask, intface);
if (destination.getAddress().equals("0.0.0.0") && gateway.getAddress().equals("0.0.0.0")) { //Default route
if (hasDefaultRoute()) {
this.getRouteList().remove(this.getRouteList().size() - 1);
this.getRouteList().add(route);
} else {
this.getRouteList().add(route);
}
} else {
if (hasDefaultRoute()) {
this.getRouteList().add(this.getRouteList().size() - 1, route);
} else {
this.getRouteList().add(route);
}
}
return route;
}
|
Route function(IPAddress destination, IPAddress mask, IPAddress gateway, Interface intface) { Route route = new Route(destination, gateway, mask, intface); if (destination.getAddress().equals(STR) && gateway.getAddress().equals(STR)) { if (hasDefaultRoute()) { this.getRouteList().remove(this.getRouteList().size() - 1); this.getRouteList().add(route); } else { this.getRouteList().add(route); } } else { if (hasDefaultRoute()) { this.getRouteList().add(this.getRouteList().size() - 1, route); } else { this.getRouteList().add(route); } } return route; }
|
/**
* Add a new route in the routing table
*
* @param destination the destination IP Address
* @param mask the destination mask
* @param gateway the gateway
* @param intface the output interface
* @return the added route
*/
|
Add a new route in the routing table
|
addRoute
|
{
"repo_name": "DOCTOR-ANR/cybercaptor-server",
"path": "src/main/java/org/fiware/cybercaptor/server/topology/asset/component/RoutingTable.java",
"license": "gpl-3.0",
"size": 7358
}
|
[
"org.fiware.cybercaptor.server.topology.asset.IPAddress"
] |
import org.fiware.cybercaptor.server.topology.asset.IPAddress;
|
import org.fiware.cybercaptor.server.topology.asset.*;
|
[
"org.fiware.cybercaptor"
] |
org.fiware.cybercaptor;
| 462,479
|
public static void generateBorder(String inputFileName,
String outputFileName, String outputFormat, int spritesX,
int spritesY, final int spriteSheetBorder) throws IOException {
final ArrayBitmap srcImage = new ArrayBitmap(inputFileName);
final int spriteWidth = srcImage.getWidth() / spritesX;
final int spriteHeight = srcImage.getHeight() / spritesY;
final int borderedSpriteWidth = spriteWidth + spriteSheetBorder * 2;
final int borderedSpriteHeight = spriteHeight + spriteSheetBorder * 2;
final ArrayBitmap destImage = new ArrayBitmap(borderedSpriteWidth
* spritesX, borderedSpriteHeight * spritesY);
|
static void function(String inputFileName, String outputFileName, String outputFormat, int spritesX, int spritesY, final int spriteSheetBorder) throws IOException { final ArrayBitmap srcImage = new ArrayBitmap(inputFileName); final int spriteWidth = srcImage.getWidth() / spritesX; final int spriteHeight = srcImage.getHeight() / spritesY; final int borderedSpriteWidth = spriteWidth + spriteSheetBorder * 2; final int borderedSpriteHeight = spriteHeight + spriteSheetBorder * 2; final ArrayBitmap destImage = new ArrayBitmap(borderedSpriteWidth * spritesX, borderedSpriteHeight * spritesY);
|
/**
* Generates borders on an existing sprite sheet with no borders.
*
* @param inputFileName
* The name and path to the original sprite sheet with no
* borders.
* @param outputFileName
* The name and path to the desired output location.
* @param outputFormat
* The image format to be used. For example, "png" and "jpg" are
* valid values.
* @param spritesX
* Number of sprites on the X axis.
* @param spritesY
* Number of sprites on the Y axis.
* @param spriteSheetBorder
* The number of pixels to border each sprite with on all sides.
* @throws IOException
*/
|
Generates borders on an existing sprite sheet with no borders
|
generateBorder
|
{
"repo_name": "BennyQBD/2DPlatformer",
"path": "src/engine/util/preprocessing/SpriteSheetBorderMaker.java",
"license": "bsd-2-clause",
"size": 3519
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,352,332
|
protected Organism getOrganism() throws ObjectStoreException {
if (org == null) {
org = getDirectDataLoader().createObject(Organism.class);
org.setTaxonId(new Integer(TAXON_ID));
getDirectDataLoader().store(org);
}
return org;
}
|
Organism function() throws ObjectStoreException { if (org == null) { org = getDirectDataLoader().createObject(Organism.class); org.setTaxonId(new Integer(TAXON_ID)); getDirectDataLoader().store(org); } return org; }
|
/**
* Get and store() the Organism object to reference when creating new objects.
* @throws ObjectStoreException if there is a problem
* @return the new Organism
*/
|
Get and store() the Organism object to reference when creating new objects
|
getOrganism
|
{
"repo_name": "JoeCarlson/intermine",
"path": "bio/sources/ensembl/ensembl-snp/main/src/org/intermine/bio/dataconversion/EnsembSnpLoaderTask.java",
"license": "lgpl-2.1",
"size": 13251
}
|
[
"org.intermine.model.bio.Organism",
"org.intermine.objectstore.ObjectStoreException"
] |
import org.intermine.model.bio.Organism; import org.intermine.objectstore.ObjectStoreException;
|
import org.intermine.model.bio.*; import org.intermine.objectstore.*;
|
[
"org.intermine.model",
"org.intermine.objectstore"
] |
org.intermine.model; org.intermine.objectstore;
| 1,001,515
|
public static OffsetTime asTimeOnly(long millis) {
return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), UTC);
}
|
static OffsetTime function(long millis) { return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), UTC); }
|
/**
* Creates an date for SQL TIME type from the millis since epoch.
*/
|
Creates an date for SQL TIME type from the millis since epoch
|
asTimeOnly
|
{
"repo_name": "HonzaKral/elasticsearch",
"path": "x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java",
"license": "apache-2.0",
"size": 8764
}
|
[
"java.time.Instant",
"java.time.OffsetTime"
] |
import java.time.Instant; import java.time.OffsetTime;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 679,248
|
public void getSelectedVerificationMethod(@NonNull ApiCallback<VerificationMethod> callback) {
makeGetCall(SELECTED_VERIFICATION, null, VerificationMethod.class, callback);
}
|
void function(@NonNull ApiCallback<VerificationMethod> callback) { makeGetCall(SELECTED_VERIFICATION, null, VerificationMethod.class, callback); }
|
/**
* Get selected verification method to verify the identity of the cardholder
*
* @param callback result callback
*/
|
Get selected verification method to verify the identity of the cardholder
|
getSelectedVerificationMethod
|
{
"repo_name": "fitpay/fitpay-android-sdk",
"path": "fitpay/src/main/java/com/fitpay/android/api/models/card/CreditCard.java",
"license": "mit",
"size": 11308
}
|
[
"androidx.annotation.NonNull",
"com.fitpay.android.api.callbacks.ApiCallback"
] |
import androidx.annotation.NonNull; import com.fitpay.android.api.callbacks.ApiCallback;
|
import androidx.annotation.*; import com.fitpay.android.api.callbacks.*;
|
[
"androidx.annotation",
"com.fitpay.android"
] |
androidx.annotation; com.fitpay.android;
| 2,543,071
|
private boolean isFeatureInList(final FeatureWithId f, final List<FeatureWithId> list) {
for (final FeatureWithId tmp : list) {
if (tmp.getId() == f.getId()) {
return true;
}
}
return false;
}
|
boolean function(final FeatureWithId f, final List<FeatureWithId> list) { for (final FeatureWithId tmp : list) { if (tmp.getId() == f.getId()) { return true; } } return false; }
|
/**
* DOCUMENT ME!
*
* @param f DOCUMENT ME!
* @param list DOCUMENT ME!
*
* @return DOCUMENT ME!
*/
|
DOCUMENT ME
|
isFeatureInList
|
{
"repo_name": "cismet/watergis-client",
"path": "src/main/java/de/cismet/watergis/utils/AbstractSearchAndSelectThread.java",
"license": "lgpl-3.0",
"size": 6058
}
|
[
"de.cismet.cismap.commons.features.FeatureWithId",
"java.util.List"
] |
import de.cismet.cismap.commons.features.FeatureWithId; import java.util.List;
|
import de.cismet.cismap.commons.features.*; import java.util.*;
|
[
"de.cismet.cismap",
"java.util"
] |
de.cismet.cismap; java.util;
| 1,518,643
|
public void testIDCHAR()
throws Exception
{
InputStream is = getClass().getResourceAsStream("idchar.testfile");
System.out.println("\n\n basedir=" + System.getProperty("basedir"));
assertNotNull(is);
Archive ar = new Archive("idchar_test.txt,v", is);
}
|
void function() throws Exception { InputStream is = getClass().getResourceAsStream(STR); System.out.println(STR + System.getProperty(STR)); assertNotNull(is); Archive ar = new Archive(STR, is); }
|
/**
* Test to check that all the parser
*/
|
Test to check that all the parser
|
testIDCHAR
|
{
"repo_name": "conder/sakai",
"path": "rwiki/rwiki-util/jrcs/src/completetest/org/apache/commons/jrcs/rcs/ParsingTest.java",
"license": "apache-2.0",
"size": 3279
}
|
[
"java.io.InputStream"
] |
import java.io.InputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,533,414
|
public com.google.longrunning.Operation updateCluster(com.google.bigtable.admin.v2.Cluster request) {
return blockingUnaryCall(
getChannel(), METHOD_UPDATE_CLUSTER, getCallOptions(), request);
}
|
com.google.longrunning.Operation function(com.google.bigtable.admin.v2.Cluster request) { return blockingUnaryCall( getChannel(), METHOD_UPDATE_CLUSTER, getCallOptions(), request); }
|
/**
* <pre>
* Updates a cluster within an instance.
* </pre>
*/
|
<code> Updates a cluster within an instance. </code>
|
updateCluster
|
{
"repo_name": "rameshdharan/cloud-bigtable-client",
"path": "bigtable-client-core-parent/bigtable-protos/src/generated/java/services/com/google/bigtable/admin/v2/BigtableInstanceAdminGrpc.java",
"license": "apache-2.0",
"size": 34248
}
|
[
"io.grpc.stub.ClientCalls"
] |
import io.grpc.stub.ClientCalls;
|
import io.grpc.stub.*;
|
[
"io.grpc.stub"
] |
io.grpc.stub;
| 2,896,460
|
public static <K extends Comparable, V extends Comparable> Map<K, V> sortByKeys(final Map<K, V> map) {
List<K> keys = new LinkedList<K>(map.keySet());
Collections.sort(keys);
//LinkedHashMap will keep the keys in the order they are inserted
//which is currently sorted on natural ordering
Map<K, V> sortedMap = new LinkedHashMap<K, V>();
for (K key: keys) {
sortedMap.put(key, map.get(key));
}
return sortedMap;
}
|
static <K extends Comparable, V extends Comparable> Map<K, V> function(final Map<K, V> map) { List<K> keys = new LinkedList<K>(map.keySet()); Collections.sort(keys); Map<K, V> sortedMap = new LinkedHashMap<K, V>(); for (K key: keys) { sortedMap.put(key, map.get(key)); } return sortedMap; }
|
/**
* Sorts the available tags from the DB.
* @param map The available tags
* @param <K> String
* @param <V> Boolean
* @return <K extends Comparable,V extends Comparable> Map<K,V>
* @see
* @since 1.0
*/
|
Sorts the available tags from the DB
|
sortByKeys
|
{
"repo_name": "ALIADA/aliada-tool",
"path": "aliada/aliada-user-interface/src/main/java/eu/aliada/gui/action/Methods.java",
"license": "gpl-3.0",
"size": 21777
}
|
[
"java.util.Collections",
"java.util.LinkedHashMap",
"java.util.LinkedList",
"java.util.List",
"java.util.Map"
] |
import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,928,993
|
Optional<SubRange> toSubRange(
ParserRuleContext messageCtx, A10Parser.Trunk_ethernet_interface_rangeContext ctx) {
Optional<Integer> maybeFrom;
Optional<Integer> maybeTo;
maybeFrom = toInteger(messageCtx, ctx.num);
maybeTo = toInteger(messageCtx, ctx.to);
if (!maybeFrom.isPresent() || !maybeTo.isPresent()) {
// Already warned
return Optional.empty();
}
int from = maybeFrom.get();
int to = maybeTo.get();
if (from > to) {
warn(
ctx,
"Invalid range for trunk interface reference, 'from' must not be greater than 'to'.");
return Optional.empty();
}
return Optional.of(new SubRange(from, to));
}
|
Optional<SubRange> toSubRange( ParserRuleContext messageCtx, A10Parser.Trunk_ethernet_interface_rangeContext ctx) { Optional<Integer> maybeFrom; Optional<Integer> maybeTo; maybeFrom = toInteger(messageCtx, ctx.num); maybeTo = toInteger(messageCtx, ctx.to); if (!maybeFrom.isPresent() !maybeTo.isPresent()) { return Optional.empty(); } int from = maybeFrom.get(); int to = maybeTo.get(); if (from > to) { warn( ctx, STR); return Optional.empty(); } return Optional.of(new SubRange(from, to)); }
|
/**
* Convert interface range context to a {@link SubRange}. Returns {@link Optional#empty()} if the
* context is invalid, e.g. {@code from} is greater than {@code to}.
*/
|
Convert interface range context to a <code>SubRange</code>. Returns <code>Optional#empty()</code> if the context is invalid, e.g. from is greater than to
|
toSubRange
|
{
"repo_name": "arifogel/batfish",
"path": "projects/batfish/src/main/java/org/batfish/vendor/a10/grammar/A10ConfigurationBuilder.java",
"license": "apache-2.0",
"size": 107744
}
|
[
"java.util.Optional",
"org.antlr.v4.runtime.ParserRuleContext",
"org.batfish.datamodel.SubRange"
] |
import java.util.Optional; import org.antlr.v4.runtime.ParserRuleContext; import org.batfish.datamodel.SubRange;
|
import java.util.*; import org.antlr.v4.runtime.*; import org.batfish.datamodel.*;
|
[
"java.util",
"org.antlr.v4",
"org.batfish.datamodel"
] |
java.util; org.antlr.v4; org.batfish.datamodel;
| 1,016,343
|
public void addRequiredColumnPriv( ColumnDescriptor column)
{
if( requiredColumnPrivileges == null // Using old style authorization
|| currPrivType == Authorizer.NULL_PRIV
|| currPrivType == Authorizer.DELETE_PRIV // Table privilege only
|| currPrivType == Authorizer.INSERT_PRIV // Table privilege only
|| currPrivType == Authorizer.TRIGGER_PRIV // Table privilege only
|| currPrivType == Authorizer.EXECUTE_PRIV
|| column == null)
{
return;
}
TableDescriptor td = column.getTableDescriptor();
if (td == null)
return;
if (td.getTableType() ==
TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) {
return; // no priv needed, it is per session anyway
}
UUID tableUUID = td.getUUID();
//DERBY-4191
if( currPrivType == Authorizer.MIN_SELECT_PRIV){
// If we are here for MIN_SELECT_PRIV requirement, then first
// check if there is already a SELECT privilege requirement on any
// of the columns in the table, or on the table itself. If yes,
// then we do not need to add MIN_SELECT_PRIV requirement for the
// table because that requirement is already getting satisfied with
// the already existing SELECT privilege requirement.
StatementTablePermission key = new StatementTablePermission(
tableUUID, Authorizer.SELECT_PRIV);
if (requiredColumnPrivileges.containsKey(key) ||
requiredTablePrivileges.containsKey(key)) {
return;
}
}
if( currPrivType == Authorizer.SELECT_PRIV){
//If we are here for SELECT_PRIV requirement, then first check
//if there is already any MIN_SELECT_PRIV privilege required
//on this table. If yes, then that requirement will be fulfilled
//by the SELECT_PRIV requirement we are adding now. Because of
//that, remove the MIN_SELECT_PRIV privilege requirement
StatementTablePermission key = new StatementTablePermission(
tableUUID, Authorizer.MIN_SELECT_PRIV);
requiredColumnPrivileges.remove(key);
}
StatementTablePermission key = new StatementTablePermission( tableUUID, currPrivType);
StatementColumnPermission tableColumnPrivileges
= requiredColumnPrivileges.get( key);
if( tableColumnPrivileges == null)
{
tableColumnPrivileges = new StatementColumnPermission( tableUUID,
currPrivType,
new FormatableBitSet( td.getNumberOfColumns()));
requiredColumnPrivileges.put(key, tableColumnPrivileges);
}
tableColumnPrivileges.getColumns().set(column.getPosition() - 1);
} // end of addRequiredColumnPriv
|
void function( ColumnDescriptor column) { if( requiredColumnPrivileges == null currPrivType == Authorizer.NULL_PRIV currPrivType == Authorizer.DELETE_PRIV currPrivType == Authorizer.INSERT_PRIV currPrivType == Authorizer.TRIGGER_PRIV currPrivType == Authorizer.EXECUTE_PRIV column == null) { return; } TableDescriptor td = column.getTableDescriptor(); if (td == null) return; if (td.getTableType() == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) { return; } UUID tableUUID = td.getUUID(); if( currPrivType == Authorizer.MIN_SELECT_PRIV){ StatementTablePermission key = new StatementTablePermission( tableUUID, Authorizer.SELECT_PRIV); if (requiredColumnPrivileges.containsKey(key) requiredTablePrivileges.containsKey(key)) { return; } } if( currPrivType == Authorizer.SELECT_PRIV){ StatementTablePermission key = new StatementTablePermission( tableUUID, Authorizer.MIN_SELECT_PRIV); requiredColumnPrivileges.remove(key); } StatementTablePermission key = new StatementTablePermission( tableUUID, currPrivType); StatementColumnPermission tableColumnPrivileges = requiredColumnPrivileges.get( key); if( tableColumnPrivileges == null) { tableColumnPrivileges = new StatementColumnPermission( tableUUID, currPrivType, new FormatableBitSet( td.getNumberOfColumns())); requiredColumnPrivileges.put(key, tableColumnPrivileges); } tableColumnPrivileges.getColumns().set(column.getPosition() - 1); }
|
/**
* Add a column privilege to the list of used column privileges.
*
* @param column The column whose privileges we're interested in.
*/
|
Add a column privilege to the list of used column privileges
|
addRequiredColumnPriv
|
{
"repo_name": "apache/derby",
"path": "java/org.apache.derby.engine/org/apache/derby/impl/sql/compile/CompilerContextImpl.java",
"license": "apache-2.0",
"size": 32433
}
|
[
"org.apache.derby.iapi.services.io.FormatableBitSet",
"org.apache.derby.iapi.sql.conn.Authorizer",
"org.apache.derby.iapi.sql.dictionary.ColumnDescriptor",
"org.apache.derby.iapi.sql.dictionary.StatementColumnPermission",
"org.apache.derby.iapi.sql.dictionary.StatementTablePermission",
"org.apache.derby.iapi.sql.dictionary.TableDescriptor"
] |
import org.apache.derby.iapi.services.io.FormatableBitSet; import org.apache.derby.iapi.sql.conn.Authorizer; import org.apache.derby.iapi.sql.dictionary.ColumnDescriptor; import org.apache.derby.iapi.sql.dictionary.StatementColumnPermission; import org.apache.derby.iapi.sql.dictionary.StatementTablePermission; import org.apache.derby.iapi.sql.dictionary.TableDescriptor;
|
import org.apache.derby.iapi.services.io.*; import org.apache.derby.iapi.sql.conn.*; import org.apache.derby.iapi.sql.dictionary.*;
|
[
"org.apache.derby"
] |
org.apache.derby;
| 196,404
|
private void checkCancel(String transferHandle) throws TransferException
{
TransferStatus status = transferMonitoring.get(transferHandle);
if(status != null)
{
if(!status.cancelInProgress && status.cancelMe)
{
status.cancelInProgress = true;
throw new TransferCancelledException();
}
}
}
|
void function(String transferHandle) throws TransferException { TransferStatus status = transferMonitoring.get(transferHandle); if(status != null) { if(!status.cancelInProgress && status.cancelMe) { status.cancelInProgress = true; throw new TransferCancelledException(); } } }
|
/**
* Check whether the specified transfer should be cancelled.
* @param transferHandle
* @throws TransferException - the transfer has been cancelled.
*/
|
Check whether the specified transfer should be cancelled
|
checkCancel
|
{
"repo_name": "daniel-he/community-edition",
"path": "projects/repository/source/java/org/alfresco/repo/transfer/TransferServiceImpl2.java",
"license": "lgpl-3.0",
"size": 58252
}
|
[
"org.alfresco.service.cmr.transfer.TransferCancelledException",
"org.alfresco.service.cmr.transfer.TransferException"
] |
import org.alfresco.service.cmr.transfer.TransferCancelledException; import org.alfresco.service.cmr.transfer.TransferException;
|
import org.alfresco.service.cmr.transfer.*;
|
[
"org.alfresco.service"
] |
org.alfresco.service;
| 731,401
|
protected void renameInode(String source, String destination)
throws FileNotFoundException, PermissionDeniedException {
File file = new File(getRealPath(source));
if (!file.exists()) {
throw new FileNotFoundException(source + " cannot be found");
}
boolean result = file.renameTo(new File(getRealPath(destination)));
if (!result) {
throw new PermissionDeniedException("Cannot rename " + source
+ " to " + destination);
}
}
|
void function(String source, String destination) throws FileNotFoundException, PermissionDeniedException { File file = new File(getRealPath(source)); if (!file.exists()) { throw new FileNotFoundException(source + STR); } boolean result = file.renameTo(new File(getRealPath(destination))); if (!result) { throw new PermissionDeniedException(STR + source + STR + destination); } }
|
/**
* Rename the file/directory.
*
* @param source
* @param destination
* @throws FileNotFoundException
* if there's no such file/dir.
* @throws PermissionDeniedException
* if there's no permission to rename.
*/
|
Rename the file/directory
|
renameInode
|
{
"repo_name": "dr3plus/dr3",
"path": "src/master/src/org/drftpd/vfs/VirtualFileSystem.java",
"license": "gpl-2.0",
"size": 14635
}
|
[
"java.io.File",
"java.io.FileNotFoundException",
"org.drftpd.io.PermissionDeniedException"
] |
import java.io.File; import java.io.FileNotFoundException; import org.drftpd.io.PermissionDeniedException;
|
import java.io.*; import org.drftpd.io.*;
|
[
"java.io",
"org.drftpd.io"
] |
java.io; org.drftpd.io;
| 2,856,373
|
@Test
public void testTTL_appliedFromLastUpdate() {
IMap<Integer, String> map = createSimpleMap();
map.put(1, "value0", 1, SECONDS);
map.put(1, "value1", 2, SECONDS);
long sleepRef = currentTimeMillis();
map.put(1, "value2", 300, SECONDS);
sleepAtMostSeconds(sleepRef, 2);
assertTrue(map.containsKey(1));
}
|
void function() { IMap<Integer, String> map = createSimpleMap(); map.put(1, STR, 1, SECONDS); map.put(1, STR, 2, SECONDS); long sleepRef = currentTimeMillis(); map.put(1, STR, 300, SECONDS); sleepAtMostSeconds(sleepRef, 2); assertTrue(map.containsKey(1)); }
|
/**
* We are defining TTL as time being passed since creation time of an entry.
*/
|
We are defining TTL as time being passed since creation time of an entry
|
testTTL_appliedFromLastUpdate
|
{
"repo_name": "jerrinot/hazelcast",
"path": "hazelcast/src/test/java/com/hazelcast/map/EvictionTest.java",
"license": "apache-2.0",
"size": 50452
}
|
[
"java.lang.System",
"org.junit.Assert"
] |
import java.lang.System; import org.junit.Assert;
|
import java.lang.*; import org.junit.*;
|
[
"java.lang",
"org.junit"
] |
java.lang; org.junit;
| 587,152
|
public static @CheckResult @ColorInt int blendColors(@ColorInt int color1,
@ColorInt int color2,
@FloatRange(from = 0f, to = 1f) float ratio) {
final float inverseRatio = 1f - ratio;
float a = (Color.alpha(color1) * inverseRatio) + (Color.alpha(color2) * ratio);
float r = (Color.red(color1) * inverseRatio) + (Color.red(color2) * ratio);
float g = (Color.green(color1) * inverseRatio) + (Color.green(color2) * ratio);
float b = (Color.blue(color1) * inverseRatio) + (Color.blue(color2) * ratio);
return Color.argb((int) a, (int) r, (int) g, (int) b);
}
|
static @CheckResult @ColorInt int function(@ColorInt int color1, @ColorInt int color2, @FloatRange(from = 0f, to = 1f) float ratio) { final float inverseRatio = 1f - ratio; float a = (Color.alpha(color1) * inverseRatio) + (Color.alpha(color2) * ratio); float r = (Color.red(color1) * inverseRatio) + (Color.red(color2) * ratio); float g = (Color.green(color1) * inverseRatio) + (Color.green(color2) * ratio); float b = (Color.blue(color1) * inverseRatio) + (Color.blue(color2) * ratio); return Color.argb((int) a, (int) r, (int) g, (int) b); }
|
/**
* Blend {@code color1} and {@code color2} using the given ratio.
*
* @param ratio of which to blend. 0.0 will return {@code color1}, 0.5 will give an even blend,
* 1.0 will return {@code color2}.
*/
|
Blend color1 and color2 using the given ratio
|
blendColors
|
{
"repo_name": "liulinbo/Amumu",
"path": "app/src/main/java/io/plaidapp/util/ColorUtils.java",
"license": "apache-2.0",
"size": 6880
}
|
[
"android.graphics.Color",
"android.support.annotation.CheckResult",
"android.support.annotation.ColorInt",
"android.support.annotation.FloatRange"
] |
import android.graphics.Color; import android.support.annotation.CheckResult; import android.support.annotation.ColorInt; import android.support.annotation.FloatRange;
|
import android.graphics.*; import android.support.annotation.*;
|
[
"android.graphics",
"android.support"
] |
android.graphics; android.support;
| 2,796,208
|
public Builder setRDBConnection(DataSource documentStoreDataSource, DataSource blobStoreDataSource) {
this.documentStore = new RDBDocumentStore(documentStoreDataSource, this);
this.blobStore = new RDBBlobStore(blobStoreDataSource);
return this;
}
|
Builder function(DataSource documentStoreDataSource, DataSource blobStoreDataSource) { this.documentStore = new RDBDocumentStore(documentStoreDataSource, this); this.blobStore = new RDBBlobStore(blobStoreDataSource); return this; }
|
/**
* Sets a {@link DataSource}s to use for the RDB document and blob
* stores.
*
* @return this
*/
|
Sets a <code>DataSource</code>s to use for the RDB document and blob stores
|
setRDBConnection
|
{
"repo_name": "afilimonov/jackrabbit-oak",
"path": "oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java",
"license": "apache-2.0",
"size": 36762
}
|
[
"javax.sql.DataSource",
"org.apache.jackrabbit.oak.plugins.document.rdb.RDBBlobStore",
"org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore"
] |
import javax.sql.DataSource; import org.apache.jackrabbit.oak.plugins.document.rdb.RDBBlobStore; import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore;
|
import javax.sql.*; import org.apache.jackrabbit.oak.plugins.document.rdb.*;
|
[
"javax.sql",
"org.apache.jackrabbit"
] |
javax.sql; org.apache.jackrabbit;
| 83,823
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.