| method (stringlengths 13–441k) | clean_method (stringlengths 7–313k) | doc (stringlengths 17–17.3k) | comment (stringlengths 3–1.42k) | method_name (stringlengths 1–273) | extra (dict) | imports (list) | imports_info (stringlengths 19–34.8k) | cluster_imports_info (stringlengths 15–3.66k) | libraries (list) | libraries_info (stringlengths 6–661) | id (int64 0–2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
@Test
public void testGetAlgorithmSourceBackendSpecLoader()
throws BackendProviderSpecLoaderException {
BackendSpecLoader<?> result =
backendProvider.getAlgorithmSourceBackendSpecLoader();
assertNotNull(result);
}
|
void function() throws BackendProviderSpecLoaderException { BackendSpecLoader<?> result = backendProvider.getAlgorithmSourceBackendSpecLoader(); assertNotNull(result); }
|
/**
 * Test of getAlgorithmSourceBackendSpecLoader method, of class CommonsBackendProvider.
*/
|
Test of getAlgorithmSourceBackendSpecLoader method, of class CommonsBackendProvider
|
testGetAlgorithmSourceBackendSpecLoader
|
{
"repo_name": "eurekaclinical/protempa",
"path": "protempa-bp-serviceloader/src/test/java/org/protempa/bp/commons/CommonsBackendProviderTest.java",
"license": "apache-2.0",
"size": 10404
}
|
[
"org.junit.Assert",
"org.protempa.backend.BackendProviderSpecLoaderException",
"org.protempa.backend.BackendSpecLoader"
] |
import org.junit.Assert; import org.protempa.backend.BackendProviderSpecLoaderException; import org.protempa.backend.BackendSpecLoader;
|
import org.junit.*; import org.protempa.backend.*;
|
[
"org.junit",
"org.protempa.backend"
] |
org.junit; org.protempa.backend;
| 1,340,277
|
public static final void getFieldsFromPrevious( ComboVar comboVar, TransMeta transMeta, StepMeta stepMeta ) {
String selectedField = null;
int indexField = -1;
try {
RowMetaInterface r = transMeta.getPrevStepFields( stepMeta );
selectedField = comboVar.getText();
comboVar.removeAll();
if ( r != null && !r.isEmpty() ) {
r.getFieldNames();
comboVar.setItems( r.getFieldNames() );
indexField = r.indexOfValue( selectedField );
}
// Select value if possible...
if ( indexField > -1 ) {
comboVar.select( indexField );
} else {
if ( selectedField != null ) {
comboVar.setText( selectedField );
}
}
} catch ( KettleException ke ) {
new ErrorDialog( comboVar.getShell(),
BaseMessages.getString( PKG, "BaseStepDialog.FailedToGetFieldsPrevious.DialogTitle" ),
BaseMessages.getString( PKG, "BaseStepDialog.FailedToGetFieldsPrevious.DialogMessage" ), ke );
}
}
|
static final void function( ComboVar comboVar, TransMeta transMeta, StepMeta stepMeta ) { String selectedField = null; int indexField = -1; try { RowMetaInterface r = transMeta.getPrevStepFields( stepMeta ); selectedField = comboVar.getText(); comboVar.removeAll(); if ( r != null && !r.isEmpty() ) { r.getFieldNames(); comboVar.setItems( r.getFieldNames() ); indexField = r.indexOfValue( selectedField ); } if ( indexField > -1 ) { comboVar.select( indexField ); } else { if ( selectedField != null ) { comboVar.setText( selectedField ); } } } catch ( KettleException ke ) { new ErrorDialog( comboVar.getShell(), BaseMessages.getString( PKG, STR ), BaseMessages.getString( PKG, STR ), ke ); } }
|
/**
 * Gets fields from previous steps and populates a ComboVar.
*
* @param comboVar
* the Combo Box (with Variables) to populate
* @param transMeta
* the transformation metadata
* @param stepMeta
* the step metadata
*/
|
Gets fields from previous steps and populates a ComboVar
|
getFieldsFromPrevious
|
{
"repo_name": "andrei-viaryshka/pentaho-kettle",
"path": "ui/src/org/pentaho/di/ui/trans/step/BaseStepDialog.java",
"license": "apache-2.0",
"size": 45460
}
|
[
"org.pentaho.di.core.exception.KettleException",
"org.pentaho.di.core.row.RowMetaInterface",
"org.pentaho.di.i18n.BaseMessages",
"org.pentaho.di.trans.TransMeta",
"org.pentaho.di.trans.step.StepMeta",
"org.pentaho.di.ui.core.dialog.ErrorDialog",
"org.pentaho.di.ui.core.widget.ComboVar"
] |
import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.widget.ComboVar;
|
import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.i18n.*; import org.pentaho.di.trans.*; import org.pentaho.di.trans.step.*; import org.pentaho.di.ui.core.dialog.*; import org.pentaho.di.ui.core.widget.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 1,902,854
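A minimal caller sketch for the helper above, assuming a hypothetical Kettle step dialog; wField (a ComboVar), transMeta and stepMeta are assumed dialog fields, not part of the row itself. The combo is refreshed with upstream field names whenever it gains focus.

import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;

// Hypothetical snippet inside a BaseStepDialog subclass.
wField.addFocusListener(new FocusAdapter() {
    @Override
    public void focusGained(FocusEvent e) {
        // Repopulate the combo with field names from the previous step.
        BaseStepDialog.getFieldsFromPrevious(wField, transMeta, stepMeta);
    }
});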
|
@Override
public void removePersonFrom(Group g, Person p) throws PersonException {
if (g == null) {
throw new PersonException("Group == null");
}
if (p == null) {
throw new PersonException("Person == null");
}
String id = g.getId();
if (id == null) {
throw new PersonException("Grupo Sin Id");
}
groupCache.invalidate(id);
groupManager.removePersonFromGroup(g, p);
}
|
void function(Group g, Person p) throws PersonException { if (g == null) { throw new PersonException(STR); } if (p == null) { throw new PersonException(STR); } String id = g.getId(); if (id == null) { throw new PersonException(STR); } groupCache.invalidate(id); groupManager.removePersonFromGroup(g, p); }
|
/**
 * Removes a person from a group.
 * Throws an exception if the person does not exist.
*/
|
Removes a person from a group. Throws an exception if the person does not exist
|
removePersonFrom
|
{
"repo_name": "pablodanielrey/java",
"path": "person/personModel/src/main/java/ar/com/dcsys/model/GroupsManagerBean.java",
"license": "gpl-3.0",
"size": 7309
}
|
[
"ar.com.dcsys.data.group.Group",
"ar.com.dcsys.data.person.Person",
"ar.com.dcsys.exceptions.PersonException"
] |
import ar.com.dcsys.data.group.Group; import ar.com.dcsys.data.person.Person; import ar.com.dcsys.exceptions.PersonException;
|
import ar.com.dcsys.data.group.*; import ar.com.dcsys.data.person.*; import ar.com.dcsys.exceptions.*;
|
[
"ar.com.dcsys"
] |
ar.com.dcsys;
| 372,434
|
@Override
public void setValue(DataRow row, double value) {
for(Attribute a: this.innerAttributes)
a.setValue(row, value);
}
|
void function(DataRow row, double value) { for(Attribute a: this.innerAttributes) a.setValue(row, value); }
|
/**
* dummy function
*/
|
dummy function
|
setValue
|
{
"repo_name": "ntj/ComplexRapidMiner",
"path": "src/de/tud/inf/example/table/ComplexCompositeAttribute.java",
"license": "gpl-2.0",
"size": 3000
}
|
[
"com.rapidminer.example.Attribute",
"com.rapidminer.example.table.DataRow"
] |
import com.rapidminer.example.Attribute; import com.rapidminer.example.table.DataRow;
|
import com.rapidminer.example.*; import com.rapidminer.example.table.*;
|
[
"com.rapidminer.example"
] |
com.rapidminer.example;
| 1,091,185
|
private void generalAuthenticateMutualAuthentication() throws Exception {
// Calculate shared key k
byte[] k = cryptoSuite.generateSharedSecret(keyPCD.getEncodedPrivateKey(), keyPICC.getEncodedPublicKey());
// Derive key MAC
keyMAC = kdf.deriveMAC(k);
// Derive key ENC
keyENC = kdf.deriveENC(k);
// Calculate token T_PCD
AuthenticationToken tokenPCD = new AuthenticationToken(psi);
tokenPCD.generateToken(keyMAC, keyPICC.getEncodedPublicKey());
CardCommandAPDU gaMutualAuth = new GeneralAuthenticate((byte) 0x85, tokenPCD.toByteArray());
// Calculate token T_PICC
AuthenticationToken tokenPICC = new AuthenticationToken(psi);
tokenPICC.generateToken(keyMAC, keyPCD.getEncodedPublicKey());
try {
response = gaMutualAuth.transmit(dispatcher, slotHandle);
if (tokenPICC.verifyToken(response.getData(), specifiedCHAT)) {
currentCAR = tokenPICC.getCurrentCAR();
previousCAR = tokenPCD.getPreviousCAR();
} else {
throw new GeneralSecurityException("Cannot verify authentication token.");
}
} catch (APDUException e) {
logger.error(e.getMessage(), e);
int sw = e.getResponseAPDU().getSW();
if ((sw & (short) 0xFFF0) == (short) 0x63C0) {
retryCounter = (byte) (sw & (short) 0x000F);
if (retryCounter == (byte) 0x00) {
// The password is blocked.
logger.warn("The password is blocked. The password MUST be unblocked.");
throw new ProtocolException(
ECardConstants.Minor.IFD.PASSWORD_BLOCKED,
"The password is blocked. The password MUST be unblocked.");
} else if (retryCounter == (byte) 0x01) {
// The password is suspended.
logger.warn("The password is suspended. The password MUST be resumed.");
throw new ProtocolException(ECardConstants.Minor.IFD.PASSWORD_SUSPENDED,
"The password is suspended. The password MUST be resumed.");
} else if (retryCounter == (byte) 0x02) {
// The password is wrong.
logger.warn("The password is wrong.");
throw new ProtocolException(
ECardConstants.Minor.IFD.PASSWORD_ERROR,
"The password is wrong.");
}
} else {
throw new ProtocolException(
ECardConstants.Minor.IFD.AUTHENTICATION_FAILED, "Authentication failed.");
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
throw new ProtocolException(ECardConstants.Minor.IFD.UNKNOWN_ERROR, e.getMessage());
}
}
|
void function() throws Exception { byte[] k = cryptoSuite.generateSharedSecret(keyPCD.getEncodedPrivateKey(), keyPICC.getEncodedPublicKey()); keyMAC = kdf.deriveMAC(k); keyENC = kdf.deriveENC(k); AuthenticationToken tokenPCD = new AuthenticationToken(psi); tokenPCD.generateToken(keyMAC, keyPICC.getEncodedPublicKey()); CardCommandAPDU gaMutualAuth = new GeneralAuthenticate((byte) 0x85, tokenPCD.toByteArray()); AuthenticationToken tokenPICC = new AuthenticationToken(psi); tokenPICC.generateToken(keyMAC, keyPCD.getEncodedPublicKey()); try { response = gaMutualAuth.transmit(dispatcher, slotHandle); if (tokenPICC.verifyToken(response.getData(), specifiedCHAT)) { currentCAR = tokenPICC.getCurrentCAR(); previousCAR = tokenPCD.getPreviousCAR(); } else { throw new GeneralSecurityException(STR); } } catch (APDUException e) { logger.error(e.getMessage(), e); int sw = e.getResponseAPDU().getSW(); if ((sw & (short) 0xFFF0) == (short) 0x63C0) { retryCounter = (byte) (sw & (short) 0x000F); if (retryCounter == (byte) 0x00) { logger.warn(STR); throw new ProtocolException( ECardConstants.Minor.IFD.PASSWORD_BLOCKED, STR); } else if (retryCounter == (byte) 0x01) { logger.warn(STR); throw new ProtocolException(ECardConstants.Minor.IFD.PASSWORD_SUSPENDED, STR); } else if (retryCounter == (byte) 0x02) { logger.warn(STR); throw new ProtocolException( ECardConstants.Minor.IFD.PASSWORD_ERROR, STR); } } else { throw new ProtocolException( ECardConstants.Minor.IFD.AUTHENTICATION_FAILED, STR); } } catch (Exception e) { logger.error(e.getMessage(), e); throw new ProtocolException(ECardConstants.Minor.IFD.UNKNOWN_ERROR, e.getMessage()); } }
|
/**
* Step 5: Mutual authentication
*/
|
Step 5: Mutual authentication
|
generalAuthenticateMutualAuthentication
|
{
"repo_name": "adelapie/open-ecard-IRMA",
"path": "ifd/ifd-protocols/pace/src/main/java/org/openecard/ifd/protocol/pace/PACEImplementation.java",
"license": "apache-2.0",
"size": 12891
}
|
[
"java.security.GeneralSecurityException",
"org.openecard.common.ECardConstants",
"org.openecard.common.apdu.GeneralAuthenticate",
"org.openecard.common.apdu.common.CardCommandAPDU",
"org.openecard.common.apdu.exception.APDUException",
"org.openecard.common.ifd.protocol.exception.ProtocolException",
"org.openecard.ifd.protocol.pace.crypto.AuthenticationToken"
] |
import java.security.GeneralSecurityException; import org.openecard.common.ECardConstants; import org.openecard.common.apdu.GeneralAuthenticate; import org.openecard.common.apdu.common.CardCommandAPDU; import org.openecard.common.apdu.exception.APDUException; import org.openecard.common.ifd.protocol.exception.ProtocolException; import org.openecard.ifd.protocol.pace.crypto.AuthenticationToken;
|
import java.security.*; import org.openecard.common.*; import org.openecard.common.apdu.*; import org.openecard.common.apdu.common.*; import org.openecard.common.apdu.exception.*; import org.openecard.common.ifd.protocol.exception.*; import org.openecard.ifd.protocol.pace.crypto.*;
|
[
"java.security",
"org.openecard.common",
"org.openecard.ifd"
] |
java.security; org.openecard.common; org.openecard.ifd;
| 387,476
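The retry handling above packs the remaining tries into the low nibble of an ISO 7816 status word of the form 0x63Cx. A self-contained sketch of that decoding, with illustrative status words:

// Decode PACE-style retry counters from ISO 7816 status words (0x63Cx).
public final class StatusWordDemo {
    public static void main(String[] args) {
        int[] samples = {0x63C2, 0x63C1, 0x63C0, 0x9000}; // example values only
        for (int sw : samples) {
            if ((sw & 0xFFF0) == 0x63C0) {
                int retryCounter = sw & 0x000F; // low nibble = remaining tries
                System.out.printf("sw=%04X, remaining tries=%d%n", sw, retryCounter);
            } else {
                System.out.printf("sw=%04X carries no retry counter%n", sw);
            }
        }
    }
}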
|
public void setCompositeKey(Name newCompositeKey);
|
void function(Name newCompositeKey);
|
/**
* Specifies a new composite key for the composite entity identifier
*
* @param newCompositeKey javax.naming.Name
*/
|
Specifies a new composite key for the composite entity identifier
|
setCompositeKey
|
{
"repo_name": "UW-Madison-DoIT/portal-abstraction-api",
"path": "src/main/java/edu/wisc/my/apilayer/groups/ICompositeEntityIdentifier.java",
"license": "bsd-3-clause",
"size": 4460
}
|
[
"javax.naming.Name"
] |
import javax.naming.Name;
|
import javax.naming.*;
|
[
"javax.naming"
] |
javax.naming;
| 1,904,715
|
public static Resource newResource(String resource)
throws MalformedURLException, IOException
{
return newResource(resource, __defaultUseCaches);
}
|
static Resource function(String resource) throws MalformedURLException, IOException { return newResource(resource, __defaultUseCaches); }
|
/** Construct a resource from a string.
* @param resource A URL or filename.
* @return A Resource object.
*/
|
Construct a resource from a string
|
newResource
|
{
"repo_name": "mabrek/jetty",
"path": "jetty-util/src/main/java/org/eclipse/jetty/util/resource/Resource.java",
"license": "apache-2.0",
"size": 20110
}
|
[
"java.io.IOException",
"java.net.MalformedURLException"
] |
import java.io.IOException; import java.net.MalformedURLException;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 462,023
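A minimal usage sketch for newResource, assuming Jetty's jetty-util is on the classpath; the paths are examples, and both a plain filename and a URL string are accepted.

import org.eclipse.jetty.util.resource.Resource;

public class ResourceDemo {
    public static void main(String[] args) throws Exception {
        // Paths are illustrative; newResource takes a URL or a filename.
        Resource fromFile = Resource.newResource("/tmp/example.txt");
        Resource fromUrl = Resource.newResource("file:/tmp/example.txt");
        System.out.println(fromFile.exists() + " " + fromUrl.exists());
    }
}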
|
public synchronized void operate(final Board board, float dx, float dy) {
dx = NearestMargin.findNearestMarginX(board, this, dx);
dy = NearestMargin.findNearestMarginY(board, this, dy);
// Try for (dx, dy) move
BlockSet test = testUpdate(dx, dy);
if (!Collision.isCollided(board, test)) {
update(dx, dy);
return;
}
// Try for (dx, 0) move
test = testUpdate(dx, 0);
if (!Collision.isCollided(board, test)) {
update(dx, 0);
return;
}
// Try for (0, dy)
test = testUpdate(0, dy);
if (!Collision.isCollided(board, test)) {
update(0, dy);
}
}
|
synchronized void function(final Board board, float dx, float dy) { dx = NearestMargin.findNearestMarginX(board, this, dx); dy = NearestMargin.findNearestMarginY(board, this, dy); BlockSet test = testUpdate(dx, dy); if (!Collision.isCollided(board, test)) { update(dx, dy); return; } test = testUpdate(dx, 0); if (!Collision.isCollided(board, test)) { update(dx, 0); return; } test = testUpdate(0, dy); if (!Collision.isCollided(board, test)) { update(0, dy); } }
|
/**
* Move blocks with dx and dy. <br>
 * If dx or dy is larger than the nearest border, fix them with {@link NearestMargin} in this method. <br>
*
* @param board Current board
* @param dx Amount of move x
* @param dy Amount of move y
*/
|
Move blocks with dx and dy. If dx or dy is larger than the nearest border, fix them with <code>NearestMargin</code> in this method.
|
operate
|
{
"repo_name": "aratakokubun/new_words",
"path": "src/com/kkbnart/wordis/game/board/OperatedBlocks.java",
"license": "mit",
"size": 3525
}
|
[
"com.kkbnart.wordis.game.object.BlockSet",
"com.kkbnart.wordis.game.object.Collision"
] |
import com.kkbnart.wordis.game.object.BlockSet; import com.kkbnart.wordis.game.object.Collision;
|
import com.kkbnart.wordis.game.object.*;
|
[
"com.kkbnart.wordis"
] |
com.kkbnart.wordis;
| 739,912
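The fall-through order above (full move, then x only, then y only) is the usual "wall slide" for collision-limited movement. A generic sketch of the same idea, independent of the Wordis types; the names and the blocked predicate are made up for illustration.

import java.util.function.BiPredicate;

public class SlideMoveDemo {
    // Try the full (dx, dy) move first, then slide along a single axis.
    static float[] slideMove(float x, float y, float dx, float dy,
                             BiPredicate<Float, Float> blocked) {
        if (!blocked.test(x + dx, y + dy)) return new float[] {x + dx, y + dy};
        if (!blocked.test(x + dx, y)) return new float[] {x + dx, y};
        if (!blocked.test(x, y + dy)) return new float[] {x, y + dy};
        return new float[] {x, y}; // fully blocked: stay put
    }

    public static void main(String[] args) {
        // Toy world: everything right of x=5 is blocked.
        float[] p = slideMove(4f, 0f, 3f, 2f, (x, y) -> x > 5f);
        System.out.println(p[0] + ", " + p[1]); // slides vertically to (4.0, 2.0)
    }
}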
|
protected final void FP_REM(OPT_Instruction s, OPT_Operand val1, OPT_Operand val2) {
EMIT(CPOS(s, MIR_Move.create(IA32_FMOV, D(getFPR(1)), val2)));
EMIT(CPOS(s, MIR_Move.create(IA32_FMOV, D(getFPR(0)), val1)));
EMIT(MIR_BinaryAcc.mutate(s, IA32_FPREM, D(getFPR(0)), D(getFPR(1))));
}
|
final void function(OPT_Instruction s, OPT_Operand val1, OPT_Operand val2) { EMIT(CPOS(s, MIR_Move.create(IA32_FMOV, D(getFPR(1)), val2))); EMIT(CPOS(s, MIR_Move.create(IA32_FMOV, D(getFPR(0)), val1))); EMIT(MIR_BinaryAcc.mutate(s, IA32_FPREM, D(getFPR(0)), D(getFPR(1)))); }
|
/**
* Expansion of FP_REM
*
* @param s the instruction to expand
* @param val1 the first operand
* @param val2 the second operand
*/
|
Expansion of FP_REM
|
FP_REM
|
{
"repo_name": "rmcilroy/HeraJVM",
"path": "rvm/src/org/jikesrvm/compilers/opt/ia32/OPT_BURS_Helpers.java",
"license": "epl-1.0",
"size": 151134
}
|
[
"org.jikesrvm.compilers.opt.ir.Move"
] |
import org.jikesrvm.compilers.opt.ir.Move;
|
import org.jikesrvm.compilers.opt.ir.*;
|
[
"org.jikesrvm.compilers"
] |
org.jikesrvm.compilers;
| 436,813
|
public String remove(String key) throws IOException, java.rmi.RemoteException;
|
String function(String key) throws IOException, java.rmi.RemoteException;
|
/**
*
* Do not use this method, use IWMainApplicationSettings.
*
 * @see com.idega.core.business.ICApplicationBindingBusinessBean#remove
*/
|
Do not use this method, use IWMainApplicationSettings
|
remove
|
{
"repo_name": "idega/com.idega.core",
"path": "src/java/com/idega/core/business/ICApplicationBindingBusiness.java",
"license": "gpl-3.0",
"size": 1887
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,424,324
|
EAttribute getMakeDiagram_Id();
|
EAttribute getMakeDiagram_Id();
|
/**
* Returns the meta object for the attribute '{@link no.hib.dpf.text.tdpf.MakeDiagram#getId <em>Id</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Id</em>'.
* @see no.hib.dpf.text.tdpf.MakeDiagram#getId()
* @see #getMakeDiagram()
* @generated
*/
|
Returns the meta object for the attribute '<code>no.hib.dpf.text.tdpf.MakeDiagram#getId Id</code>'.
|
getMakeDiagram_Id
|
{
"repo_name": "fmantz/DPF_Text",
"path": "no.hib.dpf.text/src-gen/no/hib/dpf/text/tdpf/TdpfPackage.java",
"license": "epl-1.0",
"size": 84409
}
|
[
"org.eclipse.emf.ecore.EAttribute"
] |
import org.eclipse.emf.ecore.EAttribute;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 397,802
|
public Paint getShadowPaint() {
return this.shadowPaint;
}
|
Paint function() { return this.shadowPaint; }
|
/**
* Returns the shadow paint.
*
* @return The shadow paint.
*/
|
Returns the shadow paint
|
getShadowPaint
|
{
"repo_name": "akardapolov/ASH-Viewer",
"path": "jfreechart-fse/src/main/java/org/jfree/chart/text/TextBox.java",
"license": "gpl-3.0",
"size": 12597
}
|
[
"java.awt.Paint"
] |
import java.awt.Paint;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 245,578
|
private void init(Context context) {
    this.context = context;
getHolder().addCallback(this);
setFocusable(true);
chessboard = new Chessboard(context, CalculateKnightMove.boardSpaces);
knightMovements = new CalculateKnightMove(context);
knightMovements.startingPoint();
}
|
void function(Context context) { this.context = context; getHolder().addCallback(this); setFocusable(true); chessboard = new Chessboard(context, CalculateKnightMove.boardSpaces); knightMovements = new CalculateKnightMove(context); knightMovements.startingPoint(); }
|
/**
 * A centralized method that all of the different constructors use to initialize
 * the SurfaceView in the same way without duplicated code
*/
|
A centralized method that all of the different constructors use to initialize the SurfaceView in the same way without duplicated code
|
init
|
{
"repo_name": "virtualprodigy/TheKnightsTourAndroid",
"path": "app/src/main/java/com/virtualprodigy/theknightstour/Layout/KnightsSurfaceView/KnightsSurfaceView.java",
"license": "apache-2.0",
"size": 3434
}
|
[
"com.virtualprodigy.theknightstour.Layout",
"com.virtualprodigy.theknightstour.Utilities"
] |
import com.virtualprodigy.theknightstour.Layout; import com.virtualprodigy.theknightstour.Utilities;
|
import com.virtualprodigy.theknightstour.*;
|
[
"com.virtualprodigy.theknightstour"
] |
com.virtualprodigy.theknightstour;
| 2,471,090
|
public Message createSignal(Handler handler, int myGeneration) {
return Message.obtain(handler, ordinal(), myGeneration, 0, null);
}
|
Message function(Handler handler, int myGeneration) { return Message.obtain(handler, ordinal(), myGeneration, 0, null); }
|
/**
* Creates a message that when sent will raise the signal of this condition.
*/
|
Creates a message that when sent will raise the signal of this condition
|
createSignal
|
{
"repo_name": "djodjoni/tarator",
"path": "core/src/main/java/org/djodjo/tarator/base/UiControllerImpl.java",
"license": "apache-2.0",
"size": 20678
}
|
[
"android.os.Handler",
"android.os.Message"
] |
import android.os.Handler; import android.os.Message;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 2,435,518
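A minimal sketch of raising such a signal, assuming a Handler bound to a Looper thread; the method and parameter names here are illustrative, with `what` standing in for ordinal().

import android.os.Handler;
import android.os.Message;

// Hypothetical caller: build the signal message and post it to the handler.
static void raiseSignal(Handler handler, int what, int myGeneration) {
    Message msg = Message.obtain(handler, what, myGeneration, 0, null);
    msg.sendToTarget(); // delivered to handler.handleMessage(...)
}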
|
private ProxyConfiguration createNewConfiguration(final ConfigurationService configurationService) throws DMPControllerException {
final ProxyConfiguration proxyConfiguration;
try {
proxyConfiguration = configurationService.createObjectTransactional();
} catch (final DMPPersistenceException e) {
ResourcesResource.LOG.debug("something went wrong while configuration creation");
throw new DMPControllerException("something went wrong while configuration creation\n" + e.getMessage());
}
if (proxyConfiguration == null) {
throw new DMPControllerException("fresh configuration shouldn't be null");
}
final Configuration configuration = proxyConfiguration.getObject();
if (configuration == null) {
throw new DMPControllerException("fresh configuration shouldn't be null");
}
return proxyConfiguration;
}
|
ProxyConfiguration function(final ConfigurationService configurationService) throws DMPControllerException { final ProxyConfiguration proxyConfiguration; try { proxyConfiguration = configurationService.createObjectTransactional(); } catch (final DMPPersistenceException e) { ResourcesResource.LOG.debug(STR); throw new DMPControllerException(STR + e.getMessage()); } if (proxyConfiguration == null) { throw new DMPControllerException(STR); } final Configuration configuration = proxyConfiguration.getObject(); if (configuration == null) { throw new DMPControllerException(STR); } return proxyConfiguration; }
|
/**
* Persists a new configuration in the database.
*
* @param configurationService the configuration persistence service
* @return the new persisted configuration
* @throws DMPControllerException
*/
|
Persists a new configuration in the database
|
createNewConfiguration
|
{
"repo_name": "janpolowinski/dswarm",
"path": "controller/src/main/java/org/dswarm/controller/resources/resource/ResourcesResource.java",
"license": "apache-2.0",
"size": 47175
}
|
[
"org.dswarm.controller.DMPControllerException",
"org.dswarm.persistence.DMPPersistenceException",
"org.dswarm.persistence.model.resource.Configuration",
"org.dswarm.persistence.model.resource.proxy.ProxyConfiguration",
"org.dswarm.persistence.service.resource.ConfigurationService"
] |
import org.dswarm.controller.DMPControllerException; import org.dswarm.persistence.DMPPersistenceException; import org.dswarm.persistence.model.resource.Configuration; import org.dswarm.persistence.model.resource.proxy.ProxyConfiguration; import org.dswarm.persistence.service.resource.ConfigurationService;
|
import org.dswarm.controller.*; import org.dswarm.persistence.*; import org.dswarm.persistence.model.resource.*; import org.dswarm.persistence.model.resource.proxy.*; import org.dswarm.persistence.service.resource.*;
|
[
"org.dswarm.controller",
"org.dswarm.persistence"
] |
org.dswarm.controller; org.dswarm.persistence;
| 2,109,391
|
protected static Job createSubmittableJob(Configuration conf, String[] args)
throws IOException, ClassNotFoundException {
Job job = null;
boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Admin admin = connection.getAdmin()) {
// Support non-XML supported characters
// by re-encoding the passed separator as a Base64 string.
String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
if (actualSeparator != null) {
conf.set(SEPARATOR_CONF_KEY,
Bytes.toString(Base64.getEncoder().encode(Bytes.toBytes(actualSeparator))));
}
// See if a non-default Mapper was set
String mapperClassName = conf.get(MAPPER_CONF_KEY);
Class mapperClass = mapperClassName != null? Class.forName(mapperClassName): DEFAULT_MAPPER;
TableName tableName = TableName.valueOf(args[0]);
Path inputDir = new Path(args[1]);
String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName.getNameAsString());
job = Job.getInstance(conf, jobName);
job.setJarByClass(mapperClass);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormatClass(TextInputFormat.class);
job.setMapperClass(mapperClass);
job.setMapOutputKeyClass(ImmutableBytesWritable.class);
String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
if(StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
}
if (hfileOutPath != null) {
if (!admin.tableExists(tableName)) {
LOG.warn(format("Table '%s' does not exist.", tableName));
if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
// TODO: this is backwards. Instead of depending on the existence of a table,
// create a sane splits file for HFileOutputFormat based on data sampling.
createTable(admin, tableName, columns);
if (isDryRun) {
LOG.warn("Dry run: Table will be deleted at end of dry run.");
synchronized (ImportTsv.class) {
DRY_RUN_TABLE_CREATED = true;
}
}
} else {
String errorMsg =
format("Table '%s' does not exist and '%s' is set to no.", tableName,
CREATE_TABLE_CONF_KEY);
LOG.error(errorMsg);
throw new TableNotFoundException(errorMsg);
}
}
try (Table table = connection.getTable(tableName);
RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
// if no.strict is false then check column family
if(!noStrict) {
ArrayList<String> unmatchedFamilies = new ArrayList<>();
Set<String> cfSet = getColumnFamilies(columns);
TableDescriptor tDesc = table.getDescriptor();
for (String cf : cfSet) {
if(!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
unmatchedFamilies.add(cf);
}
}
if(unmatchedFamilies.size() > 0) {
ArrayList<String> familyNames = new ArrayList<>();
for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) {
familyNames.add(family.getNameAsString());
}
String msg =
"Column Families " + unmatchedFamilies + " specified in " + COLUMNS_CONF_KEY
+ " does not match with any of the table " + tableName
+ " column families " + familyNames + ".\n"
+ "To disable column family check, use -D" + NO_STRICT_COL_FAMILY
+ "=true.\n";
usage(msg);
System.exit(-1);
}
}
if (mapperClass.equals(TsvImporterTextMapper.class)) {
job.setMapOutputValueClass(Text.class);
job.setReducerClass(TextSortReducer.class);
} else {
job.setMapOutputValueClass(Put.class);
job.setCombinerClass(PutCombiner.class);
job.setReducerClass(PutSortReducer.class);
}
if (!isDryRun) {
Path outputDir = new Path(hfileOutPath);
FileOutputFormat.setOutputPath(job, outputDir);
HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(),
regionLocator);
}
}
} else {
if (!admin.tableExists(tableName)) {
String errorMsg = format("Table '%s' does not exist.", tableName);
LOG.error(errorMsg);
throw new TableNotFoundException(errorMsg);
}
if (mapperClass.equals(TsvImporterTextMapper.class)) {
usage(TsvImporterTextMapper.class.toString()
+ " should not be used for non bulkloading case. use "
+ TsvImporterMapper.class.toString()
+ " or custom mapper whose value type is Put.");
System.exit(-1);
}
if (!isDryRun) {
// No reducers. Just write straight to table. Call initTableReducerJob
// to set up the TableOutputFormat.
TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
}
job.setNumReduceTasks(0);
}
if (isDryRun) {
job.setOutputFormatClass(NullOutputFormat.class);
job.getConfiguration().setStrings("io.serializations",
job.getConfiguration().get("io.serializations"),
MutationSerialization.class.getName(), ResultSerialization.class.getName(),
CellSerialization.class.getName());
}
TableMapReduceUtil.addDependencyJars(job);
TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
org.apache.hbase.thirdparty.com.google.common.base.Function.class );
}
}
return job;
}
|
static Job function(Configuration conf, String[] args) throws IOException, ClassNotFoundException { Job job = null; boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false); try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Admin admin = connection.getAdmin()) { String actualSeparator = conf.get(SEPARATOR_CONF_KEY); if (actualSeparator != null) { conf.set(SEPARATOR_CONF_KEY, Bytes.toString(Base64.getEncoder().encode(Bytes.toBytes(actualSeparator)))); } String mapperClassName = conf.get(MAPPER_CONF_KEY); Class mapperClass = mapperClassName != null? Class.forName(mapperClassName): DEFAULT_MAPPER; TableName tableName = TableName.valueOf(args[0]); Path inputDir = new Path(args[1]); String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName.getNameAsString()); job = Job.getInstance(conf, jobName); job.setJarByClass(mapperClass); FileInputFormat.setInputPaths(job, inputDir); job.setInputFormatClass(TextInputFormat.class); job.setMapperClass(mapperClass); job.setMapOutputKeyClass(ImmutableBytesWritable.class); String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY); String[] columns = conf.getStrings(COLUMNS_CONF_KEY); if(StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) { String fileLoc = conf.get(CREDENTIALS_LOCATION); Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf); job.getCredentials().addAll(cred); } if (hfileOutPath != null) { if (!admin.tableExists(tableName)) { LOG.warn(format(STR, tableName)); if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) { createTable(admin, tableName, columns); if (isDryRun) { LOG.warn(STR); synchronized (ImportTsv.class) { DRY_RUN_TABLE_CREATED = true; } } } else { String errorMsg = format(STR, tableName, CREATE_TABLE_CONF_KEY); LOG.error(errorMsg); throw new TableNotFoundException(errorMsg); } } try (Table table = connection.getTable(tableName); RegionLocator regionLocator = connection.getRegionLocator(tableName)) { boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false); if(!noStrict) { ArrayList<String> unmatchedFamilies = new ArrayList<>(); Set<String> cfSet = getColumnFamilies(columns); TableDescriptor tDesc = table.getDescriptor(); for (String cf : cfSet) { if(!tDesc.hasColumnFamily(Bytes.toBytes(cf))) { unmatchedFamilies.add(cf); } } if(unmatchedFamilies.size() > 0) { ArrayList<String> familyNames = new ArrayList<>(); for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) { familyNames.add(family.getNameAsString()); } String msg = STR + unmatchedFamilies + STR + COLUMNS_CONF_KEY + STR + tableName + STR + familyNames + ".\n" + STR + NO_STRICT_COL_FAMILY + STR; usage(msg); System.exit(-1); } } if (mapperClass.equals(TsvImporterTextMapper.class)) { job.setMapOutputValueClass(Text.class); job.setReducerClass(TextSortReducer.class); } else { job.setMapOutputValueClass(Put.class); job.setCombinerClass(PutCombiner.class); job.setReducerClass(PutSortReducer.class); } if (!isDryRun) { Path outputDir = new Path(hfileOutPath); FileOutputFormat.setOutputPath(job, outputDir); HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator); } } } else { if (!admin.tableExists(tableName)) { String errorMsg = format(STR, tableName); LOG.error(errorMsg); throw new TableNotFoundException(errorMsg); } if (mapperClass.equals(TsvImporterTextMapper.class)) { usage(TsvImporterTextMapper.class.toString() + STR + TsvImporterMapper.class.toString() + STR); System.exit(-1); } if (!isDryRun) { 
TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job); } job.setNumReduceTasks(0); } if (isDryRun) { job.setOutputFormatClass(NullOutputFormat.class); job.getConfiguration().setStrings(STR, job.getConfiguration().get(STR), MutationSerialization.class.getName(), ResultSerialization.class.getName(), CellSerialization.class.getName()); } TableMapReduceUtil.addDependencyJars(job); TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), org.apache.hbase.thirdparty.com.google.common.base.Function.class ); } } return job; }
|
/**
* Sets up the actual job.
*
* @param conf The current configuration.
* @param args The command line parameters.
* @return The newly created job.
* @throws IOException When setting up the job fails.
*/
|
Sets up the actual job
|
createSubmittableJob
|
{
"repo_name": "mahak/hbase",
"path": "hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java",
"license": "apache-2.0",
"size": 31676
}
|
[
"java.io.File",
"java.io.IOException",
"java.lang.String",
"java.util.ArrayList",
"java.util.Base64",
"java.util.Set",
"org.apache.commons.lang3.StringUtils",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.TableName",
"org.apache.hadoop.hbase.TableNotFoundException",
"org.apache.hadoop.hbase.client.Admin",
"org.apache.hadoop.hbase.client.ColumnFamilyDescriptor",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.client.ConnectionFactory",
"org.apache.hadoop.hbase.client.Put",
"org.apache.hadoop.hbase.client.RegionLocator",
"org.apache.hadoop.hbase.client.Table",
"org.apache.hadoop.hbase.client.TableDescriptor",
"org.apache.hadoop.hbase.io.ImmutableBytesWritable",
"org.apache.hadoop.hbase.util.Bytes",
"org.apache.hadoop.io.Text",
"org.apache.hadoop.mapreduce.Job",
"org.apache.hadoop.mapreduce.lib.input.FileInputFormat",
"org.apache.hadoop.mapreduce.lib.input.TextInputFormat",
"org.apache.hadoop.mapreduce.lib.output.FileOutputFormat",
"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat",
"org.apache.hadoop.security.Credentials"
] |
import java.io.File; import java.io.IOException; import java.lang.String; import java.util.ArrayList; import java.util.Base64; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.security.Credentials;
|
import java.io.*; import java.lang.*; import java.util.*; import org.apache.commons.lang3.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.io.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.lib.input.*; import org.apache.hadoop.mapreduce.lib.output.*; import org.apache.hadoop.security.*;
|
[
"java.io",
"java.lang",
"java.util",
"org.apache.commons",
"org.apache.hadoop"
] |
java.io; java.lang; java.util; org.apache.commons; org.apache.hadoop;
| 2,759,080
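The separator re-encoding near the top of the method exists because a raw separator byte such as a tab may not survive Hadoop's XML configuration. A small round-trip sketch of that encoding, standalone, using the same Base64 and HBase Bytes calls:

import java.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

public class SeparatorDemo {
    public static void main(String[] args) {
        String actualSeparator = "\t"; // a raw tab would be mangled in XML conf
        String encoded = Bytes.toString(
                Base64.getEncoder().encode(Bytes.toBytes(actualSeparator)));
        String decoded = Bytes.toString(
                Base64.getDecoder().decode(Bytes.toBytes(encoded)));
        // "CQ==" is the Base64 form of a single tab byte
        System.out.println(encoded + " -> round-trips: "
                + decoded.equals(actualSeparator));
    }
}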
|
public Observable<ServiceResponse<PrivateLinkHubInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String privateLinkHubName, PrivateLinkHubInner privateLinkHubInfo) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (privateLinkHubName == null) {
throw new IllegalArgumentException("Parameter privateLinkHubName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
if (privateLinkHubInfo == null) {
throw new IllegalArgumentException("Parameter privateLinkHubInfo is required and cannot be null.");
}
|
Observable<ServiceResponse<PrivateLinkHubInner>> function(String resourceGroupName, String privateLinkHubName, PrivateLinkHubInner privateLinkHubInfo) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (privateLinkHubName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); } if (privateLinkHubInfo == null) { throw new IllegalArgumentException(STR); }
|
/**
* Creates or updates a privateLinkHub.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param privateLinkHubName The name of the privateLinkHub
* @param privateLinkHubInfo PrivateLinkHub create or update request properties
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PrivateLinkHubInner object
*/
|
Creates or updates a privateLinkHub
|
createOrUpdateWithServiceResponseAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/synapse/mgmt-v2019_06_01_preview/src/main/java/com/microsoft/azure/management/synapse/v2019_06_01_preview/implementation/PrivateLinkHubsInner.java",
"license": "mit",
"size": 57290
}
|
[
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 1,859,679
|
@Test
public void testReplication () {
replication = 3;
preferredBlockSize = 128*1024*1024;
INodeFile inf = createINodeFile(replication, preferredBlockSize);
assertEquals("True has to be returned in this case", replication,
inf.getFileReplication());
}
|
void function () { replication = 3; preferredBlockSize = 128*1024*1024; INodeFile inf = createINodeFile(replication, preferredBlockSize); assertEquals(STR, replication, inf.getFileReplication()); }
|
/**
 * Test for the Replication value. Sets a value and checks if it was set
 * correctly.
*/
|
Test for the Replication value. Sets a value and checks if it was set correctly
|
testReplication
|
{
"repo_name": "HazelChen/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java",
"license": "apache-2.0",
"size": 42319
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,525,840
|
@WebMethod
@WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602")
@RequestWrapper(localName = "createAdRules", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602", className = "com.google.api.ads.dfp.jaxws.v201602.AdRuleServiceInterfacecreateAdRules")
@ResponseWrapper(localName = "createAdRulesResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602", className = "com.google.api.ads.dfp.jaxws.v201602.AdRuleServiceInterfacecreateAdRulesResponse")
public List<AdRule> createAdRules(
@WebParam(name = "adRules", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602")
List<AdRule> adRules)
throws ApiException_Exception
;
|
@WebResult(name = "rval", targetNamespace = STRcreateAdRulesSTRhttps: @ResponseWrapper(localName = "createAdRulesResponseSTRhttps: List<AdRule> function( @WebParam(name = "adRulesSTRhttps: List<AdRule> adRules) throws ApiException_Exception ;
|
/**
*
* Creates new {@link AdRule} objects.
*
* @param adRules the ad rules to create
* @return the created ad rules with their IDs filled in
*
*
* @param adRules
* @return
* returns java.util.List<com.google.api.ads.dfp.jaxws.v201602.AdRule>
* @throws ApiException_Exception
*/
|
Creates new <code>AdRule</code> objects
|
createAdRules
|
{
"repo_name": "gawkermedia/googleads-java-lib",
"path": "modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201602/AdRuleServiceInterface.java",
"license": "apache-2.0",
"size": 6961
}
|
[
"java.util.List",
"javax.jws.WebParam",
"javax.jws.WebResult",
"javax.xml.ws.ResponseWrapper"
] |
import java.util.List; import javax.jws.WebParam; import javax.jws.WebResult; import javax.xml.ws.ResponseWrapper;
|
import java.util.*; import javax.jws.*; import javax.xml.ws.*;
|
[
"java.util",
"javax.jws",
"javax.xml"
] |
java.util; javax.jws; javax.xml;
| 784,480
|
public boolean copyTree(Name oldNodeDN, Name newNodeDN) // may be a single node.
{
log.log(Level.FINER,"recursively copy tree from " + oldNodeDN.toString() + " to " + newNodeDN.toString());
open("Copying " + oldNodeDN.toString(), "copying");
boolean ret = recCopyTree(oldNodeDN, newNodeDN);
close();
return ret;
}
|
boolean function(Name oldNodeDN, Name newNodeDN) { log.log(Level.FINER,STR + oldNodeDN.toString() + STR + newNodeDN.toString()); open(STR + oldNodeDN.toString(), STR); boolean ret = recCopyTree(oldNodeDN, newNodeDN); close(); return ret; }
|
/**
* Copies a DN representing a subtree to a new subtree, including
* copying all subordinate entries.
*
* @param oldNodeDN the original DN of the sub tree root
* to be copied (may be a single entry).
* @param newNodeDN the target DN for the tree to be moved to.
* @return the operation's success status
*/
|
Copies a DN representing a subtree to a new subtree, including copying all subordinate entries
|
copyTree
|
{
"repo_name": "idega/platform2",
"path": "src/com/idega/core/ldap/client/jndi/AdvancedOps.java",
"license": "gpl-3.0",
"size": 10264
}
|
[
"java.util.logging.Level",
"javax.naming.Name"
] |
import java.util.logging.Level; import javax.naming.Name;
|
import java.util.logging.*; import javax.naming.*;
|
[
"java.util",
"javax.naming"
] |
java.util; javax.naming;
| 361,953
|
private void exportToXML(final String tableOrViewName, final String fileName, final Partitioning partitions)
{
final FileWriter fw;
try {
fw = new FileWriter(context.getWorkingDirectory() + File.separator + fileName);
final StringBuilder fileContent = new StringBuilder();
fileContent.append(XML_DEF_LINE);
fileContent.append("\n<DocumentElement>");
for (final Partition partition : partitions) {
final List<Record> recordsInPartition = database.getAllRecordsFromTableOrView(tableOrViewName, partition.getFirstRecord(),
partition.getLength(), null, null);
if (null != recordsInPartition) {
for (final Record record : recordsInPartition) {
context.getLog().info("...done. Write records " + partition.getFirstRecord() + "" + (partition.getFirstRecord() + partition.getLength() - 1));
fileContent.append("\n<");
fileContent.append(tableOrViewName);
fileContent.append(">");
final StringBuilder line = new StringBuilder();
for (Value v : record) {
line.append("\n\t<");
line.append(v.getAttributeName());
line.append(">");
final String escapedXmlValue = StringEscapeUtils.escapeXml(v.getValue().toString());
if (v.getValue().toString().length() < SAME_LINE_XML_LIMIT) {
line.append(escapedXmlValue);
line.append("</");
line.append(v.getAttributeName());
line.append(">");
} else {
line.append("\n\t\t");
line.append(escapedXmlValue);
line.append("\n\t");
line.append("</");
line.append(v.getAttributeName());
line.append(">");
}
}
fileContent.append(line);
fileContent.append("\n</");
fileContent.append(tableOrViewName);
fileContent.append(">");
}
}
}
fileContent.append("\n</DocumentElement>");
// write to file
fw.append(fileContent.toString());
fw.flush();
fw.close();
} catch (IOException e) {
context.getLog().err(e.getMessage());
}
}
|
void function(final String tableOrViewName, final String fileName, final Partitioning partitions) { final FileWriter fw; try { fw = new FileWriter(context.getWorkingDirectory() + File.separator + fileName); final StringBuilder fileContent = new StringBuilder(); fileContent.append(XML_DEF_LINE); fileContent.append(STR); for (final Partition partition : partitions) { final List<Record> recordsInPartition = database.getAllRecordsFromTableOrView(tableOrViewName, partition.getFirstRecord(), partition.getLength(), null, null); if (null != recordsInPartition) { for (final Record record : recordsInPartition) { context.getLog().info(STR + partition.getFirstRecord() + "" + (partition.getFirstRecord() + partition.getLength() - 1)); fileContent.append("\n<"); fileContent.append(tableOrViewName); fileContent.append(">"); final StringBuilder line = new StringBuilder(); for (Value v : record) { line.append("\n\t<"); line.append(v.getAttributeName()); line.append(">"); final String escapedXmlValue = StringEscapeUtils.escapeXml(v.getValue().toString()); if (v.getValue().toString().length() < SAME_LINE_XML_LIMIT) { line.append(escapedXmlValue); line.append("</"); line.append(v.getAttributeName()); line.append(">"); } else { line.append("\n\t\t"); line.append(escapedXmlValue); line.append("\n\t"); line.append("</"); line.append(v.getAttributeName()); line.append(">"); } } fileContent.append(line); fileContent.append("\n</"); fileContent.append(tableOrViewName); fileContent.append(">"); } } } fileContent.append(STR); fw.append(fileContent.toString()); fw.flush(); fw.close(); } catch (IOException e) { context.getLog().err(e.getMessage()); } }
|
/**
 * Uses an existing table, converts it to XML format and saves it in a file.
*
* @param tableOrViewName table name which will be exported
* @param fileName file name which will be created
* @param partitions part of data from database
*/
|
Uses an existing table, converts it to XML format and saves it in a file
|
exportToXML
|
{
"repo_name": "BrickworkVentures/boilersuit-core",
"path": "src/main/java/ch/brickwork/bsuit/interpreter/interpreters/ExportInterpreter.java",
"license": "mit",
"size": 23844
}
|
[
"ch.brickwork.bsuit.database.Record",
"ch.brickwork.bsuit.util.Partition",
"ch.brickwork.bsuit.util.Partitioning",
"java.io.File",
"java.io.FileWriter",
"java.io.IOException",
"java.util.List"
] |
import ch.brickwork.bsuit.database.Record; import ch.brickwork.bsuit.util.Partition; import ch.brickwork.bsuit.util.Partitioning; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.List;
|
import ch.brickwork.bsuit.database.*; import ch.brickwork.bsuit.util.*; import java.io.*; import java.util.*;
|
[
"ch.brickwork.bsuit",
"java.io",
"java.util"
] |
ch.brickwork.bsuit; java.io; java.util;
| 1,367,372
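The exporter above leans on StringEscapeUtils.escapeXml to keep markup characters in cell values from corrupting the generated XML. A tiny sketch of that call (commons-lang 2.x shown; in commons-lang3 the closest equivalent is escapeXml10):

import org.apache.commons.lang.StringEscapeUtils;

public class EscapeDemo {
    public static void main(String[] args) {
        String raw = "a < b & c";
        // '<', '>' and '&' must be escaped before embedding in element text.
        System.out.println(StringEscapeUtils.escapeXml(raw)); // a &lt; b &amp; c
    }
}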
|
void defrag() throws HsqlException {
close();
// return here if *.data file was deleted because it was empty
if (!FileUtil.exists(sName)) {
init();
open(cacheReadonly);
Trace.printSystemOut("opened empty chache");
return;
}
HsqlArrayList indexRoots = null;
try {
// open as readonly
open(true);
boolean wasNio = dataFile.wasNio();
DataFileDefrag dfd = new DataFileDefrag();
indexRoots = dfd.defrag(dDatabase, sName);
closeFile();
Trace.printSystemOut("closed old cache");
if (wasNio) {
System.gc();
FileUtil.renameOverwrite(sName, sName + ".old");
File oldfile = new File(sName + ".old");
oldfile.delete();
FileUtil.deleteOnExit(oldfile);
}
FileUtil.renameOverwrite(sName + ".new", sName);
String backupName = dDatabase.getPath() + ".backup";
backup(backupName + ".new");
FileUtil.renameOverwrite(backupName + ".new", backupName);
dbProps.setProperty("hsqldb.cache_version", "1.7.0");
for (int i = 0; i < indexRoots.size(); i++) {
int[] roots = (int[]) indexRoots.get(i);
if (roots != null) {
Trace.printSystemOut(
org.hsqldb.lib.StringUtil.getList(roots, " ", ""));
}
}
} catch (Exception e) {
// e.printStackTrace();
throw Trace.error(Trace.FILE_IO_ERROR,
Trace.DataFileCache_defrag, new Object[] {
e, sName
});
} finally {
init();
open(cacheReadonly);
if (indexRoots != null) {
DataFileDefrag.updateTableIndexRoots(dDatabase.getTables(),
indexRoots);
}
Trace.printSystemOut("opened cache");
}
}
|
void defrag() throws HsqlException { close(); if (!FileUtil.exists(sName)) { init(); open(cacheReadonly); Trace.printSystemOut(STR); return; } HsqlArrayList indexRoots = null; try { open(true); boolean wasNio = dataFile.wasNio(); DataFileDefrag dfd = new DataFileDefrag(); indexRoots = dfd.defrag(dDatabase, sName); closeFile(); Trace.printSystemOut(STR); if (wasNio) { System.gc(); FileUtil.renameOverwrite(sName, sName + ".old"); File oldfile = new File(sName + ".old"); oldfile.delete(); FileUtil.deleteOnExit(oldfile); } FileUtil.renameOverwrite(sName + ".new", sName); String backupName = dDatabase.getPath() + STR; backup(backupName + ".new"); FileUtil.renameOverwrite(backupName + ".new", backupName); dbProps.setProperty(STR, "1.7.0"); for (int i = 0; i < indexRoots.size(); i++) { int[] roots = (int[]) indexRoots.get(i); if (roots != null) { Trace.printSystemOut( org.hsqldb.lib.StringUtil.getList(roots, " ", "")); } } } catch (Exception e) { throw Trace.error(Trace.FILE_IO_ERROR, Trace.DataFileCache_defrag, new Object[] { e, sName }); } finally { init(); open(cacheReadonly); if (indexRoots != null) { DataFileDefrag.updateTableIndexRoots(dDatabase.getTables(), indexRoots); } Trace.printSystemOut(STR); } }
|
/**
* Writes out all the rows to a new file without fragmentation and
* returns an ArrayList containing new positions for index roots.
* Is called with the cache file closed.
*
* Not possible with nio .data file as it can't be overwritten
*/
|
Writes out all the rows to a new file without fragmentation and returns an ArrayList containing new positions for index roots. Is called with the cache file closed. Not possible with nio .data file as it can't be overwritten
|
defrag
|
{
"repo_name": "simeshev/parabuild-ci",
"path": "3rdparty/hsqldb1733/src/org/hsqldb/DataFileCache.java",
"license": "lgpl-3.0",
"size": 14715
}
|
[
"java.io.File",
"org.hsqldb.lib.FileUtil",
"org.hsqldb.lib.HsqlArrayList"
] |
import java.io.File; import org.hsqldb.lib.FileUtil; import org.hsqldb.lib.HsqlArrayList;
|
import java.io.*; import org.hsqldb.lib.*;
|
[
"java.io",
"org.hsqldb.lib"
] |
java.io; org.hsqldb.lib;
| 560,259
|
public SliceBuilder slice() {
return sliceBuilder;
}
|
SliceBuilder function() { return sliceBuilder; }
|
/**
* Gets the slice used to filter the search hits, the top hits and the aggregations.
*/
|
Gets the slice used to filter the search hits, the top hits and the aggregations
|
slice
|
{
"repo_name": "robin13/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java",
"license": "apache-2.0",
"size": 66952
}
|
[
"org.elasticsearch.search.slice.SliceBuilder"
] |
import org.elasticsearch.search.slice.SliceBuilder;
|
import org.elasticsearch.search.slice.*;
|
[
"org.elasticsearch.search"
] |
org.elasticsearch.search;
| 2,121,742
|
public static boolean deleteFilesRecursive(File path) {
boolean success = true;
if (path.isDirectory()) {
String files[] = path.list();
// delete contents first
for (int i = 0; i < files.length; i++) {
File newPath = new File(path, files[i]);
success = deleteFilesRecursive(newPath);
if (!success) {
break;
}
}
}
if (success) {
// delete path (whether it is a file or directory)
success = path.delete();
if (!success) {
log.warn("Unable to delete " + path.getPath());
}
}
return success;
}
|
static boolean function(File path) { boolean success = true; if (path.isDirectory()) { String files[] = path.list(); for (int i = 0; i < files.length; i++) { File newPath = new File(path, files[i]); success = deleteFilesRecursive(newPath); if (!success) { break; } } } if (success) { success = path.delete(); if (!success) { log.warn(STR + path.getPath()); } } return success; }
|
/**
* Recursively deletes a directory. If path represents a file it is deleted. If it represents a directory then its contents are deleted before the directory
* itself is removed.
*
* Be very careful calling this function, specifying a root directory could wipe the drive (assuming correct permissions)
*
* @param path
* File or directory to delete.
*
* @return true if deletion was successful, false otherwise
*/
|
Recursively deletes a directory. If path represents a file it is deleted. If it represents a directory then its contents are deleted before the directory itself is removed. Be very careful calling this function, specifying a root directory could wipe the drive (assuming correct permissions)
|
deleteFilesRecursive
|
{
"repo_name": "squireg/portal-core",
"path": "src/main/java/org/auscope/portal/core/util/FileIOUtil.java",
"license": "lgpl-3.0",
"size": 21568
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,486,391
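A minimal usage sketch for deleteFilesRecursive, creating and then removing a scratch tree under the system temp directory; the path is illustrative, and per the warning above it should never point anywhere near a filesystem root.

import java.io.File;
import org.auscope.portal.core.util.FileIOUtil;

public class DeleteDemo {
    public static void main(String[] args) {
        File scratch = new File(System.getProperty("java.io.tmpdir"), "fileio-demo");
        new File(scratch, "sub").mkdirs(); // build a small tree to remove
        System.out.println("deleted: " + FileIOUtil.deleteFilesRecursive(scratch));
    }
}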
|
@Deprecated
protected SignedJWT signJWTWithRSA(SignedJWT signedJWT, JWSAlgorithm jwsAlgorithm, String tenantDomain,
int tenantId)
throws IdentityOAuth2Exception {
try {
Key privateKey = getPrivateKey(tenantDomain, tenantId);
JWSSigner signer = OAuth2Util.createJWSSigner((RSAPrivateKey) privateKey);
signedJWT.sign(signer);
return signedJWT;
} catch (JOSEException e) {
log.error("Error in obtaining tenant's keystore", e);
throw new IdentityOAuth2Exception("Error in obtaining tenant's keystore", e);
} catch (Exception e) {
log.error("Error in obtaining tenant's keystore", e);
throw new IdentityOAuth2Exception("Error in obtaining tenant's keystore", e);
}
}
|
SignedJWT function(SignedJWT signedJWT, JWSAlgorithm jwsAlgorithm, String tenantDomain, int tenantId) throws IdentityOAuth2Exception { try { Key privateKey = getPrivateKey(tenantDomain, tenantId); JWSSigner signer = OAuth2Util.createJWSSigner((RSAPrivateKey) privateKey); signedJWT.sign(signer); return signedJWT; } catch (JOSEException e) { log.error(STR, e); throw new IdentityOAuth2Exception(STR, e); } catch (Exception e) { log.error(STR, e); throw new IdentityOAuth2Exception(STR, e); } }
|
/**
* Sign with given RSA Algorithm
*
* @param signedJWT
* @param jwsAlgorithm
* @param tenantDomain
* @param tenantId
* @return
* @throws IdentityOAuth2Exception
*/
|
Sign with given RSA Algorithm
|
signJWTWithRSA
|
{
"repo_name": "darshanasbg/identity-inbound-auth-oauth",
"path": "components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/authcontext/JWTTokenGenerator.java",
"license": "apache-2.0",
"size": 24079
}
|
[
"com.nimbusds.jose.JOSEException",
"com.nimbusds.jose.JWSAlgorithm",
"com.nimbusds.jose.JWSSigner",
"com.nimbusds.jwt.SignedJWT",
"java.security.Key",
"java.security.interfaces.RSAPrivateKey",
"org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception",
"org.wso2.carbon.identity.oauth2.util.OAuth2Util"
] |
import com.nimbusds.jose.JOSEException; import com.nimbusds.jose.JWSAlgorithm; import com.nimbusds.jose.JWSSigner; import com.nimbusds.jwt.SignedJWT; import java.security.Key; import java.security.interfaces.RSAPrivateKey; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
|
import com.nimbusds.jose.*; import com.nimbusds.jwt.*; import java.security.*; import java.security.interfaces.*; import org.wso2.carbon.identity.oauth2.*; import org.wso2.carbon.identity.oauth2.util.*;
|
[
"com.nimbusds.jose",
"com.nimbusds.jwt",
"java.security",
"org.wso2.carbon"
] |
com.nimbusds.jose; com.nimbusds.jwt; java.security; org.wso2.carbon;
| 922,761
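A standalone sketch of the same Nimbus JOSE signing flow, assuming a recent nimbus-jose-jwt; it generates a throwaway RSA key instead of performing the tenant keystore lookup done by the method above.

import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.JWSHeader;
import com.nimbusds.jose.crypto.RSASSASigner;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.SignedJWT;
import java.security.KeyPairGenerator;
import java.security.interfaces.RSAPrivateKey;

public class JwtSignDemo {
    public static void main(String[] args) throws Exception {
        KeyPairGenerator gen = KeyPairGenerator.getInstance("RSA");
        gen.initialize(2048);
        RSAPrivateKey privateKey = (RSAPrivateKey) gen.generateKeyPair().getPrivate();

        SignedJWT jwt = new SignedJWT(
                new JWSHeader(JWSAlgorithm.RS256),
                new JWTClaimsSet.Builder().subject("demo-user").build());
        jwt.sign(new RSASSASigner(privateKey)); // same sign(...) call as above
        System.out.println(jwt.serialize());
    }
}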
|
@Test
public void streamToJournalCheckpoint() throws Exception {
InodeDirectoryView root = mTree.getRoot();
// test root
verifyJournal(mTree, Lists.<InodeView>newArrayList(root));
// test nested URI
createPath(mTree, NESTED_FILE_URI, sNestedFileOptions);
InodeDirectory nested = (InodeDirectory) root.getChild("nested");
InodeDirectory test = (InodeDirectory) nested.getChild("test");
InodeView file = test.getChild("file");
verifyJournal(mTree, Arrays.asList(root, nested, test, file));
// add a sibling of test and verify journaling is in correct order (breadth first)
createPath(mTree, new AlluxioURI("/nested/test1/file1"), sNestedFileOptions);
InodeDirectory test1 = (InodeDirectory) nested.getChild("test1");
InodeView file1 = test1.getChild("file1");
verifyJournal(mTree, Arrays.asList(root, nested, test, test1, file, file1));
}
|
void function() throws Exception { InodeDirectoryView root = mTree.getRoot(); verifyJournal(mTree, Lists.<InodeView>newArrayList(root)); createPath(mTree, NESTED_FILE_URI, sNestedFileOptions); InodeDirectory nested = (InodeDirectory) root.getChild(STR); InodeDirectory test = (InodeDirectory) nested.getChild("test"); InodeView file = test.getChild("file"); verifyJournal(mTree, Arrays.asList(root, nested, test, file)); createPath(mTree, new AlluxioURI(STR), sNestedFileOptions); InodeDirectory test1 = (InodeDirectory) nested.getChild("test1"); InodeView file1 = test1.getChild("file1"); verifyJournal(mTree, Arrays.asList(root, nested, test, test1, file, file1)); }
|
/**
* Tests that streaming to a journal checkpoint works.
*/
|
Tests that streaming to a journal checkpoint works
|
streamToJournalCheckpoint
|
{
"repo_name": "aaudiber/alluxio",
"path": "core/server/master/src/test/java/alluxio/master/file/meta/InodeTreeTest.java",
"license": "apache-2.0",
"size": 35358
}
|
[
"com.google.common.collect.Lists",
"java.util.Arrays"
] |
import com.google.common.collect.Lists; import java.util.Arrays;
|
import com.google.common.collect.*; import java.util.*;
|
[
"com.google.common",
"java.util"
] |
com.google.common; java.util;
| 2,573,353
|
void storeInLocalRepository( ArtifactRepository localRepository,
ArtifactRepository remoteRepository )
throws RepositoryMetadataStoreException;
|
void storeInLocalRepository( ArtifactRepository localRepository, ArtifactRepository remoteRepository ) throws RepositoryMetadataStoreException;
|
/**
* Store the metadata in the local repository.
*
* @param localRepository the local repository
* @param remoteRepository the remote repository it came from
* @todo this should only be needed on the repository metadata
*/
|
Store the metadata in the local repository
|
storeInLocalRepository
|
{
"repo_name": "wangyuesong0/maven",
"path": "maven-artifact/src/main/java/org/apache/maven/repository/legacy/metadata/ArtifactMetadata.java",
"license": "apache-2.0",
"size": 2773
}
|
[
"org.apache.maven.artifact.repository.ArtifactRepository",
"org.apache.maven.artifact.repository.metadata.RepositoryMetadataStoreException"
] |
import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.metadata.RepositoryMetadataStoreException;
|
import org.apache.maven.artifact.repository.*; import org.apache.maven.artifact.repository.metadata.*;
|
[
"org.apache.maven"
] |
org.apache.maven;
| 1,469,618
|
private void assertOwnedBy(IObject object, EventContext expectedOwner) throws ServerError {
assertOwnedBy(Collections.singleton(object), expectedOwner);
}
|
void function(IObject object, EventContext expectedOwner) throws ServerError { assertOwnedBy(Collections.singleton(object), expectedOwner); }
|
/**
* Assert that the given object is owned by the given owner.
* @param object a model object
* @param expectedOwner a user's event context
* @throws ServerError unexpected
*/
|
Assert that the given object is owned by the given owner
|
assertOwnedBy
|
{
"repo_name": "dominikl/openmicroscopy",
"path": "components/tools/OmeroJava/test/integration/chown/PermissionsTest.java",
"license": "gpl-2.0",
"size": 34686
}
|
[
"java.util.Collections"
] |
import java.util.Collections;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,246,632
|
public List<Rule> getRules () throws Exception {
Conf conf = getConf();
return conf.getRules();
}
|
List<Rule> function () throws Exception { Conf conf = getConf(); return conf.getRules(); }
|
/**
* Complete list of Rules on this rewrite filter
*
* @return
* @throws Exception
*/
|
Complete list of Rules on this rewrite filter
|
getRules
|
{
"repo_name": "dotCMS/core-2.x",
"path": "src/com/dotmarketing/filters/DotUrlRewriteFilter.java",
"license": "gpl-3.0",
"size": 3215
}
|
[
"java.util.List",
"org.tuckey.web.filters.urlrewrite.Conf",
"org.tuckey.web.filters.urlrewrite.Rule"
] |
import java.util.List; import org.tuckey.web.filters.urlrewrite.Conf; import org.tuckey.web.filters.urlrewrite.Rule;
|
import java.util.*; import org.tuckey.web.filters.urlrewrite.*;
|
[
"java.util",
"org.tuckey.web"
] |
java.util; org.tuckey.web;
| 2,172,833
|
public static String read ( File file ) throws IOException {
if ( ! file.isFile () )
throw new IOException ( "File " + file.getName() + " does not exist");
return read ( new FileInputStream ( file ) );
}
|
static String function ( File file ) throws IOException { if ( ! file.isFile () ) throw new IOException ( STR + file.getName() + STR); return read ( new FileInputStream ( file ) ); }
|
/**
* get the contents of a file as String
*
* @param file
*
* @return contents of the given file as String
*
* @throws IOException
*/
|
get the contents of a file as String
|
read
|
{
"repo_name": "AlexRuppert/las2peer_project",
"path": "java/i5/las2peer/tools/FileContentReader.java",
"license": "mit",
"size": 1575
}
|
[
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException"
] |
import java.io.File; import java.io.FileInputStream; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,635,517
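On Java 11+ the same result is a one-liner in java.nio; a sketch shown only as a modern equivalent outside the las2peer class (note it throws NoSuchFileException for a missing file instead of the custom message above):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

static String readNio(Path file) throws IOException {
    // Reads the whole file into a String, decoding as UTF-8.
    return Files.readString(file, StandardCharsets.UTF_8);
}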
|
public static Map<String, String> posixAttributesToMap(PosixFileAttributes attrs) {
if (attrs == null)
return null;
Map<String, String> props = U.newHashMap(3);
props.put(IgfsUtils.PROP_USER_NAME, attrs.owner().getName());
props.put(IgfsUtils.PROP_GROUP_NAME, attrs.group().getName());
int perm = 0;
for(PosixFilePermission p : attrs.permissions())
perm |= (1 << 8 - p.ordinal());
props.put(IgfsUtils.PROP_PERMISSION, '0' + Integer.toOctalString(perm));
return props;
}
private LocalFileSystemUtils() {
// No-op.
}
|
static Map<String, String> function(PosixFileAttributes attrs) { if (attrs == null) return null; Map<String, String> props = U.newHashMap(3); props.put(IgfsUtils.PROP_USER_NAME, attrs.owner().getName()); props.put(IgfsUtils.PROP_GROUP_NAME, attrs.group().getName()); int perm = 0; for(PosixFilePermission p : attrs.permissions()) perm |= (1 << 8 - p.ordinal()); props.put(IgfsUtils.PROP_PERMISSION, '0' + Integer.toOctalString(perm)); return props; } private LocalFileSystemUtils() { }
|
/**
* Convert POSIX attributes to property map.
*
* @param attrs Attributes view.
* @return IGFS properties map.
*/
|
Convert POSIX attributes to property map
|
posixAttributesToMap
|
{
"repo_name": "leveyj/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/secondary/local/LocalFileSystemUtils.java",
"license": "apache-2.0",
"size": 4728
}
|
[
"java.nio.file.attribute.PosixFileAttributes",
"java.nio.file.attribute.PosixFilePermission",
"java.util.Map",
"org.apache.ignite.internal.processors.igfs.IgfsUtils",
"org.apache.ignite.internal.util.typedef.internal.U"
] |
import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.util.Map; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.typedef.internal.U;
|
import java.nio.file.attribute.*; import java.util.*; import org.apache.ignite.internal.processors.igfs.*; import org.apache.ignite.internal.util.typedef.internal.*;
|
[
"java.nio",
"java.util",
"org.apache.ignite"
] |
java.nio; java.util; org.apache.ignite;
| 2,558,768
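A usage sketch for the converter above: the attributes view normally comes from Files.readAttributes, which only succeeds on POSIX-capable file systems; the helper name is invented for illustration:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFileAttributes;
import java.util.Map;

static Map<String, String> readProps(Path path) throws IOException {
    // Throws UnsupportedOperationException if the file system has no POSIX view.
    PosixFileAttributes attrs = Files.readAttributes(path, PosixFileAttributes.class);
    return LocalFileSystemUtils.posixAttributesToMap(attrs);
}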
|
public List<Process> getProcesses() {
return processes;
}
|
List<Process> function() { return processes; }
|
/**
* Gets a list of processes that were parsed in the content specification
*
* @return A List of Processes
*/
|
Gets a list of processes that were parsed in the content specification
|
getProcesses
|
{
"repo_name": "pressgang-ccms/PressGangCCMSContentSpecProcessor",
"path": "src/main/java/org/jboss/pressgang/ccms/contentspec/processor/ContentSpecParser.java",
"license": "gpl-3.0",
"size": 111420
}
|
[
"java.util.List",
"org.jboss.pressgang.ccms.contentspec.Process"
] |
import java.util.List; import org.jboss.pressgang.ccms.contentspec.Process;
|
import java.util.*; import org.jboss.pressgang.ccms.contentspec.*;
|
[
"java.util",
"org.jboss.pressgang"
] |
java.util; org.jboss.pressgang;
| 1,172,180
|
private static BufferedWriter openWriter(final File file) throws IOException {
return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), ENCODING));
}
|
static BufferedWriter function(final File file) throws IOException { return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), ENCODING)); }
|
/**
* Opens a CSS file for writing.
*/
|
Opens a CSS file for writing
|
openWriter
|
{
"repo_name": "Geomatys/sis",
"path": "core/sis-build-helper/src/main/java/org/apache/sis/internal/doclet/Doclet.java",
"license": "apache-2.0",
"size": 9802
}
|
[
"java.io.BufferedWriter",
"java.io.File",
"java.io.FileOutputStream",
"java.io.IOException",
"java.io.OutputStreamWriter"
] |
import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 307,400
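Since openWriter hands back an open stream, the caller owns closing it; a sketch as if written inside the same Doclet class, with try-with-resources doing the cleanup (file and CSS content are illustrative):

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;

static void writeCss(File file) throws IOException {
    try (BufferedWriter out = openWriter(file)) {
        out.write("body { margin: 0; }"); // sample rule; real content comes from the doclet
    }
}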
|
public void shutdownMiniCluster() throws IOException {
LOG.info("Shutting down minicluster");
shutdownMiniHBaseCluster();
shutdownMiniDFSCluster();
shutdownMiniZKCluster();
cleanupTestDir();
miniClusterRunning = false;
LOG.info("Minicluster is down");
}
|
void function() throws IOException { LOG.info(STR); shutdownMiniHBaseCluster(); shutdownMiniDFSCluster(); shutdownMiniZKCluster(); cleanupTestDir(); miniClusterRunning = false; LOG.info(STR); }
|
/**
* Stops mini hbase, zk, and hdfs clusters.
* @see #startMiniCluster(int)
*/
|
Stops mini hbase, zk, and hdfs clusters
|
shutdownMiniCluster
|
{
"repo_name": "HubSpot/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 173926
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,069,388
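A minimal sketch of the start/stop pairing this method closes out, using the startMiniCluster(int) overload the javadoc points at; JUnit 4 lifecycle annotations assumed:

import org.junit.AfterClass;
import org.junit.BeforeClass;

private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

@BeforeClass
public static void setUpCluster() throws Exception {
    TEST_UTIL.startMiniCluster(1); // one region server
}

@AfterClass
public static void tearDownCluster() throws Exception {
    TEST_UTIL.shutdownMiniCluster(); // stops hbase, zk, and hdfs as documented above
}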
|
public static PlatformListenable listen(final PlatformContext ctx, IgniteInternalFuture fut, final long futPtr,
final int typ, PlatformTarget target) {
PlatformListenable listenable = getListenable(fut);
listen(ctx, listenable, futPtr, typ, null, target);
return listenable;
}
|
static PlatformListenable function(final PlatformContext ctx, IgniteInternalFuture fut, final long futPtr, final int typ, PlatformTarget target) { PlatformListenable listenable = getListenable(fut); listen(ctx, listenable, futPtr, typ, null, target); return listenable; }
|
/**
* Listen future.
*
* @param ctx Context.
* @param fut Java future.
* @param futPtr Native future pointer.
* @param typ Expected return type.
* @return Resulting listenable.
*/
|
Listen future
|
listen
|
{
"repo_name": "afinka77/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/platform/utils/PlatformFutureUtils.java",
"license": "apache-2.0",
"size": 14150
}
|
[
"org.apache.ignite.internal.IgniteInternalFuture",
"org.apache.ignite.internal.processors.platform.PlatformContext",
"org.apache.ignite.internal.processors.platform.PlatformTarget"
] |
import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.processors.platform.PlatformContext; import org.apache.ignite.internal.processors.platform.PlatformTarget;
|
import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.platform.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 1,737,942
|
static List<String> execSelectAndDumpData(String selectStmt, IDriver driver, String msg)
throws Exception {
executeStatementOnDriver(selectStmt, driver);
ArrayList<String> valuesReadFromHiveDriver = new ArrayList<String>();
driver.getResults(valuesReadFromHiveDriver);
int rowIdx = 0;
LOG.debug(msg);
for (String row : valuesReadFromHiveDriver) {
LOG.debug(" rowIdx=" + rowIdx++ + ":" + row);
}
return valuesReadFromHiveDriver;
}
|
static List<String> execSelectAndDumpData(String selectStmt, IDriver driver, String msg) throws Exception { executeStatementOnDriver(selectStmt, driver); ArrayList<String> valuesReadFromHiveDriver = new ArrayList<String>(); driver.getResults(valuesReadFromHiveDriver); int rowIdx = 0; LOG.debug(msg); for (String row : valuesReadFromHiveDriver) { LOG.debug(STR + rowIdx++ + ":" + row); } return valuesReadFromHiveDriver; }
|
/**
* convenience method to execute a select stmt and dump results to log file
*/
|
convenience method to execute a select stmt and dump results to log file
|
execSelectAndDumpData
|
{
"repo_name": "sankarh/hive",
"path": "itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java",
"license": "apache-2.0",
"size": 118244
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.hive.ql.IDriver"
] |
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.ql.IDriver;
|
import java.util.*; import org.apache.hadoop.hive.ql.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,304,641
|
public static WorkspaceItem create(Context c, Collection coll,
boolean template) throws AuthorizeException, SQLException,
IOException
{
// Check the user has permission to ADD to the collection
AuthorizeManager.authorizeAction(c, coll, Constants.ADD);
// Create an item
Item i = Item.create(c);
i.setSubmitter(c.getCurrentUser());
// Now create the policies for the submitter and workflow
// users to modify item and contents
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
Group step1group = coll.getWorkflowGroup(1);
Group step2group = coll.getWorkflowGroup(2);
Group step3group = coll.getWorkflowGroup(3);
EPerson e = c.getCurrentUser();
// read permission
AuthorizeManager.addPolicy(c, i, Constants.READ, e);
// write permission
AuthorizeManager.addPolicy(c, i, Constants.WRITE, e);
// add permission
AuthorizeManager.addPolicy(c, i, Constants.ADD, e);
// remove contents permission
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e);
// Copy template if appropriate
Item templateItem = coll.getTemplateItem();
if (template && (templateItem != null))
{
DCValue[] md = templateItem.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (int n = 0; n < md.length; n++)
{
i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
md[n].value);
}
}
i.update();
// Create the workspace item row
TableRow row = DatabaseManager.row("workspaceitem");
row.setColumn("item_id", i.getID());
row.setColumn("collection_id", coll.getID());
log.info(LogManager.getHeader(c, "create_workspace_item",
"workspace_item_id=" + row.getIntColumn("workspace_item_id")
+ "item_id=" + i.getID() + "collection_id="
+ coll.getID()));
DatabaseManager.insert(c, row);
WorkspaceItem wi = new WorkspaceItem(c, row);
return wi;
}
|
static WorkspaceItem function(Context c, Collection coll, boolean template) throws AuthorizeException, SQLException, IOException { AuthorizeManager.authorizeAction(c, coll, Constants.ADD); Item i = Item.create(c); i.setSubmitter(c.getCurrentUser()); Group step1group = coll.getWorkflowGroup(1); Group step2group = coll.getWorkflowGroup(2); Group step3group = coll.getWorkflowGroup(3); EPerson e = c.getCurrentUser(); AuthorizeManager.addPolicy(c, i, Constants.READ, e); AuthorizeManager.addPolicy(c, i, Constants.WRITE, e); AuthorizeManager.addPolicy(c, i, Constants.ADD, e); AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e); Item templateItem = coll.getTemplateItem(); if (template && (templateItem != null)) { DCValue[] md = templateItem.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY); for (int n = 0; n < md.length; n++) { i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language, md[n].value); } } i.update(); TableRow row = DatabaseManager.row(STR); row.setColumn(STR, i.getID()); row.setColumn(STR, coll.getID()); log.info(LogManager.getHeader(c, STR, STR + row.getIntColumn(STR) + STR + i.getID() + STR + coll.getID())); DatabaseManager.insert(c, row); WorkspaceItem wi = new WorkspaceItem(c, row); return wi; }
|
/**
* Create a new workspace item, with a new ID. An Item is also created. The
* submitter is the current user in the context.
*
* @param c
* DSpace context object
* @param coll
* Collection being submitted to
* @param template
* if <code>true</code>, the workspace item starts as a copy
* of the collection's template item
*
* @return the newly created workspace item
*/
|
Create a new workspace item, with a new ID. An Item is also created. The submitter is the current user in the context
|
create
|
{
"repo_name": "jamie-dryad/dryad-repo",
"path": "dspace/modules/api/src/main/java/org/dspace/content/WorkspaceItem.java",
"license": "bsd-3-clause",
"size": 18567
}
|
[
"java.io.IOException",
"java.sql.SQLException",
"org.dspace.authorize.AuthorizeException",
"org.dspace.authorize.AuthorizeManager",
"org.dspace.core.Constants",
"org.dspace.core.Context",
"org.dspace.core.LogManager",
"org.dspace.eperson.EPerson",
"org.dspace.eperson.Group",
"org.dspace.storage.rdbms.DatabaseManager",
"org.dspace.storage.rdbms.TableRow"
] |
import java.io.IOException; import java.sql.SQLException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeManager; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRow;
|
import java.io.*; import java.sql.*; import org.dspace.authorize.*; import org.dspace.core.*; import org.dspace.eperson.*; import org.dspace.storage.rdbms.*;
|
[
"java.io",
"java.sql",
"org.dspace.authorize",
"org.dspace.core",
"org.dspace.eperson",
"org.dspace.storage"
] |
java.io; java.sql; org.dspace.authorize; org.dspace.core; org.dspace.eperson; org.dspace.storage;
| 1,244,240
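A caller-side sketch based only on the factory signature above; acquiring the Context and Collection is elided, and the helper name is invented for illustration:

import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.core.Context;

static WorkspaceItem startSubmission(Context context, Collection collection)
        throws AuthorizeException, SQLException, IOException {
    // true: seed the new workspace item from the collection's template item, per the javadoc
    return WorkspaceItem.create(context, collection, true);
}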
|
public String processUserEICNListSearch(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response) {
String eicnListXML = null;
// String zipCode = null;
ETRAMInquiryDTO inquiryDTO = null;
HttpSession userSession = request.getSession();
String output = null;
// String userSelection = null;
String userID = null;
try {
inquiryDTO = new ETRAMInquiryDTO();
InternalAssociate loginUser = (InternalAssociate) userSession.getAttribute("dirProfile");
userID = loginUser.getUserID();
inquiryDTO.setUserSelection("s6");
inquiryDTO.setHicn("HICN");
inquiryDTO.setUserID(userID);
// inquiryDTO.setUserID("12345");
inquiryDTO.setGroupName(this.getPolicyGroup(loginUser, request));
HashMap<Object, Object> eicnList = getEICNList(inquiryDTO); // get EICN hash map
if (eicnList == null || eicnList.size() == 0) {
// check for the dummy user ID
String dummyUserID = loginUser.getSecondaryUid();
if (dummyUserID != null && dummyUserID.trim().length() != 0)// If dummy user ID is available
{
inquiryDTO.setUserID(loginUser.getSecondaryUid());
inquiryDTO.setGroupName("GA");
eicnList = getEICNList(inquiryDTO);
}
if (eicnList == null || eicnList.size() == 0) {
return this.showEICNSearch(request, response);
}
}
if (eicnList != null && eicnList.size() != 0) {
if (eicnList.size() > 1000) {
EmailUtility emailUtil = new EmailUtility();
emailUtil.sendEmailToSysAdmin(buildEmailTextForSearchQuery(inquiryDTO), "Loop Count > 1000 in eQuest");
}
eicnListXML = getEICNListXML(eicnList);
this.saveInSession(request, eicnListXML, eicnList);
if (eicnList.size() == 1)// If there is only one EICN show the user default screen but not EICN List screen
{
Iterator<Object> eicnsIterator = eicnList.values().iterator();
String tempNumber = "";
while (eicnsIterator.hasNext()) {
EICNBrief tempEICN = (EICNBrief) eicnsIterator.next();
tempNumber = tempEICN.getEicnNumber();
}
String defaultScreen = this.getDefaultScreen(loginUser.getUserID());
if (defaultScreen != null && defaultScreen.equalsIgnoreCase("medicalser"))
defaultScreen = "medicalservice";
response.sendRedirect("../servlet/etraminquiry?userrequest=" + defaultScreen + "&eicn=" + tempNumber);
}
}
// apply xsl and get html back
TransformationEngine transformer = new TransformationEngine();
HashMap<Object, Object> properties = new HashMap<Object, Object>();
// eTraM Inquiry
if (eicnList == null || eicnList.size() == 0) {
properties.put("userselection", "eTraM Inquiry");
} else {
properties.put("userselection", "eicnquery");
}
// get default screen name
String defaultScreen = getDefaultScreen(loginUser.getUserID());
if (defaultScreen == null)
defaultScreen = "billservice";
properties.put("defaultview", defaultScreen);
properties.put("title", "EICN List");
transformer.setParameters(properties);
output = transformer.getHTML(eicnListXML, getXSLTTransformer());
} catch (Exception e) {
log.error(e);
}
return output;
}
|
String function(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) { String eicnListXML = null; ETRAMInquiryDTO inquiryDTO = null; HttpSession userSession = request.getSession(); String output = null; String userID = null; try { inquiryDTO = new ETRAMInquiryDTO(); InternalAssociate loginUser = (InternalAssociate) userSession.getAttribute(STR); userID = loginUser.getUserID(); inquiryDTO.setUserSelection("s6"); inquiryDTO.setHicn("HICN"); inquiryDTO.setUserID(userID); inquiryDTO.setGroupName(this.getPolicyGroup(loginUser, request)); HashMap<Object, Object> eicnList = getEICNList(inquiryDTO); if (eicnList == null || eicnList.size() == 0) { String dummyUserID = loginUser.getSecondaryUid(); if (dummyUserID != null && dummyUserID.trim().length() != 0) { inquiryDTO.setUserID(loginUser.getSecondaryUid()); inquiryDTO.setGroupName("GA"); eicnList = getEICNList(inquiryDTO); } if (eicnList == null || eicnList.size() == 0) { return this.showEICNSearch(request, response); } } if (eicnList != null && eicnList.size() != 0) { if (eicnList.size() > 1000) { EmailUtility emailUtil = new EmailUtility(); emailUtil.sendEmailToSysAdmin(buildEmailTextForSearchQuery(inquiryDTO), STR); } eicnListXML = getEICNListXML(eicnList); this.saveInSession(request, eicnListXML, eicnList); if (eicnList.size() == 1) { Iterator<Object> eicnsIterator = eicnList.values().iterator(); String tempNumber = ""; while (eicnsIterator.hasNext()) { EICNBrief tempEICN = (EICNBrief) eicnsIterator.next(); tempNumber = tempEICN.getEicnNumber(); } String defaultScreen = this.getDefaultScreen(loginUser.getUserID()); if (defaultScreen != null && defaultScreen.equalsIgnoreCase(STR)) defaultScreen = STR; response.sendRedirect(STR + defaultScreen + STR + tempNumber); } } TransformationEngine transformer = new TransformationEngine(); HashMap<Object, Object> properties = new HashMap<Object, Object>(); if (eicnList == null || eicnList.size() == 0) { properties.put(STR, STR); } else { properties.put(STR, STR); } String defaultScreen = getDefaultScreen(loginUser.getUserID()); if (defaultScreen == null) defaultScreen = STR; properties.put(STR, defaultScreen); properties.put("title", STR); transformer.setParameters(properties); output = transformer.getHTML(eicnListXML, getXSLTTransformer()); } catch (Exception e) { log.error(e); } return output; }
|
/**
* Insert the method's description here. Creation date: (09/15/2003 9:24:18 AM)
*
* @return java.lang.String
*/
|
Insert the method's description here. Creation date: (09/15/2003 9:24:18 AM)
|
processUserEICNListSearch
|
{
"repo_name": "ankitbaderiya/code-samples",
"path": "ETRAMInquiryControl.java",
"license": "mit",
"size": 96362
}
|
[
"com.mutualofomaha.edivision.common.ETRAMInquiryDTO",
"com.mutualofomaha.edivision.common.EmailUtility",
"com.mutualofomaha.hipaa.sessionbeans.InternalAssociate",
"java.util.HashMap",
"java.util.Iterator",
"java.util.List",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"javax.servlet.http.HttpSession"
] |
import com.mutualofomaha.edivision.common.ETRAMInquiryDTO; import com.mutualofomaha.edivision.common.EmailUtility; import com.mutualofomaha.hipaa.sessionbeans.InternalAssociate; import java.util.HashMap; import java.util.Iterator; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession;
|
import com.mutualofomaha.edivision.common.*; import com.mutualofomaha.hipaa.sessionbeans.*; import java.util.*; import javax.servlet.http.*;
|
[
"com.mutualofomaha.edivision",
"com.mutualofomaha.hipaa",
"java.util",
"javax.servlet"
] |
com.mutualofomaha.edivision; com.mutualofomaha.hipaa; java.util; javax.servlet;
| 1,429,270
|
public void addPropertiesFunction(PropertiesFunction function) {
this.functions.put(function.getName(), function);
}
|
void function(PropertiesFunction function) { this.functions.put(function.getName(), function); }
|
/**
* Registers the {@link PropertiesFunction} as a function to this component.
*/
|
Registers the <code>PropertiesFunction</code> as a function to this component
|
addPropertiesFunction
|
{
"repo_name": "tadayosi/camel",
"path": "core/camel-base/src/main/java/org/apache/camel/component/properties/PropertiesComponent.java",
"license": "apache-2.0",
"size": 25517
}
|
[
"org.apache.camel.spi.PropertiesFunction"
] |
import org.apache.camel.spi.PropertiesFunction;
|
import org.apache.camel.spi.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 1,139,871
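A sketch of supplying a custom function from component setup code, assuming Camel 3's org.apache.camel.spi.PropertiesFunction SPI with its getName/apply pair; the function name "upper" is illustrative:

import org.apache.camel.spi.PropertiesFunction;

PropertiesFunction upper = new PropertiesFunction() {
    @Override
    public String getName() {
        return "upper"; // resolvable in routes as {{upper:someKey}}
    }

    @Override
    public String apply(String remainder) {
        return remainder.toUpperCase();
    }
};
propertiesComponent.addPropertiesFunction(upper);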
|
@Endpoint(
describeByClass = true
)
public static ResourceScatterNdAdd create(Scope scope, Operand<? extends TType> ref,
Operand<? extends TNumber> indices, Operand<? extends TType> updates, Options... options) {
OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "ResourceScatterNdAdd");
opBuilder.addInput(ref.asOutput());
opBuilder.addInput(indices.asOutput());
opBuilder.addInput(updates.asOutput());
if (options != null) {
for (Options opts : options) {
if (opts.useLocking != null) {
opBuilder.setAttr("use_locking", opts.useLocking);
}
}
}
return new ResourceScatterNdAdd(opBuilder.build());
}
|
@Endpoint( describeByClass = true ) static ResourceScatterNdAdd function(Scope scope, Operand<? extends TType> ref, Operand<? extends TNumber> indices, Operand<? extends TType> updates, Options... options) { OperationBuilder opBuilder = scope.opBuilder(OP_NAME, STR); opBuilder.addInput(ref.asOutput()); opBuilder.addInput(indices.asOutput()); opBuilder.addInput(updates.asOutput()); if (options != null) { for (Options opts : options) { if (opts.useLocking != null) { opBuilder.setAttr(STR, opts.useLocking); } } } return new ResourceScatterNdAdd(opBuilder.build()); }
|
/**
* Factory method to create a class wrapping a new ResourceScatterNdAdd operation.
*
* @param scope current scope
* @param ref A resource handle. Must be from a VarHandleOp.
* @param indices A Tensor. Must be one of the following types: int32, int64.
* A tensor of indices into ref.
* @param updates A Tensor. Must have the same type as ref. A tensor of
* values to add to ref.
* @param options carries optional attribute values
* @return a new instance of ResourceScatterNdAdd
*/
|
Factory method to create a class wrapping a new ResourceScatterNdAdd operation
|
create
|
{
"repo_name": "tensorflow/java",
"path": "tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/core/ResourceScatterNdAdd.java",
"license": "apache-2.0",
"size": 6799
}
|
[
"org.tensorflow.Operand",
"org.tensorflow.OperationBuilder",
"org.tensorflow.op.Scope",
"org.tensorflow.op.annotation.Endpoint",
"org.tensorflow.types.family.TNumber",
"org.tensorflow.types.family.TType"
] |
import org.tensorflow.Operand; import org.tensorflow.OperationBuilder; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Endpoint; import org.tensorflow.types.family.TNumber; import org.tensorflow.types.family.TType;
|
import org.tensorflow.*; import org.tensorflow.op.*; import org.tensorflow.op.annotation.*; import org.tensorflow.types.family.*;
|
[
"org.tensorflow",
"org.tensorflow.op",
"org.tensorflow.types"
] |
org.tensorflow; org.tensorflow.op; org.tensorflow.types;
| 259,958
|
private File getChangelogBaseDir() {
return new File(packageDir, "/" + FOLDERNAMEFOR_CHANGELOG);
}
|
File function() { return new File(packageDir, "/" + FOLDERNAMEFOR_CHANGELOG); }
|
/**
* Returns the package's change log directory
*
* @return change log directory
*/
|
Returns the package's change log directory
|
getChangelogBaseDir
|
{
"repo_name": "huihoo/olat",
"path": "olat7.8/src/main/java/org/olat/lms/ims/qti/editor/QTIEditorPackageEBL.java",
"license": "apache-2.0",
"size": 14280
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,008,263
|
public File downloadConfigDir(SolrZkClient zkClient, String configName)
throws IOException, InterruptedException, KeeperException {
File dir = Files.createTempDir();
dir.deleteOnExit();
ZkController.downloadConfigDir(zkClient, configName, dir);
File confDir = new File(dir, "conf");
if (!confDir.isDirectory()) {
// create a temporary directory with "conf" subdir and mv the config in there. This is
// necessary because of CDH-11188; solrctl does not generate nor accept directories with e.g.
// conf/solrconfig.xml which is necessary for proper solr operation. This should work
// even if solrctl changes.
confDir = new File(Files.createTempDir().getAbsolutePath(), "conf");
confDir.getParentFile().deleteOnExit();
Files.move(dir, confDir);
dir = confDir.getParentFile();
}
FileUtils.writeStringToFile(new File(dir, "solr.xml"), "<solr><cores><core name=\"collection1\" instanceDir=\".\" /></cores></solr>", "UTF-8");
verifyConfigDir(confDir);
return dir;
}
|
File function(SolrZkClient zkClient, String configName) throws IOException, InterruptedException, KeeperException { File dir = Files.createTempDir(); dir.deleteOnExit(); ZkController.downloadConfigDir(zkClient, configName, dir); File confDir = new File(dir, "conf"); if (!confDir.isDirectory()) { confDir = new File(Files.createTempDir().getAbsolutePath(), "conf"); confDir.getParentFile().deleteOnExit(); Files.move(dir, confDir); dir = confDir.getParentFile(); } FileUtils.writeStringToFile(new File(dir, STR), STR, "UTF-8"); verifyConfigDir(confDir); return dir; }
|
/**
* Download and return the config directory from ZK
*/
|
Download and return the config directory from ZK
|
downloadConfigDir
|
{
"repo_name": "pengzong1111/solr4",
"path": "solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java",
"license": "apache-2.0",
"size": 8186
}
|
[
"com.google.common.io.Files",
"java.io.File",
"java.io.IOException",
"org.apache.commons.io.FileUtils",
"org.apache.solr.cloud.ZkController",
"org.apache.solr.common.cloud.SolrZkClient",
"org.apache.zookeeper.KeeperException"
] |
import com.google.common.io.Files; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.zookeeper.KeeperException;
|
import com.google.common.io.*; import java.io.*; import org.apache.commons.io.*; import org.apache.solr.cloud.*; import org.apache.solr.common.cloud.*; import org.apache.zookeeper.*;
|
[
"com.google.common",
"java.io",
"org.apache.commons",
"org.apache.solr",
"org.apache.zookeeper"
] |
com.google.common; java.io; org.apache.commons; org.apache.solr; org.apache.zookeeper;
| 2,186,154
|
@Override
public void setLocale(Locale locale)
{
_response.setLocale(locale);
}
|
void function(Locale locale) { _response.setLocale(locale); }
|
/**
* Sets the output locale. The response will set the character encoding
* based on the locale. For example, setting the "kr" locale will set
* the character encoding to "EUC_KR".
*/
|
Sets the output locale. The response will set the character encoding based on the locale. For example, setting the "kr" locale will set the character encoding to "EUC_KR"
|
setLocale
|
{
"repo_name": "dlitz/resin",
"path": "modules/resin/src/com/caucho/server/http/ResponseWrapper.java",
"license": "gpl-2.0",
"size": 11202
}
|
[
"java.util.Locale"
] |
import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 635,685
|
@GET
@GZIP
@Path("users")
@Produces("application/json")
List<SchedulerUserData> getUsers(@HeaderParam("sessionid")
final String sessionId) throws NotConnectedRestException, PermissionRestException;
|
@Path("users") @Produces(STR) List<SchedulerUserData> getUsers(@HeaderParam(STR) final String sessionId) throws NotConnectedRestException, PermissionRestException;
|
/**
* Users currently connected to the scheduler
*
* @param sessionId
* the session id associated to this new connection
* @return list of users
* @throws NotConnectedRestException
* @throws PermissionRestException
*/
|
Users currently connected to the scheduler
|
getUsers
|
{
"repo_name": "tobwiens/scheduling",
"path": "rest/rest-api/src/main/java/org/ow2/proactive_grid_cloud_portal/common/SchedulerRestInterface.java",
"license": "agpl-3.0",
"size": 80291
}
|
[
"java.util.List",
"javax.ws.rs.HeaderParam",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"org.ow2.proactive_grid_cloud_portal.scheduler.dto.SchedulerUserData",
"org.ow2.proactive_grid_cloud_portal.scheduler.exception.NotConnectedRestException",
"org.ow2.proactive_grid_cloud_portal.scheduler.exception.PermissionRestException"
] |
import java.util.List; import javax.ws.rs.HeaderParam; import javax.ws.rs.Path; import javax.ws.rs.Produces; import org.ow2.proactive_grid_cloud_portal.scheduler.dto.SchedulerUserData; import org.ow2.proactive_grid_cloud_portal.scheduler.exception.NotConnectedRestException; import org.ow2.proactive_grid_cloud_portal.scheduler.exception.PermissionRestException;
|
import java.util.*; import javax.ws.rs.*; import org.ow2.proactive_grid_cloud_portal.scheduler.dto.*; import org.ow2.proactive_grid_cloud_portal.scheduler.exception.*;
|
[
"java.util",
"javax.ws",
"org.ow2.proactive_grid_cloud_portal"
] |
java.util; javax.ws; org.ow2.proactive_grid_cloud_portal;
| 443,795
|
@Override
public boolean isReplaceable(IBlockAccess worldIn, BlockPos pos)
{
IBlockState iblockstate = worldIn.getBlockState(pos);
if (iblockstate.getBlock() != this)
{
return true;
}
else
{
BlockACDoublePlant.EnumPlantType blockdoubleplant$enumplanttype = (BlockACDoublePlant.EnumPlantType)iblockstate.getActualState(worldIn, pos).getValue(VARIANT);
return blockdoubleplant$enumplanttype == BlockACDoublePlant.EnumPlantType.GLOWBULB || blockdoubleplant$enumplanttype == BlockACDoublePlant.EnumPlantType.FLIPANT;
}
}
|
boolean function(IBlockAccess worldIn, BlockPos pos) { IBlockState iblockstate = worldIn.getBlockState(pos); if (iblockstate.getBlock() != this) { return true; } else { BlockACDoublePlant.EnumPlantType blockdoubleplant$enumplanttype = (BlockACDoublePlant.EnumPlantType)iblockstate.getActualState(worldIn, pos).getValue(VARIANT); return blockdoubleplant$enumplanttype == BlockACDoublePlant.EnumPlantType.GLOWBULB || blockdoubleplant$enumplanttype == BlockACDoublePlant.EnumPlantType.FLIPANT; } }
|
/**
* Whether this Block can be replaced directly by other blocks (true for e.g. tall grass)
*/
|
Whether this Block can be replaced directly by other blocks (true for e.g. tall grass)
|
isReplaceable
|
{
"repo_name": "JennyLeeP/AlphaCentauri",
"path": "src/main/java/com/cyborgJenn/alphaCentauri/blocks/BlockACDoublePlant.java",
"license": "gpl-3.0",
"size": 13623
}
|
[
"net.minecraft.block.state.IBlockState",
"net.minecraft.util.math.BlockPos",
"net.minecraft.world.IBlockAccess"
] |
import net.minecraft.block.state.IBlockState; import net.minecraft.util.math.BlockPos; import net.minecraft.world.IBlockAccess;
|
import net.minecraft.block.state.*; import net.minecraft.util.math.*; import net.minecraft.world.*;
|
[
"net.minecraft.block",
"net.minecraft.util",
"net.minecraft.world"
] |
net.minecraft.block; net.minecraft.util; net.minecraft.world;
| 744,696
|
public Status getStatus();
|
Status function();
|
/**
* Retrieve the status code returned by the server
* @return Status code
*/
|
Retrieve the status code returned by the server
|
getStatus
|
{
"repo_name": "s-store/sstore-soft",
"path": "src/frontend/org/voltdb/client/ClientResponse.java",
"license": "gpl-3.0",
"size": 5021
}
|
[
"edu.brown.hstore.Hstoreservice"
] |
import edu.brown.hstore.Hstoreservice;
|
import edu.brown.hstore.*;
|
[
"edu.brown.hstore"
] |
edu.brown.hstore;
| 796,010
|
public boolean databaseManipulate(String connectionTitle, String request, Object ... args) throws SQLException{
HikariDataSource ds = this.dbSources.get(connectionTitle);
if(ds == null){
throw new IllegalArgumentException("No database pool found for identifier " + connectionTitle);
}
return databaseManipulate(ds.getConnection(), request, args);
}
|
boolean function(String connectionTitle, String request, Object ... args) throws SQLException{ HikariDataSource ds = this.dbSources.get(connectionTitle); if(ds == null){ throw new IllegalArgumentException(STR + connectionTitle); } return databaseManipulate(ds.getConnection(), request, args); }
|
/**
 * Executes an SQL manipulation statement on the provided database connection
 * @param connectionTitle String identifier of the connection to be used
 * @param request SQL request
 * @param args Arguments of the SQL request
 * @return boolean true if the request was successful, false if an error occurred
* @throws SQLException
* @throws IllegalArgumentException will be thrown if no connection for such <b>connectionTitle</b> has been initialized
*/
|
Executes an SQL manipulation statement on the provided database connection
|
databaseManipulate
|
{
"repo_name": "DiceNyan/DreamEcon_InventoryAdditionals",
"path": "src/main/java/skymine/redenergy/core/database/DatabaseConnector.java",
"license": "lgpl-2.1",
"size": 5301
}
|
[
"com.zaxxer.hikari.HikariDataSource",
"java.sql.SQLException"
] |
import com.zaxxer.hikari.HikariDataSource; import java.sql.SQLException;
|
import com.zaxxer.hikari.*; import java.sql.*;
|
[
"com.zaxxer.hikari",
"java.sql"
] |
com.zaxxer.hikari; java.sql;
| 2,138,874
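A hedged sketch of how a pool behind such an identifier is typically built and registered, assuming dbSources is the connector's name-to-HikariDataSource map; URL and credentials are placeholders:

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

HikariConfig config = new HikariConfig();
config.setJdbcUrl("jdbc:mysql://localhost:3306/econ"); // placeholder URL
config.setUsername("econ");                            // placeholder credentials
config.setPassword("secret");
dbSources.put("main", new HikariDataSource(config)); // "main" becomes the connectionTitle

Note that the overload receiving the Connection is presumably responsible for returning it to the pool via Connection.close(), since this wrapper never closes what ds.getConnection() hands out.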
|
public Artifact getLinkDynamicLibraryTool() {
return linkDynamicLibraryTool;
}
|
Artifact function() { return linkDynamicLibraryTool; }
|
/**
* Returns the tool which should be used for linking dynamic libraries, or in case it's not
* specified by the crosstool this will be @tools_repository/tools/cpp:link_dynamic_library
*/
|
Returns the tool which should be used for linking dynamic libraries, or in case it's not specified by the crosstool this will be @tools_repository/tools/cpp:link_dynamic_library
|
getLinkDynamicLibraryTool
|
{
"repo_name": "akira-baruah/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CcToolchainProvider.java",
"license": "apache-2.0",
"size": 35939
}
|
[
"com.google.devtools.build.lib.actions.Artifact"
] |
import com.google.devtools.build.lib.actions.Artifact;
|
import com.google.devtools.build.lib.actions.*;
|
[
"com.google.devtools"
] |
com.google.devtools;
| 2,541,396
|
CompletableFuture<Void> endTxn(TxnID txnID, int txnAction, long lowWaterMark);
|
CompletableFuture<Void> endTxn(TxnID txnID, int txnAction, long lowWaterMark);
|
/**
* End the transaction in this topic.
*
* @param txnID Transaction id
* @param txnAction Transaction action.
* @param lowWaterMark low water mark of this tc
* @return
*/
|
End the transaction in this topic
|
endTxn
|
{
"repo_name": "yahoo/pulsar",
"path": "pulsar-broker/src/main/java/org/apache/pulsar/broker/service/Topic.java",
"license": "apache-2.0",
"size": 9115
}
|
[
"java.util.concurrent.CompletableFuture",
"org.apache.pulsar.client.api.transaction.TxnID"
] |
import java.util.concurrent.CompletableFuture; import org.apache.pulsar.client.api.transaction.TxnID;
|
import java.util.concurrent.*; import org.apache.pulsar.client.api.transaction.*;
|
[
"java.util",
"org.apache.pulsar"
] |
java.util; org.apache.pulsar;
| 2,273,427
|
public static void isBlank(CharSequence text, CharSequence message) {
if (StringUtils.isNotBlank(text)) {
throw new IllegalArgumentException("[FOSS-0006][isBlank]["
+ message + "]");
}
}
|
static void function(CharSequence text, CharSequence message) { if (StringUtils.isNotBlank(text)) { throw new IllegalArgumentException(STR + message + "]"); } }
|
/**
 * Assert that a character sequence is null or blank ("" for example)
* @param text the character sequence to be checked
* @param message the exception message to display when the assertion failed
*/
|
Assert that a character sequence is null or blank ("" for example)
|
isBlank
|
{
"repo_name": "tylerchen/springmvc-mybatis-modules-project",
"path": "common/src/main/java/com/foreveross/infra/util/Assert.java",
"license": "apache-2.0",
"size": 20063
}
|
[
"org.apache.commons.lang3.StringUtils"
] |
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,264,300
|
Optional<Object> property(PropertyKey propertyKey);
|
Optional<Object> property(PropertyKey propertyKey);
|
/**
* Returns an optional property value mapped to specified
* {@code propertyKey}.
*
* @param propertyKey the property key
*
* @return an optional value.
*/
|
Returns an optional property value mapped to specified propertyKey
|
property
|
{
"repo_name": "jinahya/simple-file-back",
"path": "src/main/java/com/github/jinahya/simple/file/back/FileContext.java",
"license": "apache-2.0",
"size": 14576
}
|
[
"java.util.Optional"
] |
import java.util.Optional;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,894,265
|
@Override
public LinkedList<Element> getScalarElements(LinkedList<Element> aList) {
assert aList!=null;
aList.add(this);//add this element to the list
return aList;//we are done
}//getScalarElements()
|
LinkedList<Element> function(LinkedList<Element> aList) { assert aList!=null; aList.add(this); return aList; }
|
/**
* Returns this element
* @param aList list to place descendent elements into
* @return list with this element added to it
*/
|
Returns this element
|
getScalarElements
|
{
"repo_name": "duncanpauly/com.justone.json",
"path": "src/com/justone/json/ScalarElement.java",
"license": "mit",
"size": 6488
}
|
[
"java.util.LinkedList"
] |
import java.util.LinkedList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,079,311
|
public static <E> Collection<E> configuredCollection(Collection<E>
storedCollection,
CursorConfig config) {
return (Collection)
((StoredContainer) storedCollection).configuredClone(config);
}
|
static <E> Collection<E> function(Collection<E> storedCollection, CursorConfig config) { return (Collection) ((StoredContainer) storedCollection).configuredClone(config); }
|
/**
* Creates a configured collection from a given stored collection.
*
* @param storedCollection the base collection.
*
* @param config is the cursor configuration to be used for all operations
* performed via the new collection instance; null may be specified to use
* the default configuration.
*
* @return the configured collection.
*
* @throws ClassCastException if the given container is not a
* StoredContainer.
*/
|
Creates a configured collection from a given stored collection
|
configuredCollection
|
{
"repo_name": "bjorndm/prebake",
"path": "code/third_party/bdb/src/com/sleepycat/collections/StoredCollections.java",
"license": "apache-2.0",
"size": 6768
}
|
[
"com.sleepycat.je.CursorConfig",
"java.util.Collection"
] |
import com.sleepycat.je.CursorConfig; import java.util.Collection;
|
import com.sleepycat.je.*; import java.util.*;
|
[
"com.sleepycat.je",
"java.util"
] |
com.sleepycat.je; java.util;
| 1,346,761
|
@Nullable
public static String emptyToNull(@Nullable String string) {
return isNullOrEmpty(string) ? null : string;
}
|
static String function(@Nullable String string) { return isNullOrEmpty(string) ? null : string; }
|
/**
* Returns the given string if it is nonempty; {@code null} otherwise.
*
* @param string the string to test and possibly return
* @return {@code string} itself if it is nonempty; {@code null} if it is empty or null
*/
|
Returns the given string if it is nonempty; null otherwise
|
emptyToNull
|
{
"repo_name": "dropwizard/dropwizard",
"path": "dropwizard-util/src/main/java/io/dropwizard/util/Strings.java",
"license": "apache-2.0",
"size": 3232
}
|
[
"javax.annotation.Nullable"
] |
import javax.annotation.Nullable;
|
import javax.annotation.*;
|
[
"javax.annotation"
] |
javax.annotation;
| 2,728,348
|
public void detach(final EntityInvocationHandler entity) {
searchableEntities.remove(entity.getUUID());
allAttachedEntities.remove(entity);
}
|
void function(final EntityInvocationHandler entity) { searchableEntities.remove(entity.getUUID()); allAttachedEntities.remove(entity); }
|
/**
* Detaches entity.
*
* @param entity entity to be detached.
*/
|
Detaches entity
|
detach
|
{
"repo_name": "apache/olingo-odata4",
"path": "ext/client-proxy/src/main/java/org/apache/olingo/ext/proxy/context/EntityContext.java",
"license": "apache-2.0",
"size": 7182
}
|
[
"org.apache.olingo.ext.proxy.commons.EntityInvocationHandler"
] |
import org.apache.olingo.ext.proxy.commons.EntityInvocationHandler;
|
import org.apache.olingo.ext.proxy.commons.*;
|
[
"org.apache.olingo"
] |
org.apache.olingo;
| 2,678,966
|
private Dimension getTextExtents(String s, Font f, int fontHeight) {
if (s.length() == 0) {
return getMapModeConstants().dimension_nDPtoLP_0;
} else {
// height should be set using the font height and the number of
// lines in the string
Dimension d = FigureUtilities.getTextExtents(s, f);
IMapMode mapMode = getFigureMapMode();
d.width = mapMode.DPtoLP(d.width);
d.height = fontHeight * new StringTokenizer(s, "\n").countTokens();//$NON-NLS-1$
return d;
}
}
|
Dimension function(String s, Font f, int fontHeight) { if (s.length() == 0) { return getMapModeConstants().dimension_nDPtoLP_0; } else { Dimension d = FigureUtilities.getTextExtents(s, f); IMapMode mapMode = getFigureMapMode(); d.width = mapMode.DPtoLP(d.width); d.height = fontHeight * new StringTokenizer(s, "\n").countTokens(); return d; } }
|
/**
 * Gets the text extent scaled to the mapping mode
*/
|
Gets the text extent scaled to the mapping mode
|
getTextExtents
|
{
"repo_name": "debabratahazra/DS",
"path": "designstudio/components/process/ui/com.odcgroup.process.editor.diagram/src/main/java/com/odcgroup/process/diagram/custom/figures/WrapLabel.java",
"license": "epl-1.0",
"size": 53957
}
|
[
"java.util.StringTokenizer",
"org.eclipse.draw2d.FigureUtilities",
"org.eclipse.draw2d.geometry.Dimension",
"org.eclipse.gmf.runtime.draw2d.ui.mapmode.IMapMode",
"org.eclipse.swt.graphics.Font"
] |
import java.util.StringTokenizer; import org.eclipse.draw2d.FigureUtilities; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.gmf.runtime.draw2d.ui.mapmode.IMapMode; import org.eclipse.swt.graphics.Font;
|
import java.util.*; import org.eclipse.draw2d.*; import org.eclipse.draw2d.geometry.*; import org.eclipse.gmf.runtime.draw2d.ui.mapmode.*; import org.eclipse.swt.graphics.*;
|
[
"java.util",
"org.eclipse.draw2d",
"org.eclipse.gmf",
"org.eclipse.swt"
] |
java.util; org.eclipse.draw2d; org.eclipse.gmf; org.eclipse.swt;
| 763,732
|
@SuppressWarnings("unchecked")
Collection<ResourceGroup> getGroups(Resource r) {
return (Collection<ResourceGroup>) createQuery(
"select g.group from GroupMember g " + "where g.resource = :resource").setParameter(
"resource", r).list();
}
|
@SuppressWarnings(STR) Collection<ResourceGroup> getGroups(Resource r) { return (Collection<ResourceGroup>) createQuery( STR + STR).setParameter( STR, r).list(); }
|
/**
* Get groups that a resource belongs to via the persistence mechanism (i.e.
* mapping table)
*
* @return {@link ResourceGroup}s
*/
|
Get groups that a resource belongs to via the persistence mechanism (i.e. mapping table)
|
getGroups
|
{
"repo_name": "cc14514/hq6",
"path": "hq-server/src/main/java/org/hyperic/hq/authz/server/session/ResourceGroupDAO.java",
"license": "unlicense",
"size": 18426
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 633,570
|
MongoClientOptions.Builder optionsBuilder = MongoClientOptions.builder();
optionsBuilder.writeConcern( writeConcern );
optionsBuilder.readPreference( readPreference );
Map<String, Method> settingsMap = createSettingsMap();
for ( Map.Entry<String, Method> entry : settingsMap.entrySet() ) {
String setting = MongoDBProperties.MONGO_DRIVER_SETTINGS_PREFIX + "." + entry.getKey();
// we know that there is exactly one parameter
Class<?> type = entry.getValue().getParameterTypes()[0];
// for reflection purposes we need to deal with wrapper classes
if ( int.class.equals( type ) ) {
type = Integer.class;
}
if ( boolean.class.equals( type ) ) {
type = Boolean.class;
}
Object property = propertyReader.property( setting, type ).withDefault( null ).getValue();
if ( property == null ) {
continue;
}
Method settingMethod = entry.getValue();
try {
settingMethod.invoke( optionsBuilder, property );
}
catch ( InvocationTargetException | IllegalAccessException e ) {
throw log.unableToInvokeMethodViaReflection(
settingMethod.getDeclaringClass().getName(),
settingMethod.getName()
);
}
}
return optionsBuilder.build();
}
|
MongoClientOptions.Builder optionsBuilder = MongoClientOptions.builder(); optionsBuilder.writeConcern( writeConcern ); optionsBuilder.readPreference( readPreference ); Map<String, Method> settingsMap = createSettingsMap(); for ( Map.Entry<String, Method> entry : settingsMap.entrySet() ) { String setting = MongoDBProperties.MONGO_DRIVER_SETTINGS_PREFIX + "." + entry.getKey(); Class<?> type = entry.getValue().getParameterTypes()[0]; if ( int.class.equals( type ) ) { type = Integer.class; } if ( boolean.class.equals( type ) ) { type = Boolean.class; } Object property = propertyReader.property( setting, type ).withDefault( null ).getValue(); if ( property == null ) { continue; } Method settingMethod = entry.getValue(); try { settingMethod.invoke( optionsBuilder, property ); } catch ( InvocationTargetException | IllegalAccessException e ) { throw log.unableToInvokeMethodViaReflection( settingMethod.getDeclaringClass().getName(), settingMethod.getName() ); } } return optionsBuilder.build(); }
|
/**
* Create a {@link MongoClientOptions} using the {@link MongoDBConfiguration}.
*
* @return the {@link MongoClientOptions} corresponding to the {@link MongoDBConfiguration}
*/
|
Create a <code>MongoClientOptions</code> using the <code>MongoDBConfiguration</code>
|
buildOptions
|
{
"repo_name": "schernolyas/hibernate-ogm",
"path": "mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/configuration/impl/MongoDBConfiguration.java",
"license": "lgpl-2.1",
"size": 5602
}
|
[
"com.mongodb.MongoClientOptions",
"java.lang.reflect.InvocationTargetException",
"java.lang.reflect.Method",
"java.util.Map",
"org.hibernate.ogm.datastore.mongodb.MongoDBProperties"
] |
import com.mongodb.MongoClientOptions; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; import org.hibernate.ogm.datastore.mongodb.MongoDBProperties;
|
import com.mongodb.*; import java.lang.reflect.*; import java.util.*; import org.hibernate.ogm.datastore.mongodb.*;
|
[
"com.mongodb",
"java.lang",
"java.util",
"org.hibernate.ogm"
] |
com.mongodb; java.lang; java.util; org.hibernate.ogm;
| 2,677,581
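Reflection aside, the builder this method drives is the legacy driver's MongoClientOptions.Builder; a direct, non-reflective sketch of the same two fixed settings, plus one example of the kind of value the reflective loop could also apply:

import com.mongodb.MongoClientOptions;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;

MongoClientOptions options = MongoClientOptions.builder()
        .writeConcern(WriteConcern.MAJORITY)
        .readPreference(ReadPreference.primary())
        .connectionsPerHost(20) // example of a setting the reflective loop could also pick up
        .build();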
|
public static OutputJobInfo create(String databaseName,
String tableName,
Map<String, String> partitionValues) {
return new OutputJobInfo(databaseName,
tableName,
partitionValues);
}
private OutputJobInfo(String databaseName,
String tableName,
Map<String, String> partitionValues) {
this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
this.tableName = tableName;
this.partitionValues = partitionValues;
this.properties = new Properties();
}
|
static OutputJobInfo function(String databaseName, String tableName, Map<String, String> partitionValues) { return new OutputJobInfo(databaseName, tableName, partitionValues); } private OutputJobInfo(String databaseName, String tableName, Map<String, String> partitionValues) { this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName; this.tableName = tableName; this.partitionValues = partitionValues; this.properties = new Properties(); }
|
/**
* Initializes a new OutputJobInfo instance
* for writing data from a table.
* @param databaseName the db name
* @param tableName the table name
 * @param partitionValues The partition values to publish to, can be null or empty Map to
 * indicate write to an unpartitioned table. For partitioned tables, this map should
 * contain keys for all partition columns with corresponding values.
 * To work with hadoop security, pass the kerberos principal name of the server - else null.
 * The principal name should be of the form:
 * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
 * The special string _HOST will be replaced automatically with the correct host name
*/
|
Initializes a new OutputJobInfo instance for writing data from a table
|
create
|
{
"repo_name": "cloudera/hcatalog",
"path": "core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java",
"license": "apache-2.0",
"size": 8426
}
|
[
"java.util.Map",
"java.util.Properties",
"org.apache.hadoop.hive.metastore.MetaStoreUtils"
] |
import java.util.Map; import java.util.Properties; import org.apache.hadoop.hive.metastore.MetaStoreUtils;
|
import java.util.*; import org.apache.hadoop.hive.metastore.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 404,327
|
@Test
public void testStandardMethods() throws IllegalArgumentException, SecurityException, IllegalAccessException, InvocationTargetException{
Set<String> skipTests = new HashSet<String>();
skipTests.add("close");
skipTests.add("getConnection");
skipTests.add("markPossiblyBroken");
skipTests.add("trackStatement");
skipTests.add("checkClosed");
skipTests.add("isClosed");
skipTests.add("internalClose");
skipTests.add("prepareCall");
skipTests.add("prepareStatement");
skipTests.add("setClientInfo");
skipTests.add("getConnectionLastUsed");
skipTests.add("setConnectionLastUsed");
skipTests.add("getConnectionLastReset");
skipTests.add("setConnectionLastReset");
skipTests.add("isPossiblyBroken");
skipTests.add("getOriginatingPartition");
skipTests.add("setOriginatingPartition");
skipTests.add("renewConnection");
skipTests.add("clearStatementCaches");
skipTests.add("obtainInternalConnection");
skipTests.add("sendInitSQL");
skipTests.add("$VRi"); // this only comes into play when code coverage is started. Eclemma bug?
CommonTestUtils.testStatementBounceMethod(mockConnection, testClass, skipTests, mockConnection);
}
|
void function() throws IllegalArgumentException, SecurityException, IllegalAccessException, InvocationTargetException{ Set<String> skipTests = new HashSet<String>(); skipTests.add("close"); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add(STR); skipTests.add("$VRi"); CommonTestUtils.testStatementBounceMethod(mockConnection, testClass, skipTests, mockConnection); }
|
/** Test bounce of inner connection.
* @throws IllegalArgumentException
* @throws SecurityException
* @throws IllegalAccessException
* @throws InvocationTargetException
*/
|
Test bounce of inner connection
|
testStandardMethods
|
{
"repo_name": "liuxing521a/itas-core",
"path": "core/src/test/java/org/itas/core/dbpool/TestConnectionHandle.java",
"license": "apache-2.0",
"size": 31282
}
|
[
"com.jolbox.bonecp.CommonTestUtils",
"java.lang.reflect.InvocationTargetException",
"java.util.HashSet",
"java.util.Set"
] |
import com.jolbox.bonecp.CommonTestUtils; import java.lang.reflect.InvocationTargetException; import java.util.HashSet; import java.util.Set;
|
import com.jolbox.bonecp.*; import java.lang.reflect.*; import java.util.*;
|
[
"com.jolbox.bonecp",
"java.lang",
"java.util"
] |
com.jolbox.bonecp; java.lang; java.util;
| 2,889,814
|
public String[] queryNodeUsers(UUID templateId)
{
HashSet<String> userSet = new HashSet<String>();
for (Right right : this.rightSet)
{
if(right.templateId.equals(templateId))
userSet.add(right.username);
}
String[] userArray = userSet.toArray(new String[userSet.size()]);
Arrays.sort(userArray);
return userArray;
}
|
String[] function(UUID templateId) { HashSet<String> userSet = new HashSet<String>(); for (Right right : this.rightSet) { if(right.templateId.equals(templateId)) userSet.add(right.username); } String[] userArray = userSet.toArray(new String[userSet.size()]); Arrays.sort(userArray); return userArray; }
|
/**
* (non-Javadoc)
* <p> Title:queryNodeUsers</p>
* <p> Description:TODO</p>
* @param templateId
* @return
* @see com.sogou.qadev.service.cynthia.bean.Flow#queryNodeUsers(com.sogou.qadev.service.cynthia.bean.UUID)
*/
|
(non-Javadoc) Title:queryNodeUsers Description:TODO
|
queryNodeUsers
|
{
"repo_name": "yioye/Cynthia",
"path": "src/main/java/com/sogou/qadev/service/cynthia/bean/impl/FlowImpl.java",
"license": "gpl-2.0",
"size": 46682
}
|
[
"com.sogou.qadev.service.cynthia.bean.Right",
"java.util.Arrays",
"java.util.HashSet"
] |
import com.sogou.qadev.service.cynthia.bean.Right; import java.util.Arrays; import java.util.HashSet;
|
import com.sogou.qadev.service.cynthia.bean.*; import java.util.*;
|
[
"com.sogou.qadev",
"java.util"
] |
com.sogou.qadev; java.util;
| 142,547
|
private void transform(Source source, SerializationHandler handler,
String encoding) throws TransformerException
{
try {
if ((source instanceof StreamSource && source.getSystemId()==null
&& ((StreamSource)source).getInputStream()==null &&
((StreamSource)source).getReader()==null)||
(source instanceof SAXSource &&
((SAXSource)source).getInputSource()==null &&
((SAXSource)source).getXMLReader()==null )||
(source instanceof DOMSource &&
((DOMSource)source).getNode()==null)){
DocumentBuilderFactory builderF = FactoryImpl.getDOMFactory(_useServicesMechanism);
DocumentBuilder builder = builderF.newDocumentBuilder();
String systemID = source.getSystemId();
source = new DOMSource(builder.newDocument());
// Copy system ID from original, empty Source to new
if (systemID != null) {
source.setSystemId(systemID);
}
}
if (_isIdentity) {
transformIdentity(source, handler);
} else {
_translet.transform(getDOM(source), handler);
}
} catch (TransletException e) {
if (_errorListener != null) postErrorToListener(e.getMessage());
throw new TransformerException(e);
} catch (RuntimeException e) {
if (_errorListener != null) postErrorToListener(e.getMessage());
throw new TransformerException(e);
} catch (Exception e) {
if (_errorListener != null) postErrorToListener(e.getMessage());
throw new TransformerException(e);
} finally {
_dtmManager = null;
}
// If we create an output stream for the Result, we need to close it after the transformation.
if (_ostream != null) {
try {
_ostream.close();
}
catch (IOException e) {}
_ostream = null;
}
}
|
void function(Source source, SerializationHandler handler, String encoding) throws TransformerException { try { if ((source instanceof StreamSource && source.getSystemId()==null && ((StreamSource)source).getInputStream()==null && ((StreamSource)source).getReader()==null) || (source instanceof SAXSource && ((SAXSource)source).getInputSource()==null && ((SAXSource)source).getXMLReader()==null ) || (source instanceof DOMSource && ((DOMSource)source).getNode()==null)){ DocumentBuilderFactory builderF = FactoryImpl.getDOMFactory(_useServicesMechanism); DocumentBuilder builder = builderF.newDocumentBuilder(); String systemID = source.getSystemId(); source = new DOMSource(builder.newDocument()); if (systemID != null) { source.setSystemId(systemID); } } if (_isIdentity) { transformIdentity(source, handler); } else { _translet.transform(getDOM(source), handler); } } catch (TransletException e) { if (_errorListener != null) postErrorToListener(e.getMessage()); throw new TransformerException(e); } catch (RuntimeException e) { if (_errorListener != null) postErrorToListener(e.getMessage()); throw new TransformerException(e); } catch (Exception e) { if (_errorListener != null) postErrorToListener(e.getMessage()); throw new TransformerException(e); } finally { _dtmManager = null; } if (_ostream != null) { try { _ostream.close(); } catch (IOException e) {} _ostream = null; } }
|
/**
* Internal transformation method - uses the internal APIs of XSLTC
*/
|
Internal transformation method - uses the internal APIs of XSLTC
|
transform
|
{
"repo_name": "lostdj/Jaklin-OpenJDK-JAXP",
"path": "src/java.xml/share/classes/com/sun/org/apache/xalan/internal/xsltc/trax/TransformerImpl.java",
"license": "gpl-2.0",
"size": 55101
}
|
[
"com.sun.org.apache.xalan.internal.utils.FactoryImpl",
"com.sun.org.apache.xalan.internal.xsltc.TransletException",
"com.sun.org.apache.xml.internal.serializer.SerializationHandler",
"java.io.IOException",
"javax.xml.parsers.DocumentBuilder",
"javax.xml.parsers.DocumentBuilderFactory",
"javax.xml.transform.Source",
"javax.xml.transform.TransformerException",
"javax.xml.transform.dom.DOMSource",
"javax.xml.transform.sax.SAXSource",
"javax.xml.transform.stream.StreamSource"
] |
import com.sun.org.apache.xalan.internal.utils.FactoryImpl; import com.sun.org.apache.xalan.internal.xsltc.TransletException; import com.sun.org.apache.xml.internal.serializer.SerializationHandler; import java.io.IOException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.Source; import javax.xml.transform.TransformerException; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXSource; import javax.xml.transform.stream.StreamSource;
|
import com.sun.org.apache.xalan.internal.utils.*; import com.sun.org.apache.xalan.internal.xsltc.*; import com.sun.org.apache.xml.internal.serializer.*; import java.io.*; import javax.xml.parsers.*; import javax.xml.transform.*; import javax.xml.transform.dom.*; import javax.xml.transform.sax.*; import javax.xml.transform.stream.*;
|
[
"com.sun.org",
"java.io",
"javax.xml"
] |
com.sun.org; java.io; javax.xml;
| 178,156
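A usage sketch may help situate this internal method: it is reached indirectly through the public javax.xml.transform API rather than called directly. The following standalone example (the file names are placeholders, not from the source repo) drives an XSLT transformation through the standard factory, which ultimately lands in transform(Source, SerializationHandler, String) above.

import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

public class TransformDemo {
    public static void main(String[] args) throws Exception {
        // Compile the stylesheet, then run it over an input document.
        Transformer t = TransformerFactory.newInstance()
                .newTransformer(new StreamSource("stylesheet.xsl"));
        t.transform(new StreamSource("input.xml"), new StreamResult("output.html"));
    }
}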
|
@Test public void testExceptionCleanup() {
checkFails(
"select 0.5e1^.1^ from sales.emps",
"(?s).*Encountered \".1\" at line 1, column 13.\n"
+ "Was expecting one of:\n"
+ " \"FROM\" ...\n"
+ " \",\" ...\n"
+ " \"AS\" ...\n"
+ " <IDENTIFIER> ...\n"
+ " <QUOTED_IDENTIFIER> ...\n"
+ ".*");
}
|
@Test void function() { checkFails( STR, STR.1\STR + STR + STRFROM\STR + STR,\STR + STRAS\STR + STR + STR + ".*"); }
|
/**
* Tests that when there is an error, non-reserved keywords such as "A",
* "ABSOLUTE" (which naturally arise whenever a production uses
* "<IDENTIFIER>") are removed, but reserved words such as "AND"
* remain.
*/
|
Tests that when there is an error, non-reserved keywords such as "A", "ABSOLUTE" (which naturally arise whenever a production uses "<IDENTIFIER>") are removed, but reserved words such as "AND" remain
|
testExceptionCleanup
|
{
"repo_name": "hsuanyi/incubator-calcite",
"path": "core/src/test/java/org/apache/calcite/sql/parser/SqlParserTest.java",
"license": "apache-2.0",
"size": 212538
}
|
[
"org.junit.Test"
] |
import org.junit.Test;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,534,550
|
public Builder rootDirectory(URI uri) {
this.rootDirectory = new Path(uri);
return this;
}
|
Builder function(URI uri) { this.rootDirectory = new Path(uri); return this; }
|
/**
* The root directory for dataset files.
*/
|
The root directory for dataset files
|
rootDirectory
|
{
"repo_name": "whoschek/kite",
"path": "kite-data/kite-data-hcatalog/src/main/java/org/kitesdk/data/hcatalog/HCatalogDatasetRepository.java",
"license": "apache-2.0",
"size": 4313
}
|
[
"org.apache.hadoop.fs.Path"
] |
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 647,491
|
public void createInputMessages() throws Exception {
KafkaTool kafkaTool = new KafkaTool("KafkaTool", config.zkConnect);
String kafkaBrokerConnects = kafkaTool.getKafkaBrokerList();
Properties props = new Properties();
props.put("metadata.broker.list", kafkaBrokerConnects);
props.put("serializer.class", "kafka.serializer.StringEncoder");
props.put("partitioner.class", "kafka.producer.DefaultPartitioner");
props.put("request.required.acks", "1");
props.put("retry.backoff.ms", "1000");
ProducerConfig producerConfig = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(producerConfig);
for(int i = 0; i < config.inputNumOfMessages; i++){
String messageKey = "key-" + i;
String message = Integer.toString(i);
producer.send(new KeyedMessage<String, String>(config.inputTopic, messageKey, message));
if((i + 1) % 500 == 0) {
System.out.println("Send " + (i + 1) + " messages");
}
}
producer.close();
}
|
void function() throws Exception { KafkaTool kafkaTool = new KafkaTool(STR, config.zkConnect); String kafkaBrokerConnects = kafkaTool.getKafkaBrokerList(); Properties props = new Properties(); props.put(STR, kafkaBrokerConnects); props.put(STR, STR); props.put(STR, STR); props.put(STR, "1"); props.put(STR, "1000"); ProducerConfig producerConfig = new ProducerConfig(props); Producer<String, String> producer = new Producer<String, String>(producerConfig); for(int i = 0; i < config.inputNumOfMessages; i++){ String messageKey = "key-" + i; String message = Integer.toString(i); producer.send(new KeyedMessage<String, String>(config.inputTopic, messageKey, message)); if((i + 1) % 500 == 0) { System.out.println(STR + (i + 1) + STR); } } producer.close(); }
|
/**
* Push data to Kafka
* @param kafkaConnect Kafka's [host]:[port]
* @param inputTopic Topic to write to
* @throws Exception
*/
|
Push data to Kafka
|
createInputMessages
|
{
"repo_name": "DemandCube/NeverwinterDP",
"path": "scribengin/dataflow/example/src/main/java/com/neverwinterdp/scribengin/dataflow/example/simple/SimpleDataflowExample.java",
"license": "agpl-3.0",
"size": 11895
}
|
[
"com.neverwinterdp.kafka.KafkaTool",
"java.util.Properties"
] |
import com.neverwinterdp.kafka.KafkaTool; import java.util.Properties;
|
import com.neverwinterdp.kafka.*; import java.util.*;
|
[
"com.neverwinterdp.kafka",
"java.util"
] |
com.neverwinterdp.kafka; java.util;
| 786,417
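For context, a minimal standalone version of the legacy (pre-0.9, Scala-client) producer setup used above might look as follows; the broker address and topic are placeholders, and this API has since been superseded by org.apache.kafka.clients.producer.KafkaProducer.

import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class LegacyProducerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("metadata.broker.list", "localhost:9092"); // placeholder broker
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        Producer<String, String> producer = new Producer<>(new ProducerConfig(props));
        producer.send(new KeyedMessage<>("demo-topic", "key-0", "hello"));
        producer.close();
    }
}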
|
public static final void shuffle(int[] a, int start, int len, Random r) {
for ( int i=start+len; --i>0; ) {
int t = a[i], j = r.nextInt(i);
a[i] = a[j];
a[j] = t;
}
}
|
static final void function(int[] a, int start, int len, Random r) { for ( int i=start+len; --i>0; ) { int t = a[i], j = r.nextInt(i); a[i] = a[j]; a[j] = t; } }
|
/**
 * Randomly permute the contents of a range of an array.
* @param a the array to shuffle
* @param start the starting index of the range to shuffle
 * @param len the length of the range to shuffle
* @param r the source of randomness to use
*/
|
Randomly permute the contents of a range of an array
|
shuffle
|
{
"repo_name": "giacomovagni/Prefuse",
"path": "src/prefuse/util/ArrayLib.java",
"license": "bsd-3-clause",
"size": 44177
}
|
[
"java.util.Random"
] |
import java.util.Random;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,972,808
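A small usage sketch, assuming prefuse's ArrayLib is on the classpath. Note that as transcribed here the loop lower bound is 0 and the swap partner j is drawn from [0, i), so a nonzero start can also touch elements below the range; the demo sidesteps that by shuffling from index 0.

import java.util.Arrays;
import java.util.Random;
import prefuse.util.ArrayLib;

public class ShuffleDemo {
    public static void main(String[] args) {
        int[] a = {0, 1, 2, 3, 4, 5, 6, 7};
        // Shuffle the first five elements; the fixed seed makes the run reproducible.
        ArrayLib.shuffle(a, 0, 5, new Random(42L));
        System.out.println(Arrays.toString(a));
    }
}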
|
public final ImmutableSortedSet<E> toSortedSet(Comparator<? super E> comparator) {
return ImmutableSortedSet.copyOf(comparator, iterable);
}
/**
* Returns an immutable map whose keys are the distinct elements of this {@code FluentIterable}
|
final ImmutableSortedSet<E> function(Comparator<? super E> comparator) { return ImmutableSortedSet.copyOf(comparator, iterable); } /** * Returns an immutable map whose keys are the distinct elements of this {@code FluentIterable}
|
/**
* Returns an {@code ImmutableSortedSet} containing all of the elements from this {@code
* FluentIterable} in the order specified by {@code comparator}, with duplicates (determined by
* {@code comparator.compare(x, y) == 0}) removed. To produce an {@code ImmutableSortedSet} sorted
* by its natural ordering, use {@code toSortedSet(Ordering.natural())}.
*
* @param comparator the function by which to sort set elements
* @throws NullPointerException if any element is null
* @since 14.0 (since 12.0 as {@code toImmutableSortedSet()}).
*/
|
Returns an ImmutableSortedSet containing all of the elements from this FluentIterable in the order specified by comparator, with duplicates (determined by comparator.compare(x, y) == 0) removed. To produce an ImmutableSortedSet sorted by its natural ordering, use toSortedSet(Ordering.natural())
|
toSortedSet
|
{
"repo_name": "dongxingong/Guava",
"path": "guava/src/com/google/common/collect/FluentIterable.java",
"license": "apache-2.0",
"size": 20334
}
|
[
"java.util.Comparator"
] |
import java.util.Comparator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 17,040
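A usage sketch for the method above: with a case-insensitive comparator, strings differing only in case compare equal, so the later duplicate is dropped.

import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Arrays;

public class ToSortedSetDemo {
    public static void main(String[] args) {
        ImmutableSortedSet<String> set = FluentIterable
                .from(Arrays.asList("pear", "apple", "Apple", "banana"))
                .toSortedSet(String.CASE_INSENSITIVE_ORDER);
        // "Apple" compares equal to "apple" and is removed as a duplicate.
        System.out.println(set); // [apple, banana, pear]
    }
}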
|
void setConflictCheckForwardActivity(Activity value);
|
void setConflictCheckForwardActivity(Activity value);
|
/**
* Sets the value of the '{@link de.mdelab.mltgg.productionschema2petrinet.generated.productionschema2petrinetLink_r4#getConflictCheckForwardActivity <em>Conflict Check Forward Activity</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Conflict Check Forward Activity</em>' reference.
* @see #getConflictCheckForwardActivity()
* @generated
*/
|
Sets the value of the '<code>de.mdelab.mltgg.productionschema2petrinet.generated.productionschema2petrinetLink_r4#getConflictCheckForwardActivity Conflict Check Forward Activity</code>' reference.
|
setConflictCheckForwardActivity
|
{
"repo_name": "Somae/mdsd-factory-project",
"path": "transformation/de.mdelab.languages.productionschema2petrinet/src-gen/de/mdelab/mltgg/productionschema2petrinet/generated/productionschema2petrinetLink_r4.java",
"license": "gpl-3.0",
"size": 32261
}
|
[
"de.mdelab.mlsdm.Activity"
] |
import de.mdelab.mlsdm.Activity;
|
import de.mdelab.mlsdm.*;
|
[
"de.mdelab.mlsdm"
] |
de.mdelab.mlsdm;
| 2,411,655
|
public void analyze(final EncogAnalyst theAnalyst,
final File inputFile,
final boolean headers, final CSVFormat format) {
this.setInputFilename(inputFile);
setExpectInputHeaders(headers);
setInputFormat(format);
this.analyst = theAnalyst;
setAnalyzed(true);
performBasicCounts();
this.inputCount = this.analyst.determineInputCount();
this.outputCount = this.analyst.determineOutputCount();
this.idealCount = getInputHeadings().length - this.inputCount;
if ((getInputHeadings().length != this.inputCount) &&
(getInputHeadings().length !=
(this.inputCount + this.outputCount))) {
throw new AnalystError("Invalid number of columns(" +
getInputHeadings().length + "), must match input(" +
this.inputCount + ") count or input+output(" +
(this.inputCount + this.outputCount) + ") count.");
}
}
|
void function(final EncogAnalyst theAnalyst, final File inputFile, final boolean headers, final CSVFormat format) { this.setInputFilename(inputFile); setExpectInputHeaders(headers); setInputFormat(format); this.analyst = theAnalyst; setAnalyzed(true); performBasicCounts(); this.inputCount = this.analyst.determineInputCount(); this.outputCount = this.analyst.determineOutputCount(); this.idealCount = getInputHeadings().length - this.inputCount; if ((getInputHeadings().length != this.inputCount) && (getInputHeadings().length != (this.inputCount + this.outputCount))) { throw new AnalystError(STR + getInputHeadings().length + STR + this.inputCount + STR + (this.inputCount + this.outputCount) + STR); } }
|
/**
* Analyze the data. This counts the records and prepares the data to be
* processed.
* <p/>
* @param theAnalyst The analyst to use.
* @param inputFile The input file.
* @param headers True if headers are present.
* @param format The format the file is in.
*/
|
Analyze the data. This counts the records and prepares the data to be processed.
|
analyze
|
{
"repo_name": "ladygagapowerbot/bachelor-thesis-implementation",
"path": "lib/Encog/src/main/java/org/encog/app/analyst/csv/AnalystEvaluateRawCSV.java",
"license": "mit",
"size": 7151
}
|
[
"java.io.File",
"org.encog.app.analyst.AnalystError",
"org.encog.app.analyst.EncogAnalyst",
"org.encog.util.csv.CSVFormat"
] |
import java.io.File; import org.encog.app.analyst.AnalystError; import org.encog.app.analyst.EncogAnalyst; import org.encog.util.csv.CSVFormat;
|
import java.io.*; import org.encog.app.analyst.*; import org.encog.util.csv.*;
|
[
"java.io",
"org.encog.app",
"org.encog.util"
] |
java.io; org.encog.app; org.encog.util;
| 424,633
|
public static Bson match(final Bson filter) {
return new SimplePipelineStage("$match", filter);
}
|
static Bson function(final Bson filter) { return new SimplePipelineStage(STR, filter); }
|
/**
* Creates a $match pipeline stage for the specified filter
*
* @param filter the filter to match
* @return the $match pipeline stage
* @see Filters
* @mongodb.driver.manual reference/operator/aggregation/match/ $match
*/
|
Creates a $match pipeline stage for the specified filter
|
match
|
{
"repo_name": "rozza/mongo-java-driver",
"path": "driver-core/src/main/com/mongodb/client/model/Aggregates.java",
"license": "apache-2.0",
"size": 61174
}
|
[
"org.bson.conversions.Bson"
] |
import org.bson.conversions.Bson;
|
import org.bson.conversions.*;
|
[
"org.bson.conversions"
] |
org.bson.conversions;
| 2,611,124
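A typical call site for the stage builder above, combined with the Filters helper its javadoc references; collection wiring is omitted and the printed form is driver-dependent.

import static com.mongodb.client.model.Aggregates.match;
import static com.mongodb.client.model.Filters.eq;

import org.bson.conversions.Bson;

public class MatchStageDemo {
    public static void main(String[] args) {
        // Equivalent to the pipeline stage { $match: { status: "active" } }
        Bson stage = match(eq("status", "active"));
        System.out.println(stage);
    }
}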
|
public void setRefid(Reference r) throws BuildException {
super.setRefid(r);
}
|
void function(Reference r) throws BuildException { super.setRefid(r); }
|
/**
* Specifies that this element should behave as if the content of the
* element with the matching id attribute was inserted at this location. If
* specified, no other attributes should be specified.
*
*/
|
Specifies that this element should behave as if the content of the element with the matching id attribute was inserted at this location. If specified, no other attributes should be specified
|
setRefid
|
{
"repo_name": "cniweb/ant-contrib",
"path": "cpptasks/src/main/java/net/sf/antcontrib/cpptasks/VersionInfo.java",
"license": "apache-2.0",
"size": 20040
}
|
[
"org.apache.tools.ant.BuildException",
"org.apache.tools.ant.types.Reference"
] |
import org.apache.tools.ant.BuildException; import org.apache.tools.ant.types.Reference;
|
import org.apache.tools.ant.*; import org.apache.tools.ant.types.*;
|
[
"org.apache.tools"
] |
org.apache.tools;
| 764,216
|
V computeIfAbsent(K key, CheckedSupplier<V> supplier);
|
V computeIfAbsent(K key, CheckedSupplier<V> supplier);
|
/**
* If the key is not already associated with a cached value, attempts to compute its value using
* the given supplier and puts it into the cache. Otherwise it returns the cached value. If the
* function itself throws an (unchecked) exception, the exception is rethrown.
*
* @param key key with which the specified value is to be associated
* @param supplier value to be associated with the specified key
* @return cached value
*/
|
If the key is not already associated with a cached value, attempts to compute its value using the given supplier and puts it into the cache. Otherwise it returns the cached value. If the function itself throws an (unchecked) exception, the exception is rethrown
|
computeIfAbsent
|
{
"repo_name": "RobWin/circuitbreaker-java8",
"path": "resilience4j-cache/src/main/java/io/github/resilience4j/cache/Cache.java",
"license": "apache-2.0",
"size": 5151
}
|
[
"io.github.resilience4j.core.functions.CheckedSupplier"
] |
import io.github.resilience4j.core.functions.CheckedSupplier;
|
import io.github.resilience4j.core.functions.*;
|
[
"io.github.resilience4j"
] |
io.github.resilience4j;
| 794,763
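A hedged usage sketch: resilience4j's Cache wraps a JCache (javax.cache) instance via Cache.of(...), so a JCache provider (e.g. Ehcache or Caffeine's JCache module) must be on the runtime classpath; the loadUser helper below is an assumption for illustration.

import io.github.resilience4j.cache.Cache;

public class CacheDemo {
    static String loadUser(String id) {      // hypothetical expensive lookup
        return "user-record-for-" + id;
    }

    public static void main(String[] args) {
        javax.cache.Cache<String, String> jcache =
                javax.cache.Caching.getCachingProvider().getCacheManager()
                        .createCache("users",
                                new javax.cache.configuration.MutableConfiguration<>());
        Cache<String, String> cache = Cache.of(jcache);
        // Computed once via the supplier, then served from the cache.
        String v = cache.computeIfAbsent("42", () -> loadUser("42"));
        System.out.println(v);
    }
}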
|
public boolean exists(Policy policy, Key key) throws AerospikeException;
|
boolean function(Policy policy, Key key) throws AerospikeException;
|
/**
* Determine if a record key exists.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @return whether record exists or not
* @throws AerospikeException if command fails
*/
|
Determine if a record key exists. The policy can be used to specify timeouts
|
exists
|
{
"repo_name": "wgpshashank/aerospike-client-java",
"path": "client/src/com/aerospike/client/IAerospikeClient.java",
"license": "apache-2.0",
"size": 41552
}
|
[
"com.aerospike.client.policy.Policy"
] |
import com.aerospike.client.policy.Policy;
|
import com.aerospike.client.policy.*;
|
[
"com.aerospike.client"
] |
com.aerospike.client;
| 1,984,729
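An illustrative call site (host, namespace, set, and key are placeholders; recent client versions implement Closeable, so try-with-resources is used):

import com.aerospike.client.AerospikeClient;
import com.aerospike.client.Key;

public class ExistsDemo {
    public static void main(String[] args) {
        try (AerospikeClient client = new AerospikeClient("127.0.0.1", 3000)) {
            Key key = new Key("test", "demo", "user1");
            boolean found = client.exists(null, key); // null policy -> client defaults
            System.out.println("exists: " + found);
        }
    }
}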
|
@Override
public void toPNML(FileChannel fc) {
//id 0
//idref 0
//attributes 0
//sons 1
final int bufferSizeKB = 8;
final int bufferSize = bufferSizeKB * 1024;
final ByteBuffer bytebuf = ByteBuffer.allocateDirect(bufferSize);
final String charsetEncoding = PNMLEncoding.UTF_8.getName();
Boolean prettyPrintStatus = ModelRepository.getInstance().isPrettyPrintActive();
String retline = "";
String headline = "";
PrettyPrintData prpd = null;
if (prettyPrintStatus) {
retline = "\n";
prpd = ModelRepository.getInstance().getPrettyPrintData();
headline = prpd.getCurrentLineHeader();
}
StringBuilder sb = new StringBuilder();
sb.append(headline);
sb.append("<stringlength");
if (prettyPrintStatus) {
headline = prpd.increaseLineHeaderLevel();
}
//begin attributes, id and id ref processing
boolean haveSons = false;
sb.append(">");
sb.append(retline);
//sons, follow processing
if (getSubterm() != null) {
try {
writeIntoStream(bytebuf, fc, sb.toString().getBytes(Charset.forName(charsetEncoding)));
} catch (IOException io) {
io.printStackTrace();
// fail fast
return;
}
sb.delete(0, sb.length());
java.util.List<fr.lip6.move.pnml.hlpn.terms.Term> items = getSubterm();
for (Iterator<Term> iterator = items.iterator(); iterator.hasNext();) {
Term item = iterator.next();
sb.append(headline);
sb.append("<");
sb.append("subterm");
sb.append(">");
try {
writeIntoStream(bytebuf, fc, sb.toString().getBytes(Charset.forName(charsetEncoding)));
} catch (IOException io) {
io.printStackTrace();
// fail fast
return;
}
sb.delete(0, sb.length());
item.toPNML(fc);
sb.append(headline);
sb.append("</");
sb.append("subterm");
sb.append(">");
haveSons = true;
}
}
if (prettyPrintStatus) {
headline = prpd.decreaseLineHeaderLevel();
}
sb.append(headline);
sb.append("</stringlength>");
sb.append(retline);
try {
writeIntoStream(bytebuf, fc, sb.toString().getBytes(Charset.forName(charsetEncoding)));
} catch (IOException io) {
io.printStackTrace();
// fail fast
return;
}
sb = null;
}
|
void function(FileChannel fc) { final int bufferSizeKB = 8; final int bufferSize = bufferSizeKB * 1024; final ByteBuffer bytebuf = ByteBuffer.allocateDirect(bufferSize); final String charsetEncoding = PNMLEncoding.UTF_8.getName(); Boolean prettyPrintStatus = ModelRepository.getInstance().isPrettyPrintActive(); String retline = STRSTR\nSTR<stringlengthSTR>STR<STRsubtermSTR>STR</STRsubtermSTR>STR</stringlength>"); sb.append(retline); try { writeIntoStream(bytebuf, fc, sb.toString().getBytes(Charset.forName(charsetEncoding))); } catch (IOException io) { io.printStackTrace(); return; } sb = null; }
|
/**
 * Write the PNML output of this element to the given file channel
*/
|
Write the PNML output of this element to the given file channel
|
toPNML
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/strings/impl/LengthImpl.java",
"license": "epl-1.0",
"size": 21311
}
|
[
"fr.lip6.move.pnml.framework.utils.ModelRepository",
"fr.lip6.move.pnml.framework.utils.PNMLEncoding",
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.channels.FileChannel",
"java.nio.charset.Charset"
] |
import fr.lip6.move.pnml.framework.utils.ModelRepository; import fr.lip6.move.pnml.framework.utils.PNMLEncoding; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.charset.Charset;
|
import fr.lip6.move.pnml.framework.utils.*; import java.io.*; import java.nio.*; import java.nio.channels.*; import java.nio.charset.*;
|
[
"fr.lip6.move",
"java.io",
"java.nio"
] |
fr.lip6.move; java.io; java.nio;
| 445,971
|
Response<Factory> getByIdWithResponse(String id, String ifNoneMatch, Context context);
|
Response<Factory> getByIdWithResponse(String id, String ifNoneMatch, Context context);
|
/**
* Gets a factory.
*
* @param id the resource ID.
* @param ifNoneMatch ETag of the factory entity. Should only be specified for get. If the ETag matches the existing
* entity tag, or if * was provided, then no content will be returned.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a factory along with {@link Response}.
*/
|
Gets a factory
|
getByIdWithResponse
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Factories.java",
"license": "mit",
"size": 12271
}
|
[
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context"
] |
import com.azure.core.http.rest.Response; import com.azure.core.util.Context;
|
import com.azure.core.http.rest.*; import com.azure.core.util.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 546,642
|
private void init() {
try (InputStream inStream = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(MarkLogicPublishingModel.GLOBAL_PROPERTIESFILE)) {
if (LOG.isInfoEnabled()) {
LOG.info("Loading access keys from 'alfresco-global.properties'");
}
KEYS.load(inStream);
} catch (IOException ioex) {
if (LOG.isErrorEnabled()) {
        LOG.error("Exception getting the keys from alfresco-global.properties: ", ioex);
}
}
}
|
void function() { try (InputStream inStream = Thread.currentThread().getContextClassLoader() .getResourceAsStream(MarkLogicPublishingModel.GLOBAL_PROPERTIESFILE)) { if (LOG.isInfoEnabled()) { LOG.info(STR); } KEYS.load(inStream); } catch (IOException ioex) { if (LOG.isErrorEnabled()) { LOG.error(STR,ioex); } } }
|
/**
* Loads the properties from the alfresco-global.properties.
*/
|
Loads the properties from the alfresco-global.properties
|
init
|
{
"repo_name": "abhinavmishra14/marklogic-alfresco5.x-integration",
"path": "src/com/abhinav/alfresco/publishing/marklogic/ConfigReader.java",
"license": "apache-2.0",
"size": 2470
}
|
[
"java.io.IOException",
"java.io.InputStream",
"org.zaizi.alfresco.publishing.marklogic.MarkLogicPublishingModel"
] |
import java.io.IOException; import java.io.InputStream; import org.zaizi.alfresco.publishing.marklogic.MarkLogicPublishingModel;
|
import java.io.*; import org.zaizi.alfresco.publishing.marklogic.*;
|
[
"java.io",
"org.zaizi.alfresco"
] |
java.io; org.zaizi.alfresco;
| 1,271,701
|
public List getVariations(String imgName) {
List ret = (List)m_variations.get(imgName);
if (ret == null) {
return new ArrayList();
}
Collections.sort(ret);
return ret;
}
|
List function(String imgName) { List ret = (List)m_variations.get(imgName); if (ret == null) { return new ArrayList(); } Collections.sort(ret); return ret; }
|
/**
* Returns the variations for the given image.<p>
*
* @param imgName the image name
*
* @return the variations for the given image
*/
|
Returns the variations for the given image
|
getVariations
|
{
"repo_name": "alkacon/opencms-core",
"path": "src/org/opencms/ui/apps/cacheadmin/CmsImageCacheHelper.java",
"license": "lgpl-2.1",
"size": 11504
}
|
[
"java.util.ArrayList",
"java.util.Collections",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,008,941
|
public void completeIPRotation(com.google.container.v1.CompleteIPRotationRequest request,
io.grpc.stub.StreamObserver<com.google.container.v1.Operation> responseObserver) {
asyncUnaryCall(
getChannel().newCall(getCompleteIPRotationMethodHelper(), getCallOptions()), request, responseObserver);
}
|
void function(com.google.container.v1.CompleteIPRotationRequest request, io.grpc.stub.StreamObserver<com.google.container.v1.Operation> responseObserver) { asyncUnaryCall( getChannel().newCall(getCompleteIPRotationMethodHelper(), getCallOptions()), request, responseObserver); }
|
/**
* <pre>
* Completes master IP rotation.
* </pre>
*/
|
<code> Completes master IP rotation. </code>
|
completeIPRotation
|
{
"repo_name": "pongad/api-client-staging",
"path": "generated/java/grpc-google-cloud-container-v1/src/main/java/com/google/container/v1/ClusterManagerGrpc.java",
"license": "bsd-3-clause",
"size": 147597
}
|
[
"io.grpc.stub.ClientCalls",
"io.grpc.stub.ServerCalls"
] |
import io.grpc.stub.ClientCalls; import io.grpc.stub.ServerCalls;
|
import io.grpc.stub.*;
|
[
"io.grpc.stub"
] |
io.grpc.stub;
| 2,509,922
|
private String readResource(final String resourceName)
throws IOException {
final InputStream stream = new FileInputStream(
new File("src/test/resources/" + resourceName)
);
return new String(IOUtils.toByteArray(stream));
}
|
String function(final String resourceName) throws IOException { final InputStream stream = new FileInputStream( new File(STR + resourceName) ); return new String(IOUtils.toByteArray(stream)); }
|
/**
* Read resource for test.
* @param resourceName Name of the file being read.
* @return String content of the resource file.
* @throws IOException If it goes wrong.
*/
|
Read resource for test
|
readResource
|
{
"repo_name": "decorators-squad/versioneye-api",
"path": "src/test/java/com/amihaiemil/versioneye/FavoritesPageTestCase.java",
"license": "bsd-3-clause",
"size": 6363
}
|
[
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStream",
"org.apache.commons.io.IOUtils"
] |
import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils;
|
import java.io.*; import org.apache.commons.io.*;
|
[
"java.io",
"org.apache.commons"
] |
java.io; org.apache.commons;
| 1,840,726
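Worth noting: the stream opened above is never closed. A shorter equivalent on Java 7+ that avoids both the leak and the IOUtils dependency (a hypothetical refactor, not from the source repo):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

private String readResource(final String resourceName) throws IOException {
    // Pins UTF-8 explicitly, where the original used the platform default charset.
    return new String(
        Files.readAllBytes(Paths.get("src/test/resources/" + resourceName)),
        StandardCharsets.UTF_8
    );
}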
|
public String readLink(String path) throws IOException
{
int req_id = generateNextRequestID();
TypesWriter tw = new TypesWriter();
tw.writeString(path, charsetName);
if (debug != null)
{
debug.println("Sending SSH_FXP_READLINK...");
debug.flush();
}
sendMessage(Packet.SSH_FXP_READLINK, req_id, tw.getBytes());
byte[] resp = receiveMessage(34000);
if (debug != null)
{
debug.println("Got REPLY.");
debug.flush();
}
TypesReader tr = new TypesReader(resp);
int t = tr.readByte();
int rep_id = tr.readUINT32();
if (rep_id != req_id)
throw new IOException("The server sent an invalid id field.");
if (t == Packet.SSH_FXP_NAME)
{
int count = tr.readUINT32();
if (count != 1)
throw new IOException("The server sent an invalid SSH_FXP_NAME packet.");
return tr.readString(charsetName);
}
if (t != Packet.SSH_FXP_STATUS)
throw new IOException("The SFTP server sent an unexpected packet type (" + t + ")");
int errorCode = tr.readUINT32();
throw new SFTPException(tr.readString(), errorCode);
}
|
String function(String path) throws IOException { int req_id = generateNextRequestID(); TypesWriter tw = new TypesWriter(); tw.writeString(path, charsetName); if (debug != null) { debug.println(STR); debug.flush(); } sendMessage(Packet.SSH_FXP_READLINK, req_id, tw.getBytes()); byte[] resp = receiveMessage(34000); if (debug != null) { debug.println(STR); debug.flush(); } TypesReader tr = new TypesReader(resp); int t = tr.readByte(); int rep_id = tr.readUINT32(); if (rep_id != req_id) throw new IOException(STR); if (t == Packet.SSH_FXP_NAME) { int count = tr.readUINT32(); if (count != 1) throw new IOException(STR); return tr.readString(charsetName); } if (t != Packet.SSH_FXP_STATUS) throw new IOException(STR + t + ")"); int errorCode = tr.readUINT32(); throw new SFTPException(tr.readString(), errorCode); }
|
/**
* Read the target of a symbolic link.
*
* @param path See the {@link SFTPv3Client comment} for the class for more details.
* @return The target of the link.
* @throws IOException
*/
|
Read the target of a symbolic link
|
readLink
|
{
"repo_name": "handong106324/sqLogWeb",
"path": "src/ch/ethz/ssh2/SFTPv3Client.java",
"license": "lgpl-2.1",
"size": 37667
}
|
[
"ch.ethz.ssh2.packets.TypesReader",
"ch.ethz.ssh2.packets.TypesWriter",
"ch.ethz.ssh2.sftp.Packet",
"java.io.IOException"
] |
import ch.ethz.ssh2.packets.TypesReader; import ch.ethz.ssh2.packets.TypesWriter; import ch.ethz.ssh2.sftp.Packet; import java.io.IOException;
|
import ch.ethz.ssh2.packets.*; import ch.ethz.ssh2.sftp.*; import java.io.*;
|
[
"ch.ethz.ssh2",
"java.io"
] |
ch.ethz.ssh2; java.io;
| 2,118,012
|
@Override
public void write(Form form, IFile file) {
// Make sure the input isn't null
if (form == null || file == null) {
return;
}
// Get the components from the form
ArrayList<Component> components = form.getComponents();
// Make sure that the form had data
if (components != null) {
try {
PipedInputStream in = new PipedInputStream(8196);
PipedOutputStream out = new PipedOutputStream(in);
DataComponent dataComp;
String tableContents, compName;
String newLine = System.getProperty("line.separator");
byte[] byteArray;
// Make sure that we have a file to write to before proceeding
if (!file.exists()) {
byte[] blank = "".getBytes();
InputStream s = new ByteArrayInputStream(blank);
file.create(s, true, new NullProgressMonitor());
}
// Each component corresponds to a section in the INI file
for (Component comp : components) {
dataComp = (DataComponent) comp;
compName = dataComp.getName();
// If the section had a name start by adding that
// Then set the indentation required accordingly
					if (!"Default Section".equals(compName)) {
tableContents = sectionPrefix + compName
+ sectionPostfix + newLine;
} else {
// Otherwise, just leave it blank
tableContents = "";
}
// Now go through the rows and add each variable
for (Entry ent : dataComp.retrieveAllEntries()) {
tableContents += ent.getName().trim()
+ assignmentPattern + ent.getValue().trim()
+ newLine;
}
tableContents += newLine;
// Write it out
byteArray = tableContents.getBytes();
out.write(byteArray);
}
// Close the stream and set the file contents
out.close();
file.setContents(in, true, false, new NullProgressMonitor());
in.close();
} catch (FileNotFoundException e) {
logger.info("INIWriter Message: Could not find "
+ file.getName() + " for writing.");
} catch (IOException e) {
logger.info("INIWriter Message: Could not write to "
+ file.getName() + " du to an IO error");
} catch (CoreException e) {
logger.info("INIWriter Message: Could not write to "
+ file.getName() + " due to an ICE Core error.");
}
}
}
|
void function(Form form, IFile file) { if (form == null  file == null) { return; } ArrayList<Component> components = form.getComponents(); if (components != null) { try { PipedInputStream in = new PipedInputStream(8196); PipedOutputStream out = new PipedOutputStream(in); DataComponent dataComp; String tableContents, compName; String newLine = System.getProperty(STR); byte[] byteArray; if (!file.exists()) { byte[] blank = STRDefault SectionSTRSTRINIWriter Message: Could not find STR for writing.STRINIWriter Message: Could not write to STR due to an IO errorSTRINIWriter Message: Could not write to STR due to an ICE Core error."); } } }
|
/**
* Writes out an INI file from the given Form to the given IFile.
*
* @param form
* The form containing the data to write.
* @param file
* The file to write to.
*/
|
Writes out an INI file from the given Form to the given IFile
|
write
|
{
"repo_name": "SmithRWORNL/ice",
"path": "src/org.eclipse.ice.io/src/org/eclipse/ice/io/ini/INIWriter.java",
"license": "epl-1.0",
"size": 5515
}
|
[
"java.io.PipedInputStream",
"java.io.PipedOutputStream",
"java.util.ArrayList",
"org.eclipse.core.resources.IFile",
"org.eclipse.ice.datastructures.ICEObject",
"org.eclipse.ice.datastructures.form.DataComponent",
"org.eclipse.ice.datastructures.form.Form"
] |
import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.util.ArrayList; import org.eclipse.core.resources.IFile; import org.eclipse.ice.datastructures.ICEObject; import org.eclipse.ice.datastructures.form.DataComponent; import org.eclipse.ice.datastructures.form.Form;
|
import java.io.*; import java.util.*; import org.eclipse.core.resources.*; import org.eclipse.ice.datastructures.*; import org.eclipse.ice.datastructures.form.*;
|
[
"java.io",
"java.util",
"org.eclipse.core",
"org.eclipse.ice"
] |
java.io; java.util; org.eclipse.core; org.eclipse.ice;
| 1,464,426
|
public static boolean checkBeanData(ToDoData data, ToDoBean bean, IMessageLogger messageLogger){
boolean ok = true;
if(StringUtil.isStringBlank(bean.getTitle())) {
messageLogger.addErrorMessage(data.getMessageLogger().getText("Error.ToDoTaskMissingTitle"));
ok = false;
}
if(bean.getDeadline().before(new Date())) {
messageLogger.addErrorMessage(data.getMessageLogger().getText("Error.ToDoTaskInvalidDeadline"));
ok = false;
}
return ok;
}
|
static boolean function(ToDoData data, ToDoBean bean, IMessageLogger messageLogger){ boolean ok = true; if(StringUtil.isStringBlank(bean.getTitle())) { messageLogger.addErrorMessage(data.getMessageLogger().getText(STR)); ok = false; } if(bean.getDeadline().before(new Date())) { messageLogger.addErrorMessage(data.getMessageLogger().getText(STR)); ok = false; } return ok; }
|
/**
	 * Validation of the data for a new task
* @param messageLogger
* @param bean
*/
|
Validation of the data for a new task
|
checkBeanData
|
{
"repo_name": "KarloKnezevic/Ferko",
"path": "src/java/hr/fer/zemris/jcms/service/ToDoService.java",
"license": "apache-2.0",
"size": 38138
}
|
[
"hr.fer.zemris.jcms.beans.ext.ToDoBean",
"hr.fer.zemris.jcms.web.actions.data.ToDoData",
"hr.fer.zemris.jcms.web.actions.data.support.IMessageLogger",
"hr.fer.zemris.util.StringUtil",
"java.util.Date"
] |
import hr.fer.zemris.jcms.beans.ext.ToDoBean; import hr.fer.zemris.jcms.web.actions.data.ToDoData; import hr.fer.zemris.jcms.web.actions.data.support.IMessageLogger; import hr.fer.zemris.util.StringUtil; import java.util.Date;
|
import hr.fer.zemris.jcms.beans.ext.*; import hr.fer.zemris.jcms.web.actions.data.*; import hr.fer.zemris.jcms.web.actions.data.support.*; import hr.fer.zemris.util.*; import java.util.*;
|
[
"hr.fer.zemris",
"java.util"
] |
hr.fer.zemris; java.util;
| 1,074,764
|
public static <T> void checkOperatorPartForNumFuzzedValues(OperatorSpecificFuzzedValues<T> operatorPart, int expectedNumFuzzedValues) {
List<FuzzedValue<T>> fuzzedValues = operatorPart.getFuzzedValues();
int actualNumFuzzedValues = fuzzedValues.size();
assertTrueWithPrefix("Wrong number of fuzzed values: was " + actualNumFuzzedValues + " instead of " + expectedNumFuzzedValues,
actualNumFuzzedValues == expectedNumFuzzedValues);
}
|
static <T> void function(OperatorSpecificFuzzedValues<T> operatorPart, int expectedNumFuzzedValues) { List<FuzzedValue<T>> fuzzedValues = operatorPart.getFuzzedValues(); int actualNumFuzzedValues = fuzzedValues.size(); assertTrueWithPrefix(STR + actualNumFuzzedValues + STR + expectedNumFuzzedValues, actualNumFuzzedValues == expectedNumFuzzedValues); }
|
/**
 * Checks an operator part (XML tag 'response:operator') for a certain number of fuzzed values
* (XML tag 'response:fuzzedValue').
* Fails if the number differs.
* @param <T>
*
 * @param operatorPart the operator part containing the fuzzed values.
* @param expectedNumFuzzedValues the expected number of fuzzed values.
*/
|
Checks an operator part (XML tag 'response:operator') for a certain number of fuzzed values (XML tag 'response:fuzzedValue'). Fails if the number differs
|
checkOperatorPartForNumFuzzedValues
|
{
"repo_name": "fraunhoferfokus/Fuzzino",
"path": "src/test/java/de/fraunhofer/fokus/fuzzing/fuzzino/TestUtil.java",
"license": "apache-2.0",
"size": 30135
}
|
[
"de.fraunhofer.fokus.fuzzing.fuzzino.response.OperatorSpecificFuzzedValues",
"java.util.List"
] |
import de.fraunhofer.fokus.fuzzing.fuzzino.response.OperatorSpecificFuzzedValues; import java.util.List;
|
import de.fraunhofer.fokus.fuzzing.fuzzino.response.*; import java.util.*;
|
[
"de.fraunhofer.fokus",
"java.util"
] |
de.fraunhofer.fokus; java.util;
| 945,062
|
private void restoreWithRescaling(Collection<KeyedStateHandle> restoreStateHandles) throws Exception {
// Prepare for restore with rescaling
KeyedStateHandle initialHandle = RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial(
restoreStateHandles, keyGroupRange);
// Init base DB instance
if (initialHandle != null) {
restoreStateHandles.remove(initialHandle);
initDBWithRescaling(initialHandle);
} else {
openDB();
}
// Transfer remaining key-groups from temporary instance into base DB
byte[] startKeyGroupPrefixBytes = new byte[keyGroupPrefixBytes];
RocksDBKeySerializationUtils.serializeKeyGroup(keyGroupRange.getStartKeyGroup(), startKeyGroupPrefixBytes);
byte[] stopKeyGroupPrefixBytes = new byte[keyGroupPrefixBytes];
RocksDBKeySerializationUtils.serializeKeyGroup(keyGroupRange.getEndKeyGroup() + 1, stopKeyGroupPrefixBytes);
for (KeyedStateHandle rawStateHandle : restoreStateHandles) {
if (!(rawStateHandle instanceof IncrementalRemoteKeyedStateHandle)) {
throw new IllegalStateException("Unexpected state handle type, " +
"expected " + IncrementalRemoteKeyedStateHandle.class +
", but found " + rawStateHandle.getClass());
}
Path temporaryRestoreInstancePath = new Path(instanceBasePath.getAbsolutePath() + UUID.randomUUID().toString());
try (RestoredDBInstance tmpRestoreDBInfo = restoreDBInstanceFromStateHandle(
(IncrementalRemoteKeyedStateHandle) rawStateHandle,
temporaryRestoreInstancePath);
RocksDBWriteBatchWrapper writeBatchWrapper = new RocksDBWriteBatchWrapper(this.db)) {
List<ColumnFamilyDescriptor> tmpColumnFamilyDescriptors = tmpRestoreDBInfo.columnFamilyDescriptors;
List<ColumnFamilyHandle> tmpColumnFamilyHandles = tmpRestoreDBInfo.columnFamilyHandles;
// iterating only the requested descriptors automatically skips the default column family handle
for (int i = 0; i < tmpColumnFamilyDescriptors.size(); ++i) {
ColumnFamilyHandle tmpColumnFamilyHandle = tmpColumnFamilyHandles.get(i);
ColumnFamilyHandle targetColumnFamilyHandle = getOrRegisterStateColumnFamilyHandle(
null, tmpRestoreDBInfo.stateMetaInfoSnapshots.get(i))
.columnFamilyHandle;
try (RocksIteratorWrapper iterator = RocksDBOperationUtils.getRocksIterator(tmpRestoreDBInfo.db, tmpColumnFamilyHandle)) {
iterator.seek(startKeyGroupPrefixBytes);
while (iterator.isValid()) {
if (RocksDBIncrementalCheckpointUtils.beforeThePrefixBytes(iterator.key(), stopKeyGroupPrefixBytes)) {
writeBatchWrapper.put(targetColumnFamilyHandle, iterator.key(), iterator.value());
} else {
// Since the iterator will visit the record according to the sorted order,
// we can just break here.
break;
}
iterator.next();
}
} // releases native iterator resources
}
} finally {
cleanUpPathQuietly(temporaryRestoreInstancePath);
}
}
}
|
void function(Collection<KeyedStateHandle> restoreStateHandles) throws Exception { KeyedStateHandle initialHandle = RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial( restoreStateHandles, keyGroupRange); if (initialHandle != null) { restoreStateHandles.remove(initialHandle); initDBWithRescaling(initialHandle); } else { openDB(); } byte[] startKeyGroupPrefixBytes = new byte[keyGroupPrefixBytes]; RocksDBKeySerializationUtils.serializeKeyGroup(keyGroupRange.getStartKeyGroup(), startKeyGroupPrefixBytes); byte[] stopKeyGroupPrefixBytes = new byte[keyGroupPrefixBytes]; RocksDBKeySerializationUtils.serializeKeyGroup(keyGroupRange.getEndKeyGroup() + 1, stopKeyGroupPrefixBytes); for (KeyedStateHandle rawStateHandle : restoreStateHandles) { if (!(rawStateHandle instanceof IncrementalRemoteKeyedStateHandle)) { throw new IllegalStateException(STR + STR + IncrementalRemoteKeyedStateHandle.class + STR + rawStateHandle.getClass()); } Path temporaryRestoreInstancePath = new Path(instanceBasePath.getAbsolutePath() + UUID.randomUUID().toString()); try (RestoredDBInstance tmpRestoreDBInfo = restoreDBInstanceFromStateHandle( (IncrementalRemoteKeyedStateHandle) rawStateHandle, temporaryRestoreInstancePath); RocksDBWriteBatchWrapper writeBatchWrapper = new RocksDBWriteBatchWrapper(this.db)) { List<ColumnFamilyDescriptor> tmpColumnFamilyDescriptors = tmpRestoreDBInfo.columnFamilyDescriptors; List<ColumnFamilyHandle> tmpColumnFamilyHandles = tmpRestoreDBInfo.columnFamilyHandles; for (int i = 0; i < tmpColumnFamilyDescriptors.size(); ++i) { ColumnFamilyHandle tmpColumnFamilyHandle = tmpColumnFamilyHandles.get(i); ColumnFamilyHandle targetColumnFamilyHandle = getOrRegisterStateColumnFamilyHandle( null, tmpRestoreDBInfo.stateMetaInfoSnapshots.get(i)) .columnFamilyHandle; try (RocksIteratorWrapper iterator = RocksDBOperationUtils.getRocksIterator(tmpRestoreDBInfo.db, tmpColumnFamilyHandle)) { iterator.seek(startKeyGroupPrefixBytes); while (iterator.isValid()) { if (RocksDBIncrementalCheckpointUtils.beforeThePrefixBytes(iterator.key(), stopKeyGroupPrefixBytes)) { writeBatchWrapper.put(targetColumnFamilyHandle, iterator.key(), iterator.value()); } else { break; } iterator.next(); } } } } finally { cleanUpPathQuietly(temporaryRestoreInstancePath); } } }
|
/**
* Recovery from multi incremental states with rescaling. For rescaling, this method creates a temporary
* RocksDB instance for a key-groups shard. All contents from the temporary instance are copied into the
* real restore instance and then the temporary instance is discarded.
*/
|
Recovery from multi incremental states with rescaling. For rescaling, this method creates a temporary RocksDB instance for a key-groups shard. All contents from the temporary instance are copied into the real restore instance and then the temporary instance is discarded
|
restoreWithRescaling
|
{
"repo_name": "ueshin/apache-flink",
"path": "flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/restore/RocksDBIncrementalRestoreOperation.java",
"license": "apache-2.0",
"size": 19844
}
|
[
"java.util.Collection",
"java.util.List",
"java.util.UUID",
"org.apache.flink.contrib.streaming.state.RocksDBIncrementalCheckpointUtils",
"org.apache.flink.contrib.streaming.state.RocksDBKeySerializationUtils",
"org.apache.flink.contrib.streaming.state.RocksDBOperationUtils",
"org.apache.flink.contrib.streaming.state.RocksDBWriteBatchWrapper",
"org.apache.flink.contrib.streaming.state.RocksIteratorWrapper",
"org.apache.flink.core.fs.Path",
"org.apache.flink.runtime.state.IncrementalRemoteKeyedStateHandle",
"org.apache.flink.runtime.state.KeyedStateHandle",
"org.rocksdb.ColumnFamilyDescriptor",
"org.rocksdb.ColumnFamilyHandle"
] |
import java.util.Collection; import java.util.List; import java.util.UUID; import org.apache.flink.contrib.streaming.state.RocksDBIncrementalCheckpointUtils; import org.apache.flink.contrib.streaming.state.RocksDBKeySerializationUtils; import org.apache.flink.contrib.streaming.state.RocksDBOperationUtils; import org.apache.flink.contrib.streaming.state.RocksDBWriteBatchWrapper; import org.apache.flink.contrib.streaming.state.RocksIteratorWrapper; import org.apache.flink.core.fs.Path; import org.apache.flink.runtime.state.IncrementalRemoteKeyedStateHandle; import org.apache.flink.runtime.state.KeyedStateHandle; import org.rocksdb.ColumnFamilyDescriptor; import org.rocksdb.ColumnFamilyHandle;
|
import java.util.*; import org.apache.flink.contrib.streaming.state.*; import org.apache.flink.core.fs.*; import org.apache.flink.runtime.state.*; import org.rocksdb.*;
|
[
"java.util",
"org.apache.flink",
"org.rocksdb"
] |
java.util; org.apache.flink; org.rocksdb;
| 420,468
|
public static float getDensity(Context context) {
return context.getResources().getDisplayMetrics().density;
}
|
static float function(Context context) { return context.getResources().getDisplayMetrics().density; }
|
/**
* get the density of device screen.
*
* @param context the context
* @return the screen density
*/
|
get the density of device screen
|
getDensity
|
{
"repo_name": "watay147/RankTipsView",
"path": "src/RankTipsView.java",
"license": "apache-2.0",
"size": 9646
}
|
[
"android.content.Context"
] |
import android.content.Context;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 954,376
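A typical companion helper (an assumption, not present in the source file) that applies the density above to convert dp values to device pixels:

import android.content.Context;

public final class DisplayUtils {
    /** Convert a dp value to device pixels using the screen density. */
    public static int dpToPx(Context context, float dp) {
        // Same expression as getDensity(context), inlined here for self-containment.
        return Math.round(dp * context.getResources().getDisplayMetrics().density);
    }
}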
|
Map<String,Object> searchPaginatedAPIProducts(String searchQuery, String tenantDomain,int start,int end) throws
APIManagementException;
|
Map<String,Object> searchPaginatedAPIProducts(String searchQuery, String tenantDomain,int start,int end) throws APIManagementException;
|
/**
* Returns APIProduct Search result based on the provided query.
*
* @param searchQuery search query. Ex: provider=*admin*
* @param tenantDomain tenant domain
* @param start starting number
* @param end ending number
* @return APIProduct result
* @throws APIManagementException if search is failed
*/
|
Returns APIProduct Search result based on the provided query
|
searchPaginatedAPIProducts
|
{
"repo_name": "jaadds/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.api/src/main/java/org/wso2/carbon/apimgt/api/APIManager.java",
"license": "apache-2.0",
"size": 36284
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,199,067
|
protected void fireChangeEvent() {
Iterator iter = m_listeners.iterator();
ChangeEvent evt = new ChangeEvent(this);
while ( iter.hasNext() ) {
ChangeListener cl = (ChangeListener)iter.next();
cl.stateChanged(evt);
}
}
// ------------------------------------------------------------------------
|
void function() { Iterator iter = m_listeners.iterator(); ChangeEvent evt = new ChangeEvent(this); while ( iter.hasNext() ) { ChangeListener cl = (ChangeListener)iter.next(); cl.stateChanged(evt); } }
|
/**
* Fire a change event to listeners.
*/
|
Fire a change event to listeners
|
fireChangeEvent
|
{
"repo_name": "mccraigmccraig/prefuse",
"path": "src/prefuse/util/ui/JValueSlider.java",
"license": "bsd-3-clause",
"size": 12577
}
|
[
"java.util.Iterator",
"javax.swing.event.ChangeEvent",
"javax.swing.event.ChangeListener"
] |
import java.util.Iterator; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener;
|
import java.util.*; import javax.swing.event.*;
|
[
"java.util",
"javax.swing"
] |
java.util; javax.swing;
| 167,056
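A usage sketch for the event dispatch above, assuming JValueSlider exposes the conventional addChangeListener registration and a (String, double, double, double) constructor (the m_listeners field suggests both):

import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import prefuse.util.ui.JValueSlider;

public class SliderListenerDemo {
    public static void main(String[] args) {
        JValueSlider slider = new JValueSlider("alpha", 0.0, 1.0, 0.5);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                // Invoked from fireChangeEvent() whenever the slider value changes.
                System.out.println("value = " + slider.getValue());
            }
        });
    }
}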
|
protected boolean isApplicableType(NodeRef actionedUponNodeRef)
{
if (this.baseNodeService.exists(actionedUponNodeRef) == true)
{
QName nodeType = baseNodeService.getType(actionedUponNodeRef);
// Quick check in the set
if (applicableTypes.contains(nodeType))
{
return true;
}
else
{
// Have to do a long-winded check
for (QName type : applicableTypes)
{
if (this.dictionaryService.isSubClass(nodeType, type))
{
return true;
}
// Not a subtype; keep checking
}
}
}
return false;
}
|
boolean function(NodeRef actionedUponNodeRef) { if (this.baseNodeService.exists(actionedUponNodeRef) == true) { QName nodeType = baseNodeService.getType(actionedUponNodeRef); if (applicableTypes.contains(nodeType)) { return true; } else { for (QName type : applicableTypes) { if (this.dictionaryService.isSubClass(nodeType, type)) { return true; } } } } return false; }
|
/**
 * Check if a node is a type or subtype of one of the applicable types
*
* @param actionedUponNodeRef the node to check
* @return Returns <tt>true</tt> if the node is in the list of
* {@link #setApplicableTypes(String[]) applicable types} or one of the
* subtypes
*/
|
Check if a node is a type or subtype of one of the applicable types
|
isApplicableType
|
{
"repo_name": "Kast0rTr0y/community-edition",
"path": "projects/repository/source/java/org/alfresco/repo/action/executer/ActionExecuterAbstractBase.java",
"license": "lgpl-3.0",
"size": 10761
}
|
[
"org.alfresco.service.cmr.repository.NodeRef",
"org.alfresco.service.namespace.QName"
] |
import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName;
|
import org.alfresco.service.cmr.repository.*; import org.alfresco.service.namespace.*;
|
[
"org.alfresco.service"
] |
org.alfresco.service;
| 2,562,803
|
public static Optional<FlinkKafkaPartitioner<RowData>> getFlinkKafkaPartitioner(
ReadableConfig tableOptions, ClassLoader classLoader) {
return tableOptions
.getOptional(SINK_PARTITIONER)
.flatMap(
(String partitioner) -> {
switch (partitioner) {
case SINK_PARTITIONER_VALUE_FIXED:
return Optional.of(new FlinkFixedPartitioner<>());
case SINK_PARTITIONER_VALUE_DEFAULT:
case SINK_PARTITIONER_VALUE_ROUND_ROBIN:
return Optional.empty();
// Default fallback to full class name of the partitioner.
default:
return Optional.of(
initializePartitioner(partitioner, classLoader));
}
});
}
|
static Optional<FlinkKafkaPartitioner<RowData>> function( ReadableConfig tableOptions, ClassLoader classLoader) { return tableOptions .getOptional(SINK_PARTITIONER) .flatMap( (String partitioner) -> { switch (partitioner) { case SINK_PARTITIONER_VALUE_FIXED: return Optional.of(new FlinkFixedPartitioner<>()); case SINK_PARTITIONER_VALUE_DEFAULT: case SINK_PARTITIONER_VALUE_ROUND_ROBIN: return Optional.empty(); default: return Optional.of( initializePartitioner(partitioner, classLoader)); } }); }
|
/**
 * The partitioner can be either "fixed", "round-robin" or the full class name of a customized
 * partitioner.
*/
|
The partitioner can be either "fixed", "round-robin" or the full class name of a customized partitioner
|
getFlinkKafkaPartitioner
|
{
"repo_name": "aljoscha/flink",
"path": "flink-connectors/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaOptions.java",
"license": "apache-2.0",
"size": 36537
}
|
[
"java.util.Optional",
"org.apache.flink.configuration.ReadableConfig",
"org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner",
"org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner",
"org.apache.flink.table.data.RowData"
] |
import java.util.Optional; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner; import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner; import org.apache.flink.table.data.RowData;
|
import java.util.*; import org.apache.flink.configuration.*; import org.apache.flink.streaming.connectors.kafka.partitioner.*; import org.apache.flink.table.data.*;
|
[
"java.util",
"org.apache.flink"
] |
java.util; org.apache.flink;
| 1,047,497
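A sketch of how the three branches resolve; the option key string "sink.partitioner" is assumed to match the SINK_PARTITIONER constant referenced above, and com.example.MyPartitioner is a hypothetical class name.

import org.apache.flink.configuration.Configuration;

public class PartitionerOptionDemo {
    public static void main(String[] args) {
        Configuration options = new Configuration();

        options.setString("sink.partitioner", "fixed");
        // -> Optional.of(new FlinkFixedPartitioner<>()): each subtask writes one partition

        options.setString("sink.partitioner", "round-robin");
        // -> Optional.empty(): defer to the Kafka producer's own partitioning

        options.setString("sink.partitioner", "com.example.MyPartitioner");
        // -> loaded reflectively via initializePartitioner(partitioner, classLoader)
    }
}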
|
public void setDisplayDifferenceToReference(boolean displayDifferenceToReference)
throws RemoteException;
|
void function(boolean displayDifferenceToReference) throws RemoteException;
|
/**
 * If set to true, the differenceSignal calculated by TPT.hose will not be shown but used to
 * display where the Assesslet fails (red background in SignalViewer). Instead, an absolute Signal
 * will be calculated and displayed.
*
* @param displayDifferenceToReference
* the new attribute value
*
* @throws RemoteException
* remote communication problem
*/
|
If set to true, the differenceSignal calculated by TPT.hose will not be shown but used to display where the Assesslet fails (red background in SignalViewer). Instead, an absolute Signal will be calculated and displayed
|
setDisplayDifferenceToReference
|
{
"repo_name": "jenkinsci/piketec-tpt-plugin",
"path": "src/main/java/com/piketec/tpt/api/Back2BackSettings.java",
"license": "mit",
"size": 14114
}
|
[
"java.rmi.RemoteException"
] |
import java.rmi.RemoteException;
|
import java.rmi.*;
|
[
"java.rmi"
] |
java.rmi;
| 1,848,625
|
public ServiceResponse<Void> putBigFloat(double numberBody) throws ErrorException, IOException {
Call<ResponseBody> call = service.putBigFloat(numberBody);
return putBigFloatDelegate(call.execute());
}
|
ServiceResponse<Void> function(double numberBody) throws ErrorException, IOException { Call<ResponseBody> call = service.putBigFloat(numberBody); return putBigFloatDelegate(call.execute()); }
|
/**
* Put big float value 3.402823e+20.
*
* @param numberBody the double value
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the {@link ServiceResponse} object if successful.
*/
|
Put big float value 3.402823e+20
|
putBigFloat
|
{
"repo_name": "John-Hart/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodynumber/implementation/NumbersImpl.java",
"license": "mit",
"size": 59057
}
|
[
"com.microsoft.rest.ServiceResponse",
"java.io.IOException"
] |
import com.microsoft.rest.ServiceResponse; import java.io.IOException;
|
import com.microsoft.rest.*; import java.io.*;
|
[
"com.microsoft.rest",
"java.io"
] |
com.microsoft.rest; java.io;
| 13,703
|
@Override
public void addOrUpdateGroupEntry(Group group) {
// check if this new entry is an update to an existing entry
StoredGroupEntry existing = (groupEntriesById.get(
group.deviceId()) != null) ?
groupEntriesById.get(group.deviceId()).get(group.id()) :
null;
GroupEvent event = null;
if (existing != null) {
synchronized (existing) {
existing.setLife(group.life());
existing.setPackets(group.packets());
existing.setBytes(group.bytes());
if (existing.state() == GroupState.PENDING_ADD) {
existing.setState(GroupState.ADDED);
event = new GroupEvent(Type.GROUP_ADDED, existing);
} else {
if (existing.state() == GroupState.PENDING_UPDATE) {
existing.setState(GroupState.PENDING_UPDATE);
}
event = new GroupEvent(Type.GROUP_UPDATED, existing);
}
}
}
if (event != null) {
notifyDelegate(event);
}
}
|
void function(Group group) { StoredGroupEntry existing = (groupEntriesById.get( group.deviceId()) != null) ? groupEntriesById.get(group.deviceId()).get(group.id()) : null; GroupEvent event = null; if (existing != null) { synchronized (existing) { existing.setLife(group.life()); existing.setPackets(group.packets()); existing.setBytes(group.bytes()); if (existing.state() == GroupState.PENDING_ADD) { existing.setState(GroupState.ADDED); event = new GroupEvent(Type.GROUP_ADDED, existing); } else { if (existing.state() == GroupState.PENDING_UPDATE) { existing.setState(GroupState.PENDING_UPDATE); } event = new GroupEvent(Type.GROUP_UPDATED, existing); } } } if (event != null) { notifyDelegate(event); } }
|
/**
* Stores a new group entry, or updates an existing entry.
*
* @param group group entry
*/
|
Stores a new group entry, or updates an existing entry
|
addOrUpdateGroupEntry
|
{
"repo_name": "kuangrewawa/onos",
"path": "core/store/dist/src/main/java/org/onosproject/store/group/impl/DistributedGroupStore.java",
"license": "apache-2.0",
"size": 21269
}
|
[
"org.onosproject.net.group.Group",
"org.onosproject.net.group.GroupEvent",
"org.onosproject.net.group.StoredGroupEntry"
] |
import org.onosproject.net.group.Group; import org.onosproject.net.group.GroupEvent; import org.onosproject.net.group.StoredGroupEntry;
|
import org.onosproject.net.group.*;
|
[
"org.onosproject.net"
] |
org.onosproject.net;
| 903,937
|
public XmlPathConfig declareNamespaces(Map<String, String> namespacesToDeclare) {
return new XmlPathConfig(jaxbObjectMapperFactory, defaultParserType, defaultDeserializer, charset, features, namespacesToDeclare,
properties, validating, namespaceAware, allowDocTypeDeclaration);
}
|
XmlPathConfig function(Map<String, String> namespacesToDeclare) { return new XmlPathConfig(jaxbObjectMapperFactory, defaultParserType, defaultDeserializer, charset, features, namespacesToDeclare, properties, validating, namespaceAware, allowDocTypeDeclaration); }
|
/**
* Specify declared namespaces that will be used when parsing XML.
*
 * @param namespacesToDeclare A map containing the namespaces that will be declared on the underlying {@link groovy.xml.XmlSlurper}.
* @return A new instance of XmlPathConfig
* @see org.xml.sax.XMLReader#setFeature(java.lang.String, boolean)
*/
|
Specify declared namespaces that will be used when parsing XML
|
declareNamespaces
|
{
"repo_name": "jayway/rest-assured",
"path": "xml-path/src/main/java/io/restassured/path/xml/config/XmlPathConfig.java",
"license": "apache-2.0",
"size": 17138
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,184,865
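A usage sketch for the method above; the namespace URI is the standard SOAP envelope namespace and the XML payload is a placeholder.

import io.restassured.path.xml.XmlPath;
import io.restassured.path.xml.config.XmlPathConfig;
import java.util.Collections;

public class NamespaceDemo {
    public static void main(String[] args) {
        XmlPathConfig config = new XmlPathConfig().declareNamespaces(
                Collections.singletonMap("soap", "http://schemas.xmlsoap.org/soap/envelope/"));
        String xml = "<soap:Envelope xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">"
                + "<soap:Body/></soap:Envelope>";
        XmlPath path = new XmlPath(xml).using(config);
        // Queries can now use the declared 'soap' prefix, e.g. path.get("soap:Envelope")
    }
}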
|
protected FlowExecutor createFlowExecutor(
FlowDefinitionLocator definitionLocator, FlowExecutionFactory executionFactory,
FlowExecutionRepository executionRepository) {
FlowExecutorImpl flowExecutor =
new GrailsFlowExecutor(definitionLocator, executionFactory, executionRepository);
if (getInputMapper() != null) {
flowExecutor.setInputMapper(getInputMapper());
}
return flowExecutor;
}
|
FlowExecutor function( FlowDefinitionLocator definitionLocator, FlowExecutionFactory executionFactory, FlowExecutionRepository executionRepository) { FlowExecutorImpl flowExecutor = new GrailsFlowExecutor(definitionLocator, executionFactory, executionRepository); if (getInputMapper() != null) { flowExecutor.setInputMapper(getInputMapper()); } return flowExecutor; }
|
/**
* Create the flow executor instance created by this factory bean and configure
 * it appropriately. Subclasses may override if they wish to use a custom executor
* implementation.
* @param definitionLocator the definition locator to use
* @param executionFactory the execution factory to use
* @param executionRepository the execution repository to use
* @return a new flow executor instance
*/
|
Create the flow executor instance created by this factory bean and configure it appropriately. Subclasses may override if they wish to use a custom executor implementation
|
createFlowExecutor
|
{
"repo_name": "lpicanco/grails",
"path": "src/web/org/codehaus/groovy/grails/webflow/config/GrailsAwareFlowExecutorFactoryBean.java",
"license": "apache-2.0",
"size": 5874
}
|
[
"org.codehaus.groovy.grails.webflow.executor.GrailsFlowExecutor",
"org.springframework.webflow.definition.registry.FlowDefinitionLocator",
"org.springframework.webflow.execution.FlowExecutionFactory",
"org.springframework.webflow.execution.repository.FlowExecutionRepository",
"org.springframework.webflow.executor.FlowExecutor",
"org.springframework.webflow.executor.FlowExecutorImpl"
] |
import org.codehaus.groovy.grails.webflow.executor.GrailsFlowExecutor; import org.springframework.webflow.definition.registry.FlowDefinitionLocator; import org.springframework.webflow.execution.FlowExecutionFactory; import org.springframework.webflow.execution.repository.FlowExecutionRepository; import org.springframework.webflow.executor.FlowExecutor; import org.springframework.webflow.executor.FlowExecutorImpl;
|
import org.codehaus.groovy.grails.webflow.executor.*; import org.springframework.webflow.definition.registry.*; import org.springframework.webflow.execution.*; import org.springframework.webflow.execution.repository.*; import org.springframework.webflow.executor.*;
|
[
"org.codehaus.groovy",
"org.springframework.webflow"
] |
org.codehaus.groovy; org.springframework.webflow;
| 1,339,419
|
public SchemaBuilder parameter(String propertyName, String propertyValue) {
// Preserve order of insertion with a LinkedHashMap. This isn't strictly necessary, but is nice if logical types
// can print their properties in a consistent order.
if (parameters == null)
parameters = new LinkedHashMap<>();
parameters.put(propertyName, propertyValue);
return this;
}
|
SchemaBuilder function(String propertyName, String propertyValue) { if (parameters == null) parameters = new LinkedHashMap<>(); parameters.put(propertyName, propertyValue); return this; }
|
/**
* Set a schema parameter.
* @param propertyName name of the schema property to define
* @param propertyValue value of the schema property to define, as a String
* @return the SchemaBuilder
*/
|
Set a schema parameter
|
parameter
|
{
"repo_name": "ijuma/kafka",
"path": "connect/api/src/main/java/org/apache/kafka/connect/data/SchemaBuilder.java",
"license": "apache-2.0",
"size": 12760
}
|
[
"java.util.LinkedHashMap"
] |
import java.util.LinkedHashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 200,725
|
public static final boolean isShadowDefined( Label la )
{
return !isColorTransparent( la.getShadowColor( ) );
}
|
static final boolean function( Label la ) { return !isColorTransparent( la.getShadowColor( ) ); }
|
/**
* Returns if the given label has defined a shadow.
*
* @param la
* @return if the given label has defined a shadow.
*/
|
Returns if the given label has defined a shadow
|
isShadowDefined
|
{
"repo_name": "Charling-Huang/birt",
"path": "chart/org.eclipse.birt.chart.engine/src/org/eclipse/birt/chart/util/ChartUtil.java",
"license": "epl-1.0",
"size": 73736
}
|
[
"org.eclipse.birt.chart.model.component.Label"
] |
import org.eclipse.birt.chart.model.component.Label;
|
import org.eclipse.birt.chart.model.component.*;
|
[
"org.eclipse.birt"
] |
org.eclipse.birt;
| 815,985
|
void writeRawBuffer(IHAWriteMessage msg, IBufferAccess b) throws IOException,
InterruptedException;
|
void writeRawBuffer(IHAWriteMessage msg, IBufferAccess b) throws IOException, InterruptedException;
|
/**
* Write a buffer containing data replicated from the master onto the local
* persistence store.
*
* @throws InterruptedException
* @throws IOException
*/
|
Write a buffer containing data replicated from the master onto the local persistence store
|
writeRawBuffer
|
{
"repo_name": "smalyshev/blazegraph",
"path": "bigdata/src/java/com/bigdata/journal/IHABufferStrategy.java",
"license": "gpl-2.0",
"size": 10952
}
|
[
"com.bigdata.ha.msg.IHAWriteMessage",
"com.bigdata.io.IBufferAccess",
"java.io.IOException"
] |
import com.bigdata.ha.msg.IHAWriteMessage; import com.bigdata.io.IBufferAccess; import java.io.IOException;
|
import com.bigdata.ha.msg.*; import com.bigdata.io.*; import java.io.*;
|
[
"com.bigdata.ha",
"com.bigdata.io",
"java.io"
] |
com.bigdata.ha; com.bigdata.io; java.io;
| 1,540,428
|
private static void writeAsSlip(byte[] packet, SerialPort serialPort) {
serialPort.writeByte(SLIP_END);
for (byte b: packet) {
if (b == SLIP_END) {
serialPort.writeByte(SLIP_ESC);
serialPort.writeByte(SLIP_ESC_END);
} else if (b == SLIP_ESC) {
serialPort.writeByte(SLIP_ESC);
serialPort.writeByte(SLIP_ESC_ESC);
} else {
serialPort.writeByte(b);
}
}
serialPort.writeByte(SLIP_END);
}
public enum SlipState {
STATE_OK,
STATE_ESC,
STATE_RUBBISH
}
|
static void function(byte[] packet, SerialPort serialPort) { serialPort.writeByte(SLIP_END); for (byte b: packet) { if (b == SLIP_END) { serialPort.writeByte(SLIP_ESC); serialPort.writeByte(SLIP_ESC_END); } else if (b == SLIP_ESC) { serialPort.writeByte(SLIP_ESC); serialPort.writeByte(SLIP_ESC_ESC); } else { serialPort.writeByte(b); } } serialPort.writeByte(SLIP_END); } public enum SlipState { STATE_OK, STATE_ESC, STATE_RUBBISH }
|
/**
* Wraps packet as SLIP.
*
* @param packet Packet data
*/
|
Wraps packet as SLIP
|
writeAsSlip
|
{
"repo_name": "MohamedSeliem/contiki",
"path": "tools/cooja/apps/native_gateway/java/org/contikios/cooja/plugins/NativeIPGateway.java",
"license": "bsd-3-clause",
"size": 34346
}
|
[
"org.contikios.cooja.interfaces.SerialPort"
] |
import org.contikios.cooja.interfaces.SerialPort;
|
import org.contikios.cooja.interfaces.*;
|
[
"org.contikios.cooja"
] |
org.contikios.cooja;
| 1,628,568
|
public static List<APIEndpointURLsDTO> extractEndpointURLs(APIProduct apiProduct, String tenantDomain)
throws APIManagementException {
List<APIEndpointURLsDTO> apiEndpointsList = new ArrayList<>();
Map<String, Environment> environments = APIUtil.getEnvironments();
Set<String> environmentsPublishedByAPI = new HashSet<>(apiProduct.getEnvironments());
environmentsPublishedByAPI.remove("none");
Set<String> apiTransports = new HashSet<>(Arrays.asList(apiProduct.getTransports().split(",")));
APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer();
for (String environmentName : environmentsPublishedByAPI) {
Environment environment = environments.get(environmentName);
if (environment != null) {
APIURLsDTO apiURLsDTO = new APIURLsDTO();
String[] gwEndpoints = null;
gwEndpoints = environment.getApiGatewayEndpoint().split(",");
Map<String, String> domains = new HashMap<>();
if (tenantDomain != null) {
domains = apiConsumer.getTenantDomainMappings(tenantDomain,
APIConstants.API_DOMAIN_MAPPINGS_GATEWAY);
}
String customGatewayUrl = null;
if (domains != null) {
customGatewayUrl = domains.get(APIConstants.CUSTOM_URL);
}
for (String gwEndpoint : gwEndpoints) {
StringBuilder endpointBuilder = new StringBuilder(gwEndpoint);
if (customGatewayUrl != null) {
int index = endpointBuilder.indexOf("//");
endpointBuilder.replace(index + 2, endpointBuilder.length(), customGatewayUrl);
endpointBuilder.append(apiProduct.getContext().replace("/t/" + tenantDomain, ""));
} else {
endpointBuilder.append(apiProduct.getContext());
}
if (gwEndpoint.contains("http:") && apiTransports.contains("http")) {
apiURLsDTO.setHttp(endpointBuilder.toString());
} else if (gwEndpoint.contains("https:") && apiTransports.contains("https")) {
apiURLsDTO.setHttps(endpointBuilder.toString());
}
}
APIEndpointURLsDTO apiEndpointURLsDTO = new APIEndpointURLsDTO();
apiEndpointURLsDTO.setUrLs(apiURLsDTO);
apiEndpointURLsDTO.setEnvironmentName(environment.getName());
apiEndpointURLsDTO.setEnvironmentType(environment.getType());
apiEndpointsList.add(apiEndpointURLsDTO);
}
}
return apiEndpointsList;
}
|
static List<APIEndpointURLsDTO> function(APIProduct apiProduct, String tenantDomain) throws APIManagementException { List<APIEndpointURLsDTO> apiEndpointsList = new ArrayList<>(); Map<String, Environment> environments = APIUtil.getEnvironments(); Set<String> environmentsPublishedByAPI = new HashSet<>(apiProduct.getEnvironments()); environmentsPublishedByAPI.remove("none"); Set<String> apiTransports = new HashSet<>(Arrays.asList(apiProduct.getTransports().split(","))); APIConsumer apiConsumer = RestApiCommonUtil.getLoggedInUserConsumer(); for (String environmentName : environmentsPublishedByAPI) { Environment environment = environments.get(environmentName); if (environment != null) { APIURLsDTO apiURLsDTO = new APIURLsDTO(); String[] gwEndpoints = null; gwEndpoints = environment.getApiGatewayEndpoint().split(","); Map<String, String> domains = new HashMap<>(); if (tenantDomain != null) { domains = apiConsumer.getTenantDomainMappings(tenantDomain, APIConstants.API_DOMAIN_MAPPINGS_GATEWAY); } String customGatewayUrl = null; if (domains != null) { customGatewayUrl = domains.get(APIConstants.CUSTOM_URL); } for (String gwEndpoint : gwEndpoints) { StringBuilder endpointBuilder = new StringBuilder(gwEndpoint); if (customGatewayUrl != null) { int index = endpointBuilder.indexOf("//"); endpointBuilder.replace(index + 2, endpointBuilder.length(), customGatewayUrl); endpointBuilder.append(apiProduct.getContext().replace("/t/" + tenantDomain, "")); } else { endpointBuilder.append(apiProduct.getContext()); } if (gwEndpoint.contains("http:") && apiTransports.contains("http")) { apiURLsDTO.setHttp(endpointBuilder.toString()); } else if (gwEndpoint.contains("https:") && apiTransports.contains("https")) { apiURLsDTO.setHttps(endpointBuilder.toString()); } } APIEndpointURLsDTO apiEndpointURLsDTO = new APIEndpointURLsDTO(); apiEndpointURLsDTO.setUrLs(apiURLsDTO); apiEndpointURLsDTO.setEnvironmentName(environment.getName()); apiEndpointURLsDTO.setEnvironmentType(environment.getType()); apiEndpointsList.add(apiEndpointURLsDTO); } } return apiEndpointsList; }
|
/**
 * Extracts the API environment details with access URL for each endpoint
*
* @param apiProduct API object
* @param tenantDomain Tenant domain of the API
* @return the API environment details
* @throws APIManagementException error while extracting the information
*/
|
Extracts the API environment details with access URL for each endpoint
|
extractEndpointURLs
|
{
"repo_name": "tharikaGitHub/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.store.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/store/v1/utils/APIUtils.java",
"license": "apache-2.0",
"size": 9424
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.HashMap",
"java.util.HashSet",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.wso2.carbon.apimgt.api.APIConsumer",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.model.APIProduct",
"org.wso2.carbon.apimgt.api.model.Environment",
"org.wso2.carbon.apimgt.impl.APIConstants",
"org.wso2.carbon.apimgt.impl.utils.APIUtil",
"org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil",
"org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIEndpointURLsDTO",
"org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIURLsDTO"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.wso2.carbon.apimgt.api.APIConsumer; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.Environment; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIEndpointURLsDTO; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIURLsDTO;
|
import java.util.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.impl.*; import org.wso2.carbon.apimgt.impl.utils.*; import org.wso2.carbon.apimgt.rest.api.common.*; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.*;
|
[
"java.util",
"org.wso2.carbon"
] |
java.util; org.wso2.carbon;
| 185,249
|
public SimpleEvaluator addDataSet(Crossfolder cross){
crossfolders.add(cross);
return this;
}
|
SimpleEvaluator function(Crossfolder cross){ crossfolders.add(cross); return this; }
|
/**
* Adds a crossfolder's results to the experiment.
*
* @param cross The crossfold task.
* @return The simple evaluator (for chaining).
*/
|
Adds a crossfolder's results to the experiment
|
addDataSet
|
{
"repo_name": "kluver/lenskit",
"path": "lenskit-eval/src/main/java/org/lenskit/eval/traintest/SimpleEvaluator.java",
"license": "lgpl-2.1",
"size": 9971
}
|
[
"org.lenskit.eval.crossfold.Crossfolder"
] |
import org.lenskit.eval.crossfold.Crossfolder;
|
import org.lenskit.eval.crossfold.*;
|
[
"org.lenskit.eval"
] |
org.lenskit.eval;
| 2,590,072
|