| code (string, 73–34.1k chars) | label (single class) |
|---|---|
public static DMatrixRMaj insideSpan(DMatrixRMaj[] span , double min , double max , Random rand ) {
DMatrixRMaj A = new DMatrixRMaj(span.length,1);
DMatrixRMaj B = new DMatrixRMaj(span[0].getNumElements(),1);
for( int i = 0; i < span.length; i++ ) {
B.set(span[i]);
double val = rand.nextDouble()*(max-min)+min;
CommonOps_DDRM.scale(val,B);
CommonOps_DDRM.add(A,B,A);
}
return A;
} | java |
/**
 * Creates a random square diagonal matrix of size N whose diagonal elements are drawn
 * uniformly from [min,max).  Convenience overload of {@link #diagonal(int,int,double,double,Random)}.
 */
public static DMatrixRMaj diagonal(int N , double min , double max , Random rand ) {
return diagonal(N,N,min,max,rand);
} | java |
/**
 * Creates a random, possibly rectangular, diagonal matrix whose diagonal elements are
 * drawn uniformly from [min,max).
 *
 * @throws IllegalArgumentException if max is less than min
 */
public static DMatrixRMaj diagonal(int numRows , int numCols , double min , double max , Random rand ) {
if( max < min )
throw new IllegalArgumentException("The max must be >= the min");
DMatrixRMaj ret = new DMatrixRMaj(numRows,numCols);
// a rectangular matrix only has min(numRows,numCols) diagonal elements
int N = Math.min(numRows,numCols);
double r = max-min;
for( int i = 0; i < N; i++ ) {
ret.set(i,i, rand.nextDouble()*r+min);
}
return ret;
} | java |
/**
 * Creates a random symmetric matrix with the specified eigenvalues by computing
 * A = V*D*V^T, where V is a random orthogonal matrix and D = diag(eigenvalues).
 */
public static DMatrixRMaj symmetricWithEigenvalues(int num, Random rand , double ...eigenvalues ) {
DMatrixRMaj V = RandomMatrices_DDRM.orthogonal(num,num,rand);
DMatrixRMaj D = CommonOps_DDRM.diag(eigenvalues);
DMatrixRMaj temp = new DMatrixRMaj(num,num);
// A = V*D*V^T ; D is recycled as the output to avoid another allocation
CommonOps_DDRM.mult(V,D,temp);
CommonOps_DDRM.multTransB(temp,V,D);
return D;
} | java |
/**
 * Creates a numRow x numCol boolean matrix whose elements are assigned random
 * true/false values by {@code setRandomB}.
 */
public static BMatrixRMaj randomBinary(int numRow , int numCol , Random rand ) {
BMatrixRMaj mat = new BMatrixRMaj(numRow,numCol);
setRandomB(mat, rand);
return mat;
} | java |
/**
 * Creates a new random symmetric matrix of the given size with elements drawn
 * uniformly from [min,max).
 */
public static DMatrixRMaj symmetric(int length, double min, double max, Random rand) {
DMatrixRMaj A = new DMatrixRMaj(length,length);
symmetric(A,min,max,rand);
return A;
} | java |
public static void symmetric(DMatrixRMaj A, double min, double max, Random rand) {
if( A.numRows != A.numCols )
throw new IllegalArgumentException("A must be a square matrix");
double range = max-min;
int length = A.numRows;
for( int i = 0; i < length; i++ ) {
for( int j = i; j < length; j++ ) {
double val = rand.nextDouble()*range + min;
A.set(i,j,val);
A.set(j,i,val);
}
}
} | java |
/**
 * Creates a random upper-triangular (or upper-Hessenberg-like) square matrix.  Elements
 * on or above the 'hessenberg' sub-diagonal are drawn uniformly from [min,max); the
 * remainder are left zero.  hessenberg==0 yields a strictly upper-triangular structure.
 *
 * @throws RuntimeException if hessenberg is negative
 */
public static DMatrixRMaj triangularUpper(int dimen , int hessenberg , double min , double max , Random rand )
{
if( hessenberg < 0 )
throw new RuntimeException("hessenberg must be more than or equal to 0");
double range = max-min;
DMatrixRMaj A = new DMatrixRMaj(dimen,dimen);
for( int i = 0; i < dimen; i++ ) {
// first filled column of this row; rows within the hessenberg band start at 0
int start = i <= hessenberg ? 0 : i-hessenberg;
for( int j = start; j < dimen; j++ ) {
A.set(i,j, rand.nextDouble()*range+min);
}
}
return A;
} | java |
/**
 * Computes an unnormalized likelihood of the residual vector 'r' under a standard normal
 * model: the product of exp(-a^2/2) over each element a of r.
 */
public double computeLikelihoodP() {
double ret = 1.0;
for( int i = 0; i < r.numRows; i++ ) {
double a = r.get(i,0);
// Gaussian density kernel; the normalization constant is deliberately omitted
ret *= Math.exp(-a*a/2.0);
}
return ret;
} | java |
/**
 * Decomposes the matrix by first reducing it to a similar tridiagonal form and then
 * extracting eigenvalues (and optionally eigenvectors) from that tridiagonal system.
 *
 * @param orig square matrix being decomposed
 * @return true if the decomposition succeeded
 * @throws IllegalArgumentException if the matrix is not square
 */
@Override
public boolean decompose(DMatrixRMaj orig) {
if( orig.numCols != orig.numRows )
throw new IllegalArgumentException("Matrix must be square.");
if( orig.numCols <= 0 )
return false;
int N = orig.numRows;
// compute a similar tridiagonal matrix
if( !decomp.decompose(orig) )
return false;
// lazily (re)allocate work arrays; 'off' holds the off-diagonal, one element fewer
if( diag == null || diag.length < N) {
diag = new double[N];
off = new double[N-1];
}
decomp.getDiagonal(diag,off);
// Tell the helper to work with this matrix
helper.init(diag,off,N);
if( computeVectors ) {
if( computeVectorsWithValues ) {
return extractTogether();
} else {
return extractSeparate(N);
}
} else {
return computeEigenValues();
}
} | java |
/**
 * Extracts just the eigenvalues (no eigenvectors) from the tridiagonal system currently
 * loaded into the helper.
 *
 * @return true if the eigenvalue iteration converged
 */
private boolean computeEigenValues() {
// make a copy of the internal tridiagonal matrix data for later use
diagSaved = helper.copyDiag(diagSaved);
offSaved = helper.copyOff(offSaved);
// no Q matrix: eigenvectors are not requested, enabling the faster value-only path
vector.setQ(null);
vector.setFastEigenvalues(true);
// extract the eigenvalues
if( !vector.process(-1,null,null) )
return false;
// save a copy of them since this data structure will be recycled next
values = helper.copyEigenvalues(values);
return true;
} | java |
/**
 * Forward substitution with the lower-triangular factor of a complex LU decomposition,
 * applying the row permutation stored in 'indx'.  vv holds interleaved (real, imaginary)
 * pairs and is solved in place.
 */
protected void solveL(double[] vv) {
// ii is 1-based index of the first non-zero RHS element; rows before it can be skipped
int ii = 0;
for( int i = 0; i < n; i++ ) {
int ip = indx[i];
double sumReal = vv[ip*2];
double sumImg = vv[ip*2+1];
// un-permute: move the current row's value into the pivot slot
vv[ip*2] = vv[i*2];
vv[ip*2+1] = vv[i*2+1];
if( ii != 0 ) {
// for( int j = ii-1; j < i; j++ )
// sum -= dataLU[i* n +j]*vv[j];
int index = i*stride + (ii-1)*2;
for( int j = ii-1; j < i; j++ ){
double luReal = dataLU[index++];
double luImg = dataLU[index++];
double vvReal = vv[j*2];
double vvImg = vv[j*2+1];
// complex multiply-accumulate: sum -= lu * vv
sumReal -= luReal*vvReal - luImg*vvImg;
sumImg -= luReal*vvImg + luImg*vvReal;
}
} else if( sumReal*sumReal + sumImg*sumImg != 0.0 ) {
// first non-zero element found; remember it (stored 1-based)
ii=i+1;
}
vv[i*2] = sumReal;
vv[i*2+1] = sumImg;
}
} | java |
/**
 * Performs a lower-triangular Cholesky decomposition on the n x n block of 'mat' that
 * starts at 'indexStart'.  Results are written both into the internal 'el' array and
 * back into the matrix data.
 *
 * @return false if the block is not positive-definite, true on success
 */
public boolean decompose(DMatrixRMaj mat , int indexStart , int n ) {
double m[] = mat.data;
double el_ii;
double div_el_ii=0;
for( int i = 0; i < n; i++ ) {
for( int j = i; j < n; j++ ) {
double sum = m[indexStart+i*mat.numCols+j];
int iEl = i*n;
int jEl = j*n;
int end = iEl+i;
// k = 0:i-1
for( ; iEl<end; iEl++,jEl++ ) {
// sum -= el[i*n+k]*el[j*n+k];
sum -= el[iEl]*el[jEl];
}
if( i == j ) {
// is it positive-definite?
if( sum <= 0.0 )
return false;
el_ii = Math.sqrt(sum);
el[i*n+i] = el_ii;
m[indexStart+i*mat.numCols+i] = el_ii;
// cache the reciprocal so the column below uses multiplication, not division
div_el_ii = 1.0/el_ii;
} else {
double v = sum*div_el_ii;
el[j*n+i] = v;
m[indexStart+j*mat.numCols+i] = v;
}
}
}
return true;
} | java |
public static List<int[]> createList( int N )
{
int data[] = new int[ N ];
for( int i = 0; i < data.length; i++ ) {
data[i] = -1;
}
List<int[]> ret = new ArrayList<int[]>();
createList(data,0,-1,ret);
return ret;
} | java |
/**
 * Recursive worker for permutation generation.  Assigns 'level' to slot k, recurses into
 * every still-unassigned slot, and when all slots are filled copies the completed
 * permutation into 'ret'.  The assignment is undone on the way back up (backtracking).
 */
private static void createList( int data[], int k , int level , List<int[]> ret )
{
data[k] = level;
if( level < data.length-1 ) {
for( int i = 0; i < data.length; i++ ) {
// -1 marks a slot not yet assigned at a shallower recursion level
if( data[i] == -1 ) {
createList(data,i,level+1,ret);
}
}
} else {
// permutation complete; store a defensive copy since 'data' keeps mutating
int []copy = new int[data.length];
System.arraycopy(data,0,copy,0,data.length);
ret.add(copy);
}
data[k] = -1;
} | java |
/**
 * Returns the next permutation in the iteration, or null when all permutations have been
 * produced.  This is an iterative (explicit-stack) version of the recursive generator:
 * 'level' is the recursion depth, 'iter' the per-level loop counter, and 'valk' the slot
 * chosen at each level.  The returned array is internal storage and is overwritten by
 * subsequent calls.
 */
public int[] next()
{
boolean hasNewPerm = false;
escape:while( level >= 0) {
// boolean foundZero = false;
for( int i = iter[level]; i < data.length; i = iter[level] ) {
iter[level]++;
if( data[i] == -1 ) {
level++;
data[i] = level-1;
if( level >= data.length ) {
// a new permutation has been created return the results.
hasNewPerm = true;
System.arraycopy(data,0,ret,0,ret.length);
// back out of the last assignment so iteration can resume here next call
level = level-1;
data[i] = -1;
break escape;
} else {
valk[level] = i;
}
}
}
// exhausted this level: undo its assignment and pop back up one level
data[valk[level]] = -1;
iter[level] = 0;
level = level-1;
}
if( hasNewPerm )
return ret;
return null;
} | java |
public static DMatrixSparseTriplet uniform(int numRows , int numCols , int nz_total ,
double min , double max , Random rand ) {
// Create a list of all the possible element values
int N = numCols*numRows;
if( N < 0 )
throw new IllegalArgumentException("matrix size is too large");
nz_total = Math.min(N,nz_total);
int selected[] = new int[N];
for (int i = 0; i < N; i++) {
selected[i] = i;
}
for (int i = 0; i < nz_total; i++) {
int s = rand.nextInt(N);
int tmp = selected[s];
selected[s] = selected[i];
selected[i] = tmp;
}
// Create a sparse matrix
DMatrixSparseTriplet ret = new DMatrixSparseTriplet(numRows,numCols,nz_total);
for (int i = 0; i < nz_total; i++) {
int row = selected[i]/numCols;
int col = selected[i]%numCols;
double value = rand.nextDouble()*(max-min)+min;
ret.addItem(row,col, value);
}
return ret;
} | java |
/**
 * QR-decomposes a single block column using Householder reflectors: for each column it
 * computes the householder vector and then applies the rank-1 update to the remaining
 * columns of the block.
 *
 * @return false if a householder vector could not be computed (e.g. singularity)
 */
public static boolean decomposeQR_block_col( final int blockLength ,
final DSubmatrixD1 Y ,
final double gamma[] )
{
int width = Y.col1-Y.col0;
int height = Y.row1-Y.row0;
// can only produce min(width,height) reflectors
int min = Math.min(width,height);
for( int i = 0; i < min; i++ ) {
// compute the householder vector
if (!computeHouseHolderCol(blockLength, Y, gamma, i))
return false;
// apply to rest of the columns in the block
rank1UpdateMultR_Col(blockLength,Y,i,gamma[Y.col0+i]);
}
return true;
} | java |
/**
 * Divides the elements of the specified column of the block-submatrix Y by 'val',
 * skipping the elements at and above the diagonal in the first (top) block row.
 * Operates directly on the block-format backing array.
 */
public static void divideElementsCol(final int blockLength ,
final DSubmatrixD1 Y , final int col , final double val ) {
final int width = Math.min(blockLength,Y.col1-Y.col0);
final double dataY[] = Y.original.data;
for( int i = Y.row0; i < Y.row1; i += blockLength ) {
// the last block row may be shorter than blockLength
int height = Math.min( blockLength , Y.row1 - i );
int index = i*Y.original.numCols + height*Y.col0 + col;
if( i == Y.row0 ) {
// top block: only process elements strictly below the diagonal
index += width*(col+1);
for( int k = col+1; k < height; k++ , index += width ) {
dataY[index] /= val;
}
} else {
int endIndex = index + width*height;
//for( int k = 0; k < height; k++
for( ; index != endIndex; index += width ) {
dataY[index] /= val;
}
}
}
} | java |
/**
 * Block-matrix multiply-accumulate C += Y*B where Y's top block row is treated as having
 * the implicit zeros/ones structure of a householder matrix (handled by
 * multBlockAdd_zerosone); all other block rows use the general block multiply.
 */
public static void multAdd_zeros(final int blockLength ,
final DSubmatrixD1 Y , final DSubmatrixD1 B ,
final DSubmatrixD1 C )
{
int widthY = Y.col1 - Y.col0;
for( int i = Y.row0; i < Y.row1; i += blockLength ) {
int heightY = Math.min( blockLength , Y.row1 - i );
for( int j = B.col0; j < B.col1; j += blockLength ) {
int widthB = Math.min( blockLength , B.col1 - j );
int indexC = (i-Y.row0+C.row0)*C.original.numCols + (j-B.col0+C.col0)*heightY;
for( int k = Y.col0; k < Y.col1; k += blockLength ) {
int indexY = i*Y.original.numCols + k*heightY;
int indexB = (k-Y.col0+B.row0)*B.original.numCols + j*widthY;
if( i == Y.row0 ) {
// top block row: exploit the implicit zero/one structure
multBlockAdd_zerosone(Y.original.data,B.original.data,C.original.data,
indexY,indexB,indexC,heightY,widthY,widthB);
} else {
InnerMultiplication_DDRB.blockMultPlus(Y.original.data,B.original.data,C.original.data,
indexY,indexB,indexC,heightY,widthY,widthB);
}
}
}
}
} | java |
/**
 * Creates the operation for the equation function multTransA(A,B) = A^T * B.
 * Both inputs must be matrices; the output matrix is allocated through the manager.
 *
 * @throws RuntimeException if the number of inputs is not two
 * @throws IllegalArgumentException if either input is not a matrix
 */
public static ManagerFunctions.InputN createMultTransA() {
return (inputs, manager) -> {
if( inputs.size() != 2 )
throw new RuntimeException("Two inputs required");
final Variable varA = inputs.get(0);
final Variable varB = inputs.get(1);
Operation.Info ret = new Operation.Info();
if( varA instanceof VariableMatrix && varB instanceof VariableMatrix ) {
// The output matrix or scalar variable must be created with the provided manager
final VariableMatrix output = manager.createMatrix();
ret.output = output;
ret.op = new Operation("multTransA-mm") {
@Override
public void process() {
DMatrixRMaj mA = ((VariableMatrix)varA).matrix;
DMatrixRMaj mB = ((VariableMatrix)varB).matrix;
CommonOps_DDRM.multTransA(mA,mB,output.matrix);
}
};
} else {
throw new IllegalArgumentException("Expected both inputs to be a matrix");
}
return ret;
};
} | java |
/**
 * Copies src into dst with its rows rearranged: row i of dst is row order[i] of src.
 *
 * @param order row permutation; must contain one entry per row of src
 * @param dst storage for the result; allocated if null
 * @return the (possibly newly allocated) destination matrix
 * @throws IllegalArgumentException if a non-null dst has different dimensions than src
 */
public static DMatrixRMaj copyChangeRow(int order[] , DMatrixRMaj src , DMatrixRMaj dst )
{
if( dst == null ) {
dst = new DMatrixRMaj(src.numRows,src.numCols);
} else if( src.numRows != dst.numRows || src.numCols != dst.numCols ) {
throw new IllegalArgumentException("src and dst must have the same dimensions.");
}
for( int i = 0; i < src.numRows; i++ ) {
int indexDst = i*src.numCols;
int indexSrc = order[i]*src.numCols;
System.arraycopy(src.data,indexSrc,dst.data,indexDst,src.numCols);
}
return dst;
} | java |
/**
 * Copies only the upper or lower triangular portion of src into dst; the remaining
 * elements of dst are left untouched.
 *
 * @param dst storage for the result; allocated if null
 * @param upper true to copy the upper triangle, false for the lower
 * @throws IllegalArgumentException if a non-null dst has different dimensions than src
 */
public static DMatrixRMaj copyTriangle(DMatrixRMaj src , DMatrixRMaj dst , boolean upper ) {
if( dst == null ) {
dst = new DMatrixRMaj(src.numRows,src.numCols);
} else if( src.numRows != dst.numRows || src.numCols != dst.numCols ) {
throw new IllegalArgumentException("src and dst must have the same dimensions.");
}
if( upper ) {
// copy each row from the diagonal element to the end of the row
int N = Math.min(src.numRows,src.numCols);
for( int i = 0; i < N; i++ ) {
int index = i*src.numCols+i;
System.arraycopy(src.data,index,dst.data,index,src.numCols-i);
}
} else {
// copy each row from the start up to and including the diagonal element
for( int i = 0; i < src.numRows; i++ ) {
int length = Math.min(i+1,src.numCols);
int index = i*src.numCols;
System.arraycopy(src.data,index,dst.data,index,length);
}
}
return dst;
} | java |
/**
 * Splits the matrix into an array of vectors, one per column (if column==true) or one
 * per row, each copied into its own newly allocated matrix.
 */
public static DMatrixRMaj[] splitIntoVectors(DMatrix1Row A , boolean column )
{
// w: number of vectors; (M,N): shape of each extracted vector
int w = column ? A.numCols : A.numRows;
int M = column ? A.numRows : 1;
int N = column ? 1 : A.numCols;
// o: length of each vector
int o = Math.max(M,N);
DMatrixRMaj[] ret = new DMatrixRMaj[w];
for( int i = 0; i < w; i++ ) {
DMatrixRMaj a = new DMatrixRMaj(M,N);
if( column )
subvector(A,0,i,o,false,0,a);
else
subvector(A,i,0,o,true,0,a);
ret[i] = a;
}
return ret;
} | java |
public static double diagProd( DMatrix1Row T )
{
double prod = 1.0;
int N = Math.min(T.numRows,T.numCols);
for( int i = 0; i < N; i++ ) {
prod *= T.unsafe_get(i,i);
}
return prod;
} | java |
/**
 * Computes the sum of the squares of all elements.  Elements are first divided by the
 * maximum absolute value to reduce overflow/round-off error, and the scale is applied
 * back at the end.
 */
public static double elementSumSq( DMatrixD1 m ) {
// minimize round off error
double maxAbs = CommonOps_DDRM.elementMaxAbs(m);
// all-zero matrix: avoid dividing by zero below
if( maxAbs == 0)
return 0;
double total = 0;
int N = m.getNumElements();
for( int i = 0; i < N; i++ ) {
double d = m.data[i]/maxAbs;
total += d*d;
}
// undo the scaling: sum((x/s)^2) * s^2 == sum(x^2)
return maxAbs*total*maxAbs;
} | java |
/**
 * Resets internal state for a new problem and takes (without copying) the diagonal and
 * off-diagonal arrays of the tridiagonal matrix being processed.
 */
public void init( double diag[] ,
double off[],
int numCols ) {
reset(numCols);
// arrays are referenced, not copied: the caller's storage is modified in place
this.diag = diag;
this.off = off;
} | java |
/**
 * Resets all iteration state for a problem of size N: clears matrix references, split
 * bookkeeping, the active sub-problem range [x1,x2], and step/exceptional-shift counters.
 */
public void reset( int N ) {
this.N = N;
this.diag = null;
this.off = null;
// grow the splits buffer only when needed
if( splits.length < N ) {
splits = new int[N];
}
numSplits = 0;
// the active sub-problem initially covers the whole matrix
x1 = 0;
x2 = N-1;
steps = numExceptional = lastExceptional = 0;
this.Q = null;
} | java |
/**
 * Checks if the off-diagonal element at 'index' is effectively zero relative to its two
 * neighboring diagonal elements (standard deflation test, scaled by machine epsilon).
 */
protected boolean isZero( int index ) {
double bottom = Math.abs(diag[index])+Math.abs(diag[index+1]);
return( Math.abs(off[index]) <= bottom*UtilEjml.EPS);
} | java |
/**
 * Applies the first Givens rotation of an implicit QL/QR sweep at row x1, creating the
 * "bulge" that subsequent rotations chase down the matrix.
 *
 * @param p shift value, or a rotation angle if byAngle is true
 * @param byAngle if true 'p' is interpreted as an angle instead of a shift
 */
protected void createBulge( int x1 , double p , boolean byAngle ) {
double a11 = diag[x1];
double a22 = diag[x1+1];
double a12 = off[x1];
double a23 = off[x1+1];
if( byAngle ) {
c = Math.cos(p);
s = Math.sin(p);
c2 = c*c;
s2 = s*s;
cs = c*s;
} else {
// rotation chosen to zero the shifted first off-diagonal element
computeRotation(a11-p, a12);
}
// multiply the rotator on the top left.
diag[x1] = c2*a11 + 2.0*cs*a12 + s2*a22;
diag[x1+1] = c2*a22 - 2.0*cs*a12 + s2*a11;
off[x1] = a12*(c2-s2) + cs*(a22 - a11);
off[x1+1] = c*a23;
// the bulge is the fill-in element created outside the tridiagonal band
bulge = s*a23;
if( Q != null )
updateQ(x1,x1+1,c,s);
} | java |
/**
 * Directly computes the eigenvalues of the symmetric 2x2 sub-problem at (x1,x1), writes
 * them onto the diagonal, and zeroes the connecting off-diagonal element.  Elements are
 * scaled by their maximum magnitude first to reduce overflow.
 */
protected void eigenvalue2by2( int x1 ) {
double a = diag[x1];
double b = off[x1];
double c = diag[x1+1];
// normalize to reduce overflow
double absA = Math.abs(a);
double absB = Math.abs(b);
double absC = Math.abs(c);
double scale = absA > absB ? absA : absB;
if( absC > scale ) scale = absC;
// see if it is a pathological case. the diagonal must already be zero
// and the eigenvalues are all zero. so just return
if( scale == 0 ) {
off[x1] = 0;
diag[x1] = 0;
diag[x1+1] = 0;
return;
}
a /= scale;
b /= scale;
c /= scale;
eigenSmall.symm2x2_fast(a,b,c);
off[x1] = 0;
// undo the scaling on the computed eigenvalues
diag[x1] = scale*eigenSmall.value0.real;
diag[x1+1] = scale*eigenSmall.value1.real;
} | java |
/**
 * Solves A*X=B by converting B into block format, delegating to the block solver, and
 * converting the block result back into row-major X.
 */
@Override
public void solve(DMatrixRMaj B, DMatrixRMaj X) {
X.reshape(blockA.numCols,B.numCols);
// reuse block work matrices; 'false' avoids preserving their old contents
blockB.reshape(B.numRows,B.numCols,false);
blockX.reshape(X.numRows,X.numCols,false);
MatrixOps_DDRB.convert(B,blockB);
alg.solve(blockB,blockX);
MatrixOps_DDRB.convert(blockX,X);
} | java |
/**
 * Computes the matrix inverse using the block-format algorithm, then converts the block
 * result into the supplied row-major matrix.
 */
@Override
public void invert(DMatrixRMaj A_inv) {
blockB.reshape(A_inv.numRows,A_inv.numCols,false);
alg.invert(blockB);
MatrixOps_DDRB.convert(blockB,A_inv);
} | java |
/**
 * Code generator: prints Java statements that compute all first-row-expansion minors of
 * an NxN matrix and the resulting determinant, following the cofactor sign pattern
 * (-1)^(i+j).
 */
public void printMinors(int matrix[], int N, PrintStream stream) {
this.N = N;
this.stream = stream;
// compute all the minors
int index = 0;
for( int i = 1; i <= N; i++ ) {
for( int j = 1; j <= N; j++ , index++) {
stream.print(" double m"+i+""+j+" = ");
// cofactor sign: negative when i+j is odd
if( (i+j) % 2 == 1 )
stream.print("-( ");
printTopMinor(matrix,i-1,j-1,N);
if( (i+j) % 2 == 1 )
stream.print(")");
stream.print(";\n");
}
}
stream.println();
// compute the determinant
stream.print(" double det = (a11*m11");
for( int i = 2; i <= N; i++ ) {
stream.print(" + "+a(i-1)+"*m"+1+""+i);
}
stream.println(")/scale;");
} | java |
/**
 * Decomposes the system matrix A with QR and stores the factors for later solve calls.
 *
 * @return false if the decomposition failed
 * @throws IllegalArgumentException if A has fewer rows than columns (underdetermined)
 */
@Override
public boolean setA(DMatrixRBlock A) {
if( A.numRows < A.numCols )
throw new IllegalArgumentException("Number of rows must be more than or equal to the number of columns. " +
"Can't solve an underdetermined system.");
if( !decomposer.decompose(A))
return false;
this.QR = decomposer.getQR();
return true;
} | java |
@Override
public void invert(DMatrixRBlock A_inv) {
int M = Math.min(QR.numRows,QR.numCols);
if( A_inv.numRows != M || A_inv.numCols != M )
throw new IllegalArgumentException("A_inv must be square an have dimension "+M);
// Solve for A^-1
// Q*R*A^-1 = I
// Apply householder reflectors to the identity matrix
// y = Q^T*I = Q^T
MatrixOps_DDRB.setIdentity(A_inv);
decomposer.applyQTran(A_inv);
// Solve using upper triangular R matrix
// R*A^-1 = y
// A^-1 = R^-1*y
TriangularSolver_DDRB.solve(QR.blockLength,true,
new DSubmatrixD1(QR,0,M,0,M),new DSubmatrixD1(A_inv),false);
} | java |
/**
 * Executes every queued operation in order.
 */
public void perform() {
for (int i = 0; i < operations.size(); i++) {
operations.get(i).process();
}
} | java |
/**
 * Declares the maximum matrix size so internal work matrices can be pre-allocated.
 * The B work matrix is only needed when the input is at least one block tall.
 */
@Override
public void setExpectedMaxSize( int numRows , int numCols ) {
super.setExpectedMaxSize(numRows,numCols);
// if the matrix that is being decomposed is smaller than the block we really don't
// see the B matrix.
if( numRows < blockWidth)
B = new DMatrixRMaj(0,0);
else
B = new DMatrixRMaj(blockWidth,maxWidth);
chol = new CholeskyBlockHelper_DDRM(blockWidth);
} | java |
/**
 * Converts a triplet sparse matrix into compressed sparse column (CSC) format using a
 * column histogram: first count the elements per column, derive col_idx from the
 * histogram, then scatter each (row,value) into its column's slot.
 *
 * @param dst storage for the result; allocated if null
 * @param hist work array of length >= numCols; allocated if null
 * @throws IllegalArgumentException if a non-null hist is shorter than numCols
 */
public static DMatrixSparseCSC convert(DMatrixSparseTriplet src , DMatrixSparseCSC dst , int hist[] ) {
if( dst == null )
dst = new DMatrixSparseCSC(src.numRows, src.numCols , src.nz_length);
else
dst.reshape(src.numRows, src.numCols, src.nz_length);
if( hist == null )
hist = new int[ src.numCols ];
else if( hist.length >= src.numCols )
Arrays.fill(hist,0,src.numCols, 0);
else
throw new IllegalArgumentException("Length of hist must be at least numCols");
// compute the number of elements in each columns
for (int i = 0; i < src.nz_length; i++) {
hist[src.nz_rowcol.data[i*2+1]]++;
}
// define col_idx
dst.histogramToStructure(hist);
// hist now tracks the next free slot within each column
System.arraycopy(dst.col_idx,0,hist,0,dst.numCols);
// now write the row indexes and the values
for (int i = 0; i < src.nz_length; i++) {
int row = src.nz_rowcol.data[i*2];
int col = src.nz_rowcol.data[i*2+1];
double value = src.nz_value.data[i];
int index = hist[col]++;
dst.nz_rows[index] = row;
dst.nz_values[index] = value;
}
// triplets arrive in arbitrary order, so row indices within a column are unsorted
dst.indicesSorted = false;
return dst;
} | java |
/**
 * Initializes column-pivoting bookkeeping for the QR decomposition: the identity pivot
 * permutation and the squared 2-norm of every column.
 */
protected void setupPivotInfo() {
for( int col = 0; col < numCols; col++ ) {
pivots[col] = col;
double c[] = dataQR[col];
double norm = 0;
for( int row = 0; row < numRows; row++ ) {
double element = c[row];
norm += element*element;
}
normsCol[col] = norm;
}
} | java |
/**
 * Incrementally updates the column norms after processing column j-1 by subtracting the
 * contribution of the just-eliminated row.  If cancellation drives any norm negative,
 * precision has been lost and all remaining norms are recomputed from scratch.
 */
protected void updateNorms( int j ) {
boolean foundNegative = false;
for( int col = j; col < numCols; col++ ) {
double e = dataQR[col][j-1];
double v = normsCol[col] -= e*e;
if( v < 0 ) {
foundNegative = true;
break;
}
}
// if a negative sum has been found then clearly too much precision has been lost
// and it should recompute the column norms from scratch
if( foundNegative ) {
for( int col = j; col < numCols; col++ ) {
double u[] = dataQR[col];
double actual = 0;
// only rows >= j still contribute to the active sub-matrix norm
for( int i=j; i < numRows; i++ ) {
double v = u[i];
actual += v*v;
}
normsCol[col] = actual;
}
}
} | java |
/**
 * Column-pivot step: finds the remaining column (index >= j) with the largest norm and
 * swaps it into position j, keeping the column data, norm cache, and pivot permutation
 * arrays consistent.
 */
protected void swapColumns( int j ) {
// find the column with the largest norm
int largestIndex = j;
double largestNorm = normsCol[j];
for( int col = j+1; col < numCols; col++ ) {
double n = normsCol[col];
if( n > largestNorm ) {
largestNorm = n;
largestIndex = col;
}
}
// swap the columns
double []tempC = dataQR[j];
dataQR[j] = dataQR[largestIndex];
dataQR[largestIndex] = tempC;
double tempN = normsCol[j];
normsCol[j] = normsCol[largestIndex];
normsCol[largestIndex] = tempN;
int tempP = pivots[j];
pivots[j] = pivots[largestIndex];
pivots[largestIndex] = tempP;
} | java |
/**
 * Returns the configured Hive execution engine, falling back to HiveConf's default
 * when no system override is present.
 */
public String getHiveExecutionEngine() {
String executionEngine = hiveConfSystemOverride.get(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname);
return executionEngine == null ? HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.getDefaultValue() : executionEngine;
} | java |
/**
 * Merges another configuration into this one; entries from the argument overwrite
 * existing keys in both the config and hive-conf override maps.
 */
public void override(HiveRunnerConfig hiveRunnerConfig) {
config.putAll(hiveRunnerConfig.config);
hiveConfSystemOverride.putAll(hiveRunnerConfig.hiveConfSystemOverride);
} | java |
/**
 * Finds a declared field by name on the given class, searching superclasses recursively
 * when the class itself does not declare it.
 *
 * @return the field, or absent if no class in the hierarchy declares it
 */
public static Optional<Field> getField(Class<?> type, final String fieldName) {
Optional<Field> field = Iterables.tryFind(newArrayList(type.getDeclaredFields()), havingFieldName(fieldName));
if (!field.isPresent() && type.getSuperclass() != null){
// not declared here: walk up the inheritance chain
field = getField(type.getSuperclass(), fieldName);
}
return field;
} | java |
/**
 * Starts an embedded HiveServer2 for tests: merges test-case properties into the Hive
 * configuration, locates the CLIService, opens a session, and applies the hive
 * variables.  Finally pings the server to fail fast on a broken metastore setup.
 *
 * @throws IllegalStateException if the server or session could not be created
 */
public void init(Map<String, String> testConfig, Map<String, String> hiveVars) {
context.init();
HiveConf hiveConf = context.getHiveConf();
// merge test case properties with hive conf before HiveServer is started.
for (Map.Entry<String, String> property : testConfig.entrySet()) {
hiveConf.set(property.getKey(), property.getValue());
}
try {
hiveServer2 = new HiveServer2();
hiveServer2.init(hiveConf);
// Locate the ClIService in the HiveServer2
for (Service service : hiveServer2.getServices()) {
if (service instanceof CLIService) {
client = (CLIService) service;
}
}
Preconditions.checkNotNull(client, "ClIService was not initialized by HiveServer2");
// credentials are placeholders; the embedded server does not authenticate
sessionHandle = client.openSession("noUser", "noPassword", null);
SessionState sessionState = client.getSessionManager().getSession(sessionHandle).getSessionState();
currentSessionState = sessionState;
currentSessionState.setHiveVariables(hiveVars);
} catch (Exception e) {
throw new IllegalStateException("Failed to create HiveServer :" + e.getMessage(), e);
}
// Ping hive server before we do anything more with it! If validation
// is switched on, this will fail if metastorage is not set up properly
pingHiveServer();
} | java |
/**
 * Adds rows parsed from a delimited file to the pending insert.  Returns this for chaining.
 */
public InsertIntoTable addRowsFromDelimited(File file, String delimiter, Object nullValue) {
builder.addRowsFromDelimited(file, delimiter, nullValue);
return this;
} | java |
/**
 * Adds rows parsed from the given file using a custom parser.  Returns this for chaining.
 */
public InsertIntoTable addRowsFrom(File file, FileParser fileParser) {
builder.addRowsFrom(file, fileParser);
return this;
} | java |
/**
 * Sets a column value on the row currently being built.  Returns this for chaining.
 */
public InsertIntoTable set(String name, Object value) {
builder.set(name, value);
return this;
} | java |
/**
 * Runs the JUnit statement inside a freshly created HiveShell container rooted in the
 * temporary folder; the container is always torn down afterwards, even on failure.
 *
 * @return the container that was created for the evaluation
 */
public HiveShellContainer evaluateStatement(List<? extends Script> scripts, Object target, TemporaryFolder temporaryFolder, Statement base) throws Throwable {
container = null;
FileUtil.setPermission(temporaryFolder.getRoot(), FsPermission.getDirDefault());
try {
LOGGER.info("Setting up {} in {}", getName(), temporaryFolder.getRoot().getAbsolutePath());
container = createHiveServerContainer(scripts, target, temporaryFolder);
base.evaluate();
return container;
} finally {
tearDown();
}
} | java |
/**
 * Builds the HiveShell container for a test case: wires the server context, applies
 * annotated resources/properties/setup scripts from the test class, and optionally
 * auto-starts the shell.
 *
 * @param scripts scripts that override the ones declared on the test case; may be null
 */
private HiveShellContainer createHiveServerContainer(final List<? extends Script> scripts, final Object testCase, TemporaryFolder baseDir)
throws IOException {
HiveServerContext context = new StandaloneHiveServerContext(baseDir, config);
final HiveServerContainer hiveTestHarness = new HiveServerContainer(context);
HiveShellBuilder hiveShellBuilder = new HiveShellBuilder();
hiveShellBuilder.setCommandShellEmulation(config.getCommandShellEmulator());
HiveShellField shellSetter = loadScriptUnderTest(testCase, hiveShellBuilder);
if (scripts != null) {
hiveShellBuilder.overrideScriptsUnderTest(scripts);
}
hiveShellBuilder.setHiveServerContainer(hiveTestHarness);
loadAnnotatedResources(testCase, hiveShellBuilder);
loadAnnotatedProperties(testCase, hiveShellBuilder);
loadAnnotatedSetupScripts(testCase, hiveShellBuilder);
// Build shell
final HiveShellContainer shell = hiveShellBuilder.buildShell();
// Set shell
shellSetter.setShell(shell);
if (shellSetter.isAutoStart()) {
shell.start();
}
return shell;
} | java |
/**
 * Creates an insert command for the given object, binding the result to outIdentifier.
 * Delegates to the registered command factory provider.
 */
public static Command newInsert(Object object,
String outIdentifier) {
return getCommandFactoryProvider().newInsert( object,
outIdentifier );
} | java |
/**
 * Creates an insert command targeting a specific entry point, optionally returning the
 * inserted object in the results.  Delegates to the registered command factory provider.
 */
public static Command newInsert(Object object,
String outIdentifier,
boolean returnObject,
String entryPoint ) {
return getCommandFactoryProvider().newInsert( object,
outIdentifier,
returnObject,
entryPoint );
} | java |
/**
 * Creates a command that inserts every element of the collection into the given entry
 * point.  Delegates to the registered command factory provider.
 */
public static Command newInsertElements(Collection objects, String outIdentifier, boolean returnObject, String entryPoint) {
return getCommandFactoryProvider().newInsertElements( objects, outIdentifier, returnObject, entryPoint );
} | java |
/**
 * Creates a command that sets a session global.  Delegates to the registered command
 * factory provider.
 */
public static Command newSetGlobal(String identifier,
Object object) {
return getCommandFactoryProvider().newSetGlobal( identifier,
object );
} | java |
/**
 * Creates a command that reads a session global into the result under outIdentifier.
 * Delegates to the registered command factory provider.
 */
public static Command newGetGlobal(String identifier,
String outIdentifier) {
return getCommandFactoryProvider().newGetGlobal( identifier,
outIdentifier );
} | java |
/**
 * Creates a command that starts the identified process with the given parameters.
 * Delegates to the registered command factory provider.
 */
public static Command newStartProcess(String processId,
Map<String, Object> parameters) {
return getCommandFactoryProvider().newStartProcess( processId,
parameters );
} | java |
/**
 * Creates a command that runs the named query.  Delegates to the registered command
 * factory provider.
 */
public static Command newQuery(String identifier,
String name) {
return getCommandFactoryProvider().newQuery( identifier,
name );
} | java |
/**
 * Creates a command that runs the named query with positional arguments.  Delegates to
 * the registered command factory provider.
 */
public static Command newQuery(String identifier,
String name,
Object[] arguments) {
return getCommandFactoryProvider().newQuery( identifier,
name,
arguments );
} | java |
/**
 * Creates a knowledge-builder configuration from the given properties and class loaders
 * via the factory service.
 */
public static KnowledgeBuilderConfiguration newKnowledgeBuilderConfiguration(Properties properties,
ClassLoader... classLoaders) {
return FactoryServiceHolder.factoryService.newKnowledgeBuilderConfiguration( properties, classLoaders );
} | java |
/**
 * Removes a previously registered factory for the named service.  A no-op if the service
 * or factory is unknown.
 *
 * @throws IllegalArgumentException if serviceName is null
 */
public static synchronized void unregister(final String serviceName,
final Callable<Class< ? >> factory) {
if ( serviceName == null ) {
throw new IllegalArgumentException( "serviceName cannot be null" );
}
if ( factories != null ) {
List<Callable<Class< ? >>> l = factories.get( serviceName );
if ( l != null ) {
l.remove( factory );
}
}
} | java |
/**
 * Registers a factory for the named service, lazily creating the registry and the
 * per-service list.  A null factory is silently ignored.
 *
 * @throws IllegalArgumentException if serviceName is null
 */
public static synchronized void register(final String serviceName,
final Callable<Class< ? >> factory) {
if ( serviceName == null ) {
throw new IllegalArgumentException( "serviceName cannot be null" );
}
if ( factory != null ) {
if ( factories == null ) {
factories = new HashMap<String, List<Callable<Class< ? >>>>();
}
List<Callable<Class< ? >>> l = factories.get( serviceName );
if ( l == null ) {
l = new ArrayList<Callable<Class< ? >>>();
factories.put( serviceName,
l );
}
l.add( factory );
}
} | java |
/**
 * Resolves the named service to a class using the most recently registered factory.
 *
 * @return the resolved class, or null if no factory is registered or resolution failed
 * @throws IllegalArgumentException if serviceName is null
 */
public static synchronized Class< ? > locate(final String serviceName) {
if ( serviceName == null ) {
throw new IllegalArgumentException( "serviceName cannot be null" );
}
if ( factories != null ) {
List<Callable<Class< ? >>> l = factories.get( serviceName );
if ( l != null && !l.isEmpty() ) {
// last registration wins
Callable<Class< ? >> c = l.get( l.size() - 1 );
try {
return c.call();
} catch ( Exception e ) {
// NOTE(review): factory failures are silently swallowed and reported as
// "not found" (null); consider at least logging the exception.
}
}
}
return null;
} | java |
/**
 * Resolves the named service to all classes produced by its registered factories, in
 * registration order.  Factories that throw are skipped.
 *
 * @return list of resolved classes; empty when none are registered
 * @throws IllegalArgumentException if serviceName is null
 */
public static synchronized List<Class< ? >> locateAll(final String serviceName) {
if ( serviceName == null ) {
throw new IllegalArgumentException( "serviceName cannot be null" );
}
List<Class< ? >> classes = new ArrayList<Class< ? >>();
if ( factories != null ) {
List<Callable<Class< ? >>> l = factories.get( serviceName );
if ( l != null ) {
for ( Callable<Class< ? >> c : l ) {
try {
classes.add( c.call() );
} catch ( Exception e ) {
// NOTE(review): resolution failures are silently skipped; consider logging.
}
}
}
}
return classes;
} | java |
/**
 * Creates a runtime logger that writes the session's events to the given file.
 * Delegates to the registered logger provider.
 */
public static KieRuntimeLogger newFileLogger(KieRuntimeEventManager session,
String fileName) {
return getKnowledgeRuntimeLoggerProvider().newFileLogger( session,
fileName );
} | java |
/**
 * Serializes a deployment descriptor to formatted, schema-validated XML.
 *
 * @return the XML string
 * @throws RuntimeException wrapping any marshalling failure
 */
public static String toXml(DeploymentDescriptor descriptor) {
try {
Marshaller marshaller = getContext().createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
marshaller.setProperty(Marshaller.JAXB_SCHEMA_LOCATION, "http://www.jboss.org/jbpm deployment-descriptor.xsd");
marshaller.setSchema(schema);
StringWriter stringWriter = new StringWriter();
// clone the object and cleanup transients
DeploymentDescriptor clone = ((DeploymentDescriptorImpl) descriptor).clearClone();
marshaller.marshal(clone, stringWriter);
String output = stringWriter.toString();
return output;
} catch (Exception e) {
throw new RuntimeException("Unable to generate xml from deployment descriptor", e);
}
} | java |
public static List<ObjectModelResolver> getResolvers() {
if (resolvers == null) {
synchronized (serviceLoader) {
if (resolvers == null) {
List<ObjectModelResolver> foundResolvers = new ArrayList<ObjectModelResolver>();
for (ObjectModelResolver resolver : serviceLoader) {
foundResolvers.add(resolver);
}
resolvers = foundResolvers;
}
}
}
return resolvers;
} | java |
public static ObjectModelResolver get(String resolverId) {
List<ObjectModelResolver> resolvers = getResolvers();
for (ObjectModelResolver resolver : resolvers) {
if (resolver.accept(resolverId)) {
return resolver;
}
}
return null;
} | java |
/**
 * Re-arms a stopped queue poller so it can be started again: clears the stop flags and
 * recreates the loop semaphore.
 *
 * @throws IllegalStateException if the poller has not actually stopped yet
 */
void reset()
{
if (!hasStopped)
{
throw new IllegalStateException("cannot reset a non stopped queue poller");
}
hasStopped = false;
run = true;
lastLoop = null;
// fresh semaphore so stale permits from the previous run cannot leak through
loop = new Semaphore(0);
} | java |
/**
 * Releases everything held by a finished job instance: its peremption entry, the running
 * thread count, and every resource-manager reservation.  Unless strict polling is
 * enabled, immediately wakes the polling loop so the freed slot is reused at once.
 */
void releaseResources(JobInstance ji)
{
this.peremption.remove(ji.getId());
this.actualNbThread.decrementAndGet();
for (ResourceManagerBase rm : this.resourceManagers)
{
rm.releaseResource(ji);
}
if (!this.strictPollingPeriod)
{
// Force a new loop at once. This makes queues more fluid.
loop.release(1);
}
this.engine.signalEndOfRun();
} | java |
/**
 * Creates and starts a JQM engine for the named node.
 *
 * @param handler lifecycle callback handler, may be used by the engine during startup
 * @return the running engine, exposed through its operations interface
 */
public static JqmEngineOperations startEngine(String name, JqmEngineHandler handler)
{
JqmEngine e = new JqmEngine();
e.start(name, handler);
return e;
} | java |
/**
 * Maps an ErrorDto to an HTTP response carrying the DTO as a JSON body with the status
 * code embedded in the DTO.
 */
@Override
public Response toResponse(ErrorDto e)
{
// String type = headers.getContentType() == null ? MediaType.APPLICATION_JSON : headers.getContentType();
return Response.status(e.httpStatus).entity(e).type(MediaType.APPLICATION_JSON).build();
} | java |
/**
 * Returns a JQM client from the bound client factory.  The supplied properties are
 * layered on top of the static defaults; when p is null the defaults are used directly.
 *
 * @param name client name; semantics defined by the underlying factory
 * @param cached whether the factory may return a cached client instance
 */
public static JqmClient getClient(String name, Properties p, boolean cached)
{
Properties p2 = null;
if (binder == null)
{
bind();
}
if (p == null)
{
p2 = props;
}
else
{
// defaults from 'props' remain visible underneath the caller's overrides
p2 = new Properties(props);
p2.putAll(p);
}
return binder.getClientFactory().getClient(name, p2, cached);
} | java |
public static ScheduledJob create(String cronExpression)
{
ScheduledJob res = new ScheduledJob();
res.cronExpression = cronExpression;
return res;
} | java |
/**
 * Exports the single named queue to an XML file by delegating to the list-based export.
 *
 * @throws IllegalArgumentException if an argument is null or the queue does not exist
 * @throws JqmXmlException if writing the XML fails
 */
static void export(String path, String queueName, DbConn cnx) throws JqmXmlException
{
// Argument tests
if (queueName == null)
{
throw new IllegalArgumentException("queue name cannot be null");
}
if (cnx == null)
{
throw new IllegalArgumentException("database connection cannot be null");
}
Queue q = CommonXml.findQueue(queueName, cnx);
if (q == null)
{
throw new IllegalArgumentException("there is no queue named " + queueName);
}
List<Queue> l = new ArrayList<>();
l.add(q);
export(path, l, cnx);
} | java |
/**
 * Persists a text message attached to the given job instance.
 */
static void createMessage(String textMessage, JobInstance jobInstance, DbConn cnx)
{
cnx.runUpdate("message_insert", jobInstance.getId(), textMessage);
} | java |
/**
 * Persists a deliverable (output file) record for a job, tagging it with a random UUID.
 *
 * @return the database-generated id of the new deliverable row
 */
static int createDeliverable(String path, String originalFileName, String fileFamily, Integer jobId, DbConn cnx)
{
QueryResult qr = cnx.runUpdate("deliverable_insert", fileFamily, path, jobId, originalFileName, UUID.randomUUID().toString());
return qr.getGeneratedId();
} | java |
/**
 * Creates a global parameter with the given initial value only if it does not already
 * exist; an existing value (unique or not) is left untouched.
 */
static void initSingleParam(String key, String initValue, DbConn cnx)
{
try
{
// probe for an existing value; success means nothing to initialize
cnx.runSelectSingle("globalprm_select_by_key", 2, String.class, key);
return;
}
catch (NoResultException e)
{
GlobalParameter.create(cnx, key, initValue);
}
catch (NonUniqueResultException e)
{
// It exists! Nothing to do...
}
} | java |
/**
 * Upserts a global parameter: updates the value if the key exists, otherwise inserts it,
 * then commits the transaction.
 */
static void setSingleParam(String key, String value, DbConn cnx)
{
QueryResult r = cnx.runUpdate("globalprm_update_value_by_key", value, key);
// no row updated means the key is absent: insert instead
if (r.nbUpdated == 0)
{
cnx.runUpdate("globalprm_insert", key, value);
}
cnx.commit();
} | java |
/**
 * Closes this database connection wrapper: rolls back any open transaction (best
 * effort), closes every tracked statement, then the underlying connection itself.
 */
public void close()
{
if (transac_open)
{
try
{
this._cnx.rollback();
}
catch (Exception e)
{
// Ignore.
}
}
for (Statement s : toClose)
{
closeQuietly(s);
}
toClose.clear();
closeQuietly(_cnx);
// mark as closed so later use fails fast instead of touching a dead connection
_cnx = null;
} | java |
/**
 * Sends a KILL instruction to every currently registered running job instance.
 * Iterates over an array snapshot so handlers may deregister instances concurrently.
 */
void killAll()
{
for (RjiRegistration reg : this.instancesById.values().toArray(new RjiRegistration[] {}))
{
reg.rji.handleInstruction(Instruction.KILL);
}
} | java |
/**
 * Moves the given job instance to another queue via the default JQM client.
 */
@Override
public void setJobQueue(int jobId, Queue queue)
{
JqmClientFactory.getClient().setJobQueue(jobId, queue);
} | java |
/**
 * Runs the given job query through the default JQM client and returns the matches.
 */
@Override
public List<JobInstance> getJobs(Query query)
{
return JqmClientFactory.getClient().getJobs(query);
} | java |
/**
 * Orderly engine shutdown: unregisters the JVM shutdown hook (unless the shutdown hook
 * itself triggered the stop), stops all queue pollers and the scheduler, waits for the
 * pollers to drain, then KILLs any job instances still running and grants them a grace
 * period before returning.
 */
@Override
public void stop()
{
synchronized (killHook)
{
jqmlogger.info("JQM engine " + this.node.getName() + " has received a stop order");
// Kill hook should be removed
try
{
if (!Runtime.getRuntime().removeShutdownHook(killHook))
{
jqmlogger.error("The engine could not unregister its shutdown hook");
}
}
catch (IllegalStateException e)
{
// This happens if the stop sequence is initiated by the shutdown hook itself.
jqmlogger.info("Stop order is due to an admin operation (KILL/INT)");
}
}
// Stop pollers
int pollerCount = pollers.size();
for (QueuePoller p : pollers.values())
{
p.stop();
}
// Scheduler
this.scheduler.stop();
// Jetty is closed automatically when all pollers are down
// Wait for the end of the world
if (pollerCount > 0)
{
try
{
// released by the last poller to shut down
this.ended.acquire();
}
catch (InterruptedException e)
{
jqmlogger.error("interrupted", e);
}
}
// Send a KILL signal to remaining job instances, and wait some more.
if (this.getCurrentlyRunningJobCount() > 0)
{
this.runningJobInstanceManager.killAll();
try
{
// grace period for killed payloads to actually terminate
Thread.sleep(10000);
}
catch (InterruptedException e)
{
jqmlogger.error("interrupted", e);
}
}
jqmlogger.debug("Stop order was correctly handled. Engine for node " + this.node.getName() + " has stopped.");
} | java |
private void purgeDeadJobInstances(DbConn cnx, Node node)
{
for (JobInstance ji : JobInstance.select(cnx, "ji_select_by_node", node.getId()))
{
try
{
cnx.runSelectSingle("history_select_state_by_id", String.class, ji.getId());
}
catch (NoResultException e)
{
History.create(cnx, ji, State.CRASHED, Calendar.getInstance());
Message.create(cnx,
"Job was supposed to be running at server startup - usually means it was killed along a server by an admin or a crash",
ji.getId());
}
cnx.runUpdate("ji_delete_by_id", ji.getId());
}
cnx.commit();
} | java |
private URL[] getClasspath(JobInstance ji, JobRunnerCallback cb) throws JqmPayloadException
{
switch (ji.getJD().getPathType())
{
case MAVEN:
return mavenResolver.resolve(ji);
case MEMORY:
return new URL[0];
case FS:
default:
return fsResolver.getLibraries(ji.getNode(), ji.getJD());
}
} | java |
public static int forceCleanup(Thread t)
{
int i = 0;
for (Map.Entry<ConnectionPool, Set<ConnPair>> e : conns.entrySet())
{
for (ConnPair c : e.getValue())
{
if (c.thread.equals(t))
{
try
{
// This will in turn remove it from the static Map.
c.conn.getHandler().invoke(c.conn, Connection.class.getMethod("close"), null);
}
catch (Throwable e1)
{
e1.printStackTrace();
}
i++;
}
}
}
return i;
} | java |
public static GlobalParameter create(DbConn cnx, String key, String value)
{
QueryResult r = cnx.runUpdate("globalprm_insert", key, value);
GlobalParameter res = new GlobalParameter();
res.id = r.getGeneratedId();
res.key = key;
res.value = value;
return res;
} | java |
public static String getParameter(DbConn cnx, String key, String defaultValue)
{
try
{
return cnx.runSelectSingle("globalprm_select_by_key", 3, String.class, key);
}
catch (NoResultException e)
{
return defaultValue;
}
} | java |
static JndiContext createJndiContext() throws NamingException
{
try
{
if (!NamingManager.hasInitialContextFactoryBuilder())
{
JndiContext ctx = new JndiContext();
NamingManager.setInitialContextFactoryBuilder(ctx);
return ctx;
}
else
{
return (JndiContext) NamingManager.getInitialContext(null);
}
}
catch (Exception e)
{
jqmlogger.error("Could not create JNDI context: " + e.getMessage());
NamingException ex = new NamingException("Could not initialize JNDI Context");
ex.setRootCause(e);
throw ex;
}
} | java |
private static ClassLoader getParentCl()
{
try
{
Method m = ClassLoader.class.getMethod("getPlatformClassLoader");
return (ClassLoader) m.invoke(null);
}
catch (NoSuchMethodException e)
{
// Java < 9, just use the bootstrap CL.
return null;
}
catch (Exception e)
{
throw new JqmInitError("Could not fetch Platform Class Loader", e);
}
} | java |
public static Properties loadProperties(String[] filesToLoad)
{
Properties p = new Properties();
InputStream fis = null;
for (String path : filesToLoad)
{
try
{
fis = Db.class.getClassLoader().getResourceAsStream(path);
if (fis != null)
{
p.load(fis);
jqmlogger.info("A jqm.properties file was found at {}", path);
}
}
catch (IOException e)
{
// We allow no configuration files, but not an unreadable configuration file.
throw new DatabaseException("META-INF/jqm.properties file is invalid", e);
}
finally
{
closeQuietly(fis);
}
}
// Overload the datasource name from environment variable if any (tests only).
String dbName = System.getenv("DB");
if (dbName != null)
{
p.put("com.enioka.jqm.jdbc.datasource", "jdbc/" + dbName);
}
// Done
return p;
} | java |
// Bootstrap sequence: select the DB adapter, load the SQL query cache,
// optionally run schema migrations, then loop until the schema version check
// succeeds (retrying every 10s while the database is unavailable or outdated).
private void init(boolean upgrade)
{
    initAdapter();
    initQueries();
    if (upgrade)
    {
        dbUpgrade();
    }

    // First contact with the DB is version checking (if no connection opened by pool).
    // If DB is in wrong version or not available, just wait for it to be ready.
    boolean versionValid = false;
    while (!versionValid)
    {
        try
        {
            checkSchemaVersion();
            versionValid = true;
        }
        catch (Exception e)
        {
            // Build a readable one-line reason including the direct cause, if any.
            String msg = e.getLocalizedMessage();
            if (e.getCause() != null)
            {
                msg += " - " + e.getCause().getLocalizedMessage();
            }
            jqmlogger.error("Database not ready: " + msg + ". Waiting for database...");
            try
            {
                Thread.sleep(10000);
            }
            catch (Exception e2)
            {
                // NOTE(review): InterruptedException is swallowed here, so this wait
                // loop cannot be cancelled by interruption - confirm this is intended.
            }
        }
    }
}
// Bring the database schema up to SCHEMA_VERSION by finding and applying a
// chain of migration scripts bundled as class path resources.
private void dbUpgrade()
{
    DbConn cnx = this.getConn();
    Map<String, Object> rs = null;
    int db_schema_version = 0;
    try
    {
        // Current schema version as recorded in the version table.
        rs = cnx.runSelectSingleRow("version_select_latest");
        db_schema_version = (Integer) rs.get("VERSION_D1");
    }
    catch (Exception e)
    {
        // Database is to be created, so version 0 is OK.
    }
    cnx.rollback();

    if (SCHEMA_VERSION > db_schema_version)
    {
        jqmlogger.warn("Database is being upgraded from version {} to version {}", db_schema_version, SCHEMA_VERSION);

        // Upgrade scripts are named from_to.sql with 5 padding (e.g. 00000_00003.sql)
        // We try to find the fastest path (e.g. a direct 00000_00005.sql for creating a version 5 schema from nothing)
        // This is a simplistic and non-optimal algorithm as we try only a single path (no going back)
        int loop_from = db_schema_version;
        int to = db_schema_version;
        List<String> toApply = new ArrayList<>();
        toApply.addAll(adapter.preSchemaCreationScripts());
        while (to != SCHEMA_VERSION)
        {
            boolean progressed = false;
            // Greedy search: try the largest jump from loop_from first.
            for (int loop_to = SCHEMA_VERSION; loop_to > db_schema_version; loop_to--)
            {
                String migrationFileName = String.format("/sql/%05d_%05d.sql", loop_from, loop_to);
                jqmlogger.debug("Trying migration script {}", migrationFileName);
                if (Db.class.getResource(migrationFileName) != null)
                {
                    toApply.add(migrationFileName);
                    to = loop_to;
                    loop_from = loop_to;
                    progressed = true;
                    break;
                }
            }
            if (!progressed)
            {
                // Dead end: no script starts at loop_from; the check below throws.
                break;
            }
        }
        if (to != SCHEMA_VERSION)
        {
            throw new DatabaseException(
                    "There is no migration path from version " + db_schema_version + " to version " + SCHEMA_VERSION);
        }

        // Apply the selected scripts in order, then record the new version.
        for (String s : toApply)
        {
            jqmlogger.info("Running migration script {}", s);
            ScriptRunner.run(cnx, s);
        }
        cnx.commit(); // Yes, really. For advanced DB!
        cnx.close(); // HSQLDB does not refresh its schema without this.
        cnx = getConn();
        cnx.runUpdate("version_insert", SCHEMA_VERSION, SCHEMA_COMPATIBLE_VERSION);
        cnx.commit();
        jqmlogger.info("Database is now up to date");
    }
    else
    {
        jqmlogger.info("Database is already up to date");
    }
    cnx.close();
}
private void initAdapter()
{
Connection tmp = null;
DatabaseMetaData meta = null;
try
{
tmp = _ds.getConnection();
meta = tmp.getMetaData();
product = meta.getDatabaseProductName().toLowerCase();
}
catch (SQLException e)
{
throw new DatabaseException("Cannot connect to the database", e);
}
finally
{
try
{
if (tmp != null)
{
tmp.close();
}
}
catch (SQLException e)
{
// Nothing to do.
}
}
DbAdapter newAdpt = null;
for (String s : ADAPTERS)
{
try
{
Class<? extends DbAdapter> clazz = Db.class.getClassLoader().loadClass(s).asSubclass(DbAdapter.class);
newAdpt = clazz.newInstance();
if (newAdpt.compatibleWith(meta))
{
adapter = newAdpt;
break;
}
}
catch (Exception e)
{
throw new DatabaseException("Issue when loading database adapter named: " + s, e);
}
}
if (adapter == null)
{
throw new DatabaseException("Unsupported database! There is no JQM database adapter compatible with product name " + product);
}
else
{
jqmlogger.info("Using database adapter {}", adapter.getClass().getCanonicalName());
}
} | java |
// Open a new pooled connection normalized for JQM use: manual commit and
// READ COMMITTED isolation. Wraps it in a DbConn. Throws DatabaseException on failure.
public DbConn getConn()
{
    Connection cnx = null;
    try
    {
        // NOTE: Thread.interrupted() also CLEARS the current thread's interrupt flag.
        Thread.interrupted(); // this is VERY sad. Needed for Oracle driver which otherwise fails spectacularly.
        cnx = _ds.getConnection();
        if (cnx.getAutoCommit())
        {
            cnx.setAutoCommit(false);
            cnx.rollback(); // To ensure no open transaction created by the pool before changing TX mode
        }
        if (cnx.getTransactionIsolation() != Connection.TRANSACTION_READ_COMMITTED)
        {
            cnx.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        }
        return new DbConn(this, cnx);
    }
    catch (SQLException e)
    {
        DbHelper.closeQuietly(cnx); // May have been left open when the pool has given us a failed connection.
        throw new DatabaseException(e);
    }
}
String getQuery(String key)
{
String res = this.adapter.getSqlText(key);
if (res == null)
{
throw new DatabaseException("Query " + key + " does not exist");
}
return res;
} | java |
public RuntimeParameter addParameter(String key, String value)
{
RuntimeParameter jp = new RuntimeParameter();
jp.setJi(this.getId());
jp.setKey(key);
jp.setValue(value);
return jp;
} | java |
void endOfRunDb()
{
DbConn cnx = null;
try
{
cnx = Helpers.getNewDbSession();
// Done: put inside history & remove instance from queue.
History.create(cnx, this.ji, this.resultStatus, endDate);
jqmlogger.trace("An History was just created for job instance " + this.ji.getId());
cnx.runUpdate("ji_delete_by_id", this.ji.getId());
cnx.commit();
}
catch (RuntimeException e)
{
endBlockDbFailureAnalysis(e);
}
finally
{
Helpers.closeQuietly(cnx);
}
} | java |
private Integer highlanderMode(JobDef jd, DbConn cnx)
{
if (!jd.isHighlander())
{
return null;
}
try
{
Integer existing = cnx.runSelectSingle("ji_select_existing_highlander", Integer.class, jd.getId());
return existing;
}
catch (NoResultException ex)
{
// Just continue, this means no existing waiting JI in queue.
}
// Now we need to actually synchronize through the database to avoid double posting
// TODO: use a dedicated table, not the JobDef one. Will avoid locking the configuration.
ResultSet rs = cnx.runSelect(true, "jd_select_by_id", jd.getId());
// Now we have a lock, just retry - some other client may have created a job instance recently.
try
{
Integer existing = cnx.runSelectSingle("ji_select_existing_highlander", Integer.class, jd.getId());
rs.close();
cnx.commit(); // Do not keep the lock!
return existing;
}
catch (NoResultException ex)
{
// Just continue, this means no existing waiting JI in queue. We keep the lock!
}
catch (SQLException e)
{
// Who cares.
jqmlogger.warn("Issue when closing a ResultSet. Transaction or session leak is possible.", e);
}
jqmlogger.trace("Highlander mode analysis is done: nor existing JO, must create a new one. Lock is hold.");
return null;
} | java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.