idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
19,600
/**
 * Returns the number of vectors contained in the given array: 1 for a rank-1
 * array, size(0) for a matrix, and the product of all leading dimensions
 * (every dimension except the last) for higher ranks.
 *
 * @param arr the array to inspect
 * @return the number of vectors in {@code arr}
 */
public static long numVectors(INDArray arr) {
    if (arr.rank() == 1)
        return 1;
    else if (arr.rank() == 2)
        return arr.size(0);
    else {
        // BUG FIX: accumulate in a long — arr.size(i) returns long and the
        // original int accumulator could silently overflow for large arrays
        long prod = 1;
        for (int i = 0; i < arr.rank() - 1; i++) {
            prod *= arr.size(i);
        }
        return prod;
    }
}
Return the number of vectors for an array.
19,601
/**
 * Calculates the slice offset for the tensor at the given index, i.e. how many
 * slices into the array the index-th tensor of the given shape begins.
 *
 * @param index       tensor index
 * @param arr         the array being sliced
 * @param tensorShape shape of each tensor
 * @return the slice offset for the tensor
 */
public static long sliceOffsetForTensor(int index, INDArray arr, int[] tensorShape) {
    final long elementsPerTensor = ArrayUtil.prodLong(tensorShape);
    final long elementsPerSlice = NDArrayMath.lengthPerSlice(arr);
    return index * elementsPerTensor / elementsPerSlice;
}
calculates the offset for a tensor
19,602
/**
 * Maps an index of a vector onto the corresponding linear offset in a tensor,
 * by scaling it with the element count of all dimensions not in {@code rank}.
 *
 * @param index the vector index to map
 * @param arr   the array whose shape is used
 * @param rank  the dimensions excluded from the product
 * @return the mapped linear index
 */
public static int mapIndexOntoTensor(int index, INDArray arr, int... rank) {
    final int remainingElements = ArrayUtil.prod(ArrayUtil.removeIndex(arr.shape(), rank));
    return index * remainingElements;
}
This maps an index of a vector on to a vector in the matrix that can be used for indexing in to a tensor
19,603
/**
 * Builds a pair-to-positionList table from a fingerprint: a hash map whose key
 * is the hashed pair and whose value is the list of positions sharing that pair.
 *
 * @param fingerprint the fingerprint bytes to extract pairs from
 * @return map of hashed pair -> positions at which it occurs
 */
public HashMap<Integer, List<Integer>> getPair_PositionList_Table(byte[] fingerprint) {
    List<int[]> pairPositionList = getPairPositionList(fingerprint);
    HashMap<Integer, List<Integer>> pair_positionList_table = new HashMap<>();
    for (int[] pair_position : pairPositionList) {
        // computeIfAbsent replaces the manual containsKey/get/put sequence
        pair_positionList_table.computeIfAbsent(pair_position[0], k -> new LinkedList<>())
                        .add(pair_position[1]);
    }
    return pair_positionList_table;
}
Get a pair-positionList table. It is a hash map in which the key is the hashed pair and the value is the list of positions, i.e. the table stores all positions that share the same hashed pair.
19,604
/**
 * Builds {@code numTrees} random-projection trees from the given dataset and
 * stores the dataset for later queries.
 *
 * @param x the dataset to index
 */
public void fit(INDArray x) {
    this.data = x;
    for (int treeIdx = 0; treeIdx < numTrees; treeIdx++) {
        RPTree newTree = new RPTree(data.columns(), maxSize, similarityFunction);
        newTree.buildTree(x);
        trees.add(newTree);
    }
}
Build the trees from the given dataset
19,605
/**
 * Queries the built trees for up to {@code n} nearest neighbors of each row in
 * {@code toQuery}.
 *
 * @param toQuery the query points
 * @param n       maximum number of neighbors to return
 * @return the query results
 */
public INDArray queryAll(INDArray toQuery, int n) {
    INDArray results = RPUtils.queryAll(toQuery, data, trees, n, similarityFunction);
    return results;
}
Query results up to length n nearest neighbors
19,606
/**
 * Converts a treebank node to a {@link Tree}, copying the node's value, type
 * (used for both label and type), begin/end span and treebank parse string.
 * Tags come from the node's own tags when present, otherwise the node type is
 * used as the single tag.
 * <p>
 * NOTE(review): the {@code labels} parameter is never read in this method —
 * confirm whether callers still need to pass it.
 *
 * @param node   the treebank node to convert
 * @param labels label lookup (currently unused here)
 * @return the converted tree
 * @throws Exception if token or tag extraction fails
 */
public static Tree toTree(TreebankNode node, Pair<String, MultiDimensionalMap<Integer, Integer, String>> labels)
                throws Exception {
    List<String> tokens = tokens(node);
    Tree ret = new Tree(tokens);
    ret.setValue(node.getNodeValue());
    ret.setLabel(node.getNodeType());
    ret.setType(node.getNodeType());
    ret.setBegin(node.getBegin());
    ret.setEnd(node.getEnd());
    ret.setParse(TreebankNodeUtil.toTreebankString(node));
    if (node.getNodeTags() != null)
        ret.setTags(tags(node));
    else
        ret.setTags(Arrays.asList(node.getNodeType()));
    return ret;
}
Converts a treebank node to a tree
19,607
/**
 * Returns the encoded length, in bytes, of an integer stored in the Hadoop
 * variable-length format: 1 byte for values in [-112, 127], otherwise one
 * header byte plus one byte per 8 significant bits of the magnitude.
 *
 * @param i the value that would be encoded
 * @return the number of bytes the encoding occupies
 */
public static int getVIntSize(long i) {
    // Small values are stored directly in a single byte
    if (i >= -112 && i <= 127) {
        return 1;
    }
    // Negative values are encoded via their one's complement
    final long magnitude = (i < 0) ? ~i : i;
    final int dataBits = Long.SIZE - Long.numberOfLeadingZeros(magnitude);
    return (dataBits + 7) / 8 + 1;
}
Get the encoded length if an integer is stored in a variable - length format
19,608
/**
 * Reads an Enum value from DataInput. Enums are serialized by their String
 * name, so this reads the name and resolves it against the given enum type.
 *
 * @param in       source to read the name from
 * @param enumType the enum class to resolve against
 * @return the resolved enum constant
 * @throws IOException if reading the name fails
 */
public static <T extends Enum<T>> T readEnum(DataInput in, Class<T> enumType) throws IOException {
    String name = Text.readString(in);
    return Enum.valueOf(enumType, name);
}
Read an Enum value from DataInput Enums are read and written using String values .
19,609
/**
 * Writes the String name of an enum constant to DataOutput, matching the
 * format expected by {@code readEnum}.
 *
 * @param out     destination to write to
 * @param enumVal the enum constant whose name is written
 * @throws IOException if writing fails
 */
public static void writeEnum(DataOutput out, Enum<?> enumVal) throws IOException {
    String name = enumVal.name();
    Text.writeString(out, name);
}
Writes the String name of an enum constant to DataOutput.
19,610
/**
 * Serializes the given writables into a single byte array by writing each one
 * to an in-memory DataOutputBuffer.
 * <p>
 * Note: the buffer's backing array is returned directly, so it may be longer
 * than the number of valid serialized bytes.
 *
 * @param writables the writables to serialize, in order
 * @return the backing byte array containing the serialized data
 */
public static byte[] toByteArray(Writable... writables) {
    final DataOutputBuffer buffer = new DataOutputBuffer();
    try {
        for (Writable writable : writables) {
            writable.write(buffer);
        }
        buffer.close();
    } catch (IOException e) {
        throw new RuntimeException("Fail to convert writables to a byte array", e);
    }
    return buffer.getData();
}
Convert writables to a byte array
19,611
/**
 * Replicates the given array to all devices: the device bound to the current
 * thread keeps the array itself, every other device receives a copy made via
 * the affinity manager.
 *
 * @param array the array to replicate; ignored if null
 */
public void broadcast(INDArray array) {
    if (array == null)
        return;

    // Make sure all pending ops touching this array have executed first
    Nd4j.getExecutioner().commit();

    // Temporarily disable the profiler's locality check while we move data
    // across devices, restoring the previous setting afterwards
    val config = OpProfiler.getInstance().getConfig();
    val locality = config.isCheckLocality();
    if (locality)
        config.setCheckLocality(false);

    int numDevices = Nd4j.getAffinityManager().getNumberOfDevices();
    for (int i = 0; i < numDevices; i++) {
        if (Nd4j.getAffinityManager().getDeviceForCurrentThread() == i) {
            // This device already owns the array — store it directly
            set(i, array);
        } else {
            set(i, Nd4j.getAffinityManager().replicateToDevice(i, array));
        }
    }
    config.setCheckLocality(locality);
}
This method duplicates array and stores it to all devices
19,612
/**
 * Ensures HOST memory contains the latest data for the given allocation point,
 * copying device memory back to the host when the host-side copy is stale.
 *
 * @param point the allocation to synchronize
 */
public void synchronizeToHost(AllocationPoint point) {
    if (!point.isActualOnHostSide()) {
        CudaContext context = (CudaContext) allocator.getDeviceContext().getContext();

        // Constant buffers are never modified on device, so no need to wait
        if (!point.isConstant())
            waitTillFinished(point);

        // Re-check after waiting: the host copy may have become current meanwhile
        if (point.getAllocationStatus() == AllocationStatus.DEVICE && !point.isActualOnHostSide()) {
            long perfD = PerformanceTracker.getInstance().helperStartTransaction();

            // Async D2H copy on the special stream; 0 return code signals failure
            if (nativeOps.memcpyAsync(point.getHostPointer(), point.getDevicePointer(),
                            AllocationUtils.getRequiredMemory(point.getShape()),
                            CudaConstants.cudaMemcpyDeviceToHost, context.getSpecialStream()) == 0)
                throw new IllegalStateException("MemcpyAsync failed: " + point.getShape());

            commitTransfer(context.getSpecialStream());

            PerformanceTracker.getInstance().helperRegisterTransaction(point.getDeviceId(), perfD,
                            point.getNumberOfBytes(), MemcpyDirection.DEVICE_TO_HOST);
        }

        // Mark the host copy as up to date
        point.tickHostRead();
    }
}
This method makes sure HOST memory contains latest data from GPU
19,613
/**
 * Extracts the mixture density components (alpha, sigma, mu) from the flat
 * network output through Nd4j operations in order to increase performance.
 * Per-example layout of the output: [mixtures] alpha values, then [mixtures]
 * sigma values, then [mixtures * labelWidth] mu values. Alpha is normalized
 * via softmax and sigma is exponentiated so both are valid even though the
 * raw network output is unbounded.
 *
 * @param output network output of shape (batch, (labels + 2) * mixtures)
 * @return the extracted components
 * @throws IllegalArgumentException if the output width does not match the
 *         configured label width and mixture count
 */
public MixtureDensityComponents extractComponents(INDArray output) {
    long outputSize = output.size(1);
    if (outputSize != (mLabelWidth + 2) * mMixtures) {
        throw new IllegalArgumentException("Network output size " + outputSize
                        + " must be (labels+2)*mixtures where labels = " + mLabelWidth + " and mixtures = "
                        + mMixtures);
    }

    MixtureDensityComponents mdc = new MixtureDensityComponents();

    // Slice the flat output into its three component regions
    mdc.alpha = output.get(NDArrayIndex.all(), NDArrayIndex.interval(0, mMixtures));
    mdc.sigma = output.get(NDArrayIndex.all(), NDArrayIndex.interval(mMixtures, 2 * mMixtures));
    mdc.mu = output.get(NDArrayIndex.all(), NDArrayIndex.interval(2 * mMixtures, (mLabelWidth + 2) * mMixtures))
                    .reshape(output.size(0), mMixtures, mLabelWidth);

    // Softmax so the mixture weights sum to 1; exp so sigma is strictly positive
    mdc.alpha = Nd4j.getExecutioner().exec(new OldSoftMax(mdc.alpha));
    mdc.sigma = Transforms.exp(mdc.sigma);

    return mdc;
}
Extracts the mixture density components (alpha, sigma, mu) from the network output through Nd4j operations in order to increase performance.
19,614
/**
 * Returns the per-example score of the given labels against the network output:
 * the activation is applied to preOutput, the mixture density components are
 * extracted, and the negative log likelihood of the labels under the resulting
 * gaussian mixture is computed (optionally masked).
 *
 * @param labels       the target labels
 * @param preOutput    raw network output (before activation)
 * @param activationFn activation applied to a copy of preOutput
 * @param mask         optional mask applied to the score array, may be null
 * @return the per-example score array
 */
public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray castLabels = labels.castTo(preOutput.dataType());
    INDArray activated = activationFn.getActivation(preOutput.dup(), false);
    MixtureDensityComponents components = extractComponents(activated);
    INDArray scores = negativeLogLikelihood(castLabels, components.alpha, components.mu, components.sigma);
    if (mask != null) {
        LossUtil.applyMask(scores, mask);
    }
    return scores;
}
This method returns the score for each of the given outputs against the given set of labels . For a mixture density network this is done by extracting the alpha mu and sigma components of each gaussian and computing the negative log likelihood that the labels fall within a linear combination of these gaussian distributions . The smaller the negative log likelihood the higher the probability that the given labels actually would fall within the distribution . Therefore by minimizing the negative log likelihood we get to a position of highest probability that the gaussian mixture explains the phenomenon .
19,615
/**
 * Computes the distance from the query vector x to each candidate row of X and
 * returns (distance, candidate index) pairs sorted by ascending distance.
 * Consecutive duplicate candidate indices are only scored once.
 *
 * @param x                  the query vector
 * @param X                  the dataset, one example per slice
 * @param candidates         candidate row indices (duplicates assumed adjacent)
 * @param similarityFunction name of the distance/similarity function
 * @return (distance, index) pairs sorted by distance
 */
public static List<Pair<Double, Integer>> sortCandidates(INDArray x, INDArray X, List<Integer> candidates,
                String similarityFunction) {
    int prevIdx = -1;
    List<Pair<Double, Integer>> ret = new ArrayList<>();
    for (int i = 0; i < candidates.size(); i++) {
        int candidate = candidates.get(i);
        if (candidate != prevIdx) {
            ret.add(Pair.of(computeDistance(similarityFunction, X.slice(candidate), x), candidate));
        }
        // BUG FIX: previously assigned the loop counter i here, so the duplicate
        // check compared candidate values against indices and adjacent duplicate
        // candidates could be scored twice
        prevIdx = candidate;
    }

    Collections.sort(ret, new Comparator<Pair<Double, Integer>>() {
        public int compare(Pair<Double, Integer> left, Pair<Double, Integer> right) {
            return Doubles.compare(left.getFirst(), right.getFirst());
        }
    });
    return ret;
}
Get the sorted distances given the query vector input data given the list of possible search candidates
19,616
/**
 * Descends the tree from the given node: at each level the point's distance to
 * that level's hyperplane is compared against the node's median to choose the
 * left (distance <= median) or right branch, until a leaf is reached.
 *
 * @param from               node to start the descent from
 * @param planes             the hyperplanes indexed by depth
 * @param x                  the query point
 * @param similarityFunction name of the distance/similarity function
 * @return the leaf node reached
 */
public static RPNode query(RPNode from, RPHyperPlanes planes, INDArray x, String similarityFunction) {
    RPNode current = from;
    while (current.getLeft() != null || current.getRight() != null) {
        INDArray plane = planes.getHyperPlaneAt(current.getDepth());
        double distance = computeDistance(similarityFunction, x, plane);
        current = (distance <= current.getMedian()) ? current.getLeft() : current.getRight();
    }
    return current;
}
Query the tree starting from the given node using the given hyper plane and similarity function
19,617
/**
 * Recursively builds the random-projection tree: nodes with at most maxSize
 * indices become leaves; otherwise the node's points are split at the median
 * distance to this depth's hyperplane and the two halves are built recursively.
 * A node whose split leaves one side empty also becomes a leaf.
 *
 * @param tree               the tree being built
 * @param from               the node to (possibly) split
 * @param planes             hyperplanes indexed by depth, extended on demand
 * @param X                  the dataset, one example per slice
 * @param maxSize            maximum number of indices in a leaf
 * @param depth              current depth of {@code from}
 * @param similarityFunction name of the distance/similarity function
 */
public static void buildTree(RPTree tree, RPNode from, RPHyperPlanes planes, INDArray X, int maxSize, int depth,
                String similarityFunction) {
    if (from.getIndices().size() <= maxSize) {
        // Small enough to be a leaf
        slimNode(from);
        return;
    }

    RPNode left = new RPNode(tree, depth + 1);
    RPNode right = new RPNode(tree, depth + 1);

    // Add a hyperplane for this depth if one does not exist yet
    if (planes.getWholeHyperPlane() == null || depth >= planes.getWholeHyperPlane().rows()) {
        planes.addRandomHyperPlane();
    }

    INDArray hyperPlane = planes.getHyperPlaneAt(depth);

    // PERF FIX: compute each point's distance to the hyperplane exactly once;
    // the original recomputed every distance a second time in the partition loop
    int numIndices = from.getIndices().size();
    List<Double> distances = new ArrayList<>(numIndices);
    for (int i = 0; i < numIndices; i++) {
        distances.add(computeDistance(similarityFunction, hyperPlane, X.slice(from.getIndices().get(i))));
    }

    // Sort a copy so the per-index association of 'distances' is preserved
    List<Double> sortedDistances = new ArrayList<>(distances);
    Collections.sort(sortedDistances);
    from.setMedian(sortedDistances.get(sortedDistances.size() / 2));

    for (int i = 0; i < numIndices; i++) {
        if (distances.get(i) <= from.getMedian()) {
            left.getIndices().add(from.getIndices().get(i));
        } else {
            right.getIndices().add(from.getIndices().get(i));
        }
    }

    // A degenerate split (all points on one side) terminates recursion
    if (left.getIndices().isEmpty() || right.getIndices().isEmpty()) {
        slimNode(from);
        return;
    }

    from.setLeft(left);
    from.setRight(right);
    slimNode(from);
    buildTree(tree, left, planes, X, maxSize, depth + 1, similarityFunction);
    buildTree(tree, right, planes, X, maxSize, depth + 1, similarityFunction);
}
Initialize the tree given the input parameters
19,618
/**
 * Releases the index list of an internal node: once a node has both children,
 * its points live in the subtree and the node's own index list is cleared.
 * Leaf nodes keep their indices.
 *
 * @param node the node to slim
 */
public static void slimNode(RPNode node) {
    boolean isInternal = node.getRight() != null && node.getLeft() != null;
    if (isInternal) {
        node.getIndices().clear();
    }
}
Prune indices from the given node when it is not a leaf, i.e. when both children are present.
19,619
/**
 * Tokenizes the input sentence by building a Viterbi lattice and walking its
 * best path, creating one token per surviving node.
 *
 * @param offset offset added to each token's start index (position of this
 *               text within the larger input)
 * @param text   the text to tokenize
 * @return the tokens on the best path
 */
private <T extends TokenBase> List<T> createTokenList(int offset, String text) {
    ArrayList<T> result = new ArrayList<>();
    ViterbiLattice lattice = viterbiBuilder.build(text);
    List<ViterbiNode> bestPath = viterbiSearcher.search(lattice);

    for (ViterbiNode node : bestPath) {
        int wordId = node.getWordId();
        // Skip KNOWN nodes with no dictionary entry (presumably sentence
        // boundary markers — TODO confirm)
        if (node.getType() == ViterbiNode.Type.KNOWN && wordId == -1) {
            continue;
        }
        @SuppressWarnings("unchecked")
        T token = (T) tokenFactory.createToken(wordId, node.getSurface(), node.getType(),
                        offset + node.getStartIndex(), dictionaryMap.get(node.getType()));
        result.add(token);
    }

    return result;
}
Tokenize input sentence .
19,620
/**
 * Removes the given workspace from this thread's backing map. Null and dummy
 * workspaces are ignored.
 *
 * @param workspace the workspace to destroy
 */
public void destroyWorkspace(MemoryWorkspace workspace) {
    boolean skip = workspace == null || workspace instanceof DummyWorkspace;
    if (skip) {
        return;
    }
    backingMap.get().remove(workspace.getId());
}
This method destroys given workspace
19,621
/**
 * Destroys this thread's default workspace, if any, by removing it from the
 * backing map.
 */
public void destroyWorkspace() {
    ensureThreadExistense();
    // FIX: the original fetched the workspace into an unused local before
    // removing it; the lookup served no purpose
    backingMap.get().remove(MemoryWorkspace.DEFAULT_ID);
}
This method destroys the default workspace, if any.
19,622
/**
 * Destroys every workspace allocated in the current thread, then triggers a GC
 * pass via the memory manager. Iterates over a snapshot so removal during
 * iteration is safe.
 */
public void destroyAllWorkspacesForCurrentThread() {
    ensureThreadExistense();
    // Snapshot first: destroyWorkspace mutates the backing map
    List<MemoryWorkspace> snapshot = new ArrayList<>(backingMap.get().values());
    for (MemoryWorkspace workspace : snapshot) {
        destroyWorkspace(workspace);
    }
    Nd4j.getMemoryManager().invokeGc();
}
This method destroys all workspaces allocated in current thread
19,623
/**
 * Divides each row of the array by that row's maximum value, in place.
 *
 * @param toScale the array to normalize row-wise
 */
public static void scaleByMax(INDArray toScale) {
    INDArray rowMaxes = toScale.max(1);
    int numRows = toScale.rows();
    for (int row = 0; row < numRows; row++) {
        double divisor = rowMaxes.getDouble(row);
        toScale.putRow(row, toScale.getRow(row).divi(divisor));
    }
}
Divides each row by its max
19,624
/**
 * Given predictions from the trained model, returns a string listing, for each
 * batch row, the top five predicted labels with their probabilities.
 *
 * @param predictions array of shape (batch, numLabels)
 * @return human-readable description of the top-5 predictions per batch row
 * @throws IllegalStateException if size(1) does not match the label count
 */
public String decodePredictions(INDArray predictions) {
    Preconditions.checkState(predictions.size(1) == predictionLabels.size(), "Invalid input array:"
                    + " expected array with size(1) equal to numLabels (%s), got array with shape %s",
                    predictionLabels.size(), predictions.shape());

    StringBuilder predictionDescription = new StringBuilder();
    int[] top5 = new int[5];
    float[] top5Prob = new float[5];
    for (int batch = 0; batch < predictions.size(0); batch++) {
        predictionDescription.append("Predictions for batch ");
        if (predictions.size(0) > 1) {
            predictionDescription.append(batch);
        }
        predictionDescription.append(" :");
        INDArray currentBatch = predictions.getRow(batch).dup();
        // BUG FIX: 'i' was declared once outside the batch loop, so only the
        // first batch ever produced its top five; reset it per batch
        for (int i = 0; i < 5; i++) {
            top5[i] = Nd4j.argMax(currentBatch, 1).getInt(0);
            // BUG FIX: currentBatch is a single row, so always index row 0
            // (the original indexed row 'batch', out of range for batch > 0)
            top5Prob[i] = currentBatch.getFloat(0, top5[i]);
            // Zero out the max so the next iteration finds the runner-up
            currentBatch.putScalar(0, top5[i], 0);
            predictionDescription.append("\n\t").append(String.format("%3f", top5Prob[i] * 100)).append("%, ")
                            .append(predictionLabels.get(top5[i]));
        }
    }
    return predictionDescription.toString();
}
Given predictions from the trained model this method will return a string listing the top five matches and the respective probabilities
19,625
/**
 * Loads the means and standard deviations from the file system. Expected file
 * order: feature means, feature stds, and — only when label fitting is enabled
 * — label means and label stds.
 *
 * @param files the statistics files, in the order above
 * @throws IOException if any file cannot be read
 */
public void load(File... files) throws IOException {
    setFeatureStats(DistributionStats.load(files[0], files[1]));
    if (!isFitLabel()) {
        return;
    }
    setLabelStats(DistributionStats.load(files[2], files[3]));
}
Load the means and standard deviations from the file system
19,626
/**
 * Returns the storage directory for the given resource type and name under the
 * base directory, creating it (and any missing parents) if necessary.
 *
 * @param resourceType the type of resource
 * @param resourceName the name of the resource
 * @return the (created) directory
 */
public static File getDirectory(ResourceType resourceType, String resourceName) {
    File typeDir = new File(baseDirectory, resourceType.resourceName());
    File resourceDir = new File(typeDir, resourceName);
    resourceDir.mkdirs();
    return resourceDir;
}
Get the storage location for the specified resource type and resource name
19,627
/**
 * Applies any buffered updates to the parameters via the step function and
 * clears the update buffer. No-op unless parameters, the step function, and
 * buffered updates are all present. (Used on the master side.)
 *
 * @param originatorId id of the originator (unused here)
 * @param taskId       id of the task (unused here)
 */
public void finishTraining(long originatorId, long taskId) {
    boolean ready = params != null && stepFunction != null;
    if (ready && hasSomething.get()) {
        stepFunction.step(params, updates);
        updates.assign(0.0);
    }
}
This method is used on Master only applies buffered updates to params
19,628
/**
 * Reads the entire input stream into the reusable instance buffer and returns
 * it wrapped in the reusable Mat, growing the buffer as needed. Not
 * thread-safe: {@code buffer} and {@code bufferMat} are shared instance state.
 *
 * @param is the stream to read
 * @return the reusable Mat wrapping the stream's bytes
 * @throws IOException if reading fails
 */
private Mat streamToMat(InputStream is) throws IOException {
    if (buffer == null) {
        // First use: read everything and wrap it
        buffer = IOUtils.toByteArray(is);
        bufferMat = new Mat(buffer);
        return bufferMat;
    } else {
        int numReadTotal = is.read(buffer);
        // NOTE(review): read() returns -1 for an already-exhausted stream, which
        // would be passed below as a negative length — confirm callers never
        // hand in an empty stream
        if (numReadTotal < buffer.length) {
            // Stream fit within the existing buffer: refresh the Mat's bytes
            // and shrink its column count to the valid length
            bufferMat.data().put(buffer, 0, numReadTotal);
            bufferMat.cols(numReadTotal);
            return bufferMat;
        }

        // Buffer was filled completely — keep growing and reading until EOF
        int numReadCurrent = numReadTotal;
        while (numReadCurrent != -1) {
            byte[] oldBuffer = buffer;
            if (oldBuffer.length == Integer.MAX_VALUE) {
                throw new IllegalStateException("Cannot read more than Integer.MAX_VALUE bytes");
            }
            // Grow roughly 2x, at least MIN_BUFFER_STEP_SIZE, capped at MAX_VALUE
            long increase = Math.max(buffer.length, MIN_BUFFER_STEP_SIZE);
            int newBufferLength = (int) Math.min(Integer.MAX_VALUE, buffer.length + increase);
            buffer = new byte[newBufferLength];
            System.arraycopy(oldBuffer, 0, buffer, 0, oldBuffer.length);
            numReadCurrent = is.read(buffer, oldBuffer.length, buffer.length - oldBuffer.length);
            if (numReadCurrent > 0) {
                numReadTotal += numReadCurrent;
            }
        }
        // NOTE(review): the new Mat wraps the whole (possibly over-allocated)
        // buffer, so it may include trailing garbage beyond numReadTotal bytes
        bufferMat = new Mat(buffer);
        return bufferMat;
    }
}
Read the stream into the reusable byte buffer and return it wrapped as a Mat.
19,629
/**
 * Reads an image file into an ImageWritable. OpenCV's imdecode is tried first;
 * if it cannot decode the data, Leptonica's pixReadMem is used as a fallback.
 *
 * @param f the image file to read
 * @return the decoded image wrapped in an ImageWritable
 * @throws IOException if neither decoder can handle the data
 */
public ImageWritable asWritable(File f) throws IOException {
    try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f))) {
        Mat mat = streamToMat(bis);
        Mat image = imdecode(mat, IMREAD_ANYDEPTH | IMREAD_ANYCOLOR);
        if (image == null || image.empty()) {
            // Fallback decoder for formats OpenCV does not support
            PIX pix = pixReadMem(mat.data(), mat.cols());
            if (pix == null) {
                throw new IOException("Could not decode image from input stream");
            }
            image = convert(pix);
            pixDestroy(pix);
        }

        ImageWritable writable = new ImageWritable(converter.convert(image));
        return writable;
    }
}
Convert an image file to an ImageWritable.
19,630
/**
 * Converts an ImageWritable to an INDArray by first converting its frame to an
 * OpenCV Mat and then delegating to the Mat overload.
 *
 * @param writable the image to convert
 * @return the image as an INDArray
 * @throws IOException if conversion fails
 */
public INDArray asMatrix(ImageWritable writable) throws IOException {
    final Mat mat = converter.convert(writable.getFrame());
    return asMatrix(mat);
}
Convert ImageWritable to INDArray
19,631
/**
 * Converts an INDArray to a JavaCV Frame. Only intended for images with rank 3.
 *
 * @param array    the image data
 * @param dataType the target data type, translated to an OpenCV Mat depth
 * @return the converted frame
 */
public Frame asFrame(INDArray array, int dataType) {
    int matDepth = OpenCVFrameConverter.getMatDepth(dataType);
    return converter.convert(asMat(array, matDepth));
}
Converts an INDArray to a JavaCV Frame . Only intended for images with rank 3 .
19,632
/**
 * Reads a multipage TIFF from memory and loads it into an INDArray whose
 * layout depends on multiPageMode: MINIBATCH stacks pages along dimension 0,
 * CHANNELS stacks pages along dimension 1, and FIRST loads only the first page
 * as a single (1, 1, h, w) image.
 *
 * @param bytes  the raw TIFF bytes
 * @param length number of valid bytes
 * @return the pages loaded into an INDArray
 * @throws UnsupportedOperationException for an unrecognized multiPageMode
 */
private INDArray asMatrix(BytePointer bytes, long length) throws IOException {
    PIXA pixa;
    pixa = pixaReadMemMultipageTiff(bytes, length);
    INDArray data;
    INDArray currentD;
    INDArrayIndex[] index = null;
    // Allocate the output with a mode-dependent layout; page dimensions are
    // taken from the first page (assumes all pages share one size — TODO confirm)
    switch (this.multiPageMode) {
        case MINIBATCH:
            // (pages, 1, h, w): one page per minibatch example
            data = Nd4j.create(pixa.n(), 1, pixa.pix(0).h(), pixa.pix(0).w());
            break;
        case CHANNELS:
            // (1, pages, h, w): one page per channel
            data = Nd4j.create(1, pixa.n(), pixa.pix(0).h(), pixa.pix(0).w());
            break;
        case FIRST:
            // Only the first page is used; convert, copy, and return immediately
            data = Nd4j.create(1, 1, pixa.pix(0).h(), pixa.pix(0).w());
            PIX pix = pixa.pix(0);
            currentD = asMatrix(convert(pix));
            pixDestroy(pix);
            index = new INDArrayIndex[] {NDArrayIndex.point(0), NDArrayIndex.point(0), NDArrayIndex.all(),
                            NDArrayIndex.all()};
            data.put(index, currentD.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()));
            return data;
        default:
            throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode);
    }
    // Convert each page and copy it into its mode-dependent slot
    for (int i = 0; i < pixa.n(); i++) {
        PIX pix = pixa.pix(i);
        currentD = asMatrix(convert(pix));
        pixDestroy(pix);
        switch (this.multiPageMode) {
            case MINIBATCH:
                index = new INDArrayIndex[] {NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(),
                                NDArrayIndex.all()};
                break;
            case CHANNELS:
                index = new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.point(i), NDArrayIndex.all(),
                                NDArrayIndex.all()};
                break;
            default:
                throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode);
        }
        data.put(index, currentD.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()));
    }
    return data;
}
Read multipage tiff and load into INDArray
19,633
/**
 * Resets the learning-related configs of the layer to their defaults, so that
 * when instantiated with a global neural network configuration the global
 * values are used instead. For internal use with the transfer learning API;
 * users should not have to call this method directly.
 */
public void resetLayerDefaultConfig() {
    this.setIUpdater(null);
    this.setWeightInitFn(null);
    this.setBiasInit(Double.NaN);
    this.setGainInit(Double.NaN);
    this.regularization = null;
    this.regularizationBias = null;
    this.setGradientNormalization(GradientNormalization.None);
    this.setGradientNormalizationThreshold(1.0);
    // NOTE(review): iUpdater was already nulled via setIUpdater(null) above —
    // this direct assignment looks redundant unless the setter has extra
    // side effects; confirm before removing
    this.iUpdater = null;
    this.biasUpdater = null;
}
Reset the learning related configs of the layer to default . When instantiated with a global neural network configuration the parameters specified in the neural network configuration will be used . For internal use with the transfer learning API . Users should not have to call this method directly .
19,634
/**
 * Executes Python code via CPython's PyRun_SimpleStringFlags, wrapping it in a
 * per-thread function first. Acquires the GIL before running and always
 * releases it afterwards, including on failure.
 *
 * @param code the Python code to execute
 * @throws RuntimeException if the interpreter reports a non-zero result
 */
public static void exec(String code) {
    code = getFunctionalCode("__f_" + Thread.currentThread().getId(), code);
    acquireGIL();
    try {
        log.info("CPython: PyRun_SimpleStringFlag()");
        log.info(code);
        int result = PyRun_SimpleStringFlags(code, null);
        if (result != 0) {
            PyErr_Print();
            throw new RuntimeException("exec failed");
        }
        log.info("Exec done");
    } finally {
        // BUG FIX: the original only released the GIL on the success path,
        // leaving it held forever when exec failed
        releaseGIL();
    }
}
Executes python code . Also manages python thread state .
19,635
/**
 * Asserts the two arrays have the same length, allowing either to be a scalar
 * (length 1, broadcastable).
 *
 * @param x first array
 * @param z second array
 * @throws IllegalStateException if the lengths differ and neither is 1
 */
public static void assertSameLength(INDArray x, INDArray z) {
    final long lengthX = x.length();
    final long lengthZ = z.length();
    if (lengthX == lengthZ || lengthX == 1 || lengthZ == 1) {
        return;
    }
    throw new IllegalStateException("Mis matched lengths: [" + x.length() + "] != [" + z.length() + "] - "
                    + "Array 1 shape: " + Arrays.toString(x.shape()) + ", array 2 shape: "
                    + Arrays.toString(z.shape()));
}
Asserts both arrays be the same length
19,636
/**
 * Loads a TensorFlow session from a SavedModel and populates the supplied maps
 * with the input and output tensor names declared by the model's signature def.
 *
 * @param savedModelConfig path, model tag and signature key of the saved model
 * @param options          session options
 * @param runOptions       run options for TF_LoadSessionFromSavedModel
 * @param graph            graph the model is loaded into
 * @param inputsMap        filled with signature input name -> tensor name
 * @param outputsMap       filled with signature output name -> tensor name
 * @param status           TF status object used for error reporting
 * @return the loaded session
 * @throws IllegalStateException if loading or meta-graph parsing fails
 */
public TF_Session loadSavedModel(SavedModelConfig savedModelConfig, TF_SessionOptions options, TF_Buffer runOptions,
                TF_Graph graph, Map<String, String> inputsMap, Map<String, String> outputsMap, TF_Status status) {
    TF_Buffer metaGraph = TF_Buffer.newBuffer();
    TF_Session session = TF_LoadSessionFromSavedModel(options, runOptions,
                    new BytePointer(savedModelConfig.getSavedModelPath()),
                    new BytePointer(savedModelConfig.getModelTag()), 1, graph, metaGraph, status);
    if (TF_GetCode(status) != TF_OK) {
        throw new IllegalStateException("ERROR: Unable to import model " + TF_Message(status).getString());
    }

    // Parse the meta graph protobuf returned by the loader
    MetaGraphDef metaGraphDef;
    try {
        metaGraphDef = MetaGraphDef.parseFrom(metaGraph.data().capacity(metaGraph.length()).asByteBuffer());
    } catch (InvalidProtocolBufferException ex) {
        throw new IllegalStateException("ERROR: Unable to import model " + ex);
    }

    // Look up the requested signature and expose its input/output tensor names
    Map<String, SignatureDef> signatureDefMap = metaGraphDef.getSignatureDefMap();
    SignatureDef signatureDef = signatureDefMap.get(savedModelConfig.getSignatureKey());

    Map<String, TensorInfo> inputs = signatureDef.getInputsMap();
    for (Map.Entry<String, TensorInfo> e : inputs.entrySet()) {
        inputsMap.put(e.getKey(), e.getValue().getName());
    }

    Map<String, TensorInfo> outputs = signatureDef.getOutputsMap();
    for (Map.Entry<String, TensorInfo> e : outputs.entrySet()) {
        outputsMap.put(e.getKey(), e.getValue().getName());
    }

    return session;
}
Load a session based on the saved model
19,637
/**
 * Leverages all cached forward-pass state arrays to the workspace with the
 * given id, replacing each field with its leveraged counterpart. Null fields
 * are skipped.
 *
 * @param id the target workspace id
 */
public void leverageTo(String id) {
    if (fwdPassOutput != null)
        fwdPassOutput = fwdPassOutput.leverageTo(id);
    // FIX: the original repeated the fwdPassOutputAsArrays loop twice verbatim,
    // leveraging those arrays a second time for no reason
    fwdPassOutputAsArrays = leverageAll(fwdPassOutputAsArrays, id);
    memCellState = leverageAll(memCellState, id);
    memCellActivations = leverageAll(memCellActivations, id);
    iz = leverageAll(iz, id);
    ia = leverageAll(ia, id);
    fa = leverageAll(fa, id);
    oa = leverageAll(oa, id);
    ga = leverageAll(ga, id);
    fz = leverageAll(fz, id);
    oz = leverageAll(oz, id);
    gz = leverageAll(gz, id);
    if (lastAct != null)
        lastAct = lastAct.leverageTo(id);
    if (lastMemCell != null)
        lastMemCell = lastMemCell.leverageTo(id);
}

/** Leverages every element of {@code arrs} to workspace {@code id} in place; null-safe. */
private static INDArray[] leverageAll(INDArray[] arrs, String id) {
    if (arrs != null) {
        for (int i = 0; i < arrs.length; i++) {
            arrs[i] = arrs[i].leverageTo(id);
        }
    }
    return arrs;
}
This method is OPTIONAL and written mostly for future use
19,638
/**
 * Finds the best path through the input lattice: path costs are calculated,
 * then the best path is recovered by backtracking from the end node.
 *
 * @param lattice the Viterbi lattice to search
 * @return the nodes of the best path
 */
public List<ViterbiNode> search(ViterbiLattice lattice) {
    final ViterbiNode[][] endIndexArr = calculatePathCosts(lattice);
    return backtrackBestPath(endIndexArr[0][0]);
}
Find best path from input lattice .
19,639
/**
 * Builds the shallow vocabulary from element-id counts and constructs its
 * Huffman tree, applying the Huffman indexes back onto the cache.
 *
 * @param counter element id -> occurrence count
 * @return the populated, Huffman-indexed vocabulary cache
 */
protected VocabCache<ShallowSequenceElement> buildShallowVocabCache(Counter<Long> counter) {
    VocabCache<ShallowSequenceElement> cache = new AbstractCache<>();
    for (Long elementId : counter.keySet()) {
        cache.addToken(new ShallowSequenceElement(counter.getCount(elementId), elementId));
    }

    Huffman huffman = new Huffman(cache.vocabWords());
    huffman.build();
    huffman.applyIndexes(cache);
    return cache;
}
This method builds the shallow vocabulary and its Huffman tree.
19,640
/**
 * Allocates (if needed) and returns the array for this VARIABLE-type variable,
 * creating it with the variable's weight initialization scheme and associating
 * it with this variable in the SameDiff instance. If an array already exists
 * for the variable name, that existing array is returned instead.
 *
 * @return the variable's array
 * @throws IllegalStateException if this variable is not of type VARIABLE
 */
public INDArray storeAndAllocateNewArray() {
    Preconditions.checkState(variableType == VariableType.VARIABLE, "Unable to allocate and store array for variable of type %s: only"
                    + " VARIABLE type variables can be initialized using this method", variableType);
    if (!sameDiff.arrayAlreadyExistsForVarName(varName)) {
        long[] shape = getShape();
        INDArray arr = getWeightInitScheme().create(dataType(), shape);
        sameDiff.associateArrayWithVariable(arr, this);
        if (log.isTraceEnabled()) {
            log.trace("Generated and stored new array for variable \"{}\": shape {}", getVarName(),
                            Arrays.toString(arr.shape()));
        }
        return arr;
    }

    // Array already exists — return the stored one
    INDArray ret = getArr();
    return ret;
}
Allocate and return a new array based on the vertex id and weight initialization .
19,641
/**
 * Returns the shape of this variable. For a placeholder without an array, the
 * declared shape (or an empty shape) is returned; otherwise the shape recorded
 * in the SameDiff instance is used, falling back to the array's shape when the
 * recorded shape is absent.
 *
 * @return the variable's shape, or null if unknown
 */
public long[] getShape() {
    if (variableType == VariableType.PLACEHOLDER && getArr() == null) {
        return (shape != null) ? shape : new long[0];
    }

    long[] recordedShape = sameDiff.getShapeForVarName(getVarName());
    if (recordedShape != null) {
        return recordedShape;
    }

    INDArray arr = getArr();
    return (arr != null) ? arr.shape() : null;
}
Returns the shape of this variable
19,642
/**
 * Runs a k-nearest-neighbors search for a NEW data point by POSTing the
 * base64-encoded array to {@code url + "/knnnew"} and parsing the JSON
 * response.
 *
 * @param k   number of neighbors to return
 * @param arr the new query point
 * @return the parsed nearest-neighbor results
 * @throws Exception on serialization or HTTP failure
 */
public NearestNeighborsResults knnNew(int k, INDArray arr) throws Exception {
    Base64NDArrayBody base64NDArrayBody =
                    Base64NDArrayBody.builder().k(k).ndarray(Nd4jBase64.base64String(arr)).build();

    HttpRequestWithBody req = Unirest.post(url + "/knnnew");
    req.header("accept", "application/json").header("Content-Type", "application/json").body(base64NDArrayBody);
    // Attach the bearer token when one is configured
    addAuthHeader(req);

    NearestNeighborsResults ret = req.asObject(NearestNeighborsResults.class).getBody();
    return ret;
}
Run a k nearest neighbors search on a NEW data point
19,643
/**
 * Adds the bearer-token authorization header to the request when an auth token
 * is configured; otherwise the request is returned untouched.
 *
 * @param request the request to decorate
 * @return the same request, for chaining
 */
protected HttpRequest addAuthHeader(HttpRequest request) {
    if (authToken == null) {
        return request;
    }
    request.header("authorization", "Bearer " + authToken);
    return request;
}
Add the specified authentication header to the specified HttpRequest
19,644
/**
 * Local response normalization operation.
 *
 * @param input     the input variable
 * @param lrnConfig the normalization configuration
 * @return the output variable of the op
 */
public SDVariable localResponseNormalization(SDVariable input, LocalResponseNormalizationConfig lrnConfig) {
    final SDVariable[] args = new SDVariable[] {input};
    LocalResponseNormalization op = LocalResponseNormalization.builder()
                    .inputFunctions(args)
                    .sameDiff(sameDiff())
                    .config(lrnConfig)
                    .build();
    return op.outputVariable();
}
Local response normalization operation .
19,645
/**
 * Conv1d operation.
 *
 * @param input        the input variable
 * @param weights      the convolution weights
 * @param conv1DConfig the convolution configuration
 * @return the output variable of the op
 */
public SDVariable conv1d(SDVariable input, SDVariable weights, Conv1DConfig conv1DConfig) {
    final SDVariable[] args = new SDVariable[] {input, weights};
    Conv1D op = Conv1D.builder()
                    .inputFunctions(args)
                    .sameDiff(sameDiff())
                    .config(conv1DConfig)
                    .build();
    return op.outputVariable();
}
Conv1d operation .
19,646
/**
 * Average pooling 2d operation.
 *
 * @param input           the input variable
 * @param pooling2DConfig the pooling configuration
 * @return the output variable of the op
 */
public SDVariable avgPooling2d(SDVariable input, Pooling2DConfig pooling2DConfig) {
    AvgPooling2D op = AvgPooling2D.builder()
                    .input(input)
                    .sameDiff(sameDiff())
                    .config(pooling2DConfig)
                    .build();
    return op.outputVariable();
}
Average pooling 2d operation .
19,647
/**
 * Max pooling 2d operation.
 *
 * @param input           the input variable
 * @param pooling2DConfig the pooling configuration
 * @return the output variable of the op
 */
public SDVariable maxPooling2d(SDVariable input, Pooling2DConfig pooling2DConfig) {
    MaxPooling2D op = MaxPooling2D.builder()
                    .input(input)
                    .sameDiff(sameDiff())
                    .config(pooling2DConfig)
                    .build();
    return op.outputVariable();
}
Max pooling 2d operation .
19,648
/**
 * Avg pooling 3d operation. Delegates to the generic 3d pooling after tagging
 * the config with the AVG pooling type.
 *
 * @param input           the input variable
 * @param pooling3DConfig the pooling configuration (mutated: type set to AVG)
 * @return the output variable of the op
 */
public SDVariable avgPooling3d(SDVariable input, Pooling3DConfig pooling3DConfig) {
    pooling3DConfig.setType(Pooling3D.Pooling3DType.AVG);
    return pooling3d(input, pooling3DConfig);
}
Avg pooling 3d operation .
19,649
/**
 * Max pooling 3d operation. Delegates to the generic 3d pooling after tagging
 * the config with the MAX pooling type.
 *
 * @param input           the input variable
 * @param pooling3DConfig the pooling configuration (mutated: type set to MAX)
 * @return the output variable of the op
 */
public SDVariable maxPooling3d(SDVariable input, Pooling3DConfig pooling3DConfig) {
    pooling3DConfig.setType(Pooling3D.Pooling3DType.MAX);
    return pooling3d(input, pooling3DConfig);
}
Max pooling 3d operation .
19,650
/**
 * Conv3d operation.
 *
 * @param inputs       the input variables (input, weights, optional bias)
 * @param conv3DConfig the convolution configuration
 * @return the first output variable of the op
 */
public SDVariable conv3d(SDVariable[] inputs, Conv3DConfig conv3DConfig) {
    Conv3D op = Conv3D.builder()
                    .inputFunctions(inputs)
                    .conv3DConfig(conv3DConfig)
                    .sameDiff(sameDiff())
                    .build();
    return op.outputVariables()[0];
}
Conv3d operation .
19,651
/**
 * Returns a boolean mask of equal shape to the input, marking the elements
 * where the condition is satisfied.
 *
 * @param in        the input variable
 * @param condition the condition to test elementwise
 * @return the boolean mask variable
 */
public SDVariable matchCondition(SDVariable in, Condition condition) {
    MatchConditionTransform op = new MatchConditionTransform(sameDiff(), in, condition);
    return op.outputVariable();
}
Returns a boolean mask of equal shape to the input where the condition is satisfied
19,652
/**
 * Instantiates a SingleCSVRecord from a DataSet row. If the labels sum to 1.0
 * they are treated as a one-hot classification vector and the argmax index is
 * appended after the features; otherwise (regression) every label value is
 * appended verbatim.
 *
 * @param row dataset whose features and labels must each be a scalar or vector
 * @return the flattened record
 * @throws IllegalArgumentException if features or labels are not scalar/vector
 */
public static SingleCSVRecord fromRow(DataSet row) {
    if (!row.getFeatures().isVector() && !row.getFeatures().isScalar())
        throw new IllegalArgumentException("Passed in dataset must represent a scalar or vector");
    if (!row.getLabels().isVector() && !row.getLabels().isScalar())
        throw new IllegalArgumentException("Passed in dataset labels must be a scalar or vector");
    SingleCSVRecord record;
    int idx = 0;
    // NOTE(review): sum == 1.0 as a one-hot test can misfire on regression
    // labels that happen to sum to exactly 1.0 — confirm this heuristic
    if (row.getLabels().sumNumber().doubleValue() == 1.0) {
        String[] values = new String[row.getFeatures().columns() + 1];
        for (int i = 0; i < row.getFeatures().length(); i++) {
            values[idx++] = String.valueOf(row.getFeatures().getDouble(i));
        }
        // Append the index of the maximum (hot) label
        int maxIdx = 0;
        for (int i = 0; i < row.getLabels().length(); i++) {
            if (row.getLabels().getDouble(maxIdx) < row.getLabels().getDouble(i)) {
                maxIdx = i;
            }
        }
        values[idx++] = String.valueOf(maxIdx);
        record = new SingleCSVRecord(values);
    } else {
        // Regression: features followed by all label values
        String[] values = new String[row.getFeatures().columns() + row.getLabels().columns()];
        for (int i = 0; i < row.getFeatures().length(); i++) {
            values[idx++] = String.valueOf(row.getFeatures().getDouble(i));
        }
        for (int i = 0; i < row.getLabels().length(); i++) {
            values[idx++] = String.valueOf(row.getLabels().getDouble(i));
        }
        record = new SingleCSVRecord(values);
    }
    return record;
}
Instantiate a csv record from a vector given either an input dataset and a one hot matrix the index will be appended to the end of the record or for regression it will append all values in the labels
19,653
/**
 * Resolves this resource name to a classpath URL.
 * <p>
 * Tries the thread context class loader first, falling back to this class's
 * own loader. If the plain name is not found, retries with the leading slash
 * toggled (stripped if present, prepended otherwise).
 *
 * @return URL of the requested resource
 * @throws IllegalStateException if the resource cannot be located
 */
private URL getUrl() {
    ClassLoader loader = null;
    try {
        loader = Thread.currentThread().getContextClassLoader();
    } catch (Exception e) {
        // best-effort: fall back to this class's loader below
    }
    if (loader == null) {
        loader = ClassPathResource.class.getClassLoader();
    }
    URL url = loader.getResource(this.resourceName);
    if (url != null) {
        return url;
    }
    // Retry with the leading slash toggled.
    if (this.resourceName.startsWith("/")) {
        url = loader.getResource(this.resourceName.replaceFirst("[\\\\/]", ""));
    } else {
        url = loader.getResource("/" + this.resourceName);
    }
    if (url != null) {
        return url;
    }
    throw new IllegalStateException("Resource '" + this.resourceName + "' cannot be found.");
}
Returns URL of the requested resource
19,654
/**
 * Materialises this classpath resource as a {@link File}.
 * <p>
 * If the resource lives inside a jar it is extracted to a temporary file
 * (deleted on JVM exit); otherwise the resource URL is converted directly to a
 * file-system path.
 * <p>
 * Fixes over the previous version: the copy loop now stops on end-of-stream
 * instead of trusting {@code ZipEntry.getSize()} (which may be -1 or wrong and
 * previously caused a write with a negative length on truncated streams), and
 * all streams are closed via try-with-resources even on failure.
 *
 * @return a file containing the resource content
 * @throws FileNotFoundException declared for API compatibility; as before,
 *         failures inside the jar-extraction path are wrapped in RuntimeException
 */
public File getFile() throws FileNotFoundException {
    URL url = this.getUrl();
    if (!isJarURL(url)) {
        // Plain file-system resource: decode the URL into a path.
        try {
            URI uri = new URI(url.toString().replaceAll(" ", "%20"));
            return new File(uri.getSchemeSpecificPart());
        } catch (URISyntaxException e) {
            return new File(url.getFile());
        }
    }
    try {
        url = extractActualUrl(url);
        File file = File.createTempFile("canova_temp", "file");
        file.deleteOnExit();
        try (ZipFile zipFile = new ZipFile(url.getFile())) {
            ZipEntry entry = zipFile.getEntry(this.resourceName);
            if (entry == null && this.resourceName.startsWith("/")) {
                // Jar entries are stored without a leading slash.
                entry = zipFile.getEntry(this.resourceName.replaceFirst("/", ""));
            }
            if (entry == null) {
                throw new FileNotFoundException("Resource " + this.resourceName + " not found");
            }
            try (InputStream stream = zipFile.getInputStream(entry);
                 FileOutputStream outputStream = new FileOutputStream(file)) {
                byte[] array = new byte[1024];
                int rd;
                // Standard EOF-terminated copy loop.
                while ((rd = stream.read(array)) != -1) {
                    outputStream.write(array, 0, rd);
                }
                outputStream.flush();
            }
        }
        return file;
    } catch (Exception e) {
        // Preserve original behavior: all extraction failures surface as RuntimeException.
        throw new RuntimeException(e);
    }
}
Returns requested ClassPathResource as File object
19,655
/**
 * Registers the given function under the supplied id.
 * <p>
 * If an entry already exists for the id, nothing is stored (the existing
 * entry is kept).
 *
 * @param id       id to register the function under
 * @param function function to register; must not be an SDVariable
 * @throws ND4JIllegalStateException if an entry with a null op exists for the
 *         id, or if the function is a variable
 */
public void putFunctionForId(String id, DifferentialFunction function) {
    boolean alreadyPresent = ops.containsKey(id);
    if (alreadyPresent && ops.get(id).getOp() == null) {
        throw new ND4JIllegalStateException("Function by id already exists!");
    }
    if (function instanceof SDVariable) {
        throw new ND4JIllegalStateException("Function must not be a variable!");
    }
    if (!alreadyPresent) {
        ops.put(id, SameDiffOp.builder().name(id).op(function).build());
    }
}
Put the function for the given id
19,656
/**
 * Records the shape for a variable name. The shape may only be set once per
 * name; subsequent attempts fail.
 *
 * @param varName variable name to associate the shape with
 * @param shape   shape to store; must not be null
 * @throws ND4JIllegalStateException if the shape is null or already recorded
 */
public void putShapeForVarName(String varName, long[] shape) {
    if (shape == null)
        throw new ND4JIllegalStateException("Shape must not be null!");
    boolean alreadyRecorded = variableNameToShape.containsKey(varName);
    if (alreadyRecorded)
        throw new ND4JIllegalStateException("Shape for " + varName + " already exists!");
    variableNameToShape.put(varName, shape);
}
Associate the given variable name with the given shape; the shape may only be set once per name .
19,657
/**
 * Put or update the shape for the given variable name.
 * <p>
 * NOTE(review): when a shape is already recorded for {@code varName} this
 * method is currently a no-op — the existing shape is NOT updated and the
 * {@code clearArrayOnShapeMismatch} flag is never read. Presumably the update
 * path was removed or lives elsewhere; confirm before relying on "update"
 * semantics.
 *
 * @param varName                   variable name
 * @param shape                     new shape; must not be null
 * @param clearArrayOnShapeMismatch currently ignored (see note above)
 */
public void putOrUpdateShapeForVarName ( String varName , long [ ] shape , boolean clearArrayOnShapeMismatch ) { Preconditions . checkNotNull ( shape , "Cannot put null shape for variable: %s" , varName ) ; if ( variableNameToShape . containsKey ( varName ) ) { } else { putShapeForVarName ( varName , shape ) ; } }
Put or update the shape for the given variable name . Optionally supports clearing the specified variable s INDArray if it s shape does not match the new shape
19,658
/**
 * Builds a fresh name-to-variable map from the internal variable registry.
 *
 * @return a new map; mutating it does not affect this instance
 */
public Map<String, SDVariable> variableMap() {
    Map<String, SDVariable> copy = new LinkedHashMap<>();
    for (Variable entry : variables.values()) {
        copy.put(entry.getName(), entry.getVariable());
    }
    return copy;
}
Return a copy of the internal variable map
19,659
/**
 * Checks whether the given function already has input arguments registered.
 *
 * @param function function to inspect
 * @return true if at least one input argument is defined
 */
public boolean hasArgs(DifferentialFunction function) {
    List<String> inputs = ops.get(function.getOwnName()).getInputsToOp();
    if (inputs == null) {
        return false;
    }
    return !inputs.isEmpty();
}
Returns true if this function already has defined arguments
19,660
/**
 * Collects every differential function registered on this instance.
 *
 * @return array of all defined functions, in registration order
 */
public DifferentialFunction[] functions() {
    DifferentialFunction[] result = new DifferentialFunction[ops.size()];
    int i = 0;
    for (SameDiffOp op : ops.values()) {
        result[i++] = op.getOp();
    }
    return result;
}
Get an array of differential functions that have been defined for this SameDiff instance
19,661
/**
 * Creates a variable of the given shape and data type with every element
 * initialised to 1.0.
 *
 * @param name     variable name
 * @param dataType data type of the variable
 * @param shape    shape of the variable
 * @return the newly created variable
 */
public SDVariable one(String name, org.nd4j.linalg.api.buffer.DataType dataType, int... shape) {
    long[] longShape = ArrayUtil.toLongArray(shape);
    return var(name, new ConstantInitScheme('f', 1.0), dataType, longShape);
}
Create a new variable with the specified shape with all values initialized to 1 . 0
19,662
/**
 * Creates a variable of the given shape and data type with every element
 * initialised to 0.
 *
 * @param name     variable name
 * @param dataType data type of the variable
 * @param shape    shape of the variable
 * @return the newly created variable
 */
public SDVariable zero(String name, org.nd4j.linalg.api.buffer.DataType dataType, int... shape) {
    long[] longShape = ArrayUtil.toLongArray(shape);
    return var(name, new ZeroInitScheme(), dataType, longShape);
}
Create a new variable with the specified shape with all values initialized to 0
19,663
/**
 * Removes an argument from a function's registered inputs. If the function
 * does not have the argument, this is a no-op.
 * <p>
 * NOTE(review): the code filters the stored {@code getInputsToOp()} list by
 * positional index against {@code function.args()} — it assumes both are the
 * same length and in the same order. Confirm that invariant holds for all
 * call sites; duplicated argument names would also remove every occurrence.
 *
 * @param varName  name of the argument to remove
 * @param function function whose input list should be updated
 */
public void removeArgFromFunction ( String varName , DifferentialFunction function ) { val args = function . args ( ) ; for ( int i = 0 ; i < args . length ; i ++ ) { if ( args [ i ] . getVarName ( ) . equals ( varName ) ) { List < String > reverseArgs = ops . get ( function . getOwnName ( ) ) . getInputsToOp ( ) ; val newArgs = new ArrayList < String > ( args . length - 1 ) ; for ( int arg = 0 ; arg < args . length ; arg ++ ) { if ( ! reverseArgs . get ( arg ) . equals ( varName ) ) { newArgs . add ( reverseArgs . get ( arg ) ) ; } } ops . get ( function . getOwnName ( ) ) . setInputsToOp ( newArgs ) ; break ; } } }
Remove an argument for a function . Note that if this function does not contain the argument it will just be a no op .
19,664
/**
 * Looks up a variable by name.
 *
 * @param name variable name
 * @return the variable, or null if no variable with that name exists
 */
public SDVariable getVariable(String name) {
    Variable found = variables.get(name);
    if (found == null) {
        return null;
    }
    return found.getVariable();
}
Get the variable based on the opName
19,665
/**
 * Assigns the given SDVariable as the gradient of the named variable.
 *
 * @param variableName name of the variable whose gradient is being set
 * @param variable     gradient variable; must not be null
 * @throws IllegalStateException     if no variable with that name exists
 * @throws ND4JIllegalStateException if the gradient is null
 */
public void setGradientForVariableName(String variableName, SDVariable variable) {
    Preconditions.checkState(variables.containsKey(variableName), "No variable exists with name \"%s\"", variableName);
    if (variable == null)
        throw new ND4JIllegalStateException("Unable to set null gradient for variable name " + variableName);
    Variable target = variables.get(variableName);
    target.setGradient(variable);
}
Assign a SDVariable to represent the gradient of the SDVariable with the specified name
19,666
/**
 * Adds the given variable to this SameDiff instance's registry.
 *
 * @param variable variable to register; must belong to this SameDiff instance
 * @return the same variable, for chaining
 * @throws IllegalStateException    if the variable belongs to another SameDiff instance
 * @throws IllegalArgumentException if a different variable is already registered
 *                                  under the same name
 */
public SDVariable addVariable(SDVariable variable) {
    // Single ownership check: the original repeated this exact check a second
    // time with a different message, which could never fire.
    Preconditions.checkState(variable.getSameDiff() == this, "Samediff instance must be the same.");
    if (variables.containsKey(variable.getVarName())
            && !variables.get(variable.getVarName()).getVariable().equals(variable)) {
        throw new IllegalArgumentException("Variable already found with variable opName " + variable.getVarName());
    }
    variables.put(variable.getVarName(),
            Variable.builder().name(variable.getVarName()).variable(variable).build());
    return variable;
}
Add the specified variable to this SameDiff instance
19,667
/**
 * Applies {@code updateVariableNameAndReference} to each variable in turn,
 * renaming it and refreshing its reference within this SameDiff instance.
 *
 * @param variablesToUpdate variables whose names/references should be refreshed
 * @param newVariableNames  new names, parallel to {@code variablesToUpdate};
 *                          may be null to keep the existing names
 * @return the updated variables, in the same order
 */
public SDVariable[] updateVariableNamesAndReferences(SDVariable[] variablesToUpdate, String[] newVariableNames) {
    SDVariable[] result = new SDVariable[variablesToUpdate.length];
    for (int idx = 0; idx < result.length; idx++) {
        String newName = (newVariableNames == null) ? null : newVariableNames[idx];
        result[idx] = updateVariableNameAndReference(variablesToUpdate[idx], newName);
    }
    return result;
}
Updates the variable name property on the passed - in variables , updates their references in SameDiff , and returns the updated variables .
19,668
/**
 * Serialises this graph to FlatBuffers and returns the decoded FlatGraph root.
 *
 * @param graphId       id to assign to the serialised graph
 * @param configuration execution configuration to embed
 * @return the FlatGraph structure
 */
public FlatGraph asFlatGraph(long graphId, ExecutorConfiguration configuration) {
    val buffer = asFlatBuffers(graphId, configuration);
    return FlatGraph.getRootAsFlatGraph(buffer);
}
This method returns FlatGraph structure
19,669
/**
 * Writes this SameDiff instance plus its training configuration to the given
 * stream as a zip archive with two entries: the training config as JSON and
 * the graph as FlatBuffers.
 * <p>
 * The caller's stream is protected by CloseShieldOutputStream, so closing the
 * zip machinery here does not close {@code outputStream}. The ZipOutputStream
 * itself is closed transitively by the try-with-resources around the
 * BufferedOutputStream/DataOutputStream wrappers at the end.
 * <p>
 * NOTE(review): if an exception is thrown before the final try block (e.g.
 * while writing the JSON entry), the ZipOutputStream is not closed — only the
 * shielded caller stream is protected. Confirm whether that leak matters here.
 *
 * @param trainingConfig training configuration to serialise alongside the graph
 * @param outputStream   destination stream (left open for the caller)
 * @throws IOException if writing to the stream fails
 */
public void saveWithTrainingConfig ( TrainingConfig trainingConfig , OutputStream outputStream ) throws IOException { ObjectMapper objectMapper = ObjectMapperHolder . getJsonMapper ( ) ; String configJson = objectMapper . writeValueAsString ( trainingConfig ) ; ZipOutputStream zipfile = new ZipOutputStream ( new CloseShieldOutputStream ( outputStream ) ) ; ZipEntry config = new ZipEntry ( TRAINING_CONFIG_JSON_ZIP_ENTRY_NAME ) ; zipfile . putNextEntry ( config ) ; zipfile . write ( configJson . getBytes ( ) ) ; ZipEntry sameDiff = new ZipEntry ( SAMEDIFF_FILE_ENTRY_NAME ) ; zipfile . putNextEntry ( sameDiff ) ; val fb = asFlatBuffers ( ) ; val offset = fb . position ( ) ; val array = fb . array ( ) ; try ( BufferedOutputStream zipFileOutputStream = new BufferedOutputStream ( zipfile ) ; val dos = new DataOutputStream ( zipFileOutputStream ) ) { dos . write ( array , offset , array . length - offset ) ; } }
Save this samediff instance as a zip file with the training configuration
19,670
/**
 * Serialises a list of Transforms to a JSON string.
 *
 * @param list transforms to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeTransformList(List<Transform> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.TransformList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of Transforms
19,671
/**
 * Serialises a list of Filters to a JSON string.
 *
 * @param list filters to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeFilterList(List<Filter> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.FilterList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of Filters
19,672
/**
 * Serialises a list of Conditions to a JSON string.
 *
 * @param list conditions to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeConditionList(List<Condition> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.ConditionList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of Conditions
19,673
/**
 * Serialises a list of IAssociativeReducers to a JSON string.
 *
 * @param list reducers to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeReducerList(List<IAssociativeReducer> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.ReducerList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of IAssociativeReducers
19,674
/**
 * Serialises a list of SequenceComparators to a JSON string.
 *
 * @param list comparators to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeSequenceComparatorList(List<SequenceComparator> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.SequenceComparatorList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of SequenceComparators
19,675
/**
 * Serialises a list of DataActions to a JSON string.
 *
 * @param list data actions to serialise
 * @return JSON representation of the wrapped list
 * @throws RuntimeException wrapping any serialisation failure
 */
public String serializeDataActionList(List<DataAction> list) {
    ObjectMapper mapper = getObjectMapper();
    try {
        return mapper.writeValueAsString(new ListWrappers.DataActionList(list));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Serialize a list of DataActions
19,676
/**
 * Loads a pretrained model of the given type, downloading and caching the
 * weights file locally if it is not already present.
 * <p>
 * When a non-zero expected checksum is configured, the cached file is
 * verified with Adler32; on mismatch the file is deleted and an
 * IllegalStateException is thrown (a later call will re-download).
 * <p>
 * NOTE(review): an already-cached file with a matching checksum is trusted
 * without re-download; a corrupted cache with checksum 0 configured is never
 * verified.
 *
 * @param pretrainedType which pretrained weight set to load
 * @return the restored model (MultiLayerNetwork or ComputationGraph)
 * @throws IOException                   if download or restore fails
 * @throws UnsupportedOperationException if weights are unavailable for this
 *                                       model/type, or the model class is unsupported
 * @throws IllegalStateException         if the checksum verification fails
 */
public < M extends Model > M initPretrained ( PretrainedType pretrainedType ) throws IOException { String remoteUrl = pretrainedUrl ( pretrainedType ) ; if ( remoteUrl == null ) throw new UnsupportedOperationException ( "Pretrained " + pretrainedType + " weights are not available for this model." ) ; String localFilename = new File ( remoteUrl ) . getName ( ) ; File rootCacheDir = DL4JResources . getDirectory ( ResourceType . ZOO_MODEL , modelName ( ) ) ; File cachedFile = new File ( rootCacheDir , localFilename ) ; if ( ! cachedFile . exists ( ) ) { log . info ( "Downloading model to " + cachedFile . toString ( ) ) ; FileUtils . copyURLToFile ( new URL ( remoteUrl ) , cachedFile ) ; } else { log . info ( "Using cached model at " + cachedFile . toString ( ) ) ; } long expectedChecksum = pretrainedChecksum ( pretrainedType ) ; if ( expectedChecksum != 0L ) { log . info ( "Verifying download..." ) ; Checksum adler = new Adler32 ( ) ; FileUtils . checksum ( cachedFile , adler ) ; long localChecksum = adler . getValue ( ) ; log . info ( "Checksum local is " + localChecksum + ", expecting " + expectedChecksum ) ; if ( expectedChecksum != localChecksum ) { log . error ( "Checksums do not match. Cleaning up files and failing..." ) ; cachedFile . delete ( ) ; throw new IllegalStateException ( "Pretrained model file failed checksum. If this error persists, please open an issue at https://github.com/deeplearning4j/deeplearning4j." ) ; } } if ( modelType ( ) == MultiLayerNetwork . class ) { return ( M ) ModelSerializer . restoreMultiLayerNetwork ( cachedFile ) ; } else if ( modelType ( ) == ComputationGraph . class ) { return ( M ) ModelSerializer . restoreComputationGraph ( cachedFile ) ; } else { throw new UnsupportedOperationException ( "Pretrained models are only supported for MultiLayerNetwork and ComputationGraph." ) ; } }
Returns a pretrained model for the given dataset if available .
19,677
/**
 * Reads the unroll flag from a Keras LSTM layer configuration, which decides
 * whether the RNN is unrolled for BPTT.
 *
 * @param conf        Keras layer configuration constants
 * @param layerConfig dictionary containing Keras layer properties
 * @return the unroll flag value
 * @throws InvalidKerasConfigurationException if the unroll field is missing
 */
public static boolean getUnrollRecurrentLayer(KerasLayerConfiguration conf, Map<String, Object> layerConfig)
        throws InvalidKerasConfigurationException {
    Map<String, Object> innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf);
    String unrollField = conf.getLAYER_FIELD_UNROLL();
    if (!innerConfig.containsKey(unrollField)) {
        throw new InvalidKerasConfigurationException("Keras LSTM layer config missing " + unrollField + " field");
    }
    return (boolean) innerConfig.get(unrollField);
}
Get unroll parameter to decide whether to unroll RNN with BPTT or not .
19,678
/**
 * Reads the recurrent (dropout_U) rate from a Keras layer configuration and
 * converts it to a retention probability (1 - dropout).
 * <p>
 * Non-zero recurrent dropout is not supported, so any retention value below
 * 1.0 triggers an exception. The inner try/catch handles configs where the
 * rate was serialised as an integer (e.g. 0) rather than a double.
 *
 * @param conf        Keras layer configuration constants
 * @param layerConfig dictionary containing Keras layer properties
 * @return retention probability; always 1.0 when this method returns normally
 * @throws UnsupportedKerasConfigurationException if recurrent dropout is non-zero
 * @throws InvalidKerasConfigurationException     if the inner config is malformed
 */
public static double getRecurrentDropout ( KerasLayerConfiguration conf , Map < String , Object > layerConfig ) throws UnsupportedKerasConfigurationException , InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; double dropout = 1.0 ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_DROPOUT_U ( ) ) ) try { dropout = 1.0 - ( double ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT_U ( ) ) ; } catch ( Exception e ) { int kerasDropout = ( int ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT_U ( ) ) ; dropout = 1.0 - ( double ) kerasDropout ; } if ( dropout < 1.0 ) throw new UnsupportedKerasConfigurationException ( "Dropout > 0 on recurrent connections not supported." ) ; return dropout ; }
Get recurrent weight dropout from Keras layer configuration . Non - zero dropout rates are currently not supported .
19,679
/**
 * Downloads the given URL to a file and verifies the target MD5, retrying up
 * to {@code maxTries} times. Delegates to the full overload starting at
 * attempt 0.
 *
 * @param name      human-readable name of the download (for messages)
 * @param url       source URL
 * @param f         destination file
 * @param targetMD5 expected MD5 hex digest
 * @param maxTries  maximum number of attempts
 * @throws IOException if the download ultimately fails
 */
public static void download(String name, URL url, File f, String targetMD5, int maxTries) throws IOException {
    final int firstAttempt = 0;
    download(name, url, f, targetMD5, maxTries, firstAttempt);
}
Download the specified URL to the specified file and verify that the target MD5 matches
19,680
/**
 * Checks whether the MD5 digest of the given file equals the expected value.
 * <p>
 * Fix: the previous implementation closed the stream only after digesting, so
 * the stream leaked if digesting threw; try-with-resources now guarantees
 * closure. The commons-codec dependency is replaced with the JDK's
 * MessageDigest, producing the same lowercase hex string.
 *
 * @param targetMD5 expected MD5 as a lowercase hex string
 * @param file      file to digest
 * @return true if the digests match
 * @throws IOException if the file cannot be read
 */
public static boolean checkMD5OfFile(String targetMD5, File file) throws IOException {
    java.security.MessageDigest digest;
    try {
        digest = java.security.MessageDigest.getInstance("MD5");
    } catch (java.security.NoSuchAlgorithmException e) {
        // MD5 is required to be present on every compliant JVM.
        throw new IllegalStateException(e);
    }
    try (InputStream in = new java.io.BufferedInputStream(new java.io.FileInputStream(file))) {
        byte[] buffer = new byte[8192];
        int read;
        while ((read = in.read(buffer)) != -1) {
            digest.update(buffer, 0, read);
        }
    }
    // Lowercase hex, matching DigestUtils.md5Hex output.
    StringBuilder hex = new StringBuilder(32);
    for (byte b : digest.digest()) {
        hex.append(Character.forDigit((b >> 4) & 0xF, 16));
        hex.append(Character.forDigit(b & 0xF, 16));
    }
    return targetMD5.equals(hex.toString());
}
Check the MD5 of the specified file
19,681
/**
 * Appends an NDArray message to the RocksDB-backed storage.
 * <p>
 * The message is serialised to a byte buffer and stored under a 4-byte
 * big-endian key equal to the current element count, which is then
 * incremented.
 * <p>
 * NOTE(review): the read-then-increment of {@code size} is not atomic, so
 * concurrent callers could overwrite each other's slot — confirm this is only
 * called from a single thread.
 *
 * @param array message to store
 * @throws RuntimeException wrapping any RocksDB failure
 */
public void addUpdate ( NDArrayMessage array ) { UnsafeBuffer directBuffer = ( UnsafeBuffer ) NDArrayMessage . toBuffer ( array ) ; byte [ ] data = directBuffer . byteArray ( ) ; if ( data == null ) { data = new byte [ directBuffer . capacity ( ) ] ; directBuffer . getBytes ( 0 , data , 0 , data . length ) ; } byte [ ] key = ByteBuffer . allocate ( 4 ) . putInt ( size ) . array ( ) ; try { db . put ( key , data ) ; } catch ( RocksDBException e ) { throw new RuntimeException ( e ) ; } size ++ ; }
Add an ndarray to the storage
19,682
/**
 * Deletes every stored entry and resets the element counter.
 * <p>
 * Fix: a freshly created RocksIterator is not positioned, so the previous
 * version's {@code while (iterator.isValid())} loop (which also never called
 * {@code next()}) either deleted nothing or spun forever. The iterator is now
 * seeked to the first key and advanced each pass.
 *
 * @throws RuntimeException wrapping any RocksDB failure
 */
public void clear() {
    RocksIterator iterator = db.newIterator();
    try {
        iterator.seekToFirst();
        while (iterator.isValid()) {
            try {
                db.remove(iterator.key());
            } catch (RocksDBException e) {
                throw new RuntimeException(e);
            }
            iterator.next();
        }
    } finally {
        iterator.close();
    }
    size = 0;
}
Clear the array storage
19,683
/**
 * Fetches and decodes the NDArray message stored at the given index.
 *
 * @param index storage slot to read (4-byte big-endian key)
 * @return the decoded message
 * @throws RuntimeException wrapping any RocksDB failure
 */
public NDArrayMessage doGetUpdate(int index) {
    byte[] key = ByteBuffer.allocate(4).putInt(index).array();
    byte[] stored;
    try {
        stored = db.get(key);
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
    return NDArrayMessage.fromBuffer(new UnsafeBuffer(stored), 0);
}
A method for actually performing the implementation of retrieving the ndarray
19,684
/**
 * Counts the depth-wise weight parameters of a separable convolution: one
 * kH x kW kernel per (input channel, depth multiplier) pair.
 *
 * @param layerConf separable convolution configuration
 * @return number of depth-wise parameters
 */
private long numDepthWiseParams(SeparableConvolution2D layerConf) {
    int[] kernelSize = layerConf.getKernelSize();
    val inputDepth = layerConf.getNIn();
    val multiplier = layerConf.getDepthMultiplier();
    return inputDepth * multiplier * kernelSize[0] * kernelSize[1];
}
For each input feature we separately compute depthMultiplier many output maps for the given kernel size
19,685
/**
 * Splits a text into a sequence of word tokens.
 * <p>
 * Each character of {@code filters} is replaced with {@code split} before
 * splitting, and empty tokens are discarded. Note that {@code split} is used
 * both as a literal replacement string and as a regex by {@link String#split};
 * regex metacharacters in it would change behavior.
 *
 * @param text    text to tokenize
 * @param filters characters to strip, each treated individually
 * @param lower   whether to lower-case the text first
 * @param split   token separator
 * @return array of tokens with empty strings removed
 */
public static String[] textToWordSequence(String text, String filters, boolean lower, String split) {
    if (lower)
        text = text.toLowerCase();
    for (String filter : filters.split("")) {
        text = text.replace(filter, split);
    }
    String[] sequences = text.split(split);
    // Typed list (the original used a raw ArrayList); drop empty/null tokens.
    List<String> seqList = new ArrayList<>(Arrays.asList(sequences));
    seqList.removeAll(Arrays.asList("", null));
    return seqList.toArray(new String[seqList.size()]);
}
Turns a String text into a sequence of tokens .
19,686
/**
 * Fits this tokenizer on a corpus of texts: counts word occurrences and
 * per-word document frequencies, then builds the word index sorted by
 * descending frequency (index values start at 1; the out-of-vocabulary token,
 * if configured, is placed first).
 * <p>
 * NOTE(review): relies on {@code reverseSortByValues} returning counts in
 * most-frequent-first order, and uses a raw-typed cast on {@code wordCounts} —
 * both inherited from the upstream Keras preprocessing port.
 *
 * @param texts texts to fit on; tokenized per character when {@code charLevel}
 *              is set, otherwise via {@code textToWordSequence}
 */
public void fitOnTexts ( String [ ] texts ) { String [ ] sequence ; for ( String text : texts ) { if ( documentCount == null ) documentCount = 1 ; else documentCount += 1 ; if ( charLevel ) { if ( lower ) text = text . toLowerCase ( ) ; sequence = text . split ( "" ) ; } else { sequence = textToWordSequence ( text , filters , lower , split ) ; } for ( String word : sequence ) { if ( wordCounts . containsKey ( word ) ) wordCounts . put ( word , wordCounts . get ( word ) + 1 ) ; else wordCounts . put ( word , 1 ) ; } Set < String > sequenceSet = new HashSet < > ( Arrays . asList ( sequence ) ) ; for ( String word : sequenceSet ) { if ( wordDocs . containsKey ( word ) ) wordDocs . put ( word , wordDocs . get ( word ) + 1 ) ; else wordDocs . put ( word , 1 ) ; } } Map < String , Integer > sortedWordCounts = reverseSortByValues ( ( HashMap ) wordCounts ) ; ArrayList < String > sortedVocabulary = new ArrayList < > ( ) ; if ( outOfVocabularyToken != null ) sortedVocabulary . add ( outOfVocabularyToken ) ; for ( String word : sortedWordCounts . keySet ( ) ) { sortedVocabulary . add ( word ) ; } for ( int i = 0 ; i < sortedVocabulary . size ( ) ; i ++ ) wordIndex . put ( sortedVocabulary . get ( i ) , i + 1 ) ; for ( String key : wordIndex . keySet ( ) ) { indexWord . put ( wordIndex . get ( key ) , key ) ; } for ( String key : wordDocs . keySet ( ) ) indexDocs . put ( wordIndex . get ( key ) , wordDocs . get ( key ) ) ; }
Fit this tokenizer on a corpus of texts .
19,687
/**
 * Sorts a map's entries by value in descending order, returning them in a
 * LinkedHashMap that preserves that order.
 * <p>
 * Fix: the previous comparator compared o1's value to o2's, producing
 * ascending order and contradicting the documented "reverse" contract (the
 * upstream Keras preprocessing port orders word counts most-frequent-first).
 *
 * @param map map to sort; values must be mutually Comparable
 * @return new LinkedHashMap with entries in descending-value order
 */
private static HashMap reverseSortByValues(HashMap map) {
    List list = new LinkedList(map.entrySet());
    Collections.sort(list, new Comparator() {
        public int compare(Object o1, Object o2) {
            // o2 compared against o1 gives descending (reverse) order.
            return ((Comparable) ((Map.Entry) (o2)).getValue()).compareTo(((Map.Entry) (o1)).getValue());
        }
    });
    HashMap sortedHashMap = new LinkedHashMap();
    for (Iterator it = list.iterator(); it.hasNext(); ) {
        Map.Entry entry = (Map.Entry) it.next();
        sortedHashMap.put(entry.getKey(), entry.getValue());
    }
    return sortedHashMap;
}
Sort HashMap by values in reverse order
19,688
/**
 * Fits this tokenizer on a corpus of word-index sequences, updating the
 * per-index document frequency table.
 * <p>
 * Fixes over the previous version: {@code documentCount} (a nullable Integer)
 * is initialised if still null, mirroring {@code fitOnTexts}; and unseen
 * indices start at 1 instead of throwing a NullPointerException from
 * {@code indexDocs.get(index) + 1}.
 *
 * @param sequences array of index sequences, one per document
 */
public void fitOnSequences(Integer[][] sequences) {
    if (documentCount == null)
        documentCount = 1;
    else
        documentCount += 1;
    for (Integer[] sequence : sequences) {
        // Count each index at most once per document.
        Set<Integer> uniqueIndices = new HashSet<>(Arrays.asList(sequence));
        for (Integer index : uniqueIndices) {
            indexDocs.put(index, indexDocs.getOrDefault(index, 0) + 1);
        }
    }
}
Fit this tokenizer on a corpus of word indices
19,689
/**
 * Transforms texts into their word-index representations.
 * <p>
 * Words outside the vocabulary, and words whose index is at or above
 * {@code numWords} (when set), are replaced by the out-of-vocabulary token's
 * index if one is configured, otherwise dropped.
 *
 * @param texts texts to convert; tokenized per character when
 *              {@code charLevel} is set, otherwise via {@code textToWordSequence}
 * @return one index array per input text
 */
public Integer [ ] [ ] textsToSequences ( String [ ] texts ) { Integer oovTokenIndex = wordIndex . get ( outOfVocabularyToken ) ; String [ ] wordSequence ; ArrayList < Integer [ ] > sequences = new ArrayList < > ( ) ; for ( String text : texts ) { if ( charLevel ) { if ( lower ) { text = text . toLowerCase ( ) ; } wordSequence = text . split ( "" ) ; } else { wordSequence = textToWordSequence ( text , filters , lower , split ) ; } ArrayList < Integer > indexVector = new ArrayList < > ( ) ; for ( String word : wordSequence ) { if ( wordIndex . containsKey ( word ) ) { int index = wordIndex . get ( word ) ; if ( numWords != null && index >= numWords ) { if ( oovTokenIndex != null ) indexVector . add ( oovTokenIndex ) ; } else { indexVector . add ( index ) ; } } else if ( oovTokenIndex != null ) { indexVector . add ( oovTokenIndex ) ; } } Integer [ ] indices = indexVector . toArray ( new Integer [ indexVector . size ( ) ] ) ; sequences . add ( indices ) ; } return sequences . toArray ( new Integer [ sequences . size ( ) ] [ ] ) ; }
Transforms a bunch of texts into their index representations .
19,690
/**
 * Turns index sequences back into texts by joining the looked-up words with
 * the configured split string.
 * <p>
 * NOTE(review): each word is appended followed by {@code split}, so the
 * resulting text carries a trailing separator. Indices at or above
 * {@code numWords} are replaced by the OOV token when one is configured;
 * unknown indices are likewise mapped to the OOV token or dropped — confirm
 * this asymmetry with the upstream Keras behavior.
 *
 * @param sequences index sequences to convert
 * @return one reconstructed text per input sequence
 */
public String [ ] sequencesToTexts ( Integer [ ] [ ] sequences ) { Integer oovTokenIndex = wordIndex . get ( outOfVocabularyToken ) ; ArrayList < String > texts = new ArrayList < > ( ) ; for ( Integer [ ] sequence : sequences ) { ArrayList < String > wordVector = new ArrayList < > ( ) ; for ( Integer index : sequence ) { if ( indexWord . containsKey ( index ) ) { String word = indexWord . get ( index ) ; if ( numWords != null && index >= numWords ) { if ( oovTokenIndex != null ) { wordVector . add ( indexWord . get ( oovTokenIndex ) ) ; } else { wordVector . add ( word ) ; } } } else if ( oovTokenIndex != null ) { wordVector . add ( indexWord . get ( oovTokenIndex ) ) ; } } StringBuilder builder = new StringBuilder ( ) ; for ( String word : wordVector ) { builder . append ( word + split ) ; } String text = builder . toString ( ) ; texts . add ( text ) ; } return texts . toArray ( new String [ texts . size ( ) ] ) ; }
Turns index sequences back into texts
19,691
/**
 * Sets the held ndarray, but only if none has been set yet.
 * <p>
 * NOTE(review): the null check followed by {@code set} is a check-then-act
 * sequence — if {@code arr} is an AtomicReference (it exposes get/set),
 * {@code compareAndSet(null, arr)} would make this atomic; confirm whether
 * concurrent callers are possible.
 *
 * @param arr array to store; ignored if an array is already present
 */
public void setArray ( INDArray arr ) { if ( this . arr . get ( ) == null ) this . arr . set ( arr ) ; }
Set the ndarray
19,692
/**
 * Recursively scans a directory for .pb model files and merges the import
 * status of each into a single result.
 *
 * @param directory directory to scan; must exist and be a directory
 * @return combined import status across every .pb file found
 * @throws IOException           if a model file cannot be read
 * @throws IllegalStateException if the path is not a directory or no .pb files exist
 */
public static TFImportStatus checkAllModelsForImport(File directory) throws IOException {
    Preconditions.checkState(directory.isDirectory(), "Specified directory %s is not actually a directory", directory);
    Collection<File> files = FileUtils.listFiles(directory, new String[]{"pb"}, true);
    Preconditions.checkState(!files.isEmpty(), "No .pb files found in directory %s", directory);
    TFImportStatus combined = null;
    for (File modelFile : files) {
        TFImportStatus current = checkModelForImport(modelFile);
        combined = (combined == null) ? current : combined.merge(current);
    }
    return combined;
}
Recursively scan the specified directory for . pb files and evaluate each model for import , merging the results into a single status
19,693
/**
 * Runs inference through a custom OperandsAdapter: the adapter converts the
 * caller's value into Operands for the request and converts the resulting
 * Operands back into the caller's type.
 *
 * @param graphId id of the registered graph
 * @param value   caller-side input value
 * @param adapter adapter translating between T and Operands
 * @return caller-side output value
 */
public <T> T output(long graphId, T value, OperandsAdapter<T> adapter) {
    final Operands request = adapter.input(value);
    final Operands response = this.output(graphId, request);
    return adapter.output(response);
}
This method is suited for use of custom OperandsAdapters
19,694
/**
 * Sends an inference request for the given named inputs to the GraphServer
 * and returns the results as an array of INDArrays.
 *
 * @param graphId id of the registered graph
 * @param inputs  name/array pairs to feed into the graph
 * @return resulting output arrays
 */
public INDArray[] output(long graphId, Pair<String, INDArray>... inputs) {
    val operands = new Operands();
    for (Pair<String, INDArray> input : inputs) {
        operands.addArgument(input.getFirst(), input.getSecond());
    }
    val result = output(graphId, operands);
    return result.asArray();
}
This method sends inference request to the GraphServer instance and returns result as array of INDArrays
19,695
/**
 * Asks the GraphServer instance to forget (drop) a previously registered graph.
 *
 * @param graphId id of the graph to remove
 * @throws ND4JIllegalStateException if the server reports a non-zero status
 */
public void dropGraph(long graphId) {
    val builder = new FlatBufferBuilder(128);
    val off = FlatDropRequest.createFlatDropRequest(builder, graphId);
    builder.finish(off);
    val req = FlatDropRequest.getRootAsFlatDropRequest(builder.dataBuffer());
    val v = blockingStub.forgetGraph(req);
    if (v.status() != 0)
        // Fixed message: this is the forgetGraph call, not registerGraph.
        throw new ND4JIllegalStateException("forgetGraph() gRPC call failed");
}
This method allows to remove graph from the GraphServer instance
19,696
/**
 * Instantiates a TimeSource by reflectively invoking the static
 * {@code getInstance()} method of the named class.
 *
 * @param className fully qualified class name of the TimeSource implementation
 * @return the TimeSource returned by the class's getInstance()
 * @throws RuntimeException wrapping any reflection failure
 */
public static TimeSource getInstance(String className) {
    try {
        Method factory = Class.forName(className).getMethod("getInstance");
        return (TimeSource) factory.invoke(null);
    } catch (Exception e) {
        throw new RuntimeException("Error getting TimeSource instance for class \"" + className + "\"", e);
    }
}
Get a specific TimeSource by class name
19,697
/**
 * Computes the spatial output size of a 2d deconvolution (transposed
 * convolution) — the inverse of the convolution shape computation.
 *
 * @param inputData       input activations; size(2) is height, size(3) is width
 * @param kernel          kernel size {kH, kW}
 * @param strides         strides {sH, sW}
 * @param padding         padding {pH, pW}; ignored in Same mode
 * @param convolutionMode convolution mode (Same scales input by stride only)
 * @param dilation        dilation {dH, dW}
 * @return output spatial size {height, width}
 */
public static int[] getDeconvolutionOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding,
                ConvolutionMode convolutionMode, int[] dilation) {
    int inH = (int) inputData.size(2);
    int inW = (int) inputData.size(3);
    if (convolutionMode == ConvolutionMode.Same) {
        // Same mode: output is the input scaled by the stride.
        return new int[] {strides[0] * inH, strides[1] * inW};
    }
    int[] effKernel = effectiveKernelSize(kernel, dilation);
    int outH = strides[0] * (inH - 1) + effKernel[0] - 2 * padding[0];
    int outW = strides[1] * (inW - 1) + effKernel[1] - 2 * padding[1];
    return new int[] {outH, outW};
}
Get the output size of a deconvolution operation for given input data . In deconvolution we compute the inverse of the shape computation of a convolution .
19,698
/**
 * Extracts the kernel height and width from a convolutional layer
 * configuration by delegating to the int[] overload.
 *
 * @param conf configuration whose layer must be a ConvolutionLayer
 * @return height and width derived from the kernel size
 */
public static int[] getHeightAndWidth(NeuralNetConfiguration conf) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer =
                    (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    return getHeightAndWidth(layer.getKernelSize());
}
Get the height and width from the configuration
19,699
/**
 * Returns the trailing two dimensions of a shape, as {last, second-to-last}.
 *
 * @param shape shape array; must contain at least two elements
 * @return two-element array {shape[n-1], shape[n-2]}
 * @throws IllegalArgumentException if the shape has fewer than two elements
 */
public static int[] getHeightAndWidth(int[] shape) {
    if (shape.length < 2)
        throw new IllegalArgumentException("No width and height able to be found: array must be at least length 2");
    int last = shape[shape.length - 1];
    int secondLast = shape[shape.length - 2];
    return new int[] {last, secondLast};
}
Get the height and width for an image