idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
19,700
/**
 * Converts a TensorFlow protobuf model file into the ND4J FlatBuffers format.
 *
 * @param inFile  path of the protobuf model to read
 * @param outFile path the flatbuffer model is written to
 * @throws IOException if either file cannot be read or written
 */
public static void convert(String inFile, String outFile)
        throws IOException, org.nd4j.linalg.exception.ND4JIllegalStateException {
    // Import the TF graph, then serialize it straight back out as a flat file.
    SameDiff importedGraph = TFGraphMapper.getInstance().importGraph(new File(inFile));
    importedGraph.asFlatFile(new File(outFile));
}
Converts a file containing a model from the Protocol Buffer format to the Flat Buffer format .
19,701
/**
 * Converts a BERT model from TensorFlow protobuf to the ND4J FlatBuffers format.
 *
 * The import is specialized in three steps:
 * 1. "IteratorGetNext" is overridden so the dataset-iterator node becomes three
 *    INT placeholders of shape [4, 128] (minibatch 4, sequence length 128).
 * 2. The "IteratorV2" node is filtered out entirely during import.
 * 3. Every dropout subgraph (mul/div/Floor/add/random_uniform chain) is located
 *    via SubGraphPredicate and replaced by its pre-dropout input (the variable
 *    ending in /BiasAdd, /Softmax, /add_1 or /Tanh), i.e. dropout becomes
 *    identity for inference.
 *
 * NOTE(review): minibatch size 4 and sequence length 128 are hard-coded —
 * confirm they match the target deployment before reuse.
 *
 * @param inFile  path of the frozen BERT protobuf model
 * @param outFile path the transformed flatbuffer model is written to
 * @throws IOException if either file cannot be read or written
 * @throws RuntimeException if a matched dropout subgraph has no recognizable
 *         pre-dropout input variable
 */
public static void convertBERT ( String inFile , String outFile ) throws IOException , org . nd4j . linalg . exception . ND4JIllegalStateException { int minibatchSize = 4 ; Map < String , TFImportOverride > m = new HashMap < > ( ) ; m . put ( "IteratorGetNext" , ( inputs , controlDepInputs , nodeDef , initWith , attributesForNode , graph ) -> { return Arrays . asList ( initWith . placeHolder ( "IteratorGetNext" , DataType . INT , minibatchSize , 128 ) , initWith . placeHolder ( "IteratorGetNext:1" , DataType . INT , minibatchSize , 128 ) , initWith . placeHolder ( "IteratorGetNext:4" , DataType . INT , minibatchSize , 128 ) ) ; } ) ; TFOpImportFilter filter = ( nodeDef , initWith , attributesForNode , graph ) -> { return "IteratorV2" . equals ( nodeDef . getName ( ) ) ; } ; SameDiff sd = TFGraphMapper . getInstance ( ) . importGraph ( new File ( inFile ) , m , filter ) ; SubGraphPredicate p = SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/mul" ) ) . withInputCount ( 2 ) . withInputSubgraph ( 0 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/div" ) ) ) . withInputSubgraph ( 1 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/Floor" ) ) . withInputSubgraph ( 0 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/add" ) ) . withInputSubgraph ( 1 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/random_uniform" ) ) . withInputSubgraph ( 0 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/random_uniform/mul" ) ) . withInputSubgraph ( 0 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/random_uniform/RandomUniform" ) ) ) . withInputSubgraph ( 1 , SubGraphPredicate . withRoot ( OpPredicate . nameMatches ( ".*/dropout/random_uniform/sub" ) ) ) ) ) ) ) ; List < SubGraph > subGraphs = GraphTransformUtil . getSubgraphsMatching ( sd , p ) ; int subGraphCount = subGraphs . size ( ) ; sd = GraphTransformUtil . 
replaceSubgraphsMatching ( sd , p , new SubGraphProcessor ( ) { public List < SDVariable > processSubgraph ( SameDiff sd , SubGraph subGraph ) { List < SDVariable > inputs = subGraph . inputs ( ) ; SDVariable newOut = null ; for ( SDVariable v : inputs ) { if ( v . getVarName ( ) . endsWith ( "/BiasAdd" ) || v . getVarName ( ) . endsWith ( "/Softmax" ) || v . getVarName ( ) . endsWith ( "/add_1" ) || v . getVarName ( ) . endsWith ( "/Tanh" ) ) { newOut = v ; break ; } } if ( newOut != null ) { return Collections . singletonList ( newOut ) ; } throw new RuntimeException ( "No pre-dropout input variable found" ) ; } } ) ; System . out . println ( "Exporting file " + outFile ) ; sd . asFlatFile ( new File ( outFile ) ) ; }
Converts a BERT model from the Protocol Buffer format to the Flat Buffer format .
19,702
/**
 * Tests whether ALL elements of the given ndarray satisfy the condition.
 *
 * <p>Improvement: guard clause for the unsupported-condition case, and the
 * redundant {@code if … return true; else return false;} collapsed to a
 * direct boolean return.</p>
 *
 * @param n    the ndarray to test
 * @param cond the condition to apply; must be a {@code BaseCondition}
 * @return true iff every element of {@code n} matches {@code cond}
 * @throws RuntimeException if the condition is not a BaseCondition
 */
public static boolean and(final INDArray n, final Condition cond) {
    if (!(cond instanceof BaseCondition)) {
        throw new RuntimeException("Can only execute BaseCondition conditions using this method");
    }
    // MatchCondition counts matching elements; all match iff count == length.
    long matches = (long) Nd4j.getExecutioner().exec(new MatchCondition(n, cond)).getDouble(0);
    return matches == n.length();
}
And over the whole ndarray given some condition
19,703
/**
 * Returns the last index of {@code array} matching {@code condition}, as a
 * LONG scalar ndarray.
 *
 * @throws UnsupportedOperationException if the condition is not static
 */
public static INDArray lastIndex(INDArray array, Condition condition) {
    if (!(condition instanceof BaseCondition))
        throw new UnsupportedOperationException("Only static Conditions are supported");
    final LastIndex op = new LastIndex(array, condition);
    Nd4j.getExecutioner().exec(op);
    final long index = op.getFinalResult().longValue();
    return Nd4j.scalar(DataType.LONG, index);
}
This method returns last index matching given condition
19,704
/**
 * Writes the given grayscale image to a file in the ASCII PPM (P3) format,
 * replicating each pixel value into the R, G and B channels.
 *
 * <p>Fix: the PPM header must list the image width (columns) before the
 * height (rows); the previous code wrote rows first, producing corrupt files
 * for any non-square image. A newline is also emitted after each image row
 * (whitespace-equivalent for PPM parsers, far more readable).</p>
 *
 * @param image       pixel values indexed as image[row][col], each in 0..255
 * @param ppmFileName destination file path
 * @throws IOException if the file cannot be written
 */
public static void writeImageToPpm(int[][] image, String ppmFileName) throws IOException {
    try (BufferedWriter ppmOut = new BufferedWriter(new FileWriter(ppmFileName))) {
        int rows = image.length;
        int cols = image[0].length;
        ppmOut.write("P3\n");
        // PPM header order is: width height maxval
        ppmOut.write("" + cols + " " + rows + " 255\n");
        for (int[] imageRow : image) {
            StringBuilder s = new StringBuilder();
            for (int j = 0; j < cols; j++) {
                // Gray pixel: identical R, G and B components.
                s.append(imageRow[j]).append(' ')
                 .append(imageRow[j]).append(' ')
                 .append(imageRow[j]).append(' ');
            }
            s.append('\n');
            ppmOut.write(s.toString());
        }
    }
}
Writes the given image in the given file using the PPM data format .
19,705
/**
 * Captures successive windowRowSize x windowColumnSize windows from the
 * flattened contents of {@code toSlice}.
 *
 * @param flattened if true each window is returned raveled (1-D); otherwise
 *                  reshaped to (windowRowSize, windowColumnSize)
 * @return the list of window arrays (plus rotations when addRotate is set)
 */
public List<INDArray> windows(boolean flattened) {
    List<INDArray> ret = new ArrayList<>();
    int window = 0;
    // NOTE(review): the loop index i is never used for addressing; iteration is
    // actually bounded by the window-offset guard below, i only caps the count.
    for (int i = 0; i < toSlice.length(); i++) {
        if (window >= toSlice.length())
            break;
        // Copy one window of raw doubles starting at the current offset.
        double[] w = new double[this.windowRowSize * this.windowColumnSize];
        for (int count = 0; count < this.windowRowSize * this.windowColumnSize; count++) {
            w[count] = toSlice.getDouble(count + window);
        }
        INDArray add = Nd4j.create(w);
        if (flattened)
            add = add.ravel();
        else
            add = add.reshape(windowRowSize, windowColumnSize);
        if (addRotate) {
            INDArray currRotation = add.dup();
            // Add the three 90-degree rotations of the window.
            // NOTE(review): assumes Nd4j.rot90 rotates currRotation in place —
            // otherwise all three dups would be identical. TODO confirm.
            for (int rotation = 0; rotation < 3; rotation++) {
                Nd4j.rot90(currRotation);
                ret.add(currRotation.dup());
            }
        }
        window += this.windowRowSize * this.windowColumnSize;
        ret.add(add);
    }
    return ret;
}
Moving window : captures successive row x column windows of a given matrix .
19,706
/**
 * Returns the number of training examples for the specified subset.
 *
 * @param dataSet the subset to query
 * @return the training example count for that subset
 * @throws UnsupportedOperationException if the subset is unknown
 */
public static int numExamplesTrain(Set dataSet) {
    switch (dataSet) {
        case COMPLETE:
            return NUM_COMPLETE_TRAIN;
        case MERGE:
            return NUM_MERGE_TRAIN;
        case BALANCED:
            return NUM_BALANCED_TRAIN;
        case LETTERS:
            return NUM_LETTERS_TRAIN;
        case DIGITS:
            return NUM_DIGITS_TRAIN;
        case MNIST:
            return NUM_MNIST_TRAIN;
        default:
            throw new UnsupportedOperationException("Unknown Set: " + dataSet);
    }
}
Get the number of training examples for the specified subset
19,707
/**
 * Returns the label assignments for the given subset as a character array.
 * DIGITS and MNIST intentionally share the same digit labels.
 *
 * @param dataSet the subset to query
 * @return the label characters for that subset
 * @throws UnsupportedOperationException if the subset is unknown
 */
public static char[] getLabelsArray(Set dataSet) {
    switch (dataSet) {
        case COMPLETE:
            return LABELS_COMPLETE;
        case MERGE:
            return LABELS_MERGE;
        case BALANCED:
            return LABELS_BALANCED;
        case LETTERS:
            return LABELS_LETTERS;
        case DIGITS:
        case MNIST:
            // Both subsets use the 0-9 digit labels.
            return LABELS_DIGITS;
        default:
            throw new UnsupportedOperationException("Unknown Set: " + dataSet);
    }
}
Get the label assignments for the given set as a character array .
19,708
/**
 * Returns the mask array for this input index, or null when the index is
 * invalid or the graph has no input masks set.
 */
private INDArray getMask() {
    if (inputIdx < 0) {
        return null;
    }
    INDArray[] masks = graph.getInputMaskArrays();
    if (masks == null) {
        return null;
    }
    return masks[inputIdx];
}
Gets the current mask array from the provided input
19,709
/**
 * Infers a paragraph vector for the given raw text.
 *
 * The text is tokenized with the configured TokenizerFactory; tokens absent
 * from the model vocabulary are silently dropped before delegating to the
 * document-based overload.
 *
 * @param text            raw text to infer a vector for
 * @param learningRate    starting learning rate for inference
 * @param minLearningRate floor for the decayed learning rate
 * @param iterations      number of inference iterations
 * @return the inferred vector
 * @throws IllegalStateException     if no TokenizerFactory is configured
 * @throws ND4JIllegalStateException if no token of the text is in the vocabulary
 */
public INDArray inferVector(String text, double learningRate, double minLearningRate, int iterations) {
    if (tokenizerFactory == null)
        throw new IllegalStateException("TokenizerFactory should be defined, prior to predict() call");
    // Lazily rebuild vocab/weights if this instance was restored without them.
    if (this.vocab == null || this.vocab.numWords() == 0)
        reassignExistingModel();
    List<String> tokens = tokenizerFactory.create(text).getTokens();
    List<VocabWord> document = new ArrayList<>();
    for (String token : tokens) {
        // Out-of-vocabulary tokens are skipped, not errors.
        if (vocab.containsWord(token)) {
            document.add(vocab.wordFor(token));
        }
    }
    if (document.isEmpty())
        throw new ND4JIllegalStateException("Text passed for inference has no matches in model vocabulary.");
    return inferVector(document, learningRate, minLearningRate, iterations);
}
This method calculates inferred vector for given text
19,710
/**
 * Infers a vector for the given text using the model defaults: the configured
 * learning rate, minimum learning rate, and epochs * iterations as the
 * inference iteration count.
 */
public INDArray inferVector(String text) {
    final double lr = this.learningRate.get();
    final int iters = this.numEpochs * this.numIterations;
    return inferVector(text, lr, this.minLearningRate, iters);
}
This method calculates inferred vector for given text with default parameters for learning rate and iterations
19,711
/**
 * Infers a vector for the given document using the model defaults: the
 * configured learning rate, minimum learning rate, and epochs * iterations
 * as the inference iteration count.
 */
public INDArray inferVector(LabelledDocument document) {
    final double lr = this.learningRate.get();
    final int iters = this.numEpochs * this.numIterations;
    return inferVector(document, lr, this.minLearningRate, iters);
}
This method calculates inferred vector for given document with default parameters for learning rate and iterations
19,712
/**
 * Returns the top N labels nearest to the given document, preferring its
 * referenced content over its plain content when present.
 */
public Collection<String> nearestLabels(LabelledDocument document, int topN) {
    if (document.getReferencedContent() == null) {
        return nearestLabels(document.getContent(), topN);
    }
    return nearestLabels(document.getReferencedContent(), topN);
}
This method returns top N labels nearest to specified document
19,713
/**
 * Returns the top N labels nearest to the given features vector by cosine
 * similarity against the (lazily extracted, lazily normalized) label matrix.
 *
 * @param labelVector the feature vector to compare against all labels
 * @param topN        how many nearest labels to return
 * @return up to topN label strings, best match first; empty when no labels exist
 */
public Collection<String> nearestLabels(INDArray labelVector, int topN) {
    // Lazily build the label matrix on first use.
    if (labelsMatrix == null || labelsList == null || labelsList.isEmpty())
        extractLabels();
    List<BasicModelUtils.WordSimilarity> result = new ArrayList<>();
    // Still empty after extraction => model has no labels at all.
    if (labelsMatrix == null || labelsList == null || labelsList.isEmpty()) {
        log.warn("Labels list is empty!");
        return new ArrayList<>();
    }
    // One-time row normalization of the label matrix.
    // NOTE(review): classic double-checked locking — only safe if
    // normalizedLabels is volatile; confirm the field declaration.
    if (!normalizedLabels) {
        synchronized (this) {
            if (!normalizedLabels) {
                labelsMatrix.diviColumnVector(labelsMatrix.norm1(1));
                normalizedLabels = true;
            }
        }
    }
    INDArray similarity = Transforms.unitVec(labelVector).mmul(labelsMatrix.transpose());
    // Over-fetch (topN + 20) so filtered tokens below still leave topN results.
    List<Double> highToLowSimList = getTopN(similarity, topN + 20);
    for (int i = 0; i < highToLowSimList.size(); i++) {
        String word = labelsList.get(highToLowSimList.get(i).intValue()).getLabel();
        // Skip placeholder tokens.
        if (word != null && !word.equals("UNK") && !word.equals("STOP")) {
            INDArray otherVec = lookupTable.vector(word);
            double sim = Transforms.cosineSim(labelVector, otherVec);
            result.add(new BasicModelUtils.WordSimilarity(word, sim));
        }
    }
    Collections.sort(result, new BasicModelUtils.SimilarityComparator());
    return BasicModelUtils.getLabels(result, topN);
}
This method returns top N labels nearest to specified features vector
19,714
/**
 * Selects the indices of the N largest values in {@code vec}.
 *
 * Implemented as a bounded min-heap: the queue keeps the N best
 * (value, index) pairs seen so far, evicting the smallest head whenever a
 * larger value arrives.
 *
 * @param vec the values to rank
 * @param N   how many top entries to keep
 * @return the indices (as doubles) of the top N values, highest value first
 */
private List<Double> getTopN(INDArray vec, int N) {
    BasicModelUtils.ArrayComparator comparator = new BasicModelUtils.ArrayComparator();
    PriorityQueue<Double[]> queue = new PriorityQueue<>(vec.rows(), comparator);
    for (int j = 0; j < vec.length(); j++) {
        // pair = { value, index } — the comparator orders by value.
        final Double[] pair = new Double[] { vec.getDouble(j), (double) j };
        if (queue.size() < N) {
            queue.add(pair);
        } else {
            Double[] head = queue.peek();
            // Replace the current minimum only if this value is larger.
            if (comparator.compare(pair, head) > 0) {
                queue.poll();
                queue.add(pair);
            }
        }
    }
    // Drain the heap (ascending) and reverse to get highest-first order.
    List<Double> lowToHighSimLst = new ArrayList<>();
    while (!queue.isEmpty()) {
        double ind = queue.poll()[1];
        lowToHighSimLst.add(ind);
    }
    return Lists.reverse(lowToHighSimLst);
}
Get top N elements
19,715
/**
 * Closes the image and label streams if open and nulls them out. Close
 * failures are deliberately ignored — this is a best-effort cleanup.
 */
public void close() {
    if (images != null) {
        try {
            images.close();
        } catch (IOException ignored) {
            // best-effort close
        }
        images = null;
    }
    if (labels != null) {
        try {
            labels.close();
        } catch (IOException ignored) {
            // best-effort close
        }
        labels = null;
    }
}
Close any resources opened by the manager .
19,716
/**
 * Returns a deep copy of this dataset: features, labels and any mask arrays
 * are duplicated; column and label names are carried over.
 */
public DataSet copy() {
    DataSet duplicate = new DataSet(getFeatures().dup(), getLabels().dup());
    INDArray labelsMaskCopy = getLabelsMaskArray();
    if (labelsMaskCopy != null) {
        duplicate.setLabelsMaskArray(labelsMaskCopy.dup());
    }
    INDArray featuresMaskCopy = getFeaturesMaskArray();
    if (featuresMaskCopy != null) {
        duplicate.setFeaturesMaskArray(featuresMaskCopy.dup());
    }
    duplicate.setColumnNames(getColumnNames());
    duplicate.setLabelNames(getLabelNames());
    return duplicate;
}
Clone the dataset
19,717
/**
 * Returns a new DataSet whose features are reshaped to (rows, cols). The
 * label array is shared with this dataset, not copied.
 */
public DataSet reshape(int rows, int cols) {
    INDArray reshapedFeatures = getFeatures().reshape(new long[] { rows, cols });
    return new DataSet(reshapedFeatures, getLabels());
}
Reshapes the input into the given rows and columns .
19,718
/**
 * Shuffles the dataset in place using the given seed, keeping features,
 * labels, both mask arrays and example metadata aligned.
 *
 * @param seed RNG seed, for reproducible shuffles
 */
public void shuffle(long seed) {
    // Nothing to shuffle with fewer than two examples.
    if (numExamples() < 2)
        return;
    List<INDArray> arrays = new ArrayList<>();
    List<int[]> dimensions = new ArrayList<>();
    // Shuffle along dimension 0; the remaining dims of each array move together.
    arrays.add(getFeatures());
    dimensions.add(ArrayUtil.range(1, getFeatures().rank()));
    arrays.add(getLabels());
    dimensions.add(ArrayUtil.range(1, getLabels().rank()));
    if (featuresMask != null) {
        arrays.add(getFeaturesMaskArray());
        dimensions.add(ArrayUtil.range(1, getFeaturesMaskArray().rank()));
    }
    if (labelsMask != null) {
        arrays.add(getLabelsMaskArray());
        dimensions.add(ArrayUtil.range(1, getLabelsMaskArray().rank()));
    }
    Nd4j.shuffle(arrays, new Random(seed), dimensions);
    if (exampleMetaData != null) {
        // NOTE(review): metadata is permuted with a second Random(seed); this
        // only stays aligned with the arrays if Nd4j.shuffle derives its
        // permutation from the RNG the same way — TODO confirm.
        int[] map = ArrayUtil.buildInterleavedVector(new Random(seed), numExamples());
        ArrayUtil.shuffleWithMap(exampleMetaData, map);
    }
}
Shuffles the dataset in place given a seed for a random number generator , for reproducibility . Note : this will modify the dataset in place !
19,719
/**
 * Clamps every feature value in place into the closed range [min, max]:
 * values below min become min, values above max become max.
 *
 * <p>Improvement: the features array is fetched once and accessed via
 * {@code getDouble}/{@code putScalar}, instead of re-calling
 * {@code getFeatures()} and materializing a boxed scalar view on every
 * iteration.</p>
 *
 * @param min lower bound of the target range
 * @param max upper bound of the target range
 */
public void squishToRange(double min, double max) {
    INDArray features = getFeatures();
    for (long i = 0; i < features.length(); i++) {
        double curr = features.getDouble(i);
        if (curr < min) {
            features.putScalar(i, min);
        } else if (curr > max) {
            features.putScalar(i, max);
        }
    }
}
Clamps the input data to the given minimum and maximum values .
19,720
/**
 * Thresholds the features in place: values strictly greater than cutoff
 * become 1, all others become 0.
 */
public void binarize(double cutoff) {
    // Flat view for linear reads; writes go through the same linear indices.
    INDArray flat = getFeatures().reshape(-1);
    for (int i = 0; i < getFeatures().length(); i++) {
        double value = flat.getDouble(i);
        getFeatures().putScalar(i, value > cutoff ? 1 : 0);
    }
}
Binarizes the dataset such that any number greater than cutoff is 1 otherwise zero
19,721
/**
 * Replaces the label matrix with a fresh zeroed matrix of shape
 * (numExamples, labels), discarding all existing outcomes.
 */
public void setNewNumberOfLabels(int labels) {
    INDArray blankOutcomes = Nd4j.create(numExamples(), labels);
    setLabels(blankOutcomes);
}
Clears the outcome matrix setting a new number of labels
19,722
/**
 * Sets the label row of one example to a one-hot vector for the given label.
 *
 * <p>Fix: both bounds checks were off by one — {@code example > numExamples()}
 * and {@code label > numOutcomes()} accepted the out-of-range indices
 * {@code example == numExamples()} and {@code label == numOutcomes()} (valid
 * indices are 0..size-1). Both now use {@code >=}, and negative example
 * indices are rejected as well.</p>
 *
 * @param example 0-based row index of the example
 * @param label   0-based outcome class index
 * @throws IllegalArgumentException if either index is out of range
 */
public void setOutcome(int example, int label) {
    if (example >= numExamples() || example < 0)
        throw new IllegalArgumentException("No example at " + example);
    if (label >= numOutcomes() || label < 0)
        throw new IllegalArgumentException("Illegal label");
    INDArray outcome = FeatureUtil.toOutcomeVector(label, numOutcomes());
    getLabels().putRow(example, outcome);
}
Sets the outcome of a particular example
19,723
/**
 * Splits this dataset into consecutive mini-batches of at most {@code num}
 * examples each; the final batch may be smaller.
 */
public List<DataSet> batchBy(int num) {
    List<DataSet> batches = Lists.newArrayList();
    for (List<DataSet> chunk : Lists.partition(asList(), num)) {
        batches.add(DataSet.merge(chunk));
    }
    return batches;
}
Partitions a dataset into mini - batches where each dataset in each list is of the specified number of examples .
19,724
/**
 * Returns a new dataset containing only the examples whose outcome is one of
 * the given labels.
 *
 * <p>Improvement: label membership is tested against a HashSet instead of a
 * List, turning an O(labels) scan per example into O(1).</p>
 *
 * @param labels the label indices to keep
 * @return the merged dataset of matching examples
 */
public DataSet filterBy(int[] labels) {
    HashSet<Integer> wanted = new HashSet<>();
    for (int label : labels)
        wanted.add(label);
    List<DataSet> kept = new ArrayList<>();
    for (DataSet example : asList()) {
        if (wanted.contains(example.outcome())) {
            kept.add(example);
        }
    }
    return DataSet.merge(kept);
}
Strips the data transform of all but the passed in labels
19,725
/**
 * Restricts this dataset (in place) to the given labels and remaps those
 * labels onto the compact range 0..labels.length-1, in the order given.
 *
 * @param labels the original label indices to keep; labels[i] is remapped to i
 * @throws IllegalStateException if the rebuilt label matrix is inconsistent
 *         or an example's outcome was not found in the remap table
 */
public void filterAndStrip(int[] labels) {
    // Keep only examples with one of the requested outcomes.
    DataSet filtered = filterBy(labels);
    List<Integer> newLabels = new ArrayList<>();
    // old label -> new compact label (its position in the labels argument)
    Map<Integer, Integer> labelMap = new HashMap<>();
    for (int i = 0; i < labels.length; i++)
        labelMap.put(labels[i], i);
    for (int i = 0; i < filtered.numExamples(); i++) {
        DataSet example = filtered.get(i);
        int o2 = example.outcome();
        Integer outcome = labelMap.get(o2);
        newLabels.add(outcome);
    }
    // Rebuild a one-hot label matrix over the compact label space.
    INDArray newLabelMatrix = Nd4j.create(filtered.numExamples(), labels.length);
    if (newLabelMatrix.rows() != newLabels.size())
        throw new IllegalStateException("Inconsistent label sizes");
    for (int i = 0; i < newLabelMatrix.rows(); i++) {
        Integer i2 = newLabels.get(i);
        if (i2 == null)
            throw new IllegalStateException("Label not found on row " + i);
        INDArray newRow = FeatureUtil.toOutcomeVector(i2, labels.length);
        newLabelMatrix.putRow(i, newRow);
    }
    setFeatures(filtered.getFeatures());
    setLabels(newLabelMatrix);
}
Strips the dataset down to the specified labels and remaps them
19,726
/**
 * Partitions this dataset into merged batches of {@code num} examples each;
 * the final batch may be smaller.
 */
public List<DataSet> dataSetBatches(int num) {
    List<DataSet> merged = new ArrayList<>();
    for (List<DataSet> partition : Lists.partition(asList(), num)) {
        merged.add(DataSet.merge(partition));
    }
    return merged;
}
Partitions the data transform by the specified number .
19,727
/**
 * Reorders the dataset in place so that labels are interleaved (one example
 * of each label in turn), minimizing per-batch sampling skew while keeping
 * batching efficient. Once any label's queue runs dry, remaining examples
 * are appended from whichever queues still have elements.
 */
public void sortByLabel() {
    // Bucket every example by its outcome label.
    Map<Integer, Queue<DataSet>> map = new HashMap<>();
    List<DataSet> data = asList();
    int numLabels = numOutcomes();
    int examples = numExamples();
    for (DataSet d : data) {
        int label = d.outcome();
        Queue<DataSet> q = map.get(label);
        if (q == null) {
            q = new ArrayDeque<>();
            map.put(label, q);
        }
        q.add(d);
    }
    for (Map.Entry<Integer, Queue<DataSet>> label : map.entrySet()) {
        log.info("Label " + label + " has " + label.getValue().size() + " elements");
    }
    // "optimal" phase: round-robin one example per label until a queue empties.
    boolean optimal = true;
    for (int i = 0; i < examples; i++) {
        if (optimal) {
            for (int j = 0; j < numLabels; j++) {
                Queue<DataSet> q = map.get(j);
                if (q == null) {
                    optimal = false;
                    break;
                }
                DataSet next = q.poll();
                // NOTE(review): i is advanced both here and by the outer loop;
                // the row index is intentionally driven by this inner i++ while
                // the outer i++ also fires after the inner loop — confirm the
                // intended indexing before touching this.
                if (next != null) {
                    addRow(next, i);
                    i++;
                } else {
                    optimal = false;
                    break;
                }
            }
        } else {
            // Fallback phase: drain any non-empty queue into the next row.
            DataSet add = null;
            for (Queue<DataSet> q : map.values()) {
                if (!q.isEmpty()) {
                    add = q.poll();
                    break;
                }
            }
            addRow(add, i);
        }
    }
}
Organizes the dataset to minimize sampling error while still allowing efficient batching .
19,728
/**
 * Samples {@code numSamples} examples without replacement using the supplied RNG.
 */
public DataSet sample(int numSamples, org.nd4j.linalg.api.rng.Random rng) {
    final boolean withReplacement = false;
    return sample(numSamples, rng, withReplacement);
}
Sample without replacement
19,729
/**
 * Samples {@code numSamples} examples using the default ND4J random generator.
 */
public DataSet sample(int numSamples, boolean withReplacement) {
    org.nd4j.linalg.api.rng.Random defaultRng = Nd4j.getRandom();
    return sample(numSamples, defaultRng, withReplacement);
}
Sample a dataset numSamples times
19,730
/**
 * Samples examples from this dataset.
 *
 * <p>Fix: when sampling without replacement and every distinct example had
 * already been drawn, the previous code still added one duplicate example to
 * the result before terminating; the exhaustion check now stops the loop
 * before any duplicate is recorded.</p>
 *
 * @param numSamples      number of examples to draw
 * @param rng             random generator to use
 * @param withReplacement whether an example may be drawn more than once
 * @return the merged sampled dataset (fewer than numSamples examples when
 *         sampling without replacement exhausts the dataset)
 */
public DataSet sample(int numSamples, org.nd4j.linalg.api.rng.Random rng, boolean withReplacement) {
    Set<Integer> added = new HashSet<>();
    List<DataSet> toMerge = new ArrayList<>();
    for (int i = 0; i < numSamples; i++) {
        // Stop before re-picking once every distinct example has been drawn.
        if (!withReplacement && added.size() == numExamples())
            break;
        int picked = rng.nextInt(numExamples());
        if (!withReplacement) {
            while (added.contains(picked)) {
                picked = rng.nextInt(numExamples());
            }
        }
        added.add(picked);
        toMerge.add(get(picked));
    }
    return DataSet.merge(toMerge);
}
Sample a dataset
19,731
/**
 * Compresses the given array in place by swapping its backing buffer for a
 * compressed one and flagging it as compressed. Views cannot be compressed
 * in place.
 *
 * @throws UnsupportedOperationException if the array is a view
 */
public void compressi(INDArray array) {
    if (array.isView()) {
        throw new UnsupportedOperationException("Impossible to apply inplace compression on View");
    }
    DataBuffer compressed = compress(array.data());
    array.setData(compressed);
    array.markAsCompressed(true);
}
Inplace compression of INDArray
19,732
/**
 * Builds a compressed INDArray directly from a Java float array, bypassing
 * the usual INDArray instantiation path.
 *
 * @param data  the raw float values
 * @param shape the target shape
 * @param order the array ordering ('c' or 'f')
 * @return a compressed INDArray over the given data
 */
public INDArray compress(float[] data, int[] shape, char order) {
    FloatPointer dataPointer = new FloatPointer(data);
    long[] longShape = ArrayUtil.toLongArray(shape);
    DataBuffer shapeInfo = Nd4j.getShapeInfoProvider()
            .createShapeInformation(longShape, order, DataType.FLOAT)
            .getFirst();
    // 4 == byte width of a FLOAT element.
    DataBuffer compressed = compressPointer(DataTypeEx.FLOAT, dataPointer, data.length, 4);
    return Nd4j.createArrayFromShapeBuffer(compressed, shapeInfo);
}
This method creates compressed INDArray from Java float array skipping usual INDArray instantiation routines
19,733
/**
 * Initializes the model from the given graph by collecting every vertex's
 * degree and delegating to {@code initialize(int[])}.
 */
public void initialize(IGraph<V, E> graph) {
    int vertexCount = graph.numVertices();
    int[] vertexDegrees = new int[vertexCount];
    for (int v = 0; v < vertexCount; v++) {
        vertexDegrees[v] = graph.getVertexDegree(v);
    }
    initialize(vertexDegrees);
}
Initialize the DeepWalk model with a given graph .
19,734
/**
 * Fits the model on random walks of the given length over the graph,
 * initializing the model first if that has not been done yet. Disconnected
 * vertices self-loop during the walk.
 */
public void fit(IGraph<V, E> graph, int walkLength) {
    if (!initCalled) {
        initialize(graph);
    }
    GraphWalkIteratorProvider<V> walkProvider = new RandomWalkGraphIteratorProvider<>(
            graph, walkLength, seed, NoEdgeHandling.SELF_LOOP_ON_DISCONNECTED);
    fit(walkProvider);
}
Fit the model in parallel . This creates a set of GraphWalkIterators which are then distributed one to each thread
19,735
/**
 * Appends one new random hyperplane — a Gaussian row vector of length
 * {@code dim}, normalized by its max-norm — to the accumulated hyperplane
 * matrix.
 */
public void addRandomHyperPlane() {
    INDArray plane = Nd4j.randn(new int[] { 1, dim });
    plane.divi(plane.normmaxNumber());
    if (wholeHyperPlane == null) {
        wholeHyperPlane = plane;
    } else {
        // Stack the new plane as an extra row.
        wholeHyperPlane = Nd4j.concat(0, wholeHyperPlane, plane);
    }
}
Add a new random element to the hyper plane .
19,736
/**
 * Deserializes an ndarray from the given buffer starting at {@code offset},
 * returning both the array and the buffer positioned just past the consumed
 * bytes (so callers can continue reading subsequent records).
 *
 * Wire layout: int rank, int data type ordinal, shape-info longs, then raw
 * data (or a compression descriptor plus compressed payload).
 *
 * @param buffer the serialized bytes
 * @param offset position at which the record starts
 * @return pair of (deserialized array, advanced buffer)
 * @throws IllegalStateException if a negative rank indicates corruption
 */
public static Pair<INDArray, ByteBuffer> toArrayAndByteBuffer(ByteBuffer buffer, int offset) {
    // Heap-backed buffers are copied into a direct, native-ordered buffer first.
    ByteBuffer byteBuffer = buffer.hasArray()
            ? ByteBuffer.allocateDirect(buffer.array().length).put(buffer.array()).order(ByteOrder.nativeOrder())
            : buffer.order(ByteOrder.nativeOrder());
    byteBuffer.position(offset);
    int rank = byteBuffer.getInt();
    if (rank < 0)
        throw new IllegalStateException("Found negative integer. Corrupt serialization?");
    int shapeBufferLength = Shape.shapeInfoLength(rank);
    DataBuffer shapeBuff = Nd4j.createBufferDetached(new int[shapeBufferLength]);
    DataType type = DataType.values()[byteBuffer.getInt()];
    for (int i = 0; i < shapeBufferLength; i++) {
        shapeBuff.put(i, byteBuffer.getLong());
    }
    if (type != DataType.COMPRESSED) {
        // Uncompressed: wrap the remaining bytes, then advance the position
        // past exactly elementSize * length bytes of data.
        ByteBuffer slice = byteBuffer.slice();
        DataBuffer buff = Nd4j.createBuffer(slice, type, (int) Shape.length(shapeBuff));
        int position = byteBuffer.position() + (buff.getElementSize() * (int) buff.length());
        byteBuffer.position(position);
        INDArray arr = Nd4j.createArrayFromShapeBuffer(buff.dup(), shapeBuff.dup());
        return Pair.of(arr, byteBuffer);
    } else {
        // Compressed: read the descriptor, then wrap the compressed payload.
        CompressionDescriptor compressionDescriptor = CompressionDescriptor.fromByteBuffer(byteBuffer);
        ByteBuffer slice = byteBuffer.slice();
        BytePointer byteBufferPointer = new BytePointer(slice);
        CompressedDataBuffer compressedDataBuffer = new CompressedDataBuffer(byteBufferPointer, compressionDescriptor);
        INDArray arr = Nd4j.createArrayFromShapeBuffer(compressedDataBuffer.dup(), shapeBuff.dup());
        int compressLength = (int) compressionDescriptor.getCompressedLength();
        byteBuffer.position(byteBuffer.position() + compressLength);
        return Pair.of(arr, byteBuffer);
    }
}
Create an ndarray and existing bytebuffer
19,737
/**
 * Serializes the ndarray into a freshly allocated direct, native-ordered
 * ByteBuffer suitable for zero-copy transports (e.g. aeron). Views are
 * duplicated first so the data is contiguous.
 */
public static ByteBuffer toByteBuffer(INDArray arr) {
    if (arr.isView()) {
        arr = arr.dup();
    }
    // Both paths need the same direct allocation; only the put differs.
    ByteBuffer out = ByteBuffer.allocateDirect(byteBufferSizeFor(arr)).order(ByteOrder.nativeOrder());
    if (arr.isCompressed()) {
        doByteBufferPutCompressed(arr, out, true);
    } else {
        doByteBufferPutUnCompressed(arr, out, true);
    }
    return out;
}
Convert an ndarray to an unsafe buffer for use by aeron
19,738
/**
 * Computes the number of bytes needed to serialize the given ndarray: a
 * two-int header, the shape info, the data, and — for compressed arrays —
 * the compression descriptor. Auxiliary sizing helper for sending ndarrays
 * over the aeron media driver.
 */
public static int byteBufferSizeFor(INDArray arr) {
    final int headerBytes = 2 * 4; // two leading ints: rank and data type
    ByteBuffer dataBytes = arr.data().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
    ByteBuffer shapeBytes = arr.shapeInfoDataBuffer().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
    if (!arr.isCompressed()) {
        return headerBytes + dataBytes.limit() + shapeBytes.limit();
    }
    CompressedDataBuffer compressed = (CompressedDataBuffer) arr.data();
    CompressionDescriptor descriptor = compressed.getCompressionDescriptor();
    ByteBuffer descriptorBytes = descriptor.toByteBuffer();
    return headerBytes + dataBytes.limit() + shapeBytes.limit() + descriptorBytes.limit();
}
Returns the byte buffer size for the given ndarray . This is an auxiliary method for determining the size of the buffer to allocate for sending an ndarray via the aeron media driver .
19,739
/**
 * Serializes the ndarray in binary form and writes it to the given stream.
 *
 * <p>Fix: an {@code IOException} was previously printed via
 * {@code printStackTrace()} and swallowed, letting callers believe the write
 * succeeded; it is now rethrown as an unchecked exception with the original
 * cause preserved.</p>
 *
 * @param arr          the array to write
 * @param outputStream the destination stream (its channel is closed when done)
 * @throws RuntimeException wrapping any IOException raised while writing
 */
public static void writeArrayToOutputStream(INDArray arr, OutputStream outputStream) {
    ByteBuffer buffer = BinarySerde.toByteBuffer(arr);
    try (WritableByteChannel channel = Channels.newChannel(outputStream)) {
        channel.write(buffer);
    } catch (IOException e) {
        throw new RuntimeException("Failed to write INDArray to output stream", e);
    }
}
Write an array to an output stream .
19,740
/**
 * Writes the ndarray to the given file in the binary serde format.
 *
 * @throws IOException if the file cannot be written
 */
public static void writeArrayToDisk(INDArray arr, File toWrite) throws IOException {
    try (FileOutputStream os = new FileOutputStream(toWrite)) {
        FileChannel channel = os.getChannel();
        ByteBuffer serialized = BinarySerde.toByteBuffer(arr);
        channel.write(serialized);
    }
}
Write an ndarray to disk in binary format
19,741
/**
 * Reads a binary-serialized ndarray from the given file.
 *
 * @param readFrom file previously written via writeArrayToDisk
 * @return the deserialized array
 * @throws IOException if the file cannot be read
 */
public static INDArray readFromDisk(File readFrom) throws IOException {
    try (FileInputStream os = new FileInputStream(readFrom)) {
        FileChannel channel = os.getChannel();
        // The whole file fits in one direct buffer sized to the file length.
        ByteBuffer buffer = ByteBuffer.allocateDirect((int) readFrom.length());
        channel.read(buffer);
        // NOTE(review): the buffer is not flipped after the read; presumably
        // toArray() repositions it internally before decoding — TODO confirm.
        INDArray ret = toArray(buffer);
        return ret;
    }
}
Read an ndarray from disk
19,742
/**
 * Reads only the shape-information DataBuffer from a previously saved
 * ndarray file, without loading the array data itself.
 *
 * <p>Fix: the original contained a dead null-check ternary
 * ({@code buffer == null ? ...buffer.array()... : ...}) whose "null" branch
 * dereferenced the very buffer it had just found to be null — and the buffer
 * is freshly allocated so it can never be null anyway. The branch is removed
 * and the buffer is simply switched to native byte order.</p>
 *
 * @param readFrom the file whose shape header should be read
 * @return a LONG DataBuffer holding the shape information
 * @throws IOException if the file cannot be read
 */
public static DataBuffer readShapeFromDisk(File readFrom) throws IOException {
    try (FileInputStream os = new FileInputStream(readFrom)) {
        FileChannel channel = os.getChannel();
        // Read at most the maximal shape-info header (up to 32 dimensions).
        int len = (int) Math.min((32 * 2 + 3) * 8, readFrom.length());
        ByteBuffer buffer = ByteBuffer.allocateDirect(len);
        channel.read(buffer);
        ByteBuffer byteBuffer = buffer.order(ByteOrder.nativeOrder());
        byteBuffer.position(0);
        int rank = byteBuffer.getInt();
        long[] result = new long[Shape.shapeInfoLength(rank)];
        result[0] = rank;
        // Skip the 16-byte header (rank + data type) before the shape entries.
        byteBuffer.position(16);
        for (int e = 1; e < Shape.shapeInfoLength(rank); e++) {
            result[e] = byteBuffer.getLong();
        }
        return Nd4j.getDataBufferFactory().createLong(result);
    }
}
This method returns the shape DataBuffer from a previously saved file .
19,743
/**
 * Creates (or opens) the "updated" storage map, backed either by an
 * in-memory MapDB store or — when a storage file is configured — by a
 * transactional file store that closes on JVM shutdown.
 */
public Map<Integer, Long> createUpdatedMap() {
    db = (storageFile == null)
            ? DBMaker.memoryDB().make()
            : DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable().make();
    updated = db.hashMap("updated")
            .keySerializer(Serializer.INTEGER)
            .valueSerializer(Serializer.LONG)
            .createOrOpen();
    return updated;
}
Create the storage map
19,744
/**
 * Picks the CUDA device the given thread should map to, using round-robin
 * balancing over the configured available devices. When single-GPU mode is
 * forced (or no devices are reported), the first available device is used.
 *
 * @param threadId id of the thread being mapped
 * @return the chosen device id
 */
protected Integer getNextDevice(long threadId) {
    Integer device = null;
    if (!CudaEnvironment.getInstance().getConfiguration().isForcedSingleGPU() && getNumberOfDevices() > 0) {
        // Synchronized so the pointer increment + wrap-around stays consistent.
        synchronized (this) {
            device = CudaEnvironment.getInstance().getConfiguration().getAvailableDevices().get(devPtr.getAndIncrement());
            // Wrap the round-robin pointer back to the first device.
            if (devPtr.get() >= CudaEnvironment.getInstance().getConfiguration().getAvailableDevices().size())
                devPtr.set(0);
            // Only resolve the thread name when we are mapping the current thread.
            val t = Thread.currentThread();
            val n = t.getId() == threadId ? t.getName() : "N/A";
            logger.debug("Mapping thread [{} - {}] to device [{}], out of [{}] devices...", threadId, n, device, CudaEnvironment.getInstance().getConfiguration().getAvailableDevices().size());
        }
    } else {
        device = CudaEnvironment.getInstance().getConfiguration().getAvailableDevices().get(0);
        logger.debug("Single device is forced, mapping to device [{}]", device);
    }
    return device;
}
This method returns device id available . Round - robin balancing used here .
19,745
/**
 * Returns the number of devices available in the system, querying the native
 * ops lazily on first call.
 */
public int getNumberOfDevices() {
    // Double-checked lazy initialization.
    // NOTE(review): the outer guard is (< 0) but the inner guard is (< 1);
    // these only agree if the counter starts at a negative sentinel and is
    // never set to 0 — TODO confirm the field's initial value.
    if (numberOfDevices.get() < 0) {
        synchronized (this) {
            if (numberOfDevices.get() < 1) {
                numberOfDevices.set(NativeOpsHolder.getInstance().getDeviceNativeOps().getAvailableDevices());
            }
        }
    }
    return numberOfDevices.get();
}
This method returns number of available devices in system .
19,746
/**
 * Replicates the given INDArray onto the target device, temporarily
 * switching the current thread's device context when needed and restoring it
 * afterwards. String arrays are simply duplicated; views cannot be replicated.
 *
 * @param deviceId target device id
 * @param array    the array to replicate (null passes through as null)
 * @return the replicated array on the target device
 * @throws UnsupportedOperationException if the array is a view
 */
public synchronized INDArray replicateToDevice(Integer deviceId, INDArray array) {
    if (array == null)
        return null;
    // String (UTF8) arrays: a plain duplicate is sufficient.
    if (array.isS())
        return array.dup(array.ordering());
    if (array.isView())
        throw new UnsupportedOperationException("It's impossible to replicate View");
    val shape = array.shape();
    val stride = array.stride();
    val elementWiseStride = array.elementWiseStride();
    val ordering = array.ordering();
    val length = array.length();
    val dtype = array.dataType();
    // Ensure the source data is actualized on its current device first.
    AtomicAllocator.getInstance().getPointer(array, (CudaContext) AtomicAllocator.getInstance().getDeviceContext().getContext());
    int currentDeviceId = getDeviceForCurrentThread();
    // Switch this thread to the target device for the copy, if needed.
    if (currentDeviceId != deviceId.intValue()) {
        Nd4j.getMemoryManager().releaseCurrentContext();
        NativeOpsHolder.getInstance().getDeviceNativeOps().setDevice(deviceId);
        attachThreadToDevice(Thread.currentThread().getId(), deviceId);
    }
    DataBuffer newDataBuffer = replicateToDevice(deviceId, array.data());
    DataBuffer newShapeBuffer = Nd4j.getShapeInfoProvider().createShapeInformation(shape, stride, elementWiseStride, ordering, dtype).getFirst();
    INDArray result = Nd4j.createArrayFromShapeBuffer(newDataBuffer, newShapeBuffer);
    // Restore the thread's original device context.
    if (currentDeviceId != deviceId.intValue()) {
        Nd4j.getMemoryManager().releaseCurrentContext();
        attachThreadToDevice(Thread.currentThread().getId(), currentDeviceId);
        NativeOpsHolder.getInstance().getDeviceNativeOps().setDevice(currentDeviceId);
    }
    return result;
}
This method replicates given INDArray and places it to target device .
19,747
/**
 * Replicates the given DataBuffer onto the target device, temporarily
 * switching the current thread's device context when needed and restoring it
 * afterwards.
 *
 * @param deviceId target device id
 * @param buffer   the buffer to replicate (null passes through as null)
 * @return a new buffer on the target device holding a copy of the data
 */
public DataBuffer replicateToDevice(Integer deviceId, DataBuffer buffer) {
    if (buffer == null)
        return null;
    int currentDeviceId = AtomicAllocator.getInstance().getDeviceId();
    // Switch this thread to the target device for the allocation + copy.
    if (currentDeviceId != deviceId) {
        Nd4j.getMemoryManager().releaseCurrentContext();
        NativeOpsHolder.getInstance().getDeviceNativeOps().setDevice(deviceId);
        Nd4j.getAffinityManager().attachThreadToDevice(Thread.currentThread().getId(), deviceId);
    }
    // Allocate uninitialized (false) since memcpy overwrites everything.
    DataBuffer dstBuffer = Nd4j.createBuffer(buffer.dataType(), buffer.length(), false);
    AtomicAllocator.getInstance().memcpy(dstBuffer, buffer);
    // Restore the thread's original device context.
    if (currentDeviceId != deviceId) {
        Nd4j.getMemoryManager().releaseCurrentContext();
        NativeOpsHolder.getInstance().getDeviceNativeOps().setDevice(currentDeviceId);
        Nd4j.getAffinityManager().attachThreadToDevice(Thread.currentThread().getId(), currentDeviceId);
    }
    return dstBuffer;
}
This method replicates given DataBuffer and places it to target device .
19,748
/**
 * Deserializes a ComputationGraphSpace from its raw YAML representation.
 *
 * @throws RuntimeException wrapping any parse/IO failure
 */
public static ComputationGraphSpace fromYaml(String yaml) {
    try {
        return YamlMapper.getMapper().readValue(yaml, ComputationGraphSpace.class);
    } catch (IOException parseFailure) {
        throw new RuntimeException(parseFailure);
    }
}
Instantiate a computation graph space from a raw yaml string
19,749
/**
 * Drops all cached constants: resets the buffer cache, purges the protector,
 * and rewinds every per-device constant offset back to zero.
 */
public void purgeConstants() {
    buffersCache = new HashMap<>();
    protector.purgeProtector();
    resetHappened = true;
    logger.info("Resetting Constants...");
    for (Integer dev : constantOffsets.keySet()) {
        // Rewind this device's allocation offset and give it a fresh, empty cache map.
        constantOffsets.get(dev).set(0);
        buffersCache.put(dev, new ConcurrentHashMap<ArrayDescriptor, DataBuffer>());
    }
}
This method removes all cached constants
19,750
// Reserves memorySize bytes of device memory for the given thread/device pair under the
// device's write lock.
// NOTE(review): despite the "IfPossible" name, this implementation never declines — it
// unconditionally adds to the reserved space and returns true. Confirm whether a capacity
// check was intended here, or whether other implementations of this interface perform it.
public boolean reserveAllocationIfPossible ( Long threadId , Integer deviceId , long memorySize ) { ensureThreadRegistered ( threadId , deviceId ) ; try { deviceLocks . get ( deviceId ) . writeLock ( ) . lock ( ) ; addToReservedSpace ( deviceId , memorySize ) ; return true ; } finally { deviceLocks . get ( deviceId ) . writeLock ( ) . unlock ( ) ; } }
This method reserves memory within allocator
19,751
/**
 * Multiplies every tracked memory figure by the given factor, including the
 * per-cache-mode maps (rebuilt via scaleEntries).
 *
 * @param scale multiplier applied to all memory usage numbers
 */
public void scale(int scale) {
    parameterSize *= scale;
    updaterStateSize *= scale;
    workingMemoryFixedInference *= scale;
    workingMemoryVariableInference *= scale;
    cacheModeMemFixed = scaleEntries(cacheModeMemFixed, scale);
    cacheModeMemVariablePerEx = scaleEntries(cacheModeMemVariablePerEx, scale);
}
Multiply all memory usage by the specified scaling factor
19,752
/**
 * Computes the distance from this cluster's center to the given point, using the
 * cluster's configured distance function executed on the ND4J backend.
 *
 * @param point the point to measure
 * @return distance between the cluster center and the point
 */
public double getDistanceToCenter(Point point) {
    val op = ClusterUtils.createDistanceFunctionOp(distanceFunction, center.getArray(), point.getArray());
    return Nd4j.getExecutioner().execAndReturn(op).getFinalResult().doubleValue();
}
Get the distance to the given point from the cluster
19,753
/**
 * Adds a point to this cluster, optionally recomputing the center as the running
 * mean of its members (in-place on the center's array).
 *
 * @param point             point to add
 * @param moveClusterCenter whether to update the center to include the new point
 */
public void addPoint(Point point, boolean moveClusterCenter) {
    if (moveClusterCenter) {
        int n = points.size();
        // center = (center * n ± point) / (n + 1); inverse clusters subtract instead of add.
        if (isInverse()) {
            center.getArray().muli(n).subi(point.getArray()).divi(n + 1);
        } else {
            center.getArray().muli(n).addi(point.getArray()).divi(n + 1);
        }
    }
    getPoints().add(point);
}
Add a point to the cluster
19,754
/**
 * Looks up a point in this cluster by id.
 *
 * @param id point id to search for
 * @return the first point whose id matches, or null if none does
 */
public Point getPoint(String id) {
    for (Point candidate : points) {
        if (id.equals(candidate.getId())) {
            return candidate;
        }
    }
    return null;
}
Return the point with the given id
19,755
/**
 * Removes the point with the given id from this cluster and returns it.
 * If several points share the id, the last matching one is removed (the scan
 * deliberately continues to the end, matching the original semantics).
 *
 * @param id point id to remove
 * @return the removed point, or null if no point matched
 */
public Point removePoint(String id) {
    Point match = null;
    for (Point candidate : points) {
        if (id.equals(candidate.getId())) {
            match = candidate;
        }
    }
    if (match != null) {
        points.remove(match);
    }
    return match;
}
Remove the point and return it
19,756
/**
 * Builds a map associating every CacheMode with the given value.
 * The all-zeros case returns a shared precomputed constant instead of allocating.
 *
 * @param value value to associate with each cache mode
 * @return map with one entry per CacheMode, all mapped to {@code value}
 */
public static Map<CacheMode, Long> cacheModeMapFor(long value) {
    if (value == 0) {
        return CACHE_MODE_ALL_ZEROS;
    }
    Map<CacheMode, Long> result = new HashMap<>();
    for (CacheMode mode : CacheMode.values()) {
        result.put(mode, value);
    }
    return result;
}
Get a map of CacheMode with all keys associated with the specified value
19,757
/**
 * Runs the network forward on an MLLib feature matrix.
 * Converts to the ND4J representation, performs the forward pass, and converts back.
 *
 * @param features MLLib matrix of input features
 * @return MLLib matrix of network outputs
 */
public Matrix predict(Matrix features) {
    return MLLibUtil.toMatrix(network.output(MLLibUtil.toMatrix(features)));
}
Predict the given feature matrix
19,758
/**
 * Runs the network forward on a single MLLib vector.
 * Converts to the ND4J representation, performs the forward pass, and converts back.
 *
 * @param point MLLib input vector
 * @return MLLib vector of network outputs
 */
public Vector predict(Vector point) {
    return MLLibUtil.toVector(network.output(MLLibUtil.toVector(point)));
}
Predict the given vector
19,759
/**
 * Trains the network on the given RDD of DataSets via the configured TrainingMaster,
 * then bumps the network's epoch counter.
 *
 * @param trainingData RDD of training DataSets
 * @return the (mutated) underlying network, for chaining
 */
public MultiLayerNetwork fit(JavaRDD<DataSet> trainingData) {
    // Grid executioners buffer ops; flush before distributed training starts.
    if (Nd4j.getExecutioner() instanceof GridExecutioner) {
        ((GridExecutioner) Nd4j.getExecutioner()).flushQueue();
    }
    trainingMaster.executeTraining(this, trainingData);
    network.incrementEpochCount();
    return network;
}
Fit the DataSet RDD
19,760
/**
 * Trains the network on Spark MLLib LabeledPoint data.
 * The number of output classes is read from the final (output) layer's nOut, the
 * labeled points are converted to DL4J DataSets, and training is delegated to fit().
 *
 * @param rdd RDD of labeled points with categorical labels
 * @return the trained network
 */
public MultiLayerNetwork fitLabeledPoint(JavaRDD<LabeledPoint> rdd) {
    int layerCount = network.getLayerWiseConfigurations().getConfs().size();
    FeedForwardLayer outputLayer =
            (FeedForwardLayer) network.getLayerWiseConfigurations().getConf(layerCount - 1).getLayer();
    JavaRDD<DataSet> dataSets = MLLibUtil.fromLabeledPoint(sc, rdd, outputLayer.getNOut());
    return fit(dataSets);
}
Fit a MultiLayerNetwork using Spark MLLib LabeledPoint instances . This will convert the labeled points to the internal DL4J data format and train the model on that
19,761
/**
 * Trains the network on LabeledPoint data with continuous (regression) labels.
 * Conversion to the DL4J DataSet format is handled by MLLibUtil; training is
 * delegated to fit().
 *
 * @param rdd RDD of labeled points with continuous labels
 * @return the trained network
 */
public MultiLayerNetwork fitContinuousLabeledPoint(JavaRDD<LabeledPoint> rdd) {
    return fit(MLLibUtil.fromContinuousLabeledPoint(sc, rdd));
}
Fits a MultiLayerNetwork using Spark MLLib LabeledPoint instances This will convert labeled points that have continuous labels used for regression to the internal DL4J data format and train the model on that
19,762
/**
 * Builds a predicate that matches a function exactly when its runtime class is
 * identical to the given class (no subclass matching).
 *
 * @param c the class to compare against
 * @return predicate testing exact class identity
 */
public static OpPredicate classEquals(final Class<?> c) {
    return new OpPredicate() {
        @Override
        public boolean matches(SameDiff sameDiff, DifferentialFunction function) {
            return c == function.getClass();
        }
    };
}
Return true if the operation class is equal to the specified class
19,763
/**
 * Exposes a DirectBuffer as a java.nio.ByteBuffer.
 * If the DirectBuffer is already backed by a ByteBuffer, that backing buffer is
 * returned directly; otherwise a new direct ByteBuffer is allocated and filled
 * from the byte array.
 * NOTE(review): in the allocate-and-copy path the returned buffer's position is
 * left at the end after put() — confirm callers rewind/flip before reading.
 *
 * @param directBuffer source buffer
 * @return a ByteBuffer view or copy of the source contents
 */
public static ByteBuffer getDirectByteBuffer(DirectBuffer directBuffer) {
    ByteBuffer backing = directBuffer.byteBuffer();
    if (backing != null) {
        return backing;
    }
    return ByteBuffer.allocateDirect(directBuffer.capacity()).put(directBuffer.byteArray());
}
Get the direct byte buffer from the given direct buffer
19,764
/**
 * Returns the value associated with the given key in this trie, or null if absent.
 * Only String keys are supported; the empty string is stored specially at the
 * root's right child.
 *
 * @param key the key to look up (must be a non-null String)
 * @return the associated value, or null if the key is not present
 * @throws NullPointerException if key is null
 * @throws ClassCastException   if key is not a String
 */
public V get(Object key) {
    if (key == null) {
        throw new NullPointerException("Key can not be null");
    }
    if (!(key instanceof String)) {
        throw new ClassCastException("Only String keys are supported -- got " + key.getClass());
    }
    if (key.equals("")) {
        // Empty-string entry lives at root.right.
        PatriciaNode<V> emptyEntry = root.getRight();
        return emptyEntry == null ? null : emptyEntry.getValue();
    }
    // findNearestNode returns the closest candidate; a real hit requires an exact key match.
    PatriciaNode<V> nearest = findNearestNode((String) key);
    return key.equals(nearest.getKey()) ? nearest.getValue() : null;
}
Get value associated with specified key in this trie
19,765
/**
 * Tests whether the given key is present in this trie.
 * Note: since lookup is delegated to get(), a key explicitly mapped to null is
 * indistinguishable from an absent key.
 *
 * @param key the key to test (must be a non-null String)
 * @return true if a non-null value is mapped to the key
 * @throws NullPointerException if key is null
 * @throws ClassCastException   if key is not a String
 */
public boolean containsKey(Object key) {
    if (key == null) {
        throw new NullPointerException("Key can not be null");
    }
    if (!(key instanceof String)) {
        throw new ClassCastException("Only String keys are supported -- got " + key.getClass());
    }
    return get(key) != null;
}
Test membership in this trie
19,766
/**
 * Returns a snapshot of the keys in this trie as a new Set.
 * The returned set is a copy: mutations on it do not affect the trie.
 *
 * @return set containing every key currently stored
 */
public Set<String> keySet() {
    Set<String> result = new HashSet<>();
    keysR(root.getLeft(), -1, result);
    return result;
}
Returns a copy of the keys contained in this trie as a Set
19,767
/**
 * Returns a snapshot of the values in this trie as a new Collection (backed by a List).
 * The returned collection is a copy: mutations on it do not affect the trie.
 *
 * @return collection containing every value currently stored
 */
public Collection<V> values() {
    List<V> result = new ArrayList<>();
    valuesR(root.getLeft(), -1, result);
    return result;
}
Returns a copy of the values contained in this trie as a Collection
19,768
/**
 * Removes all key-value pairs by rebuilding the sentinel root node
 * (a node with bit index -1 whose left pointer loops back to itself)
 * and zeroing the entry count.
 */
public void clear() {
    PatriciaNode<V> sentinel = new PatriciaNode<>(null, null, -1);
    sentinel.setLeft(sentinel);
    root = sentinel;
    entries = 0;
}
Clears this trie by removing all its key - value pairs
19,769
/**
 * Tests whether the given value is present in this trie.
 * Performs a linear scan over a snapshot of the values.
 *
 * Fix: the previous implementation called {@code v.equals(value)} directly and
 * threw a NullPointerException whenever a stored value was null; the comparison
 * is now null-safe, and a null argument correctly matches stored nulls.
 *
 * @param value value to search for (may be null)
 * @return true if at least one mapping has this value
 */
public boolean containsValue(Object value) {
    for (V stored : values()) {
        if (stored == null ? value == null : stored.equals(value)) {
            return true;
        }
    }
    return false;
}
Predicate to test value membership
19,770
/**
 * Returns a snapshot of the mappings in this trie as a Set of entries.
 * Collects all pairs into a temporary HashMap and returns its entry set;
 * mutations on the result do not affect the trie.
 * (Local renamed from "entries" to avoid shadowing the size field of the same name.)
 *
 * @return set of key/value entries currently stored
 */
public Set<Entry<String, V>> entrySet() {
    HashMap<String, V> snapshot = new HashMap<>();
    entriesR(root.getLeft(), -1, snapshot);
    return snapshot.entrySet();
}
Returns a copy of the mappings contained in this trie as a Set
19,771
// Walks the trie toward the node whose key is "nearest" to the given key.
// Classic PATRICIA descent: at each node, the stored bit index selects left (bit clear)
// or right (bit set) using keyMapper. The walk terminates at an upward (back) edge,
// detected by the invariant that bit indices strictly increase on downward edges —
// i.e. when parent.getBit() >= current.getBit() the edge points back up.
// The caller must still compare keys: the returned node is only a candidate match.
private PatriciaNode < V > findNearestNode ( String key ) { PatriciaNode < V > current = root . getLeft ( ) ; PatriciaNode < V > parent = root ; while ( parent . getBit ( ) < current . getBit ( ) ) { parent = current ; if ( ! keyMapper . isSet ( current . getBit ( ) , key ) ) { current = current . getLeft ( ) ; } else { current = current . getRight ( ) ; } } return current ; }
Finds the closest node in the trie matching key
19,772
// Returns the index of the leftmost bit at which the two keys differ, scanning
// bit-by-bit through keyMapper.
// NOTE(review): this loops forever if the keys never differ — presumably keyMapper
// encodes string termination so two distinct keys always differ at some bit, and
// callers never pass equal keys; confirm against keyMapper's contract.
private int findFirstDifferingBit ( String key1 , String key2 ) { int bit = 0 ; while ( keyMapper . isSet ( bit , key1 ) == keyMapper . isSet ( bit , key2 ) ) { bit ++ ; } return bit ; }
Returns the leftmost differing bit index when doing a bitwise comparison of key1 and key2
19,773
// Splices a pre-built node into the PATRICIA trie.
// Phase 1: descend (as in findNearestNode) but stop early once the next node's bit
// index would exceed the new node's bit index — that is the insertion point.
// Phase 2: wire the new node's own left/right pointers: the side selected by its
// differing bit points back at itself (self-edge marks "key ends here"), the other
// side points at the node currently occupying the spot.
// Phase 3: re-point the parent's child link (left or right, chosen by the new node's
// key at the parent's bit) to the new node.
// Pointer assignments are order-critical; do not reorder.
private void insertNode ( PatriciaNode < V > node ) { PatriciaNode < V > current = root . getLeft ( ) ; PatriciaNode < V > parent = root ; while ( parent . getBit ( ) < current . getBit ( ) && current . getBit ( ) < node . getBit ( ) ) { parent = current ; if ( ! keyMapper . isSet ( current . getBit ( ) , node . getKey ( ) ) ) { current = current . getLeft ( ) ; } else { current = current . getRight ( ) ; } } if ( ! keyMapper . isSet ( node . getBit ( ) , node . getKey ( ) ) ) { node . setLeft ( node ) ; node . setRight ( current ) ; } else { node . setLeft ( current ) ; node . setRight ( node ) ; } if ( ! keyMapper . isSet ( parent . getBit ( ) , node . getKey ( ) ) ) { parent . setLeft ( node ) ; } else { parent . setRight ( node ) ; } }
Inserts a node into this trie
19,774
// Returns the next label in sequence. When an external label list was supplied, the
// label at the current counter position is returned (counter is post-incremented);
// otherwise a label is synthesized via formatLabel from the counter value, and
// maxCount tracks the highest value handed out. Synchronized so concurrent callers
// get distinct labels and maxCount stays consistent with the counter.
public synchronized String nextLabel ( ) { if ( labels != null ) { return labels . get ( ( ( Long ) counter . getAndIncrement ( ) ) . intValue ( ) ) ; } else { maxCount = counter . getAndIncrement ( ) ; return formatLabel ( maxCount ) ; } }
Returns next label .
19,775
/**
 * Returns the labels known to this generator.
 * If an external label list was supplied (and is non-empty) it is returned as-is;
 * otherwise the labels handed out so far are regenerated from the counter via
 * formatLabel.
 *
 * @return the external label list, or a freshly generated list
 */
public List<String> getLabels() {
    if (labels != null && !labels.isEmpty()) {
        return labels;
    }
    List<String> generated = new ArrayList<>();
    for (long idx = 0; idx < counter.get(); idx++) {
        generated.add(formatLabel(idx));
    }
    return generated;
}
This method returns the list of labels used by this generator instance . If an external list of labels was used as the source , the whole list will be returned .
19,776
/**
 * Records a label obtained from an external source, skipping duplicates.
 * Lazily creates the label list on first use; the uniq set guards against
 * storing the same label twice.
 *
 * @param label label to store
 */
public void storeLabel(String label) {
    if (labels == null) {
        labels = new ArrayList<>();
    }
    if (uniq.contains(label)) {
        return;
    }
    uniq.add(label);
    labels.add(label);
}
This method is intended for storing labels retrieved from external sources .
19,777
/**
 * Returns how many labels have been used so far: the external list's size when one
 * was supplied, otherwise the highest generated index plus one.
 *
 * @return number of labels used up to this call
 */
public int getNumberOfLabelsUsed() {
    if (labels != null && !labels.isEmpty()) {
        return labels.size();
    }
    // maxCount is the last index handed out, so count = maxCount + 1.
    return (int) (maxCount + 1);
}
This method returns the number of labels used up to the moment of the call
19,778
// Formats a millisecond duration as a human-readable string ("1 yr 2 months 3 days ..."),
// using Joda-Time. Milliseconds are truncated to whole seconds, normalized into
// year/month/day/time fields, then printed with per-field suffixes.
// The suffix strings carry deliberate leading/trailing spaces so adjacent fields are
// separated correctly — do not "clean them up".
public static String formatDuration ( long durationMs ) { Period period = Period . seconds ( ( int ) ( durationMs / 1000L ) ) ; Period p2 = period . normalizedStandard ( PeriodType . yearMonthDayTime ( ) ) ; PeriodFormatter formatter = new PeriodFormatterBuilder ( ) . appendYears ( ) . appendSuffix ( " yr " ) . appendMonths ( ) . appendSuffix ( " months " ) . appendDays ( ) . appendSuffix ( " days " ) . appendHours ( ) . appendSuffix ( " hr " ) . appendMinutes ( ) . appendSuffix ( " min " ) . appendSeconds ( ) . appendSuffix ( " sec" ) . toFormatter ( ) ; return formatter . print ( p2 ) ; }
Format the duration in milliseconds to a human readable String with yr , days , hr etc . suffixes
19,779
// Applies a pipeline of image transforms to the input. Each (transform, probability)
// pair fires when probability is exactly 1.0 or the rng draw passes; applied
// transforms are recorded in currentTransforms for later inspection/invert.
// NOTE(review): when shuffle is enabled, Collections.shuffle uses the default global
// random source rather than the supplied `random` or the field `rng` — this breaks
// seeded reproducibility; confirm whether shuffle should take rng as a second argument.
// NOTE(review): selection probability is drawn from the field `rng` even when a
// per-call `random` is supplied (only the transform itself uses `random`) — verify
// this asymmetry is intentional.
protected ImageWritable doTransform ( ImageWritable image , Random random ) { if ( shuffle ) { Collections . shuffle ( imageTransforms ) ; } currentTransforms . clear ( ) ; for ( Pair < ImageTransform , Double > tuple : imageTransforms ) { if ( tuple . getSecond ( ) == 1.0 || rng . nextDouble ( ) < tuple . getSecond ( ) ) { currentTransforms . add ( tuple . getFirst ( ) ) ; image = random != null ? tuple . getFirst ( ) . transform ( image , random ) : tuple . getFirst ( ) . transform ( image ) ; } } return image ; }
Takes an image and executes a pipeline of combined transforms .
19,780
// Garbage-collects stored updates that every consumer has already applied.
// Skips entirely until all expected consumers have registered a position (otherwise
// an update could be deleted before a late consumer sees it). Computes the minimum
// applied index across consumers and removes every update between the last deleted
// index and that minimum, then advances lastDeletedIndex. Synchronized so only one
// maintenance pass mutates the updates map at a time.
protected synchronized void maintenance ( ) { if ( positions . size ( ) < expectedConsumers ) { log . trace ( "Skipping maintanance due to not all expected consumers shown up: [{}] vs [{}]" , positions . size ( ) , expectedConsumers ) ; return ; } val minIdx = maxAppliedIndexEverywhere ( ) ; val allPositions = new long [ positions . size ( ) ] ; int cnt = 0 ; for ( val p : positions . values ( ) ) allPositions [ cnt ++ ] = p . get ( ) ; log . trace ( "Min idx: {}; last deleted index: {}; stored updates: {}; positions: {}" , minIdx , lastDeletedIndex . get ( ) , updates . size ( ) , allPositions ) ; if ( minIdx > lastDeletedIndex . get ( ) ) { for ( long e = lastDeletedIndex . get ( ) ; e < minIdx ; e ++ ) { updates . remove ( e ) ; } lastDeletedIndex . set ( minIdx ) ; } }
This method performs maintenance of the stored updates , removing entries that have already been applied by all expected consumers .
19,781
/**
 * Increments the count of every element in the collection by the same amount.
 *
 * @param elements elements whose counts are incremented
 * @param inc      increment applied to each element
 */
public void incrementAll(Collection<T> elements, double inc) {
    for (T item : elements) {
        incrementCount(item, inc);
    }
}
This method will increment all elements in collection
19,782
/**
 * Adds every count from the other counter into this one, key by key.
 *
 * @param other counter whose counts are merged in; its key type may be any subtype of T
 */
public <T2 extends T> void incrementAll(Counter<T2> other) {
    for (T2 key : other.keySet()) {
        incrementCount(key, other.getCount(key));
    }
}
This method will increment counts of this counter by counts from other counter
19,783
// Sets the counter value for an element and returns the previous value (0 if absent).
// New elements are added to the running total immediately; overwriting an existing
// element does NOT adjust totalCount — it only flips the dirty flag, signalling that
// totals must be rebuilt (presumably via rebuildTotals) before being trusted.
public double setCount ( T element , double count ) { AtomicDouble t = map . get ( element ) ; if ( t != null ) { double val = t . getAndSet ( count ) ; dirty . set ( true ) ; return val ; } else { map . put ( element , new AtomicDouble ( count ) ) ; totalCount . addAndGet ( count ) ; return 0 ; } }
This method sets new counter value for given element
19,784
/**
 * Returns the elements ordered by their counts, by draining the counter's
 * priority-queue view.
 *
 * @return list of elements sorted by count (queue order)
 */
public List<T> keySetSorted() {
    List<T> sorted = new ArrayList<>();
    PriorityQueue<Pair<T, Double>> queue = asPriorityQueue();
    while (!queue.isEmpty()) {
        sorted.add(queue.poll().getFirst());
    }
    return sorted;
}
This method returns List of elements sorted by their counts
19,785
// Normalizes all counts so they sum to 1, dividing each count by the running total.
// Safe to read totalCount inside the loop because setCount on an existing key does
// not modify totalCount (it only marks the counter dirty). rebuildTotals() at the
// end recomputes the total (now ~1.0) and clears the dirty state.
public void normalize ( ) { for ( T key : keySet ( ) ) { setCount ( key , getCount ( key ) / totalCount . get ( ) ) ; } rebuildTotals ( ) ; }
This method will apply normalization to counter values and totals .
19,786
/**
 * Removes an element from the counter and returns its count (0.0 if absent).
 * Does not adjust totalCount directly; the dirty flag signals totals need rebuilding.
 *
 * @param element element to remove
 * @return the removed element's count, or 0.0 if it was not present
 */
public double removeKey(T element) {
    AtomicDouble removed = map.remove(element);
    dirty.set(true);
    return removed == null ? 0.0 : removed.get();
}
This method removes given key from counter
19,787
/**
 * Returns the element with the highest count, or null if the counter is empty.
 * The null-key guard guarantees the first entry is accepted even when its count
 * equals the sentinel minimum.
 *
 * @return element with the maximum count, or null when empty
 */
public T argMax() {
    T bestKey = null;
    double bestCount = -Double.MAX_VALUE;
    for (Map.Entry<T, AtomicDouble> entry : map.entrySet()) {
        double current = entry.getValue().get();
        if (bestKey == null || current > bestCount) {
            bestKey = entry.getKey();
            bestCount = current;
        }
    }
    return bestKey;
}
This method returns element with highest counter value
19,788
// Removes every element whose count is strictly below the threshold, via the key-set
// iterator, and marks the counter dirty so totals get rebuilt.
// NOTE(review): correctness depends on keySet() returning a live view of the backing
// map (so iterator.remove() actually removes the entry); if keySet() ever returns a
// defensive copy, this method silently becomes a no-op — confirm keySet()'s contract.
public void dropElementsBelowThreshold ( double threshold ) { Iterator < T > iterator = keySet ( ) . iterator ( ) ; while ( iterator . hasNext ( ) ) { T element = iterator . next ( ) ; double val = map . get ( element ) . get ( ) ; if ( val < threshold ) { iterator . remove ( ) ; dirty . set ( true ) ; } } }
This method will remove all elements with counts below given threshold from counter
19,789
/**
 * Keeps only the top-N elements by count: snapshots the counter into a priority
 * queue, clears everything, then re-inserts up to N highest entries.
 *
 * @param N number of top elements to retain
 */
public void keepTopNElements(int N) {
    PriorityQueue<Pair<T, Double>> queue = asPriorityQueue();
    clear();
    for (int i = 0; i < N; i++) {
        Pair<T, Double> top = queue.poll();
        if (top == null) {
            break; // fewer than N elements existed
        }
        incrementCount(top.getFirst(), top.getSecond());
    }
}
This method removes all elements except of top N by counter values
19,790
// Computes the gradient for n-step Q-learning from a stack of transitions.
// The stack top (popped first) seeds the bootstrap reward r; the remaining
// transitions are then popped newest-to-oldest, accumulating the discounted return
// r = reward + gamma * r, so each row i holds the n-step return from that state.
// For each transition the network's recorded output row is copied and only the
// taken action's entry is overwritten with r, producing the regression targets.
// Input shape comes from the history processor when configured, else from the
// observation space. Order of pops is load-bearing; do not reorder.
public Gradient [ ] calcGradient ( IDQN current , Stack < MiniTrans < Integer > > rewards ) { MiniTrans < Integer > minTrans = rewards . pop ( ) ; int size = rewards . size ( ) ; int [ ] shape = getHistoryProcessor ( ) == null ? mdp . getObservationSpace ( ) . getShape ( ) : getHistoryProcessor ( ) . getConf ( ) . getShape ( ) ; int [ ] nshape = Learning . makeShape ( size , shape ) ; INDArray input = Nd4j . create ( nshape ) ; INDArray targets = Nd4j . create ( size , mdp . getActionSpace ( ) . getSize ( ) ) ; double r = minTrans . getReward ( ) ; for ( int i = size - 1 ; i >= 0 ; i -- ) { minTrans = rewards . pop ( ) ; r = minTrans . getReward ( ) + conf . getGamma ( ) * r ; input . putRow ( i , minTrans . getObs ( ) ) ; INDArray row = minTrans . getOutput ( ) [ 0 ] ; row = row . putScalar ( minTrans . getAction ( ) , r ) ; targets . putRow ( i , row ) ; } return current . gradient ( input , targets ) ; }
calc the gradient based on the n - step rewards
19,791
// Validates a Keras layer config for options DL4J cannot import: bias L1/L2
// regularization (checked via the two getBias*RegularizationFromConfig calls, which
// throw or warn depending on enforceTrainingConfig) and unknown regularizer types on
// the weight and bias regularizer sub-configs.
// Throws UnsupportedKerasConfigurationException / InvalidKerasConfigurationException
// when enforcement is on and an unsupported option is found.
public static void checkForUnsupportedConfigurations ( Map < String , Object > layerConfig , boolean enforceTrainingConfig , KerasLayerConfiguration conf ) throws UnsupportedKerasConfigurationException , InvalidKerasConfigurationException { getBiasL1RegularizationFromConfig ( layerConfig , enforceTrainingConfig , conf ) ; getBiasL2RegularizationFromConfig ( layerConfig , enforceTrainingConfig , conf ) ; Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_W_REGULARIZER ( ) ) ) { checkForUnknownRegularizer ( ( Map < String , Object > ) innerConfig . get ( conf . getLAYER_FIELD_W_REGULARIZER ( ) ) , enforceTrainingConfig , conf ) ; } if ( innerConfig . containsKey ( conf . getLAYER_FIELD_B_REGULARIZER ( ) ) ) { checkForUnknownRegularizer ( ( Map < String , Object > ) innerConfig . get ( conf . getLAYER_FIELD_B_REGULARIZER ( ) ) , enforceTrainingConfig , conf ) ; } }
Checks whether layer config contains unsupported options .
19,792
/**
 * Verifies a Keras regularizer config contains only recognized fields
 * (L1, L2, name, class name, config). Unknown fields either abort the import
 * (when training config is enforced) or are logged and ignored — the common
 * import use case is inference, not further training.
 *
 * @param regularizerConfig     regularizer sub-config (may be null, treated as valid)
 * @param enforceTrainingConfig whether unknown fields are fatal
 * @param conf                  Keras layer configuration with field-name constants
 * @throws UnsupportedKerasConfigurationException on an unknown field when enforcing
 */
private static void checkForUnknownRegularizer(Map<String, Object> regularizerConfig,
                boolean enforceTrainingConfig, KerasLayerConfiguration conf)
                throws UnsupportedKerasConfigurationException {
    if (regularizerConfig == null) {
        return;
    }
    for (String field : regularizerConfig.keySet()) {
        boolean recognized = field.equals(conf.getREGULARIZATION_TYPE_L1())
                        || field.equals(conf.getREGULARIZATION_TYPE_L2())
                        || field.equals(conf.getLAYER_FIELD_NAME())
                        || field.equals(conf.getLAYER_FIELD_CLASS_NAME())
                        || field.equals(conf.getLAYER_FIELD_CONFIG());
        if (!recognized) {
            if (enforceTrainingConfig) {
                throw new UnsupportedKerasConfigurationException("Unknown regularization field " + field);
            }
            log.warn("Ignoring unknown regularization field " + field);
        }
    }
}
Check whether a Keras weight regularizer is of an unknown type . Currently prints a warning , since the main use case for model import is inference , not further training . Unknown types are unlikely , since the standard Keras weight regularizers are L1 and L2 .
19,793
// Convenience overload: builds a KerasLayer from a layer configuration without
// enforcing the training configuration (delegates with enforceTrainingConfig=false).
public static KerasLayer getKerasLayerFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf , Map < String , Class < ? extends KerasLayer > > customLayers , Map < String , SameDiffLambdaLayer > lambdaLayers , Map < String , ? extends KerasLayer > previousLayers ) throws InvalidKerasConfigurationException , UnsupportedKerasConfigurationException { return getKerasLayerFromConfig ( layerConfig , false , conf , customLayers , lambdaLayers , previousLayers ) ; }
Build KerasLayer from a Keras layer configuration .
19,794
/**
 * Extracts the Keras layer class name from a layer configuration map.
 *
 * @param layerConfig raw layer configuration
 * @param conf        Keras configuration supplying the field-name constant
 * @return the layer's class name
 * @throws InvalidKerasConfigurationException if the class-name field is absent
 */
public static String getClassNameFromConfig(Map<String, Object> layerConfig, KerasLayerConfiguration conf)
                throws InvalidKerasConfigurationException {
    String fieldName = conf.getLAYER_FIELD_CLASS_NAME();
    if (!layerConfig.containsKey(fieldName)) {
        throw new InvalidKerasConfigurationException("Field " + fieldName + " missing from layer config");
    }
    return (String) layerConfig.get(fieldName);
}
Get Keras layer class name from Keras layer configuration .
19,795
// Unwraps a TimeDistributed wrapper: validates the config is a TimeDistributed layer,
// then MUTATES layerConfig IN PLACE so it looks like the wrapped inner layer —
// the outer class name is replaced by the inner layer's class name, the inner layer's
// config fields are merged into the outer config (keeping the outer layer's name),
// and the now-redundant "layer" field is removed. Returns the same (mutated)
// layerConfig reference. Order of the mutations is significant.
public static Map < String , Object > getTimeDistributedLayerConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { if ( ! layerConfig . containsKey ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) ) throw new InvalidKerasConfigurationException ( "Field " + conf . getLAYER_FIELD_CLASS_NAME ( ) + " missing from layer config" ) ; if ( ! layerConfig . get ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) . equals ( conf . getLAYER_CLASS_NAME_TIME_DISTRIBUTED ( ) ) ) throw new InvalidKerasConfigurationException ( "Expected " + conf . getLAYER_CLASS_NAME_TIME_DISTRIBUTED ( ) + " layer, found " + layerConfig . get ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) ) ; if ( ! layerConfig . containsKey ( conf . getLAYER_FIELD_CONFIG ( ) ) ) throw new InvalidKerasConfigurationException ( "Field " + conf . getLAYER_FIELD_CONFIG ( ) + " missing from layer config" ) ; Map < String , Object > outerConfig = getInnerLayerConfigFromConfig ( layerConfig , conf ) ; Map < String , Object > innerLayer = ( Map < String , Object > ) outerConfig . get ( conf . getLAYER_FIELD_LAYER ( ) ) ; layerConfig . put ( conf . getLAYER_FIELD_CLASS_NAME ( ) , innerLayer . get ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) ) ; Map < String , Object > innerConfig = getInnerLayerConfigFromConfig ( innerLayer , conf ) ; innerConfig . put ( conf . getLAYER_FIELD_NAME ( ) , outerConfig . get ( conf . getLAYER_FIELD_NAME ( ) ) ) ; outerConfig . putAll ( innerConfig ) ; outerConfig . remove ( conf . getLAYER_FIELD_LAYER ( ) ) ; return layerConfig ; }
Extract inner layer config from TimeDistributed configuration and merge it into the outer config .
19,796
/**
 * Pulls the inner "config" map out of a Keras layer configuration.
 *
 * @param layerConfig raw layer configuration
 * @param conf        Keras configuration supplying the field-name constant
 * @return the nested configuration map
 * @throws InvalidKerasConfigurationException if the config field is absent
 */
public static Map<String, Object> getInnerLayerConfigFromConfig(Map<String, Object> layerConfig,
                KerasLayerConfiguration conf) throws InvalidKerasConfigurationException {
    String fieldName = conf.getLAYER_FIELD_CONFIG();
    if (!layerConfig.containsKey(fieldName)) {
        throw new InvalidKerasConfigurationException("Field " + fieldName + " missing from layer config");
    }
    return (Map<String, Object>) layerConfig.get(fieldName);
}
Get inner layer config from Keras layer configuration .
19,797
/**
 * Reads the layer name from a Keras layer configuration's inner config.
 *
 * @param layerConfig raw layer configuration
 * @param conf        Keras configuration supplying the field-name constant
 * @return the layer name
 * @throws InvalidKerasConfigurationException if the name field is absent
 */
public static String getLayerNameFromConfig(Map<String, Object> layerConfig, KerasLayerConfiguration conf)
                throws InvalidKerasConfigurationException {
    Map<String, Object> innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf);
    String fieldName = conf.getLAYER_FIELD_NAME();
    if (!innerConfig.containsKey(fieldName)) {
        throw new InvalidKerasConfigurationException("Field " + fieldName + " missing from layer config");
    }
    return (String) innerConfig.get(fieldName);
}
Get layer name from Keras layer configuration .
19,798
/**
 * Reads the input shape from a Keras layer config's batch_input_shape field.
 * The leading batch dimension is dropped; null (unknown) dimensions map to 0.
 *
 * @param layerConfig raw layer configuration
 * @param conf        Keras configuration supplying the field-name constant
 * @return input shape without the batch dimension, or null when the field is absent
 * @throws InvalidKerasConfigurationException if the inner config is missing
 */
public static int[] getInputShapeFromConfig(Map<String, Object> layerConfig, KerasLayerConfiguration conf)
                throws InvalidKerasConfigurationException {
    Map<String, Object> innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf);
    if (!innerConfig.containsKey(conf.getLAYER_FIELD_BATCH_INPUT_SHAPE())) {
        return null;
    }
    List<Integer> batchInputShape = (List<Integer>) innerConfig.get(conf.getLAYER_FIELD_BATCH_INPUT_SHAPE());
    int[] inputShape = new int[batchInputShape.size() - 1];
    for (int i = 1; i < batchInputShape.size(); i++) {
        Integer dim = batchInputShape.get(i);
        inputShape[i - 1] = dim == null ? 0 : dim;
    }
    return inputShape;
}
Get Keras input shape from Keras layer configuration .
19,799
/**
 * Collects the names of inbound layers from a Keras layer configuration.
 * Only the first inbound-node group is inspected; each node's first element is
 * the inbound layer's name. Returns an empty list when no inbound nodes exist.
 *
 * @param layerConfig raw layer configuration
 * @param conf        Keras configuration supplying the field-name constant
 * @return names of inbound layers (possibly empty, never null)
 */
public static List<String> getInboundLayerNamesFromConfig(Map<String, Object> layerConfig,
                KerasLayerConfiguration conf) {
    List<String> inboundLayerNames = new ArrayList<>();
    if (!layerConfig.containsKey(conf.getLAYER_FIELD_INBOUND_NODES())) {
        return inboundLayerNames;
    }
    List<Object> inboundNodes = (List<Object>) layerConfig.get(conf.getLAYER_FIELD_INBOUND_NODES());
    if (inboundNodes.isEmpty()) {
        return inboundLayerNames;
    }
    for (Object node : (List<Object>) inboundNodes.get(0)) {
        inboundLayerNames.add((String) ((List<Object>) node).get(0));
    }
    return inboundLayerNames;
}
Get list of inbound layers from Keras layer configuration .