idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
19,800
public static int getNOutFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; int nOut ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_OUTPUT_DIM ( ) ) ) nOut = ( int ) innerConfig . get ( conf . getLAYER_FIELD_OUTPUT_DIM ( ) ) ; else if ( innerConfig . containsKey ( conf . getLAYER_FIELD_EMBEDDING_OUTPUT_DIM ( ) ) ) nOut = ( int ) innerConfig . get ( conf . getLAYER_FIELD_EMBEDDING_OUTPUT_DIM ( ) ) ; else if ( innerConfig . containsKey ( conf . getLAYER_FIELD_NB_FILTER ( ) ) ) nOut = ( int ) innerConfig . get ( conf . getLAYER_FIELD_NB_FILTER ( ) ) ; else throw new InvalidKerasConfigurationException ( "Could not determine number of outputs for layer: no " + conf . getLAYER_FIELD_OUTPUT_DIM ( ) + " or " + conf . getLAYER_FIELD_NB_FILTER ( ) + " field found" ) ; return nOut ; }
Get number of outputs from Keras layer configuration .
19,801
public static double getDropoutFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; double dropout = 1.0 ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_DROPOUT ( ) ) ) { try { dropout = 1.0 - ( double ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT ( ) ) ; } catch ( Exception e ) { int kerasDropout = ( int ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT ( ) ) ; dropout = 1.0 - kerasDropout ; } } else if ( innerConfig . containsKey ( conf . getLAYER_FIELD_DROPOUT_W ( ) ) ) { try { dropout = 1.0 - ( double ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT_W ( ) ) ; } catch ( Exception e ) { int kerasDropout = ( int ) innerConfig . get ( conf . getLAYER_FIELD_DROPOUT_W ( ) ) ; dropout = 1.0 - kerasDropout ; } } return dropout ; }
Get dropout from Keras layer configuration .
19,802
public static boolean getHasBiasFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; boolean hasBias = true ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_USE_BIAS ( ) ) ) { hasBias = ( boolean ) innerConfig . get ( conf . getLAYER_FIELD_USE_BIAS ( ) ) ; } return hasBias ; }
Determine if layer should be instantiated with bias
19,803
public static boolean getZeroMaskingFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; boolean hasZeroMasking = true ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_MASK_ZERO ( ) ) ) { hasZeroMasking = ( boolean ) innerConfig . get ( conf . getLAYER_FIELD_MASK_ZERO ( ) ) ; } return hasZeroMasking ; }
Get zero masking flag
19,804
public static double getMaskingValueFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; double maskValue = 0.0 ; if ( innerConfig . containsKey ( conf . getLAYER_FIELD_MASK_VALUE ( ) ) ) { try { maskValue = ( double ) innerConfig . get ( conf . getLAYER_FIELD_MASK_VALUE ( ) ) ; } catch ( Exception e ) { log . warn ( "Couldn't read masking value, default to 0.0" ) ; } } else { throw new InvalidKerasConfigurationException ( "No mask value found, field " + conf . getLAYER_FIELD_MASK_VALUE ( ) ) ; } return maskValue ; }
Get mask value
19,805
public static void removeDefaultWeights ( Map < String , INDArray > weights , KerasLayerConfiguration conf ) { if ( weights . size ( ) > 2 ) { Set < String > paramNames = weights . keySet ( ) ; paramNames . remove ( conf . getKERAS_PARAM_NAME_W ( ) ) ; paramNames . remove ( conf . getKERAS_PARAM_NAME_B ( ) ) ; String unknownParamNames = paramNames . toString ( ) ; log . warn ( "Attemping to set weights for unknown parameters: " + unknownParamNames . substring ( 1 , unknownParamNames . length ( ) - 1 ) ) ; } }
Remove weights from config after weight setting .
19,806
public MultiNormalizerHybrid minMaxScaleInput ( int input , double rangeFrom , double rangeTo ) { perInputStrategies . put ( input , new MinMaxStrategy ( rangeFrom , rangeTo ) ) ; return this ; }
Apply min - max scaling to a specific input overriding the global input strategy if any
19,807
public MultiNormalizerHybrid minMaxScaleOutput ( int output , double rangeFrom , double rangeTo ) { perOutputStrategies . put ( output , new MinMaxStrategy ( rangeFrom , rangeTo ) ) ; return this ; }
Apply min - max scaling to a specific output overriding the global output strategy if any
19,808
public List < Writable > transformRawStringsToInputList ( List < String > values ) { List < Writable > ret = new ArrayList < > ( ) ; if ( values . size ( ) != initialSchema . numColumns ( ) ) throw new IllegalArgumentException ( String . format ( "Number of values %d does not match the number of input columns %d for schema" , values . size ( ) , initialSchema . numColumns ( ) ) ) ; for ( int i = 0 ; i < values . size ( ) ; i ++ ) { switch ( initialSchema . getType ( i ) ) { case String : ret . add ( new Text ( values . get ( i ) ) ) ; break ; case Integer : ret . add ( new IntWritable ( Integer . parseInt ( values . get ( i ) ) ) ) ; break ; case Double : ret . add ( new DoubleWritable ( Double . parseDouble ( values . get ( i ) ) ) ) ; break ; case Float : ret . add ( new FloatWritable ( Float . parseFloat ( values . get ( i ) ) ) ) ; break ; case Categorical : ret . add ( new Text ( values . get ( i ) ) ) ; break ; case Boolean : ret . add ( new BooleanWritable ( Boolean . parseBoolean ( values . get ( i ) ) ) ) ; break ; case Time : break ; case Long : ret . add ( new LongWritable ( Long . parseLong ( values . get ( i ) ) ) ) ; } } return ret ; }
Based on the input schema map raw string values to the appropriate writable
19,809
public static PoolingType mapPoolingType ( String className , KerasLayerConfiguration conf ) throws UnsupportedKerasConfigurationException { PoolingType poolingType ; if ( className . equals ( conf . getLAYER_CLASS_NAME_MAX_POOLING_2D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_MAX_POOLING_1D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_MAX_POOLING_3D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_MAX_POOLING_1D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_MAX_POOLING_2D ( ) ) ) { poolingType = PoolingType . MAX ; } else if ( className . equals ( conf . getLAYER_CLASS_NAME_AVERAGE_POOLING_2D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_AVERAGE_POOLING_1D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_AVERAGE_POOLING_3D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_AVERAGE_POOLING_1D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_AVERAGE_POOLING_2D ( ) ) ) { poolingType = PoolingType . AVG ; } else { throw new UnsupportedKerasConfigurationException ( "Unsupported Keras pooling layer " + className ) ; } return poolingType ; }
Map Keras pooling layers to DL4J pooling types .
19,810
public static int [ ] mapGlobalPoolingDimensions ( String className , KerasLayerConfiguration conf ) throws UnsupportedKerasConfigurationException { int [ ] dimensions ; if ( className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_MAX_POOLING_1D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_AVERAGE_POOLING_1D ( ) ) ) { dimensions = new int [ ] { 2 } ; } else if ( className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_MAX_POOLING_2D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_AVERAGE_POOLING_2D ( ) ) ) { dimensions = new int [ ] { 2 , 3 } ; } else if ( className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_MAX_POOLING_3D ( ) ) || className . equals ( conf . getLAYER_CLASS_NAME_GLOBAL_AVERAGE_POOLING_3D ( ) ) ) { dimensions = new int [ ] { 2 , 3 , 4 } ; } else { throw new UnsupportedKerasConfigurationException ( "Unsupported Keras pooling layer " + className ) ; } return dimensions ; }
Map Keras pooling layers to DL4J pooling dimensions .
19,811
public static void createHtmlSequencePlotFile ( String title , Schema schema , List < List < Writable > > sequence , File output ) throws Exception { String s = createHtmlSequencePlots ( title , schema , sequence ) ; FileUtils . writeStringToFile ( output , s , StandardCharsets . UTF_8 ) ; }
Create a HTML file with plots for the given sequence and write it to a file .
19,812
protected ImageWritable doTransform ( ImageWritable image , Random random ) { if ( image == null ) { return null ; } if ( image . getFrame ( ) . imageHeight < outputHeight || image . getFrame ( ) . imageWidth < outputWidth ) throw new UnsupportedOperationException ( "Output height/width cannot be more than the input image. Requested: " + outputHeight + "+x" + outputWidth + ", got " + image . getFrame ( ) . imageHeight + "+x" + image . getFrame ( ) . imageWidth ) ; int cropTop = image . getFrame ( ) . imageHeight - outputHeight ; int cropLeft = image . getFrame ( ) . imageWidth - outputWidth ; Mat mat = converter . convert ( image . getFrame ( ) ) ; int top = rng . nextInt ( cropTop + 1 ) ; int left = rng . nextInt ( cropLeft + 1 ) ; y = Math . min ( top , mat . rows ( ) - 1 ) ; x = Math . min ( left , mat . cols ( ) - 1 ) ; Mat result = mat . apply ( new Rect ( x , y , outputWidth , outputHeight ) ) ; return new ImageWritable ( converter . convert ( result ) ) ; }
Takes an image and returns a randomly cropped image .
19,813
public static INDArray movingAverage ( INDArray toAvg , int n ) { INDArray ret = Nd4j . cumsum ( toAvg ) ; INDArrayIndex [ ] ends = new INDArrayIndex [ ] { NDArrayIndex . interval ( n , toAvg . columns ( ) ) } ; INDArrayIndex [ ] begins = new INDArrayIndex [ ] { NDArrayIndex . interval ( 0 , toAvg . columns ( ) - n , false ) } ; INDArrayIndex [ ] nMinusOne = new INDArrayIndex [ ] { NDArrayIndex . interval ( n - 1 , toAvg . columns ( ) ) } ; ret . put ( ends , ret . get ( ends ) . sub ( ret . get ( begins ) ) ) ; return ret . get ( nMinusOne ) . divi ( n ) ; }
Calculate a moving average given the length
19,814
public static AllocationShape buildAllocationShape ( DataBuffer buffer ) { AllocationShape shape = new AllocationShape ( ) ; shape . setDataType ( buffer . dataType ( ) ) ; shape . setLength ( buffer . length ( ) ) ; return shape ; }
This method returns AllocationShape for the whole DataBuffer .
19,815
public static List < Writable > sampleFromColumn ( int count , String columnName , Schema schema , JavaRDD < List < Writable > > data ) { int colIdx = schema . getIndexOfColumn ( columnName ) ; JavaRDD < Writable > ithColumn = data . map ( new SelectColumnFunction ( colIdx ) ) ; return ithColumn . takeSample ( false , count ) ; }
Randomly sample values from a single column
19,816
public static List < List < Writable > > sample ( int count , JavaRDD < List < Writable > > data ) { return data . takeSample ( false , count ) ; }
Randomly sample a set of examples
19,817
public static List < List < List < Writable > > > sampleSequence ( int count , JavaRDD < List < List < Writable > > > data ) { return data . takeSample ( false , count ) ; }
Randomly sample a number of sequences from the data
19,818
public static Map < Writable , Long > sampleMostFrequentFromColumn ( int nMostFrequent , String columnName , Schema schema , JavaRDD < List < Writable > > data ) { int columnIdx = schema . getIndexOfColumn ( columnName ) ; JavaPairRDD < Writable , Long > keyedByWritable = data . mapToPair ( new ColumnToKeyPairTransform ( columnIdx ) ) ; JavaPairRDD < Writable , Long > reducedByWritable = keyedByWritable . reduceByKey ( new SumLongsFunction2 ( ) ) ; List < Tuple2 < Writable , Long > > list = reducedByWritable . takeOrdered ( nMostFrequent , new Tuple2Comparator < Writable > ( false ) ) ; List < Tuple2 < Writable , Long > > sorted = new ArrayList < > ( list ) ; Collections . sort ( sorted , new Tuple2Comparator < Writable > ( false ) ) ; Map < Writable , Long > map = new LinkedHashMap < > ( ) ; for ( Tuple2 < Writable , Long > t2 : sorted ) { map . put ( t2 . _1 ( ) , t2 . _2 ( ) ) ; } return map ; }
Sample the N most frequently occurring values in the specified column
19,819
public static Writable min ( JavaRDD < List < Writable > > allData , String columnName , Schema schema ) { int columnIdx = schema . getIndexOfColumn ( columnName ) ; JavaRDD < Writable > col = allData . map ( new SelectColumnFunction ( columnIdx ) ) ; return col . min ( Comparators . forType ( schema . getType ( columnName ) . getWritableType ( ) ) ) ; }
Get the minimum value for the specified column
19,820
public static double precision ( long tpCount , long fpCount , double edgeCase ) { if ( tpCount == 0 && fpCount == 0 ) { return edgeCase ; } return tpCount / ( double ) ( tpCount + fpCount ) ; }
Calculate the precision from true positive and false positive counts
19,821
public static double recall ( long tpCount , long fnCount , double edgeCase ) { if ( tpCount == 0 && fnCount == 0 ) { return edgeCase ; } return tpCount / ( double ) ( tpCount + fnCount ) ; }
Calculate the recall from true positive and false negative counts
19,822
public static double falsePositiveRate ( long fpCount , long tnCount , double edgeCase ) { if ( fpCount == 0 && tnCount == 0 ) { return edgeCase ; } return fpCount / ( double ) ( fpCount + tnCount ) ; }
Calculate the false positive rate from the false positive count and true negative count
19,823
public static double falseNegativeRate ( long fnCount , long tpCount , double edgeCase ) { if ( fnCount == 0 && tpCount == 0 ) { return edgeCase ; } return fnCount / ( double ) ( fnCount + tpCount ) ; }
Calculate the false negative rate from the false negative counts and true positive count
19,824
public static double fBeta ( double beta , long tp , long fp , long fn ) { double prec = tp / ( ( double ) tp + fp ) ; double recall = tp / ( ( double ) tp + fn ) ; return fBeta ( beta , prec , recall ) ; }
Calculate the F beta value from counts
19,825
public static double fBeta ( double beta , double precision , double recall ) { if ( precision == 0.0 || recall == 0.0 ) return 0 ; double numerator = ( 1 + beta * beta ) * precision * recall ; double denominator = beta * beta * precision + recall ; return numerator / denominator ; }
Calculate the F - beta value from precision and recall
19,826
public static double matthewsCorrelation ( long tp , long fp , long fn , long tn ) { double numerator = ( ( double ) tp ) * tn - ( ( double ) fp ) * fn ; double denominator = Math . sqrt ( ( ( double ) tp + fp ) * ( tp + fn ) * ( tn + fp ) * ( tn + fn ) ) ; return numerator / denominator ; }
Calculate the binary Matthews correlation coefficient from counts
19,827
public IfImportState nodesForIf ( NodeDef from , GraphDef graph ) { int currNodeIndex = graph . getNodeList ( ) . indexOf ( from ) ; val trueDefName = from . getInput ( 1 ) ; val falseDefName = from . getInput ( 0 ) ; val scopeId = UUID . randomUUID ( ) . toString ( ) ; val scopeName = scopeId + "-" + trueDefName . substring ( 0 , trueDefName . indexOf ( "/" ) ) ; val trueDefScopeName = scopeName + "-true-scope" ; val falseDefScopeName = scopeName + "-false-scope" ; boolean onFalseDefinition = true ; boolean onTrueDefinition = false ; List < NodeDef > falseBodyNodes = new ArrayList < > ( ) ; List < NodeDef > trueBodyNodes = new ArrayList < > ( ) ; List < NodeDef > conditionNodes = new ArrayList < > ( ) ; Set < String > seenNames = new LinkedHashSet < > ( ) ; for ( int i = currNodeIndex ; i >= 0 ; i -- ) { if ( graph . getNode ( i ) . getName ( ) . equals ( trueDefName ) ) { onFalseDefinition = false ; onTrueDefinition = true ; } if ( graph . getNode ( i ) . getName ( ) . contains ( "pred_id" ) ) { onTrueDefinition = false ; } if ( onTrueDefinition && ! graph . getNode ( i ) . equals ( from ) ) { trueBodyNodes . add ( graph . getNode ( i ) ) ; } else if ( onFalseDefinition && ! graph . getNode ( i ) . equals ( from ) ) { falseBodyNodes . add ( graph . getNode ( i ) ) ; } else { val currNode = graph . getNode ( i ) ; if ( currNode . equals ( from ) ) continue ; if ( ! seenNames . contains ( graph . getNode ( i ) . getName ( ) ) && ! graph . getNode ( i ) . getName ( ) . contains ( "pred_id" ) ) { break ; } for ( int inputIdx = 0 ; inputIdx < currNode . getInputCount ( ) ; inputIdx ++ ) { seenNames . add ( currNode . getInput ( inputIdx ) ) ; } seenNames . add ( graph . getNode ( i ) . getName ( ) ) ; conditionNodes . add ( graph . getNode ( i ) ) ; } } Collections . reverse ( falseBodyNodes ) ; Collections . reverse ( trueBodyNodes ) ; Collections . reverse ( conditionNodes ) ; return IfImportState . builder ( ) . condNodes ( conditionNodes ) . falseNodes ( falseBodyNodes ) . trueNodes ( trueBodyNodes ) . conditionBodyScopeName ( falseDefScopeName ) . falseBodyScopeName ( falseDefScopeName ) . trueBodyScopeName ( trueDefScopeName ) . conditionBodyScopeName ( scopeName ) . build ( ) ; }
Returns the node for an if statement
19,828
public List < SDVariable > gru ( GRUCellConfiguration configuration ) { GRUCell c = new GRUCell ( sd , configuration ) ; return Arrays . asList ( c . outputVariables ( ) ) ; }
The gru cell
19,829
public SDVariable sru ( String baseName , SRUConfiguration configuration ) { return new SRU ( sd , configuration ) . outputVariables ( baseName ) [ 0 ] ; }
Simiple recurrent unit
19,830
public SDVariable sruCell ( String baseName , SRUCellConfiguration configuration ) { return new SRUCell ( sd , configuration ) . outputVariables ( baseName ) [ 0 ] ; }
An sru cell
19,831
public static void validateOutputLayerForClassifierEvaluation ( Layer outputLayer , Class < ? extends IEvaluation > classifierEval ) { if ( outputLayer instanceof Yolo2OutputLayer ) { throw new IllegalStateException ( "Classifier evaluation using " + classifierEval . getSimpleName ( ) + " class cannot be applied for object" + " detection evaluation using Yolo2OutputLayer: " + classifierEval . getSimpleName ( ) + " class is for classifier evaluation only." ) ; } if ( outputLayer instanceof BaseLayer ) { BaseLayer bl = ( BaseLayer ) outputLayer ; boolean isOutputLayer = outputLayer instanceof OutputLayer || outputLayer instanceof RnnOutputLayer || outputLayer instanceof CenterLossOutputLayer ; if ( activationExceedsZeroOneRange ( bl . getActivationFn ( ) , ! isOutputLayer ) ) { throw new IllegalStateException ( "Classifier evaluation using " + classifierEval . getSimpleName ( ) + " class cannot be applied to output" + " layers with activation functions that are not probabilities (in range 0 to 1). Output layer type: " + outputLayer . getClass ( ) . getSimpleName ( ) + " has activation function " + bl . getActivationFn ( ) . getClass ( ) . getSimpleName ( ) + ". This check can be disabled using MultiLayerNetwork.getLayerWiseConfigurations().setValidateOutputLayerConfig(false)" + " or ComputationGraph.getConfiguration().setValidateOutputLayerConfig(false)" ) ; } } }
Validates if the output layer configuration is valid for classifier evaluation . This is used to try and catch invalid evaluation - i . e . trying to use classifier evaluation on a regression model . This method won t catch all possible invalid cases but should catch some common problems .
19,832
public void balance ( ) { if ( ! rootDir . exists ( ) ) rootDir . mkdirs ( ) ; if ( ! rootSaveDir . exists ( ) ) rootSaveDir . mkdirs ( ) ; if ( paths == null ) paths = Maps . newHashMap ( ) ; if ( labelRootDirs == null ) labelRootDirs = Lists . newArrayList ( ) ; for ( int i = 0 ; i < numLabels ; i ++ ) { paths . put ( i , new ArrayList < File > ( ) ) ; labelRootDirs . add ( new File ( rootDir , String . valueOf ( i ) ) ) ; } while ( dataSetIterator . hasNext ( ) ) { DataSet next = dataSetIterator . next ( ) ; if ( miniBatchSize < 0 ) miniBatchSize = next . numExamples ( ) ; for ( int i = 0 ; i < next . numExamples ( ) ; i ++ ) { DataSet currExample = next . get ( i ) ; if ( ! labelRootDirs . get ( currExample . outcome ( ) ) . exists ( ) ) labelRootDirs . get ( currExample . outcome ( ) ) . mkdirs ( ) ; File example = new File ( labelRootDirs . get ( currExample . outcome ( ) ) , String . valueOf ( paths . get ( currExample . outcome ( ) ) . size ( ) ) ) ; currExample . save ( example ) ; paths . get ( currExample . outcome ( ) ) . add ( example ) ; } } int numsSaved = 0 ; while ( ! paths . isEmpty ( ) ) { List < DataSet > miniBatch = new ArrayList < > ( ) ; while ( miniBatch . size ( ) < miniBatchSize && ! paths . isEmpty ( ) ) { for ( int i = 0 ; i < numLabels ; i ++ ) { if ( paths . get ( i ) != null && ! paths . get ( i ) . isEmpty ( ) ) { DataSet d = new DataSet ( ) ; d . load ( paths . get ( i ) . remove ( 0 ) ) ; miniBatch . add ( d ) ; } else paths . remove ( i ) ; } } if ( ! rootSaveDir . exists ( ) ) rootSaveDir . mkdirs ( ) ; if ( ! miniBatch . isEmpty ( ) ) { DataSet merge = DataSet . merge ( miniBatch ) ; if ( dataNormalization != null ) dataNormalization . transform ( merge ) ; merge . save ( new File ( rootSaveDir , String . format ( "dataset-%d.bin" , numsSaved ++ ) ) ) ; } } }
Generate a balanced dataset minibatch fileset .
19,833
public void fitMultipleFiles ( JavaPairRDD < String , String > documentsRdd ) { validateConfiguration ( ) ; broadcastEnvironment ( new JavaSparkContext ( documentsRdd . context ( ) ) ) ; JavaRDD < Sequence < VocabWord > > sequenceRdd = documentsRdd . map ( new KeySequenceConvertFunction ( configurationBroadcast ) ) ; super . fitSequences ( sequenceRdd ) ; }
This method builds ParagraphVectors model expecting JavaPairRDD with key as label and value as document - in - a - string .
19,834
public PagedPointer alloc ( long requiredMemory , MemoryKind kind , DataType dataType , boolean initialize ) { throw new UnsupportedOperationException ( "DummyWorkspace shouldn't be used for allocation" ) ; }
This method does allocation from a given Workspace
19,835
public MemoryWorkspace notifyScopeEntered ( ) { parentWorkspace = Nd4j . getMemoryManager ( ) . getCurrentWorkspace ( ) ; Nd4j . getMemoryManager ( ) . setCurrentWorkspace ( null ) ; return this ; }
This method notifies given Workspace that new use cycle is starting now
19,836
public VoidAggregation nextCandidate ( ) { VoidAggregation result = completedQueue . poll ( ) ; if ( result != null ) { completedCounter . decrementAndGet ( ) ; unpin ( result . getOriginatorId ( ) , result . getTaskId ( ) ) ; } return result ; }
This method returns one of available aggregations if there s at least 1 ready .
19,837
private List < String > yield ( List < String > labels ) { labels . add ( label ) ; for ( Tree t : children ( ) ) { labels . addAll ( t . yield ( ) ) ; } return labels ; }
Returns the list of labels for this node and all of its children recursively
19,838
public boolean isPreTerminal ( ) { if ( children == null && label != null && ! label . equals ( "TOP" ) ) children = new ArrayList < > ( ) ; if ( children != null && children . size ( ) == 1 ) { Tree child = children . get ( 0 ) ; return child != null && child . isLeaf ( ) ; } return false ; }
Node has one child that is a leaf
19,839
public int depth ( ) { if ( isLeaf ( ) ) { return 0 ; } int maxDepth = 0 ; List < Tree > kids = children ( ) ; for ( Tree kid : kids ) { int curDepth = kid . depth ( ) ; if ( curDepth > maxDepth ) { maxDepth = curDepth ; } } return maxDepth + 1 ; }
Finds the channels of the tree . The channels is defined as the length of the longest path from this node to a leaf node . Leaf nodes have channels zero . POS tags have channels 1 . Phrasal nodes have channels &gt ; = 2 .
19,840
public int depth ( Tree node ) { Tree p = node . parent ( this ) ; if ( this == node ) { return 0 ; } if ( p == null ) { return - 1 ; } int depth = 1 ; while ( this != p ) { p = p . parent ( this ) ; depth ++ ; } return depth ; }
Returns the distance between this node and the specified subnode
19,841
public Tree parent ( Tree root ) { List < Tree > kids = root . children ( ) ; return traverse ( root , kids , this ) ; }
Returns the parent of the passed in tree via traversal
19,842
private static Tree traverse ( Tree parent , List < Tree > kids , Tree node ) { for ( Tree kid : kids ) { if ( kid == node ) { return parent ; } Tree ret = node . parent ( kid ) ; if ( ret != null ) { return ret ; } } return null ; }
traverses the tree by recursion
19,843
public Tree ancestor ( int height , Tree root ) { if ( height < 0 ) { throw new IllegalArgumentException ( "ancestor: height cannot be negative" ) ; } if ( height == 0 ) { return this ; } Tree par = parent ( root ) ; if ( par == null ) { return null ; } return par . ancestor ( height - 1 , root ) ; }
Returns the ancestor of the given tree
19,844
public double errorSum ( ) { if ( isLeaf ( ) ) { return 0.0 ; } else if ( isPreTerminal ( ) ) { return error ( ) ; } else { double error = 0.0 ; for ( Tree child : children ( ) ) { error += child . errorSum ( ) ; } return error ( ) + error ; } }
Returns the total prediction error for this tree and its children
19,845
@ SuppressWarnings ( "unchecked" ) public < T extends Tree > List < T > getLeaves ( List < T > list ) { if ( isLeaf ( ) ) { list . add ( ( T ) this ) ; } else { for ( Tree kid : children ( ) ) { kid . getLeaves ( list ) ; } } return list ; }
Gets the leaves of the tree .
19,846
public void connect ( List < Tree > children ) { this . children = children ; for ( Tree t : children ) t . setParent ( this ) ; }
Connects the given trees and sets the parents of the children
19,847
public String format ( INDArray arr , boolean summarize ) { if ( arr . isEmpty ( ) ) return EMPTY_ARRAY_STR ; this . scientificFormat = "0." ; int addPrecision = this . precision ; while ( addPrecision > 0 ) { this . scientificFormat += "#" ; addPrecision -= 1 ; } this . scientificFormat = this . scientificFormat + "E0" ; if ( this . scientificFormat . length ( ) + 2 > this . padding ) this . padding = this . scientificFormat . length ( ) + 2 ; this . maxToPrintWithoutSwitching = Math . pow ( 10 , this . precision ) ; this . minToPrintWithoutSwitching = 1.0 / ( this . maxToPrintWithoutSwitching ) ; return format ( arr , 0 , summarize && arr . length ( ) > maxPrintElements ) ; }
Format the given ndarray as a string
19,848
public static LossFunctions . LossFunction mapLossFunction ( String kerasLoss , KerasLayerConfiguration conf ) throws UnsupportedKerasConfigurationException { LossFunctions . LossFunction dl4jLoss ; if ( kerasLoss . equals ( conf . getKERAS_LOSS_MEAN_SQUARED_ERROR ( ) ) || kerasLoss . equals ( conf . getKERAS_LOSS_MSE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . SQUARED_LOSS ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_MEAN_ABSOLUTE_ERROR ( ) ) || kerasLoss . equals ( conf . getKERAS_LOSS_MAE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . MEAN_ABSOLUTE_ERROR ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_MEAN_ABSOLUTE_PERCENTAGE_ERROR ( ) ) || kerasLoss . equals ( conf . getKERAS_LOSS_MAPE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . MEAN_ABSOLUTE_PERCENTAGE_ERROR ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_MEAN_SQUARED_LOGARITHMIC_ERROR ( ) ) || kerasLoss . equals ( conf . getKERAS_LOSS_MSLE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . MEAN_SQUARED_LOGARITHMIC_ERROR ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_SQUARED_HINGE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . SQUARED_HINGE ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_HINGE ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . HINGE ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_SPARSE_CATEGORICAL_CROSSENTROPY ( ) ) ) { throw new UnsupportedKerasConfigurationException ( "Loss function " + kerasLoss + " not supported yet." ) ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_BINARY_CROSSENTROPY ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . XENT ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_CATEGORICAL_CROSSENTROPY ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . MCXENT ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_KULLBACK_LEIBLER_DIVERGENCE ( ) ) || kerasLoss . equals ( conf . getKERAS_LOSS_KLD ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . KL_DIVERGENCE ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_POISSON ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . POISSON ; } else if ( kerasLoss . equals ( conf . getKERAS_LOSS_COSINE_PROXIMITY ( ) ) ) { dl4jLoss = LossFunctions . LossFunction . COSINE_PROXIMITY ; } else { throw new UnsupportedKerasConfigurationException ( "Unknown Keras loss function " + kerasLoss ) ; } return dl4jLoss ; }
Map Keras to DL4J loss functions .
19,849
public void buildVocab ( ) { val constructor = new VocabConstructor . Builder < T > ( ) . addSource ( iterator , minWordFrequency ) . setTargetVocabCache ( vocab ) . fetchLabels ( trainSequenceVectors ) . setStopWords ( stopWords ) . enableScavenger ( enableScavenger ) . setEntriesLimit ( vocabLimit ) . allowParallelTokenization ( configuration . isAllowParallelTokenization ( ) ) . setUnk ( useUnknown && unknownElement != null ? unknownElement : null ) . build ( ) ; if ( existingModel != null && lookupTable instanceof InMemoryLookupTable && existingModel . lookupTable ( ) instanceof InMemoryLookupTable ) { log . info ( "Merging existing vocabulary into the current one..." ) ; constructor . buildMergedVocabulary ( existingModel , true ) ; ( ( InMemoryLookupTable < VocabWord > ) lookupTable ) . consume ( ( InMemoryLookupTable < VocabWord > ) existingModel . lookupTable ( ) ) ; } else { log . info ( "Starting vocabulary building..." ) ; constructor . buildJointVocabulary ( false , true ) ; if ( vocab . numWords ( ) / constructor . getNumberOfSequences ( ) > 1000 ) { log . warn ( "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ) ; log . warn ( "! !" ) ; log . warn ( "! Your input looks malformed: number of sentences is too low, model accuracy may suffer !" ) ; log . warn ( "! !" ) ; log . warn ( "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ) ; } } }
Builds vocabulary from provided SequenceIterator instance
19,850
public static INDArray invert ( INDArray arr , boolean inPlace ) { if ( arr . rank ( ) == 2 && arr . length ( ) == 1 ) { if ( inPlace ) { return arr . rdivi ( 1.0 ) ; } else { return arr . rdiv ( 1.0 ) ; } } if ( ! arr . isSquare ( ) ) { throw new IllegalArgumentException ( "invalid array: must be square matrix" ) ; } RealMatrix rm = CheckUtil . convertToApacheMatrix ( arr ) ; RealMatrix rmInverse = new LUDecomposition ( rm ) . getSolver ( ) . getInverse ( ) ; INDArray inverse = CheckUtil . convertFromApacheMatrix ( rmInverse , arr . dataType ( ) ) ; if ( inPlace ) arr . assign ( inverse ) ; return inverse ; }
Inverts a matrix
19,851
public static INDArray pinvert ( INDArray arr , boolean inPlace ) { RealMatrix realMatrix = CheckUtil . convertToApacheMatrix ( arr ) ; QRDecomposition decomposition = new QRDecomposition ( realMatrix , 0 ) ; DecompositionSolver solver = decomposition . getSolver ( ) ; if ( ! solver . isNonSingular ( ) ) { throw new IllegalArgumentException ( "invalid array: must be singular matrix" ) ; } RealMatrix pinvRM = solver . getInverse ( ) ; INDArray pseudoInverse = CheckUtil . convertFromApacheMatrix ( pinvRM , arr . dataType ( ) ) ; if ( inPlace ) arr . assign ( pseudoInverse ) ; return pseudoInverse ; }
Calculates pseudo inverse of a matrix using QR decomposition
19,852
public static INDArray pLeftInvert ( INDArray arr , boolean inPlace ) { try { final INDArray inv = invert ( arr . transpose ( ) . mmul ( arr ) , inPlace ) . mmul ( arr . transpose ( ) ) ; if ( inPlace ) arr . assign ( inv ) ; return inv ; } catch ( SingularMatrixException e ) { throw new IllegalArgumentException ( "Full column rank condition for left pseudo inverse was not met." ) ; } }
Compute the left pseudo inverse . Input matrix must have full column rank .
19,853
protected void initializeHelper ( DataType dataType ) { String backend = Nd4j . getExecutioner ( ) . getEnvironmentInformation ( ) . getProperty ( "backend" ) ; if ( "CUDA" . equalsIgnoreCase ( backend ) ) { try { helper = Class . forName ( "org.deeplearning4j.nn.layers.dropout.CudnnDropoutHelper" ) . asSubclass ( DropoutHelper . class ) . getConstructor ( DataType . class ) . newInstance ( dataType ) ; log . debug ( "CudnnDropoutHelper successfully initialized" ) ; if ( ! helper . checkSupported ( ) ) { helper = null ; } } catch ( Throwable t ) { if ( ! ( t instanceof ClassNotFoundException ) ) { log . warn ( "Could not initialize CudnnDropoutHelper" , t ) ; } } } initializedHelper = true ; }
Initialize the CuDNN dropout helper if possible
19,854
public void buildTree ( int [ ] vertexDegree ) { PriorityQueue < Node > pq = new PriorityQueue < > ( ) ; for ( int i = 0 ; i < vertexDegree . length ; i ++ ) pq . add ( new Node ( i , vertexDegree [ i ] , null , null ) ) ; while ( pq . size ( ) > 1 ) { Node left = pq . remove ( ) ; Node right = pq . remove ( ) ; Node newNode = new Node ( - 1 , left . count + right . count , left , right ) ; pq . add ( newNode ) ; } Node tree = pq . remove ( ) ; int [ ] innerNodePath = new int [ MAX_CODE_LENGTH ] ; traverse ( tree , 0L , ( byte ) 0 , - 1 , innerNodePath , 0 ) ; }
Build the Huffman tree given an array of vertex degrees
19,855
public void applyUpdater ( INDArray gradient , int iteration , int epoch ) { if ( msg == null || msdx == null ) throw new IllegalStateException ( "Updater has not been initialized with view state" ) ; double rho = config . getRho ( ) ; double epsilon = config . getEpsilon ( ) ; msg . muli ( rho ) . addi ( gradient . mul ( gradient ) . muli ( 1 - rho ) ) ; INDArray rmsdx_t1 = Transforms . sqrt ( msdx . add ( epsilon ) , false ) ; INDArray rmsg_t = Transforms . sqrt ( msg . add ( epsilon ) , false ) ; INDArray update = gradient . muli ( rmsdx_t1 . divi ( rmsg_t ) ) ; msdx . muli ( rho ) . addi ( update . mul ( update ) . muli ( 1 - rho ) ) ; }
Get the updated gradient for the given gradient and also update the state of ada delta .
19,856
public boolean sameTypes ( Schema schema ) { if ( schema . numColumns ( ) != numColumns ( ) ) return false ; for ( int i = 0 ; i < schema . numColumns ( ) ; i ++ ) { if ( getType ( i ) != schema . getType ( i ) ) return false ; } return true ; }
Returns true if the given schema has the same types at each index
19,857
public int getIndexOfColumn ( String columnName ) { Integer idx = columnNamesIndex . get ( columnName ) ; if ( idx == null ) throw new NoSuchElementException ( "Unknown column: \"" + columnName + "\"" ) ; return idx ; }
Returns the index for the given column name
19,858
public boolean hasColumn ( String columnName ) { Integer idx = columnNamesIndex . get ( columnName ) ; return idx != null ; }
Determine if the schema has a column with the specified name
19,859
public static Schema fromJson ( String json ) { try { return JsonMappers . getMapper ( ) . readValue ( json , Schema . class ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Create a schema from a given json string
19,860
public static Schema fromYaml ( String yaml ) { try { return JsonMappers . getMapperYaml ( ) . readValue ( yaml , Schema . class ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Create a schema from the given yaml string
19,861
public static Schema infer ( List < Writable > record ) { Schema . Builder builder = new Schema . Builder ( ) ; for ( int i = 0 ; i < record . size ( ) ; i ++ ) { if ( record . get ( i ) instanceof DoubleWritable ) builder . addColumnDouble ( String . valueOf ( i ) ) ; else if ( record . get ( i ) instanceof IntWritable ) builder . addColumnInteger ( String . valueOf ( i ) ) ; else if ( record . get ( i ) instanceof LongWritable ) builder . addColumnLong ( String . valueOf ( i ) ) ; else if ( record . get ( i ) instanceof FloatWritable ) builder . addColumnFloat ( String . valueOf ( i ) ) ; else if ( record . get ( i ) instanceof Text ) { builder . addColumnString ( String . valueOf ( i ) ) ; } else throw new IllegalStateException ( "Illegal writable for infering schema of type " + record . get ( i ) . getClass ( ) . toString ( ) + " with record " + record ) ; } return builder . build ( ) ; }
Infers a schema based on the record . The column names are based on indexing .
19,862
public ReliabilityDiagram getReliabilityDiagram ( int classIdx ) { INDArray totalCountBins = rDiagBinTotalCount . getColumn ( classIdx ) ; INDArray countPositiveBins = rDiagBinPosCount . getColumn ( classIdx ) ; double [ ] meanPredictionBins = rDiagBinSumPredictions . getColumn ( classIdx ) . castTo ( DataType . DOUBLE ) . div ( totalCountBins . castTo ( DataType . DOUBLE ) ) . data ( ) . asDouble ( ) ; double [ ] fracPositives = countPositiveBins . castTo ( DataType . DOUBLE ) . div ( totalCountBins . castTo ( DataType . DOUBLE ) ) . data ( ) . asDouble ( ) ; if ( excludeEmptyBins ) { val condition = new MatchCondition ( totalCountBins , Conditions . equals ( 0 ) ) ; int numZeroBins = Nd4j . getExecutioner ( ) . exec ( condition ) . getInt ( 0 ) ; if ( numZeroBins != 0 ) { double [ ] mpb = meanPredictionBins ; double [ ] fp = fracPositives ; meanPredictionBins = new double [ ( int ) ( totalCountBins . length ( ) - numZeroBins ) ] ; fracPositives = new double [ meanPredictionBins . length ] ; int j = 0 ; for ( int i = 0 ; i < mpb . length ; i ++ ) { if ( totalCountBins . getDouble ( i ) != 0 ) { meanPredictionBins [ j ] = mpb [ i ] ; fracPositives [ j ] = fp [ i ] ; j ++ ; } } } } String title = "Reliability Diagram: Class " + classIdx ; return new ReliabilityDiagram ( title , meanPredictionBins , fracPositives ) ; }
Get the reliability diagram for the specified class
19,863
public INDArray unsafeDuplication ( ) { WorkspaceUtils . assertValidArray ( this , "Cannot duplicate array" ) ; if ( isView ( ) ) return this . dup ( this . ordering ( ) ) ; DataBuffer rb = Nd4j . getMemoryManager ( ) . getCurrentWorkspace ( ) == null ? Nd4j . getDataBufferFactory ( ) . createSame ( this . data , false ) : Nd4j . getDataBufferFactory ( ) . createSame ( this . data , false , Nd4j . getMemoryManager ( ) . getCurrentWorkspace ( ) ) ; INDArray ret = Nd4j . createArrayFromShapeBuffer ( rb , this . shapeInfoDataBuffer ( ) ) ; val perfD = PerformanceTracker . getInstance ( ) . helperStartTransaction ( ) ; Pointer . memcpy ( ret . data ( ) . addressPointer ( ) , this . data ( ) . addressPointer ( ) , this . data ( ) . length ( ) * this . data ( ) . getElementSize ( ) ) ; PerformanceTracker . getInstance ( ) . helperRegisterTransaction ( 0 , perfD , this . data ( ) . length ( ) * this . data ( ) . getElementSize ( ) , MemcpyDirection . HOST_TO_HOST ) ; return ret ; }
This method does direct array copy . Impossible to use on views or mixed orders .
19,864
public static FileBatch forFiles ( List < File > files ) throws IOException { List < String > origPaths = new ArrayList < > ( files . size ( ) ) ; List < byte [ ] > bytes = new ArrayList < > ( files . size ( ) ) ; for ( File f : files ) { bytes . add ( FileUtils . readFileToByteArray ( f ) ) ; origPaths . add ( f . toURI ( ) . toString ( ) ) ; } return new FileBatch ( bytes , origPaths ) ; }
Create a FileBatch from the specified files
19,865
public void set ( boolean value , int entry ) { if ( entry > numEntries || entry < 0 ) throw new ND4JIllegalStateException ( "Entry index given (" + entry + ")in is higher then configured one (" + numEntries + ")" ) ; if ( oneTime && this . timeTracker . get ( entry ) ) return ; if ( value ) this . holder |= 1 << ( entry + 1 ) ; else this . holder &= ~ ( 1 << ( entry + 1 ) ) ; if ( oneTime ) this . timeTracker . set ( true , entry ) ; }
Sets specified entry to specified state
19,866
public boolean get ( int entry ) { if ( entry > numEntries || entry < 0 ) throw new ND4JIllegalStateException ( "Entry index given (" + entry + ")in is higher then configured one (" + numEntries + ")" ) ; return ( this . holder & 1 << ( entry + 1 ) ) != 0 ; }
Gets current state for specified entry
19,867
public static Condition XOR ( Condition first , Condition second ) { return new BooleanCondition ( Type . XOR , first , second ) ; }
And of all the given conditions
19,868
public void reset ( boolean shuffle ) { this . position . set ( 0 ) ; if ( shuffle ) { logger . debug ( "Calling shuffle() on entries..." ) ; for ( int i = order . length - 1 ; i > 0 ; i -- ) { int j = rng . nextInt ( i + 1 ) ; int temp = order [ j ] ; order [ j ] = order [ i ] ; order [ i ] = temp ; } } }
This method resets walker
19,869
protected static void validateInteger ( String opName , SDVariable v ) { if ( v == null ) return ; if ( ! v . dataType ( ) . isIntType ( ) ) throw new IllegalStateException ( "Cannot apply operation \"" + opName + "\" to variable \"" + v . getVarName ( ) + "\" with non-integer data type " + v . dataType ( ) ) ; }
Validate that the operation is being applied on an integer type SDVariable
19,870
protected static void validateFloatingPoint ( String opName , SDVariable v ) { if ( v == null ) return ; if ( ! v . dataType ( ) . isFPType ( ) ) throw new IllegalStateException ( "Cannot apply operation \"" + opName + "\" to variable \"" + v . getVarName ( ) + "\" with non-floating point data type " + v . dataType ( ) ) ; }
Validate that the operation is being applied on an floating point type SDVariable
19,871
protected static void validateBool ( String opName , SDVariable v ) { if ( v == null ) return ; if ( v . dataType ( ) != DataType . BOOL ) throw new IllegalStateException ( "Cannot apply operation \"" + opName + "\" to variable \"" + v . getVarName ( ) + "\" with non-boolean point data type " + v . dataType ( ) ) ; }
Validate that the operation is being applied on a boolean type SDVariable
19,872
protected static void validateBool ( String opName , SDVariable v1 , SDVariable v2 ) { if ( v1 . dataType ( ) != DataType . BOOL || v2 . dataType ( ) != DataType . BOOL ) throw new IllegalStateException ( "Cannot perform operation \"" + opName + "\" on variables \"" + v1 . getVarName ( ) + "\" and \"" + v2 . getVarName ( ) + "\" if one or both variables are non-boolean: " + v1 . dataType ( ) + " and " + v2 . dataType ( ) ) ; }
Validate that the operation is being applied on boolean SDVariables
19,873
public boolean resetSupported ( ) { boolean sup = true ; for ( val i : iterators ) if ( ! i . resetSupported ( ) ) { sup = false ; break ; } return sup ; }
Is resetting supported by this DataSetIterator? Many DataSetIterators do support resetting but some don t
19,874
public static DoubleArrayTrie read ( InputStream input ) throws IOException { DoubleArrayTrie trie = new DoubleArrayTrie ( ) ; DataInputStream dis = new DataInputStream ( new BufferedInputStream ( input ) ) ; trie . compact = dis . readBoolean ( ) ; int baseCheckSize = dis . readInt ( ) ; int tailSize = dis . readInt ( ) ; ReadableByteChannel channel = Channels . newChannel ( dis ) ; ByteBuffer tmpBaseBuffer = ByteBuffer . allocate ( baseCheckSize * 4 ) ; channel . read ( tmpBaseBuffer ) ; tmpBaseBuffer . rewind ( ) ; trie . baseBuffer = tmpBaseBuffer . asIntBuffer ( ) ; ByteBuffer tmpCheckBuffer = ByteBuffer . allocate ( baseCheckSize * 4 ) ; channel . read ( tmpCheckBuffer ) ; tmpCheckBuffer . rewind ( ) ; trie . checkBuffer = tmpCheckBuffer . asIntBuffer ( ) ; ByteBuffer tmpTailBuffer = ByteBuffer . allocate ( tailSize * 2 ) ; channel . read ( tmpTailBuffer ) ; tmpTailBuffer . rewind ( ) ; trie . tailBuffer = tmpTailBuffer . asCharBuffer ( ) ; input . close ( ) ; return trie ; }
Load Stored data
19,875
public void build ( Trie trie ) { ProgressLog . begin ( "building " + ( compact ? "compact" : "sparse" ) + " trie" ) ; baseBuffer = IntBuffer . allocate ( BASE_CHECK_INITIAL_SIZE ) ; baseBuffer . put ( 0 , 1 ) ; checkBuffer = IntBuffer . allocate ( BASE_CHECK_INITIAL_SIZE ) ; tailBuffer = CharBuffer . allocate ( TAIL_INITIAL_SIZE ) ; add ( - 1 , 0 , trie . getRoot ( ) ) ; reportUtilizationRate ( ) ; ProgressLog . end ( ) ; }
Construct double array trie which is equivalent to input trie
19,876
private int matchTail ( int base , int index , String key ) { int positionInTailArr = base - TAIL_OFFSET ; int keyLength = key . length ( ) ; for ( int i = 0 ; i < keyLength ; i ++ ) { if ( key . charAt ( i ) != tailBuffer . get ( positionInTailArr + i ) ) { return - 1 ; } } return tailBuffer . get ( positionInTailArr + keyLength ) == TERMINATING_CHARACTER ? index : 0 ; }
Check match in tail array
19,877
private int findBase ( int index , List < Trie . Node > nodes ) { int base = baseBuffer . get ( index ) ; if ( base < 0 ) { return base ; } while ( true ) { boolean collision = false ; for ( Trie . Node node : nodes ) { int nextIndex = index + base + node . getKey ( ) ; maxBaseCheckIndex = Math . max ( maxBaseCheckIndex , nextIndex ) ; if ( baseBuffer . capacity ( ) <= nextIndex ) { extendBuffers ( nextIndex ) ; } if ( baseBuffer . get ( nextIndex ) != 0 ) { base ++ ; collision = true ; break ; } } if ( ! collision ) { break ; } } for ( Trie . Node node : nodes ) { baseBuffer . put ( index + base + node . getKey ( ) , node . getKey ( ) == TERMINATING_CHARACTER ? - 1 : 1 ) ; } return base ; }
Find base value for current node which contains input nodes . They are children of current node . Set default base value which is one at the index of each input node .
19,878
public static boolean checkAdd ( INDArray first , INDArray second , double maxRelativeDifference , double minAbsDifference ) { RealMatrix rmFirst = convertToApacheMatrix ( first ) ; RealMatrix rmSecond = convertToApacheMatrix ( second ) ; INDArray result = first . add ( second ) ; RealMatrix rmResult = rmFirst . add ( rmSecond ) ; if ( ! checkShape ( rmResult , result ) ) return false ; boolean ok = checkEntries ( rmResult , result , maxRelativeDifference , minAbsDifference ) ; if ( ! ok ) { INDArray onCopies = Shape . toOffsetZeroCopy ( first ) . add ( Shape . toOffsetZeroCopy ( second ) ) ; printFailureDetails ( first , second , rmResult , result , onCopies , "add" ) ; } return ok ; }
Same as checkMmul but for matrix addition
19,879
public static boolean checkSubtract ( INDArray first , INDArray second , double maxRelativeDifference , double minAbsDifference ) { RealMatrix rmFirst = convertToApacheMatrix ( first ) ; RealMatrix rmSecond = convertToApacheMatrix ( second ) ; INDArray result = first . sub ( second ) ; RealMatrix rmResult = rmFirst . subtract ( rmSecond ) ; if ( ! checkShape ( rmResult , result ) ) return false ; boolean ok = checkEntries ( rmResult , result , maxRelativeDifference , minAbsDifference ) ; if ( ! ok ) { INDArray onCopies = Shape . toOffsetZeroCopy ( first ) . sub ( Shape . toOffsetZeroCopy ( second ) ) ; printFailureDetails ( first , second , rmResult , result , onCopies , "sub" ) ; } return ok ; }
Same as checkMmul but for matrix subtraction
19,880
public static void assertNoWorkspacesOpen ( String msg , boolean allowScopedOut ) throws ND4JWorkspaceException { if ( Nd4j . getWorkspaceManager ( ) . anyWorkspaceActiveForCurrentThread ( ) ) { MemoryWorkspace currWs = Nd4j . getMemoryManager ( ) . getCurrentWorkspace ( ) ; if ( allowScopedOut && ( currWs == null || currWs instanceof DummyWorkspace ) ) return ; List < MemoryWorkspace > l = Nd4j . getWorkspaceManager ( ) . getAllWorkspacesForCurrentThread ( ) ; List < String > workspaces = new ArrayList < > ( l . size ( ) ) ; for ( MemoryWorkspace ws : l ) { if ( ws . isScopeActive ( ) ) { workspaces . add ( ws . getId ( ) ) ; } } throw new ND4JWorkspaceException ( msg + " - Open/active workspaces: " + workspaces ) ; } }
Assert that no workspaces are currently open
19,881
public Tree findHead ( Tree parentNode ) { Tree cursor = parentNode . getType ( ) . equals ( "TOP" ) ? parentNode . firstChild ( ) : parentNode ; while ( cursor . children ( ) != null && ! cursor . children ( ) . isEmpty ( ) ) cursor = findHead2 ( cursor ) ; return cursor ; }
Finds the bottom most head
19,882
public GenericDictionaryEntry parse ( String entry ) { String [ ] fields = parseLine ( entry ) ; String surface = fields [ 0 ] ; short leftId = Short . parseShort ( fields [ 1 ] ) ; short rightId = Short . parseShort ( fields [ 2 ] ) ; short wordCost = Short . parseShort ( fields [ 3 ] ) ; List < String > pos = new ArrayList < > ( ) ; pos . addAll ( Arrays . asList ( fields ) . subList ( 4 , 10 ) ) ; List < String > features = new ArrayList < > ( ) ; features . addAll ( Arrays . asList ( fields ) . subList ( 10 , fields . length ) ) ; GenericDictionaryEntry dictionaryEntry = new GenericDictionaryEntry . Builder ( ) . surface ( surface ) . leftId ( leftId ) . rightId ( rightId ) . wordCost ( wordCost ) . pos ( pos ) . features ( features ) . build ( ) ; return dictionaryEntry ; }
which is okay for all the dictionaries supported so far ...
19,883
public Rational multiply ( final BigInteger val ) { Rational val2 = new Rational ( val , BigInteger . ONE ) ; return ( multiply ( val2 ) ) ; }
Multiply by a BigInteger .
19,884
public Rational multiply ( final int val ) { BigInteger tmp = BigInteger . valueOf ( val ) ; return multiply ( tmp ) ; }
Multiply by an integer .
19,885
public Rational divide ( BigInteger val ) { Rational val2 = new Rational ( val , BigInteger . ONE ) ; return ( divide ( val2 ) ) ; }
Divide by an integer .
19,886
public Rational add ( Rational val ) { BigInteger num = a . multiply ( val . b ) . add ( b . multiply ( val . a ) ) ; BigInteger deno = b . multiply ( val . b ) ; return ( new Rational ( num , deno ) ) ; }
Add another fraction .
19,887
public Rational add ( BigInteger val ) { Rational val2 = new Rational ( val , BigInteger . ONE ) ; return ( add ( val2 ) ) ; }
Add another integer .
19,888
public Rational subtract ( Rational val ) { Rational val2 = val . negate ( ) ; return ( add ( val2 ) ) ; }
Subtract another fraction . 7
19,889
public Rational subtract ( BigInteger val ) { Rational val2 = new Rational ( val , BigInteger . ONE ) ; return ( subtract ( val2 ) ) ; }
Subtract an integer .
19,890
public BigInteger trunc ( ) { if ( b . compareTo ( BigInteger . ONE ) == 0 ) { return a ; } else { return a . divide ( b ) ; } }
Remove the fractional part .
19,891
public double doubleValue ( ) { BigDecimal adivb = ( new BigDecimal ( a ) ) . divide ( new BigDecimal ( b ) , MathContext . DECIMAL128 ) ; return adivb . doubleValue ( ) ; }
Return a double value representation .
19,892
public float floatValue ( ) { BigDecimal adivb = ( new BigDecimal ( a ) ) . divide ( new BigDecimal ( b ) , MathContext . DECIMAL128 ) ; return adivb . floatValue ( ) ; }
Return a float value representation .
19,893
public BigDecimal BigDecimalValue ( MathContext mc ) { BigDecimal n = new BigDecimal ( a ) ; BigDecimal d = new BigDecimal ( b ) ; return n . divide ( d , mc ) ; }
Return a representation as BigDecimal .
19,894
public String toFString ( int digits ) { if ( b . compareTo ( BigInteger . ONE ) != 0 ) { MathContext mc = new MathContext ( digits , RoundingMode . DOWN ) ; BigDecimal f = ( new BigDecimal ( a ) ) . divide ( new BigDecimal ( b ) , mc ) ; return ( f . toString ( ) ) ; } else { return a . toString ( ) ; } }
Return a string in floating point format .
19,895
protected void normalize ( ) { final BigInteger g = a . gcd ( b ) ; if ( g . compareTo ( BigInteger . ONE ) > 0 ) { a = a . divide ( g ) ; b = b . divide ( g ) ; } if ( b . compareTo ( BigInteger . ZERO ) == - 1 ) { a = a . negate ( ) ; b = b . negate ( ) ; } }
Normalize to coprime numerator and denominator . Also copy a negative sign of the denominator to the numerator .
19,896
protected void initializeConstraints ( Builder < ? > builder ) { List < LayerConstraint > allConstraints = new ArrayList < > ( ) ; if ( builder . allParamConstraints != null && ! initializer ( ) . paramKeys ( this ) . isEmpty ( ) ) { for ( LayerConstraint c : builder . allParamConstraints ) { LayerConstraint c2 = c . clone ( ) ; c2 . setParams ( new HashSet < > ( initializer ( ) . paramKeys ( this ) ) ) ; allConstraints . add ( c2 ) ; } } if ( builder . weightConstraints != null && ! initializer ( ) . weightKeys ( this ) . isEmpty ( ) ) { for ( LayerConstraint c : builder . weightConstraints ) { LayerConstraint c2 = c . clone ( ) ; c2 . setParams ( new HashSet < > ( initializer ( ) . weightKeys ( this ) ) ) ; allConstraints . add ( c2 ) ; } } if ( builder . biasConstraints != null && ! initializer ( ) . biasKeys ( this ) . isEmpty ( ) ) { for ( LayerConstraint c : builder . biasConstraints ) { LayerConstraint c2 = c . clone ( ) ; c2 . setParams ( new HashSet < > ( initializer ( ) . biasKeys ( this ) ) ) ; allConstraints . add ( c2 ) ; } } if ( ! allConstraints . isEmpty ( ) ) { this . constraints = allConstraints ; } else { this . constraints = null ; } this . iDropout = builder . iDropout ; }
Initialize the weight constraints . Should be called last in the outer - most constructor
19,897
public int [ ] readLabels ( int num ) throws IOException { int [ ] out = new int [ num ] ; for ( int i = 0 ; i < num ; i ++ ) out [ i ] = readLabel ( ) ; return out ; }
Read the specified number of labels from the current position
19,898
public double asum ( INDArray arr ) { if ( arr . isSparse ( ) ) { return Nd4j . getSparseBlasWrapper ( ) . level1 ( ) . asum ( arr ) ; } if ( Nd4j . getExecutioner ( ) . getProfilingMode ( ) == OpExecutioner . ProfilingMode . ALL ) OpProfiler . getInstance ( ) . processBlasCall ( false , arr ) ; if ( arr . data ( ) . dataType ( ) == DataType . DOUBLE ) { DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , arr ) ; return dasum ( arr . length ( ) , arr , BlasBufferUtil . getBlasStride ( arr ) ) ; } else if ( arr . data ( ) . dataType ( ) == DataType . FLOAT ) { DefaultOpExecutioner . validateDataType ( DataType . FLOAT , arr ) ; return sasum ( arr . length ( ) , arr , BlasBufferUtil . getBlasStride ( arr ) ) ; } else { DefaultOpExecutioner . validateDataType ( DataType . HALF , arr ) ; return hasum ( arr . length ( ) , arr , BlasBufferUtil . getBlasStride ( arr ) ) ; } }
computes the sum of magnitudes of all vector elements or for a complex vector x the sum
19,899
public int iamax ( INDArray arr ) { if ( arr . isSparse ( ) ) { return Nd4j . getSparseBlasWrapper ( ) . level1 ( ) . iamax ( arr ) ; } if ( Nd4j . getExecutioner ( ) . getProfilingMode ( ) == OpExecutioner . ProfilingMode . ALL ) OpProfiler . getInstance ( ) . processBlasCall ( false , arr ) ; if ( arr . data ( ) . dataType ( ) == DataType . DOUBLE ) { DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , arr ) ; return idamax ( arr . length ( ) , arr , BlasBufferUtil . getBlasStride ( arr ) ) ; } else { DefaultOpExecutioner . validateDataType ( DataType . FLOAT , arr ) ; return isamax ( arr . length ( ) , arr , BlasBufferUtil . getBlasStride ( arr ) ) ; } }
finds the element of a vector that has the largest absolute value .