idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
19,300
/**
 * Returns a list containing only the unique values in a collection,
 * preserving first-seen (encounter) order.
 *
 * @param collection input values; not modified
 * @return a new list with duplicates removed, in encounter order
 */
public static <T> List<T> getUnique(Collection<T> collection) {
    // LinkedHashSet deduplicates while preserving insertion order in a
    // single pass, replacing the manual set-plus-list bookkeeping.
    return new ArrayList<>(new LinkedHashSet<>(collection));
}
Returns a list containing only unique values in a collection
19,301
/**
 * Returns the total number of parameters in the model, summed over all
 * parameter arrays in {@code params}.
 *
 * @return total parameter count
 */
public long numParams() {
    // Bug fix: the accumulator was an int, which silently overflowed for
    // models with more than Integer.MAX_VALUE parameters even though the
    // method returns long.
    long ret = 0;
    for (INDArray val : params.values())
        ret += val.length();
    return ret;
}
The number of parameters for the model
19,302
/**
 * Adds a Chromosome to the population, triggering a cull first when the
 * population is already at capacity, then re-sorts by the configured
 * comparator and notifies all population listeners.
 */
public void add(Chromosome element) {
    boolean atCapacity = population.size() == populationSize;
    if (atCapacity) {
        cullOperator.cullPopulation();
    }
    population.add(element);
    Collections.sort(population, chromosomeComparator);
    triggerPopulationChangedListeners(population);
}
Add a Chromosome to the population and call the PopulationListeners . Culling may be triggered .
19,303
// Reads the named data set from the given HDF5 group path and returns it as an
// ND4J array; with no groups the archive root file is used. All access is
// serialized on the archive-wide LOCK_OBJECT, matching every other accessor in
// this class. Opened groups are closed before returning.
public INDArray readDataSet ( String datasetName , String ... groups ) throws UnsupportedKerasConfigurationException { synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) return readDataSet ( this . file , datasetName ) ; Group [ ] groupArray = openGroups ( groups ) ; INDArray a = readDataSet ( groupArray [ groupArray . length - 1 ] , datasetName ) ; closeGroups ( groupArray ) ; return a ; } }
Read data set as ND4J array from group path .
19,304
// Reads a JSON-formatted string attribute from the given group path (root file
// when no groups are supplied). The native Attribute is explicitly
// deallocated after reading; opened groups are closed before returning.
// NOTE(review): the attribute/groups are not released if the read throws —
// consider try/finally in a future cleanup.
public String readAttributeAsJson ( String attributeName , String ... groups ) throws UnsupportedKerasConfigurationException { synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) { Attribute a = this . file . openAttribute ( attributeName ) ; String s = readAttributeAsJson ( a ) ; a . deallocate ( ) ; return s ; } Group [ ] groupArray = openGroups ( groups ) ; Attribute a = groupArray [ groups . length - 1 ] . openAttribute ( attributeName ) ; String s = readAttributeAsJson ( a ) ; a . deallocate ( ) ; closeGroups ( groupArray ) ; return s ; } }
Read JSON - formatted string attribute from group path .
19,305
// Checks whether the given group path (root file when no groups are supplied)
// contains an attribute with the given name. Serialized on the archive-wide
// lock; opened groups are closed before returning.
public boolean hasAttribute ( String attributeName , String ... groups ) { synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) return this . file . attrExists ( attributeName ) ; Group [ ] groupArray = openGroups ( groups ) ; boolean b = groupArray [ groupArray . length - 1 ] . attrExists ( attributeName ) ; closeGroups ( groupArray ) ; return b ; } }
Check whether group path contains string attribute .
19,306
// Returns the names of all data sets (H5O_TYPE_DATASET children) under the
// given group path, or under the root file when no groups are supplied.
public List < String > getDataSets ( String ... groups ) { synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) return getObjects ( this . file , H5O_TYPE_DATASET ) ; Group [ ] groupArray = openGroups ( groups ) ; List < String > ls = getObjects ( groupArray [ groupArray . length - 1 ] , H5O_TYPE_DATASET ) ; closeGroups ( groupArray ) ; return ls ; } }
Get list of data sets from group path .
19,307
// Returns the names of all sub-groups (H5O_TYPE_GROUP children) under the
// given group path, or under the root file when no groups are supplied.
public List < String > getGroups ( String ... groups ) { synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) return getObjects ( this . file , H5O_TYPE_GROUP ) ; Group [ ] groupArray = openGroups ( groups ) ; List < String > ls = getObjects ( groupArray [ groupArray . length - 1 ] , H5O_TYPE_GROUP ) ; closeGroups ( groupArray ) ; return ls ; } }
Get list of groups from group path .
19,308
/**
 * Returns the names of all children of the given HDF5 group whose object
 * type matches the requested type constant.
 *
 * @param fileGroup group to enumerate
 * @param objType   HDF5 object type (e.g. H5O_TYPE_DATASET, H5O_TYPE_GROUP)
 * @return names of matching children, in enumeration order
 */
private List<String> getObjects(Group fileGroup, int objType) {
    synchronized (Hdf5Archive.LOCK_OBJECT) {
        List<String> groups = new ArrayList<>();
        for (int i = 0; i < fileGroup.getNumObjs(); i++) {
            BytePointer objPtr = fileGroup.getObjnameByIdx(i);
            // Reuse the pointer already fetched instead of issuing a second
            // native getObjnameByIdx lookup for the same index.
            if (fileGroup.childObjType(objPtr) == objType)
                groups.add(objPtr.getString());
        }
        return groups;
    }
}
Get list of objects with a given type from a file group .
19,309
// Reads a variable-length string attribute expected to contain JSON. The
// buffer starts at 2000 bytes and doubles each iteration; the empty catch is
// the retry trigger — a truncated buffer yields unparseable JSON, so the loop
// grows the buffer and tries again. Gives up once the multiplier exceeds 1024
// (~2 MB). The VarLenType is deallocated after a successful parse.
private String readAttributeAsJson ( Attribute attribute ) throws UnsupportedKerasConfigurationException { synchronized ( Hdf5Archive . LOCK_OBJECT ) { VarLenType vl = attribute . getVarLenType ( ) ; int bufferSizeMult = 1 ; String s ; while ( true ) { byte [ ] attrBuffer = new byte [ bufferSizeMult * 2000 ] ; BytePointer attrPointer = new BytePointer ( attrBuffer ) ; attribute . read ( vl , attrPointer ) ; attrPointer . get ( attrBuffer ) ; s = new String ( attrBuffer ) ; ObjectMapper mapper = new ObjectMapper ( ) ; mapper . enable ( DeserializationFeature . FAIL_ON_READING_DUP_TREE_KEY ) ; try { mapper . readTree ( s ) ; break ; } catch ( IOException e ) { } bufferSizeMult *= 2 ; if ( bufferSizeMult > 1024 ) { throw new UnsupportedKerasConfigurationException ( "Could not read abnormally long HDF5 attribute" ) ; } } vl . deallocate ( ) ; return s ; } }
Read JSON - formatted string attribute .
19,310
// Reads a variable-length string attribute. The buffer grows linearly (in
// 2000-byte steps) until the decoded string ends with a NUL terminator —
// the signal that the whole value fit — at which point all NULs are stripped.
// Gives up after 1000 growth steps (~2 MB).
private String readAttributeAsString ( Attribute attribute ) throws UnsupportedKerasConfigurationException { synchronized ( Hdf5Archive . LOCK_OBJECT ) { VarLenType vl = attribute . getVarLenType ( ) ; int bufferSizeMult = 1 ; String s = null ; while ( true ) { byte [ ] attrBuffer = new byte [ bufferSizeMult * 2000 ] ; BytePointer attrPointer = new BytePointer ( attrBuffer ) ; attribute . read ( vl , attrPointer ) ; attrPointer . get ( attrBuffer ) ; s = new String ( attrBuffer ) ; if ( s . endsWith ( "\u0000" ) ) { s = s . replace ( "\u0000" , "" ) ; break ; } bufferSizeMult ++ ; if ( bufferSizeMult > 1000 ) { throw new UnsupportedKerasConfigurationException ( "Could not read abnormally long HDF5 attribute" ) ; } } vl . deallocate ( ) ; return s ; } }
Read attribute as string .
19,311
/**
 * Reads a fixed-length string attribute from the archive root using the
 * given buffer size.
 *
 * @param attributeName name of the attribute on the root file
 * @param bufferSize    buffer size to read into
 * @return the attribute contents as a string
 */
public String readAttributeAsFixedLengthString(String attributeName, int bufferSize) throws UnsupportedKerasConfigurationException {
    synchronized (Hdf5Archive.LOCK_OBJECT) {
        Attribute a = this.file.openAttribute(attributeName);
        try {
            return readAttributeAsFixedLengthString(a, bufferSize);
        } finally {
            // Fix: previously the native attribute leaked when the read threw.
            a.deallocate();
        }
    }
}
Read string attribute from group path .
19,312
// Reads an attribute into a caller-sized fixed buffer and decodes it as a
// string (no growth/retry, unlike readAttributeAsString). NOTE(review): the
// VarLenType is not released if attribute.read throws — confirm whether a
// try/finally is warranted.
private String readAttributeAsFixedLengthString ( Attribute attribute , int bufferSize ) throws UnsupportedKerasConfigurationException { synchronized ( Hdf5Archive . LOCK_OBJECT ) { VarLenType vl = attribute . getVarLenType ( ) ; byte [ ] attrBuffer = new byte [ bufferSize ] ; BytePointer attrPointer = new BytePointer ( attrBuffer ) ; attribute . read ( vl , attrPointer ) ; attrPointer . get ( attrBuffer ) ; vl . deallocate ( ) ; return new String ( attrBuffer ) ; } }
Read attribute of fixed buffer size as string .
19,313
// Returns the leading eigenvectors (as columns) covering at least the given
// fraction of the weight mass. NOTE(review): the weights are eigenvalues^-0.5
// (Transforms.pow with -0.5), not the eigenvalues themselves — verify this
// matches the intended "fraction of variance" semantics. At least one
// dimension is always included (ndims increments before the threshold check).
public INDArray reducedBasis ( double variance ) { INDArray vars = Transforms . pow ( eigenvalues , - 0.5 , true ) ; double res = vars . sumNumber ( ) . doubleValue ( ) ; double total = 0.0 ; int ndims = 0 ; for ( int i = 0 ; i < vars . columns ( ) ; i ++ ) { ndims ++ ; total += vars . getDouble ( i ) ; if ( total / res > variance ) break ; } INDArray result = Nd4j . create ( eigenvectors . rows ( ) , ndims ) ; for ( int i = 0 ; i < ndims ; i ++ ) result . putColumn ( i , eigenvectors . getColumn ( i ) ) ; return result ; }
Return a reduced basis set that covers a certain fraction of the variance of the data
19,314
// Projects row-wise data (same feature count as the training data) into the
// eigenvector basis about the mean: subtracts the mean row vector, then
// contracts with the transposed eigenvectors via tensorMmul and transposes
// the result in place.
public INDArray convertToComponents ( INDArray data ) { INDArray dx = data . subRowVector ( mean ) ; return Nd4j . tensorMmul ( eigenvectors . transpose ( ) , dx , new int [ ] [ ] { { 1 } , { 1 } } ) . transposei ( ) ; }
Takes a set of data on each row with the same number of features as the constructing data and returns the data in the coordinates of the basis set about the mean .
19,315
// Inverse of convertToComponents: maps principal-component coordinates back
// to the original feature space and re-adds the mean (in place on the
// intermediate result). Callers must zero-fill columns for any components
// that were dropped before calling this.
public INDArray convertBackToFeatures ( INDArray data ) { return Nd4j . tensorMmul ( eigenvectors , data , new int [ ] [ ] { { 1 } , { 1 } } ) . transposei ( ) . addiRowVector ( mean ) ; }
Take the data that has been transformed to the principal components about the mean and transform it back into the original feature set . Make sure to fill in zeroes in columns where components were dropped!
19,316
// Projects A onto its leading nDims principal components: computes the
// factor matrix via pca_factor and multiplies. NOTE(review): pca_factor
// mutates A in place when normalize is true (mean subtraction) and gesvd may
// overwrite A — so A is not preserved by this call.
public static INDArray pca ( INDArray A , int nDims , boolean normalize ) { INDArray factor = pca_factor ( A , nDims , normalize ) ; return A . mmul ( factor ) ; }
Calculates pca vectors of a matrix for a flags number of reduced features returns the reduced feature set The return is a projection of A onto principal nDims components
19,317
// Computes the projection factor (n x nDims, column-major) whose columns are
// the leading right singular vectors of A. When normalize is true the column
// means are subtracted from A IN PLACE (subiRowVector) — callers should not
// assume A is untouched. The SVD (gesvd) is asked only for VT; U is not
// computed (null).
public static INDArray pca_factor ( INDArray A , int nDims , boolean normalize ) { if ( normalize ) { INDArray mean = A . mean ( 0 ) ; A . subiRowVector ( mean ) ; } long m = A . rows ( ) ; long n = A . columns ( ) ; INDArray s = Nd4j . create ( A . dataType ( ) , m < n ? m : n ) ; INDArray VT = Nd4j . create ( A . dataType ( ) , new long [ ] { n , n } , 'f' ) ; Nd4j . getBlasWrapper ( ) . lapack ( ) . gesvd ( A , s , null , VT ) ; INDArray V = VT . transpose ( ) ; INDArray factor = Nd4j . create ( A . dataType ( ) , new long [ ] { n , nDims } , 'f' ) ; for ( int i = 0 ; i < nDims ; i ++ ) { factor . putColumn ( i , V . getColumn ( i ) ) ; } return factor ; }
Calculates pca factors of a matrix for a flags number of reduced features returns the factors to scale observations
19,318
// Variance-driven dimensionality reduction about the mean: builds the
// covariance matrix, extracts principal components, then keeps the leading
// components until the cumulative weight fraction exceeds the threshold.
// NOTE(review): like reducedBasis, the weights are eigenvalues^-0.5 — verify
// against the intended variance-fraction definition. At least one component
// is always kept.
public static INDArray pca2 ( INDArray in , double variance ) { INDArray [ ] covmean = covarianceMatrix ( in ) ; INDArray [ ] pce = principalComponents ( covmean [ 0 ] ) ; INDArray vars = Transforms . pow ( pce [ 1 ] , - 0.5 , true ) ; double res = vars . sumNumber ( ) . doubleValue ( ) ; double total = 0.0 ; int ndims = 0 ; for ( int i = 0 ; i < vars . columns ( ) ; i ++ ) { ndims ++ ; total += vars . getDouble ( i ) ; if ( total / res > variance ) break ; } INDArray result = Nd4j . create ( in . columns ( ) , ndims ) ; for ( int i = 0 ; i < ndims ; i ++ ) result . putColumn ( i , pce [ 0 ] . getColumn ( i ) ) ; return result ; }
This method performs a dimensionality reduction including principal components that cover a fraction of the total variance of the system . It does all calculations about the mean .
19,319
/**
 * Builds a human-readable timestamp ("h:m:s.s") for this wave's length in
 * seconds. Seconds are always printed; minutes are printed whenever they are
 * nonzero or hours are shown.
 *
 * @return the formatted timestamp string
 */
public String timestamp() {
    float totalSeconds = this.length();
    float second = totalSeconds % 60;
    int minute = (int) totalSeconds / 60 % 60;
    int hour = (int) (totalSeconds / 3600);
    StringBuilder sb = new StringBuilder();
    if (hour > 0) {
        sb.append(hour).append(':');
    }
    // Bug fix: a zero minute used to be skipped even when hours were
    // printed, collapsing e.g. 2h 0m 30s to the ambiguous "2:30.0".
    if (minute > 0 || hour > 0) {
        sb.append(minute).append(':');
    }
    sb.append(second);
    return sb.toString();
}
Formats the wave's length in seconds as an h:m:s timestamp string
19,320
/**
 * Reads the fingerprint bytes from the given file path.
 *
 * @param fingerprintFile path of the fingerprint file
 * @return fingerprint bytes, or null if an I/O error occurred
 */
public byte[] getFingerprintFromFile(String fingerprintFile) {
    byte[] fingerprint = null;
    // try-with-resources: the original leaked the stream when
    // getFingerprintFromInputStream or the read threw.
    try (InputStream fis = new FileInputStream(fingerprintFile)) {
        fingerprint = getFingerprintFromInputStream(fis);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return fingerprint;
}
Get bytes from fingerprint file
19,321
/**
 * Reads all fingerprint bytes from the given input stream.
 *
 * @param inputStream stream to consume (not closed by this method)
 * @return all bytes of the stream, or null if an I/O error occurred
 */
public byte[] getFingerprintFromInputStream(InputStream inputStream) {
    byte[] fingerprint = null;
    try {
        // Bug fix: available() only reports what can be read without
        // blocking, and a single read() may return fewer bytes than
        // requested — both could silently truncate the fingerprint.
        // Read in a loop until EOF instead.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int n;
        while ((n = inputStream.read(chunk)) != -1) {
            out.write(chunk, 0, n);
        }
        fingerprint = out.toByteArray();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return fingerprint;
}
Get bytes from fingerprint inputstream
19,322
/**
 * Saves the fingerprint bytes to the given file, overwriting any existing
 * content. I/O errors are reported to stderr and otherwise ignored.
 *
 * @param fingerprint bytes to write
 * @param filename    destination file path
 */
public void saveFingerprintAsFile(byte[] fingerprint, String filename) {
    // try-with-resources: the original leaked the descriptor when write()
    // threw before close() was reached.
    try (FileOutputStream fileOutputStream = new FileOutputStream(filename)) {
        fileOutputStream.write(fingerprint);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Save fingerprint to a file
19,323
// Reads labels, one per line, from the file returned by getResourceFile().
// NOTE(review): the textResource parameter is never used — the file always
// comes from getResourceFile(); confirm whether the parameter should select
// the resource or be removed.
protected ArrayList < String > getLabels ( String textResource ) throws IOException { ArrayList < String > labels = new ArrayList < > ( ) ; File resourceFile = getResourceFile ( ) ; try ( InputStream is = new BufferedInputStream ( new FileInputStream ( resourceFile ) ) ; Scanner s = new Scanner ( is ) ) { while ( s . hasNextLine ( ) ) { labels . add ( s . nextLine ( ) ) ; } } return labels ; }
Returns labels based on the text file resource .
19,324
/**
 * Sets the Huffman tree points, boxing each primitive value into a freshly
 * allocated list.
 */
public void setPoints(int[] points) {
    this.points = new ArrayList<>(points.length);
    for (int value : points) {
        this.points.add(value);
    }
}
Sets Huffman tree points
19,325
/**
 * Prints the names of all registered compressors to standard out, each
 * wrapped in square brackets.
 */
public void printAvailableCompressors() {
    StringBuilder sb = new StringBuilder("Available compressors: ");
    for (String name : codecs.keySet()) {
        sb.append("[").append(name).append("] ");
    }
    System.out.println(sb.toString());
}
Prints available compressors to standard out
19,326
/**
 * Compresses the data buffer with the named algorithm (case-insensitive).
 *
 * @throws RuntimeException if no codec is registered under that name
 */
public DataBuffer compress(DataBuffer buffer, String algorithm) {
    algorithm = algorithm.toUpperCase();
    if (codecs.containsKey(algorithm)) {
        return codecs.get(algorithm).compress(buffer);
    }
    throw new RuntimeException("Non-existent compression algorithm requested: [" + algorithm + "]");
}
Compress the data buffer given a specified algorithm
19,327
/**
 * Returns a compressed version of the given ndarray using the named
 * algorithm (case-insensitive).
 *
 * @throws RuntimeException if no codec is registered under that name
 */
public INDArray compress(INDArray array, String algorithm) {
    algorithm = algorithm.toUpperCase();
    if (codecs.containsKey(algorithm)) {
        return codecs.get(algorithm).compress(array);
    }
    throw new RuntimeException("Non-existent compression algorithm requested: [" + algorithm + "]");
}
Returns a compressed version of the given ndarray
19,328
// Decompresses a buffer into the target type. The buffer must carry the
// COMPRESSED dtype; the codec is looked up from the algorithm recorded in the
// buffer's own CompressionDescriptor, so the caller never names it.
public DataBuffer decompress ( DataBuffer buffer , DataType targetType ) { if ( buffer . dataType ( ) != DataType . COMPRESSED ) throw new IllegalStateException ( "You can't decompress DataBuffer with dataType of: " + buffer . dataType ( ) ) ; CompressedDataBuffer comp = ( CompressedDataBuffer ) buffer ; CompressionDescriptor descriptor = comp . getCompressionDescriptor ( ) ; if ( ! codecs . containsKey ( descriptor . getCompressionAlgorithm ( ) ) ) throw new RuntimeException ( "Non-existent compression algorithm requested: [" + descriptor . getCompressionAlgorithm ( ) + "]" ) ; return codecs . get ( descriptor . getCompressionAlgorithm ( ) ) . decompress ( buffer , targetType ) ; }
Decompress the given databuffer
19,329
// In-place decompression of the given ndarray. A no-op when the array is not
// compressed; otherwise the codec named in the array's CompressionDescriptor
// performs the in-place decompression.
public void decompressi ( INDArray array ) { if ( array . data ( ) . dataType ( ) != DataType . COMPRESSED ) return ; val comp = ( CompressedDataBuffer ) array . data ( ) ; val descriptor = comp . getCompressionDescriptor ( ) ; if ( ! codecs . containsKey ( descriptor . getCompressionAlgorithm ( ) ) ) throw new RuntimeException ( "Non-existent compression algorithm requested: [" + descriptor . getCompressionAlgorithm ( ) + "]" ) ; codecs . get ( descriptor . getCompressionAlgorithm ( ) ) . decompressi ( array ) ; }
In-place decompression of the given ndarray. If the ndarray isn't compressed, this will do nothing.
19,330
/**
 * Switches the minority label for the given index from the default 1 to 0
 * (and correspondingly the majority class from 0 to 1).
 *
 * @throws IllegalArgumentException if the index is not present in the
 *         target minority distribution map
 */
public void overrideMinorityDefault(int index) {
    if (!targetMinorityDistMap.containsKey(index)) {
        throw new IllegalArgumentException(
                "Index specified is not contained in the target minority distribution map specified with the preprocessor. Map contains "
                        + ArrayUtils.toString(targetMinorityDistMap.keySet().toArray()));
    }
    minorityLabelMap.put(index, 0);
}
Will change the default minority label from 1 to 0 and correspondingly the majority class from 0 to 1 for the label at the index specified
19,331
// Convenience overload: copies every record from reader to writer and closes
// the writer on completion (delegates with closeOnCompletion = true).
public static void convert ( RecordReader reader , RecordWriter writer ) throws IOException { convert ( reader , writer , true ) ; }
Write all values from the specified record reader to the specified record writer . Closes the record writer on completion
19,332
/**
 * Copies every record from the reader to the writer, optionally closing the
 * writer when done.
 *
 * @throws UnsupportedOperationException if the reader has no records
 */
public static void convert(RecordReader reader, RecordWriter writer, boolean closeOnCompletion) throws IOException {
    if (!reader.hasNext()) {
        throw new UnsupportedOperationException("Cannot convert RecordReader: reader has no next element");
    }
    // hasNext() was just verified, so a do-while is safe here.
    do {
        writer.write(reader.next());
    } while (reader.hasNext());
    if (closeOnCompletion) {
        writer.close();
    }
}
Write all values from the specified record reader to the specified record writer . Optionally close the record writer on completion
19,333
// Maps a Keras constraint name (including all known aliases) onto the
// corresponding DL4J LayerConstraint, reading its parameters from the Keras
// constraint config. The "+ 1" on every dimension converts Keras' 0-based
// axis to DL4J's parameter-dimension convention. Unknown names throw
// UnsupportedKerasConfigurationException.
public static LayerConstraint mapConstraint ( String kerasConstraint , KerasLayerConfiguration conf , Map < String , Object > constraintConfig ) throws UnsupportedKerasConfigurationException { LayerConstraint constraint ; if ( kerasConstraint . equals ( conf . getLAYER_FIELD_MINMAX_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MINMAX_NORM_CONSTRAINT_ALIAS ( ) ) ) { double min = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MINMAX_MIN_CONSTRAINT ( ) ) ; double max = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MINMAX_MAX_CONSTRAINT ( ) ) ; double rate = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_RATE ( ) ) ; int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new MinMaxNormConstraint ( min , max , rate , dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_MAX_NORM_CONSTRAINT_ALIAS_2 ( ) ) ) { double max = ( double ) constraintConfig . get ( conf . getLAYER_FIELD_MAX_CONSTRAINT ( ) ) ; int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new MaxNormConstraint ( max , dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_UNIT_NORM_CONSTRAINT_ALIAS_2 ( ) ) ) { int dim = ( int ) constraintConfig . get ( conf . getLAYER_FIELD_CONSTRAINT_DIM ( ) ) ; constraint = new UnitNormConstraint ( dim + 1 ) ; } else if ( kerasConstraint . equals ( conf . getLAYER_FIELD_NON_NEG_CONSTRAINT ( ) ) || kerasConstraint . equals ( conf . getLAYER_FIELD_NON_NEG_CONSTRAINT_ALIAS ( ) ) || kerasConstraint . equals ( conf . 
getLAYER_FIELD_NON_NEG_CONSTRAINT_ALIAS_2 ( ) ) ) { constraint = new NonNegativeConstraint ( ) ; } else { throw new UnsupportedKerasConfigurationException ( "Unknown keras constraint " + kerasConstraint ) ; } return constraint ; }
Map Keras to DL4J constraint .
19,334
// Extracts a LayerConstraint from a Keras layer configuration. Returns null
// when the constraint field is absent or explicitly null. Keras 2 nests the
// constraint parameters one level deeper than Keras 1, hence the version
// branch. Throws InvalidKerasConfigurationException when the constraint map
// lacks a class-name field.
public static LayerConstraint getConstraintsFromConfig ( Map < String , Object > layerConfig , String constraintField , KerasLayerConfiguration conf , int kerasMajorVersion ) throws InvalidKerasConfigurationException , UnsupportedKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( ! innerConfig . containsKey ( constraintField ) ) { return null ; } HashMap constraintMap = ( HashMap ) innerConfig . get ( constraintField ) ; if ( constraintMap == null ) return null ; String kerasConstraint ; if ( constraintMap . containsKey ( conf . getLAYER_FIELD_CONSTRAINT_NAME ( ) ) ) { kerasConstraint = ( String ) constraintMap . get ( conf . getLAYER_FIELD_CONSTRAINT_NAME ( ) ) ; } else { throw new InvalidKerasConfigurationException ( "Keras layer is missing " + conf . getLAYER_FIELD_CONSTRAINT_NAME ( ) + " field" ) ; } Map < String , Object > constraintConfig ; if ( kerasMajorVersion == 2 ) { constraintConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( constraintMap , conf ) ; } else { constraintConfig = constraintMap ; } LayerConstraint layerConstraint = mapConstraint ( kerasConstraint , conf , constraintConfig ) ; return layerConstraint ; }
Get constraint initialization from Keras layer configuration .
19,335
/**
 * Attempts to load a legacy (1.0.0-beta2 or earlier) DL4J IEvaluation from
 * JSON by rewriting the old org.deeplearning4j.eval.* class names to their
 * ND4J locations. More specific names (EvaluationBinary, ROCBinary,
 * ROCMultiClass) are checked before the plain "ROC" prefix they contain.
 *
 * @param json              legacy JSON text
 * @param originalException rethrown when no legacy class name is recognized
 */
protected static <T extends IEvaluation> T attempFromLegacyFromJson(String json, IllegalArgumentException originalException) {
    // Fix: use String.replace (literal) instead of replaceAll, which treated
    // these dotted class names as regular expressions where '.' matches any
    // character.
    if (json.contains("org.deeplearning4j.eval.Evaluation")) {
        String newJson = json.replace("org.deeplearning4j.eval.Evaluation", "org.nd4j.evaluation.classification.Evaluation");
        return (T) fromJson(newJson, Evaluation.class);
    }
    if (json.contains("org.deeplearning4j.eval.EvaluationBinary")) {
        String newJson = json.replace("org.deeplearning4j.eval.EvaluationBinary", "org.nd4j.evaluation.classification.EvaluationBinary")
                .replace("org.deeplearning4j.eval.ROC", "org.nd4j.evaluation.classification.ROC")
                .replace("org.deeplearning4j.eval.curves.", "org.nd4j.evaluation.curves.");
        return (T) fromJson(newJson, EvaluationBinary.class);
    }
    if (json.contains("org.deeplearning4j.eval.EvaluationCalibration")) {
        String newJson = json.replace("org.deeplearning4j.eval.EvaluationCalibration", "org.nd4j.evaluation.classification.EvaluationCalibration")
                .replace("org.deeplearning4j.eval.curves.", "org.nd4j.evaluation.curves.");
        return (T) fromJson(newJson, EvaluationCalibration.class);
    }
    if (json.contains("org.deeplearning4j.eval.ROCBinary")) {
        String newJson = json.replace("org.deeplearning4j.eval.ROCBinary", "org.nd4j.evaluation.classification.ROCBinary")
                .replace("org.deeplearning4j.eval.ROC", "org.nd4j.evaluation.classification.ROC")
                .replace("org.deeplearning4j.eval.curves.", "org.nd4j.evaluation.curves.");
        return (T) fromJson(newJson, ROCBinary.class);
    }
    if (json.contains("org.deeplearning4j.eval.ROCMultiClass")) {
        String newJson = json.replace("org.deeplearning4j.eval.ROCMultiClass", "org.nd4j.evaluation.classification.ROCMultiClass")
                .replace("org.deeplearning4j.eval.ROC", "org.nd4j.evaluation.classification.ROC")
                .replace("org.deeplearning4j.eval.curves.", "org.nd4j.evaluation.curves.");
        return (T) fromJson(newJson, ROCMultiClass.class);
    }
    if (json.contains("org.deeplearning4j.eval.ROC")) {
        String newJson = json.replace("org.deeplearning4j.eval.ROC", "org.nd4j.evaluation.classification.ROC")
                .replace("org.deeplearning4j.eval.curves.", "org.nd4j.evaluation.curves.");
        return (T) fromJson(newJson, ROC.class);
    }
    if (json.contains("org.deeplearning4j.eval.RegressionEvaluation")) {
        String newJson = json.replace("org.deeplearning4j.eval.RegressionEvaluation", "org.nd4j.evaluation.regression.RegressionEvaluation");
        return (T) fromJson(newJson, RegressionEvaluation.class);
    }
    throw originalException;
}
Attempt to load DL4J IEvaluation JSON from 1.0.0-beta2 or earlier. Since the IEvaluation classes were moved to ND4J with no major changes, a simple find-and-replace on the class names is used.
19,336
// In-place shuffle of a single ndarray along the given dimensions; delegates
// to the list-based overload with a singleton list.
public void shuffle ( INDArray array , Random rnd , int ... dimension ) { shuffle ( Collections . singletonList ( array ) , rnd , dimension ) ; }
In place shuffle of an ndarray along a specified set of dimensions
19,337
/**
 * Customizes the output directory for memory crash reports. Passing null
 * selects the current working directory ("user.dir", falling back to the
 * empty path when that property is absent).
 */
public static void crashDumpOutputDirectory(File rootDir) {
    if (rootDir != null) {
        crashDumpRootDirectory = rootDir;
        return;
    }
    String userDir = System.getProperty("user.dir");
    crashDumpRootDirectory = new File(userDir == null ? "" : userDir);
}
Method that can be use to customize the output directory for memory crash reporting . By default the current working directory will be used .
19,338
// Extracts a weight-regularization coefficient from a Keras config, returning
// 0.0 when absent. Handles both the flat form (regularizerType key directly
// present) and the nested "L1L2" class form; the try/catch double-casts
// because JSON may deliver the value as either a Double or an Integer.
public static double getWeightRegularizerFromConfig ( Map < String , Object > layerConfig , KerasLayerConfiguration conf , String configField , String regularizerType ) throws UnsupportedKerasConfigurationException , InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( innerConfig . containsKey ( configField ) ) { Map < String , Object > regularizerConfig = ( Map < String , Object > ) innerConfig . get ( configField ) ; if ( regularizerConfig != null ) { if ( regularizerConfig . containsKey ( regularizerType ) ) { return ( double ) regularizerConfig . get ( regularizerType ) ; } if ( regularizerConfig . containsKey ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) && regularizerConfig . get ( conf . getLAYER_FIELD_CLASS_NAME ( ) ) . equals ( "L1L2" ) ) { Map < String , Object > innerRegularizerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( regularizerConfig , conf ) ; try { return ( double ) innerRegularizerConfig . get ( regularizerType ) ; } catch ( Exception e ) { return ( double ) ( int ) innerRegularizerConfig . get ( regularizerType ) ; } } } } return 0.0 ; }
Get weight regularization from Keras weight regularization configuration .
19,339
/**
 * Returns the shapes of all parameters — weights first, then biases — in a
 * fresh insertion-ordered map.
 */
public Map<String, long[]> getParamShapes() {
    Map<String, long[]> shapes = new LinkedHashMap<>(weightParams);
    shapes.putAll(biasParams);
    return shapes;
}
Get the parameter shapes for all parameters
19,340
/**
 * Returns the current Allocator configuration, read under the global
 * read lock.
 */
public Configuration getConfiguration() {
    // Fix: acquire the lock BEFORE the try block. With lock() inside try, a
    // failed acquisition would reach finally and unlock a lock this thread
    // never held, throwing IllegalMonitorStateException.
    globalLock.readLock().lock();
    try {
        return configuration;
    } finally {
        globalLock.readLock().unlock();
    }
}
Returns current Allocator configuration
19,341
// Returns the device pointer for the given buffer in the given context.
// NOTE(review): the shape and isView parameters are ignored — the handler is
// consulted with the buffer and context only; confirm whether they are kept
// for interface compatibility.
public Pointer getPointer ( DataBuffer buffer , AllocationShape shape , boolean isView , CudaContext context ) { return memoryHandler . getDevicePointer ( buffer , context ) ; }
This method returns actual device pointer valid for specified shape of current object
19,342
// Returns the device pointer backing the given INDArray in the given context;
// empty arrays have no backing buffer and yield null.
public Pointer getPointer ( INDArray array , CudaContext context ) { if ( array . isEmpty ( ) ) return null ; return memoryHandler . getDevicePointer ( array . data ( ) , context ) ; }
This method returns actual device pointer valid for specified INDArray
19,343
// Returns the host pointer for the given INDArray, first synchronizing host
// data so the host copy is current; empty arrays yield null.
public Pointer getHostPointer ( INDArray array ) { if ( array . isEmpty ( ) ) return null ; synchronizeHostData ( array ) ; return memoryHandler . getHostPointer ( array . data ( ) ) ; }
This method returns actual host pointer valid for current object
19,344
// Releases the memory behind an allocation point and drops it from the
// allocations map. NOTE(review): in the DEVICE branch, MemoryProvider.free is
// invoked twice — once before and once after flipping the status to HOST —
// which appears to release the device copy and then the host copy; confirm
// against MemoryProvider.free semantics before refactoring.
public void freeMemory ( AllocationPoint point ) { if ( point . getAllocationStatus ( ) == AllocationStatus . DEVICE ) { this . getMemoryHandler ( ) . getMemoryProvider ( ) . free ( point ) ; point . setAllocationStatus ( AllocationStatus . HOST ) ; this . getMemoryHandler ( ) . getMemoryProvider ( ) . free ( point ) ; this . getMemoryHandler ( ) . forget ( point , AllocationStatus . DEVICE ) ; } else { this . getMemoryHandler ( ) . getMemoryProvider ( ) . free ( point ) ; this . getMemoryHandler ( ) . forget ( point , AllocationStatus . HOST ) ; } allocationsMap . remove ( point . getObjectId ( ) ) ; }
This method releases memory allocated for this allocation point
19,345
/**
 * Allocates the required chunk of memory according to the configured memory
 * model: IMMEDIATE allocates at the handler's initial location, DELAYED
 * allocates on the host. Any other model yields null, as before.
 */
public AllocationPoint allocateMemory(DataBuffer buffer, AllocationShape requiredMemory, boolean initialize) {
    AllocationPoint point = null;
    switch (configuration.getMemoryModel()) {
        case IMMEDIATE:
            point = allocateMemory(buffer, requiredMemory, memoryHandler.getInitialLocation(), initialize);
            break;
        case DELAYED:
            point = allocateMemory(buffer, requiredMemory, AllocationStatus.HOST, initialize);
            break;
        default:
            break;
    }
    return point;
}
This method allocates required chunk of memory
19,346
// Scans zero-copy (host) allocations in a bucket and purges points whose
// backing buffer has been garbage-collected, accumulating the bytes freed.
// NOTE(review): shortThreshold/longThreshold are computed but never used, and
// the trailing else branch is empty — likely remnants of a removed
// aggressiveness policy; confirm before deleting.
protected synchronized long seekUnusedZero ( Long bucketId , Aggressiveness aggressiveness ) { AtomicLong freeSpace = new AtomicLong ( 0 ) ; int totalElements = ( int ) memoryHandler . getAllocatedHostObjects ( bucketId ) ; float shortAverage = zeroShort . getAverage ( ) ; float longAverage = zeroLong . getAverage ( ) ; float shortThreshold = shortAverage / ( Aggressiveness . values ( ) . length - aggressiveness . ordinal ( ) ) ; float longThreshold = longAverage / ( Aggressiveness . values ( ) . length - aggressiveness . ordinal ( ) ) ; AtomicInteger elementsDropped = new AtomicInteger ( 0 ) ; AtomicInteger elementsSurvived = new AtomicInteger ( 0 ) ; for ( Long object : memoryHandler . getHostTrackingPoints ( bucketId ) ) { AllocationPoint point = getAllocationPoint ( object ) ; if ( point == null ) continue ; if ( point . getAllocationStatus ( ) == AllocationStatus . HOST ) { if ( point . getBuffer ( ) == null ) { purgeZeroObject ( bucketId , object , point , false ) ; freeSpace . addAndGet ( AllocationUtils . getRequiredMemory ( point . getShape ( ) ) ) ; elementsDropped . incrementAndGet ( ) ; continue ; } else { elementsSurvived . incrementAndGet ( ) ; } } else { } } log . debug ( "Zero {} elements checked: [{}], deleted: {}, survived: {}" , bucketId , totalElements , elementsDropped . get ( ) , elementsSurvived . get ( ) ) ; return freeSpace . get ( ) ; }
This method seeks for unused zero - copy memory allocations
19,347
// Scans device allocations for the given thread/device and purges points
// whose backing buffer is gone (purging both the device and the zero-copy
// record), accumulating bytes freed. NOTE(review): as in seekUnusedZero, the
// threshold variables and elementsMoved counter are computed/declared but
// never drive any decision — confirm before deleting.
protected long seekUnusedDevice ( Long threadId , Integer deviceId , Aggressiveness aggressiveness ) { AtomicLong freeSpace = new AtomicLong ( 0 ) ; float shortAverage = deviceShort . getAverage ( ) ; float longAverage = deviceLong . getAverage ( ) ; float shortThreshold = shortAverage / ( Aggressiveness . values ( ) . length - aggressiveness . ordinal ( ) ) ; float longThreshold = longAverage / ( Aggressiveness . values ( ) . length - aggressiveness . ordinal ( ) ) ; AtomicInteger elementsDropped = new AtomicInteger ( 0 ) ; AtomicInteger elementsMoved = new AtomicInteger ( 0 ) ; AtomicInteger elementsSurvived = new AtomicInteger ( 0 ) ; for ( Long object : memoryHandler . getDeviceTrackingPoints ( deviceId ) ) { AllocationPoint point = getAllocationPoint ( object ) ; if ( point . getBuffer ( ) == null ) { if ( point . getAllocationStatus ( ) == AllocationStatus . DEVICE ) { purgeDeviceObject ( threadId , deviceId , object , point , false ) ; freeSpace . addAndGet ( AllocationUtils . getRequiredMemory ( point . getShape ( ) ) ) ; purgeZeroObject ( point . getBucketId ( ) , object , point , false ) ; elementsDropped . incrementAndGet ( ) ; continue ; } ; } else { elementsSurvived . incrementAndGet ( ) ; } } log . debug ( "Thread/Device [" + threadId + "/" + deviceId + "] elements purged: [" + elementsDropped . get ( ) + "]; Relocated: [" + elementsMoved . get ( ) + "]; Survivors: [" + elementsSurvived . get ( ) + "]" ) ; return freeSpace . get ( ) ; }
This method seeks for unused device memory allocations for specified thread and device
19,348
// Asynchronous memcpy delegating to the memory handler; whether the copy is
// actually asynchronous depends on the handler/hardware.
public void memcpyAsync ( DataBuffer dstBuffer , Pointer srcPointer , long length , long dstOffset ) { this . memoryHandler . memcpyAsync ( dstBuffer , srcPointer , length , dstOffset ) ; }
This method implements asynchronous memcpy, if that's available on the current hardware
19,349
// Blocking memcpy from srcBuffer into dstBuffer, delegating to the handler.
public void memcpy ( DataBuffer dstBuffer , DataBuffer srcBuffer ) { this . memoryHandler . memcpy ( dstBuffer , srcBuffer ) ; }
This method implements blocking memcpy
19,350
/**
 * Returns this sequence as an ordered list of its elements' labels.
 */
public List<String> asLabels() {
    List<String> result = new ArrayList<>();
    for (T item : getElements()) {
        result.add(item.getLabel());
    }
    return result;
}
Returns this sequence as list of labels
19,351
/**
 * Counts word frequencies for a document, folding stop-words into a single
 * "STOP" bucket, adds the counts to the shared accumulator, and returns the
 * word list together with its size.
 */
public Pair<List<String>, AtomicLong> call(List<String> lstOfWords) throws Exception {
    List<String> stops = stopWords.getValue();
    Counter<String> counter = new Counter<>();
    for (String w : lstOfWords) {
        if (w.isEmpty())
            continue;
        // Stop-words all contribute to one shared "STOP" count; with no
        // stop-word list configured every word counts as itself.
        String key = (!stops.isEmpty() && stops.contains(w)) ? "STOP" : w;
        counter.incrementCount(key, 1.0f);
    }
    wordFreqAcc.add(counter);
    return new Pair<>(lstOfWords, new AtomicLong(lstOfWords.size()));
}
Function to add to word freq counter and total count of words
19,352
// Registers a pre/post-training hook on the master, lazily creating the hook
// list on first use. NOTE(review): the lazy init is not thread-safe; confirm
// whether hooks are only ever added from a single thread.
public void addHook ( TrainingHook trainingHook ) { if ( trainingHookList == null ) { trainingHookList = new ArrayList < > ( ) ; } trainingHookList . add ( trainingHook ) ; }
Add a hook for the master for pre and post training
19,353
// Maps an ONNX tensor element type to the nearest ND4J DataType. Note that
// both INT32 and INT64 collapse to INT (potentially lossy for INT64), and any
// unmapped ONNX type yields UNKNOWN rather than throwing.
public DataType nd4jTypeFromOnnxType ( OnnxProto3 . TensorProto . DataType dataType ) { switch ( dataType ) { case DOUBLE : return DataType . DOUBLE ; case FLOAT : return DataType . FLOAT ; case FLOAT16 : return DataType . HALF ; case INT32 : case INT64 : return DataType . INT ; default : return DataType . UNKNOWN ; } }
Convert an onnx type to the proper nd4j type
19,354
/**
 * Converts a single output vector to a classifier prediction by returning
 * the index of its largest entry (argmax). Ties keep the earliest index.
 */
public static double toClassifierPrediction(Vector vector) {
    int bestIndex = 0;
    double best = Double.NEGATIVE_INFINITY;
    int size = vector.size();
    for (int i = 0; i < size; i++) {
        double value = vector.apply(i);
        if (value > best) {
            best = value;
            bestIndex = i;
        }
    }
    return bestIndex;
}
This is for the edge case where you have a single output layer and need to convert the output layer to an index
19,355
public static LabeledPoint pointOf ( Collection < Writable > writables ) { double [ ] ret = new double [ writables . size ( ) - 1 ] ; int count = 0 ; double target = 0 ; for ( Writable w : writables ) { if ( count < writables . size ( ) - 1 ) ret [ count ++ ] = Float . parseFloat ( w . toString ( ) ) ; else target = Float . parseFloat ( w . toString ( ) ) ; } if ( target < 0 ) throw new IllegalStateException ( "Target must be >= 0" ) ; return new LabeledPoint ( target , Vectors . dense ( ret ) ) ; }
Returns a labeled point of the writables where the final item is the point and the rest of the items are features
19,356
public static JavaRDD < DataSet > fromLabeledPoint ( JavaRDD < LabeledPoint > data , final long numPossibleLabels , long batchSize ) { JavaRDD < DataSet > mappedData = data . map ( new Function < LabeledPoint , DataSet > ( ) { public DataSet call ( LabeledPoint lp ) { return fromLabeledPoint ( lp , numPossibleLabels ) ; } } ) ; return mappedData . repartition ( ( int ) ( mappedData . count ( ) / batchSize ) ) ; }
Convert an rdd of labeled point based on the specified batch size in to data set
19,357
public static JavaRDD < DataSet > fromLabeledPoint ( JavaSparkContext sc , JavaRDD < LabeledPoint > data , final long numPossibleLabels ) { return data . map ( new Function < LabeledPoint , DataSet > ( ) { public DataSet call ( LabeledPoint lp ) { return fromLabeledPoint ( lp , numPossibleLabels ) ; } } ) ; }
From labeled point
19,358
public static JavaRDD < DataSet > fromContinuousLabeledPoint ( JavaSparkContext sc , JavaRDD < LabeledPoint > data ) { return data . map ( new Function < LabeledPoint , DataSet > ( ) { public DataSet call ( LabeledPoint lp ) { return convertToDataset ( lp ) ; } } ) ; }
Convert rdd labeled points to a rdd dataset with continuous features
19,359
private static List < LabeledPoint > toLabeledPoint ( List < DataSet > labeledPoints ) { List < LabeledPoint > ret = new ArrayList < > ( ) ; for ( DataSet point : labeledPoints ) { ret . add ( toLabeledPoint ( point ) ) ; } return ret ; }
Convert a list of dataset in to a list of labeled points
19,360
public static JavaRDD < DataSet > fromContinuousLabeledPoint ( JavaRDD < LabeledPoint > data , boolean preCache ) { if ( preCache && ! data . getStorageLevel ( ) . useMemory ( ) ) { data . cache ( ) ; } return data . map ( new Function < LabeledPoint , DataSet > ( ) { public DataSet call ( LabeledPoint lp ) { return convertToDataset ( lp ) ; } } ) ; }
Converts a continuous JavaRDD LabeledPoint to a JavaRDD DataSet .
19,361
public static JavaRDD < DataSet > fromLabeledPoint ( JavaRDD < LabeledPoint > data , final long numPossibleLabels ) { return fromLabeledPoint ( data , numPossibleLabels , false ) ; }
Converts JavaRDD labeled points to JavaRDD datasets .
19,362
public static JavaRDD < DataSet > fromLabeledPoint ( JavaRDD < LabeledPoint > data , final long numPossibleLabels , boolean preCache ) { if ( preCache && ! data . getStorageLevel ( ) . useMemory ( ) ) { data . cache ( ) ; } return data . map ( new Function < LabeledPoint , DataSet > ( ) { public DataSet call ( LabeledPoint lp ) { return fromLabeledPoint ( lp , numPossibleLabels ) ; } } ) ; }
Converts JavaRDD labeled points to JavaRDD DataSets .
19,363
public static JavaRDD < LabeledPoint > fromDataSet ( JavaRDD < DataSet > data , boolean preCache ) { if ( preCache && ! data . getStorageLevel ( ) . useMemory ( ) ) { data . cache ( ) ; } return data . map ( new Function < DataSet , LabeledPoint > ( ) { public LabeledPoint call ( DataSet dataSet ) { return toLabeledPoint ( dataSet ) ; } } ) ; }
Convert an rdd of data set in to labeled point .
19,364
public static Constructor < ? > getEmptyConstructor ( Class < ? > clazz ) { Constructor < ? > c = clazz . getDeclaredConstructors ( ) [ 0 ] ; for ( int i = 0 ; i < clazz . getDeclaredConstructors ( ) . length ; i ++ ) { if ( clazz . getDeclaredConstructors ( ) [ i ] . getParameterTypes ( ) . length < 1 ) { c = clazz . getDeclaredConstructors ( ) [ i ] ; break ; } } return c ; }
Gets the empty constructor from a class
19,365
public static void setProperties ( Object obj , Properties props ) throws Exception { for ( Field field : obj . getClass ( ) . getDeclaredFields ( ) ) { field . setAccessible ( true ) ; if ( props . containsKey ( field . getName ( ) ) ) { set ( field , obj , props . getProperty ( field . getName ( ) ) ) ; } } }
Sets the properties of the given object
19,366
public static Properties getFieldsAsProperties ( Object obj , Class < ? > [ ] clazzes ) throws Exception { Properties props = new Properties ( ) ; for ( Field field : obj . getClass ( ) . getDeclaredFields ( ) ) { if ( Modifier . isStatic ( field . getModifiers ( ) ) ) continue ; field . setAccessible ( true ) ; Class < ? > type = field . getType ( ) ; if ( clazzes == null || contains ( type , clazzes ) ) { Object val = field . get ( obj ) ; if ( val != null ) props . put ( field . getName ( ) , val . toString ( ) ) ; } } return props ; }
Get fields as properties
19,367
public boolean pingDeviceForFreeMemory ( Integer deviceId , long requiredMemory ) { long freeMem = nativeOps . getDeviceFreeMemory ( - 1 ) ; if ( freeMem - requiredMemory < DEVICE_RESERVED_SPACE ) return false ; else return true ; }
This method checks specified device for specified amount of memory
19,368
public void plotVocab ( BarnesHutTsne tsne , int numWords , UiConnectionInfo connectionInfo ) { try { final List < String > labels = fitTnseAndGetLabels ( tsne , numWords ) ; final INDArray reducedData = tsne . getData ( ) ; StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < reducedData . rows ( ) && i < numWords ; i ++ ) { String word = labels . get ( i ) ; INDArray wordVector = reducedData . getRow ( i ) ; for ( int j = 0 ; j < wordVector . length ( ) ; j ++ ) { sb . append ( String . valueOf ( wordVector . getDouble ( j ) ) ) . append ( "," ) ; } sb . append ( word ) ; } String address = connectionInfo . getFirstPart ( ) + "/tsne/post/" + connectionInfo . getSessionId ( ) ; URI uri = new URI ( address ) ; HttpURLConnection connection = ( HttpURLConnection ) uri . toURL ( ) . openConnection ( ) ; connection . setRequestMethod ( "POST" ) ; connection . setRequestProperty ( "User-Agent" , "Mozilla/5.0" ) ; connection . setRequestProperty ( "Content-Type" , "multipart/form-data; boundary=-----TSNE-POST-DATA-----" ) ; connection . setDoOutput ( true ) ; final OutputStream outputStream = connection . getOutputStream ( ) ; final PrintWriter writer = new PrintWriter ( outputStream ) ; writer . println ( "-------TSNE-POST-DATA-----" ) ; writer . println ( "Content-Disposition: form-data; name=\"fileupload\"; filename=\"tsne.csv\"" ) ; writer . println ( "Content-Type: text/plain; charset=UTF-16" ) ; writer . println ( "Content-Transfer-Encoding: binary" ) ; writer . println ( ) ; writer . flush ( ) ; DataOutputStream dos = new DataOutputStream ( outputStream ) ; dos . writeBytes ( sb . toString ( ) ) ; dos . flush ( ) ; writer . println ( ) ; writer . flush ( ) ; dos . close ( ) ; outputStream . close ( ) ; try { int responseCode = connection . getResponseCode ( ) ; System . out . println ( "RESPONSE CODE: " + responseCode ) ; if ( responseCode != 200 ) { BufferedReader in = new BufferedReader ( new InputStreamReader ( connection . 
getInputStream ( ) ) ) ; String inputLine ; StringBuilder response = new StringBuilder ( ) ; while ( ( inputLine = in . readLine ( ) ) != null ) { response . append ( inputLine ) ; } in . close ( ) ; log . warn ( "Error posting to remote UI - received response code {}\tContent: {}" , response , response . toString ( ) ) ; } } catch ( IOException e ) { log . warn ( "Error posting to remote UI at {}" , uri , e ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Render the words via TSNE
19,369
public void putVector ( String word , INDArray vector ) { if ( word == null ) throw new IllegalArgumentException ( "No null words allowed" ) ; if ( vector == null ) throw new IllegalArgumentException ( "No null vectors allowed" ) ; int idx = vocab . indexOf ( word ) ; syn0 . slice ( idx ) . assign ( vector ) ; }
Inserts a word vector
19,370
public void consume ( InMemoryLookupTable < T > srcTable ) { if ( srcTable . vectorLength != this . vectorLength ) throw new IllegalStateException ( "You can't consume lookupTable with different vector lengths" ) ; if ( srcTable . syn0 == null ) throw new IllegalStateException ( "Source lookupTable Syn0 is NULL" ) ; this . resetWeights ( true ) ; AtomicInteger cntHs = new AtomicInteger ( 0 ) ; AtomicInteger cntNg = new AtomicInteger ( 0 ) ; if ( srcTable . syn0 . rows ( ) > this . syn0 . rows ( ) ) throw new IllegalStateException ( "You can't consume lookupTable with built for larger vocabulary without updating your vocabulary first" ) ; for ( int x = 0 ; x < srcTable . syn0 . rows ( ) ; x ++ ) { this . syn0 . putRow ( x , srcTable . syn0 . getRow ( x ) ) ; if ( this . syn1 != null && srcTable . syn1 != null ) this . syn1 . putRow ( x , srcTable . syn1 . getRow ( x ) ) ; else if ( cntHs . incrementAndGet ( ) == 1 ) log . info ( "Skipping syn1 merge" ) ; if ( this . syn1Neg != null && srcTable . syn1Neg != null ) { this . syn1Neg . putRow ( x , srcTable . syn1Neg . getRow ( x ) ) ; } else if ( cntNg . incrementAndGet ( ) == 1 ) log . info ( "Skipping syn1Neg merge" ) ; if ( cntHs . get ( ) > 0 && cntNg . get ( ) > 0 ) throw new ND4JIllegalStateException ( "srcTable has no syn1/syn1neg" ) ; } }
This method consumes weights of a given InMemoryLookupTable
19,371
public void initializeInstance ( PopulationModel populationModel ) { super . initializeInstance ( populationModel ) ; parentSelection . initializeInstance ( populationModel . getPopulation ( ) ) ; }
Will be called by the selection operator once the population model is instantiated .
19,372
public short [ ] interpolate ( int oldSampleRate , int newSampleRate , short [ ] samples ) { if ( oldSampleRate == newSampleRate ) { return samples ; } int newLength = Math . round ( ( ( float ) samples . length / oldSampleRate * newSampleRate ) ) ; float lengthMultiplier = ( float ) newLength / samples . length ; short [ ] interpolatedSamples = new short [ newLength ] ; for ( int i = 0 ; i < newLength ; i ++ ) { float currentPosition = i / lengthMultiplier ; int nearestLeftPosition = ( int ) currentPosition ; int nearestRightPosition = nearestLeftPosition + 1 ; if ( nearestRightPosition >= samples . length ) { nearestRightPosition = samples . length - 1 ; } float slope = samples [ nearestRightPosition ] - samples [ nearestLeftPosition ] ; float positionFromLeft = currentPosition - nearestLeftPosition ; interpolatedSamples [ i ] = ( short ) ( slope * positionFromLeft + samples [ nearestLeftPosition ] ) ; } return interpolatedSamples ; }
Do interpolation on the samples according to the original and destinated sample rates
19,373
public static < T > Set < T > intersection ( Collection < T > parentCollection , Collection < T > removeFromCollection ) { Set < T > results = new HashSet < > ( parentCollection ) ; results . retainAll ( removeFromCollection ) ; return results ; }
Set specific operations
19,374
public static < T > Set < T > difference ( Collection < ? extends T > s1 , Collection < ? extends T > s2 ) { Set < T > s3 = new HashSet < > ( s1 ) ; s3 . removeAll ( s2 ) ; return s3 ; }
Return is s1 \ s2
19,375
@ SuppressWarnings ( "unchecked" ) public static < T > T readObject ( InputStream is ) { try { ObjectInputStream ois = new ObjectInputStream ( is ) ; T ret = ( T ) ois . readObject ( ) ; ois . close ( ) ; return ret ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Reads an object from the given input stream
19,376
public static byte [ ] toByteArray ( Serializable toSave ) { try { ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; ObjectOutputStream os = new ObjectOutputStream ( bos ) ; os . writeObject ( toSave ) ; byte [ ] ret = bos . toByteArray ( ) ; os . close ( ) ; return ret ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Converts the given object to a byte array
19,377
public static void writeObject ( Serializable toSave , OutputStream writeTo ) { try { ObjectOutputStream os = new ObjectOutputStream ( writeTo ) ; os . writeObject ( toSave ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Writes the object to the output stream THIS DOES NOT FLUSH THE STREAM
19,378
public void initialize ( InputSplit split ) throws IOException , InterruptedException { super . initialize ( split ) ; this . iter = getIterator ( 0 ) ; this . initialized = true ; }
Using String as StandardCharsets . UTF_8 is not serializable
19,379
public static < K , V > CounterMap < K , V > parallelCounterMap ( ) { CounterMap < K , V > totalWords = new CounterMap < > ( ) ; return totalWords ; }
Returns a thread safe counter map
19,380
public void apply ( GloveWeightLookupTable table ) { table . getBias ( ) . putScalar ( w1 . getIndex ( ) , table . getBias ( ) . getDouble ( w1 . getIndex ( ) ) - w1BiasUpdate ) ; table . getBias ( ) . putScalar ( w2 . getIndex ( ) , table . getBias ( ) . getDouble ( w2 . getIndex ( ) ) - w2BiasUpdate ) ; table . getSyn0 ( ) . slice ( w1 . getIndex ( ) ) . subi ( w1Update ) ; table . getSyn0 ( ) . slice ( w2 . getIndex ( ) ) . subi ( w2Update ) ; table . getWeightAdaGrad ( ) . getHistoricalGradient ( ) . slice ( w1 . getIndex ( ) ) . addi ( w1History ) ; table . getWeightAdaGrad ( ) . getHistoricalGradient ( ) . slice ( w2 . getIndex ( ) ) . addi ( w2History ) ; table . getBiasAdaGrad ( ) . getHistoricalGradient ( ) . putScalar ( w1 . getIndex ( ) , table . getBiasAdaGrad ( ) . getHistoricalGradient ( ) . getDouble ( w1 . getIndex ( ) ) + w1BiasHistory ) ; table . getBiasAdaGrad ( ) . getHistoricalGradient ( ) . putScalar ( w2 . getIndex ( ) , table . getBiasAdaGrad ( ) . getHistoricalGradient ( ) . getDouble ( w2 . getIndex ( ) ) + w1BiasHistory ) ; }
Apply the changes to the table
19,381
public void set ( T value ) { try { lock . writeLock ( ) . lock ( ) ; this . value = value ; } finally { lock . writeLock ( ) . unlock ( ) ; } }
This method assigns new value
19,382
public boolean cas ( T expected , T newValue ) { try { lock . writeLock ( ) . lock ( ) ; if ( Objects . equals ( value , expected ) ) { this . value = newValue ; return true ; } else return false ; } finally { lock . writeLock ( ) . unlock ( ) ; } }
This method implements compare - and - swap
19,383
protected ImageWritable doTransform ( ImageWritable image , Random random ) { if ( image == null ) { return null ; } Mat mat = converter . convert ( image . getFrame ( ) ) ; Mat box = new Mat ( height , width , mat . type ( ) ) ; box . put ( borderValue ) ; x = ( mat . cols ( ) - width ) / 2 ; y = ( mat . rows ( ) - height ) / 2 ; int w = Math . min ( mat . cols ( ) , width ) ; int h = Math . min ( mat . rows ( ) , height ) ; Rect matRect = new Rect ( x , y , w , h ) ; Rect boxRect = new Rect ( x , y , w , h ) ; if ( x <= 0 ) { matRect . x ( 0 ) ; boxRect . x ( - x ) ; } else { matRect . x ( x ) ; boxRect . x ( 0 ) ; } if ( y <= 0 ) { matRect . y ( 0 ) ; boxRect . y ( - y ) ; } else { matRect . y ( y ) ; boxRect . y ( 0 ) ; } mat . apply ( matRect ) . copyTo ( box . apply ( boxRect ) ) ; return new ImageWritable ( converter . convert ( box ) ) ; }
Takes an image and returns a boxed version of the image .
19,384
public static INDArray pooling2D ( INDArray img , int kh , int kw , int sy , int sx , int ph , int pw , int dh , int dw , boolean isSameMode , Pooling2D . Pooling2DType type , Pooling2D . Divisor divisor , double extra , int virtualHeight , int virtualWidth , INDArray out ) { Pooling2D pooling = Pooling2D . builder ( ) . arrayInputs ( new INDArray [ ] { img } ) . arrayOutputs ( new INDArray [ ] { out } ) . config ( Pooling2DConfig . builder ( ) . dH ( dh ) . dW ( dw ) . extra ( extra ) . kH ( kh ) . kW ( kw ) . pH ( ph ) . pW ( pw ) . isSameMode ( isSameMode ) . sH ( sy ) . sW ( sx ) . virtualHeight ( virtualHeight ) . virtualWidth ( virtualWidth ) . type ( type ) . divisor ( divisor ) . build ( ) ) . build ( ) ; Nd4j . getExecutioner ( ) . execAndReturn ( pooling ) ; return out ; }
Pooling 2d implementation
19,385
public static int outSize ( int size , int k , int s , int p , int dilation , boolean coverAll ) { k = effectiveKernelSize ( k , dilation ) ; if ( coverAll ) return ( size + p * 2 - k + s - 1 ) / s + 1 ; else return ( size + p * 2 - k ) / s + 1 ; }
The out size for a convolution
19,386
public static INDArray conv2d ( INDArray input , INDArray kernel , Type type ) { return Nd4j . getConvolution ( ) . conv2d ( input , kernel , type ) ; }
2d convolution ( aka the last 2 dimensions
19,387
public static long [ ] dataBufferToArray ( DataBuffer buffer ) { int rank = buffer . getInt ( 0 ) ; val ret = new long [ Shape . shapeInfoLength ( rank ) ] ; ret [ 0 ] = rank ; for ( int e = 1 ; e < Shape . shapeInfoLength ( rank ) ; e ++ ) { ret [ e ] = buffer . getInt ( e ) ; } return ret ; }
Obtain the values from the shape buffer for the array
19,388
public static OptimizationConfiguration fromYaml ( String json ) { try { return JsonMapper . getYamlMapper ( ) . readValue ( json , OptimizationConfiguration . class ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } }
Create an optimization configuration from the json
19,389
public double calculateAverageAuc ( ) { double ret = 0.0 ; for ( int i = 0 ; i < numLabels ( ) ; i ++ ) { ret += calculateAUC ( i ) ; } return ret / ( double ) numLabels ( ) ; }
Macro - average AUC for all outcomes
19,390
public static Consumer < Subscription > subscriberLoop ( final FragmentHandler fragmentHandler , final int limit , final AtomicBoolean running , final AtomicBoolean launched ) { final IdleStrategy idleStrategy = new BusySpinIdleStrategy ( ) ; return subscriberLoop ( fragmentHandler , limit , running , idleStrategy , launched ) ; }
Return a reusable parametrized event loop that calls a default idler when no messages are received
19,391
public static Consumer < Subscription > subscriberLoop ( final FragmentHandler fragmentHandler , final int limit , final AtomicBoolean running , final IdleStrategy idleStrategy , final AtomicBoolean launched ) { return ( subscription ) -> { try { while ( running . get ( ) ) { idleStrategy . idle ( subscription . poll ( fragmentHandler , limit ) ) ; launched . set ( true ) ; } } catch ( final Exception ex ) { LangUtil . rethrowUnchecked ( ex ) ; } } ; }
Return a reusable parameterized event loop that calls and idler when no messages are received
19,392
public static void printAvailableImage ( final Image image ) { final Subscription subscription = image . subscription ( ) ; System . out . println ( String . format ( "Available image on %s streamId=%d sessionId=%d from %s" , subscription . channel ( ) , subscription . streamId ( ) , image . sessionId ( ) , image . sourceIdentity ( ) ) ) ; }
Print the information for an available image to stdout .
19,393
protected static int [ ] adaptForTensorDescr ( int [ ] shapeOrStrides ) { if ( shapeOrStrides . length >= 4 ) return shapeOrStrides ; int [ ] out = new int [ 4 ] ; int i = 0 ; for ( ; i < shapeOrStrides . length ; i ++ ) { out [ i ] = shapeOrStrides [ i ] ; } for ( ; i < 4 ; i ++ ) { out [ i ] = 1 ; } return out ; }
From CuDNN documentation - Tensors are restricted to having at least 4 dimensions ... When working with lower dimensional data it is recommended that the user create a 4Dtensor and set the size along unused dimensions to 1 .
19,394
protected void checkBufferCoherence ( ) { if ( values . length ( ) < length ) { throw new IllegalStateException ( "nnz is larger than capacity of buffers" ) ; } if ( values . length ( ) * rank ( ) != indices . length ( ) ) { throw new IllegalArgumentException ( "Sizes of values, indices and shape are incoherent." ) ; } }
Check that the length of indices and values are coherent and matches the rank of the matrix .
19,395
protected static DataBuffer createSparseInformationBuffer ( int rank ) { int [ ] flags = new int [ rank ] ; long [ ] sparseOffsets = new long [ rank ] ; int [ ] hiddenDimension = new int [ ] { - 1 } ; return Nd4j . getSparseInfoProvider ( ) . createSparseInformation ( flags , sparseOffsets , hiddenDimension , rank ) ; }
Create a SparseInfo databuffer given rank if of the sparse matrix .
19,396
protected static DataBuffer createValueBuffer ( float [ ] values ) { checkNotNull ( values ) ; if ( values . length == 0 ) { return Nd4j . createBuffer ( 1 ) ; } return Nd4j . createBuffer ( values ) ; }
Create a DataBuffer for values of given array of values .
19,397
protected static DataBuffer createIndiceBuffer ( long [ ] [ ] indices , long [ ] shape ) { checkNotNull ( indices ) ; checkNotNull ( shape ) ; if ( indices . length == 0 ) { return Nd4j . getDataBufferFactory ( ) . createLong ( shape . length ) ; } if ( indices . length == shape . length ) { return Nd4j . createBuffer ( ArrayUtil . flattenF ( indices ) ) ; } return Nd4j . createBuffer ( ArrayUtil . flatten ( indices ) ) ; }
Create a DataBuffer for indices of given arrays of indices .
19,398
public long [ ] translateToPhysical ( long [ ] virtualIndexes ) { long [ ] physicalIndexes = new long [ underlyingRank ( ) ] ; int idxPhy = 0 ; int hidden = 0 ; for ( int idxVir = 0 ; idxVir < virtualIndexes . length ; idxVir ++ ) { if ( hidden < getNumHiddenDimension ( ) && hiddenDimensions ( ) [ hidden ] == idxVir ) { hidden ++ ; } else { while ( idxPhy < underlyingRank ( ) && isDimensionFixed ( idxPhy ) ) { physicalIndexes [ idxPhy ] = sparseOffsets ( ) [ idxPhy ] ; idxPhy ++ ; } if ( idxPhy < underlyingRank ( ) && ! isDimensionFixed ( idxPhy ) ) { physicalIndexes [ idxPhy ] = sparseOffsets ( ) [ idxPhy ] + virtualIndexes [ idxVir ] ; idxPhy ++ ; } } } return physicalIndexes ; }
Translate the view index to the corresponding index of the original ndarray
19,399
public void addOrUpdate ( long [ ] indexes , double value ) { long [ ] physicalIndexes = isView ( ) ? translateToPhysical ( indexes ) : indexes ; for ( int i = 0 ; i < length ; i ++ ) { long [ ] idx = getUnderlyingIndicesOf ( i ) . asLong ( ) ; if ( Arrays . equals ( idx , physicalIndexes ) ) { if ( value == 0 ) { removeEntry ( i ) ; length -- ; } else { values . put ( i , value ) ; length ++ ; } return ; } } if ( value == 0 ) { return ; } while ( ! canInsert ( values , 1 ) ) { long size = ( long ) Math . ceil ( ( values . capacity ( ) * THRESHOLD_MEMORY_ALLOCATION ) ) ; values . reallocate ( size ) ; } values . put ( length , value ) ; while ( ! canInsert ( indices , physicalIndexes . length ) ) { long size = ( long ) Math . ceil ( ( indices . capacity ( ) * THRESHOLD_MEMORY_ALLOCATION ) ) ; indices . reallocate ( size ) ; } for ( int i = 0 ; i < physicalIndexes . length ; i ++ ) { indices . put ( length * rank ( ) + i , physicalIndexes [ i ] ) ; } length ++ ; isSorted = false ; }
Add a new element in the ndarray or update the value if there is already a non - null element at this position