idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
19,400
/**
 * Removes the sparse entry at position {@code idx} from this array's backing buffers:
 * the values buffer is shifted left by one slot past {@code idx}, and the indices
 * buffer is shifted left by one rank-length tuple.
 * NOTE(review): relies on the semantics of {@code shiftLeft}; assumes {@code idx} is the
 * position of the entry in sparse storage (not a dense coordinate) — confirm with callers.
 *
 * @param idx position of the entry to remove in the sparse storage
 * @return this array, for chaining
 */
public INDArray removeEntry ( int idx ) { values = shiftLeft ( values , idx + 1 , 1 , length ( ) ) ; indices = shiftLeft ( indices , ( int ) ( idx * shape . length ( ) + shape . length ( ) ) , ( int ) shape . length ( ) , indices . length ( ) ) ; return this ; }
Remove an element of the ndarray
19,401
/**
 * Returns the position in sparse storage of the value located at the given dense indexes.
 * Translates the dense indexes to physical (underlying) indices, then binary-searches.
 * Side effect: calls {@code sort()}, which reorders the backing buffers before searching.
 *
 * @param indexes dense coordinates of the element
 * @return the position in the indices buffer, or -1 if not present
 */
public int reverseIndexes ( int ... indexes ) { long [ ] idx = translateToPhysical ( ArrayUtil . toLongArray ( indexes ) ) ; sort ( ) ; return indexesBinarySearch ( 0 , ( int ) length ( ) , ArrayUtil . toInts ( idx ) ) ; }
Return the index of the value corresponding to the indexes
19,402
/**
 * Binary search for the position of the index tuple {@code idx} inside the (sorted)
 * indices buffer, restricted to {@code [lowerBound, upperBound)}.
 *
 * Fixes over the original:
 * - the original recursed with {@code min = mid} on the "greater" branch, which never
 *   shrinks a 2-element range (mid == min) and therefore recursed forever when the
 *   target was absent; we advance with {@code mid + 1} and iterate instead;
 * - the midpoint is computed with {@code >>>} to avoid int overflow of {@code min + max}.
 *
 * @param lowerBound inclusive lower bound of the search range
 * @param upperBound exclusive upper bound of the search range
 * @param idx the physical index tuple to locate
 * @return the position of the tuple in the indices buffer, or -1 if not found
 */
public int indexesBinarySearch ( int lowerBound , int upperBound , int [ ] idx ) {
    int min = lowerBound;
    int max = upperBound;
    while ( min < max ) {
        // overflow-safe midpoint (equivalent to (min + max) / 2 for non-negative bounds)
        int mid = ( min + max ) >>> 1;
        int [ ] midIdx = getUnderlyingIndicesOf ( mid ) . asInt ( );
        if ( Arrays . equals ( idx , midIdx ) ) {
            return mid;
        }
        if ( ArrayUtil . lessThan ( idx , midIdx ) ) {
            max = mid;      // target is strictly before mid
        } else {
            min = mid + 1;  // target is strictly after mid; guarantees progress
        }
    }
    return - 1;
}
Return the position of the idx array into the indexes buffer between the lower and upper bound .
19,403
/**
 * Returns a buffer containing the coordinates of the stored (non-zero) elements of this
 * vector: column positions for a row vector, row positions for a column vector.
 *
 * @return an int DataBuffer of length {@code length()} with one coordinate per stored element
 * @throws UnsupportedOperationException if this array is not a row or column vector
 */
public DataBuffer getVectorCoordinates ( ) { int idx ; if ( isRowVector ( ) ) { idx = 1 ; } else if ( isColumnVector ( ) ) { idx = 0 ; } else { throw new UnsupportedOperationException ( ) ; } int [ ] temp = new int [ ( int ) length ( ) ] ; for ( int i = 0 ; i < length ( ) ; i ++ ) { temp [ i ] = getUnderlyingIndicesOf ( i ) . getInt ( idx ) ; } return Nd4j . createBuffer ( temp ) ; }
Returns the indices of the non-zero elements of the vector
19,404
/**
 * Materializes this sparse ndarray as a dense one: starts from an all-zeros array of the
 * same shape and writes each stored (indices, value) pair into it.
 * Only DOUBLE and FLOAT data types are supported.
 *
 * @return a new dense INDArray with the same shape and contents
 * @throws UnsupportedOperationException for any other data type
 */
public INDArray toDense ( ) { INDArray result = Nd4j . zeros ( shape ( ) ) ; switch ( data ( ) . dataType ( ) ) { case DOUBLE : for ( int i = 0 ; i < length ; i ++ ) { int [ ] idx = getUnderlyingIndicesOf ( i ) . asInt ( ) ; double value = values . getDouble ( i ) ; result . putScalar ( idx , value ) ; } break ; case FLOAT : for ( int i = 0 ; i < length ; i ++ ) { int [ ] idx = getUnderlyingIndicesOf ( i ) . asInt ( ) ; float value = values . getFloat ( i ) ; result . putScalar ( idx , value ) ; } break ; default : throw new UnsupportedOperationException ( ) ; } return result ; }
Converts the sparse ndarray into a dense one
19,405
/**
 * Computes the per-dimension sparse offsets of a view, given the view's linear offset
 * into the original ndarray.
 *
 * First the linear offset is decomposed into one coordinate offset per view dimension
 * (dividing by the product of the trailing shape, row-major style). Then those
 * per-dimension offsets are mapped back onto the underlying rank: dimensions flagged
 * as fixed (flags()[dim] == 1) keep their existing sparse offset, the others add the
 * newly computed offset to it.
 *
 * @param offset linear offset of the view into the original array
 * @return sparse offsets, one per underlying dimension
 */
private long [ ] createSparseOffsets ( long offset ) { int underlyingRank = sparseOffsets ( ) . length ; long [ ] newOffsets = new long [ rank ( ) ] ; List < Long > shapeList = Longs . asList ( shape ( ) ) ; int penultimate = rank ( ) - 1 ; for ( int i = 0 ; i < penultimate ; i ++ ) { long prod = ArrayUtil . prodLong ( shapeList . subList ( i + 1 , rank ( ) ) ) ; newOffsets [ i ] = offset / prod ; offset = offset - newOffsets [ i ] * prod ; } newOffsets [ rank ( ) - 1 ] = offset % shape ( ) [ rank ( ) - 1 ] ; long [ ] finalOffsets = new long [ underlyingRank ] ; int dimNotFixed = 0 ; for ( int dim = 0 ; dim < underlyingRank ; dim ++ ) { if ( flags ( ) [ dim ] == 1 ) { finalOffsets [ dim ] = sparseOffsets ( ) [ dim ] ; } else { finalOffsets [ dim ] = newOffsets [ dimNotFixed ] + sparseOffsets ( ) [ dim ] ; dimNotFixed ++ ; } } return finalOffsets ; }
Compute the sparse offsets of the view we are getting for each dimension according to the original ndarray
19,406
/**
 * Returns the underlying index tuple of the i-th stored element, exactly as it is laid
 * out in the flat indices buffer of the original ndarray.
 *
 * @param i position of the element in sparse storage
 * @return an int DataBuffer of length {@code underlyingRank()} holding the tuple
 */
public DataBuffer getUnderlyingIndicesOf ( int i ) {
    int tupleLength = underlyingRank ( );
    int base = tupleLength * i;  // start of element i's tuple in the flat buffer
    int [ ] tuple = new int [ tupleLength ];
    for ( int pos = 0 ; pos < tupleLength ; pos ++ ) {
        tuple [ pos ] = indices . getInt ( base + pos );
    }
    return Nd4j . getDataBufferFactory ( ) . createInt ( tuple );
}
Returns the underlying indices of the element at the given index, as they actually appear in the original ndarray
19,407
/**
 * Returns the indices of the i-th stored element expressed in the dense-array context:
 * hidden (sliced-away) dimensions contribute 0, the remaining dimensions are read from
 * the element's tuple in the underlying indices buffer.
 *
 * BUG FIX: the original read {@code indices.getInt(j)}, ignoring the element offset
 * {@code from}, so for every i > 0 it returned element 0's indices. The sibling
 * {@code getUnderlyingIndicesOf} correctly reads {@code from + j}; this now does too.
 * The unused local {@code to} has also been removed.
 *
 * @param i position of the element in sparse storage
 * @return an int DataBuffer of length {@code rank} with the dense-context indices
 */
public DataBuffer getIndicesOf ( int i ) {
    int from = underlyingRank ( ) * i;  // start of element i's tuple in the flat indices buffer
    int [ ] arr = new int [ rank ];
    int j = 0;  // offset within element i's underlying tuple
    int k = 0;  // cursor over hiddenDimensions()
    for ( int dim = 0 ; dim < rank ; dim ++ ) {
        if ( k < hiddenDimensions ( ) . length && hiddenDimensions ( ) [ k ] == j ) {
            arr [ dim ] = 0;  // hidden dimension: index is always 0
            k ++;
        } else {
            arr [ dim ] = indices . getInt ( from + j );
            j ++;
        }
    }
    return Nd4j . getDataBufferFactory ( ) . createInt ( arr );
}
Returns the indices of the element of the given index in the array context
19,408
/**
 * Unimplemented stub: always returns {@code null}, regardless of arguments.
 * NOTE(review): callers receive null rather than an UnsupportedOperationException —
 * confirm this is the intended contract for the sparse implementation.
 */
public INDArray mmul ( INDArray other , INDArray result , MMulTranspose mMulTranspose ) { return null ; }
Perform a copy matrix multiplication
19,409
/**
 * Appends (or replaces) the normalizer entry in a persisted zip-format model file.
 *
 * Works by copying the model to a temporary file, then rewriting the original file:
 * every zip entry except any existing NORMALIZER_BIN is copied over, and the given
 * normalizer is serialized as a fresh NORMALIZER_BIN entry. The temp file is deleted
 * in the finally block; any failure is rethrown as a RuntimeException.
 *
 * @param f          persisted model file (zip format); rewritten in place
 * @param normalizer normalizer to embed in the model file
 */
public static void addNormalizerToModel ( File f , Normalizer < ? > normalizer ) { File tempFile = null ; try { tempFile = DL4JFileUtils . createTempFile ( "dl4jModelSerializerTemp" , "bin" ) ; tempFile . deleteOnExit ( ) ; Files . copy ( f , tempFile ) ; try ( ZipFile zipFile = new ZipFile ( tempFile ) ; ZipOutputStream writeFile = new ZipOutputStream ( new BufferedOutputStream ( new FileOutputStream ( f ) ) ) ) { Enumeration < ? extends ZipEntry > entries = zipFile . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; if ( entry . getName ( ) . equalsIgnoreCase ( NORMALIZER_BIN ) ) continue ; log . debug ( "Copying: {}" , entry . getName ( ) ) ; InputStream is = zipFile . getInputStream ( entry ) ; ZipEntry wEntry = new ZipEntry ( entry . getName ( ) ) ; writeFile . putNextEntry ( wEntry ) ; IOUtils . copy ( is , writeFile ) ; } ZipEntry nEntry = new ZipEntry ( NORMALIZER_BIN ) ; writeFile . putNextEntry ( nEntry ) ; NormalizerSerializer . getDefault ( ) . write ( normalizer , writeFile ) ; } } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } finally { if ( tempFile != null ) { tempFile . delete ( ) ; } } }
This method appends normalizer to a given persisted model .
19,410
/**
 * Restores a normalizer from a persisted model zip file (the NORMALIZER_BIN entry).
 *
 * Returns null if the model contains no normalizer. If restoring in the current format
 * fails, falls back to the deprecated format, then re-saves the normalizer into the file
 * in the current format so subsequent loads succeed directly.
 * NOTE(review): the cast to T is unchecked — the caller must request the correct type.
 *
 * @param file persisted model file (zip format)
 * @return the restored normalizer, or null if none is stored
 */
public static < T extends Normalizer > T restoreNormalizerFromFile ( File file ) { try ( ZipFile zipFile = new ZipFile ( file ) ) { ZipEntry norm = zipFile . getEntry ( NORMALIZER_BIN ) ; if ( norm == null ) return null ; return NormalizerSerializer . getDefault ( ) . restore ( zipFile . getInputStream ( norm ) ) ; } catch ( Exception e ) { log . warn ( "Error while restoring normalizer, trying to restore assuming deprecated format..." ) ; DataNormalization restoredDeprecated = restoreNormalizerFromFileDeprecated ( file ) ; log . warn ( "Recovered using deprecated method. Will now re-save the normalizer to fix this issue." ) ; addNormalizerToModel ( file , restoredDeprecated ) ; return ( T ) restoredDeprecated ; } }
This method restores normalizer from a given persisted model file
19,411
/**
 * Restores a normalizer from an input stream containing a persisted model.
 * Spools the stream to a temporary file (zip access needs random access) and delegates
 * to {@code restoreNormalizerFromFile}; the temp file is deleted in the finally block.
 *
 * @param is input stream of a persisted model (validated by checkInputStream)
 * @return the restored normalizer, or null if none is stored
 * @throws IOException if the stream cannot be read or spooled
 */
public static < T extends Normalizer > T restoreNormalizerFromInputStream ( InputStream is ) throws IOException { checkInputStream ( is ) ; File tmpFile = null ; try { tmpFile = tempFileFromStream ( is ) ; return restoreNormalizerFromFile ( tmpFile ) ; } finally { if ( tmpFile != null ) { tmpFile . delete ( ) ; } } }
This method restores the normalizer from a persisted model file .
19,412
/**
 * Loads this normalizer's mean and std statistics from the given binary files
 * (as written by {@code save}).
 *
 * @param mean file holding the binary-serialized mean ndarray
 * @param std  file holding the binary-serialized std ndarray
 * @throws IOException if either file cannot be read
 */
public void load ( File mean , File std ) throws IOException { this . mean = Nd4j . readBinary ( mean ) ; this . std = Nd4j . readBinary ( std ) ; }
Load the given mean and std
19,413
/**
 * Saves this normalizer's current mean and std statistics to the given files in
 * Nd4j binary format (readable back via {@code load}).
 *
 * @param mean destination file for the mean ndarray
 * @param std  destination file for the std ndarray
 * @throws IOException if either file cannot be written
 */
public void save ( File mean , File std ) throws IOException { Nd4j . saveBinary ( this . mean , mean ) ; Nd4j . saveBinary ( this . std , std ) ; }
Save the current mean and std
19,414
/**
 * Standardizes the data set's features in two steps: subtract the stored mean
 * (row-vector-wise), then divide by the stored std.
 *
 * @param dataSet data set whose features are replaced with the normalized values
 */
public void transform ( DataSet dataSet ) {
    INDArray centered = dataSet . getFeatures ( ) . subRowVector ( mean );
    dataSet . setFeatures ( centered );
    INDArray scaled = dataSet . getFeatures ( ) . divRowVector ( std );
    dataSet . setFeatures ( scaled );
}
Transform the data
19,415
/**
 * Deserializes this writable from the input: a one-byte version header, then either
 * nothing (null-array header) or an Nd4j-serialized array. Resets the cached hash so
 * it is recomputed lazily for the new contents.
 *
 * @param in source to read from (wrapped as a DataInputStream for Nd4j)
 * @throws IOException on read failure
 * @throws IllegalStateException if the version header byte is unrecognized
 */
public void readFields ( DataInput in ) throws IOException { DataInputStream dis = new DataInputStream ( new DataInputWrapperStream ( in ) ) ; byte header = dis . readByte ( ) ; if ( header != NDARRAY_SER_VERSION_HEADER && header != NDARRAY_SER_VERSION_HEADER_NULL ) { throw new IllegalStateException ( "Unexpected NDArrayWritable version header - stream corrupt?" ) ; } if ( header == NDARRAY_SER_VERSION_HEADER_NULL ) { array = null ; return ; } array = Nd4j . read ( dis ) ; hash = null ; }
Deserialize into a row vector of default type .
19,416
/**
 * Serializes this writable: a one-byte version header, followed by the Nd4j-serialized
 * array (omitted entirely when the array is null — only the null header is written).
 *
 * @param out destination to write to
 * @throws IOException on write failure
 */
public void write ( DataOutput out ) throws IOException {
    if ( array == null ) {
        out . write ( NDARRAY_SER_VERSION_HEADER_NULL );
        return;
    }
    // views must be duplicated so the serialized buffer is contiguous
    INDArray contiguous = array . isView ( ) ? array . dup ( ) : array;
    out . write ( NDARRAY_SER_VERSION_HEADER );
    Nd4j . write ( contiguous , new DataOutputStream ( new DataOutputWrapperStream ( out ) ) );
}
Serialize array data linearly .
19,417
/**
 * Applies the Nesterov momentum update in place.
 *
 * Updates the velocity as v = momentum * v - lr * gradient, then writes the Nesterov
 * lookahead update momentum * vPrev + (-momentum - 1) * v into {@code gradient} via
 * the OldAddOp (note: {@code gradient} is overwritten with the final update).
 * The statement order here is load-bearing: vPrev must be duplicated before v is
 * mutated — do not reorder.
 *
 * @param gradient gradient view; overwritten with the Nesterov update
 * @param iteration current iteration (for momentum/learning-rate schedules)
 * @param epoch current epoch (for momentum/learning-rate schedules)
 * @throws IllegalStateException if the updater state view {@code v} was never initialized
 */
public void applyUpdater ( INDArray gradient , int iteration , int epoch ) { if ( v == null ) throw new IllegalStateException ( "Updater has not been initialized with view state" ) ; double momentum = config . currentMomentum ( iteration , epoch ) ; double learningRate = config . getLearningRate ( iteration , epoch ) ; INDArray vPrev = v . dup ( gradientReshapeOrder ) ; v . muli ( momentum ) . subi ( gradient . dup ( gradientReshapeOrder ) . muli ( learningRate ) ) ; Nd4j . getExecutioner ( ) . exec ( new OldAddOp ( vPrev . muli ( momentum ) , v . mul ( - momentum - 1 ) , gradient ) ) ; }
Get the nesterov update
19,418
/**
 * Stops this parameter server: shuts down the transport, disposes the subscription,
 * drops all subscribers and queued updates, and flips the launch/stop flags.
 * Idempotent — returns immediately if already stopped.
 */
public synchronized void shutdown ( ) {
    if ( stopLock . get ( ) ) {
        return;  // already shut down
    }
    transport . shutdown ( );
    disposable . dispose ( );
    // drop every subscriber list and any updates not yet consumed
    updaterParamsSubscribers . clear ( );
    modelParamsSubsribers . clear ( );
    updatesSubscribers . clear ( );
    updatesQueue . clear ( );
    launchLock . set ( false );
    stopLock . set ( true );
}
This method stops parameter server
19,419
/**
 * Drains and returns all updates received from the network so far.
 * The internal queue is emptied as a side effect.
 *
 * @return the drained updates, possibly empty, never null
 */
public Collection < INDArray > getUpdates ( ) {
    ArrayList < INDArray > drained = new ArrayList < > ( );
    updatesQueue . drainTo ( drained );
    return drained;
}
This method returns updates received from network
19,420
/**
 * Builds a Viterbi lattice over the input text.
 *
 * Adds BOS, then for each position where a previous token ends, looks up dictionary
 * tokens starting there; unknown-word processing runs either in search mode or when no
 * unknown word already covers the position. Optionally merges user-dictionary entries,
 * then adds EOS.
 *
 * @param text input text to tokenize
 * @return the populated lattice (text length + 2 positions, including BOS/EOS)
 */
public ViterbiLattice build ( String text ) { int textLength = text . length ( ) ; ViterbiLattice lattice = new ViterbiLattice ( textLength + 2 ) ; lattice . addBos ( ) ; int unknownWordEndIndex = - 1 ; for ( int startIndex = 0 ; startIndex < textLength ; startIndex ++ ) { if ( lattice . tokenEndsWhereCurrentTokenStarts ( startIndex ) ) { String suffix = text . substring ( startIndex ) ; boolean found = processIndex ( lattice , startIndex , suffix ) ; if ( searchMode || unknownWordEndIndex <= startIndex ) { int [ ] categories = characterDefinitions . lookupCategories ( suffix . charAt ( 0 ) ) ; for ( int i = 0 ; i < categories . length ; i ++ ) { int category = categories [ i ] ; unknownWordEndIndex = processUnknownWord ( category , i , lattice , unknownWordEndIndex , startIndex , suffix , found ) ; } } } } if ( useUserDictionary ) { processUserDictionary ( text , lattice ) ; } lattice . addEos ( ) ; return lattice ; }
Build lattice from input text
19,421
/**
 * Repairs the lattice to the LEFT of a newly inserted user-dictionary entry.
 *
 * Scans backwards from {@code index} for the nearest position with start nodes, picks
 * the least-overlapping glue candidate there, and inserts an INSERTED-type glue node
 * whose surface is the candidate's prefix covering the gap. Stops after the first
 * successful insertion (or when no candidate exists anywhere).
 *
 * @param lattice lattice being repaired in place
 * @param index   position just before the inserted user entry
 */
private void repairBrokenLatticeBefore ( ViterbiLattice lattice , int index ) { ViterbiNode [ ] [ ] nodeStartIndices = lattice . getStartIndexArr ( ) ; for ( int startIndex = index ; startIndex > 0 ; startIndex -- ) { if ( nodeStartIndices [ startIndex ] != null ) { ViterbiNode glueBase = findGlueNodeCandidate ( index , nodeStartIndices [ startIndex ] , startIndex ) ; if ( glueBase != null ) { int length = index + 1 - startIndex ; String surface = glueBase . getSurface ( ) . substring ( 0 , length ) ; ViterbiNode glueNode = createGlueNode ( startIndex , glueBase , surface ) ; lattice . addNode ( glueNode , startIndex , startIndex + glueNode . getSurface ( ) . length ( ) ) ; return ; } } } }
Tries to repair the lattice by creating and adding an additional Viterbi node to the LEFT of the newly inserted user dictionary entry by using the substring of the node in the lattice that overlaps the least
19,422
/**
 * Repairs the lattice to the RIGHT of a newly inserted user-dictionary entry.
 *
 * Scans forward from {@code nodeEndIndex} for the nearest position with end nodes,
 * picks the least-overlapping glue candidate, and inserts an INSERTED-type glue node
 * whose surface is the candidate's suffix covering the gap. Stops after the first
 * successful insertion.
 *
 * @param lattice      lattice being repaired in place
 * @param nodeEndIndex position where the inserted user entry ends
 */
private void repairBrokenLatticeAfter ( ViterbiLattice lattice , int nodeEndIndex ) { ViterbiNode [ ] [ ] nodeEndIndices = lattice . getEndIndexArr ( ) ; for ( int endIndex = nodeEndIndex + 1 ; endIndex < nodeEndIndices . length ; endIndex ++ ) { if ( nodeEndIndices [ endIndex ] != null ) { ViterbiNode glueBase = findGlueNodeCandidate ( nodeEndIndex , nodeEndIndices [ endIndex ] , endIndex ) ; if ( glueBase != null ) { int delta = endIndex - nodeEndIndex ; String glueBaseSurface = glueBase . getSurface ( ) ; String surface = glueBaseSurface . substring ( glueBaseSurface . length ( ) - delta ) ; ViterbiNode glueNode = createGlueNode ( nodeEndIndex , glueBase , surface ) ; lattice . addNode ( glueNode , nodeEndIndex , nodeEndIndex + glueNode . getSurface ( ) . length ( ) ) ; return ; } } } }
Tries to repair the lattice by creating and adding an additional Viterbi node to the RIGHT of the newly inserted user dictionary entry by using the substring of the node in the lattice that overlaps the least
19,423
/**
 * Locates a glue-node candidate among the lattice nodes at one position: the shortest
 * node whose surface still spans the gap of length {@code index + 1 - startIndex}.
 *
 * @param index        position the glue node must reach
 * @param latticeNodes nodes registered at this lattice position (may contain nulls)
 * @param startIndex   position the glue node would start at
 * @return the best candidate, or null if no node is long enough
 */
private ViterbiNode findGlueNodeCandidate ( int index , ViterbiNode [ ] latticeNodes , int startIndex ) {
    int targetLength = index + 1 - startIndex;
    ViterbiNode best = null;
    for ( ViterbiNode node : latticeNodes ) {
        // skip empty slots; keep the shortest acceptable node seen so far
        if ( node != null && isAcceptableCandidate ( targetLength , best , node ) ) {
            best = node;
        }
    }
    return best;
}
Tries to locate a candidate for a glue node that repairs the broken lattice by looking at all nodes at the current index .
19,424
/**
 * Decides whether {@code candidate} is an acceptable (better) glue-node base: it must
 * be long enough to cover the gap, and shorter than the current best (or the first
 * acceptable one found).
 *
 * @param targetLength minimum surface length needed to cover the gap
 * @param glueBase     current best candidate, or null if none yet
 * @param candidate    node under consideration
 * @return true if candidate should replace the current best
 */
private boolean isAcceptableCandidate ( int targetLength , ViterbiNode glueBase , ViterbiNode candidate ) {
    int candidateLength = candidate . getSurface ( ) . length ( );
    if ( candidateLength < targetLength ) {
        return false;  // too short to overlap the inserted user entry
    }
    // prefer the shortest sufficient surface
    return glueBase == null || candidateLength < glueBase . getSurface ( ) . length ( );
}
Check whether a candidate for a glue node is acceptable . The candidate should be as short as possible but long enough to overlap with the inserted user entry
19,425
/**
 * Creates a glue node from an existing lattice node: same word id, connection ids and
 * cost as {@code glueBase}, but with the given (truncated) surface and INSERTED type,
 * so it exactly fills the hole left by a user-dictionary insertion.
 *
 * @param startIndex position where the glue node starts
 * @param glueBase   node whose attributes are copied
 * @param surface    truncated surface for the glue node
 * @return the new INSERTED-type node
 */
private ViterbiNode createGlueNode ( int startIndex , ViterbiNode glueBase , String surface ) { return new ViterbiNode ( glueBase . getWordId ( ) , surface , glueBase . getLeftId ( ) , glueBase . getRightId ( ) , glueBase . getWordCost ( ) , startIndex , ViterbiNode . Type . INSERTED ) ; }
Create a glue node to be inserted based on ViterbiNode already in the lattice . The new node takes the same parameters as the node it is based on but the word is truncated to match the hole in the lattice caused by the new user entry
19,426
/**
 * Checks whether this CounterMap has any counts stored under the given first element.
 *
 * @param element first-level key to check
 * @return true if the whole map is empty, the key is absent, or its counter is empty
 */
public boolean isEmpty ( F element ) {
    if ( isEmpty ( ) ) {
        return true;
    }
    Counter < S > inner = maps . get ( element );
    return inner == null || inner . isEmpty ( );
}
This method checks if this CounterMap has any values stored for a given first element
19,427
/**
 * Adds every count from the other CounterMap into this one, entry by entry.
 *
 * @param other counter map whose (first, second, count) triples are merged in
 */
public void incrementAll ( CounterMap < F , S > other ) {
    for ( Map . Entry < F , Counter < S > > outer : other . maps . entrySet ( ) ) {
        F first = outer . getKey ( );
        for ( Map . Entry < S , AtomicDouble > inner : outer . getValue ( ) . entrySet ( ) ) {
            incrementCount ( first , inner . getKey ( ) , inner . getValue ( ) . get ( ) );
        }
    }
}
This method will increment values of this counter by counts of other counter
19,428
/**
 * Returns the (first, second) pair with the maximum count across all inner counters,
 * or null if this CounterMap has no entries.
 *
 * Improvements over the original: the running maximum is a primitive {@code double}
 * instead of a boxed {@code Double} (the original autoboxed on every comparison), and
 * the null check is short-circuited first in the condition.
 *
 * @return the pair holding the maximum count, or null when there are no counters
 */
public Pair < F , S > argMax ( ) {
    double maxCount = - Double . MAX_VALUE;
    Pair < F , S > maxKey = null;
    for ( Map . Entry < F , Counter < S > > entry : maps . entrySet ( ) ) {
        Counter < S > counter = entry . getValue ( );
        S localMax = counter . argMax ( );
        if ( maxKey == null || counter . getCount ( localMax ) > maxCount ) {
            maxKey = new Pair < > ( entry . getKey ( ) , localMax );
            maxCount = counter . getCount ( localMax );
        }
    }
    return maxKey;
}
This method returns pair of elements with a max value
19,429
/**
 * Purges all counts stored under the given first element; a no-op when the element
 * has no counter.
 *
 * @param element first-level key whose counter is cleared
 */
public void clear ( F element ) {
    Counter < S > counter = maps . get ( element );
    if ( counter != null ) {
        counter . clear ( );
    }
}
This method purges counter for a given first element
19,430
/**
 * Counts the total number of (first, second) entries across all inner counters.
 *
 * @return sum of the sizes of every counter in this map
 */
public int totalSize ( ) {
    int total = 0;
    for ( F key : keySet ( ) ) {
        total += getCounter ( key ) . size ( );
    }
    return total;
}
This method returns total number of elements in this CounterMap
19,431
/**
 * Solves a triangular packed system of linear equations (BLAS tpsv), in place on X.
 * Records a profiling event when full profiling is enabled, dispatches to dtpsv or
 * stpsv based on X's data type (validating both operands' types), and checks the
 * result for NaN/Inf afterwards.
 *
 * @param order  matrix storage order
 * @param Uplo   whether the packed matrix is upper or lower triangular
 * @param TransA transpose flag for the system
 * @param Diag   whether the diagonal is unit
 * @param Ap     triangular matrix in packed form
 * @param X      right-hand side; overwritten with the solution
 */
public void tpsv ( char order , char Uplo , char TransA , char Diag , INDArray Ap , INDArray X ) { if ( Nd4j . getExecutioner ( ) . getProfilingMode ( ) == OpExecutioner . ProfilingMode . ALL ) OpProfiler . getInstance ( ) . processBlasCall ( false , Ap , X ) ; if ( X . data ( ) . dataType ( ) == DataType . DOUBLE ) { DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , X , Ap ) ; dtpsv ( order , Uplo , TransA , Diag , ( int ) X . length ( ) , Ap , X , X . stride ( - 1 ) ) ; } else { DefaultOpExecutioner . validateDataType ( DataType . FLOAT , Ap , X ) ; stpsv ( order , Uplo , TransA , Diag , ( int ) X . length ( ) , Ap , X , X . stride ( - 1 ) ) ; } OpExecutionerUtil . checkForAny ( X ) ; }
tpsv solves a system of linear equations whose coefficients are in a triangular packed matrix .
19,432
/**
 * Returns the lazily created singleton instance, using double-checked locking.
 * NOTE(review): double-checked locking is only safe if the {@code instance} field is
 * declared {@code volatile}; the field declaration is not visible here — confirm it is.
 *
 * @return the shared FingerprintProperties instance
 */
public static FingerprintProperties getInstance ( ) { if ( instance == null ) { synchronized ( FingerprintProperties . class ) { if ( instance == null ) { instance = new FingerprintProperties ( ) ; } } } return instance ; }
Returns the lazily initialized singleton FingerprintProperties instance
19,433
/**
 * Renders an HTML page listing all attached training sessions, each as a link to
 * {@code train/<sessionId>}, or a "no session" message when none are attached.
 *
 * NOTE(review): sessionId is embedded into the HTML without escaping. If session IDs
 * can be influenced by external input this is an XSS vector — confirm IDs are
 * server-generated, or HTML-escape them.
 *
 * @return an OK result with content type text/html; charset=utf-8
 */
private Result listSessions ( ) { StringBuilder sb = new StringBuilder ( "<!DOCTYPE html>\n" + "<html lang=\"en\">\n" + "<head>\n" + " <meta charset=\"utf-8\">\n" + " <title>Training sessions - DL4J Training UI</title>\n" + " </head>\n" + "\n" + " <body>\n" + " <h1>DL4J Training UI</h1>\n" + " <p>UI server is in multi-session mode." + " To visualize a training session, please select one from the following list.</p>\n" + " <h2>List of attached training sessions</h2>\n" ) ; if ( ! knownSessionIDs . isEmpty ( ) ) { sb . append ( " <ul>" ) ; for ( String sessionId : knownSessionIDs . keySet ( ) ) { sb . append ( " <li><a href=\"train/" + sessionId + "\">" + sessionId + "</a></li>\n" ) ; } sb . append ( " </ul>" ) ; } else { sb . append ( "No training session attached." ) ; } sb . append ( " </body>\n" + "</html>\n" ) ; return ok ( sb . toString ( ) ) . as ( "text/html; charset=utf-8" ) ; }
List training sessions
19,434
/**
 * Handles a request for an unknown session: attempts to load it via the configured
 * session loader; on success redirects to the target path (or returns OK when no
 * path was requested), otherwise returns 404.
 *
 * @param sessionId  session to attempt to load
 * @param targetPath path to redirect to after loading, or null
 * @return redirect/OK on successful load, not-found otherwise
 */
private Result sessionNotFound ( String sessionId , String targetPath ) {
    boolean loaded = sessionLoader != null && sessionLoader . apply ( sessionId );
    if ( ! loaded ) {
        return notFound ( "Unknown session ID: " + sessionId );
    }
    return targetPath != null ? temporaryRedirect ( "./" + targetPath ) : ok ( );
}
Load StatsStorage via provider or return not found
19,435
/**
 * Looks up the last update time for a session, defensively handling null map,
 * null session ID and missing entries.
 *
 * @param sessionId session to look up, may be null
 * @return the stored last-update time, or -1L when unknown
 */
private Long getLastUpdateTime ( String sessionId ) {
    boolean known = lastUpdateForSession != null
            && sessionId != null
            && lastUpdateForSession . containsKey ( sessionId );
    return known ? lastUpdateForSession . get ( sessionId ) : - 1L;
}
Get last update time for given session ID checking for null values
19,436
/**
 * Publishes an NDArrayMessage to the configured Aeron channel/stream.
 *
 * Lazily initializes, connects Aeron and adds the publication (retrying up to
 * NUM_RETRIES on driver timeouts with increasing back-off), optionally GZIP-compresses
 * the array, and sends the message either whole or split into chunks when it exceeds
 * the publication's max message length.
 *
 * NOTE(review): {@code connected} is only ever set inside the {@code aeron == null}
 * branch, so when aeron was already connected it stays false; combined with the
 * condition {@code !connected && connectionTries >= 3 || publication == null}
 * (which parses as {@code (!connected && connectionTries >= 3) || ...}), the failure
 * check depends on that flag — review whether this matches the intended retry logic.
 *
 * @param message message to publish; its array may be compressed in place
 * @throws Exception on interrupt during back-off or initialization failure
 * @throws IllegalStateException if no publication could be established
 */
public void publish ( NDArrayMessage message ) throws Exception { if ( ! init ) init ( ) ; boolean connected = false ; if ( aeron == null ) { try { while ( ! connected ) { aeron = Aeron . connect ( ctx ) ; connected = true ; } } catch ( Exception e ) { log . warn ( "Reconnecting on publisher...failed to connect" ) ; } } int connectionTries = 0 ; while ( publication == null && connectionTries < NUM_RETRIES ) { try { publication = aeron . addPublication ( channel , streamId ) ; log . info ( "Created publication on channel " + channel + " and stream " + streamId ) ; } catch ( DriverTimeoutException e ) { Thread . sleep ( 1000 * ( connectionTries + 1 ) ) ; log . warn ( "Failed to connect due to driver time out on channel " + channel + " and stream " + streamId + "...retrying in " + connectionTries + " seconds" ) ; connectionTries ++ ; } } if ( ! connected && connectionTries >= 3 || publication == null ) { throw new IllegalStateException ( "Publisher unable to connect to channel " + channel + " and stream " + streamId ) ; } log . info ( "Publishing to " + channel + " on stream Id " + streamId ) ; INDArray arr = message . getArr ( ) ; if ( isCompress ( ) ) while ( ! message . getArr ( ) . isCompressed ( ) ) Nd4j . getCompressor ( ) . compressi ( arr , "GZIP" ) ; if ( NDArrayMessage . byteBufferSizeForMessage ( message ) >= publication . maxMessageLength ( ) ) { NDArrayMessageChunk [ ] chunks = NDArrayMessage . chunks ( message , publication . maxMessageLength ( ) / 128 ) ; for ( int i = 0 ; i < chunks . length ; i ++ ) { ByteBuffer sendBuff = NDArrayMessageChunk . toBuffer ( chunks [ i ] ) ; sendBuff . rewind ( ) ; DirectBuffer buffer = new UnsafeBuffer ( sendBuff ) ; sendBuffer ( buffer ) ; } } else { DirectBuffer buffer = NDArrayMessage . toBuffer ( message ) ; sendBuffer ( buffer ) ; } }
Publish an ndarray to an aeron channel
19,437
/**
 * Parses the next co-occurrence line ("<index1> <index2> <weight>") into a
 * CoOccurrenceWeight, resolving both element indexes against the vocab cache.
 *
 * @return the next CoOccurrenceWeight, or null when the iterator is exhausted
 */
public CoOccurrenceWeight < T > nextObject ( ) {
    String line = iterator . nextSentence ( );
    if ( line == null || line . isEmpty ( ) ) {
        return null;
    }
    String [ ] tokens = line . split ( " " );
    CoOccurrenceWeight < T > pair = new CoOccurrenceWeight < > ( );
    pair . setElement1 ( vocabCache . elementAtIndex ( Integer . parseInt ( tokens [ 0 ] ) ) );
    pair . setElement2 ( vocabCache . elementAtIndex ( Integer . parseInt ( tokens [ 1 ] ) ) );
    pair . setWeight ( Double . parseDouble ( tokens [ 2 ] ) );
    return pair;
}
Returns next CoOccurrenceWeight object
19,438
/**
 * Deserializes a ComputationGraphConfiguration from JSON.
 *
 * After the plain Jackson mapping, walks every LayerVertex and patches legacy
 * configurations: BaseLayers whose activation function came through null (pre-0.7.2
 * JSON stored it as an "activationFunction" string) get it re-parsed from the raw
 * JSON tree, and legacy weight-init formats are handled via
 * handleLegacyWeightInitFromJson. Deserialization failures mentioning "legacy"
 * are rethrown with guidance to register custom classes first.
 *
 * @param json JSON produced by toJson() (current or pre-1.0.0-alpha format)
 * @return the deserialized configuration
 */
public static ComputationGraphConfiguration fromJson ( String json ) { ObjectMapper mapper = NeuralNetConfiguration . mapper ( ) ; ComputationGraphConfiguration conf ; try { conf = mapper . readValue ( json , ComputationGraphConfiguration . class ) ; } catch ( Exception e ) { String msg = e . getMessage ( ) ; if ( msg != null && msg . contains ( "legacy" ) ) { throw new RuntimeException ( "Error deserializing ComputationGraphConfiguration - configuration may have a custom " + "layer, vertex or preprocessor, in pre version 1.0.0-alpha JSON format. These layers can be " + "deserialized by first registering them with NeuralNetConfiguration.registerLegacyCustomClassesForJSON(Class...)" , e ) ; } throw new RuntimeException ( e ) ; } int layerCount = 0 ; Map < String , GraphVertex > vertexMap = conf . getVertices ( ) ; JsonNode vertices = null ; for ( Map . Entry < String , GraphVertex > entry : vertexMap . entrySet ( ) ) { if ( ! ( entry . getValue ( ) instanceof LayerVertex ) ) { continue ; } LayerVertex lv = ( LayerVertex ) entry . getValue ( ) ; if ( lv . getLayerConf ( ) != null && lv . getLayerConf ( ) . getLayer ( ) != null ) { Layer layer = lv . getLayerConf ( ) . getLayer ( ) ; if ( layer instanceof BaseLayer && ( ( BaseLayer ) layer ) . getActivationFn ( ) == null ) { String layerName = layer . getLayerName ( ) ; try { if ( vertices == null ) { JsonNode jsonNode = mapper . readTree ( json ) ; vertices = jsonNode . get ( "vertices" ) ; } JsonNode vertexNode = vertices . get ( layerName ) ; JsonNode layerVertexNode = vertexNode . get ( "LayerVertex" ) ; if ( layerVertexNode == null || ! layerVertexNode . has ( "layerConf" ) || ! layerVertexNode . get ( "layerConf" ) . has ( "layer" ) ) { continue ; } JsonNode layerWrapperNode = layerVertexNode . get ( "layerConf" ) . get ( "layer" ) ; if ( layerWrapperNode == null || layerWrapperNode . size ( ) != 1 ) { continue ; } JsonNode layerNode = layerWrapperNode . elements ( ) . 
next ( ) ; JsonNode activationFunction = layerNode . get ( "activationFunction" ) ; if ( activationFunction != null ) { IActivation ia = Activation . fromString ( activationFunction . asText ( ) ) . getActivationFunction ( ) ; ( ( BaseLayer ) layer ) . setActivationFn ( ia ) ; } } catch ( IOException e ) { log . warn ( "Layer with null ActivationFn field or pre-0.7.2 activation function detected: could not parse JSON" , e ) ; } } handleLegacyWeightInitFromJson ( json , layer , mapper , vertices ) ; } } return conf ; }
Create a computation graph configuration from json
19,439
/**
 * Validates this graph configuration, throwing IllegalStateException on the first
 * violation found:
 * - the network must have at least one input, and (unless allowNoOutput) at least
 *   one output;
 * - no name may appear both as a network input and as a vertex/layer;
 * - every vertex must have at least one input, and each must refer to an existing
 *   vertex or network input;
 * - every declared output must be an existing vertex;
 * - unless allowDisconnected, every vertex must feed another vertex or be an output
 *   (disconnected vertices are reported in the exception message).
 *
 * @param allowDisconnected skip the disconnected-vertex check
 * @param allowNoOutput     permit a configuration with no declared outputs
 */
public void validate ( boolean allowDisconnected , boolean allowNoOutput ) { if ( networkInputs == null || networkInputs . isEmpty ( ) ) { throw new IllegalStateException ( "Invalid configuration: network has no inputs. " + "Use .addInputs(String...) to label (and give an ordering to) the network inputs" ) ; } if ( ( networkOutputs == null || networkOutputs . isEmpty ( ) ) && ! allowNoOutput ) { throw new IllegalStateException ( "Invalid configuration: network has no outputs." + "Use .setOutput(String...) to specify (and give an ordering to) the output vertices, " + "or use allowNoOutputs(true) to disable this check" ) ; } for ( String s : networkInputs ) { if ( vertices . containsKey ( s ) ) { throw new IllegalStateException ( "Invalid configuration: name \"" + s + "\" is present in both network inputs and graph vertices/layers" ) ; } } for ( Map . Entry < String , List < String > > e : vertexInputs . entrySet ( ) ) { String nodeName = e . getKey ( ) ; if ( e . getValue ( ) == null || e . getValue ( ) . isEmpty ( ) ) { throw new IllegalStateException ( "Invalid configuration: vertex \"" + nodeName + "\" has no inputs" ) ; } for ( String inputName : e . getValue ( ) ) { if ( ! vertices . containsKey ( inputName ) && ! networkInputs . contains ( inputName ) ) { throw new IllegalStateException ( "Invalid configuration: Vertex \"" + nodeName + "\" has input \"" + inputName + "\" that does not exist" ) ; } } } if ( networkOutputs != null ) { for ( String s : networkOutputs ) { if ( ! vertices . containsKey ( s ) ) { throw new IllegalStateException ( "Invalid configuration: Output name \"" + s + "\" is not a valid vertex" ) ; } } } if ( ! allowDisconnected ) { Set < String > seenAsInput = new HashSet < > ( ) ; seenAsInput . addAll ( networkOutputs ) ; for ( Map . Entry < String , List < String > > e : vertexInputs . entrySet ( ) ) { seenAsInput . addAll ( e . getValue ( ) ) ; } Set < String > disconnected = new HashSet < > ( ) ; disconnected . 
addAll ( networkInputs ) ; disconnected . addAll ( vertices . keySet ( ) ) ; disconnected . removeAll ( seenAsInput ) ; if ( ! disconnected . isEmpty ( ) && ! allowNoOutput ) { throw new IllegalStateException ( "Invalid configuration: disconnected vertices found - " + disconnected + ". Disconnected vertices are those that do not connect to either another vertex, and are also" + " not a network output. To disable this error (i.e., allow network configurations with" + " disconnected vertices) use GraphBuilder.allowDisconnected(true)" ) ; } } }
Check the configuration make sure it is valid
19,440
/**
 * Loads min/max statistics from binary files: features from statistics[0]/[1], and —
 * when labels are fitted — label stats from statistics[2]/[3].
 *
 * @param statistics files written by {@code save}, in (featureMin, featureMax,
 *                   labelMin, labelMax) order
 * @throws IOException if any file cannot be read
 */
public void load ( File ... statistics ) throws IOException {
    INDArray featureMin = Nd4j . readBinary ( statistics [ 0 ] );
    INDArray featureMax = Nd4j . readBinary ( statistics [ 1 ] );
    setFeatureStats ( new MinMaxStats ( featureMin , featureMax ) );
    if ( isFitLabel ( ) ) {
        INDArray labelMin = Nd4j . readBinary ( statistics [ 2 ] );
        INDArray labelMax = Nd4j . readBinary ( statistics [ 3 ] );
        setLabelStats ( new MinMaxStats ( labelMin , labelMax ) );
    }
}
Load the given min and max
19,441
/**
 * Saves the current min/max statistics: features to files[0]/[1], and — when labels
 * are fitted — label stats to files[2]/[3]. Readable back via {@code load}.
 *
 * @param files destinations in (featureMin, featureMax, labelMin, labelMax) order
 * @throws IOException if any file cannot be written
 */
public void save ( File ... files ) throws IOException {
    Nd4j . saveBinary ( getMin ( ) , files [ 0 ] );
    Nd4j . saveBinary ( getMax ( ) , files [ 1 ] );
    if ( ! isFitLabel ( ) ) {
        return;  // no label statistics to persist
    }
    Nd4j . saveBinary ( getLabelMin ( ) , files [ 2 ] );
    Nd4j . saveBinary ( getLabelMax ( ) , files [ 3 ] );
}
Save the current min and max
19,442
/**
 * Builds a one-line-per-dependency version report with the requested level of detail:
 * GAV always; GAVC and FULL add the abbreviated commit id; FULL also adds build time,
 * branch and short commit message.
 *
 * FIX: the original appended the FULL-detail fields with no separators at all,
 * producing e.g. "abc123buildTime=...branch=...commitMsg=..."; separators now match
 * the format used by {@code logVersionInfo}.
 *
 * @param detail amount of detail to include per dependency
 * @return the formatted multi-line report
 */
public static String versionInfoString ( Detail detail ) {
    StringBuilder sb = new StringBuilder ( );
    for ( VersionInfo grp : getVersionInfos ( ) ) {
        sb . append ( grp . getGroupId ( ) ) . append ( " : " )
          . append ( grp . getArtifactId ( ) ) . append ( " : " )
          . append ( grp . getBuildVersion ( ) );
        switch ( detail ) {
            case FULL :
            case GAVC :
                sb . append ( " - " ) . append ( grp . getCommitIdAbbrev ( ) );
                if ( detail != Detail . FULL )
                    break;
                sb . append ( ", buildTime=" ) . append ( grp . getBuildTime ( ) )
                  . append ( ", branch=" ) . append ( grp . getBranch ( ) )
                  . append ( ", commitMsg=" ) . append ( grp . getCommitMessageShort ( ) );
        }
        sb . append ( "\n" );
    }
    return sb . toString ( );
}
Get the version information for dependencies as a string with a specified amount of detail
19,443
/**
 * Logs one line per dependency at the requested level of detail: GAV (group,
 * artifact, version), GAVC (plus abbreviated commit id) or FULL (plus full commit
 * id, build time/host, branch and short commit message).
 *
 * @param detail amount of detail to log per dependency
 */
public static void logVersionInfo ( Detail detail ) { List < VersionInfo > info = getVersionInfos ( ) ; for ( VersionInfo grp : info ) { switch ( detail ) { case GAV : log . info ( "{} : {} : {}" , grp . getGroupId ( ) , grp . getArtifactId ( ) , grp . getBuildVersion ( ) ) ; break ; case GAVC : log . info ( "{} : {} : {} - {}" , grp . getGroupId ( ) , grp . getArtifactId ( ) , grp . getBuildVersion ( ) , grp . getCommitIdAbbrev ( ) ) ; break ; case FULL : log . info ( "{} : {} : {} - {}, buildTime={}, buildHost={} branch={}, commitMsg={}" , grp . getGroupId ( ) , grp . getArtifactId ( ) , grp . getBuildVersion ( ) , grp . getCommitId ( ) , grp . getBuildTime ( ) , grp . getBuildHost ( ) , grp . getBranch ( ) , grp . getCommitMessageShort ( ) ) ; break ; } } }
Log the version information with the specified level of detail
19,444
/**
 * Converts Keras convolutional weights to DL4J's expected layout, depending on the
 * backend the model was trained with.
 *
 * TENSORFLOW: permutes the kernel axes (rank 5: (4,3,0,1,2); rank 4: (3,2,0,1)).
 * THEANO: duplicates the weights and reverses each filter's elements in place
 * (Theano correlation vs convolution convention).
 *
 * @param kerasParamValue raw weight tensor as loaded from the Keras model
 * @return the weights rearranged for DL4J
 * @throws InvalidKerasConfigurationException for an unknown backend/dim order
 */
public INDArray getConvParameterValues ( INDArray kerasParamValue ) throws InvalidKerasConfigurationException { INDArray paramValue ; switch ( this . getDimOrder ( ) ) { case TENSORFLOW : if ( kerasParamValue . rank ( ) == 5 ) paramValue = kerasParamValue . permute ( 4 , 3 , 0 , 1 , 2 ) ; else paramValue = kerasParamValue . permute ( 3 , 2 , 0 , 1 ) ; break ; case THEANO : paramValue = kerasParamValue . dup ( ) ; for ( int i = 0 ; i < paramValue . tensorsAlongDimension ( 2 , 3 ) ; i ++ ) { INDArray copyFilter = paramValue . tensorAlongDimension ( i , 2 , 3 ) . dup ( ) ; double [ ] flattenedFilter = copyFilter . ravel ( ) . data ( ) . asDouble ( ) ; ArrayUtils . reverse ( flattenedFilter ) ; INDArray newFilter = Nd4j . create ( flattenedFilter , copyFilter . shape ( ) ) ; INDArray inPlaceFilter = paramValue . tensorAlongDimension ( i , 2 , 3 ) ; inPlaceFilter . muli ( 0 ) . addi ( newFilter ) ; } break ; default : throw new InvalidKerasConfigurationException ( "Unknown keras backend " + this . getDimOrder ( ) ) ; } return paramValue ; }
Return processed parameter values obtained from Keras convolutional layers .
19,445
/**
 * Resolves the BLAS vendor from the native vendor id, mapping any out-of-range id
 * (non-positive, or beyond the known enum constants) to UNKNOWN.
 *
 * @return the detected vendor, or Vendor.UNKNOWN for an unrecognized id
 */
public Vendor getBlasVendor ( ) {
    int vendorId = getBlasVendorId ( );
    boolean outOfRange = vendorId <= 0 || vendorId > Vendor . values ( ) . length - 1;
    return outOfRange ? Vendor . UNKNOWN : Vendor . values ( ) [ vendorId ];
}
Returns the BLAS library vendor
19,446
/**
 * Sets a property only when it has no current value; an existing value is left
 * untouched.
 *
 * @param name  property name
 * @param value value to store when the property is currently unset
 */
public void setIfUnset ( String name , String value ) {
    if ( get ( name ) != null ) {
        return;  // already set — keep the existing value
    }
    set ( name , value );
}
Sets a property if it is currently unset .
19,447
/**
 * Peek-ahead check for another record: if one is already buffered, returns true;
 * otherwise pulls records from the underlying reader, applying the transform process
 * and skipping records the transform filters out (returns null for), until a record
 * survives or the reader is exhausted. The surviving record is buffered in
 * {@code next} for the subsequent call to the next-record method.
 *
 * @return true if another (transformed) record is available
 */
public boolean hasNext ( ) { if ( next != null ) { return true ; } if ( ! recordReader . hasNext ( ) ) { return false ; } while ( next == null && recordReader . hasNext ( ) ) { Record r = recordReader . nextRecord ( ) ; List < Writable > temp = transformProcess . execute ( r . getRecord ( ) ) ; if ( temp == null ) { continue ; } next = new org . datavec . api . records . impl . Record ( temp , r . getMetaData ( ) ) ; } return next != null ; }
Whether there are any more records
19,448
/**
 * Classifies every point against the cluster set's centers, recording per cluster the
 * distance of each assigned point from its center, and counting points whose cluster
 * assignment changed. Individual classification failures are logged and skipped.
 *
 * FIX: removed the dead {@code List<Runnable> tasks} the original allocated but never
 * populated or submitted.
 *
 * @param clusterSet      clusters to classify against
 * @param points          points to classify
 * @param executorService unused — classification runs sequentially on the calling
 *                        thread; parameter kept for interface compatibility
 * @return the populated ClusterSetInfo
 */
public static ClusterSetInfo classifyPoints ( final ClusterSet clusterSet , List < Point > points ,
                ExecutorService executorService ) {
    final ClusterSetInfo clusterSetInfo = ClusterSetInfo . initialize ( clusterSet , true );
    for ( final Point point : points ) {
        try {
            PointClassification result = classifyPoint ( clusterSet , point );
            if ( result . isNewLocation ( ) )
                clusterSetInfo . getPointLocationChange ( ) . incrementAndGet ( );
            clusterSetInfo . getClusterInfo ( result . getCluster ( ) . getId ( ) )
                    . getPointDistancesFromCenter ( )
                    . put ( point . getId ( ) , result . getDistanceFromCenter ( ) );
        } catch ( Throwable t ) {
            // best-effort: a single bad point must not abort the whole pass
            log . warn ( "Error classifying point" , t );
        }
    }
    return clusterSetInfo;
}
Classify the set of points based on cluster centers . This also adds each point to the ClusterSet
19,449
/**
 * Serializes an ndarray into a JDBC Blob: binary-serializes to a ByteBuffer, copies
 * the bytes out, and writes them into a Blob created on a fresh connection.
 *
 * NOTE(review): the Connection obtained here is never closed — with many drivers the
 * returned Blob is only valid while its connection is open, so the lifecycle is left
 * to the caller; confirm the pooling/ownership convention used by this DAO.
 *
 * @param toConvert array to serialize
 * @return a Blob containing the binary-serialized array (bytes start at position 1)
 * @throws SQLException on connection or blob failure
 */
public Blob convert ( INDArray toConvert ) throws SQLException { ByteBuffer byteBuffer = BinarySerde . toByteBuffer ( toConvert ) ; Buffer buffer = ( Buffer ) byteBuffer ; buffer . rewind ( ) ; byte [ ] arr = new byte [ byteBuffer . capacity ( ) ] ; byteBuffer . get ( arr ) ; Connection c = dataSource . getConnection ( ) ; Blob b = c . createBlob ( ) ; b . setBytes ( 1 , arr ) ; return b ; }
Convert an ndarray to a blob
19,450
public INDArray load ( Blob blob ) throws SQLException { if ( blob == null ) return null ; try ( InputStream is = blob . getBinaryStream ( ) ) { ByteBuffer direct = ByteBuffer . allocateDirect ( ( int ) blob . length ( ) ) ; ReadableByteChannel readableByteChannel = Channels . newChannel ( is ) ; readableByteChannel . read ( direct ) ; Buffer byteBuffer = ( Buffer ) direct ; byteBuffer . rewind ( ) ; return BinarySerde . toArray ( direct ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
Load an ndarray from a blob
19,451
public void save ( INDArray save , String id ) throws SQLException , IOException { doSave ( save , id ) ; }
Save the ndarray
19,452
public Blob loadForID ( String id ) throws SQLException { Connection c = dataSource . getConnection ( ) ; PreparedStatement preparedStatement = c . prepareStatement ( loadStatement ( ) ) ; preparedStatement . setString ( 1 , id ) ; ResultSet r = preparedStatement . executeQuery ( ) ; if ( r . wasNull ( ) || ! r . next ( ) ) { return null ; } else { Blob first = r . getBlob ( 2 ) ; return first ; } }
Load an ndarray blob given an id
19,453
public void delete ( String id ) throws SQLException { Connection c = dataSource . getConnection ( ) ; PreparedStatement p = c . prepareStatement ( deleteStatement ( ) ) ; p . setString ( 1 , id ) ; p . execute ( ) ; }
Delete the given ndarray
19,454
private int getInputDimFromConfig ( Map < String , Object > layerConfig ) throws InvalidKerasConfigurationException { Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( ! innerConfig . containsKey ( conf . getLAYER_FIELD_INPUT_DIM ( ) ) ) throw new InvalidKerasConfigurationException ( "Keras Embedding layer config missing " + conf . getLAYER_FIELD_INPUT_DIM ( ) + " field" ) ; return ( int ) innerConfig . get ( conf . getLAYER_FIELD_INPUT_DIM ( ) ) ; }
Get Keras input dimension from Keras layer configuration .
19,455
@ SuppressWarnings ( "unchecked" ) public void processMessage ( ) { TrainingDriver < SkipGramRequestMessage > sgt = ( TrainingDriver < SkipGramRequestMessage > ) trainer ; sgt . startTraining ( this ) ; }
This method does actual training for SkipGram algorithm
19,456
public static void writeParagraphVectors ( ParagraphVectors vectors , File file ) { try ( FileOutputStream fos = new FileOutputStream ( file ) ; BufferedOutputStream stream = new BufferedOutputStream ( fos ) ) { writeParagraphVectors ( vectors , stream ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
This method saves ParagraphVectors model into compressed zip file
19,457
public static void writeWordVectors ( ParagraphVectors vectors , OutputStream stream ) { try ( BufferedWriter writer = new BufferedWriter ( new OutputStreamWriter ( stream , StandardCharsets . UTF_8 ) ) ) { VocabCache < VocabWord > vocabCache = vectors . getVocab ( ) ; for ( VocabWord word : vocabCache . vocabWords ( ) ) { StringBuilder builder = new StringBuilder ( ) ; builder . append ( word . isLabel ( ) ? "L" : "E" ) . append ( " " ) ; builder . append ( word . getLabel ( ) . replaceAll ( " " , WHITESPACE_REPLACEMENT ) ) . append ( " " ) ; INDArray vector = vectors . getWordVectorMatrix ( word . getLabel ( ) ) ; for ( int j = 0 ; j < vector . length ( ) ; j ++ ) { builder . append ( vector . getDouble ( j ) ) ; if ( j < vector . length ( ) - 1 ) { builder . append ( " " ) ; } } writer . write ( builder . append ( "\n" ) . toString ( ) ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
This method saves paragraph vectors to the given output stream .
19,458
public static WordVectors fromTableAndVocab ( WeightLookupTable table , VocabCache vocab ) { WordVectorsImpl vectors = new WordVectorsImpl ( ) ; vectors . setLookupTable ( table ) ; vectors . setVocab ( vocab ) ; vectors . setModelUtils ( new BasicModelUtils ( ) ) ; return vectors ; }
Load word vectors for the given vocab and table
19,459
public static Word2Vec fromPair ( Pair < InMemoryLookupTable , VocabCache > pair ) { Word2Vec vectors = new Word2Vec ( ) ; vectors . setLookupTable ( pair . getFirst ( ) ) ; vectors . setVocab ( pair . getSecond ( ) ) ; vectors . setModelUtils ( new BasicModelUtils ( ) ) ; return vectors ; }
Load word vectors from the given pair
19,460
public static void writeTsneFormat ( Glove vec , INDArray tsne , File csv ) throws Exception { try ( BufferedWriter write = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( csv ) , StandardCharsets . UTF_8 ) ) ) { int words = 0 ; InMemoryLookupCache l = ( InMemoryLookupCache ) vec . vocab ( ) ; for ( String word : vec . vocab ( ) . words ( ) ) { if ( word == null ) { continue ; } StringBuilder sb = new StringBuilder ( ) ; INDArray wordVector = tsne . getRow ( l . wordFor ( word ) . getIndex ( ) ) ; for ( int j = 0 ; j < wordVector . length ( ) ; j ++ ) { sb . append ( wordVector . getDouble ( j ) ) ; if ( j < wordVector . length ( ) - 1 ) { sb . append ( "," ) ; } } sb . append ( "," ) ; sb . append ( word . replaceAll ( " " , WHITESPACE_REPLACEMENT ) ) ; sb . append ( " " ) ; sb . append ( "\n" ) ; write . write ( sb . toString ( ) ) ; } log . info ( "Wrote " + words + " with size of " + vec . lookupTable ( ) . layerSize ( ) ) ; } }
Write the tsne format
19,461
public INDArray outputFromFeaturized ( INDArray input ) { if ( isGraph ) { if ( unFrozenSubsetGraph . getNumOutputArrays ( ) > 1 ) { throw new IllegalArgumentException ( "Graph has more than one output. Expecting an input array with outputFromFeaturized method call" ) ; } return unFrozenSubsetGraph . output ( input ) [ 0 ] ; } else { return unFrozenSubsetMLN . output ( input ) ; } }
Use to get the output from a featurized input
19,462
public static Evaluation getEvaluation ( ComputationGraph model , MultiDataSetIterator testData ) { if ( model . getNumOutputArrays ( ) != 1 ) throw new IllegalStateException ( "GraphSetSetAccuracyScoreFunction cannot be " + "applied to ComputationGraphs with more than one output. NumOutputs = " + model . getNumOutputArrays ( ) ) ; return model . evaluate ( testData ) ; }
Get the evaluation for the given model and test dataset
19,463
public static double score ( ComputationGraph model , MultiDataSetIterator testData , boolean average ) { double sumScore = 0.0 ; int totalExamples = 0 ; while ( testData . hasNext ( ) ) { MultiDataSet ds = testData . next ( ) ; long numExamples = ds . getFeatures ( 0 ) . size ( 0 ) ; sumScore += numExamples * model . score ( ds ) ; totalExamples += numExamples ; } if ( ! average ) return sumScore ; return sumScore / totalExamples ; }
Score based on the loss function
19,464
public static double score ( MultiLayerNetwork model , DataSetIterator testData , boolean average ) { double sumScore = 0.0 ; int totalExamples = 0 ; while ( testData . hasNext ( ) ) { DataSet ds = testData . next ( ) ; int numExamples = ds . numExamples ( ) ; sumScore += numExamples * model . score ( ds ) ; totalExamples += numExamples ; } if ( ! average ) return sumScore ; return sumScore / totalExamples ; }
Score the given test data with the given multi layer network
19,465
public static double score ( MultiLayerNetwork model , DataSetIterator testSet , RegressionValue regressionValue ) { RegressionEvaluation eval = model . evaluateRegression ( testSet ) ; return getScoreFromRegressionEval ( eval , regressionValue ) ; }
Score the given multi layer network
19,466
public String getMatchingAddress ( ) { if ( informationCollection . size ( ) > 1 ) this . informationCollection = buildLocalInformation ( ) ; List < String > list = getSubset ( 1 ) ; if ( list . size ( ) < 1 ) throw new ND4JIllegalStateException ( "Unable to find network interface matching requested mask: " + networkMask ) ; if ( list . size ( ) > 1 ) log . warn ( "We have {} local IPs matching given netmask [{}]" , list . size ( ) , networkMask ) ; return list . get ( 0 ) ; }
This method returns local IP address that matches given network mask . To be used with single - argument constructor only .
19,467
public List < String > getSubset ( int numShards , Collection < String > primary ) { if ( networkMask == null ) return getIntersections ( numShards , primary ) ; List < String > addresses = new ArrayList < > ( ) ; SubnetUtils utils = new SubnetUtils ( networkMask ) ; Collections . shuffle ( informationCollection ) ; for ( NetworkInformation information : informationCollection ) { for ( String ip : information . getIpAddresses ( ) ) { if ( primary != null && primary . contains ( ip ) ) continue ; if ( utils . getInfo ( ) . isInRange ( ip ) ) { log . debug ( "Picked {} as {}" , ip , primary == null ? "Shard" : "Backup" ) ; addresses . add ( ip ) ; } if ( addresses . size ( ) >= numShards ) break ; } if ( addresses . size ( ) >= numShards ) break ; } return addresses ; }
This method returns specified number of IP addresses from original list of addresses that are NOT listed in the primary collection
19,468
private void processReturnedTask ( Future < OptimizationResult > future ) { long currentTime = System . currentTimeMillis ( ) ; OptimizationResult result ; try { result = future . get ( 100 , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( "Unexpected InterruptedException thrown for task" , e ) ; } catch ( ExecutionException e ) { log . warn ( "Task failed" , e ) ; numCandidatesFailed . getAndIncrement ( ) ; return ; } catch ( TimeoutException e ) { throw new RuntimeException ( e ) ; } CandidateInfo status = currentStatus . get ( result . getIndex ( ) ) ; CandidateInfo newStatus = new CandidateInfo ( result . getIndex ( ) , result . getCandidateInfo ( ) . getCandidateStatus ( ) , result . getScore ( ) , status . getCreatedTime ( ) , result . getCandidateInfo ( ) . getStartTime ( ) , currentTime , status . getFlatParams ( ) , result . getCandidateInfo ( ) . getExceptionStackTrace ( ) ) ; currentStatus . put ( result . getIndex ( ) , newStatus ) ; if ( result . getCandidateInfo ( ) . getCandidateStatus ( ) == CandidateStatus . Failed ) { log . info ( "Task {} failed during execution: {}" , result . getIndex ( ) , result . getCandidateInfo ( ) . getExceptionStackTrace ( ) ) ; numCandidatesFailed . getAndIncrement ( ) ; } else { config . getCandidateGenerator ( ) . reportResults ( result ) ; Double score = result . getScore ( ) ; log . info ( "Completed task {}, score = {}" , result . getIndex ( ) , result . getScore ( ) ) ; boolean minimize = config . getScoreFunction ( ) . minimize ( ) ; if ( score != null && ( bestScore == null || ( ( minimize && score < bestScore ) || ( ! minimize && score > bestScore ) ) ) ) { if ( bestScore == null ) { log . info ( "New best score: {} (first completed model)" , score ) ; } else { int idx = result . getIndex ( ) ; int lastBestIdx = bestScoreCandidateIndex . get ( ) ; log . 
info ( "New best score: {}, model {} (prev={}, model {})" , score , idx , bestScore , lastBestIdx ) ; } bestScore = score ; bestScoreTime = System . currentTimeMillis ( ) ; bestScoreCandidateIndex . set ( result . getIndex ( ) ) ; } numCandidatesCompleted . getAndIncrement ( ) ; ResultReference resultReference = result . getResultReference ( ) ; if ( resultReference != null ) allResults . add ( resultReference ) ; } }
Process returned task ( either completed or failed )
19,469
protected void set ( final int n , final Rational value ) { final int nindx = n / 2 ; if ( nindx < a . size ( ) ) { a . set ( nindx , value ) ; } else { while ( a . size ( ) < nindx ) { a . add ( Rational . ZERO ) ; } a . add ( value ) ; } }
Set a coefficient in the internal table .
19,470
public Rational at ( int n ) { if ( n == 1 ) { return ( new Rational ( - 1 , 2 ) ) ; } else if ( n % 2 != 0 ) { return Rational . ZERO ; } else { final int nindx = n / 2 ; if ( a . size ( ) <= nindx ) { for ( int i = 2 * a . size ( ) ; i <= n ; i += 2 ) { set ( i , doubleSum ( i ) ) ; } } return a . get ( nindx ) ; } }
The Bernoulli number at the index provided .
19,471
public static SequenceBatchCSVRecord fromWritables ( List < List < List < Writable > > > input ) { SequenceBatchCSVRecord ret = new SequenceBatchCSVRecord ( ) ; for ( int i = 0 ; i < input . size ( ) ; i ++ ) { ret . add ( Arrays . asList ( BatchCSVRecord . fromWritables ( input . get ( i ) ) ) ) ; } return ret ; }
Convert a writables time series to a sequence batch
19,472
public BufferedImage asBufferedImage ( INDArray array , int dataType ) { return converter2 . convert ( asFrame ( array , dataType ) ) ; }
Converts an INDArray to a BufferedImage . Only intended for images with rank 3 .
19,473
public Deque < Integer > getCrossoverPoints ( ) { Collections . shuffle ( parameterIndexes ) ; List < Integer > crossoverPointLists = parameterIndexes . subList ( 0 , rng . nextInt ( maxCrossovers - minCrossovers ) + minCrossovers ) ; Collections . sort ( crossoverPointLists ) ; Deque < Integer > crossoverPoints = new ArrayDeque < Integer > ( crossoverPointLists ) ; crossoverPoints . add ( Integer . MAX_VALUE ) ; return crossoverPoints ; }
Generate a list of crossover points .
19,474
public double tfidfWord ( String word , long wordCount , long documentLength ) { double tf = tfForWord ( wordCount , documentLength ) ; double idf = idfForWord ( word ) ; return MathUtils . tfidf ( tf , idf ) ; }
Calculate the tfidf for a word given the word , word count and document length
19,475
public static void registerCustomLayer ( String layerName , Class < ? extends KerasLayer > configClass ) { customLayers . put ( layerName , configClass ) ; }
Register a custom layer
19,476
public void copyWeightsToLayer ( org . deeplearning4j . nn . api . Layer layer ) throws InvalidKerasConfigurationException { if ( this . getNumParams ( ) > 0 ) { String dl4jLayerName = layer . conf ( ) . getLayer ( ) . getLayerName ( ) ; String kerasLayerName = this . getLayerName ( ) ; String msg = "Error when attempting to copy weights from Keras layer " + kerasLayerName + " to DL4J layer " + dl4jLayerName ; if ( getWeights ( ) == null ) throw new InvalidKerasConfigurationException ( msg + "(weights is null)" ) ; Set < String > paramsInLayer = new HashSet < > ( layer . paramTable ( ) . keySet ( ) ) ; Set < String > paramsInKerasLayer = new HashSet < > ( this . weights . keySet ( ) ) ; paramsInLayer . removeAll ( paramsInKerasLayer ) ; if ( ! paramsInLayer . isEmpty ( ) ) { String joinedParamsInLayer = StringUtils . join ( paramsInLayer , ", " ) ; throw new InvalidKerasConfigurationException ( msg + "(no stored weights for parameters: " + joinedParamsInLayer + ")" ) ; } paramsInKerasLayer . removeAll ( layer . paramTable ( ) . keySet ( ) ) ; if ( ! paramsInKerasLayer . isEmpty ( ) ) { String joinedParamsInKerasLayer = StringUtils . join ( paramsInKerasLayer , ", " ) ; throw new InvalidKerasConfigurationException ( msg + "(found no parameters named: " + joinedParamsInKerasLayer + ")" ) ; } for ( String paramName : layer . paramTable ( ) . keySet ( ) ) { try { layer . setParam ( paramName , this . weights . get ( paramName ) ) ; } catch ( Exception e ) { log . error ( e . getMessage ( ) ) ; throw new InvalidKerasConfigurationException ( e . getMessage ( ) + "\nTried to set weights for layer with name " + this . getLayerName ( ) + ", of " + layer . conf ( ) . getLayer ( ) . getClass ( ) + ".\n" + "Failed to set weights for parameter " + paramName + "\n" + "Expected shape for this parameter: " + layer . getParam ( paramName ) . shapeInfoToString ( ) + ", \ngot: " + this . weights . get ( paramName ) . shapeInfoToString ( ) ) ; } } } }
Copy Keras layer weights to DL4J Layer .
19,477
protected long getNInFromConfig ( Map < String , ? extends KerasLayer > previousLayers ) throws UnsupportedKerasConfigurationException { int size = previousLayers . size ( ) ; int count = 0 ; long nIn ; String inboundLayerName = inboundLayerNames . get ( 0 ) ; while ( count <= size ) { if ( previousLayers . containsKey ( inboundLayerName ) ) { KerasLayer inbound = previousLayers . get ( inboundLayerName ) ; try { FeedForwardLayer ffLayer = ( FeedForwardLayer ) inbound . getLayer ( ) ; nIn = ffLayer . getNOut ( ) ; if ( nIn > 0 ) return nIn ; count ++ ; inboundLayerName = inbound . getInboundLayerNames ( ) . get ( 0 ) ; } catch ( Exception e ) { inboundLayerName = inbound . getInboundLayerNames ( ) . get ( 0 ) ; } } } throw new UnsupportedKerasConfigurationException ( "Could not determine number of input channels for" + "depthwise convolution." ) ; }
Some DL4J layers need explicit specification of number of inputs which Keras does infer . This method searches through previous layers until a FeedForwardLayer is found . These layers have nOut values that subsequently correspond to the nIn value of this layer .
19,478
public SDVariable layerNorm ( SDVariable input , SDVariable gain , int ... dimensions ) { return layerNorm ( ( String ) null , input , gain , dimensions ) ; }
Apply Layer Normalization without bias
19,479
public void setConf ( Configuration conf ) { super . setConf ( conf ) ; featureFirstColumn = conf . getInt ( FEATURE_FIRST_COLUMN , 0 ) ; hasLabel = conf . getBoolean ( HAS_LABELS , true ) ; multilabel = conf . getBoolean ( MULTILABEL , false ) ; labelFirstColumn = conf . getInt ( LABEL_FIRST_COLUMN , - 1 ) ; labelLastColumn = conf . getInt ( LABEL_LAST_COLUMN , - 1 ) ; featureLastColumn = conf . getInt ( FEATURE_LAST_COLUMN , labelFirstColumn > 0 ? labelFirstColumn - 1 : - 1 ) ; zeroBasedIndexing = conf . getBoolean ( ZERO_BASED_INDEXING , false ) ; zeroBasedLabelIndexing = conf . getBoolean ( ZERO_BASED_LABEL_INDEXING , false ) ; }
Set DataVec configuration
19,480
public boolean updaterDivideByMinibatch ( String paramName ) { int idx = paramName . indexOf ( '_' ) ; int layerIdx = Integer . parseInt ( paramName . substring ( 0 , idx ) ) ; String subName = paramName . substring ( idx + 1 ) ; return getLayer ( layerIdx ) . updaterDivideByMinibatch ( subName ) ; }
Intended for internal use
19,481
public INDArray activateSelectedLayers ( int from , int to , INDArray input ) { if ( input == null ) throw new IllegalStateException ( "Unable to perform activation; no input found" ) ; if ( from < 0 || from >= layers . length || from >= to ) throw new IllegalStateException ( "Unable to perform activation; FROM is out of layer space" ) ; if ( to < 1 || to >= layers . length ) throw new IllegalStateException ( "Unable to perform activation; TO is out of layer space" ) ; try { LayerWorkspaceMgr mgr = LayerWorkspaceMgr . noWorkspaces ( helperWorkspaces ) ; INDArray res = input ; for ( int l = from ; l <= to ; l ++ ) { res = this . activationFromPrevLayer ( l , res , false , mgr ) ; } return res ; } catch ( OutOfMemoryError e ) { CrashReportingUtil . writeMemoryCrashDump ( this , e ) ; throw e ; } }
Calculate activation for few layers at once . Suitable for autoencoder partial activation .
19,482
public long numParams ( boolean backwards ) { int length = 0 ; for ( int i = 0 ; i < layers . length ; i ++ ) length += layers [ i ] . numParams ( backwards ) ; return length ; }
Returns the number of parameters in the network
19,483
public double f1Score ( org . nd4j . linalg . dataset . api . DataSet data ) { return f1Score ( data . getFeatures ( ) , data . getLabels ( ) ) ; }
Sets the input and labels and returns the F1 score for the prediction with respect to the true labels
19,484
public void clear ( ) { for ( Layer layer : layers ) layer . clear ( ) ; input = null ; labels = null ; solver = null ; }
Clear the inputs . Clears optimizer state .
19,485
public void setInput ( INDArray input ) { this . input = input ; if ( this . layers == null ) { init ( ) ; } if ( input != null ) { if ( input . length ( ) == 0 ) throw new IllegalArgumentException ( "Invalid input: length 0 (shape: " + Arrays . toString ( input . shape ( ) ) + ")" ) ; setInputMiniBatchSize ( ( int ) input . size ( 0 ) ) ; } }
Set the input array for the network
19,486
public Layer getOutputLayer ( ) { Layer ret = getLayers ( ) [ getLayers ( ) . length - 1 ] ; if ( ret instanceof FrozenLayerWithBackprop ) { ret = ( ( FrozenLayerWithBackprop ) ret ) . getInsideLayer ( ) ; } return ret ; }
Get the output layer - i . e . the last layer in the network
19,487
public < T extends RegressionEvaluation > T evaluateRegression ( DataSetIterator iterator ) { return ( T ) doEvaluation ( iterator , new RegressionEvaluation ( iterator . totalOutcomes ( ) ) ) [ 0 ] ; }
Evaluate the network for regression performance
19,488
public static int createDims ( FlatBufferBuilder bufferBuilder , INDArray arr ) { int [ ] tensorDimOffsets = new int [ arr . rank ( ) ] ; int [ ] nameOffset = new int [ arr . rank ( ) ] ; for ( int i = 0 ; i < tensorDimOffsets . length ; i ++ ) { nameOffset [ i ] = bufferBuilder . createString ( "" ) ; tensorDimOffsets [ i ] = TensorDim . createTensorDim ( bufferBuilder , arr . size ( i ) , nameOffset [ i ] ) ; } return Tensor . createShapeVector ( bufferBuilder , tensorDimOffsets ) ; }
Create the dimensions for the flatbuffer builder
19,489
public Bitmap asBitmap ( INDArray array , int dataType ) { return converter2 . convert ( asFrame ( array , dataType ) ) ; }
Converts an INDArray to a Bitmap . Only intended for images with rank 3 .
19,490
public int totalCount ( int outputNum ) { assertIndex ( outputNum ) ; return countTruePositive [ outputNum ] + countTrueNegative [ outputNum ] + countFalseNegative [ outputNum ] + countFalsePositive [ outputNum ] ; }
Get the total number of values for the specified column accounting for any masking
19,491
public double accuracy ( int outputNum ) { assertIndex ( outputNum ) ; return ( countTruePositive [ outputNum ] + countTrueNegative [ outputNum ] ) / ( double ) totalCount ( outputNum ) ; }
Get the accuracy for the specified output
19,492
public double fBeta ( double beta , int outputNum ) { assertIndex ( outputNum ) ; double precision = precision ( outputNum ) ; double recall = recall ( outputNum ) ; return EvaluationUtils . fBeta ( beta , precision , recall ) ; }
Calculate the F - beta value for the given output
19,493
public double matthewsCorrelation ( int outputNum ) { assertIndex ( outputNum ) ; return EvaluationUtils . matthewsCorrelation ( truePositives ( outputNum ) , falsePositives ( outputNum ) , falseNegatives ( outputNum ) , trueNegatives ( outputNum ) ) ; }
Calculate the Matthews correlation coefficient for the specified output
19,494
public double gMeasure ( int output ) { double precision = precision ( output ) ; double recall = recall ( output ) ; return EvaluationUtils . gMeasure ( precision , recall ) ; }
Calculate the G - measure for the given output
19,495
public double falseNegativeRate ( Integer classLabel , double edgeCase ) { double fnCount = falseNegatives ( classLabel ) ; double tpCount = truePositives ( classLabel ) ; return EvaluationUtils . falseNegativeRate ( ( long ) fnCount , ( long ) tpCount , edgeCase ) ; }
Returns the false negative rate for a given label
19,496
public String stats ( int printPrecision ) { StringBuilder sb = new StringBuilder ( ) ; int maxLabelsLength = 15 ; if ( labels != null ) { for ( String s : labels ) { maxLabelsLength = Math . max ( s . length ( ) , maxLabelsLength ) ; } } String subPattern = "%-12." + printPrecision + "f" ; String pattern = "%-" + ( maxLabelsLength + 5 ) + "s" + subPattern + subPattern + subPattern + subPattern + "%-8d%-7d%-7d%-7d%-7d" ; String patternHeader = "%-" + ( maxLabelsLength + 5 ) + "s%-12s%-12s%-12s%-12s%-8s%-7s%-7s%-7s%-7s" ; List < String > headerNames = Arrays . asList ( "Label" , "Accuracy" , "F1" , "Precision" , "Recall" , "Total" , "TP" , "TN" , "FP" , "FN" ) ; if ( rocBinary != null ) { patternHeader += "%-12s" ; pattern += subPattern ; headerNames = new ArrayList < > ( headerNames ) ; headerNames . add ( "AUC" ) ; } String header = String . format ( patternHeader , headerNames . toArray ( ) ) ; sb . append ( header ) ; if ( countTrueNegative != null ) { for ( int i = 0 ; i < countTrueNegative . length ; i ++ ) { int totalCount = totalCount ( i ) ; double acc = accuracy ( i ) ; double f1 = f1 ( i ) ; double precision = precision ( i ) ; double recall = recall ( i ) ; String label = ( labels == null ? String . valueOf ( i ) : labels . get ( i ) ) ; List < Object > args = Arrays . < Object > asList ( label , acc , f1 , precision , recall , totalCount , truePositives ( i ) , trueNegatives ( i ) , falsePositives ( i ) , falseNegatives ( i ) ) ; if ( rocBinary != null ) { args = new ArrayList < > ( args ) ; args . add ( rocBinary . calculateAUC ( i ) ) ; } sb . append ( "\n" ) . append ( String . format ( pattern , args . toArray ( ) ) ) ; } if ( decisionThreshold != null ) { sb . append ( "\nPer-output decision thresholds: " ) . append ( Arrays . toString ( decisionThreshold . dup ( ) . data ( ) . asFloat ( ) ) ) ; } } else { sb . append ( "\n-- No Data --\n" ) ; } return sb . toString ( ) ; }
Get a String representation of the EvaluationBinary class using the specified precision
19,497
public FingerprintSimilarity getFingerprintsSimilarity ( ) { HashMap < Integer , Integer > offset_Score_Table = new HashMap < > ( ) ; int numFrames ; float score = 0 ; int mostSimilarFramePosition = Integer . MIN_VALUE ; if ( fingerprint1 . length > fingerprint2 . length ) { numFrames = FingerprintManager . getNumFrames ( fingerprint2 ) ; } else { numFrames = FingerprintManager . getNumFrames ( fingerprint1 ) ; } PairManager pairManager = new PairManager ( ) ; HashMap < Integer , List < Integer > > this_Pair_PositionList_Table = pairManager . getPair_PositionList_Table ( fingerprint1 ) ; HashMap < Integer , List < Integer > > compareWave_Pair_PositionList_Table = pairManager . getPair_PositionList_Table ( fingerprint2 ) ; for ( Integer compareWaveHashNumber : compareWave_Pair_PositionList_Table . keySet ( ) ) { if ( ! this_Pair_PositionList_Table . containsKey ( compareWaveHashNumber ) || ! compareWave_Pair_PositionList_Table . containsKey ( compareWaveHashNumber ) ) { continue ; } List < Integer > wavePositionList = this_Pair_PositionList_Table . get ( compareWaveHashNumber ) ; List < Integer > compareWavePositionList = compareWave_Pair_PositionList_Table . get ( compareWaveHashNumber ) ; for ( Integer thisPosition : wavePositionList ) { for ( Integer compareWavePosition : compareWavePositionList ) { int offset = thisPosition - compareWavePosition ; if ( offset_Score_Table . containsKey ( offset ) ) { offset_Score_Table . put ( offset , offset_Score_Table . get ( offset ) + 1 ) ; } else { offset_Score_Table . put ( offset , 1 ) ; } } } } MapRank mapRank = new MapRankInteger ( offset_Score_Table , false ) ; List < Integer > orderedKeyList = mapRank . getOrderedKeyList ( 100 , true ) ; if ( orderedKeyList . size ( ) > 0 ) { int key = orderedKeyList . get ( 0 ) ; mostSimilarFramePosition = key ; score = offset_Score_Table . get ( key ) ; if ( offset_Score_Table . containsKey ( key - 1 ) ) { score += offset_Score_Table . 
get ( key - 1 ) / 2 ; } if ( offset_Score_Table . containsKey ( key + 1 ) ) { score += offset_Score_Table . get ( key + 1 ) / 2 ; } } score /= numFrames ; float similarity = score ; if ( similarity > 1 ) { similarity = 1 ; } fingerprintSimilarity . setMostSimilarFramePosition ( mostSimilarFramePosition ) ; fingerprintSimilarity . setScore ( score ) ; fingerprintSimilarity . setSimilarity ( similarity ) ; return fingerprintSimilarity ; }
Get fingerprint similarity of input fingerprints
19,498
public static List < SubGraph > getSubgraphsMatching ( SameDiff sd , SubGraphPredicate p ) { List < SubGraph > out = new ArrayList < > ( ) ; for ( DifferentialFunction df : sd . functions ( ) ) { if ( p . matches ( sd , df ) ) { SubGraph sg = p . getSubGraph ( sd , df ) ; out . add ( sg ) ; } } return out ; }
Get a list of all the subgraphs that match the specified predicate
19,499
public String confusionMatrix ( ) { int nClasses = numClasses ( ) ; if ( confusion == null ) { return "Confusion matrix: <no data>" ; } List < Integer > classes = confusion . getClasses ( ) ; int maxCount = 1 ; for ( Integer i : classes ) { for ( Integer j : classes ) { int count = confusion ( ) . getCount ( i , j ) ; maxCount = Math . max ( maxCount , count ) ; } } maxCount = Math . max ( maxCount , nClasses ) ; int numDigits = ( int ) Math . ceil ( Math . log10 ( maxCount ) ) ; if ( numDigits < 1 ) numDigits = 1 ; String digitFormat = "%" + ( numDigits + 1 ) + "d" ; StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < nClasses ; i ++ ) { sb . append ( String . format ( digitFormat , i ) ) ; } sb . append ( "\n" ) ; int numDividerChars = ( numDigits + 1 ) * nClasses + 1 ; for ( int i = 0 ; i < numDividerChars ; i ++ ) { sb . append ( "-" ) ; } sb . append ( "\n" ) ; for ( int actual = 0 ; actual < nClasses ; actual ++ ) { String actualName = resolveLabelForClass ( actual ) ; for ( int predicted = 0 ; predicted < nClasses ; predicted ++ ) { int count = confusion . getCount ( actual , predicted ) ; sb . append ( String . format ( digitFormat , count ) ) ; } sb . append ( " | " ) . append ( actual ) . append ( " = " ) . append ( actualName ) . append ( "\n" ) ; } sb . append ( "\nConfusion matrix format: Actual (rowClass) predicted as (columnClass) N times" ) ; return sb . toString ( ) ; }
Get the confusion matrix as a String