idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
10,800
/**
 * Generates an example value for a string property or parameter.
 * When enum values are provided the first one is returned; otherwise a
 * canonical sample for the given Swagger string format is returned.
 *
 * @param format     the Swagger string format (e.g. "date", "uuid"); may be null
 * @param enumValues allowed enum values; may be null or empty
 * @return an example string, never null
 */
public static String generateStringExample(String format, List<String> enumValues) {
    if (enumValues != null && !enumValues.isEmpty()) {
        return enumValues.get(0);
    }
    if (format == null) {
        return "string";
    }
    switch (format) {
        case "byte":
            return "Ynl0ZQ==";
        case "date":
            return "1970-01-01";
        case "date-time":
            return "1970-01-01T00:00:00Z";
        case "email":
            return "email@example.com";
        case "password":
            return "secret";
        case "uuid":
            return "f81d4fae-7dec-11d0-a765-00a0c91e6bf6";
        default:
            return "string";
    }
}
Generates examples for string properties or parameters with given format
10,801
/**
 * Generates an example value for an integer property: the first enum
 * value if any are given, 0 otherwise.
 *
 * @param enumValues allowed enum values; may be null or empty
 * @return an example integer, never null
 */
public static Integer generateIntegerExample(List<Integer> enumValues) {
    boolean hasEnums = enumValues != null && !enumValues.isEmpty();
    return hasEnums ? enumValues.get(0) : 0;
}
Generates examples for integer properties: if there are enum values it uses the first one, and returns 0 otherwise.
10,802
/**
 * Collects the operations of a path into a map keyed by HTTP method,
 * preserving a fixed insertion order: GET, PUT, POST, DELETE, PATCH,
 * HEAD, OPTIONS.
 *
 * @param path the Swagger path model
 * @return an insertion-ordered map of the path's defined operations
 */
private static Map<HttpMethod, Operation> getOperationMap(Path path) {
    Map<HttpMethod, Operation> operations = new LinkedHashMap<>();
    Operation get = path.getGet();
    if (get != null) {
        operations.put(HttpMethod.GET, get);
    }
    Operation put = path.getPut();
    if (put != null) {
        operations.put(HttpMethod.PUT, put);
    }
    Operation post = path.getPost();
    if (post != null) {
        operations.put(HttpMethod.POST, post);
    }
    Operation delete = path.getDelete();
    if (delete != null) {
        operations.put(HttpMethod.DELETE, delete);
    }
    Operation patch = path.getPatch();
    if (patch != null) {
        operations.put(HttpMethod.PATCH, patch);
    }
    Operation head = path.getHead();
    if (head != null) {
        operations.put(HttpMethod.HEAD, head);
    }
    Operation options = path.getOptions();
    if (options != null) {
        operations.put(HttpMethod.OPTIONS, options);
    }
    return operations;
}
Returns the operations of a path as a map which preserves the insertion order .
10,803
/**
 * Flattens the Swagger paths map into a list of PathOperations, one per
 * (HTTP method, full path) pair, optionally sorted with the comparator.
 *
 * @param paths      relative path to path-model map
 * @param host       host prefix for the full path
 * @param basePath   base path prefix for the full path
 * @param comparator optional ordering; null keeps traversal order
 * @return the flattened list of path operations
 */
public static List<PathOperation> toPathOperationsList(Map<String, Path> paths, String host, String basePath, Comparator<PathOperation> comparator) {
    List<PathOperation> result = new ArrayList<>();
    for (Map.Entry<String, Path> entry : paths.entrySet()) {
        String fullPath = host + basePath + entry.getKey();
        result.addAll(toPathOperationsList(fullPath, entry.getValue()));
    }
    if (comparator != null) {
        result.sort(comparator);
    }
    return result;
}
Converts the Swagger paths into a list of PathOperations .
10,804
/**
 * Expands a single Swagger path into one PathOperation per HTTP
 * operation defined on it.
 *
 * @param path      the full path string
 * @param pathModel the Swagger path model
 * @return the list of operations on this path
 */
public static List<PathOperation> toPathOperationsList(String path, Path pathModel) {
    List<PathOperation> result = new ArrayList<>();
    for (Map.Entry<HttpMethod, Operation> entry : getOperationMap(pathModel).entrySet()) {
        result.add(new PathOperation(entry.getKey(), path, entry.getValue()));
    }
    return result;
}
Converts a Swagger path into a PathOperation .
10,805
/**
 * Returns the display title of the operation: its summary when present,
 * otherwise "&lt;METHOD&gt; &lt;path&gt;".
 */
public String getTitle() {
    String summary = operation.getSummary();
    return isBlank(summary) ? getMethod().toString() + " " + getPath() : summary;
}
Returns the display title for an operation
10,806
/**
 * Builds the overview MarkupDocument from the Swagger model: title,
 * description, version, contact, license, URI scheme, tags,
 * consumes/produces and external docs sections. Extension hooks are
 * invoked in a fixed order (BEFORE/BEGIN/END/AFTER) around the content,
 * so statement order matters here.
 *
 * @return the builder, for chaining
 */
public MarkupDocBuilder apply(MarkupDocBuilder markupDocBuilder, OverviewDocument.Parameters params) {
    Swagger swagger = params.swagger;
    Info info = swagger.getInfo();
    buildDocumentTitle(markupDocBuilder, info.getTitle());
    applyOverviewDocumentExtension(new Context(Position.DOCUMENT_BEFORE, markupDocBuilder));
    buildOverviewTitle(markupDocBuilder, labels.getLabel(Labels.OVERVIEW));
    applyOverviewDocumentExtension(new Context(Position.DOCUMENT_BEGIN, markupDocBuilder));
    buildDescriptionParagraph(markupDocBuilder, info.getDescription());
    buildVersionInfoSection(markupDocBuilder, info);
    buildContactInfoSection(markupDocBuilder, info.getContact());
    buildLicenseInfoSection(markupDocBuilder, info);
    buildUriSchemeSection(markupDocBuilder, swagger);
    buildTagsSection(markupDocBuilder, swagger.getTags());
    buildConsumesSection(markupDocBuilder, swagger.getConsumes());
    buildProducesSection(markupDocBuilder, swagger.getProduces());
    buildExternalDocsSection(markupDocBuilder, swagger.getExternalDocs());
    applyOverviewDocumentExtension(new Context(Position.DOCUMENT_END, markupDocBuilder));
    applyOverviewDocumentExtension(new Context(Position.DOCUMENT_AFTER, markupDocBuilder));
    return markupDocBuilder;
}
Builds the overview MarkupDocument .
10,807
/**
 * Applies the given extension context to every registered overview
 * document extension.
 *
 * @param context the extension context to apply
 */
private void applyOverviewDocumentExtension(Context context) {
    extensionRegistry.getOverviewDocumentExtensions().forEach(extension -> extension.apply(context));
}
Apply extension context to all OverviewContentExtension
10,808
/**
 * Builds the definitions MarkupDocument. Nothing is emitted when there
 * are no definitions; otherwise extension hooks (BEFORE/BEGIN/END/AFTER)
 * bracket the title and definitions section in a fixed order.
 *
 * @return the builder, for chaining
 */
public MarkupDocBuilder apply(MarkupDocBuilder markupDocBuilder, DefinitionsDocument.Parameters params) {
    Map<String, Model> definitions = params.definitions;
    if (MapUtils.isNotEmpty(definitions)) {
        applyDefinitionsDocumentExtension(new Context(Position.DOCUMENT_BEFORE, markupDocBuilder));
        buildDefinitionsTitle(markupDocBuilder, labels.getLabel(Labels.DEFINITIONS));
        applyDefinitionsDocumentExtension(new Context(Position.DOCUMENT_BEGIN, markupDocBuilder));
        buildDefinitionsSection(markupDocBuilder, definitions);
        applyDefinitionsDocumentExtension(new Context(Position.DOCUMENT_END, markupDocBuilder));
        applyDefinitionsDocumentExtension(new Context(Position.DOCUMENT_AFTER, markupDocBuilder));
    }
    return markupDocBuilder;
}
Builds the definitions MarkupDocument .
10,809
/**
 * Applies the given extension context to every registered definitions
 * document extension.
 *
 * @param context the extension context to apply
 */
private void applyDefinitionsDocumentExtension(Context context) {
    extensionRegistry.getDefinitionsDocumentExtensions().forEach(extension -> extension.apply(context));
}
Apply extension context to all DefinitionsContentExtension
10,810
/**
 * Emits a single definition. In separated-definitions mode the
 * definition is written to its own file and only a cross-reference is
 * added to the main document; otherwise it is emitted inline.
 *
 * @param definitionName name of the definition being processed
 * @param model          the definition's Swagger model
 */
private void buildDefinition(MarkupDocBuilder markupDocBuilder, String definitionName, Model model) {
    if (logger.isDebugEnabled()) {
        logger.debug("Definition processed : '{}'", definitionName);
    }
    if (config.isSeparatedDefinitionsEnabled()) {
        // Render the definition into its own builder/file, then reference it.
        MarkupDocBuilder defDocBuilder = copyMarkupDocBuilder(markupDocBuilder);
        applyDefinitionComponent(defDocBuilder, definitionName, model);
        Path definitionFile = context.getOutputPath().resolve(definitionDocumentNameResolver.apply(definitionName));
        defDocBuilder.writeToFileWithoutExtension(definitionFile, StandardCharsets.UTF_8);
        if (logger.isDebugEnabled()) {
            logger.debug("Separate definition file produced : '{}'", definitionFile);
        }
        definitionRef(markupDocBuilder, definitionName);
    } else {
        applyDefinitionComponent(markupDocBuilder, definitionName, model);
    }
}
Generate definition files depending on the generation mode
10,811
/**
 * Builds one concrete definition at section title level 2.
 */
private void applyDefinitionComponent(MarkupDocBuilder markupDocBuilder, String definitionName, Model model) {
    definitionComponent.apply(markupDocBuilder, DefinitionComponent.parameters(definitionName, model, 2));
}
Builds a concrete definition
10,812
/**
 * Adds a title containing a cross-reference to a separated definition
 * file, anchored as "ref-&lt;definitionName&gt;".
 */
private void definitionRef(MarkupDocBuilder markupDocBuilder, String definitionName) {
    buildDefinitionTitle(markupDocBuilder, crossReference(markupDocBuilder, definitionDocumentResolverDefault.apply(definitionName), definitionName, definitionName), "ref-" + definitionName);
}
Builds a cross - reference to a separated definition file .
10,813
/**
 * Emits a level-2 definition section title with the given anchor.
 */
private void buildDefinitionTitle(MarkupDocBuilder markupDocBuilder, String title, String anchor) {
    markupDocBuilder.sectionTitleWithAnchorLevel2(title, anchor);
}
Builds definition title
10,814
/**
 * Generates a default example value for a (non-body) parameter based on
 * its Swagger type. Unknown types fall back to the type name itself.
 *
 * @param parameter the parameter to generate an example for
 * @return an example Integer, Double, Boolean or String
 */
public static Object generateExample(AbstractSerializableParameter<?> parameter) {
    String type = parameter.getType();
    if (type.equals("integer")) {
        return 0;
    }
    if (type.equals("number")) {
        return 0.0;
    }
    if (type.equals("boolean")) {
        return true;
    }
    if (type.equals("string")) {
        return ExamplesUtil.generateStringExample(parameter.getFormat(), parameter.getEnum());
    }
    return type;
}
Generate a default example value for parameter .
10,815
/**
 * Resolves the type of the wrapped parameter, or null if the parameter
 * kind is not recognized.
 * Body parameters resolve through their schema (plain "string" when no
 * schema is given); serializable parameters resolve to enum, basic or
 * array types; ref parameters resolve to a reference type.
 *
 * @param definitions                all known model definitions
 * @param definitionDocumentResolver resolver for definition document links
 * @return the resolved type, or null
 */
private Type getType(Map<String, Model> definitions, DocumentResolver definitionDocumentResolver) {
    Validate.notNull(parameter, "parameter must not be null!");
    Type type = null;
    if (parameter instanceof BodyParameter) {
        BodyParameter bodyParameter = (BodyParameter) parameter;
        Model model = bodyParameter.getSchema();
        if (model != null) {
            type = ModelUtils.getType(model, definitions, definitionDocumentResolver);
        } else {
            // No schema: fall back to a plain string type.
            type = new BasicType("string", bodyParameter.getName());
        }
    } else if (parameter instanceof AbstractSerializableParameter) {
        AbstractSerializableParameter serializableParameter = (AbstractSerializableParameter) parameter;
        @SuppressWarnings("unchecked")
        List<String> enums = serializableParameter.getEnum();
        if (CollectionUtils.isNotEmpty(enums)) {
            type = new EnumType(serializableParameter.getName(), enums);
        } else {
            type = new BasicType(serializableParameter.getType(), serializableParameter.getName(), serializableParameter.getFormat());
        }
        // NOTE(review): for "array" parameters this overwrites any enum/basic
        // type resolved above with an array type of the item type.
        if (serializableParameter.getType().equals("array")) {
            String collectionFormat = serializableParameter.getCollectionFormat();
            type = new ArrayType(serializableParameter.getName(), new PropertyAdapter(serializableParameter.getItems()).getType(definitionDocumentResolver), collectionFormat);
        }
    } else if (parameter instanceof RefParameter) {
        String refName = ((RefParameter) parameter).getSimpleRef();
        type = new RefType(definitionDocumentResolver.apply(refName), new ObjectType(refName, null));
    }
    return type;
}
Retrieves the type of a parameter, or null if it cannot be determined.
10,816
/**
 * Returns the default value of the parameter, if the parameter kind
 * supports one; empty otherwise.
 *
 * @return the default value, or empty when absent or unsupported
 */
public Optional<Object> getDefaultValue() {
    Validate.notNull(parameter, "parameter must not be null!");
    if (!(parameter instanceof AbstractSerializableParameter)) {
        return Optional.empty();
    }
    AbstractSerializableParameter serializableParameter = (AbstractSerializableParameter) parameter;
    return Optional.ofNullable(serializableParameter.getDefaultValue());
}
Retrieves the default value of a parameter
10,817
/**
 * Creates a plot canvas visualizing a sparse matrix, with axis labels
 * and grids hidden.
 *
 * @param sparse the matrix to plot
 * @return the configured plot canvas
 */
public static PlotCanvas plot(SparseMatrix sparse) {
    double[] lowerBound = {0, 0};
    double[] upperBound = {sparse.ncols(), sparse.nrows()};
    PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound, false);
    canvas.add(new SparseMatrixPlot(sparse));
    // Hide labels and grid on both axes.
    for (int i = 0; i < 2; i++) {
        canvas.getAxis(i).setLabelVisible(false);
        canvas.getAxis(i).setGridVisible(false);
    }
    return canvas;
}
Create a sparse matrix plot canvas .
10,818
/**
 * Sets whether the frame is visible.
 * NOTE(review): this toggles grid visibility (setGridVisible) on every
 * axis rather than a dedicated frame flag — confirm this is the intended
 * way the axis API exposes the frame.
 *
 * @param v true to show, false to hide
 * @return this grid, for call chaining
 */
public BaseGrid setFrameVisible(boolean v) {
    for (int i = 0; i < axis.length; i++) {
        axis[i].setGridVisible(v);
    }
    return this;
}
Sets whether the frame is visible.
10,819
/**
 * Sets the label of each axis, in axis order.
 *
 * @param axisLabels one label per axis
 * @return this grid, for call chaining
 * @throws IllegalArgumentException if the number of labels differs from
 *         the base dimension
 */
public BaseGrid setAxisLabel(String... axisLabels) {
    if (axisLabels.length != base.getDimension()) {
        throw new IllegalArgumentException("Axis label size don't match base dimension.");
    }
    int i = 0;
    for (String label : axisLabels) {
        axis[i++].setAxisLabel(label);
    }
    return this;
}
Set axis labels .
10,820
/**
 * Returns the labels of all axes, in axis order.
 */
public String[] getAxisLabel() {
    String[] labels = new String[axis.length];
    for (int i = 0; i < labels.length; i++) {
        labels[i] = axis[i].getAxisLabel();
    }
    return labels;
}
Get axis label .
10,821
/**
 * Draws the grid by painting each axis in turn.
 *
 * @param g the graphics context to draw into
 */
public void paint(Graphics g) {
    for (int i = 0; i < axis.length; i++) {
        axis[i].paint(g);
    }
}
Draw the grid .
10,822
/**
 * Mines the association rules, printing each discovered rule to the
 * given stream.
 *
 * @param confidence the minimum confidence of reported rules
 * @param out        the stream rules are printed to
 * @return the number of discovered rules
 */
public long learn(double confidence, PrintStream out) {
    ttree = fim.buildTotalSupportTree();
    long count = 0;
    for (int i = 0; i < ttree.root.children.length; i++) {
        if (ttree.root.children[i] == null) {
            continue;
        }
        int[] itemset = {ttree.root.children[i].id};
        count += learn(out, null, itemset, i, ttree.root.children[i], confidence);
    }
    return count;
}
Mines the association rules . The discovered rules will be printed out to the provided stream .
10,823
/**
 * Mines the association rules and returns the discovered rules as a
 * list.
 *
 * @param confidence the minimum confidence of reported rules
 * @return the discovered association rules
 */
public List<AssociationRule> learn(double confidence) {
    ttree = fim.buildTotalSupportTree();
    List<AssociationRule> rules = new ArrayList<>();
    for (int i = 0; i < ttree.root.children.length; i++) {
        if (ttree.root.children[i] == null) {
            continue;
        }
        int[] itemset = {ttree.root.children[i].id};
        learn(null, rules, itemset, i, ttree.root.children[i], confidence);
    }
    return rules;
}
Mines the association rules . The discovered frequent rules will be returned in a list .
10,824
/**
 * Recursively generates association rules from a T-tree node. For each
 * non-null child below {@code size}, the child's item is appended to the
 * current itemset; rules are generated for the extended itemset and the
 * recursion continues below the child.
 *
 * @param out     optional stream rules are printed to (may be null)
 * @param list    optional list collecting rules (may be null)
 * @param itemset the itemset accumulated along the current path
 * @param size    number of child slots to visit at this node
 * @param node    the current T-tree node
 * @param confidence minimum confidence for reported rules
 * @return the number of rules generated in this subtree
 */
private long learn(PrintStream out, List<AssociationRule> list, int[] itemset, int size, Node node, double confidence) {
    long n = 0;
    if (node.children == null) {
        return n;
    }
    for (int i = 0; i < size; i++) {
        if (node.children[i] != null) {
            int[] newItemset = FPGrowth.insert(itemset, node.children[i].id);
            // Generate rules for the extended itemset, then recurse deeper.
            n += learn(out, list, newItemset, node.children[i].support, confidence);
            n += learn(out, list, newItemset, i, node.children[i], confidence);
        }
    }
    return n;
}
Generates association rules from a T - tree .
10,825
/**
 * Generates all association rules for a given frequent itemset: every
 * proper non-empty subset is tried as the antecedent with its complement
 * as the consequent, keeping rules whose confidence meets the threshold.
 *
 * @param out     optional stream rules are printed to (may be null)
 * @param list    optional list collecting rules (may be null)
 * @param itemset the frequent itemset
 * @param support the support count of the full itemset
 * @param confidence minimum confidence for reported rules
 * @return the number of rules generated
 */
private long learn(PrintStream out, List<AssociationRule> list, int[] itemset, int support, double confidence) {
    long n = 0;
    int[][] combinations = getPowerSet(itemset);
    for (int i = 0; i < combinations.length; i++) {
        int[] complement = getComplement(combinations[i], itemset);
        // complement is null when the subset equals the full itemset.
        if (complement != null) {
            double arc = getConfidence(combinations[i], support);
            if (arc >= confidence) {
                double supp = (double) support / fim.size();
                AssociationRule ar = new AssociationRule(combinations[i], complement, supp, arc);
                n++;
                if (out != null) {
                    out.println(ar);
                }
                if (list != null) {
                    list.add(ar);
                }
            }
        }
    }
    return n;
}
Generates all association rules for a given item set .
10,826
/**
 * Returns the elements of {@code fullset} that are not in
 * {@code subset}, or null when no proper complement exists (subset is at
 * least as large as fullset).
 */
private static int[] getComplement(int[] subset, int[] fullset) {
    int size = fullset.length - subset.length;
    if (size < 1) {
        return null;
    }
    int[] complement = new int[size];
    int index = 0;
    for (int item : fullset) {
        boolean member = false;
        for (int s : subset) {
            if (s == item) {
                member = true;
                break;
            }
        }
        if (!member) {
            complement[index++] = item;
        }
    }
    return complement;
}
Returns the complement of subset .
10,827
/**
 * Returns all proper, non-empty subsets of the given set. The result
 * array is sized by getPowerSetSize (defined elsewhere in this class)
 * and filled by the recursive overload.
 */
private static int[][] getPowerSet(int[] set) {
    int[][] sets = new int[getPowerSetSize(set.length)][];
    getPowerSet(set, 0, null, sets, 0);
    return sets;
}
Returns all possible subsets of the set, except the empty set and the full set.
10,828
/**
 * Recursively enumerates subsets of {@code set}, writing each into
 * {@code sets}. Subsets reaching the full set's size are skipped, so
 * only proper subsets are produced.
 *
 * @param inputIndex  first element index eligible for inclusion
 * @param sofar       the subset built so far (null means empty)
 * @param sets        output array of subsets
 * @param outputIndex next free slot in {@code sets}
 * @return the updated next free slot
 */
private static int getPowerSet(int[] set, int inputIndex, int[] sofar, int[][] sets, int outputIndex) {
    for (int i = inputIndex; i < set.length; i++) {
        int n = sofar == null ? 0 : sofar.length;
        // Only extend while the result stays a proper subset.
        if (n < set.length - 1) {
            int[] subset = new int[n + 1];
            subset[n] = set[i];
            if (sofar != null) {
                System.arraycopy(sofar, 0, subset, 0, n);
            }
            sets[outputIndex] = subset;
            outputIndex = getPowerSet(set, i + 1, subset, sets, outputIndex + 1);
        }
    }
    return outputIndex;
}
Recursively calculates all possible subsets .
10,829
/**
 * Continued-fraction evaluation used by the regularized incomplete beta
 * function, via the modified Lentz's method. Iterates even/odd steps of
 * the continued fraction until the increment is within EPS or MAXITER is
 * reached.
 *
 * @param alpha first shape parameter
 * @param beta  second shape parameter
 * @param x     evaluation point
 * @return the continued-fraction value h
 */
private static double incompleteFractionSummation(double alpha, double beta, double x) {
    final int MAXITER = 500;
    final double EPS = 3.0E-7;
    double aplusb = alpha + beta;
    double aplus1 = alpha + 1.0;
    double aminus1 = alpha - 1.0;
    double c = 1.0;
    double d = 1.0 - aplusb * x / aplus1;
    // FPMIN guards keep d and c away from zero (Lentz's method).
    if (Math.abs(d) < FPMIN) {
        d = FPMIN;
    }
    d = 1.0 / d;
    double h = d;
    double aa = 0.0;
    double del = 0.0;
    int i = 1, i2 = 0;
    boolean test = true;
    while (test) {
        i2 = 2 * i;
        // Even step of the continued fraction.
        aa = i * (beta - i) * x / ((aminus1 + i2) * (alpha + i2));
        d = 1.0 + aa * d;
        if (Math.abs(d) < FPMIN) {
            d = FPMIN;
        }
        c = 1.0 + aa / c;
        if (Math.abs(c) < FPMIN) {
            c = FPMIN;
        }
        d = 1.0 / d;
        h *= d * c;
        // Odd step of the continued fraction.
        aa = -(alpha + i) * (aplusb + i) * x / ((alpha + i2) * (aplus1 + i2));
        d = 1.0 + aa * d;
        if (Math.abs(d) < FPMIN) {
            d = FPMIN;
        }
        c = 1.0 + aa / c;
        if (Math.abs(c) < FPMIN) {
            c = FPMIN;
        }
        d = 1.0 / d;
        del = d * c;
        h *= del;
        i++;
        if (Math.abs(del - 1.0) < EPS) {
            test = false;
        }
        if (i > MAXITER) {
            test = false;
            // Fixed typo in the log message ("wes" -> "was").
            logger.error("Beta.incompleteFractionSummation: Maximum number of iterations was exceeded");
        }
    }
    return h;
}
Incomplete fraction summation used in the method regularizedIncompleteBeta, using a modified Lentz's method.
10,830
/**
 * Computes the kernel matrix P in parallel, searching per row for the
 * Gaussian kernel width that yields the given perplexity.
 *
 * @param D          pairwise distance matrix (assumed square; presumably
 *                   squared distances — TODO confirm against callers)
 * @param perplexity the target perplexity
 * @param tol        tolerance of the width search
 * @return the kernel matrix P, filled by the worker tasks
 */
private double[][] expd(double[][] D, double perplexity, double tol) {
    int n = D.length;
    double[][] P = new double[n][n];
    double[] DiSum = Math.rowSums(D);
    // Split the rows into one chunk per worker thread; the last chunk
    // absorbs the remainder.
    int nprocs = MulticoreExecutor.getThreadPoolSize();
    int chunk = n / nprocs;
    List<PerplexityTask> tasks = new ArrayList<>();
    for (int i = 0; i < nprocs; i++) {
        int start = i * chunk;
        int end = i == nprocs - 1 ? n : (i + 1) * chunk;
        PerplexityTask task = new PerplexityTask(start, end, D, P, DiSum, perplexity, tol);
        tasks.add(task);
    }
    try {
        MulticoreExecutor.run(tasks);
    } catch (Exception e) {
        // Pass the exception as SLF4J's throwable argument so the stack
        // trace is logged; a "{}" placeholder would consume it as a plain
        // format argument and drop the trace.
        logger.error("t-SNE Gaussian kernel width search task fails", e);
    }
    return P;
}
Computes the Gaussian kernel, searching for the kernel width that yields the given perplexity.
10,831
/**
 * Recursively builds a k-d tree node over keys[index[begin..end)].
 * The dimension with the largest spread becomes the split dimension,
 * cut at the midpoint of its bounds; the index range is partitioned in
 * place and both halves are built recursively.
 *
 * @param begin start (inclusive) of the index range
 * @param end   end (exclusive) of the index range
 * @return the built (sub)tree root
 */
private Node buildNode(int begin, int end) {
    int d = keys[0].length;
    Node node = new Node();
    node.count = end - begin;
    node.index = begin;
    // Compute the bounding box of the points in this node.
    double[] lowerBound = new double[d];
    double[] upperBound = new double[d];
    for (int i = 0; i < d; i++) {
        lowerBound[i] = keys[index[begin]][i];
        upperBound[i] = keys[index[begin]][i];
    }
    for (int i = begin + 1; i < end; i++) {
        for (int j = 0; j < d; j++) {
            double c = keys[index[i]][j];
            if (lowerBound[j] > c) {
                lowerBound[j] = c;
            }
            if (upperBound[j] < c) {
                upperBound[j] = c;
            }
        }
    }
    // Pick the dimension with the largest half-extent; cut at its midpoint.
    double maxRadius = -1;
    for (int i = 0; i < d; i++) {
        double radius = (upperBound[i] - lowerBound[i]) / 2;
        if (radius > maxRadius) {
            maxRadius = radius;
            node.split = i;
            node.cutoff = (upperBound[i] + lowerBound[i]) / 2;
        }
    }
    // All points identical: this node is a leaf.
    if (maxRadius == 0) {
        node.lower = node.upper = null;
        return node;
    }
    // Two-pointer partition of index[begin..end) around the cutoff;
    // size counts the points falling below the cutoff.
    int i1 = begin, i2 = end - 1, size = 0;
    while (i1 <= i2) {
        boolean i1Good = (keys[index[i1]][node.split] < node.cutoff);
        boolean i2Good = (keys[index[i2]][node.split] >= node.cutoff);
        if (!i1Good && !i2Good) {
            int temp = index[i1];
            index[i1] = index[i2];
            index[i2] = temp;
            i1Good = i2Good = true;
        }
        if (i1Good) {
            i1++;
            size++;
        }
        if (i2Good) {
            i2--;
        }
    }
    node.lower = buildNode(begin, begin + size);
    node.upper = buildNode(begin + size, end);
    return node;
}
Build a k - d tree from the given set of dataset .
10,832
/**
 * Searches for the nearest neighbor of q below the given tree node,
 * updating {@code neighbor} in place when a closer point is found.
 * Distances here are squared Euclidean distances.
 *
 * @param q        the query point
 * @param node     subtree root to search under
 * @param neighbor current best neighbor, mutated in place
 */
private void search(double[] q, Node node, Neighbor<double[], E> neighbor) {
    if (node.isLeaf()) {
        for (int idx = node.index; idx < node.index + node.count; idx++) {
            // Optionally skip the query point itself (reference identity).
            if (q == keys[index[idx]] && identicalExcluded) {
                continue;
            }
            double distance = Math.squaredDistance(q, keys[index[idx]]);
            if (distance < neighbor.distance) {
                neighbor.key = keys[index[idx]];
                neighbor.value = data[index[idx]];
                neighbor.index = index[idx];
                neighbor.distance = distance;
            }
        }
    } else {
        Node nearer, further;
        double diff = q[node.split] - node.cutoff;
        if (diff < 0) {
            nearer = node.lower;
            further = node.upper;
        } else {
            nearer = node.upper;
            further = node.lower;
        }
        search(q, nearer, neighbor);
        // Only descend into the far side when the splitting plane is
        // within the current best (squared) distance.
        if (neighbor.distance >= diff * diff) {
            search(q, further, neighbor);
        }
    }
}
Searches for the nearest neighbor of the given target, starting from the given tree node.
10,833
/**
 * Collects all neighbors within {@code radius} (Euclidean distance) of
 * q below the given tree node into {@code neighbors}.
 *
 * @param q         the query point
 * @param node      subtree root to search under
 * @param radius    inclusive search radius
 * @param neighbors output list of matches
 */
private void search(double[] q, Node node, double radius, List<Neighbor<double[], E>> neighbors) {
    if (node.isLeaf()) {
        for (int idx = node.index; idx < node.index + node.count; idx++) {
            // Optionally skip the query point itself (reference identity).
            if (q == keys[index[idx]] && identicalExcluded) {
                continue;
            }
            double distance = Math.distance(q, keys[index[idx]]);
            if (distance <= radius) {
                neighbors.add(new Neighbor<>(keys[index[idx]], data[index[idx]], index[idx], distance));
            }
        }
    } else {
        Node nearer, further;
        double diff = q[node.split] - node.cutoff;
        if (diff < 0) {
            nearer = node.lower;
            further = node.upper;
        } else {
            nearer = node.upper;
            further = node.lower;
        }
        search(q, nearer, radius, neighbors);
        // The far side can only contain hits when the splitting plane is
        // within the search radius.
        if (radius >= Math.abs(diff)) {
            search(q, further, radius, neighbors);
        }
    }
}
Returns the neighbors within the given range of the search target, starting from the given tree node.
10,834
/**
 * Returns the matrix inverse, computed by solving against the identity
 * matrix.
 */
public DenseMatrix inverse() {
    int n = L.nrows();
    DenseMatrix identity = Matrix.eye(n);
    // solve() overwrites its argument with the solution, i.e. the inverse.
    solve(identity);
    return identity;
}
Returns the matrix inverse .
10,835
/**
 * Handles the Copy and Paste action commands for the table.
 * Copy requires a single contiguous block of selected cells and puts a
 * tab/newline-delimited string on the system clipboard. Paste aligns the
 * clipboard content's upper-left corner with the first selected cell,
 * ignoring cells that would fall outside the table.
 *
 * @param e the triggering action event
 */
public void actionPerformed(ActionEvent e) {
    if (e.getActionCommand().compareTo("Copy") == 0) {
        StringBuilder sbf = new StringBuilder();
        int numcols = table.getSelectedColumnCount();
        int numrows = table.getSelectedRowCount();
        int[] rowsselected = table.getSelectedRows();
        int[] colsselected = table.getSelectedColumns();
        // Copy is only valid for one contiguous rectangular selection.
        boolean contiguousRows = numrows - 1 == rowsselected[rowsselected.length - 1] - rowsselected[0] && numrows == rowsselected.length;
        boolean contiguousCols = numcols - 1 == colsselected[colsselected.length - 1] - colsselected[0] && numcols == colsselected.length;
        if (!(contiguousRows && contiguousCols)) {
            JOptionPane.showMessageDialog(null, "Invalid Copy Selection", "Invalid Copy Selection", JOptionPane.ERROR_MESSAGE);
            return;
        }
        for (int i = 0; i < numrows; i++) {
            for (int j = 0; j < numcols; j++) {
                sbf.append(table.getValueAt(rowsselected[i], colsselected[j]));
                if (j < numcols - 1) {
                    sbf.append("\t");
                }
            }
            sbf.append("\n");
        }
        stsel = new StringSelection(sbf.toString());
        system = Toolkit.getDefaultToolkit().getSystemClipboard();
        system.setContents(stsel, stsel);
    }
    if (e.getActionCommand().compareTo("Paste") == 0) {
        LOGGER.log(Level.FINE, "Trying to Paste");
        int startRow = (table.getSelectedRows())[0];
        int startCol = (table.getSelectedColumns())[0];
        try {
            String trstring = (String) (system.getContents(this).getTransferData(DataFlavor.stringFlavor));
            StringTokenizer st1 = new StringTokenizer(trstring, "\n");
            for (int i = 0; st1.hasMoreTokens(); i++) {
                rowstring = st1.nextToken();
                StringTokenizer st2 = new StringTokenizer(rowstring, "\t");
                for (int j = 0; st2.hasMoreTokens(); j++) {
                    value = (String) st2.nextToken();
                    // Silently skip cells outside the table bounds.
                    if (startRow + i < table.getRowCount() && startCol + j < table.getColumnCount()) {
                        table.setValueAt(value, startRow + i, startCol + j);
                    }
                }
            }
        } catch (Exception ex) {
            // Previously swallowed silently; log so paste failures
            // (missing clipboard flavor, I/O errors) are diagnosable.
            LOGGER.log(Level.FINE, "Paste failed", ex);
        }
    }
}
This method is activated on the Keystrokes we are listening to in this implementation . Here it listens for Copy and Paste ActionCommands . Selections comprising non - adjacent cells result in invalid selection and then copy action cannot be performed . Paste is done by aligning the upper left corner of the selection with the 1st element in the current selection of the JTable .
10,836
/**
 * Finds the top-k bigram collocations by likelihood-ratio score.
 * Scores are negated when pushed into the heap — presumably so the heap
 * retains the k largest scores; TODO confirm HeapSelect semantics — and
 * un-negated when copied into the result in descending order.
 * NOTE(review): if the corpus yields fewer than k qualifying bigrams,
 * the backing array keeps null slots and the copy loop throws a
 * NullPointerException — confirm callers guarantee enough bigrams.
 *
 * @param corpus the corpus to search
 * @param k      number of collocations to return
 * @return the top-k collocations, best first
 */
public BigramCollocation[] find(Corpus corpus, int k) {
    BigramCollocation[] bigrams = new BigramCollocation[k];
    HeapSelect<BigramCollocation> heap = new HeapSelect<>(bigrams);
    Iterator<Bigram> iterator = corpus.getBigrams();
    while (iterator.hasNext()) {
        Bigram bigram = iterator.next();
        int c12 = corpus.getBigramFrequency(bigram);
        if (c12 > minFreq) {
            int c1 = corpus.getTermFrequency(bigram.w1);
            int c2 = corpus.getTermFrequency(bigram.w2);
            double score = likelihoodRatio(c1, c2, c12, corpus.size());
            heap.add(new BigramCollocation(bigram.w1, bigram.w2, c12, -score));
        }
    }
    heap.sort();
    BigramCollocation[] collocations = new BigramCollocation[k];
    for (int i = 0; i < k; i++) {
        BigramCollocation bigram = bigrams[k - i - 1];
        collocations[i] = new BigramCollocation(bigram.w1(), bigram.w2(), bigram.frequency(), -bigram.score());
    }
    return collocations;
}
Finds top k bigram collocations in the given corpus .
10,837
/**
 * Finds bigram collocations whose likelihood-ratio score exceeds the
 * chi-square quantile at the given significance level, returned in
 * descending score order.
 *
 * @param corpus the corpus to search
 * @param p      significance threshold, strictly between 0 and 1
 * @return qualifying collocations, best first
 * @throws IllegalArgumentException if p is outside (0, 1)
 */
public BigramCollocation[] find(Corpus corpus, double p) {
    if (p <= 0.0 || p >= 1.0) {
        throw new IllegalArgumentException("Invalid p = " + p);
    }
    double cutoff = chisq.quantile(p);
    ArrayList<BigramCollocation> bigrams = new ArrayList<>();
    Iterator<Bigram> iterator = corpus.getBigrams();
    while (iterator.hasNext()) {
        Bigram bigram = iterator.next();
        int c12 = corpus.getBigramFrequency(bigram);
        if (c12 > minFreq) {
            int c1 = corpus.getTermFrequency(bigram.w1);
            int c2 = corpus.getTermFrequency(bigram.w2);
            double score = likelihoodRatio(c1, c2, c12, corpus.size());
            if (score > cutoff) {
                bigrams.add(new BigramCollocation(bigram.w1, bigram.w2, c12, score));
            }
        }
    }
    // toArray instead of the previous manual element-by-element copy loop.
    BigramCollocation[] collocations = bigrams.toArray(new BigramCollocation[0]);
    Arrays.sort(collocations);
    // Natural order is ascending; reverse in place for descending score.
    for (int i = 0, j = collocations.length - 1; i < j; i++, j--) {
        BigramCollocation tmp = collocations[i];
        collocations[i] = collocations[j];
        collocations[j] = tmp;
    }
    return collocations;
}
Finds bigram collocations in the given corpus whose p - value is less than the given threshold .
10,838
/**
 * Computes the likelihood ratio test statistic -2 log(lambda) for a
 * bigram, comparing the pooled-probability null hypothesis against
 * separate probabilities for occurrences after w1 and elsewhere.
 *
 * @param c1  frequency of the first word
 * @param c2  frequency of the second word
 * @param c12 frequency of the bigram
 * @param N   corpus size
 * @return the test statistic -2 log(lambda)
 */
private double likelihoodRatio(int c1, int c2, int c12, long N) {
    double pPooled = (double) c2 / N;
    double pAfterW1 = (double) c12 / c1;
    double pElsewhere = (double) (c2 - c12) / (N - c1);
    double logLambda = logL(c12, c1, pPooled) + logL(c2 - c12, N - c1, pPooled)
            - logL(c12, c1, pAfterW1) - logL(c2 - c12, N - c1, pElsewhere);
    return -2 * logLambda;
}
Returns the likelihood ratio test statistic -2 log λ.
10,839
/**
 * Log-likelihood of observing k successes in n Bernoulli(x) trials,
 * with x clamped away from 0 and 1 so the logarithms stay finite.
 */
private double logL(int k, long n, double x) {
    double px = x;
    if (px == 0.0) {
        px = 0.01;
    } else if (px == 1.0) {
        px = 0.99;
    }
    return k * Math.log(px) + (n - k) * Math.log(1.0 - px);
}
Help function for calculating likelihood ratio statistic .
10,840
/**
 * Adaptive denoising of a time series with the given wavelet: forward
 * transform, threshold the coefficients with the universal threshold
 * (noise level estimated via MAD of the second-half coefficients), then
 * inverse transform.
 *
 * @param t       the time series, modified in place
 * @param wavelet the wavelet basis
 * @param soft    true for soft thresholding, false for hard
 */
public static void denoise(double[] t, Wavelet wavelet, boolean soft) {
    wavelet.transform(t);
    int n = t.length;
    int nh = t.length >> 1;
    // Estimate the noise scale from the second-half coefficients
    // (MAD / 0.6745 is the standard robust sigma estimate).
    double[] wc = new double[nh];
    System.arraycopy(t, nh, wc, 0, nh);
    double error = Math.mad(wc) / 0.6745;
    double lambda = error * Math.sqrt(2 * Math.log(n));
    // Thresholding starts at index 2 — presumably to preserve the
    // coarsest approximation coefficients; TODO confirm against the
    // wavelet coefficient layout.
    if (soft) {
        for (int i = 2; i < n; i++) {
            t[i] = Math.signum(t[i]) * Math.max(Math.abs(t[i]) - lambda, 0.0);
        }
    } else {
        for (int i = 2; i < n; i++) {
            if (Math.abs(t[i]) < lambda) {
                t[i] = 0.0;
            }
        }
    }
    wavelet.inverse(t);
}
Adaptive denoising a time series with given wavelet .
10,841
/**
 * Computes the second derivatives y2 of the interpolating cubic spline
 * at the tabulated points (x, y), using the standard tridiagonal
 * forward-elimination / back-substitution scheme. Natural boundary
 * conditions (zero second derivative) are applied at both ends.
 *
 * @param x abscissas of the tabulated points, strictly increasing
 * @param y ordinates of the tabulated points
 */
private void sety2(double[] x, double[] y) {
    double p, qn, sig, un;
    double[] u = new double[n - 1];
    y2[0] = u[0] = 0.0;  // natural condition at the left boundary
    // Forward elimination of the tridiagonal system.
    for (int i = 1; i < n - 1; i++) {
        sig = (x[i] - x[i - 1]) / (x[i + 1] - x[i - 1]);
        p = sig * y2[i - 1] + 2.0;
        y2[i] = (sig - 1.0) / p;
        u[i] = (y[i + 1] - y[i]) / (x[i + 1] - x[i]) - (y[i] - y[i - 1]) / (x[i] - x[i - 1]);
        u[i] = (6.0 * u[i] / (x[i + 1] - x[i - 1]) - sig * u[i - 1]) / p;
    }
    qn = un = 0.0;  // natural condition at the right boundary
    y2[n - 1] = (un - qn * u[n - 2]) / (qn * y2[n - 2] + 1.0);
    // Back-substitution.
    for (int k = n - 2; k >= 0; k--) {
        y2[k] = y2[k] * y2[k + 1] + u[k];
    }
}
Calculate the second derivatives of the interpolating function at the tabulated points . At the endpoints we use a natural spline with zero second derivative on that boundary .
10,842
/**
 * BIC score of the mixture on the given data: the log-likelihood minus
 * 0.5 * (number of parameters) * log(n). Zero-probability samples are
 * skipped in the likelihood sum.
 *
 * @param data the samples to score
 * @return the BIC score
 * @throws IllegalStateException if the mixture has no components
 */
public double bic(double[] data) {
    if (components.isEmpty()) {
        throw new IllegalStateException("Mixture is empty!");
    }
    double logLikelihood = 0.0;
    for (int i = 0; i < data.length; i++) {
        double density = p(data[i]);
        if (density > 0) {
            logLikelihood += Math.log(density);
        }
    }
    int n = data.length;
    return logLikelihood - 0.5 * npara() * Math.log(n);
}
BIC score of the mixture for given data .
10,843
/**
 * Initializes cluster assignments with k-means++-style seeding over the
 * Jensen-Shannon divergence: each next centroid is sampled with
 * probability proportional to every point's divergence from its closest
 * centroid so far.
 *
 * @param data the dataset
 * @param k    the number of clusters
 * @return the initial cluster label of each sample
 */
private static int[] seed(SparseDataset data, int k) {
    int n = data.size();
    int[] y = new int[n];
    // Pick the first centroid uniformly at random.
    SparseArray centroid = data.get(Math.randomInt(n)).x;
    double[] D = new double[n];
    for (int i = 0; i < n; i++) {
        D[i] = Double.MAX_VALUE;
    }
    for (int i = 1; i < k; i++) {
        // Update each point's divergence to its nearest centroid and label.
        for (int j = 0; j < n; j++) {
            double dist = Math.JensenShannonDivergence(data.get(j).x, centroid);
            if (dist < D[j]) {
                D[j] = dist;
                y[j] = i - 1;
            }
        }
        // Sample the next centroid with probability proportional to D.
        double cutoff = Math.random() * Math.sum(D);
        double cost = 0.0;
        int index = 0;
        for (; index < n; index++) {
            cost += D[index];
            if (cost >= cutoff) {
                break;
            }
        }
        centroid = data.get(index).x;
    }
    // Final pass: assign points closer to the last centroid to cluster k-1.
    for (int j = 0; j < n; j++) {
        double dist = Math.JensenShannonDivergence(data.get(j).x, centroid);
        if (dist < D[j]) {
            D[j] = dist;
            y[j] = k - 1;
        }
    }
    return y;
}
Initialize clusters with KMeans ++ algorithm .
10,844
/**
 * Two-tailed cumulative distribution function, computed via the
 * regularized incomplete beta function with parameters nu/2 and 1/2.
 * NOTE(review): the method name looks like a typo for "cdf2tailed";
 * kept as-is because callers depend on it.
 *
 * @param x the evaluation point, must be non-negative
 * @throws IllegalArgumentException if x is negative
 */
public double cdf2tiled(double x) {
    if (x < 0) {
        throw new IllegalArgumentException("Invalid x: " + x);
    }
    return 1.0 - Beta.regularizedIncompleteBetaFunction(0.5 * nu, 0.5, nu / (nu + x * x));
}
Two - tailed cdf .
10,845
/**
 * Two-tailed quantile, computed by inverting the regularized incomplete
 * beta function with parameters nu/2 and 1/2.
 * NOTE(review): the method name looks like a typo for "quantile2tailed";
 * kept as-is because callers depend on it.
 *
 * @param p the probability, in [0, 1]
 * @throws IllegalArgumentException if p is outside [0, 1]
 */
public double quantile2tiled(double p) {
    if (p < 0.0 || p > 1.0) {
        throw new IllegalArgumentException("Invalid p: " + p);
    }
    double x = Beta.inverseRegularizedIncompleteBetaFunction(0.5 * nu, 0.5, 1.0 - p);
    return Math.sqrt(nu * (1.0 - x) / x);
}
Two - tailed quantile .
10,846
/**
 * Parses a dataset from an input stream. The stream is wrapped in a
 * buffered reader which is closed before returning.
 *
 * @param name   the dataset name
 * @param stream the source stream
 * @throws IOException    on read failure
 * @throws ParseException on malformed content
 */
public AttributeDataset parse(String name, InputStream stream) throws IOException, ParseException {
    InputStreamReader source = new InputStreamReader(stream);
    try (BufferedReader reader = new BufferedReader(source)) {
        return parse(name, null, reader);
    }
}
Parse a dataset from an input stream .
10,847
/**
 * Converts a coordinate to a string like "(1.0,2.0)", with each
 * component rounded to 2 decimal places. An empty coordinate yields
 * "()".
 *
 * @param c the coordinate components
 * @return the formatted coordinate string
 */
public static String coordToString(double... c) {
    StringBuilder builder = new StringBuilder("(");
    for (int i = 0; i < c.length; i++) {
        builder.append(Math.round(c[i], 2)).append(",");
    }
    if (c.length > 0) {
        // Replace the trailing comma with the closing parenthesis.
        // Fixed: the index must be length()-1 — setCharAt(length(), ...)
        // is out of bounds and threw StringIndexOutOfBoundsException.
        builder.setCharAt(builder.length() - 1, ')');
    } else {
        builder.append(")");
    }
    return builder.toString();
}
Convert coordinate to a string .
10,848
/**
 * Returns the median of the array, i.e. its (length/2)-th smallest
 * element as found by selection. NOTE: select may partially reorder the
 * array as a side effect — confirm against its implementation.
 *
 * @param a the array, must be non-empty
 * @return the median element
 */
public static <T extends Comparable<? super T>> T median(T[] a) {
    return select(a, a.length / 2);
}
Finds the median of an array of comparable elements.
10,849
/**
 * Calculate AUC (area under the ROC curve) for a binary classifier
 * using the rank-sum (Mann-Whitney U) formulation.
 *
 * @param truth the ground truth labels; must be 0 or 1.
 * @param probability the score of the positive class for each sample.
 * @return the AUC.
 * @throws IllegalArgumentException if the arrays differ in length or a
 *         label is neither 0 nor 1.
 */
public static double measure(int[] truth, double[] probability) {
    if (truth.length != probability.length) {
        throw new IllegalArgumentException(String.format("The vector sizes don't match: %d != %d.", truth.length, probability.length));
    }

    // Count positives and negatives, validating labels along the way.
    double pos = 0;
    double neg = 0;
    for (int i = 0; i < truth.length; i++) {
        if (truth[i] == 0) {
            neg++;
        } else if (truth[i] == 1) {
            pos++;
        } else {
            throw new IllegalArgumentException("AUC is only for binary classification. Invalid label: " + truth[i]);
        }
    }

    // Sort scores ascending, carrying labels along; clones keep the
    // caller's arrays intact.
    int[] label = truth.clone();
    double[] prediction = probability.clone();
    QuickSort.sort(prediction, label);

    // Assign 1-based ranks; tied scores share the average of their ranks.
    double[] rank = new double[label.length];
    for (int i = 0; i < prediction.length; i++) {
        if (i == prediction.length - 1 || prediction[i] != prediction[i + 1]) {
            rank[i] = i + 1;
        } else {
            // j is the first index past the run of ties starting at i.
            int j = i + 1;
            for (; j < prediction.length && prediction[j] == prediction[i]; j++);
            // Average of the 1-based ranks i+1 .. j.
            double r = (i + 1 + j) / 2.0;
            for (int k = i; k < j; k++) rank[k] = r;
            i = j - 1;
        }
    }

    // Sum of ranks of the positive samples.
    double auc = 0.0;
    for (int i = 0; i < label.length; i++) {
        if (label[i] == 1) auc += rank[i];
    }

    // Mann-Whitney U statistic normalized by the number of pos/neg pairs.
    auc = (auc - (pos * (pos + 1) / 2.0)) / (pos * neg);
    return auc;
}
Calculate AUC for a binary classifier .
10,850
/**
 * Returns the neurons in the network as an array, with each neuron
 * linked to its topological neighbors.
 */
public Neuron[] neurons() {
    // Map node ids to freshly created neurons so edges can be resolved.
    HashMap<Integer, Neuron> hash = new HashMap<>();

    // First pass: create one neuron per node; neighbor slots stay empty.
    Neuron[] neurons = new Neuron[nodes.size()];
    int i = 0;
    for (Node node : nodes) {
        Neuron[] neighbors = new Neuron[node.edges.size()];
        neurons[i] = new Neuron(node.w, neighbors);
        hash.put(node.id, neurons[i]);
        i++;
    }

    // Second pass: fill each neuron's neighbors from its node's edges,
    // picking whichever endpoint of the edge is not the node itself.
    i = 0;
    for (Node node : nodes) {
        int j = 0;
        for (Edge edge : node.edges) {
            if (edge.a != node)
                neurons[i].neighbors[j++] = hash.get(edge.a.id);
            else
                neurons[i].neighbors[j++] = hash.get(edge.b.id);
        }
        i++;
    }

    return neurons;
}
Returns the neurons in the network .
10,851
/**
 * Cluster a new instance to the nearest neuron. If a partition of the
 * neurons is available (and matches the current network size), the
 * neuron's cluster label is returned; otherwise the neuron index itself.
 */
public int predict(double[] x) {
    int winner = 0;
    double winnerDist = Double.MAX_VALUE;

    int index = 0;
    for (Node neuron : nodes) {
        double d = Math.squaredDistance(x, neuron.w);
        if (d < winnerDist) {
            winnerDist = d;
            winner = index;
        }
        index++;
    }

    // Without a (current) partition, report the neuron index itself.
    if (y == null || y.length != nodes.size()) {
        return winner;
    }
    return y[winner];
}
Cluster a new instance to the nearest neuron .
10,852
/**
 * Project the screen coordinate back to the logical plot coordinates.
 *
 * @param x the horizontal screen position in pixels.
 * @param y the vertical screen position in pixels (origin at top-left).
 * @return the logical (x, y) coordinates.
 */
public double[] inverseProjection(int x, int y) {
    double[] sc = new double[2];

    // Logical units per pixel on the x-axis, excluding the margins.
    double ratio = (canvas.base.upperBound[0] - canvas.base.lowerBound[0]) / (canvas.getWidth() * (1 - 2 * canvas.margin));
    sc[0] = canvas.base.lowerBound[0] + ratio * (x - canvas.getWidth() * canvas.margin);

    // Same for the y-axis; screen y grows downward, so it is flipped.
    ratio = (canvas.base.upperBound[1] - canvas.base.lowerBound[1]) / (canvas.getHeight() * (1 - 2 * canvas.margin));
    sc[1] = canvas.base.lowerBound[1] + ratio * (canvas.getHeight() * (1 - canvas.margin) - y);

    return sc;
}
Project the screen coordinate back to the logical coordinates .
10,853
/**
 * Returns the cluster label of each neuron as a height-by-width grid.
 *
 * @return clusterLabels[i][j] is the cluster of the neuron at row i, column j.
 * @throws IllegalStateException if partition() has not been called yet.
 */
public int[][] getClusterLabel() {
    if (y == null) {
        throw new IllegalStateException("Neuron cluster labels are not available. Call partition() first.");
    }

    int[][] clusterLabels = new int[height][width];
    // (The original declared an unused loop variable `l`; removed.)
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            clusterLabels[i][j] = y[i * width + j];
        }
    }
    return clusterLabels;
}
Returns the cluster labels for each neuron . If the neurons have not been clustered it throws an IllegalStateException .
10,854
/**
 * Cluster the neurons into k groups with hierarchical clustering (UPGMA
 * linkage), then assign every sample to the cluster of its best matching
 * unit.
 *
 * @param k the number of clusters.
 * @return the cluster label of each sample.
 */
public int[] partition(int k) {
    int n = width * height;

    // Flatten the neuron grid into a row-major list of weight vectors.
    double[][] units = new double[n][d];
    for (int i = 0, l = 0; i < height; i++) {
        for (int j = 0; j < width; j++, l++) {
            units[l] = neurons[i][j];
        }
    }

    // Lower-triangular pairwise Euclidean distances between neurons.
    double[][] proximity = new double[n][];
    for (int i = 0; i < n; i++) {
        proximity[i] = new double[i + 1];
        for (int j = 0; j < i; j++) {
            proximity[i][j] = Math.distance(units[i], units[j]);
        }
    }

    // Cluster the neurons; y caches the per-neuron label (row-major).
    Linkage linkage = new UPGMALinkage(proximity);
    HierarchicalClustering hc = new HierarchicalClustering(linkage);
    y = hc.partition(k);

    // Propagate neuron labels to samples via their BMU grid coordinates.
    int[] cluster = new int[bmu.length];
    for (int i = 0; i < cluster.length; i++) {
        cluster[i] = y[bmu[i][0] * width + bmu[i][1]];
    }

    return cluster;
}
Clustering the neurons into k groups . And then assigns the samples in each neuron to the corresponding cluster .
10,855
/**
 * Cluster a new instance to the nearest neuron of the map. If the
 * neurons have been partitioned, returns that neuron's cluster label;
 * otherwise returns the neuron's row-major index.
 */
public int predict(double[] x) {
    int row = -1;
    int col = -1;
    double nearest = Double.MAX_VALUE;

    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            double d = Math.squaredDistance(neurons[i][j], x);
            if (d < nearest) {
                nearest = d;
                row = i;
                col = j;
            }
        }
    }

    int unit = row * width + col;
    return (y == null) ? unit : y[unit];
}
Cluster a new instance to the nearest neuron . For clustering purposes one should build a sufficiently large map to capture the structure of the data space . Then the neurons of the map can be clustered into a small number of clusters . Finally the sample is assigned to the cluster of its nearest neuron .
10,856
/**
 * Parse a libsvm sparse dataset from the file at the given path.
 */
public SparseDataset parse(String name, String path) throws IOException, ParseException {
    File file = new File(path);
    return parse(name, file);
}
Parse a libsvm sparse dataset from given file .
10,857
/**
 * Parse a libsvm sparse dataset from an input stream. The first token of
 * each line is the response (an integer class label or a numeric value);
 * the remaining tokens are index:value pairs with 1-based indices. For
 * classification data, labels are remapped to 0..k-1 in sorted order.
 *
 * @throws IOException if the stream is empty.
 * @throws NumberFormatException if a token cannot be parsed.
 */
public SparseDataset parse(String name, InputStream stream) throws IOException, ParseException {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
        String line = reader.readLine();
        if (line == null) {
            throw new IOException("Empty data source.");
        }

        // Sniff the response type from the first line: integer => class
        // label (classification); otherwise numeric => regression.
        String[] tokens = line.trim().split("\\s+");
        boolean classification = true;
        Attribute response = null;
        try {
            Integer.valueOf(tokens[0]);
            response = new NominalAttribute("class");
        } catch (NumberFormatException e) {
            try {
                Double.valueOf(tokens[0]);
                // BUG FIX: a regression response is numeric; the original
                // created a NominalAttribute("response") here.
                response = new NumericAttribute("response");
                classification = false;
            } catch (NumberFormatException ex) {
                logger.error("Failed to parse {}", tokens[0], ex);
                throw new NumberFormatException("Unrecognized response variable value: " + tokens[0]);
            }
        }

        SparseDataset sparse = new SparseDataset(name, response);
        for (int i = 0; line != null; i++) {
            tokens = line.trim().split("\\s+");

            if (classification) {
                int y = Integer.parseInt(tokens[0]);
                sparse.set(i, y);
            } else {
                double y = Double.parseDouble(tokens[0]);
                sparse.set(i, y);
            }

            // Remaining tokens are "index:value" pairs (1-based indices).
            for (int k = 1; k < tokens.length; k++) {
                String[] pair = tokens[k].split(":");
                if (pair.length != 2) {
                    throw new NumberFormatException("Invalid data: " + tokens[k]);
                }

                int j = Integer.parseInt(pair[0]) - 1;
                double x = Double.parseDouble(pair[1]);
                sparse.set(i, j, x);
            }

            line = reader.readLine();
        }

        if (classification) {
            // Remap arbitrary integer labels to contiguous 0..k-1.
            int n = sparse.size();
            int[] y = sparse.toArray(new int[n]);
            int[] label = Math.unique(y);
            Arrays.sort(label);

            // Register the original labels with the nominal attribute.
            for (int c : label) {
                response.valueOf(String.valueOf(c));
            }

            for (int i = 0; i < n; i++) {
                sparse.get(i).y = Arrays.binarySearch(label, y[i]);
            }
        }

        return sparse;
    }
}
Parse a libsvm sparse dataset from an input stream .
10,858
/**
 * Parse a TXT dataset from the file at the given path.
 */
public AttributeDataset parse(String name, String path) throws IOException, ParseException {
    File file = new File(path);
    return parse(name, file);
}
Parse a TXT dataset from given file .
10,859
/**
 * Parse a TXT dataset from an input stream. The first line is a
 * tab-separated header: the first column is the row name, an optional
 * second column "description", and the remaining columns are numeric
 * attribute names. Empty cells become NaN.
 *
 * @throws IOException if the stream is empty or a row has the wrong
 *         number of columns.
 */
public AttributeDataset parse(String name, InputStream stream) throws IOException, ParseException {
    // FIX: try-with-resources closes the reader on all paths; the
    // original leaked it when an exception was thrown mid-parse.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
        String line = reader.readLine();
        if (line == null) {
            throw new IOException("Empty data source.");
        }

        String[] tokens = line.split("\t", -1);
        int start = 1;
        int p = tokens.length - 1;
        if (tokens[1].equalsIgnoreCase("description")) {
            start = 2;
            p = tokens.length - 2;
        }

        Attribute[] attributes = new Attribute[p];
        for (int i = 0; i < p; i++) {
            attributes[i] = new NumericAttribute(tokens[i + start]);
        }

        AttributeDataset data = new AttributeDataset(name, attributes);

        for (int i = 2; (line = reader.readLine()) != null; i++) {
            tokens = line.split("\t", -1);
            if (tokens.length != p + start) {
                throw new IOException(String.format("Invalid number of elements of line %d: %d", i, tokens.length));
            }

            double[] x = new double[p];
            for (int j = 0; j < p; j++) {
                if (tokens[j + start].isEmpty()) {
                    x[j] = Double.NaN; // missing value
                } else {
                    // parseDouble avoids the boxing of Double.valueOf.
                    x[j] = Double.parseDouble(tokens[j + start]);
                }
            }

            AttributeDataset.Row datum = data.add(x);
            datum.name = tokens[0];
            if (start == 2) {
                datum.description = tokens[1];
            }
        }

        return data;
    }
}
Parse a TXT dataset from an input stream .
10,860
/**
 * Creates a matrix initialized by A. Delegates to the native NLMatrix
 * implementation when available, falling back to the pure Java matrix.
 */
public static DenseMatrix matrix(double[][] A) {
    if (nlmatrixZeros != null) {
        try {
            return (DenseMatrix) nlmatrixArray2D.newInstance((Object) A);
        } catch (Exception e) {
            // FIX: pass the exception as the trailing throwable argument
            // (no placeholder) so SLF4J logs the full stack trace.
            logger.error("Failed to call NLMatrix(double[][])", e);
        }
    }

    return new JMatrix(A);
}
Creates a matrix initialized by A .
10,861
/**
 * Creates a matrix of all zeros. Delegates to the native NLMatrix
 * implementation when available, falling back to the pure Java matrix.
 */
public static DenseMatrix matrix(int nrows, int ncols) {
    if (nlmatrixZeros != null) {
        try {
            return (DenseMatrix) nlmatrixZeros.newInstance(nrows, ncols);
        } catch (Exception e) {
            // FIX: pass the exception as the trailing throwable argument
            // (no placeholder) so SLF4J logs the full stack trace.
            logger.error("Failed to call NLMatrix(int, int)", e);
        }
    }

    return new JMatrix(nrows, ncols);
}
Creates a matrix of all zeros .
10,862
/**
 * Creates a matrix filled with the given value. Delegates to the native
 * NLMatrix implementation when available, falling back to pure Java.
 */
public static DenseMatrix matrix(int nrows, int ncols, double value) {
    if (nlmatrixOnes != null) {
        try {
            return (DenseMatrix) nlmatrixOnes.newInstance(nrows, ncols, value);
        } catch (Exception e) {
            // FIX: pass the exception as the trailing throwable argument
            // (no placeholder) so SLF4J logs the full stack trace.
            logger.error("Failed to call NLMatrix(int, int, double)", e);
        }
    }

    return new JMatrix(nrows, ncols, value);
}
Creates a matrix filled with given value .
10,863
/**
 * Create a plot canvas with the one-sample Q-Q plot against the standard
 * normal distribution. The x-axis holds the sample quantiles of x and
 * the y-axis the corresponding normal quantiles.
 */
public static PlotCanvas plot(double[] x) {
    GaussianDistribution gauss = GaussianDistribution.getInstance();
    double n = x.length;

    double[] lowerBound = {Math.min(x), gauss.quantile(1.0 / (n + 1.0))};
    double[] upperBound = {Math.max(x), gauss.quantile(n / (n + 1.0))};

    PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
    canvas.add(new QQPlot(x));
    return canvas;
}
Create a plot canvas with the one sample Q - Q plot to standard normal distribution . The x - axis is the quantiles of x and the y - axis is the quantiles of normal distribution .
10,864
/**
 * Create a plot canvas with the two-sample Q-Q plot. The x-axis holds
 * the quantiles of x and the y-axis the quantiles of y.
 */
public static PlotCanvas plot(double[] x, double[] y) {
    double[] lo = {Math.min(x), Math.min(y)};
    double[] hi = {Math.max(x), Math.max(y)};

    PlotCanvas canvas = new PlotCanvas(lo, hi);
    canvas.add(new QQPlot(x, y));
    return canvas;
}
Create a plot canvas with the two sample Q - Q plot . The x - axis is the quantiles of x and the y - axis is the quantiles of y .
10,865
/**
 * Create a plot canvas with the one-sample Q-Q plot against the given
 * discrete distribution. The x-axis holds the sample quantiles of x and
 * the y-axis the quantiles of d.
 */
public static PlotCanvas plot(int[] x, DiscreteDistribution d) {
    double n = x.length;

    double[] lowerBound = {Math.min(x), d.quantile(1 / (n + 1.0))};
    double[] upperBound = {Math.max(x), d.quantile(n / (n + 1.0))};

    PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound);
    canvas.add(new QQPlot(x, d));
    return canvas;
}
Create a plot canvas with the one sample Q - Q plot to given distribution . The x - axis is the quantiles of x and the y - axis is the quantiles of given distribution .
10,866
/**
 * Pre-computes the sine and cosine of the rotation angles.
 */
private void precompute() {
    cosTheta = Math.cos(theta);
    sinTheta = Math.sin(theta);
    cosPhi = Math.cos(phi);
    sinPhi = Math.sin(phi);
}
Pre - computes sin and cos of rotation angles .
10,867
/**
 * Returns the camera coordinates of the given world point, rotating by
 * the pre-computed angles theta (azimuth) and phi (elevation).
 */
public double[] project(double[] xyz) {
    double[] camera = new double[3];

    camera[0] = cosTheta * xyz[1] - sinTheta * xyz[0];
    camera[1] = cosPhi * xyz[2] - sinPhi * cosTheta * xyz[0] - sinPhi * sinTheta * xyz[1];
    camera[2] = cosPhi * sinTheta * xyz[1] + sinPhi * xyz[2] + cosPhi * cosTheta * xyz[0];

    return camera;
}
Returns the camera coordinates .
10,868
/**
 * Returns the z-axis (depth) value of the given point in camera
 * coordinates.
 */
public double z(double[] xyz) {
    double depth = cosPhi * sinTheta * xyz[1];
    depth += sinPhi * xyz[2];
    depth += cosPhi * cosTheta * xyz[0];
    return depth;
}
Returns z - axis value in the camera coordinates .
10,869
/**
 * Rotates the plot, i.e. changes the view angle by the given mouse
 * deltas (scaled down by 100).
 */
public void rotate(double t, double p) {
    theta -= t / 100;
    phi += p / 100;
    precompute();
    reset();
}
Rotates the plot i . e . change the view angle .
10,870
/**
 * Returns the sign of the permutation encoded by the (1-based) pivot
 * vector: +1 for an even number of row swaps, -1 for odd.
 */
private static int pivsign(int[] piv, int n) {
    int sign = 1;
    for (int i = 0; i < n; i++) {
        if (piv[i] != i + 1) {
            sign = -sign;
        }
    }
    return sign;
}
Returns the pivot sign .
10,871
/**
 * Returns the matrix inverse computed from this LU decomposition via
 * LAPACK DGETRI. The LU matrix is overwritten in place with the inverse
 * of the original matrix.
 *
 * @throws IllegalArgumentException if the matrix is not square or
 *         DGETRI reports an error.
 */
public DenseMatrix inverse() {
    int m = lu.nrows();
    int n = lu.ncols();

    if (m != n) {
        throw new IllegalArgumentException(String.format("Matrix is not square: %d x %d", m, n));
    }

    // Query the optimal block size for DGETRI to size the workspace.
    int nb = LAPACK.getInstance().ilaenv(1, "DGETRI", "", n, -1, -1, -1);
    if (nb < 0) {
        logger.warn("LAPACK ILAENV error code: {}", nb);
    }
    if (nb < 1) nb = 1; // fall back to an unblocked workspace

    int lwork = lu.ncols() * nb;
    double[] work = new double[lwork];
    intW info = new intW(0);
    LAPACK.getInstance().dgetri(lu.ncols(), lu.data(), lu.ld(), piv, work, lwork, info);

    if (info.val != 0) {
        logger.error("LAPACK DGETRI error code: {}", info.val);
        throw new IllegalArgumentException("LAPACK DGETRI error code: " + info.val);
    }

    return lu;
}
Returns the matrix inverse . The LU matrix will overwritten with the inverse of the original matrix .
10,872
/**
 * Builds the cover tree by batch-inserting all points, using the first
 * data point as the initial root candidate.
 */
private void buildCoverTree() {
    ArrayList<DistanceSet> pointSet = new ArrayList<>();
    ArrayList<DistanceSet> consumedSet = new ArrayList<>();

    E point = data[0];
    int idx = 0;
    // Max distance from the root candidate to any other point; it fixes
    // the top scale of the tree.
    double maxDist = -1;

    // Record the distance from the root candidate to every other point.
    for (int i = 1; i < data.length; i++) {
        DistanceSet set = new DistanceSet(i);
        double dist = distance.d(point, data[i]);
        set.dist.add(dist);
        pointSet.add(set);
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    root = batchInsert(idx, getScale(maxDist), getScale(maxDist), pointSet, consumedSet);
}
Builds the cover tree .
10,873
/**
 * Returns the max distance of the reference point in the current node
 * to its children nodes (each child's last recorded distance).
 */
private double max(ArrayList<DistanceSet> v) {
    double result = 0.0;
    for (DistanceSet n : v) {
        double last = n.dist.get(n.dist.size() - 1);
        if (last > result) {
            result = last;
        }
    }
    return result;
}
Returns the max distance of the reference point p in current node to it s children nodes .
10,874
/**
 * Compute the symbolic derivative of the expression.
 */
public static String diff(String expression) throws InvalidExpressionException {
    ExpressionTree tree = parseToTree(expression);
    tree.derive();
    tree.reduce();
    return tree.toString();
}
Compute the symbolic derivative .
10,875
/**
 * Compute the numeric derivative of the expression.
 *
 * NOTE(review): the parameter {@code val} is never used — the derivative
 * is evaluated without substituting it for the variable. Confirm whether
 * the variable value should be injected into the tree before calling
 * getVal().
 */
public static final double diff(String expression, double val) throws InvalidExpressionException {
    ExpressionTree expTree = parseToTree(expression);
    expTree.derive();
    expTree.reduce();
    return expTree.getVal();
}
Compute the numeric derivative .
10,876
/**
 * Compute the symbolic derivative and reformat it for readability.
 */
public static String diffReadable(String expression) throws InvalidExpressionException {
    String derivative = diff(expression);
    return new ExpressionParser().format(derivative);
}
Compute the reformatted symbolic derivative .
10,877
/**
 * Rewrite the expression, eliminating redundant terms and simplifying.
 */
public static String rewrite(String expression) throws InvalidExpressionException {
    ExpressionTree tree = parseToTree(expression);
    tree.reduce();
    return tree.toString();
}
Rewrite the expression to eliminate redundant terms and simplify the expression .
10,878
/**
 * Parse a mathematical expression and form a binary expression tree.
 */
private static final ExpressionTree parseToTree(String expression) throws InvalidExpressionException {
    ExpressionParser parser = new ExpressionParser();
    parser.parse(expression);
    ExpressionTree tree = new ExpressionTree(parser.getVar(), parser.getTokens());
    return tree;
}
Parse a mathematical expression and form a binary expression tree .
10,879
/**
 * This function generates a random variate with the binomial
 * distribution. It delegates to a Poisson sampler for a vanishing mean,
 * to inversion by mode search for small means, and to a patchwork
 * rejection sampler otherwise. For p &gt; 0.5 the draw is taken with
 * success probability 1 - p and mirrored.
 */
public double rand() {
    double np = n * p;
    // For a vanishing mean the binomial is well approximated by Poisson.
    if (np < 1.E-6) {
        return PoissonDistribution.tinyLambdaRand(np);
    }

    boolean inv = false;
    if (p > 0.5) {
        // Sample with 1-p and invert the result at the end.
        inv = true;
    }

    // NOTE(review): the sampler is re-created on every call; caching it
    // per instance would avoid repeated setup cost — confirm intent.
    if (np < 55) {
        if (p <= 0.5) {
            rng = new ModeSearch(p);
        } else {
            rng = new ModeSearch(1.0 - p);
        }
    } else {
        if (p <= 0.5) {
            rng = new Patchwork(p);
        } else {
            rng = new Patchwork(1.0 - p);
        }
    }

    int x = rng.rand();
    if (inv) {
        x = n - x; // mirror the draw taken with 1-p
    }
    return x;
}
This function generates a random variate with the binomial distribution .
10,880
/**
 * Weighted edit distance between two character sequences, computed with
 * two rolling rows of the dynamic-programming matrix. When the band
 * radius ratio r is positive, cells outside the band are treated as
 * infinite cost.
 */
private double weightedEdit(char[] x, char[] y) {
    // Make x the longer sequence.
    if (x.length < y.length) {
        char[] swap = x;
        x = y;
        y = swap;
    }

    int radius = (int) Math.round(r * Math.max(x.length, y.length));

    // Two rolling rows of the DP matrix.
    double[][] d = new double[2][y.length + 1];

    d[0][0] = 0.0;
    for (int j = 1; j <= y.length; j++) {
        // BUG FIX: was weight[0][y[j]], which reads past the end of y at
        // j == y.length; the insertion cost of y[j-1] is intended.
        d[0][j] = d[0][j - 1] + weight[0][y[j - 1]];
    }

    for (int i = 1; i <= x.length; i++) {
        // BUG FIX: was weight[x[i]][0], out of bounds at i == x.length;
        // the deletion cost of x[i-1] is intended.
        d[1][0] = d[0][0] + weight[x[i - 1]][0];

        int start = 1;
        int end = y.length;
        if (radius > 0) {
            start = i - radius;
            if (start > 1)
                d[1][start - 1] = Double.POSITIVE_INFINITY;
            else
                start = 1;

            end = i + radius;
            if (end < y.length)
                d[1][end + 1] = Double.POSITIVE_INFINITY;
            else
                end = y.length;
        }

        for (int j = start; j <= end; j++) {
            double cost = weight[x[i - 1]][y[j - 1]];
            d[1][j] = Math.min(
                    d[0][j] + weight[x[i - 1]][0],     // deletion
                    d[1][j - 1] + weight[0][y[j - 1]], // insertion
                    d[0][j - 1] + cost);               // substitution
        }

        // Roll the rows.
        double[] swap = d[0];
        d[0] = d[1];
        d[1] = swap;
    }

    return d[0][y.length];
}
Weighted edit distance .
10,881
/**
 * Berghel &amp; Roach's extended Ukkonen's algorithm for the edit
 * distance. FKP[k][p] is the largest row reachable on diagonal k with
 * edit cost p; the cost budget p grows until the main diagonal (n - m)
 * reaches the end of the longer string.
 */
private int br(char[] x, char[] y) {
    // Ensure y is the longer string.
    if (x.length > y.length) {
        char[] swap = x;
        x = y;
        y = swap;
    }

    final int m = x.length;
    final int n = y.length;

    // Diagonals are indexed by k + ZERO_K so negative k fits the array.
    int ZERO_K = n;

    // Grow the shared FKP table if this pair needs more columns.
    if (n + 2 > FKP[0].length)
        FKP = new int[2 * n + 1][n + 2];

    // Boundary values for the negative diagonals.
    for (int k = -ZERO_K; k < 0; k++) {
        int p = -k - 1;
        FKP[k + ZERO_K][p + 1] = Math.abs(k) - 1;
        FKP[k + ZERO_K][p] = -Integer.MAX_VALUE;
    }

    FKP[ZERO_K][0] = -1;

    // Boundary values for the positive diagonals.
    for (int k = 1; k <= ZERO_K; k++) {
        int p = k - 1;
        FKP[k + ZERO_K][p + 1] = -1;
        FKP[k + ZERO_K][p] = -Integer.MAX_VALUE;
    }

    // Expand the cost budget until diagonal n-m reaches row m.
    int p = n - m - 1;
    do {
        p++;
        // Fill the band of diagonals reachable with cost p.
        for (int i = (p - (n - m)) / 2; i >= 1; i--) {
            brf.f(x, y, FKP, ZERO_K, n - m + i, p - i);
        }
        for (int i = (n - m + p) / 2; i >= 1; i--) {
            brf.f(x, y, FKP, ZERO_K, n - m - i, p - i);
        }
        brf.f(x, y, FKP, ZERO_K, n - m, p);
    } while (FKP[(n - m) + ZERO_K][p] != m);

    return p - 1;
}
Berghel & Roach s extended Ukkonen s algorithm .
10,882
/**
 * Initialize the internal variables: default x/y grid coordinates when
 * absent, and a robust color range from roughly the 1st and 99th
 * percentiles of the finite z values.
 */
private void init() {
    // Default x coordinates: cell centers 0.5, 1.5, ...
    if (x == null) {
        x = new double[z[0].length];
        for (int i = 0; i < x.length; i++) {
            x[i] = i + 0.5;
        }
    }

    // Default y coordinates: top row first, cell centers going down.
    if (y == null) {
        y = new double[z.length];
        for (int i = 0; i < y.length; i++) {
            y[i] = y.length - i - 0.5;
        }
    }

    // Collect the non-NaN values of z; i ends up as their count.
    int n = z.length * z[0].length;
    double[] values = new double[n];
    int i = 0;
    for (double[] zi : z) {
        for (double zij : zi) {
            if (!Double.isNaN(zij)) {
                values[i++] = zij;
            }
        }
    }

    if (i > 0) {
        Arrays.sort(values, 0, i);
        // Clip to the ~1st and ~99th percentiles to resist outliers;
        // width is the value span covered by one palette entry.
        min = values[(int) Math.round(0.01 * i)];
        max = values[(int) Math.round(0.99 * (i - 1))];
        width = (max - min) / palette.length;
    }
}
Initialize the internal variables .
10,883
/**
 * Returns the difference of this complex number and b, i.e. this - b.
 */
public Complex minus(Complex b) {
    double real = re - b.re;
    double imag = im - b.im;
    return new Complex(real, imag);
}
Returns this - b .
10,884
/**
 * Returns the reciprocal 1 / (re + i*im), computed via the squared
 * magnitude.
 */
public Complex reciprocal() {
    double norm = re * re + im * im;
    return new Complex(re / norm, -im / norm);
}
Returns the reciprocal .
10,885
/**
 * Returns the complex exponential e^(re + i*im).
 */
public Complex exp() {
    double magnitude = Math.exp(re);
    return new Complex(magnitude * Math.cos(im), magnitude * Math.sin(im));
}
Returns the complex exponential .
10,886
/**
 * Returns the complex sine of this number.
 */
public Complex sin() {
    double real = Math.sin(re) * Math.cosh(im);
    double imag = Math.cos(re) * Math.sinh(im);
    return new Complex(real, imag);
}
Returns the complex sine .
10,887
/**
 * Transform the original response array by padding p zeros at the tail
 * (elastic net augmentation; new slots default to 0.0).
 */
private double[] getAugmentedResponse(double[] y) {
    double[] padded = new double[y.length + p];
    System.arraycopy(y, 0, padded, 0, y.length);
    return padded;
}
Transform the original response array by padding zeros at the tail .
10,888
/**
 * Transform the original data array by scaling it by c and appending a
 * weighted identity matrix (c * sqrt(lambda2) * I) below it.
 */
private double[][] getAugmentedData(double[][] x) {
    double[][] augmented = new double[x.length + p][p];
    double padding = c * Math.sqrt(lambda2);

    // Scaled copy of the original rows.
    for (int row = 0; row < x.length; row++) {
        for (int col = 0; col < p; col++) {
            augmented[row][col] = c * x[row][col];
        }
    }

    // Appended rows form padding * I.
    for (int row = x.length; row < augmented.length; row++) {
        augmented[row][row - x.length] = padding;
    }

    return augmented;
}
Transform the original data array by padding a weighted identity matrix and multiplying by a scaling factor .
10,889
/**
 * Parse a binary sparse dataset from the given URI.
 */
public BinarySparseDataset parse(String name, URI uri) throws IOException, ParseException {
    File file = new File(uri);
    return parse(name, file);
}
Parse a binary sparse dataset from given URI .
10,890
/**
 * Parse a binary sparse dataset from an input stream. Each line lists
 * the indices of the nonzero elements of one sample, separated by
 * whitespace; duplicates are collapsed and the indices sorted.
 *
 * @throws IOException if the stream is empty.
 */
public BinarySparseDataset parse(String name, InputStream stream) throws IOException, ParseException {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
        BinarySparseDataset sparse = new BinarySparseDataset(name);

        String line = reader.readLine();
        if (line == null) {
            throw new IOException("Empty data source.");
        }

        // Reused across lines to deduplicate the indices of one sample.
        Set<Integer> items = new HashSet<>();
        do {
            // BUG FIX: the original used `continue` on an empty line inside
            // this do-while without reading the next line, looping forever.
            line = line.trim();
            if (!line.isEmpty()) {
                String[] s = line.split("\\s+");

                items.clear();
                for (int i = 0; i < s.length; i++) {
                    items.add(Integer.parseInt(s[i]));
                }

                int j = 0;
                int[] point = new int[items.size()];
                for (int i : items) {
                    point[j++] = i;
                }

                Arrays.sort(point);
                sparse.add(point);
            }

            line = reader.readLine();
        } while (line != null);

        return sparse;
    }
}
Parse a binary sparse dataset from an input stream .
10,891
/**
 * Create a plot canvas with the pseudo hexmap plot of the given data,
 * hiding the frame, labels, and grid of both axes.
 */
public static PlotCanvas plot(double[][] data) {
    double[] lowerBound = {-0.5, 0.36};
    double[] upperBound = {data[0].length, data.length * 0.87 + 0.5};

    PlotCanvas canvas = new PlotCanvas(lowerBound, upperBound, false);
    canvas.add(new Hexmap(data));

    for (int axis = 0; axis < 2; axis++) {
        canvas.getAxis(axis).setFrameVisible(false);
        canvas.getAxis(axis).setLabelVisible(false);
        canvas.getAxis(axis).setGridVisible(false);
    }

    return canvas;
}
Create a plot canvas with the pseudo hexmap plot of given data .
10,892
/**
 * Invoked when editing stops: commits the pending edit if it is valid,
 * otherwise beeps, selects the text for correction, and refuses to stop.
 * When the editor may close, the superclass's version of this method is
 * invoked so that everything gets cleaned up.
 */
public boolean stopCellEditing() {
    JFormattedTextField ftf = (JFormattedTextField) getComponent();
    if (ftf.isEditValid()) {
        try {
            ftf.commitEdit();
        } catch (java.text.ParseException ex) {
            // Deliberately ignored: isEditValid() reported the edit as
            // valid, so a parse failure here is not expected.
        }
    } else {
        // Invalid edit: alert the user and keep the editor open.
        Toolkit.getDefaultToolkit().beep();
        // NOTE(review): selects `textField` while the commit above uses
        // the local `ftf`; presumably both refer to the same component —
        // confirm against the enclosing class.
        textField.selectAll();
        return false;
    }
    return super.stopCellEditing();
}
Commits or rejects the pending edit ; when the editor may close , invokes the superclass s version of this method so that everything gets cleaned up .
10,893
/**
 * Calculate the bar width and the corner positions of each bar. The bar
 * width is the smallest gap between adjacent x positions.
 */
private void init() {
    // Smallest spacing between consecutive data points fixes the width.
    width = Double.MAX_VALUE;
    for (int i = 1; i < data.length; i++) {
        double w = Math.abs(data[i][0] - data[i - 1][0]);
        if (width > w) {
            width = w;
        }
    }

    // Pre-compute the four corners of every bar, centered on data[i][0]
    // with top at data[i][1] and base at y = 0.
    leftTop = new double[data.length][2];
    rightTop = new double[data.length][2];
    leftBottom = new double[data.length][2];
    rightBottom = new double[data.length][2];

    for (int i = 0; i < data.length; i++) {
        leftTop[i][0] = data[i][0] - width / 2;
        leftTop[i][1] = data[i][1];

        rightTop[i][0] = data[i][0] + width / 2;
        rightTop[i][1] = data[i][1];

        leftBottom[i][0] = data[i][0] - width / 2;
        leftBottom[i][1] = 0;

        rightBottom[i][0] = data[i][0] + width / 2;
        rightBottom[i][1] = 0;
    }
}
Calculate bar width and position .
10,894
/**
 * Generates a random variate of the hypergeometric distribution. Reduces
 * the parameters via the distribution's symmetries, then uses inversion
 * by chop-down search from the mode for a small mean and the
 * patchwork-rejection method for a large mean. The chosen sampler is
 * cached after the first call.
 */
public double rand() {
    int mm = m;
    int nn = n;

    // Symmetry reductions keep the working parameters at most N/2,
    // with nn <= mm.
    if (mm > N / 2) {
        mm = N - mm;
    }
    if (nn > N / 2) {
        nn = N - nn;
    }
    if (nn > mm) {
        int swap = nn;
        nn = mm;
        mm = swap;
    }

    // Lazily pick the sampler based on the (reduced) mean nn*mm/N.
    if (rng == null) {
        if ((double) nn * mm >= 20 * N) {
            rng = new Patchwork(N, mm, nn);
        } else {
            rng = new Inversion(N, mm, nn);
        }
    }

    // NOTE(review): the reductions only affect the sampler's parameters;
    // the variate is not mapped back through the symmetries here —
    // confirm against the reference implementation.
    return rng.rand();
}
Uses inversion by chop - down search from the mode when the mean &lt ; 20 and the patchwork - rejection method when the mean &gt ; 20 .
10,895
/**
 * Initialize the linkage with the lower triangular proximity matrix,
 * packed column by column into a flat float array.
 */
void init(double[][] proximity) {
    size = proximity.length;
    this.proximity = new float[size * (size + 1) / 2];

    int k = 0;
    for (int col = 0; col < size; col++) {
        for (int row = col; row < size; row++) {
            this.proximity[k++] = (float) proximity[row][col];
        }
    }
}
Initialize the linkage with the lower triangular proximity matrix .
10,896
/**
 * Minkowski distance of order p between two integer arrays, optionally
 * feature-weighted.
 *
 * @throws IllegalArgumentException if the arrays (or the weight vector)
 *         differ in length.
 */
public double d(int[] x, int[] y) {
    if (x.length != y.length)
        throw new IllegalArgumentException(String.format("Arrays have different length: x[%d], y[%d]", x.length, y.length));

    double sum = 0.0;

    if (weight == null) {
        for (int i = 0; i < x.length; i++) {
            double diff = Math.abs(x[i] - y[i]);
            sum += Math.pow(diff, p);
        }
    } else {
        if (x.length != weight.length)
            throw new IllegalArgumentException(String.format("Input vectors and weight vector have different length: %d, %d", x.length, weight.length));

        for (int i = 0; i < x.length; i++) {
            double diff = Math.abs(x[i] - y[i]);
            sum += weight[i] * Math.pow(diff, p);
        }
    }

    return Math.pow(sum, 1.0 / p);
}
Minkowski distance between the two arrays of type integer .
10,897
/**
 * Returns the upper triangular factor R of the QR decomposition. The
 * strict upper triangle is read from qr; the diagonal of R is stored
 * separately in tau.
 */
public DenseMatrix getR() {
    int n = qr.ncols();
    DenseMatrix R = Matrix.zeros(n, n);

    for (int i = 0; i < n; i++) {
        R.set(i, i, tau[i]); // diagonal entries live in tau
        for (int j = i + 1; j < n; j++) {
            R.set(i, j, qr.get(i, j));
        }
    }

    return R;
}
Returns the upper triangular factor .
10,898
/**
 * Returns the orthogonal factor Q of the QR decomposition by
 * accumulating the Householder reflectors stored in the lower trapezoid
 * of qr, applied from the last column backwards.
 */
public DenseMatrix getQ() {
    int m = qr.nrows();
    int n = qr.ncols();
    DenseMatrix Q = Matrix.zeros(m, n);

    for (int k = n - 1; k >= 0; k--) {
        Q.set(k, k, 1.0);
        for (int j = k; j < n; j++) {
            if (qr.get(k, k) != 0) {
                // Apply the k-th Householder reflector to column j of Q.
                double s = 0.0;
                for (int i = k; i < m; i++) {
                    s += qr.get(i, k) * Q.get(i, j);
                }
                s = -s / qr.get(k, k);
                for (int i = k; i < m; i++) {
                    Q.add(i, j, s * qr.get(i, k));
                }
            }
        }
    }
    return Q;
}
Returns the orthogonal factor .
10,899
/**
 * Parse a sparse dataset from an input stream. The format is one triplet
 * per line: row index, column index, value. Up to two leading header
 * lines with fewer than three tokens are skipped.
 *
 * @throws ParseException if a data line does not have exactly 3 tokens.
 */
public SparseDataset parse(String name, InputStream stream) throws IOException, ParseException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
    try {
        // Skip optional header lines (only the first 3 lines are probed)
        // until a line with at least 3 tokens is found.
        int nrow = 1;
        String line = reader.readLine();
        for (; nrow <= 3 && line != null; nrow++) {
            String[] tokens = line.trim().split(" ");
            if (tokens.length >= 3) {
                break;
            }
            line = reader.readLine();
        }

        if (line == null) {
            throw new IOException("Empty data source.");
        }

        SparseDataset sparse = new SparseDataset(name);
        do {
            String[] tokens = line.trim().split(" ");
            if (tokens.length != 3) {
                throw new ParseException("Invalid number of tokens.", nrow);
            }

            // arrayIndexOrigin converts the file's index base (e.g. 1)
            // to the dataset's 0-based indices.
            int d = Integer.parseInt(tokens[0]) - arrayIndexOrigin;
            int w = Integer.parseInt(tokens[1]) - arrayIndexOrigin;
            double c = Double.parseDouble(tokens[2]);
            sparse.set(d, w, c);

            line = reader.readLine();
            nrow++;
        } while (line != null);

        return sparse;
    } finally {
        reader.close();
    }
}
Parse a sparse dataset from an input stream .