idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
11,600 | public static boolean isH2Connection ( Settings configuration ) { final String connStr ; try { connStr = configuration . getConnectionString ( Settings . KEYS . DB_CONNECTION_STRING , Settings . KEYS . DB_FILE_NAME ) ; } catch ( IOException ex ) { LOGGER . debug ( "Unable to get connectionn string" , ex ) ; return false ; } return connStr . startsWith ( "jdbc:h2:file:" ) ; } | Determines if the connection string is for an H2 database . |
11,601 | private void ensureSchemaVersion ( Connection conn ) throws DatabaseException { ResultSet rs = null ; PreparedStatement ps = null ; try { ps = conn . prepareStatement ( "SELECT value FROM properties WHERE id = 'version'" ) ; rs = ps . executeQuery ( ) ; if ( rs . next ( ) ) { final String dbSchemaVersion = settings . getString ( Settings . KEYS . DB_VERSION ) ; final DependencyVersion appDbVersion = DependencyVersionUtil . parseVersion ( dbSchemaVersion ) ; if ( appDbVersion == null ) { throw new DatabaseException ( "Invalid application database schema" ) ; } final DependencyVersion db = DependencyVersionUtil . parseVersion ( rs . getString ( 1 ) ) ; if ( db == null ) { throw new DatabaseException ( "Invalid database schema" ) ; } LOGGER . debug ( "DC Schema: {}" , appDbVersion . toString ( ) ) ; LOGGER . debug ( "DB Schema: {}" , db . toString ( ) ) ; if ( appDbVersion . compareTo ( db ) > 0 ) { updateSchema ( conn , appDbVersion , db ) ; if ( ++ callDepth < 10 ) { ensureSchemaVersion ( conn ) ; } } } else { throw new DatabaseException ( "Database schema is missing" ) ; } } catch ( SQLException ex ) { LOGGER . debug ( "" , ex ) ; throw new DatabaseException ( "Unable to check the database schema version" , ex ) ; } finally { DBUtils . closeResultSet ( rs ) ; DBUtils . closeStatement ( ps ) ; } } | Uses the provided connection to check the specified schema version within the database . |
11,602 | private boolean checkEnabled ( ) { boolean retval = false ; try { if ( ! DEFAULT_URL . equals ( getSettings ( ) . getString ( Settings . KEYS . ANALYZER_NEXUS_URL ) ) && getSettings ( ) . getBoolean ( Settings . KEYS . ANALYZER_NEXUS_ENABLED ) ) { LOGGER . info ( "Enabling Nexus analyzer" ) ; retval = true ; } else { LOGGER . debug ( "Nexus analyzer disabled, using Central instead" ) ; } } catch ( InvalidSettingException ise ) { LOGGER . warn ( "Invalid setting. Disabling Nexus analyzer" ) ; } return retval ; } | Determines if this analyzer is enabled |
11,603 | public boolean useProxy ( ) { try { return getSettings ( ) . getString ( Settings . KEYS . PROXY_SERVER ) != null && getSettings ( ) . getBoolean ( Settings . KEYS . ANALYZER_NEXUS_USES_PROXY ) ; } catch ( InvalidSettingException ise ) { LOGGER . warn ( "Failed to parse proxy settings." , ise ) ; return false ; } } | Determine if a proxy should be used for the Nexus Analyzer . |
11,604 | public boolean update ( Engine engine ) throws UpdateException { this . settings = engine . getSettings ( ) ; String url = null ; try { final boolean autoupdate = settings . getBoolean ( Settings . KEYS . AUTO_UPDATE , true ) ; final boolean enabled = settings . getBoolean ( Settings . KEYS . ANALYZER_RETIREJS_ENABLED , true ) ; final File repoFile = new File ( settings . getDataDirectory ( ) , "jsrepository.json" ) ; final boolean proceed = enabled && autoupdate && shouldUpdagte ( repoFile ) ; if ( proceed ) { LOGGER . debug ( "Begin RetireJS Update" ) ; url = settings . getString ( Settings . KEYS . ANALYZER_RETIREJS_REPO_JS_URL , DEFAULT_JS_URL ) ; initializeRetireJsRepo ( settings , new URL ( url ) ) ; } } catch ( InvalidSettingException ex ) { throw new UpdateException ( "Unable to determine if autoupdate is enabled" , ex ) ; } catch ( MalformedURLException ex ) { throw new UpdateException ( String . format ( "Inavlid URL for RetireJS repository (%s)" , url ) , ex ) ; } catch ( IOException ex ) { throw new UpdateException ( "Unable to get the data directory" , ex ) ; } return false ; } | Downloads the current RetireJS data source . |
11,605 | protected boolean shouldUpdagte ( File repo ) throws NumberFormatException { boolean proceed = true ; if ( repo != null && repo . isFile ( ) ) { final int validForHours = settings . getInt ( Settings . KEYS . ANALYZER_RETIREJS_REPO_VALID_FOR_HOURS , 0 ) ; final long lastUpdatedOn = repo . lastModified ( ) ; final long now = System . currentTimeMillis ( ) ; LOGGER . debug ( "Last updated: {}" , lastUpdatedOn ) ; LOGGER . debug ( "Now: {}" , now ) ; final long msValid = validForHours * 60L * 60L * 1000L ; proceed = ( now - lastUpdatedOn ) > msValid ; if ( ! proceed ) { LOGGER . info ( "Skipping RetireJS update since last update was within {} hours." , validForHours ) ; } } return proceed ; } | Determines if we should update the RetireJS database . |
11,606 | private void initializeRetireJsRepo ( Settings settings , URL repoUrl ) throws UpdateException { try { final File dataDir = settings . getDataDirectory ( ) ; final File tmpDir = settings . getTempDirectory ( ) ; boolean useProxy = false ; if ( null != settings . getString ( Settings . KEYS . PROXY_SERVER ) ) { useProxy = true ; LOGGER . debug ( "Using proxy" ) ; } LOGGER . debug ( "RetireJS Repo URL: {}" , repoUrl . toExternalForm ( ) ) ; final URLConnectionFactory factory = new URLConnectionFactory ( settings ) ; final HttpURLConnection conn = factory . createHttpURLConnection ( repoUrl , useProxy ) ; final String filename = repoUrl . getFile ( ) . substring ( repoUrl . getFile ( ) . lastIndexOf ( "/" ) + 1 , repoUrl . getFile ( ) . length ( ) ) ; if ( conn . getResponseCode ( ) == HttpURLConnection . HTTP_OK ) { final File tmpFile = new File ( tmpDir , filename ) ; final File repoFile = new File ( dataDir , filename ) ; try ( InputStream inputStream = conn . getInputStream ( ) ; FileOutputStream outputStream = new FileOutputStream ( tmpFile ) ) { IOUtils . copy ( inputStream , outputStream ) ; } Files . copy ( tmpFile . toPath ( ) , repoFile . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; if ( ! tmpFile . delete ( ) ) { tmpFile . deleteOnExit ( ) ; } } } catch ( IOException e ) { throw new UpdateException ( "Failed to initialize the RetireJS repo" , e ) ; } } | Initializes the local RetireJS repository |
11,607 | public final void prepare ( Engine engine ) throws InitializationException { if ( isEnabled ( ) ) { prepareAnalyzer ( engine ) ; } else { LOGGER . debug ( "{} has been disabled" , getName ( ) ) ; } } | Initialize the abstract analyzer . |
11,608 | public java . util . logging . Logger getParentLogger ( ) throws SQLFeatureNotSupportedException { Method m = null ; try { m = driver . getClass ( ) . getMethod ( "getParentLogger" ) ; } catch ( Throwable e ) { throw new SQLFeatureNotSupportedException ( ) ; } if ( m != null ) { try { return ( java . util . logging . Logger ) m . invoke ( m ) ; } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException ex ) { LOGGER . trace ( "" , ex ) ; } } throw new SQLFeatureNotSupportedException ( ) ; } | Wraps the call to the underlying driver s getParentLogger method . |
11,609 | public static int getGeneratedKey ( PreparedStatement statement ) throws DatabaseException { ResultSet rs = null ; int id = 0 ; try { rs = statement . getGeneratedKeys ( ) ; if ( ! rs . next ( ) ) { throw new DatabaseException ( "Unable to get primary key for inserted row" ) ; } id = rs . getInt ( 1 ) ; } catch ( SQLException ex ) { throw new DatabaseException ( "Unable to get primary key for inserted row" ) ; } finally { closeResultSet ( rs ) ; } return id ; } | Returns the generated integer primary key for a newly inserted row . |
11,610 | public static void closeStatement ( Statement statement ) { try { if ( statement != null && ! statement . isClosed ( ) ) { statement . close ( ) ; } } catch ( SQLException ex ) { LOGGER . trace ( statement . toString ( ) , ex ) ; } } | Closes the given statement object ignoring any exceptions that occur . |
11,611 | public static void closeResultSet ( ResultSet rs ) { try { if ( rs != null && ! rs . isClosed ( ) ) { rs . close ( ) ; } } catch ( SQLException ex ) { LOGGER . trace ( rs . toString ( ) , ex ) ; } } | Closes the result set capturing and ignoring any SQLExceptions that occur . |
11,612 | public String url ( String text ) { if ( text == null || text . isEmpty ( ) ) { return text ; } try { return URLEncoder . encode ( text , UTF_8 ) ; } catch ( UnsupportedEncodingException ex ) { LOGGER . warn ( "UTF-8 is not supported?" ) ; LOGGER . info ( "" , ex ) ; } return "" ; } | URL Encodes the provided text . |
11,613 | public String html ( String text ) { if ( text == null || text . isEmpty ( ) ) { return text ; } return StringEscapeUtils . escapeHtml4 ( text ) ; } | HTML Encodes the provided text . |
11,614 | public String xml ( String text ) { if ( text == null || text . isEmpty ( ) ) { return text ; } return StringEscapeUtils . escapeXml11 ( text ) ; } | XML Encodes the provided text . |
11,615 | public String json ( String text ) { if ( text == null || text . isEmpty ( ) ) { return text ; } return StringEscapeUtils . escapeJson ( text ) ; } | JSON Encodes the provided text . |
11,616 | public String javascript ( String text ) { if ( text == null || text . isEmpty ( ) ) { return text ; } return StringEscapeUtils . escapeEcmaScript ( text ) ; } | JavaScript encodes the provided text . |
11,617 | public String csvIdentifiers ( Set < Identifier > ids ) { if ( ids == null || ids . isEmpty ( ) ) { return "\"\"" ; } boolean addComma = false ; final StringBuilder sb = new StringBuilder ( ) ; for ( Identifier id : ids ) { if ( addComma ) { sb . append ( ", " ) ; } else { addComma = true ; } sb . append ( id . getValue ( ) ) ; } if ( sb . length ( ) == 0 ) { return "\"\"" ; } return StringEscapeUtils . escapeCsv ( sb . toString ( ) ) ; } | Takes a set of Identifiers , filters them to non-CPE , and formats them for display in a CSV . |
11,618 | protected ExceptionCollection scanDependencies ( final Engine engine ) throws MojoExecutionException { ExceptionCollection exCol = scanArtifacts ( getProject ( ) , engine , true ) ; for ( MavenProject childProject : getDescendants ( this . getProject ( ) ) ) { final ExceptionCollection ex = scanArtifacts ( childProject , engine , true ) ; if ( ex != null ) { if ( exCol == null ) { exCol = ex ; } exCol . getExceptions ( ) . addAll ( ex . getExceptions ( ) ) ; if ( ex . isFatal ( ) ) { exCol . setFatal ( true ) ; final String msg = String . format ( "Fatal exception(s) analyzing %s" , childProject . getName ( ) ) ; if ( this . isFailOnError ( ) ) { throw new MojoExecutionException ( msg , exCol ) ; } getLog ( ) . error ( msg ) ; if ( getLog ( ) . isDebugEnabled ( ) ) { getLog ( ) . debug ( exCol ) ; } } } } return exCol ; } | Scans the dependencies of the projects in aggregate . |
11,619 | public void closeAnalyzer ( ) throws Exception { if ( tempFileLocation != null && tempFileLocation . exists ( ) ) { LOGGER . debug ( "Attempting to delete temporary files from `{}`" , tempFileLocation . toString ( ) ) ; final boolean success = FileUtils . delete ( tempFileLocation ) ; if ( ! success && tempFileLocation . exists ( ) ) { final String [ ] l = tempFileLocation . list ( ) ; if ( l != null && l . length > 0 ) { LOGGER . warn ( "Failed to delete the Archive Analyzer's temporary files from `{}`, " + "see the log for more details" , tempFileLocation . toString ( ) ) ; } } } } | The close method deletes any temporary files and directories created during analysis . |
11,620 | private void extractAndAnalyze ( Dependency dependency , Engine engine , int scanDepth ) throws AnalysisException { final File f = new File ( dependency . getActualFilePath ( ) ) ; final File tmpDir = getNextTempDirectory ( ) ; extractFiles ( f , tmpDir , engine ) ; final List < Dependency > dependencySet = findMoreDependencies ( engine , tmpDir ) ; if ( dependencySet != null && ! dependencySet . isEmpty ( ) ) { for ( Dependency d : dependencySet ) { if ( d . getFilePath ( ) . startsWith ( tmpDir . getAbsolutePath ( ) ) ) { final String displayPath = String . format ( "%s%s" , dependency . getFilePath ( ) , d . getActualFilePath ( ) . substring ( tmpDir . getAbsolutePath ( ) . length ( ) ) ) ; final String displayName = String . format ( "%s: %s" , dependency . getFileName ( ) , d . getFileName ( ) ) ; d . setFilePath ( displayPath ) ; d . setFileName ( displayName ) ; d . addAllProjectReferences ( dependency . getProjectReferences ( ) ) ; if ( this . accept ( d . getActualFile ( ) ) && scanDepth < maxScanDepth ) { extractAndAnalyze ( d , engine , scanDepth + 1 ) ; } } else { dependencySet . stream ( ) . filter ( ( sub ) -> sub . getFilePath ( ) . startsWith ( tmpDir . getAbsolutePath ( ) ) ) . forEach ( ( sub ) -> { final String displayPath = String . format ( "%s%s" , dependency . getFilePath ( ) , sub . getActualFilePath ( ) . substring ( tmpDir . getAbsolutePath ( ) . length ( ) ) ) ; final String displayName = String . format ( "%s: %s" , dependency . getFileName ( ) , sub . getFileName ( ) ) ; sub . setFilePath ( displayPath ) ; sub . setFileName ( displayName ) ; } ) ; } } } } | Extracts the contents of the archive dependency and scans for additional dependencies . |
11,621 | private void collectMetadataFromArchiveFormat ( Dependency dependency , FilenameFilter folderFilter , FilenameFilter metadataFilter ) throws AnalysisException { final File temp = getNextTempDirectory ( ) ; LOGGER . debug ( "{} exists? {}" , temp , temp . exists ( ) ) ; try { ExtractionUtil . extractFilesUsingFilter ( new File ( dependency . getActualFilePath ( ) ) , temp , metadataFilter ) ; } catch ( ExtractionException ex ) { throw new AnalysisException ( ex ) ; } File matchingFile = getMatchingFile ( temp , folderFilter ) ; if ( matchingFile != null ) { matchingFile = getMatchingFile ( matchingFile , metadataFilter ) ; if ( matchingFile != null ) { collectWheelMetadata ( dependency , matchingFile ) ; } } } | Collects the meta data from an archive . |
11,622 | protected void prepareFileTypeAnalyzer ( Engine engine ) throws InitializationException { try { final File baseDir = getSettings ( ) . getTempDirectory ( ) ; tempFileLocation = File . createTempFile ( "check" , "tmp" , baseDir ) ; if ( ! tempFileLocation . delete ( ) ) { setEnabled ( false ) ; final String msg = String . format ( "Unable to delete temporary file '%s'." , tempFileLocation . getAbsolutePath ( ) ) ; throw new InitializationException ( msg ) ; } if ( ! tempFileLocation . mkdirs ( ) ) { setEnabled ( false ) ; final String msg = String . format ( "Unable to create directory '%s'." , tempFileLocation . getAbsolutePath ( ) ) ; throw new InitializationException ( msg ) ; } } catch ( IOException ex ) { setEnabled ( false ) ; throw new InitializationException ( "Unable to create a temporary file" , ex ) ; } } | Makes sure a usable temporary directory is available . |
11,623 | private static void collectWheelMetadata ( Dependency dependency , File file ) { final InternetHeaders headers = getManifestProperties ( file ) ; addPropertyToEvidence ( dependency , EvidenceType . VERSION , Confidence . HIGHEST , headers , "Version" ) ; addPropertyToEvidence ( dependency , EvidenceType . PRODUCT , Confidence . HIGHEST , headers , "Name" ) ; addPropertyToEvidence ( dependency , EvidenceType . PRODUCT , Confidence . MEDIUM , headers , "Name" ) ; final String name = headers . getHeader ( "Name" , null ) ; final String version = headers . getHeader ( "Version" , null ) ; final String packagePath = String . format ( "%s:%s" , name , version ) ; dependency . setName ( name ) ; dependency . setVersion ( version ) ; dependency . setPackagePath ( packagePath ) ; dependency . setDisplayFileName ( packagePath ) ; final String url = headers . getHeader ( "Home-page" , null ) ; if ( StringUtils . isNotBlank ( url ) ) { if ( UrlStringUtils . isUrl ( url ) ) { dependency . addEvidence ( EvidenceType . VENDOR , METADATA , "vendor" , url , Confidence . MEDIUM ) ; } } addPropertyToEvidence ( dependency , EvidenceType . VENDOR , Confidence . LOW , headers , "Author" ) ; final String summary = headers . getHeader ( "Summary" , null ) ; if ( StringUtils . isNotBlank ( summary ) ) { JarAnalyzer . addDescription ( dependency , summary , METADATA , "summary" ) ; } try { final PackageURL purl = PackageURLBuilder . aPackageURL ( ) . withType ( "pypi" ) . withName ( name ) . withVersion ( version ) . build ( ) ; dependency . addSoftwareIdentifier ( new PurlIdentifier ( purl , Confidence . HIGHEST ) ) ; } catch ( MalformedPackageURLException ex ) { LOGGER . debug ( "Unable to build package url for python" , ex ) ; final GenericIdentifier id = new GenericIdentifier ( "generic:" + name + "@" + version , Confidence . HIGHEST ) ; dependency . addSoftwareIdentifier ( id ) ; } } | Gathers evidence from the METADATA file . |
11,624 | private static void addPropertyToEvidence ( Dependency dependency , EvidenceType type , Confidence confidence , InternetHeaders headers , String property ) { final String value = headers . getHeader ( property , null ) ; LOGGER . debug ( "Property: {}, Value: {}" , property , value ) ; if ( StringUtils . isNotBlank ( value ) ) { dependency . addEvidence ( type , METADATA , property , value , confidence ) ; } } | Adds a value to the evidence collection . |
11,625 | private static File getMatchingFile ( File folder , FilenameFilter filter ) { File result = null ; final File [ ] matches = folder . listFiles ( filter ) ; if ( null != matches && 1 == matches . length ) { result = matches [ 0 ] ; } return result ; } | Returns a list of files that match the given filter this does not recursively scan the directory . |
11,626 | private static InternetHeaders getManifestProperties ( File manifest ) { final InternetHeaders result = new InternetHeaders ( ) ; if ( null == manifest ) { LOGGER . debug ( "Manifest file not found." ) ; } else { try ( InputStream in = new BufferedInputStream ( new FileInputStream ( manifest ) ) ) { result . load ( in ) ; } catch ( MessagingException | FileNotFoundException e ) { LOGGER . warn ( e . getMessage ( ) , e ) ; } catch ( IOException ex ) { LOGGER . warn ( ex . getMessage ( ) , ex ) ; } } return result ; } | Reads the manifest entries from the provided file . |
11,627 | private File getNextTempDirectory ( ) throws AnalysisException { File directory ; do { final int dirCount = DIR_COUNT . incrementAndGet ( ) ; directory = new File ( tempFileLocation , String . valueOf ( dirCount ) ) ; } while ( directory . exists ( ) ) ; if ( ! directory . mkdirs ( ) ) { throw new AnalysisException ( String . format ( "Unable to create temp directory '%s'." , directory . getAbsolutePath ( ) ) ) ; } return directory ; } | Retrieves the next temporary destination directory for extracting an archive . |
11,628 | public void execute ( ) throws BuildException { populateSettings ( ) ; final File db ; try { db = new File ( getSettings ( ) . getDataDirectory ( ) , getSettings ( ) . getString ( Settings . KEYS . DB_FILE_NAME , "odc.mv.db" ) ) ; if ( db . exists ( ) ) { if ( db . delete ( ) ) { log ( "Database file purged; local copy of the NVD has been removed" , Project . MSG_INFO ) ; } else { final String msg = String . format ( "Unable to delete '%s'; please delete the file manually" , db . getAbsolutePath ( ) ) ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } } else { final String msg = String . format ( "Unable to purge database; the database file does not exist: %s" , db . getAbsolutePath ( ) ) ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } } catch ( IOException ex ) { final String msg = "Unable to delete the database" ; if ( this . failOnError ) { throw new BuildException ( msg ) ; } log ( msg , Project . MSG_ERR ) ; } finally { settings . cleanup ( true ) ; } } | Executes the dependency - check purge to delete the existing local copy of the NVD CVE data . |
11,629 | public boolean matches ( String text ) { if ( text == null ) { return false ; } if ( this . regex ) { final Pattern rx ; if ( this . caseSensitive ) { rx = Pattern . compile ( this . value ) ; } else { rx = Pattern . compile ( this . value , Pattern . CASE_INSENSITIVE ) ; } return rx . matcher ( text ) . matches ( ) ; } else { if ( this . caseSensitive ) { return value . equals ( text ) ; } else { return value . equalsIgnoreCase ( text ) ; } } } | Uses the object s properties to determine if the supplied string matches the value of this property . |
11,630 | private synchronized Resources getPath ( ) { if ( path == null ) { path = new Resources ( getProject ( ) ) ; path . setCache ( true ) ; } return path ; } | Returns the path . If the path has not been initialized yet this class is synchronized and will instantiate the path object . |
11,631 | private void dealWithReferences ( ) throws BuildException { if ( isReference ( ) ) { final Object o = refId . getReferencedObject ( getProject ( ) ) ; if ( ! ( o instanceof ResourceCollection ) ) { throw new BuildException ( "refId '" + refId . getRefId ( ) + "' does not refer to a resource collection." ) ; } getPath ( ) . add ( ( ResourceCollection ) o ) ; } } | If this is a reference this method will add the referenced resource collection to the collection of paths . |
11,632 | public void setNspAnalyzerEnabled ( Boolean nodeAnalyzerEnabled ) { log ( "The NspAnalyzerEnabled configuration has been deprecated and replaced by NodeAuditAnalyzerEnabled" , Project . MSG_ERR ) ; log ( "The NspAnalyzerEnabled configuration will be removed in the next major release" ) ; this . nodeAnalyzerEnabled = nodeAnalyzerEnabled ; } | Set the value of nodeAnalyzerEnabled . |
11,633 | public void lock ( ) throws H2DBLockException { try { final File dir = settings . getDataDirectory ( ) ; lockFile = new File ( dir , "odc.update.lock" ) ; checkState ( ) ; int ctr = 0 ; do { try { if ( ! lockFile . exists ( ) && lockFile . createNewFile ( ) ) { file = new RandomAccessFile ( lockFile , "rw" ) ; lock = file . getChannel ( ) . lock ( ) ; file . writeBytes ( magic ) ; file . getChannel ( ) . force ( true ) ; Thread . sleep ( 20 ) ; file . seek ( 0 ) ; final String current = file . readLine ( ) ; if ( current != null && ! current . equals ( magic ) ) { lock . close ( ) ; lock = null ; LOGGER . debug ( "Another process obtained a lock first ({})" , Thread . currentThread ( ) . getName ( ) ) ; } else { addShutdownHook ( ) ; final Timestamp timestamp = new Timestamp ( System . currentTimeMillis ( ) ) ; LOGGER . debug ( "Lock file created ({}) {} @ {}" , Thread . currentThread ( ) . getName ( ) , magic , timestamp . toString ( ) ) ; } } } catch ( IOException | InterruptedException ex ) { LOGGER . trace ( "Expected error as another thread has likely locked the file" , ex ) ; } finally { if ( lock == null && file != null ) { try { file . close ( ) ; file = null ; } catch ( IOException ex ) { LOGGER . trace ( "Unable to close the lock file" , ex ) ; } } } if ( lock == null || ! lock . isValid ( ) ) { try { final Timestamp timestamp = new Timestamp ( System . currentTimeMillis ( ) ) ; LOGGER . debug ( "Sleeping thread {} ({}) for {} seconds because an exclusive lock on the database could not be obtained ({})" , Thread . currentThread ( ) . getName ( ) , magic , SLEEP_DURATION / 1000 , timestamp . toString ( ) ) ; Thread . sleep ( SLEEP_DURATION ) ; } catch ( InterruptedException ex ) { LOGGER . debug ( "sleep was interrupted." , ex ) ; Thread . currentThread ( ) . interrupt ( ) ; } } } while ( ++ ctr < MAX_SLEEP_COUNT && ( lock == null || ! lock . isValid ( ) ) ) ; if ( lock == null || ! lock . 
isValid ( ) ) { throw new H2DBLockException ( "Unable to obtain the update lock, skipping the database update. Skippinig the database update." ) ; } } catch ( IOException ex ) { throw new H2DBLockException ( ex . getMessage ( ) , ex ) ; } } | Obtains a lock on the H2 database . |
11,634 | private void checkState ( ) throws H2DBLockException { if ( ! lockFile . getParentFile ( ) . isDirectory ( ) && ! lockFile . mkdir ( ) ) { throw new H2DBLockException ( "Unable to create path to data directory." ) ; } if ( lockFile . isFile ( ) ) { if ( getFileAge ( lockFile ) > 30 ) { LOGGER . debug ( "An old db update lock file was found: {}" , lockFile . getAbsolutePath ( ) ) ; if ( ! lockFile . delete ( ) ) { LOGGER . warn ( "An old db update lock file was found but the system was unable to delete " + "the file. Consider manually deleting {}" , lockFile . getAbsolutePath ( ) ) ; } } else { LOGGER . info ( "Lock file found `{}`" , lockFile ) ; LOGGER . info ( "Existing update in progress; waiting for update to complete" ) ; } } } | Checks the state of the custom h2 lock file and under some conditions will attempt to remove the lock file . |
11,635 | public void release ( ) { if ( lock != null ) { try { lock . release ( ) ; lock = null ; } catch ( IOException ex ) { LOGGER . debug ( "Failed to release lock" , ex ) ; } } if ( file != null ) { try { file . close ( ) ; file = null ; } catch ( IOException ex ) { LOGGER . debug ( "Unable to delete lock file" , ex ) ; } } if ( lockFile != null && lockFile . isFile ( ) ) { final String msg = readLockFile ( ) ; if ( msg != null && msg . equals ( magic ) && ! lockFile . delete ( ) ) { LOGGER . error ( "Lock file '{}' was unable to be deleted. Please manually delete this file." , lockFile . toString ( ) ) ; lockFile . deleteOnExit ( ) ; } } lockFile = null ; removeShutdownHook ( ) ; final Timestamp timestamp = new Timestamp ( System . currentTimeMillis ( ) ) ; LOGGER . debug ( "Lock released ({}) {} @ {}" , Thread . currentThread ( ) . getName ( ) , magic , timestamp . toString ( ) ) ; } | Releases the lock on the H2 database . |
11,636 | private String readLockFile ( ) { String msg = null ; try ( RandomAccessFile f = new RandomAccessFile ( lockFile , "rw" ) ) { msg = f . readLine ( ) ; } catch ( IOException ex ) { LOGGER . debug ( String . format ( "Error reading lock file: %s" , lockFile ) , ex ) ; } return msg ; } | Reads the first line from the lock file and returns the results as a string . |
11,637 | private double getFileAge ( File file ) { final Date d = new Date ( ) ; final long modified = file . lastModified ( ) ; final double time = ( d . getTime ( ) - modified ) / 1000.0 / 60.0 ; LOGGER . debug ( "Lock file age is {} minutes" , time ) ; return time ; } | Returns the age of the file in minutes . |
11,638 | public static String derivePomUrl ( String artifactId , String version , String artifactUrl ) { return artifactUrl . substring ( 0 , artifactUrl . lastIndexOf ( '/' ) ) + '/' + artifactId + '-' + version + ".pom" ; } | Tries to determine the URL to the pom . xml . |
11,639 | public synchronized Iterable < Evidence > getIterator ( EvidenceType type , Confidence confidence ) { if ( null != confidence && null != type ) { final Set < Evidence > list ; switch ( type ) { case VENDOR : list = Collections . unmodifiableSet ( new HashSet < > ( vendors ) ) ; break ; case PRODUCT : list = Collections . unmodifiableSet ( new HashSet < > ( products ) ) ; break ; case VERSION : list = Collections . unmodifiableSet ( new HashSet < > ( versions ) ) ; break ; default : return null ; } switch ( confidence ) { case HIGHEST : return EvidenceCollection . HIGHEST_CONFIDENCE . filter ( list ) ; case HIGH : return EvidenceCollection . HIGH_CONFIDENCE . filter ( list ) ; case MEDIUM : return EvidenceCollection . MEDIUM_CONFIDENCE . filter ( list ) ; default : return EvidenceCollection . LOW_CONFIDENCE . filter ( list ) ; } } return null ; } | Used to iterate over evidence of the specified type and confidence . |
11,640 | public synchronized void addEvidence ( EvidenceType type , Evidence e ) { if ( null != type ) { switch ( type ) { case VENDOR : vendors . add ( e ) ; break ; case PRODUCT : products . add ( e ) ; break ; case VERSION : versions . add ( e ) ; break ; default : break ; } } } | Adds evidence to the collection . |
11,641 | public synchronized void removeEvidence ( EvidenceType type , Evidence e ) { if ( null != type ) { switch ( type ) { case VENDOR : vendors . remove ( e ) ; break ; case PRODUCT : products . remove ( e ) ; break ; case VERSION : versions . remove ( e ) ; break ; default : break ; } } } | Removes evidence from the collection . |
11,642 | public void addEvidence ( EvidenceType type , String source , String name , String value , Confidence confidence ) { final Evidence e = new Evidence ( source , name , value , confidence ) ; addEvidence ( type , e ) ; } | Creates an Evidence object from the parameters and adds the resulting object to the evidence collection . |
11,643 | public synchronized Set < Evidence > getEvidence ( EvidenceType type ) { if ( null != type ) { switch ( type ) { case VENDOR : return Collections . unmodifiableSet ( new HashSet < > ( vendors ) ) ; case PRODUCT : return Collections . unmodifiableSet ( new HashSet < > ( products ) ) ; case VERSION : return Collections . unmodifiableSet ( new HashSet < > ( versions ) ) ; default : break ; } } return null ; } | Returns the unmodifiable set of evidence of the given type . |
11,644 | public synchronized boolean contains ( EvidenceType type , Evidence e ) { if ( null != type ) { switch ( type ) { case VENDOR : return vendors . contains ( e ) ; case PRODUCT : return products . contains ( e ) ; case VERSION : return versions . contains ( e ) ; default : break ; } } return false ; } | Tests if the evidence collection contains the given evidence . |
11,645 | public synchronized boolean contains ( EvidenceType type , Confidence confidence ) { if ( null == type ) { return false ; } final Set < Evidence > col ; switch ( type ) { case VENDOR : col = vendors ; break ; case PRODUCT : col = products ; break ; case VERSION : col = versions ; break ; default : return false ; } for ( Evidence e : col ) { if ( e . getConfidence ( ) . equals ( confidence ) ) { return true ; } } return false ; } | Returns whether or not the collection contains evidence of a specified type and confidence . |
11,646 | public Model parse ( File file ) throws PomParseException { try ( FileInputStream fis = new FileInputStream ( file ) ) { return parse ( fis ) ; } catch ( IOException ex ) { LOGGER . debug ( "" , ex ) ; throw new PomParseException ( String . format ( "Unable to parse pom '%s'" , file . toString ( ) ) , ex ) ; } } | Parses the given xml file and returns a Model object containing only the fields dependency - check requires . |
11,647 | public Model parse ( InputStream inputStream ) throws PomParseException { try { final PomHandler handler = new PomHandler ( ) ; final SAXParser saxParser = XmlUtils . buildSecureSaxParser ( ) ; final XMLReader xmlReader = saxParser . getXMLReader ( ) ; xmlReader . setContentHandler ( handler ) ; final BOMInputStream bomStream = new BOMInputStream ( new XmlInputStream ( new PomProjectInputStream ( inputStream ) ) ) ; final ByteOrderMark bom = bomStream . getBOM ( ) ; final String defaultEncoding = StandardCharsets . UTF_8 . name ( ) ; final String charsetName = bom == null ? defaultEncoding : bom . getCharsetName ( ) ; final Reader reader = new InputStreamReader ( bomStream , charsetName ) ; final InputSource in = new InputSource ( reader ) ; xmlReader . parse ( in ) ; return handler . getModel ( ) ; } catch ( ParserConfigurationException | SAXException | FileNotFoundException ex ) { LOGGER . debug ( "" , ex ) ; throw new PomParseException ( ex ) ; } catch ( IOException ex ) { LOGGER . debug ( "" , ex ) ; throw new PomParseException ( ex ) ; } } | Parses the given XML file and returns a Model object containing only the fields dependency - check requires . |
11,648 | protected void runCheck ( ) throws MojoExecutionException , MojoFailureException { if ( getConnectionString ( ) != null && ! getConnectionString ( ) . isEmpty ( ) ) { final String msg = "Unable to purge the local NVD when using a non-default connection string" ; if ( this . isFailOnError ( ) ) { throw new MojoFailureException ( msg ) ; } getLog ( ) . error ( msg ) ; } else { populateSettings ( ) ; final File db ; try { db = new File ( getSettings ( ) . getDataDirectory ( ) , getSettings ( ) . getString ( Settings . KEYS . DB_FILE_NAME , "odc.mv.db" ) ) ; if ( db . exists ( ) ) { if ( db . delete ( ) ) { getLog ( ) . info ( "Database file purged; local copy of the NVD has been removed" ) ; } else { final String msg = String . format ( "Unable to delete '%s'; please delete the file manually" , db . getAbsolutePath ( ) ) ; if ( this . isFailOnError ( ) ) { throw new MojoFailureException ( msg ) ; } getLog ( ) . error ( msg ) ; } } else { final String msg = String . format ( "Unable to purge database; the database file does not exist: %s" , db . getAbsolutePath ( ) ) ; if ( this . isFailOnError ( ) ) { throw new MojoFailureException ( msg ) ; } getLog ( ) . error ( msg ) ; } } catch ( IOException ex ) { final String msg = "Unable to delete the database" ; if ( this . isFailOnError ( ) ) { throw new MojoExecutionException ( msg , ex ) ; } getLog ( ) . error ( msg ) ; } getSettings ( ) . cleanup ( ) ; } } | Purges the local copy of the NVD . |
11,649 | public boolean matches ( Evidence evidence ) { return sourceMatches ( evidence ) && confidenceMatches ( evidence ) && name . equalsIgnoreCase ( evidence . getName ( ) ) && valueMatches ( evidence ) ; } | Tests whether the given Evidence matches this EvidenceMatcher . |
11,650 | public String identifierToSuppressionId ( Identifier id ) { if ( id instanceof PurlIdentifier ) { final PurlIdentifier purl = ( PurlIdentifier ) id ; return purl . toGav ( ) ; } else if ( id instanceof CpeIdentifier ) { try { final CpeIdentifier cpeId = ( CpeIdentifier ) id ; final Cpe cpe = cpeId . getCpe ( ) ; return String . format ( "cpe:/%s:%s:%s" , Convert . wellFormedToCpeUri ( cpe . getPart ( ) ) , Convert . wellFormedToCpeUri ( cpe . getWellFormedVendor ( ) ) , Convert . wellFormedToCpeUri ( cpe . getWellFormedProduct ( ) ) ) ; } catch ( CpeEncodingException ex ) { LOGGER . debug ( "Unable to convert to cpe URI" , ex ) ; } } else if ( id instanceof GenericIdentifier ) { return id . getValue ( ) ; } return null ; } | Converts an identifier into the Suppression string when possible . |
11,651 | public static boolean contains ( File file , String pattern ) throws IOException { try ( Scanner fileScanner = new Scanner ( file , UTF_8 ) ) { final Pattern regex = Pattern . compile ( pattern ) ; if ( fileScanner . findWithinHorizon ( regex , 0 ) != null ) { return true ; } } return false ; } | Determines if the given file contains the given regular expression . |
11,652 | public static boolean contains ( File file , String [ ] patterns ) throws IOException { final List < Pattern > regexes = new ArrayList < > ( ) ; for ( String pattern : patterns ) { regexes . add ( Pattern . compile ( pattern ) ) ; } try ( Scanner fileScanner = new Scanner ( file , UTF_8 ) ) { return regexes . stream ( ) . anyMatch ( ( regex ) -> ( fileScanner . findWithinHorizon ( regex , 0 ) != null ) ) ; } } | Determines if the given file contains the given regular expressions . |
11,653 | private void removeBadSpringMatches ( Dependency dependency ) { String mustContain = null ; for ( Identifier i : dependency . getSoftwareIdentifiers ( ) ) { if ( i . getValue ( ) != null && i . getValue ( ) . startsWith ( "org.springframework." ) ) { final int endPoint = i . getValue ( ) . indexOf ( ':' , 19 ) ; if ( endPoint >= 0 ) { mustContain = i . getValue ( ) . substring ( 19 , endPoint ) . toLowerCase ( ) ; break ; } } } if ( mustContain != null ) { final Set < Identifier > removalSet = new HashSet < > ( ) ; for ( Identifier i : dependency . getVulnerableSoftwareIdentifiers ( ) ) { if ( i . getValue ( ) != null && i . getValue ( ) . startsWith ( "cpe:/a:springsource:" ) && ! i . getValue ( ) . toLowerCase ( ) . contains ( mustContain ) ) { removalSet . add ( i ) ; } } removalSet . forEach ( ( i ) -> { dependency . removeVulnerableSoftwareIdentifier ( i ) ; } ) ; } } | Removes inaccurate matches on springframework CPEs . |
11,654 | protected void prepareFileTypeAnalyzer ( Engine engine ) throws InitializationException { if ( engine . getMode ( ) != Mode . EVIDENCE_COLLECTION ) { try { final Settings settings = engine . getSettings ( ) ; final String [ ] tmp = settings . getArray ( Settings . KEYS . ECOSYSTEM_SKIP_CPEANALYZER ) ; if ( tmp != null ) { final List < String > skipEcosystems = Arrays . asList ( tmp ) ; if ( skipEcosystems . contains ( DEPENDENCY_ECOSYSTEM ) && ! settings . getBoolean ( Settings . KEYS . ANALYZER_NODE_AUDIT_ENABLED ) ) { LOGGER . debug ( "NodePackageAnalyzer enabled without a corresponding vulnerability analyzer" ) ; final String msg = "Invalid Configuration: enabling the Node Package Analyzer without " + "using the Node Audit Analyzer is not supported." ; throw new InitializationException ( msg ) ; } else if ( ! skipEcosystems . contains ( DEPENDENCY_ECOSYSTEM ) ) { LOGGER . warn ( "Using the CPE Analyzer with Node.js can result in many false positives." ) ; } } } catch ( InvalidSettingException ex ) { throw new InitializationException ( "Unable to read configuration settings" , ex ) ; } } } | Performs validation on the configuration to ensure that the correct analyzers are in place . |
11,655 | private boolean isNodeAuditEnabled ( Engine engine ) { for ( Analyzer a : engine . getAnalyzers ( ) ) { if ( a instanceof NodeAuditAnalyzer ) { return a . isEnabled ( ) ; } } return false ; } | Determines if the Node Audit analyzer is enabled . |
11,656 | public int compareTo ( Identifier o ) { if ( o == null ) { throw new IllegalArgumentException ( "Unable to compare a null identifier" ) ; } return new CompareToBuilder ( ) . append ( this . value , o . toString ( ) ) . append ( this . url , o . getUrl ( ) ) . append ( this . confidence , o . getConfidence ( ) ) . toComparison ( ) ; } | Implementation of the comparator interface . |
11,657 | protected List < String > buildArgumentList ( ) { final List < String > args = new ArrayList < > ( ) ; if ( ! StringUtils . isEmpty ( getSettings ( ) . getString ( Settings . KEYS . ANALYZER_ASSEMBLY_DOTNET_PATH ) ) ) { args . add ( getSettings ( ) . getString ( Settings . KEYS . ANALYZER_ASSEMBLY_DOTNET_PATH ) ) ; } else if ( isDotnetPath ( ) ) { args . add ( "dotnet" ) ; } else { return null ; } args . add ( grokAssembly . getPath ( ) ) ; return args ; } | Builds the beginnings of a List for ProcessBuilder |
11,658 | public void prepareFileTypeAnalyzer ( Engine engine ) throws InitializationException { final File location ; try ( InputStream in = FileUtils . getResourceAsStream ( "GrokAssembly.zip" ) ) { if ( in == null ) { throw new InitializationException ( "Unable to extract GrokAssembly.dll - file not found" ) ; } location = FileUtils . createTempDirectory ( getSettings ( ) . getTempDirectory ( ) ) ; ExtractionUtil . extractFiles ( in , location ) ; } catch ( ExtractionException ex ) { throw new InitializationException ( "Unable to extract GrokAssembly.dll" , ex ) ; } catch ( IOException ex ) { throw new InitializationException ( "Unable to create temp directory for GrokAssembly" , ex ) ; } grokAssembly = new File ( location , "GrokAssembly.dll" ) ; baseArgumentList = buildArgumentList ( ) ; if ( baseArgumentList == null ) { setEnabled ( false ) ; LOGGER . error ( "----------------------------------------------------" ) ; LOGGER . error ( ".NET Assembly Analyzer could not be initialized and at least one " + "'exe' or 'dll' was scanned. The 'dotnet' executable could not be found on " + "the path; either disable the Assembly Analyzer or configure the path dotnet core." ) ; LOGGER . error ( "----------------------------------------------------" ) ; return ; } try { final ProcessBuilder pb = new ProcessBuilder ( baseArgumentList ) ; final Process p = pb . start ( ) ; IOUtils . copy ( p . getErrorStream ( ) , NullOutputStream . NULL_OUTPUT_STREAM ) ; final GrokParser grok = new GrokParser ( ) ; final AssemblyData data = grok . parse ( p . getInputStream ( ) ) ; if ( p . waitFor ( ) != 1 || data == null || StringUtils . isEmpty ( data . getError ( ) ) ) { LOGGER . warn ( "An error occurred with the .NET AssemblyAnalyzer, please see the log for more details." ) ; LOGGER . debug ( "GrokAssembly.dll is not working properly" ) ; grokAssembly = null ; setEnabled ( false ) ; throw new InitializationException ( "Could not execute .NET AssemblyAnalyzer" ) ; } } catch ( InitializationException e ) { setEnabled ( false ) ; throw e ; } catch ( IOException | InterruptedException e ) { LOGGER . warn ( "An error occurred with the .NET AssemblyAnalyzer;\n" + "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details." ) ; LOGGER . debug ( "Could not execute GrokAssembly {}" , e . getMessage ( ) ) ; setEnabled ( false ) ; throw new InitializationException ( "An error occurred with the .NET AssemblyAnalyzer" , e ) ; } } | Initialize the analyzer . In this case extract GrokAssembly . dll to a temporary location .
11,659 | private boolean isDotnetPath ( ) { final String [ ] args = new String [ 2 ] ; args [ 0 ] = "dotnet" ; args [ 1 ] = "--version" ; final ProcessBuilder pb = new ProcessBuilder ( args ) ; try { final Process proc = pb . start ( ) ; final int retCode = proc . waitFor ( ) ; if ( retCode == 0 ) { return true ; } final byte [ ] version = new byte [ 50 ] ; final int read = proc . getInputStream ( ) . read ( version ) ; if ( read > 0 ) { final String v = new String ( version , UTF_8 ) ; if ( v . length ( ) > 0 ) { return true ; } } } catch ( IOException | InterruptedException ex ) { LOGGER . debug ( "Path search failed for dotnet" , ex ) ; } return false ; } | Tests to see if a file is in the system path . |
11,660 | protected static void addMatchingValues ( List < String > packages , String value , Dependency dep , EvidenceType type ) { if ( value == null || value . isEmpty ( ) || packages == null || packages . isEmpty ( ) ) { return ; } for ( String key : packages ) { final int pos = StringUtils . indexOfIgnoreCase ( value , key ) ; if ( ( pos == 0 && ( key . length ( ) == value . length ( ) || ( key . length ( ) < value . length ( ) && ! Character . isLetterOrDigit ( value . charAt ( key . length ( ) ) ) ) ) ) || ( pos > 0 && ! Character . isLetterOrDigit ( value . charAt ( pos - 1 ) ) && ( pos + key . length ( ) == value . length ( ) || ( key . length ( ) < value . length ( ) && ! Character . isLetterOrDigit ( value . charAt ( pos + key . length ( ) ) ) ) ) ) ) { dep . addEvidence ( type , "dll" , "namespace" , key , Confidence . HIGHEST ) ; } } } | Cycles through the collection of class name information to see if parts of the package names are contained in the provided value . If found it will be added as the HIGHEST confidence evidence because we have more than one source corroborating the value .
11,661 | public ProcessTask call ( ) throws Exception { try { processFiles ( ) ; } catch ( UpdateException ex ) { this . exception = ex ; } finally { settings . cleanup ( false ) ; } return this ; } | Implements the callable interface . |
11,662 | protected void importJSON ( File file ) throws ParserConfigurationException , IOException , SQLException , DatabaseException , ClassNotFoundException , UpdateException { final NvdCveParser parser = new NvdCveParser ( settings , cveDB ) ; parser . parse ( file ) ; } | Imports the NVD CVE JSON File into the database . |
11,663 | private void processFiles ( ) throws UpdateException { LOGGER . info ( "Processing Started for NVD CVE - {}" , downloadTask . getNvdCveInfo ( ) . getId ( ) ) ; final long startProcessing = System . currentTimeMillis ( ) ; try { importJSON ( downloadTask . getFile ( ) ) ; cveDB . commit ( ) ; properties . save ( downloadTask . getNvdCveInfo ( ) ) ; } catch ( ParserConfigurationException | SQLException | DatabaseException | ClassNotFoundException | IOException ex ) { throw new UpdateException ( ex ) ; } finally { downloadTask . cleanup ( ) ; } LOGGER . info ( "Processing Complete for NVD CVE - {} ({} ms)" , downloadTask . getNvdCveInfo ( ) . getId ( ) , System . currentTimeMillis ( ) - startProcessing ) ; } | Processes the NVD CVE JSON file and imports the data into the DB .
11,664 | private StringBuilder read ( int n ) throws IOException { boolean eof = false ; final StringBuilder s = new StringBuilder ( n ) ; while ( s . length ( ) < n && ! eof ) { if ( pushBack . length ( ) == 0 ) { eof = readIntoPushBack ( ) ; } if ( pushBack . length ( ) > 0 ) { s . append ( pushBack . charAt ( 0 ) ) ; pushBack . deleteCharAt ( 0 ) ; } } return s ; } | Read n characters . |
11,665 | private boolean readIntoPushBack ( ) throws IOException { boolean eof = false ; final int ch = in . read ( ) ; if ( ch >= 0 ) { if ( ! ( pulled == 0 && isWhiteSpace ( ch ) ) ) { pulled += 1 ; if ( ch == '&' ) { readAmpersand ( ) ; } else { pushBack . append ( ( char ) ch ) ; } } } else { eof = true ; } return eof ; } | Might not actually push back anything but usually will . |
11,666 | private void readAmpersand ( ) throws IOException { final StringBuilder reference = new StringBuilder ( ) ; int ch ; for ( ch = in . read ( ) ; isAlphaNumeric ( ch ) ; ch = in . read ( ) ) { reference . append ( ( char ) ch ) ; } if ( ch == ';' ) { final String code = XmlEntity . fromNamedReference ( reference ) ; if ( code != null ) { pushBack . append ( code ) ; } else { pushBack . append ( "&" ) . append ( reference ) . append ( ( char ) ch ) ; } } else { pushBack . append ( "&" ) . append ( reference ) . append ( ( char ) ch ) ; } } | Deal with an ampersand in the stream . |
11,667 | private void given ( CharSequence s , int wanted , int got ) { red . append ( s ) ; given += got ; LOGGER . trace ( "Given: [" + wanted + "," + got + "]-" + s ) ; } | Keep track of what we've given them .
11,668 | public int read ( byte [ ] data , int offset , int length ) throws IOException { final StringBuilder s = read ( length ) ; int n = 0 ; for ( int i = 0 ; i < Math . min ( length , s . length ( ) ) ; i ++ ) { data [ offset + i ] = ( byte ) s . charAt ( i ) ; n += 1 ; } given ( s , length , n ) ; return n > 0 ? n : - 1 ; } | Reads the next length of bytes from the stream into the given byte array at the given offset . |
11,669 | public synchronized void open ( CveDB cve , Settings settings ) throws IndexException { if ( INSTANCE . usageCount . addAndGet ( 1 ) == 1 ) { try { final File temp = settings . getTempDirectory ( ) ; index = new MMapDirectory ( temp . toPath ( ) ) ; buildIndex ( cve ) ; indexReader = DirectoryReader . open ( index ) ; } catch ( IOException ex ) { throw new IndexException ( ex ) ; } indexSearcher = new IndexSearcher ( indexReader ) ; searchingAnalyzer = createSearchingAnalyzer ( ) ; queryParser = new QueryParser ( Fields . DOCUMENT_KEY , searchingAnalyzer ) ; } } | Creates and loads data into an in memory index . |
11,670 | private Analyzer createSearchingAnalyzer ( ) { final Map < String , Analyzer > fieldAnalyzers = new HashMap < > ( ) ; fieldAnalyzers . put ( Fields . DOCUMENT_KEY , new KeywordAnalyzer ( ) ) ; productFieldAnalyzer = new SearchFieldAnalyzer ( ) ; vendorFieldAnalyzer = new SearchFieldAnalyzer ( ) ; fieldAnalyzers . put ( Fields . PRODUCT , productFieldAnalyzer ) ; fieldAnalyzers . put ( Fields . VENDOR , vendorFieldAnalyzer ) ; return new PerFieldAnalyzerWrapper ( new KeywordAnalyzer ( ) , fieldAnalyzers ) ; } | Creates an Analyzer for searching the CPE Index . |
11,671 | public synchronized void close ( ) { final int count = INSTANCE . usageCount . get ( ) - 1 ; if ( count <= 0 ) { INSTANCE . usageCount . set ( 0 ) ; if ( searchingAnalyzer != null ) { searchingAnalyzer . close ( ) ; searchingAnalyzer = null ; } if ( indexReader != null ) { try { indexReader . close ( ) ; } catch ( IOException ex ) { LOGGER . trace ( "" , ex ) ; } indexReader = null ; } queryParser = null ; indexSearcher = null ; if ( index != null ) { try { index . close ( ) ; } catch ( IOException ex ) { LOGGER . trace ( "" , ex ) ; } index = null ; } } } | Closes the CPE Index . |
11,672 | private void buildIndex ( CveDB cve ) throws IndexException { try ( Analyzer analyzer = createSearchingAnalyzer ( ) ; IndexWriter indexWriter = new IndexWriter ( index , new IndexWriterConfig ( analyzer ) ) ) { final FieldType ft = new FieldType ( TextField . TYPE_STORED ) ; ft . setIndexOptions ( IndexOptions . DOCS ) ; ft . setOmitNorms ( true ) ; final Document doc = new Document ( ) ; final Field v = new Field ( Fields . VENDOR , Fields . VENDOR , ft ) ; final Field p = new Field ( Fields . PRODUCT , Fields . PRODUCT , ft ) ; doc . add ( v ) ; doc . add ( p ) ; final Set < Pair < String , String > > data = cve . getVendorProductList ( ) ; for ( Pair < String , String > pair : data ) { if ( pair . getLeft ( ) != null && pair . getRight ( ) != null ) { v . setStringValue ( pair . getLeft ( ) ) ; p . setStringValue ( pair . getRight ( ) ) ; indexWriter . addDocument ( doc ) ; resetAnalyzers ( ) ; } } indexWriter . commit ( ) ; } catch ( DatabaseException ex ) { LOGGER . debug ( "" , ex ) ; throw new IndexException ( "Error reading CPE data" , ex ) ; } catch ( IOException ex ) { throw new IndexException ( "Unable to close an in-memory index" , ex ) ; } } | Builds the CPE Lucene Index based off of the data within the CveDB . |
11,673 | public synchronized TopDocs search ( String searchString , int maxQueryResults ) throws ParseException , IndexException , IOException { final Query query = parseQuery ( searchString ) ; return search ( query , maxQueryResults ) ; } | Searches the index using the given search string . |
11,674 | public synchronized Query parseQuery ( String searchString ) throws ParseException , IndexException { if ( searchString == null || searchString . trim ( ) . isEmpty ( ) ) { throw new ParseException ( "Query is null or empty" ) ; } LOGGER . debug ( searchString ) ; final Query query = queryParser . parse ( searchString ) ; try { resetAnalyzers ( ) ; } catch ( IOException ex ) { throw new IndexException ( "Unable to reset the analyzer after parsing" , ex ) ; } return query ; } | Parses the given string into a Lucene Query . |
11,675 | public synchronized TopDocs search ( Query query , int maxQueryResults ) throws CorruptIndexException , IOException { return indexSearcher . search ( query , maxQueryResults ) ; } | Searches the index using the given query . |
11,676 | public synchronized String explain ( Query query , int doc ) throws IOException { return indexSearcher . explain ( query , doc ) . toString ( ) ; } | Method to explain queries matches . |
11,677 | public StringColumn append ( String value ) { try { lookupTable . append ( value ) ; } catch ( NoKeysAvailableException ex ) { lookupTable = lookupTable . promoteYourself ( ) ; try { lookupTable . append ( value ) ; } catch ( NoKeysAvailableException e ) { throw new IllegalStateException ( e ) ; } } return this ; } | Added for naming consistency with all other columns |
11,678 | private void addValuesToSelection ( Selection results , byte key ) { if ( key != DEFAULT_RETURN_VALUE ) { int i = 0 ; for ( byte next : values ) { if ( key == next ) { results . add ( i ) ; } i ++ ; } } } | Given a key matching some string add to the selection the index of every record that matches that key |
11,679 | private String [ ] selectColumnNames ( List < String > names , ColumnType [ ] types ) { List < String > header = new ArrayList < > ( ) ; for ( int i = 0 ; i < types . length ; i ++ ) { if ( types [ i ] != SKIP ) { String name = names . get ( i ) ; name = name . trim ( ) ; header . add ( name ) ; } } String [ ] result = new String [ header . size ( ) ] ; return header . toArray ( result ) ; } | Reads column names from header skipping any for which the type == SKIP |
11,680 | private Table summarize ( TableSliceGroup group ) { List < Table > results = new ArrayList < > ( ) ; ArrayListMultimap < String , AggregateFunction < ? , ? > > reductionMultimap = getAggregateFunctionMultimap ( ) ; for ( String name : reductionMultimap . keys ( ) ) { List < AggregateFunction < ? , ? > > reductions = reductionMultimap . get ( name ) ; results . add ( group . aggregate ( name , reductions . toArray ( new AggregateFunction < ? , ? > [ 0 ] ) ) ) ; } return combineTables ( results ) ; } | Associates the columns to be summarized with the functions that match their type . All valid combinations are used |
11,681 | public static StandardTableSliceGroup create ( Table original , String ... columnsNames ) { List < CategoricalColumn < ? > > columns = original . categoricalColumns ( columnsNames ) ; return new StandardTableSliceGroup ( original , columns . toArray ( new CategoricalColumn < ? > [ 0 ] ) ) ; } | Returns a viewGroup splitting the original table on the given columns . The named columns must be CategoricalColumns |
11,682 | private void splitOn ( String ... columnNames ) { List < Column < ? > > columns = getSourceTable ( ) . columns ( columnNames ) ; int byteSize = getByteSize ( columns ) ; byte [ ] currentKey = null ; String currentStringKey = null ; TableSlice view ; Selection selection = new BitmapBackedSelection ( ) ; for ( int row = 0 ; row < getSourceTable ( ) . rowCount ( ) ; row ++ ) { ByteBuffer byteBuffer = ByteBuffer . allocate ( byteSize ) ; String newStringKey = "" ; for ( int col = 0 ; col < columnNames . length ; col ++ ) { if ( col > 0 ) { newStringKey = newStringKey + SPLIT_STRING ; } Column < ? > c = getSourceTable ( ) . column ( columnNames [ col ] ) ; String groupKey = getSourceTable ( ) . getUnformatted ( row , getSourceTable ( ) . columnIndex ( c ) ) ; newStringKey = newStringKey + groupKey ; byteBuffer . put ( c . asBytes ( row ) ) ; } byte [ ] newKey = byteBuffer . array ( ) ; if ( row == 0 ) { currentKey = newKey ; currentStringKey = newStringKey ; } if ( ! Arrays . equals ( newKey , currentKey ) ) { currentKey = newKey ; view = new TableSlice ( getSourceTable ( ) , selection ) ; view . setName ( currentStringKey ) ; currentStringKey = newStringKey ; addSlice ( view ) ; selection = new BitmapBackedSelection ( ) ; selection . add ( row ) ; } else { selection . add ( row ) ; } } if ( ! selection . isEmpty ( ) ) { view = new TableSlice ( getSourceTable ( ) , selection ) ; view . setName ( currentStringKey ) ; addSlice ( view ) ; } } | Splits the sourceTable table into sub - tables grouping on the columns whose names are given in splitColumnNames |
11,683 | public FloatColumn asFloatColumn ( ) { FloatArrayList values = new FloatArrayList ( ) ; for ( int d : data ) { values . add ( d ) ; } values . trim ( ) ; return FloatColumn . create ( this . name ( ) , values . elements ( ) ) ; } | Returns a new FloatColumn containing a value for each value in this column truncating if necessary . |
11,684 | public DoubleColumn asDoubleColumn ( ) { DoubleArrayList values = new DoubleArrayList ( ) ; for ( int d : data ) { values . add ( d ) ; } values . trim ( ) ; return DoubleColumn . create ( this . name ( ) , values . elements ( ) ) ; } | Returns a new DoubleColumn containing a value for each value in this column truncating if necessary . |
11,685 | public void dumpRow ( PrintStream out ) { for ( int i = 0 ; i < columnNames . size ( ) ; i ++ ) { out . print ( "Column " ) ; out . print ( i ) ; out . print ( " " ) ; out . print ( columnNames . get ( columnIndex ) ) ; out . print ( " : " ) ; try { out . println ( line [ i ] ) ; } catch ( ArrayIndexOutOfBoundsException aioobe ) { out . println ( "Unable to get cell " + i + " of this line" ) ; } } } | Dumps to a PrintStream the information relative to the row that caused the problem |
11,686 | public List < LocalTime > asList ( ) { List < LocalTime > times = new ArrayList < > ( ) ; for ( LocalTime time : this ) { times . add ( time ) ; } return times ; } | Returns the entire contents of this column as a list |
11,687 | private static Element row ( int row , Table table , ElementCreator elements , HtmlWriteOptions options ) { Element tr = elements . create ( "tr" , null , row ) ; for ( Column < ? > col : table . columns ( ) ) { if ( options . escapeText ( ) ) { tr . appendChild ( elements . create ( "td" , col , row ) . appendText ( String . valueOf ( col . getString ( row ) ) ) ) ; } else { tr . appendChild ( elements . create ( "td" , col , row ) . appendChild ( new DataNode ( String . valueOf ( col . getString ( row ) ) ) ) ) ; } } return tr ; } | Returns a string containing the html output of one table row |
11,688 | public Reader createReader ( byte [ ] cachedBytes ) throws IOException { if ( cachedBytes != null ) { return new InputStreamReader ( new ByteArrayInputStream ( cachedBytes ) ) ; } if ( inputStream != null ) { return new InputStreamReader ( inputStream , charset ) ; } if ( reader != null ) { return reader ; } return new InputStreamReader ( new FileInputStream ( file ) , charset ) ; } | If cachedBytes are not null returns a Reader created from the cachedBytes . Otherwise returns a Reader from the underlying source . |
11,689 | public long [ ] asEpochSecondArray ( ZoneOffset offset ) { long [ ] output = new long [ data . size ( ) ] ; for ( int i = 0 ; i < data . size ( ) ; i ++ ) { LocalDateTime dateTime = PackedLocalDateTime . asLocalDateTime ( data . getLong ( i ) ) ; if ( dateTime == null ) { output [ i ] = Long . MIN_VALUE ; } else { output [ i ] = dateTime . toEpochSecond ( offset ) ; } } return output ; } | Returns the seconds from epoch for each value as an array based on the given offset |
11,690 | public long [ ] asEpochMillisArray ( ZoneOffset offset ) { long [ ] output = new long [ data . size ( ) ] ; for ( int i = 0 ; i < data . size ( ) ; i ++ ) { LocalDateTime dateTime = PackedLocalDateTime . asLocalDateTime ( data . getLong ( i ) ) ; if ( dateTime == null ) { output [ i ] = Long . MIN_VALUE ; } else { output [ i ] = dateTime . toInstant ( offset ) . toEpochMilli ( ) ; } } return output ; } | Returns an array where each entry is the difference measured in milliseconds between the LocalDateTime and midnight January 1 1970 UTC . |
11,691 | public IntIterator iterator ( ) { return new IntIterator ( ) { private final org . roaringbitmap . IntIterator iterator = bitmap . getIntIterator ( ) ; public int nextInt ( ) { return iterator . next ( ) ; } public int skip ( int k ) { throw new UnsupportedOperationException ( "Views do not support skipping in the iterator" ) ; } public boolean hasNext ( ) { return iterator . hasNext ( ) ; } } ; } | Returns a fastUtil intIterator that wraps a bitmap intIterator |
11,692 | protected static Selection selectNRowsAtRandom ( int n , int max ) { Selection selection = new BitmapBackedSelection ( ) ; if ( n > max ) { throw new IllegalArgumentException ( "Illegal arguments: N (" + n + ") greater than Max (" + max + ")" ) ; } int [ ] rows = new int [ n ] ; if ( n == max ) { for ( int k = 0 ; k < n ; ++ k ) { selection . add ( k ) ; } return selection ; } BitSet bs = new BitSet ( max ) ; int cardinality = 0 ; Random random = new Random ( ) ; while ( cardinality < n ) { int v = random . nextInt ( max ) ; if ( ! bs . get ( v ) ) { bs . set ( v ) ; cardinality ++ ; } } int pos = 0 ; for ( int i = bs . nextSetBit ( 0 ) ; i >= 0 ; i = bs . nextSetBit ( i + 1 ) ) { rows [ pos ++ ] = i ; } for ( int row : rows ) { selection . add ( row ) ; } return selection ; } | Returns a randomly generated selection of size N where Max is the largest possible value
11,693 | public Boolean get ( int i ) { byte b = data . getByte ( i ) ; if ( b == BooleanColumnType . BYTE_TRUE ) { return Boolean . TRUE ; } if ( b == BooleanColumnType . BYTE_FALSE ) { return Boolean . FALSE ; } return null ; } | Returns the value in row i as a Boolean |
11,694 | public BooleanColumn set ( Selection rowSelection , boolean newValue ) { for ( int row : rowSelection ) { set ( row , newValue ) ; } return this ; } | Conditionally update this column replacing current values with newValue for all rows where the current value matches the selection criteria |
11,695 | @ SuppressWarnings ( { "rawtypes" , "unchecked" } ) public static void copyRowsToTable ( Selection rows , Table oldTable , Table newTable ) { for ( int columnIndex = 0 ; columnIndex < oldTable . columnCount ( ) ; columnIndex ++ ) { Column oldColumn = oldTable . column ( columnIndex ) ; int r = 0 ; for ( int i : rows ) { newTable . column ( columnIndex ) . set ( r , oldColumn , i ) ; r ++ ; } } } | Copies the rows indicated by the row index values in the given selection from oldTable to newTable |
11,696 | public void csv ( String file ) throws IOException { CsvWriteOptions options = CsvWriteOptions . builder ( file ) . build ( ) ; new CsvWriter ( ) . write ( table , options ) ; } | legacy methods left for backwards compatibility |
11,697 | public DoubleColumn map ( ToDoubleFunction < Double > fun ) { DoubleColumn result = DoubleColumn . create ( name ( ) ) ; for ( double t : this ) { try { result . append ( fun . applyAsDouble ( t ) ) ; } catch ( Exception e ) { result . appendMissing ( ) ; } } return result ; } | Maps the function across all rows appending the results to a new NumberColumn |
11,698 | public DoubleColumn filter ( DoublePredicate test ) { DoubleColumn result = DoubleColumn . create ( name ( ) ) ; for ( int i = 0 ; i < size ( ) ; i ++ ) { double d = getDouble ( i ) ; if ( test . test ( d ) ) { result . append ( d ) ; } } return result ; } | Returns a new NumberColumn with only those rows satisfying the predicate |
11,699 | public LongColumn asLongColumn ( ) { LongArrayList values = new LongArrayList ( ) ; for ( double d : data ) { values . add ( ( long ) d ) ; } values . trim ( ) ; return LongColumn . create ( this . name ( ) , values . elements ( ) ) ; } | Returns a new LongColumn containing a value for each value in this column truncating if necessary |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.