idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
7,200
/**
 * Normalizes a stat name for serialization: removes all spaces and
 * lower-cases the first character, e.g. "Total Errors" -> "totalErrors".
 * @param stat The raw stat name; may be null or empty.
 * @return The normalized name, or the input unchanged if null/empty.
 */
private static String formatStatName(final String stat) {
  if (stat == null || stat.isEmpty()) {
    return stat;
  }
  final String collapsed = stat.replace(" ", "");
  return collapsed.substring(0, 1).toLowerCase() + collapsed.substring(1);
}
Little helper to convert the first character to lowercase and remove any spaces
7,201
/**
 * Writes the detailed running/complete query stats back to the caller using
 * the serializer for the query's API version.
 * @param query The HTTP query to answer.
 * @throws BadRequestException (501 Not Implemented) for unsupported API versions.
 */
private void printQueryStats(final HttpQuery query) {
  final int version = query.apiVersion();
  if (version == 0 || version == 1) {
    query.sendReply(query.serializer().formatQueryStatsV1(
        QueryStats.getRunningAndCompleteStats()));
  } else {
    throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED,
        "Requested API version not implemented",
        "Version " + version + " is not implemented");
  }
}
Print the detailed query stats to the caller using the proper serializer
7,202
/**
 * Purges every in-memory cache held by the TSDB instance.
 * @param tsdb The TSDB whose caches are dropped.
 * @param chan The telnet channel that issued the command; logged for audit.
 */
private void dropCaches(final TSDB tsdb, final Channel chan) {
  // Warn level so operators can correlate cache drops with latency spikes.
  LOG.warn(chan + " Dropping all in-memory caches.");
  tsdb.dropCaches();
}
Drops in-memory caches.
7,203
/**
 * Filter hook that decides whether a histogram data point should be stored.
 * If the data should not be stored the implementation can return false (or
 * an exception) in the deferred; otherwise true and the point is written.
 * <p>
 * This implementation does not support histograms and always throws.
 * @param metric The metric name of the data point.
 * @param timestamp The timestamp of the data point.
 * @param value The encoded histogram value.
 * @param tags The tag set of the data point.
 * @return Never returns normally.
 * @throws UnsupportedOperationException always.
 */
public Deferred<Boolean> allowHistogramPoint(final String metric,
    final long timestamp, final byte[] value, final Map<String, String> tags) {
  throw new UnsupportedOperationException("Not yet implemented.");
}
Determine whether or not the data point should be stored . If the data should not be stored the implementation can return false or an exception in the deferred object . Otherwise it should return true and the data point will be written to storage .
7,204
/**
 * Parses a tag value expression and returns the matching filter.
 * A lone "*" or a plain literal yields null (treated as a group-by
 * upstream); "type(args)" syntax builds a typed filter; an embedded
 * wildcard yields a case-insensitive wildcard filter.
 * @param tagk The tag key the filter applies to.
 * @param filter The raw filter expression.
 * @return The parsed filter, or null for "*" and plain literals.
 * @throws IllegalArgumentException if either argument is null or empty.
 */
public static TagVFilter getFilter(final String tagk, final String filter) {
  if (tagk == null || tagk.isEmpty()) {
    throw new IllegalArgumentException("Tagk cannot be null or empty");
  }
  if (filter == null || filter.isEmpty()) {
    throw new IllegalArgumentException("Filter cannot be null or empty");
  }
  // A lone "*" means "group by every value", not a real filter.
  if (filter.length() == 1 && filter.charAt(0) == '*') {
    return null;
  }
  final int paren = filter.indexOf('(');
  if (paren > -1) {
    // "type(args)" syntax: the lower-cased prefix selects the filter type.
    final String prefix = filter.substring(0, paren).toLowerCase();
    return new Builder()
        .setTagk(tagk)
        .setFilter(stripParentheses(filter))
        .setType(prefix)
        .build();
  }
  if (filter.contains("*")) {
    // Embedded wildcard such as "web*": case-insensitive wildcard match.
    return new TagVWildcardFilter(tagk, filter, true);
  }
  // Plain literal value.
  return null;
}
Parses the tag value and determines whether it's a group by, a literal, or a filter.
7,205
/**
 * Loads TagVFilter plugins from the plugin directory and registers each one
 * in the filter map, keyed by its lower-cased type name. Built-in filters
 * don't need to go through this process.
 * <p>
 * Each plugin must expose static {@code description()} and
 * {@code examples()} methods, a static {@code FILTER_NAME} field, a static
 * {@code initialize(TSDB)} method and a {@code (String, String)}
 * constructor; reflection failures propagate to the caller so bad plugins
 * fail fast at startup.
 * @param tsdb The TSDB passed to each plugin's initialize method.
 * @throws ClassNotFoundException if a plugin class cannot be loaded.
 * @throws NoSuchMethodException if a required method/constructor is missing.
 * @throws NoSuchFieldException if FILTER_NAME is missing.
 * @throws IllegalAccessException if a required member is not accessible.
 * @throws InvocationTargetException if a plugin's initialize() throws.
 */
public static void initializeFilterMap(final TSDB tsdb) throws
    ClassNotFoundException, NoSuchMethodException, NoSuchFieldException,
    IllegalArgumentException, SecurityException, IllegalAccessException,
    InvocationTargetException {
  final List<TagVFilter> filter_plugins = PluginLoader.loadPlugins(TagVFilter.class);
  if (filter_plugins == null) {
    return;
  }
  for (final TagVFilter filter : filter_plugins) {
    // Probe the required static API up front so a bad plugin fails fast.
    filter.getClass().getDeclaredMethod("description");
    filter.getClass().getDeclaredMethod("examples");
    filter.getClass().getDeclaredField("FILTER_NAME");
    final Method initialize = filter.getClass()
        .getDeclaredMethod("initialize", TSDB.class);
    initialize.invoke(null, tsdb);
    final Constructor<? extends TagVFilter> ctor = filter.getClass()
        .getDeclaredConstructor(String.class, String.class);
    // BUGFIX: the map is populated with the lower-cased type, so the
    // overload lookup must use the same key; previously a mixed-case type
    // never detected an existing registration.
    final String type = filter.getType().toLowerCase();
    final Pair<Class<?>, Constructor<? extends TagVFilter>> existing =
        tagv_filter_map.get(type);
    if (existing != null) {
      // BUGFIX: log the overloaded filter's class, not the Pair wrapper's.
      LOG.warn("Overloading existing filter "
          + existing.getKey().getCanonicalName()
          + " with new filter " + filter.getClass().getCanonicalName());
    }
    tagv_filter_map.put(type,
        new Pair<Class<?>, Constructor<? extends TagVFilter>>(
            filter.getClass(), ctor));
    LOG.info("Successfully loaded TagVFilter plugin: "
        + filter.getClass().getCanonicalName());
  }
  LOG.info("Loaded " + tagv_filter_map.size() + " filters");
}
Loads plugins from the plugin directory and registers them into the map. Built-in filters don't need to go through this process.
7,206
/**
 * Converts a tag map into group-by filters, appending to the given list.
 * Duplicates of filters already present are skipped.
 * @param tags The tag key/value map; may be null or empty.
 * @param filters The list to populate with group-by filters.
 */
public static void tagsToFilters(final Map<String, String> tags,
    final List<TagVFilter> filters) {
  // Delegate with group_by forced on.
  mapToFilters(tags, filters, true);
}
Converts the tag map to a filter list . If a filter already exists for a tag group by then the duplicate is skipped .
7,207
/**
 * Converts a tag map to a filter list. When {@code group_by} is set, each
 * filter is flagged as a group-by and duplicates of filters already in the
 * list are skipped (the existing filter is flagged instead).
 * @param map The tag key/value map; a null or empty map is a no-op.
 * @param filters The list to append filters to.
 * @param group_by Whether the resulting filters are group-bys.
 */
public static void mapToFilters(final Map<String, String> map,
    final List<TagVFilter> filters, final boolean group_by) {
  if (map == null || map.isEmpty()) {
    return;
  }
  for (final Map.Entry<String, String> entry : map.entrySet()) {
    final String tagk = entry.getKey();
    final String tagv = entry.getValue();
    TagVFilter filter = getFilter(tagk, tagv);
    if (filter == null) {
      // getFilter() returns null for both "*" and plain literals.
      filter = tagv.equals("*")
          ? new TagVWildcardFilter(tagk, "*", true)
          : new TagVLiteralOrFilter(tagk, tagv);
    }
    if (!group_by) {
      filters.add(filter);
      continue;
    }
    filter.setGroupBy(true);
    boolean duplicate = false;
    for (final TagVFilter existing : filters) {
      if (filter.equals(existing)) {
        LOG.debug("Skipping duplicate filter: " + existing);
        // Still make sure the existing copy is marked as a group-by.
        existing.setGroupBy(true);
        duplicate = true;
        break;
      }
    }
    if (!duplicate) {
      filters.add(filter);
    }
  }
}
Converts the map to a filter list. If a filter already exists for a tag group by, and we're told to process group bys, then the duplicate is skipped.
7,208
/**
 * Runs through the loaded plugin map and collects each filter's name,
 * description and examples into a map suitable for API serialization.
 * Plugins missing the optional metadata are logged and skipped; reflection
 * access failures are rethrown as runtime exceptions.
 * @return A map of filter name to {description, examples} metadata.
 */
public static Map<String, Map<String, String>> loadedFilters() {
  final Map<String, Map<String, String>> filters =
      new HashMap<String, Map<String, String>>(tagv_filter_map.size());
  for (final Pair<Class<?>, Constructor<? extends TagVFilter>> pair :
      tagv_filter_map.values()) {
    final Map<String, String> filter_meta = new HashMap<String, String>(1);
    try {
      Method method = pair.getKey().getDeclaredMethod("description");
      filter_meta.put("description", (String) method.invoke(null));
      method = pair.getKey().getDeclaredMethod("examples");
      filter_meta.put("examples", (String) method.invoke(null));
      final Field filter_name = pair.getKey().getDeclaredField("FILTER_NAME");
      filters.put((String) filter_name.get(null), filter_meta);
    } catch (SecurityException e) {
      throw new RuntimeException("Unexpected security exception", e);
    } catch (NoSuchMethodException e) {
      // BUGFIX: name the plugin class (pair.getKey()), not the Pair wrapper.
      LOG.error("Filter plugin " + pair.getKey().getCanonicalName()
          + " did not implement one of the \"description\" or \"examples\" methods");
    } catch (NoSuchFieldException e) {
      // BUGFIX: name the plugin class (pair.getKey()), not the Pair wrapper.
      LOG.error("Filter plugin " + pair.getKey().getCanonicalName()
          + " did not have the \"FILTER_NAME\" field");
    } catch (IllegalArgumentException e) {
      throw new RuntimeException("Unexpected exception", e);
    } catch (IllegalAccessException e) {
      // BUGFIX: accurate message; this is a reflective access failure.
      throw new RuntimeException("Unexpected access exception", e);
    } catch (InvocationTargetException e) {
      // BUGFIX: accurate message; the plugin's static method itself threw.
      throw new RuntimeException("Filter plugin method threw an exception", e);
    }
  }
  return filters;
}
Runs through the loaded plugin map and dumps the names description and examples into a map to serialize via the API .
7,209
/**
 * Scans the rows matching one or more standard queries, running one
 * FsckWorker thread per underlying scanner and blocking until every worker
 * of a query finishes before moving to the next query. A progress reporter
 * thread logs status until it is interrupted at the end. An aggregator is
 * still required in each query even though it's ignored.
 * @param queries The queries whose row ranges should be fsck'd.
 * @throws Exception if joining a worker thread is interrupted.
 */
public void runQueries(final List<Query> queries) throws Exception {
  final long start_time = System.currentTimeMillis() / 1000;
  // Background thread that periodically logs scan progress.
  final Thread reporter = new ProgressReporter();
  reporter.start();
  for (final Query query : queries) {
    final List<Scanner> scanners = Internal.getScanners(query);
    final List<Thread> threads = new ArrayList<Thread>(scanners.size());
    int i = 0;
    // One worker per scanner. i is incremented before setName, so workers
    // get 0-based indices but 1-based thread names ("Fsck #1", ...).
    for (final Scanner scanner : scanners) {
      final FsckWorker worker = new FsckWorker(scanner, i++, this.options);
      worker.setName("Fsck #" + i);
      worker.start();
      threads.add(worker);
    }
    // Wait for all workers of this query before starting the next query.
    for (final Thread thread : threads) {
      thread.join();
      LOG.info("Thread [" + thread + "] Finished");
    }
  }
  reporter.interrupt();
  logResults();
  final long duration = (System.currentTimeMillis() / 1000) - start_time;
  LOG.info("Completed fsck in [" + duration + "] seconds");
}
Scans the rows matching one or more standard queries. An aggregator is still required though it's ignored.
7,210
/**
 * Prints the error message and the fsck CLI usage text to stderr, then
 * terminates the JVM with the given exit code. Never returns.
 * @param argp The argument parser whose flag descriptions are appended.
 * @param errmsg The error message printed first; may be empty.
 * @param retval The process exit code.
 */
private static void usage(final ArgP argp, final String errmsg, final int retval) {
  System.err.println(errmsg);
  System.err.println("Usage: fsck"
      + " [flags] [START-DATE [END-DATE] query [queries...]] \n"
      + "Scans the OpenTSDB data table for errors. Use the --full-scan flag\n"
      + "to scan the entire data table or specify a command line query to "
      + "scan a subset.\n"
      + "To see the format in which queries should be written, see the help"
      + " of the 'query' command.\n"
      + "The --fix or --fix-all flags will attempt to fix errors,"
      + " but be careful when using them.\n");
  System.err.print(argp.usage());
  // Exits the JVM; callers rely on this never returning.
  System.exit(retval);
}
Prints usage and exits with the given retval .
7,211
/**
 * Dumps the atomic fsck counters to the log after a completed run: totals
 * processed, each error category found, and how many of each were fixed.
 */
private void logResults() {
  // Overall scan volume.
  LOG.info("Key Values Processed: " + kvs_processed.get());
  LOG.info("Rows Processed: " + rows_processed.get());
  LOG.info("Valid Datapoints: " + valid_datapoints.get());
  LOG.info("Annotations: " + annotations.get());
  // Error categories, each paired with its "fixed/deleted" counter.
  LOG.info("Invalid Row Keys Found: " + bad_key.get());
  LOG.info("Invalid Rows Deleted: " + bad_key_fixed.get());
  LOG.info("Duplicate Datapoints: " + duplicates.get());
  LOG.info("Duplicate Datapoints Resolved: " + duplicates_fixed.get());
  LOG.info("Orphaned UID Rows: " + orphans.get());
  LOG.info("Orphaned UID Rows Deleted: " + orphans_fixed.get());
  LOG.info("Possible Future Objects: " + future.get());
  LOG.info("Unknown Objects: " + unknown.get());
  LOG.info("Unknown Objects Deleted: " + unknown_fixed.get());
  LOG.info("Unparseable Datapoint Values: " + bad_values.get());
  LOG.info("Unparseable Datapoint Values Deleted: " + bad_values_deleted.get());
  LOG.info("Improperly Encoded Floating Point Values: " + value_encoding.get());
  LOG.info("Improperly Encoded Floating Point Values Fixed: " + value_encoding_fixed.get());
  LOG.info("Unparseable Compacted Columns: " + bad_compacted_columns.get());
  LOG.info("Unparseable Compacted Columns Deleted: " + bad_compacted_columns_deleted.get());
  // Variable-length-encoding compression stats.
  LOG.info("Datapoints Qualified for VLE : " + vle.get());
  LOG.info("Datapoints Compressed with VLE: " + vle_fixed.get());
  LOG.info("Bytes Saved with VLE: " + vle_bytes.get());
  // Rollup totals computed from the counters above.
  LOG.info("Total Errors: " + totalErrors());
  LOG.info("Total Correctable Errors: " + correctable());
  LOG.info("Total Errors Fixed: " + totalFixed());
}
Helper to dump the atomic counters to the log after a completed FSCK
7,212
/**
 * CLI entry point executed from the tsdb script. Parses the flags, builds
 * any command line queries, runs fsck over the matching rows (or the whole
 * data table with --full-scan) and exits with 0 when no errors were found,
 * 1 otherwise.
 * @param args Command line arguments.
 * @throws Exception if configuration, table checks or the scan fail.
 */
public static void main(String[] args) throws Exception {
  ArgP argp = new ArgP();
  argp.addOption("--help", "Print help information.");
  CliOptions.addCommon(argp);
  FsckOptions.addDataOptions(argp);
  args = CliOptions.parse(argp, args);
  if (argp.has("--help")) {
    // usage() calls System.exit and never returns.
    usage(argp, "", 0);
  }
  Config config = CliOptions.getConfig(argp);
  final FsckOptions options = new FsckOptions(argp, config);
  final TSDB tsdb = new TSDB(config);
  final ArrayList<Query> queries = new ArrayList<Query>();
  if (args != null && args.length > 0) {
    CliQuery.parseCommandLineQuery(args, tsdb, queries, null, null);
  }
  if (queries.isEmpty() && !argp.has("--full-scan")) {
    usage(argp, "Must supply a query or use the '--full-scan' flag", 1);
  }
  // Fail fast if the required TSDB tables are missing.
  tsdb.checkNecessaryTablesExist().joinUninterruptibly();
  argp = null; // release the parser for GC before the long-running scan
  final Fsck fsck = new Fsck(tsdb, options);
  try {
    if (!queries.isEmpty()) {
      fsck.runQueries(queries);
    } else {
      fsck.runFullTable();
    }
  } finally {
    // Always flush and close the client, even if the scan threw.
    tsdb.shutdown().joinUninterruptibly();
  }
  System.exit(fsck.totalErrors() == 0 ? 0 : 1);
}
The main class executed from the tsdb script
7,213
/**
 * Overridden so that the literal values are resolved directly when the set
 * is small enough AND we are not matching case-insensitively; otherwise the
 * base class resolution is used.
 * @param tsdb The TSDB used for UID resolution and configuration.
 * @return A deferred with the resolved tag key UID.
 */
public Deferred<byte[]> resolveTagkName(final TSDB tsdb) {
  final Config config = tsdb.getConfig();
  // Case-insensitive matching can't use the literal expansion path, and
  // oversized literal sets would blow up the query.
  final boolean expand_literals = !case_insensitive
      && literals.size() <= config.getInt("tsd.query.filter.expansion_limit");
  if (expand_literals) {
    return resolveTags(tsdb, literals);
  }
  return super.resolveTagkName(tsdb);
}
Overridden here so that we can resolve the literal values if we don't have too many of them AND we're not searching with case insensitivity.
7,214
/**
 * Installs the bundled "mygnuplot" shell script from the classpath so
 * Gnuplot can be invoked. Skips the install when the Gnuplot executable was
 * not found or when the script file already exists.
 * @throws IllegalArgumentException if copying the script to disk fails.
 */
public static void installMyGnuPlot() {
  if (!FOUND_GP) {
    LOG.warn("Skipping Gnuplot Shell Script Install since Gnuplot executable was not found");
    return;
  }
  if (!GP_FILE.exists()) {
    if (!GP_FILE.getParentFile().exists()) {
      GP_FILE.getParentFile().mkdirs();
    }
    InputStream is = null;
    FileOutputStream fos = null;
    try {
      is = GnuplotInstaller.class.getClassLoader().getResourceAsStream(GP_BATCH_FILE_NAME);
      // NOTE(review): is.available() is only an estimate for some stream
      // types; for classpath resources it usually reports the full length,
      // but a streaming copy loop would be more robust -- TODO confirm.
      ChannelBuffer buff = new DynamicChannelBuffer(is.available());
      buff.writeBytes(is, is.available());
      is.close();
      is = null; // cleared so the finally block doesn't close it twice
      fos = new FileOutputStream(GP_FILE);
      buff.readBytes(fos, buff.readableBytes());
      fos.close();
      fos = null;
      // Mark the script runnable for the TSD process.
      GP_FILE.setExecutable(true);
    } catch (Exception ex) {
      throw new IllegalArgumentException("Failed to install mygnuplot", ex);
    } finally {
      // Best-effort cleanup when the copy failed part way through.
      if (is != null) try { is.close(); } catch (Exception x) {}
      if (fos != null) try { fos.close(); } catch (Exception x) {}
    }
  }
}
Installs the mygnuplot shell file
7,215
/**
 * Pre-validates the data point's fields before storage: non-empty metric,
 * positive timestamp, non-empty value and at least one tag.
 * @param details Optional list that collects a per-failure detail map; may
 * be null when the caller doesn't need details.
 * @return true if every field is valid, false on the first failure.
 */
public boolean validate(final List<Map<String, Object>> details) {
  if (this.getMetric() == null || this.getMetric().isEmpty()) {
    return recordValidationFailure(details, "Metric name was empty");
  }
  if (this.getTimestamp() <= 0) {
    return recordValidationFailure(details, "Invalid timestamp");
  }
  if (this.getValue() == null || this.getValue().isEmpty()) {
    return recordValidationFailure(details, "Empty value");
  }
  if (this.getTags() == null || this.getTags().size() < 1) {
    return recordValidationFailure(details, "Missing tags");
  }
  return true;
}

/**
 * Records a validation failure in {@code details} (when provided), logs a
 * warning with this data point appended, and returns false.
 */
private boolean recordValidationFailure(final List<Map<String, Object>> details,
    final String message) {
  if (details != null) {
    details.add(getHttpDetails(message));
  }
  LOG.warn(message + ": " + this);
  return false;
}
Pre-validation of the various fields to make sure they're valid.
7,216
/**
 * Builds a detail map pairing an error message with this data point, for
 * the HTTP put data point RPC handler to return to the caller.
 * @param message The error message to report.
 * @return A mutable map with "error" and "datapoint" entries.
 */
protected final Map<String, Object> getHttpDetails(final String message) {
  final Map<String, Object> details = new HashMap<String, Object>();
  details.put("datapoint", this);
  details.put("error", message);
  return details;
}
Creates a map with an error message and this data point to return to the HTTP put data point RPC handler
7,217
/**
 * Appends one bucket of this histogram in human-readable ASCII form, e.g.
 * "[low-high): count". The last bucket's upper bound prints as "Inf".
 * @param out The buffer to append to.
 * @param i The bucket index.
 */
final void printAsciiBucket(final StringBuilder out, final int i) {
  out.append('[').append(bucketLowInterval(i)).append('-');
  if (i == buckets.length - 1) {
    // The final bucket is open-ended.
    out.append("Inf");
  } else {
    out.append(bucketHighInterval(i));
  }
  out.append("): ").append(buckets[i]).append('\n');
}
Prints a bucket of this histogram in a human readable ASCII format .
7,218
/**
 * Finds the index of the bucket the given value falls into, clamped to the
 * last bucket for values beyond the configured range.
 * @param value The sample value to bucket.
 * @return The bucket index in [0, buckets.length).
 */
private int bucketIndexFor(final int value) {
  // Values below the cutoff land in fixed-width linear buckets.
  if (value < cutoff) {
    return value / interval;
  }
  // Beyond the cutoff, bucket width grows exponentially with the excess.
  final int bucket = num_linear_buckets
      + log2rounddown((value - cutoff) >> exp_bucket_shift);
  return bucket >= buckets.length ? buckets.length - 1 : bucket;
}
Finds the index of the bucket in which the given value should be .
7,219
/**
 * Primes the per-series iterators: for each series with at least one value,
 * caches the first data point and creates its emitter; exhausted series get
 * null placeholders.
 */
private void setupEmitters() {
  for (int i = 0; i < dps.length; i++) {
    iterators[i] = dps[i].iterator();
    if (iterators[i].hasNext()) {
      current_values[i] = iterators[i].next();
      emitter_values[i] = new ExpressionDataPoint(dps[i]);
      emitter_values[i].setIndex(i);
    } else {
      // Empty series: mark both slots so downstream code skips them.
      current_values[i] = null;
      emitter_values[i] = null;
    }
  }
}
Iterates over the values and sets up the current and emitter values
7,220
/**
 * Validates every POJO in the collection, wrapping any failure with the
 * field name and the offending element's index so the member's own error
 * message is preserved as the cause.
 * @param collection The POJOs to validate.
 * @param name The field name used in the error message.
 * @throws IllegalArgumentException if any element fails validation.
 */
<T extends Validatable> void validateCollection(final Collection<T> collection,
    final String name) {
  int index = 0;
  for (final T element : collection) {
    try {
      element.validate();
    } catch (final IllegalArgumentException e) {
      throw new IllegalArgumentException(
          "Invalid " + name + " at index " + index, e);
    }
    index++;
  }
}
Iterate through a field that is a collection of POJOs and validate each of them . Inherit member POJO s error message .
7,221
/**
 * Validates a single POJO, prefixing any failure with the field name while
 * keeping the member's own error as the cause.
 * @param pojo The POJO to validate.
 * @param name The field name used in the error message.
 * @throws IllegalArgumentException if the POJO fails validation.
 */
<T extends Validatable> void validatePOJO(final T pojo, final String name) {
  try {
    pojo.validate();
  } catch (final IllegalArgumentException e) {
    // Re-wrap so the caller knows which member was invalid.
    throw new IllegalArgumentException("Invalid " + name, e);
  }
}
Validate a single POJO.
7,222
/**
 * Gracefully shuts down every registered HTTP plugin and RPC plugin,
 * clearing the singleton reference first so no new calls are routed here.
 * @return A deferred that completes once all plugin shutdowns finish, in
 * registration order.
 */
public Deferred<ArrayList<Object>> shutdown() {
  INSTANCE.set(null);
  final Collection<Deferred<Object>> deferreds = Lists.newArrayList();
  if (http_plugin_commands != null) {
    for (final HttpRpcPlugin plugin : http_plugin_commands.values()) {
      deferreds.add(plugin.shutdown());
    }
  }
  if (rpc_plugins != null) {
    for (final RpcPlugin rpc : rpc_plugins) {
      deferreds.add(rpc.shutdown());
    }
  }
  return Deferred.groupInOrder(deferreds);
}
Called to gracefully shutdown the plugin . Implementations should close any IO they have open
7,223
/**
 * Routes a telnet-style RPC to its handler and executes it. Unrecognized
 * commands fall through to the "unknown command" handler.
 * @param chan The channel the command arrived on.
 * @param command The split command line; element 0 is the command name.
 */
private void handleTelnetRpc(final Channel chan, final String[] command) {
  TelnetRpc handler = rpc_manager.lookupTelnetRpc(command[0]);
  if (handler == null) {
    handler = unknown_cmd;
  }
  // Counted even for unknown commands.
  telnet_rpcs_received.incrementAndGet();
  handler.execute(tsdb, chan, command);
}
Finds the right handler for a telnet - style RPC and executes it .
7,224
/**
 * Creates the query object able to handle the given request, based on its
 * URI: plugin paths get an HttpRpcPluginQuery, everything else a built-in
 * HttpQuery. The matching received-counter is bumped as a side effect.
 * @param tsdb The TSDB the query will run against.
 * @param request The incoming HTTP request.
 * @param chan The channel the request arrived on.
 * @return A query wrapper for the request.
 * @throws BadRequestException if the URI is empty or not rooted at '/'.
 */
private AbstractHttpQuery createQueryInstance(final TSDB tsdb,
    final HttpRequest request, final Channel chan) throws BadRequestException {
  final String uri = request.getUri();
  if (Strings.isNullOrEmpty(uri)) {
    throw new BadRequestException("Request URI is empty");
  }
  if (uri.charAt(0) != '/') {
    throw new BadRequestException("Request URI doesn't start with a slash");
  }
  if (rpc_manager.isHttpRpcPluginPath(uri)) {
    http_plugin_rpcs_received.incrementAndGet();
    return new HttpRpcPluginQuery(tsdb, request, chan);
  }
  http_rpcs_received.incrementAndGet();
  return new HttpQuery(tsdb, request, chan);
}
Using the request URI creates a query instance capable of handling the given request .
7,225
/**
 * Applies the CORS configuration to a request, whether a built-in RPC or a
 * user plugin call.
 * @param req The raw HTTP request (read for the Origin header).
 * @param query The query wrapper whose response headers are populated.
 * @return true if the request was an OPTIONS preflight that has already
 * been fully answered; false if normal processing should continue.
 * @throws BadRequestException if OPTIONS arrives while CORS is disabled (or
 * without an Origin header), or if the origin domain is not allowed.
 */
private boolean applyCorsConfig(final HttpRequest req, final AbstractHttpQuery query)
    throws BadRequestException {
  final String domain = req.headers().get("Origin");
  // Only act on a preflight OPTIONS call, or on a cross-origin request
  // while a CORS domain list is configured.
  if (query.method() == HttpMethod.OPTIONS
      || (cors_domains != null && domain != null && !domain.isEmpty())) {
    if (cors_domains == null || domain == null || domain.isEmpty()) {
      // OPTIONS without CORS enabled / without an Origin header.
      throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
          "Method not allowed",
          "The HTTP method [" + query.method().getName() + "] is not permitted");
    }
    // NOTE(review): membership is tested with toUpperCase(), so the allowed
    // set is presumably stored upper-cased -- confirm where cors_domains is
    // populated.
    if (cors_domains.contains("*") || cors_domains.contains(domain.toUpperCase())) {
      query.response().headers().add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, domain);
      query.response().headers().add(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS,
          "GET, POST, PUT, DELETE");
      query.response().headers().add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, cors_headers);
      if (query.method() == HttpMethod.OPTIONS) {
        // Preflight fully handled here; tell the caller to stop processing.
        query.sendStatusOnly(HttpResponseStatus.OK);
        return true;
      }
    } else {
      // NOTE(review): the denial deliberately uses status OK rather than an
      // error status -- presumably so browsers surface the CORS rejection
      // itself; confirm this is intended before changing.
      throw new BadRequestException(HttpResponseStatus.OK,
          "CORS domain not allowed",
          "The domain [" + domain + "] is not permitted access");
    }
  }
  return false;
}
Helper method to apply CORS configuration to a request either a built - in RPC or a user plugin .
7,226
/**
 * Returns the directory path stored in the given system property.
 * @param prop The system property to read.
 * @return The directory path, always ending in '/'.
 * @throws IllegalStateException if the property is unset, empty, or does
 * not end with a '/' terminator.
 */
static String getDirectoryFromSystemProp(final String prop) {
  final String dir = System.getProperty(prop);
  if (dir == null) {
    throw new IllegalStateException("System property `" + prop + "' is not set.");
  }
  if (dir.isEmpty()) {
    throw new IllegalStateException("System property `" + prop + "' is empty.");
  }
  if (dir.charAt(dir.length() - 1) != '/') {
    throw new IllegalStateException("System property `" + prop + "' is not terminated with `/'.");
  }
  return dir;
}
Returns the directory path stored in the given system property .
7,227
/**
 * Adds a child branch to the local branch set if it isn't already present,
 * lazily initializing the set on first use.
 * @param branch The child branch to add; must not be null.
 * @return true if the branch was newly added, false if it already existed.
 * @throws IllegalArgumentException if the branch is null.
 */
public boolean addChild(final Branch branch) {
  if (branch == null) {
    throw new IllegalArgumentException("Null branches are not allowed");
  }
  if (branches == null) {
    branches = new TreeSet<Branch>();
  }
  // TreeSet#add already reports whether the element was newly inserted,
  // using the same ordering-based equality as contains().
  return branches.add(branch);
}
Adds a child branch to the local branch set if it doesn't exist. Also initializes the set if it hasn't been initialized yet.
7,228
/**
 * Adds a leaf to this branch, watching for hash collisions. When another
 * leaf with the same hash but a different TSUID already exists, the
 * collision is recorded on the tree (if given) and logged, and the new leaf
 * is NOT stored.
 * @param leaf The leaf to add; must not be null.
 * @param tree Optional tree used to record collisions; may be null.
 * @return true if the leaf was newly added, false if a leaf with the same
 * hash was already present (identical or colliding).
 * @throws IllegalArgumentException if the leaf is null.
 */
public boolean addLeaf(final Leaf leaf, final Tree tree) {
  if (leaf == null) {
    throw new IllegalArgumentException("Null leaves are not allowed");
  }
  if (leaves == null) {
    // Lazily initialize the leaf map keyed by leaf hash.
    leaves = new HashMap<Integer, Leaf>();
  }
  final Leaf existing = leaves.get(leaf.hashCode());
  if (existing == null) {
    leaves.put(leaf.hashCode(), leaf);
    return true;
  }
  if (!existing.getTsuid().equals(leaf.getTsuid())) {
    // Same hash, different time series: a genuine collision.
    if (tree != null) {
      tree.addCollision(leaf.getTsuid(), existing.getTsuid());
    }
    LOG.warn("Incoming TSUID [" + leaf.getTsuid()
        + "] collided with existing TSUID [" + existing.getTsuid()
        + "] on display name [" + existing.getDisplayName() + "]");
  }
  return false;
}
Adds a leaf to the local branch looking for collisions
7,229
/**
 * Attempts to write the branch definition and, optionally, the child leaves
 * to storage via CompareAndSet calls. Each returned deferred is a boolean
 * for whether that individual CAS succeeded; false results are expected and
 * OK when the branch definition already exists (no re-write needed). Leaves
 * return false if there was a collision.
 * @param tsdb The TSDB providing the HBase client and tree table.
 * @param tree The tree used by leaves to record collisions; may be null.
 * @param store_leaves Whether the child leaves should also be written.
 * @return A group of CAS results: the branch write plus one per leaf.
 * @throws IllegalArgumentException if the tree ID is missing or invalid.
 */
public Deferred<ArrayList<Boolean>> storeBranch(final TSDB tsdb, final Tree tree,
    final boolean store_leaves) {
  // Tree IDs are stored in two bytes, hence the 1..65535 range.
  if (tree_id < 1 || tree_id > 65535) {
    throw new IllegalArgumentException("Missing or invalid tree ID");
  }
  final ArrayList<Deferred<Boolean>> storage_results =
      new ArrayList<Deferred<Boolean>>(leaves != null ? leaves.size() + 1 : 1);
  final byte[] row = this.compileBranchId();
  final byte[] storage_data = toStorageJson();
  final PutRequest put = new PutRequest(tsdb.treeTable(), row, Tree.TREE_FAMILY(),
      BRANCH_QUALIFIER, storage_data);
  put.setBufferable(true);
  // CAS against an empty value: only succeeds when the branch row does not
  // exist yet, so an existing definition is left untouched.
  storage_results.add(tsdb.getClient().compareAndSet(put, new byte[0]));
  if (store_leaves && leaves != null && !leaves.isEmpty()) {
    for (final Leaf leaf : leaves.values()) {
      storage_results.add(leaf.storeLeaf(tsdb, row, tree));
    }
  }
  return Deferred.group(storage_results);
}
Attempts to write the branch definition and optionally child leaves to storage via CompareAndSets. Each returned deferred will be a boolean regarding whether the CAS call was successful or not. This will be a mix of the branch call and leaves. Some of these may be false, which is OK, because if the branch definition already exists we don't need to re-write it. Leaves will return false if there was a collision.
7,230
/**
 * Configures an HBase scanner to fetch the requested branch and all of its
 * direct child branches. The stop key is the branch id with its tree-id
 * portion incremented by one, and a row key regex restricts matches to rows
 * equal to the branch id or exactly INT_WIDTH bytes deeper; deeper branches
 * are ignored.
 * @param tsdb The TSDB providing the HBase client and tree table.
 * @param branch_id The row key of the branch to scan from.
 * @return A scanner covering the branch and its immediate children.
 */
private static Scanner setupBranchScanner(final TSDB tsdb, final byte[] branch_id) {
  final byte[] start = branch_id;
  final byte[] end = Arrays.copyOf(branch_id, branch_id.length);
  final Scanner scanner = tsdb.getClient().newScanner(tsdb.treeTable());
  scanner.setStartKey(start);
  // Extract the tree id (right-aligned into an int buffer), increment it,
  // then write it back so the stop key is the first row of the next tree.
  byte[] tree_id = new byte[INT_WIDTH];
  for (int i = 0; i < Tree.TREE_ID_WIDTH(); i++) {
    tree_id[i + (INT_WIDTH - Tree.TREE_ID_WIDTH())] = end[i];
  }
  int id = Bytes.getInt(tree_id) + 1;
  tree_id = Bytes.fromInt(id);
  for (int i = 0; i < Tree.TREE_ID_WIDTH(); i++) {
    end[i] = tree_id[i + (INT_WIDTH - Tree.TREE_ID_WIDTH())];
  }
  scanner.setStopKey(end);
  scanner.setFamily(Tree.TREE_FAMILY());
  // Build a binary-safe regex: "(?s)" lets '.' match any byte, "\Q...\E"
  // quotes the literal key prefix, then optionally exactly INT_WIDTH more
  // bytes (one child level) before end-of-key.
  final StringBuilder buf = new StringBuilder((start.length * 6) + 20);
  buf.append("(?s)" + "^\\Q");
  for (final byte b : start) {
    // Map each key byte to the matching char so binary keys survive.
    buf.append((char) (b & 0xFF));
  }
  buf.append("\\E(?:.{").append(INT_WIDTH).append("})?$");
  scanner.setKeyRegexp(buf.toString(), CHARSET);
  return scanner;
}
Configures an HBase scanner to fetch the requested branch and all child branches . It uses a row key regex filter to match any rows starting with the given branch and another INT_WIDTH bytes deep . Deeper branches are ignored .
7,231
/**
 * Deletes all data generated by the given tree, and optionally the tree
 * definition itself, blocking until the delete completes.
 * @param tree_id The id of the tree to purge.
 * @param delete_definition Whether the tree definition is removed as well.
 * @return Always 0 (CLI-style success code).
 * @throws Exception if the deletion fails.
 */
public int purgeTree(final int tree_id, final boolean delete_definition) throws Exception {
  LOG.info(delete_definition
      ? "Deleting tree branches and definition for: " + tree_id
      : "Deleting tree branches for: " + tree_id);
  Tree.deleteTree(tsdb, tree_id, delete_definition).joinUninterruptibly();
  LOG.info("Completed tree deletion for: " + tree_id);
  return 0;
}
Attempts to delete all data generated by the given tree and optionally the tree definition itself .
7,232
/**
 * Builds a scanner over the meta table covering the metric UID range
 * assigned to this worker thread, returning only the "ts_meta" qualifier of
 * the "name" family.
 * @return A scanner spanning [start_id, end_id) metric UID rows.
 * @throws HBaseException if the scanner cannot be created.
 */
private Scanner getScanner() throws HBaseException {
  final short metric_width = TSDB.metrics_width();
  // Metric UID row keys are the trailing `metric_width` bytes of the id.
  final byte[] start_row = Arrays.copyOfRange(Bytes.fromLong(start_id), 8 - metric_width, 8);
  final byte[] end_row = Arrays.copyOfRange(Bytes.fromLong(end_id), 8 - metric_width, 8);
  LOG.debug("[" + thread_id + "] Start row: " + UniqueId.uidToString(start_row));
  LOG.debug("[" + thread_id + "] End row: " + UniqueId.uidToString(end_row));
  final Scanner scanner = tsdb.getClient().newScanner(tsdb.metaTable());
  scanner.setStartKey(start_row);
  scanner.setStopKey(end_row);
  scanner.setFamily("name".getBytes(CHARSET));
  scanner.setQualifier("ts_meta".getBytes(CHARSET));
  return scanner;
}
Returns a scanner set to scan the range configured for this thread
7,233
/**
 * Looks up the RollupInterval configured for the given interval string.
 * @param interval The interval string, e.g. "1h".
 * @return The matching interval configuration.
 * @throws IllegalArgumentException if the interval is null or empty.
 * @throws NoSuchRollupForIntervalException if no interval matches.
 */
public RollupInterval getRollupInterval(final String interval) {
  if (interval == null || interval.isEmpty()) {
    throw new IllegalArgumentException("Interval cannot be null or empty");
  }
  final RollupInterval match = forward_intervals.get(interval);
  if (match == null) {
    throw new NoSuchRollupForIntervalException(interval);
  }
  return match;
}
Fetches the RollupInterval corresponding to the forward interval string map
7,234
/**
 * Fetches the RollupInterval objects matching the given interval in
 * seconds. The result is ordered from the largest configured interval that
 * evenly divides the requested one down to the smallest, so callers can
 * fall back to the next best rollup table. The returned list is never
 * empty. E.g. for 1 day it may return intervals in the order: 1 day,
 * 1 hour, 10 minutes, 1 minute.
 * @param interval The requested interval in seconds; must be positive.
 * @param str_interval The original string form, used for logging only.
 * @return Matching intervals, best match first; never empty.
 * @throws IllegalArgumentException if interval is not positive.
 * @throws NoSuchRollupForIntervalException if no configured interval
 * divides the requested one.
 */
public List<RollupInterval> getRollupInterval(final long interval,
    final String str_interval) {
  if (interval <= 0) {
    // BUGFIX: the previous message ("Interval cannot be null or empty")
    // described a string parameter; this guards a numeric lower bound.
    throw new IllegalArgumentException("Interval must be greater than zero");
  }
  // Reverse-ordered map so the largest divisor comes first.
  final Map<Long, RollupInterval> rollups =
      new TreeMap<Long, RollupInterval>(Collections.reverseOrder());
  boolean right_match = false;
  for (RollupInterval rollup : forward_intervals.values()) {
    if (rollup.getIntervalSeconds() == interval) {
      rollups.put((long) rollup.getIntervalSeconds(), rollup);
      right_match = true;
    } else if (interval % rollup.getIntervalSeconds() == 0) {
      // Any interval that evenly divides the request is a usable fallback.
      rollups.put((long) rollup.getIntervalSeconds(), rollup);
    }
  }
  if (rollups.isEmpty()) {
    throw new NoSuchRollupForIntervalException(Long.toString(interval));
  }
  List<RollupInterval> best_matches = new ArrayList<RollupInterval>(rollups.values());
  if (!right_match) {
    LOG.warn("No such rollup interval found, " + str_interval + ". So falling "
        + "back to the next best match " + best_matches.get(0).getInterval());
  }
  return best_matches;
}
Fetches the RollupInterval objects corresponding to the integer interval in seconds. It returns a list of the matching RollupInterval followed by the next best matches, in order, which helps when searching the next best rollup tables. It is guaranteed to return a non-empty list. For example, if the interval is 1 day, it may return RollupInterval objects in the order: 1 day, 1 hour, 10 minutes, 1 minute.
7,235
/**
 * Looks up the RollupInterval configured for the given rollup or
 * pre-aggregate table name.
 * @param table The rollup/pre-agg table name.
 * @return The matching interval configuration.
 * @throws IllegalArgumentException if the table name is null or empty.
 * @throws NoSuchRollupForTableException if no interval uses that table.
 */
public RollupInterval getRollupIntervalForTable(final String table) {
  if (table == null || table.isEmpty()) {
    throw new IllegalArgumentException("The table name cannot be null or empty");
  }
  final RollupInterval match = reverse_intervals.get(table);
  if (match == null) {
    throw new NoSuchRollupForTableException(table);
  }
  return match;
}
Fetches the RollupInterval corresponding to the rollup or pre - agg table name .
7,236
/**
 * Verifies that the temporal and group-by tables for every configured
 * rollup interval exist, blocking until all checks complete.
 * @param tsdb The TSDB providing the HBase client.
 * @throws RuntimeException wrapping the first failed table check.
 */
public void ensureTablesExist(final TSDB tsdb) {
  // Two checks per interval: the temporal table and the group-by table.
  final List<Deferred<Object>> deferreds =
      new ArrayList<Deferred<Object>>(forward_intervals.size() * 2);
  for (RollupInterval interval : forward_intervals.values()) {
    deferreds.add(tsdb.getClient().ensureTableExists(interval.getTemporalTable()));
    deferreds.add(tsdb.getClient().ensureTableExists(interval.getGroupbyTable()));
  }
  try {
    Deferred.group(deferreds).joinUninterruptibly();
  } catch (DeferredGroupException e) {
    // Surface the underlying table failure, not the group wrapper.
    throw new RuntimeException(e.getCause());
  } catch (InterruptedException e) {
    LOG.warn("Interrupted", e);
    // Re-assert the interrupt flag for callers higher up the stack.
    Thread.currentThread().interrupt();
  } catch (Exception e) {
    throw new RuntimeException("Unexpected exception", e);
  }
}
Makes sure each of the rollup tables exists
7,237
/**
 * Verifies that the given path is a usable directory, optionally creating
 * it and optionally requiring write access.
 * @param dir The directory path to check; must not be empty.
 * @param need_write Whether the directory must be writable.
 * @param create Whether to create the directory (and parents) if missing.
 * @throws IllegalArgumentException if the path is empty, missing (and not
 * created), not a directory, or not writable when required.
 */
public static void checkDirectory(final String dir, final boolean need_write,
    final boolean create) {
  if (dir.isEmpty()) {
    throw new IllegalArgumentException("Directory path is empty");
  }
  final File f = new File(dir);
  // mkdirs() is only attempted when creation was requested.
  if (!f.exists() && !(create && f.mkdirs())) {
    throw new IllegalArgumentException("No such directory [" + dir + "]");
  }
  if (!f.isDirectory()) {
    throw new IllegalArgumentException("Not a directory [" + dir + "]");
  }
  if (need_write && !f.canWrite()) {
    throw new IllegalArgumentException("Cannot write to directory [" + dir + "]");
  }
}
Verifies a directory and, if configured, checks whether it's writable.
7,238
/**
 * Deserializes a JSON formatted string into the given (possibly generic)
 * type.
 * @param <T> The type to deserialize to.
 * @param json The JSON string to parse.
 * @param type A Jackson type reference describing T.
 * @return The deserialized object.
 * @throws IllegalArgumentException if the input is null/empty, the type
 * reference is missing, or the JSON is malformed or does not map to T.
 * @throws JSONException on I/O failures from the underlying parser.
 */
@SuppressWarnings("unchecked")
public static final <T> T parseToObject(final String json, final TypeReference<T> type) {
  if (json == null || json.isEmpty())
    throw new IllegalArgumentException("Incoming data was null or empty");
  if (type == null)
    throw new IllegalArgumentException("Missing type reference");
  try {
    return (T) jsonMapper.readValue(json, type);
  } catch (JsonParseException e) {
    // Malformed JSON: the caller supplied bad input.
    throw new IllegalArgumentException(e);
  } catch (JsonMappingException e) {
    // Well-formed JSON that doesn't fit the target type.
    throw new IllegalArgumentException(e);
  } catch (IOException e) {
    throw new JSONException(e);
  }
}
Deserializes a JSON formatted string to a specific class type
7,239
/**
 * Serializes the given object to a JSON string.
 * @param object The object to serialize; must not be null.
 * @return The JSON representation of the object.
 * @throws IllegalArgumentException if the object was null.
 * @throws JSONException if Jackson fails to serialize the object.
 */
public static final String serializeToString(final Object object) {
  if (object == null)
    throw new IllegalArgumentException("Object was null");
  try {
    return jsonMapper.writeValueAsString(object);
  } catch (JsonProcessingException e) {
    throw new JSONException(e);
  }
}
Serializes the given object to a JSON string
7,240
/**
 * Serializes the given object to a JSON byte array (UTF-8 per Jackson's
 * default encoding).
 * @param object The object to serialize; must not be null.
 * @return The JSON representation of the object as bytes.
 * @throws IllegalArgumentException if the object was null.
 * @throws JSONException if Jackson fails to serialize the object.
 */
public static final byte[] serializeToBytes(final Object object) {
  if (object == null)
    throw new IllegalArgumentException("Object was null");
  try {
    return jsonMapper.writeValueAsBytes(object);
  } catch (JsonProcessingException e) {
    throw new JSONException(e);
  }
}
Serializes the given object to a JSON byte array
7,241
/**
 * Compiles the expression: validates that every variable named in the
 * expression has a matching query result, recursively compiles any nested
 * ExpressionIterators, builds the set-operation iterator (intersection or
 * union) and pre-allocates the merged output data points.
 *
 * @throws IllegalArgumentException if no results were set, if fewer results
 * than variables are available, or if a variable has no matching result.
 */
public void compile() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Compiling " + this);
  }
  // sanity checks: we need at least one result and at least one per variable
  if (results.size() < 1) {
    throw new IllegalArgumentException("No results for any variables in "
        + "the expression: " + this);
  }
  if (results.size() < names.size()) {
    throw new IllegalArgumentException("Not enough query results ["
        + results.size()
        + " total results found] for the expression variables ["
        + names.size() + " expected] " + this);
  }
  // match every variable to its result set (keys are lower-cased)
  for (final String variable : names) {
    final ITimeSyncedIterator it = results.get(variable.toLowerCase());
    if (it == null) {
      throw new IllegalArgumentException("Missing results for variable "
          + variable);
    }
    if (it instanceof ExpressionIterator) {
      // nested expression: it must be compiled before we can iterate it
      ((ExpressionIterator) it).compile();
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Matched variable " + variable + " to " + it);
    }
  }
  // pick the set-operation implementation; UNION is the last case so the
  // missing break is harmless (but fragile if new operators are added)
  switch (set_operator) {
  case INTERSECTION:
    iterator = new IntersectionIterator(id, results,
        intersect_on_query_tagks, include_agg_tags);
    break;
  case UNION:
    iterator = new UnionIterator(id, results, intersect_on_query_tagks,
        include_agg_tags);
  }
  iteration_results = iterator.getResults();
  // merge, per series index, the data points of every variable's results
  dps = new ExpressionDataPoint[iterator.getSeriesSize()];
  for (int i = 0; i < iterator.getSeriesSize(); i++) {
    final Iterator<Entry<String, ExpressionDataPoint[]>> it =
        iteration_results.entrySet().iterator();
    Entry<String, ExpressionDataPoint[]> entry = it.next();
    // seed from the first variable's series (or an empty dp if missing)
    if (entry.getValue() == null || entry.getValue()[i] == null) {
      dps[i] = new ExpressionDataPoint();
    } else {
      dps[i] = new ExpressionDataPoint(entry.getValue()[i]);
    }
    // fold the remaining variables' series into the same slot
    while (it.hasNext()) {
      entry = it.next();
      if (entry.getValue() != null && entry.getValue()[i] != null) {
        dps[i].add(entry.getValue()[i]);
      }
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Finished compiling " + this);
  }
}
Builds the iterator by computing the intersection of all series in all sets and sets up the output .
7,242
/**
 * Attempts to load the given JAR file into the class path.
 * @param jar Path to a file whose name must end in ".jar" (case-insensitive).
 * @throws IllegalArgumentException if the path does not end in ".jar".
 * @throws FileNotFoundException if the file does not exist.
 * @throws IOException if the file could not be added to the class loader.
 * @throws SecurityException, NoSuchMethodException, IllegalAccessException,
 * InvocationTargetException if reflective access to the class loader fails.
 */
public static void loadJAR(String jar) throws IOException, SecurityException,
    IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
    InvocationTargetException {
  if (!jar.toLowerCase().endsWith(".jar")) {
    throw new IllegalArgumentException("File specified did not end with .jar");
  }
  final File file = new File(jar);
  if (!file.exists()) {
    throw new FileNotFoundException(jar);
  }
  addFile(file);
}
Attempts to load the given jar into the class path
7,243
/**
 * Recursively searches for JAR files starting at the given file or directory.
 * @param file A file (checked for a .jar suffix) or a directory to descend into.
 * @param jars Output list that found JAR files are appended to.
 */
private static void searchForJars(final File file, List<File> jars) {
  if (file.isFile()) {
    // leaf: record it if it looks like a JAR
    if (file.getAbsolutePath().toLowerCase().endsWith(".jar")) {
      jars.add(file);
      LOG.debug("Found a jar: " + file.getAbsolutePath());
    }
    return;
  }
  if (!file.isDirectory()) {
    return;
  }
  final File[] entries = file.listFiles();
  if (entries == null) {
    // listFiles() returns null when the directory can't be read
    LOG.warn("Access denied to directory: " + file.getAbsolutePath());
    return;
  }
  for (final File entry : entries) {
    searchForJars(entry, jars);
  }
}
Recursive method to search for JAR files starting at a given level
7,244
/**
 * Attempts to add the given file to the class loader by converting it to a
 * URL and delegating to {@code addURL}.
 * @param f The file to add.
 */
private static void addFile(File f) throws IOException, SecurityException,
    IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
    InvocationTargetException {
  addURL(f.toURI().toURL());
}
Attempts to add the given file object to the class loader
7,245
/**
 * Multiplies each data point in the series by the scale factor, keeping
 * integer math when both the point and the factor are whole numbers.
 * @param points The series to scale.
 * @param scale_factor The multiplier to apply to every value.
 * @return A new series wrapping the scaled points.
 */
private DataPoints scale(final DataPoints points, final double scale_factor) {
  final List<DataPoint> scaled = new ArrayList<DataPoint>();
  // only keep integer math when the factor is a whole, finite number
  final boolean integer_factor = !Double.isInfinite(scale_factor)
      && scale_factor == Math.floor(scale_factor);
  final SeekableView it = points.iterator();
  while (it.hasNext()) {
    final DataPoint dp = it.next();
    if (dp.isInteger() && integer_factor) {
      scaled.add(MutableDataPoint.ofLongValue(dp.timestamp(),
          (long) scale_factor * dp.longValue()));
    } else {
      scaled.add(MutableDataPoint.ofDoubleValue(dp.timestamp(),
          scale_factor * dp.toDouble()));
    }
  }
  return new PostAggregatedDataPoints(points,
      scaled.toArray(new DataPoint[scaled.size()]));
}
Multiplies each data point in the series by the scale factor maintaining integers if both the data point and scale are integers .
7,246
/**
 * Parses a suggestion query from the request body.
 * @return A map of suggestion parameters from the JSON body.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public HashMap<String, String> parseSuggestV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    // parse the content we already fetched instead of re-reading it from
    // the query (the original called query.getContent() a second time)
    return JSON.parseToObject(json,
        new TypeReference<HashMap<String, String>>() {});
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a suggestion query
7,247
/**
 * Parses the metric/tagk/tagv type and new name for a UID rename request.
 * @return A map with the rename parameters from the JSON body.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public HashMap<String, String> parseUidRenameV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TR_HASH_MAP);
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses metric tagk or tagv and name to rename UID
7,248
/**
 * Parses a time series data query from the request body.
 * @return The parsed query with duplicate sub-queries removed.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public TSQuery parseQueryV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    final TSQuery data_query = JSON.parseToObject(json, TSQuery.class);
    // drop duplicate sub-queries while preserving their original order
    final Set<TSSubQuery> deduped =
        new LinkedHashSet<TSSubQuery>(data_query.getQueries());
    data_query.getQueries().clear();
    data_query.getQueries().addAll(deduped);
    return data_query;
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a timeseries data query
7,249
/**
 * Parses a last data point query from the request body.
 * @return The parsed query object.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public LastPointQuery parseLastPointQueryV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, LastPointQuery.class);
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a last data point query
7,250
/**
 * Parses a single UIDMeta object from the request body.
 * @return The parsed UIDMeta.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public UIDMeta parseUidMetaV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, UIDMeta.class);
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a single UIDMeta object
7,251
/**
 * Parses a single TSMeta object from the request body.
 * @return The parsed TSMeta.
 * @throws BadRequestException if the body is missing or the JSON is invalid.
 */
public TSMeta parseTSMetaV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TSMeta.class);
  } catch (IllegalArgumentException iae) {
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a single TSMeta object
7,252
/**
 * Parses a single TreeRule object from the request body.
 * @return The parsed tree rule.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public TreeRule parseTreeRuleV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TreeRule.class);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a single TreeRule object
7,253
/**
 * Parses one or more tree rules from the request body.
 * @return The parsed list of tree rules.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public List<TreeRule> parseTreeRulesV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TR_TREE_RULES);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses one or more tree rules
7,254
/**
 * Parses a tree ID and optional list of TSUIDs used to search for
 * collisions or not-matched TSUIDs.
 * @return The parsed parameter map.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public Map<String, Object> parseTreeTSUIDsListV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TR_HASH_MAP_OBJ);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a tree ID and optional list of TSUIDs to search for collisions or not matched TSUIDs .
7,255
/**
 * Parses an annotation object from the request body.
 * @return The parsed annotation.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public Annotation parseAnnotationV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, Annotation.class);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses an annotation object
7,256
/**
 * Parses a list of annotation objects from the request body.
 * @return The parsed annotations.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public List<Annotation> parseAnnotationsV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, TR_ANNOTATIONS);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a list of annotation objects
7,257
/**
 * Parses a bulk annotation deletion query object from the request body.
 * @return The parsed deletion query.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public AnnotationBulkDelete parseAnnotationBulkDeleteV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, AnnotationBulkDelete.class);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a bulk annotation deletion query object
7,258
/**
 * Parses a SearchQuery request from the request body.
 * @return The parsed search query.
 * @throws BadRequestException if the body is missing or the JSON could not
 * be parsed.
 */
public SearchQuery parseSearchQueryV1() {
  final String json = query.getContent();
  if (json == null || json.isEmpty()) {
    throw new BadRequestException(HttpResponseStatus.BAD_REQUEST,
        "Missing message content",
        "Supply valid JSON formatted data in the body of your request");
  }
  try {
    return JSON.parseToObject(json, SearchQuery.class);
  } catch (IllegalArgumentException iae) {
    // map parse failures to a 400 for consistency with the sibling parse
    // methods; previously the raw IllegalArgumentException leaked out
    throw new BadRequestException("Unable to parse the given JSON", iae);
  }
}
Parses a SearchQuery request
7,259
/**
 * Formats the running configuration for the caller, masking any value whose
 * key contains "PASS" (case-insensitive) so passwords are not exposed.
 * @param config The configuration to serialize.
 * @return A serialized buffer of the sorted config map.
 */
public ChannelBuffer formatConfigV1(final Config config) {
  final TreeMap<String, String> map =
      new TreeMap<String, String>(config.getMap());
  for (final Map.Entry<String, String> entry : map.entrySet()) {
    if (entry.getKey().toUpperCase().contains("PASS")) {
      // mask secrets in place; setValue is safe during entrySet iteration
      entry.setValue("********");
    }
  }
  return serializeJSON(map);
}
Format the running configuration
7,260
/**
 * Serializes the given object to JSON, wrapping it in a JSONP callback when
 * the "jsonp" query string parameter is present. Used for code dedupe by
 * the format calls.
 * @param obj The object to serialize.
 * @return A channel buffer with the serialized payload.
 */
private ChannelBuffer serializeJSON(final Object obj) {
  final byte[] payload = query.hasQueryStringParam("jsonp")
      ? JSON.serializeToJSONPBytes(query.getQueryStringParam("jsonp"), obj)
      : JSON.serializeToBytes(obj);
  return ChannelBuffers.wrappedBuffer(payload);
}
Helper object for the format calls to wrap the JSON response in a JSONP function if requested . Used for code dedupe .
7,261
/**
 * Starts all of the scanners asynchronously and returns a deferred that
 * resolves once every scanner has completed. The result may be an exception
 * if one or more scanners failed; the first error is returned and others
 * are logged.
 * @return A deferred resolving to the merged scan results keyed on row key.
 */
public Deferred<TreeMap<byte[], Span>> scan() {
  start_time = System.currentTimeMillis();
  int index = 0;
  // each scanner gets its own callback, indexed so results can be merged
  for (final Scanner scanner : scanners) {
    new ScannerCB(scanner, index).scan();
    index++;
  }
  return results;
}
Starts all of the scanners asynchronously and returns the data fetched once all of the scanners have completed. Note that the result may be an exception if one or more of the scanners encountered an exception; the first error is returned and the others are logged.
7,262
/**
 * Called each time a scanner completes, with valid or empty data. Stashes
 * the scanner's non-empty results and, once the last scanner has reported,
 * merges everything and fires the result deferred.
 *
 * @param kvs Data point columns fetched by the scanner (may be empty).
 * @param annotations Annotations found by the scanner, keyed on row key.
 * @param histograms Histogram points fetched by the scanner (may be empty).
 */
private void validateAndTriggerCallback(final List<KeyValue> kvs,
    final Map<byte[], List<Annotation>> annotations,
    final List<SimpleEntry<byte[], List<HistogramDataPoint>>> histograms) {
  countdown.countDown();
  // NOTE(review): the post-countDown() remaining count is used as the map
  // key, presumably as a unique per-scanner slot — confirm two scanners
  // finishing concurrently cannot observe the same count and collide.
  final long count = countdown.getCount();
  if (kvs.size() > 0) {
    kv_map.put((int) count, kvs);
  }
  // keep only rows that actually carry annotations
  for (final byte[] key : annotations.keySet()) {
    final List<Annotation> notes = annotations.get(key);
    if (notes.size() > 0) {
      annotation_map.put(key, notes);
    }
  }
  if (histograms.size() > 0) {
    histMap.put((int) count, histograms);
  }
  if (countdown.getCount() <= 0) {
    // last scanner done: merge and return, routing failures to the caller
    try {
      mergeAndReturnResults();
    } catch (final Exception ex) {
      results.callback(ex);
    }
  }
}
Called each time a scanner completes with valid or empty data .
7,263
/**
 * Returns the base path to use for the Gnuplot files: the cache directory
 * plus a hex hash of the query string with rendering-only parameters
 * ("png", "json", "ascii", "ignore") removed, so equivalent queries share
 * the same cache entry.
 * @param tsdb The TSDB to pull the cache directory from.
 * @param query The query to derive the cache key from.
 * @return A base file path (without extension) for the cached artifacts.
 */
private String getGnuplotBasePath(final TSDB tsdb, final HttpQuery query) {
  final Map<String, List<String>> q = query.getQueryString();
  q.remove("ignore");
  // work on a copy so the caller's query string keeps png/json/ascii
  final HashMap<String, List<String>> qs =
      new HashMap<String, List<String>>(q);
  qs.remove("png");
  qs.remove("json");
  qs.remove("ascii");
  return tsdb.getConfig().getDirectoryName("tsd.http.cachedir")
      + Integer.toHexString(qs.hashCode());
}
Returns the base path to use for the Gnuplot files .
7,264
/**
 * Checks whether or not it's possible to re-serve this query from the disk
 * cache, and serves the cached response if so.
 * @param query The query being served.
 * @param end_time End time of the query, in seconds.
 * @param max_age Maximum allowed staleness of the cache files, in seconds.
 * @param basepath Base path of the cached artifacts (without extension).
 * @return true if the query was answered from the cache, false if the
 * caller must (re)generate the graph.
 * @throws IOException if reading a cached file fails.
 */
private boolean isDiskCacheHit(final HttpQuery query, final long end_time,
    final int max_age, final String basepath) throws IOException {
  final String cachepath = basepath
      + (query.hasQueryStringParam("ascii") ? ".txt" : ".png");
  final File cachedfile = new File(cachepath);
  if (cachedfile.exists()) {
    final long bytes = cachedfile.length();
    // anything under 21 bytes can't be a valid PNG or useful text output
    if (bytes < 21) {
      logWarn(query, "Cached " + cachepath + " is too small (" + bytes
          + " bytes) to be valid. Ignoring it.");
      return false;
    }
    if (staleCacheFile(query, end_time, max_age, cachedfile)) {
      return false;
    }
    if (query.hasQueryStringParam("json")) {
      HashMap<String, Object> map =
          loadCachedJson(query, end_time, max_age, basepath);
      if (map == null) {
        map = new HashMap<String, Object>();
      }
      map.put("timing", query.processingTimeMillis());
      map.put("cachehit", "disk");
      query.sendReply(JSON.serializeToBytes(map));
    } else if (query.hasQueryStringParam("png")
        || query.hasQueryStringParam("ascii")) {
      query.sendFile(cachepath, max_age);
    } else {
      // no format specified: serve a small HTML page embedding the image
      query.sendReply(HttpQuery.makePage("TSDB Query", "Your graph is ready",
          "<img src=\"" + query.request().getUri() + "&amp;png\"/><br/>"
          + "<small>(served from disk cache)</small>"));
    }
    graphs_diskcache_hit.incrementAndGet();
    return true;
  }
  // no cached image/text; check the cached JSON metadata: a query that
  // plotted zero points may still be answerable from the cache
  final HashMap<String, Object> map =
      loadCachedJson(query, end_time, max_age, basepath);
  if (map == null || !map.containsKey("plotted")
      || ((Integer) map.get("plotted")) == 0) {
    return false;
  }
  if (query.hasQueryStringParam("json")) {
    map.put("timing", query.processingTimeMillis());
    map.put("cachehit", "disk");
    query.sendReply(JSON.serializeToBytes(map));
  } else if (query.hasQueryStringParam("png")) {
    // NOTE(review): replies with a single space for the png case —
    // presumably a minimal non-empty body; confirm intended
    query.sendReply(" ");
  } else {
    query.sendReply(HttpQuery.makePage("TSDB Query", "No results",
        "Sorry, your query didn't return anything.<br/>"
        + "<small>(served from disk cache)</small>"));
  }
  graphs_diskcache_hit.incrementAndGet();
  return true;
}
Checks whether or not it's possible to re-serve this query from the disk cache.
7,265
/**
 * Returns whether or not the given cache file is stale and must be
 * regenerated (true) or can still be served (false).
 * @param query The query being served (used for logging).
 * @param end_time End time of the query, in seconds; 0 or a future value
 * means the query extends to "now".
 * @param max_age Maximum allowed staleness, in seconds.
 * @param cachedfile The cache file to check.
 * @return true if the file is stale or unusable.
 */
private static boolean staleCacheFile(final HttpQuery query,
    final long end_time, final long max_age, final File cachedfile) {
  final long mtime = cachedfile.lastModified() / 1000;
  if (mtime <= 0) {
    // lastModified() returns 0 when the file is missing or unreadable
    return true;
  }
  final long now = System.currentTimeMillis() / 1000;
  // how old is the cached file, in seconds?
  final long staleness = now - mtime;
  if (staleness < 0) {
    // mtime in the future: clock skew or a corrupted file — play it safe
    logWarn(query, "Not using file @ " + cachedfile + " with weird"
        + " mtime in the future: " + mtime);
    return true;
  }
  if (0 < end_time && end_time < now) {
    // the query ends in the past: the file is fresh as long as it was
    // generated after the query's end time
    return mtime < end_time;
  }
  if (staleness > max_age) {
    logInfo(query, "Cached file @ " + cachedfile.getPath() + " is "
        + staleness + "s stale, which is more than its limit of "
        + max_age + "s, and needs to be regenerated.");
    return true;
  }
  return false;
}
Returns whether or not the given cache file can be used or is stale .
7,266
/**
 * Writes the given byte array into a file. This function logs an error but
 * doesn't throw if it fails.
 * @param query The query being served (used for logging).
 * @param path Destination file path.
 * @param contents Bytes to write.
 */
private static void writeFile(final HttpQuery query, final String path,
    final byte[] contents) {
  try {
    final FileOutputStream out = new FileOutputStream(path);
    try {
      out.write(contents);
    } finally {
      // always release the file handle, even if the write failed
      out.close();
    }
  } catch (FileNotFoundException e) {
    logError(query, "Failed to create file " + path, e);
  } catch (IOException e) {
    logError(query, "Failed to write file " + path, e);
  }
}
Writes the given byte array into a file . This function logs an error but doesn t throw if it fails .
7,267
/**
 * Reads a file into a byte array, up to {@code max_length} bytes.
 * Returns null (after logging, where relevant) on any failure: missing or
 * empty file, short read, or I/O error.
 * @param query The query being served (used for logging).
 * @param file The file to read.
 * @param max_length Maximum number of bytes to read.
 * @return The file contents, or null on failure.
 */
private static byte[] readFile(final HttpQuery query, final File file,
    final int max_length) {
  final int length = (int) file.length();
  if (length <= 0) {
    return null;
  }
  FileInputStream in;
  try {
    in = new FileInputStream(file.getPath());
  } catch (FileNotFoundException e) {
    return null;
  }
  try {
    final byte[] buf = new byte[Math.min(length, max_length)];
    final int read = in.read(buf);
    // a single read() is expected to fill the buffer for a local file;
    // treat a short read as a failure rather than returning partial data
    if (read != buf.length) {
      logError(query, "When reading " + file + ": read only " + read
          + " bytes instead of " + buf.length);
      return null;
    }
    return buf;
  } catch (IOException e) {
    logError(query, "Error while reading " + file, e);
    return null;
  } finally {
    try {
      in.close();
    } catch (IOException e) {
      logError(query, "Error while closing " + file, e);
    }
  }
}
Reads a file into a byte array .
7,268
/**
 * Formats and quotes the given string so it's suitable for use as a Gnuplot
 * string literal (JSON-style escaping wrapped in double quotes).
 * @param s The string to quote.
 * @return The escaped, double-quoted string.
 */
private static String stringify(final String s) {
  // capacity: the string plus the two surrounding quotes
  final StringBuilder buf = new StringBuilder(s.length() + 2);
  buf.append('"');
  HttpQuery.escapeJson(s, buf);
  return buf.append('"').toString();
}
Formats and quotes the given string so it's suitable for use as a Gnuplot string.
7,269
/**
 * Pops the given parameter out of the query string and returns its value.
 * @param querystring The query string map to remove the parameter from.
 * @param param The name of the parameter to pop.
 * @return The parameter's value (the last occurrence when repeated), or
 * null if it wasn't present.
 * @throws BadRequestException if the value contains a back-tick in raw,
 * URL-encoded or HTML-entity form.
 */
private static String popParam(final Map<String, List<String>> querystring,
    final String param) {
  final List<String> values = querystring.remove(param);
  if (values == null) {
    return null;
  }
  // when a parameter is repeated, the last occurrence wins
  final String last = values.get(values.size() - 1);
  // reject back-ticks in any encoding: they could enable command
  // substitution when the value reaches the Gnuplot shell wrapper
  if (last.contains("`") || last.contains("%60") || last.contains("&#96;")) {
    throw new BadRequestException("Parameter " + param + " contained a "
        + "back-tick. That's a no-no.");
  }
  return last;
}
Pops the given parameter out of the query string and returns its value.
7,270
/**
 * Applies the plot parameters from the query string to the given plot.
 * Each recognized parameter is removed ("popped") from the query string as
 * it is consumed.
 * @param query The HTTP query to take the parameters from.
 * @param plot The plot to apply the parameters to.
 */
static void setPlotParams(final HttpQuery query, final Plot plot) {
  final HashMap<String, String> params = new HashMap<String, String>();
  final Map<String, List<String>> querystring = query.getQueryString();
  String value;
  if ((value = popParam(querystring, "yrange")) != null) {
    params.put("yrange", value);
  }
  if ((value = popParam(querystring, "y2range")) != null) {
    params.put("y2range", value);
  }
  // labels, formats and titles are quoted so Gnuplot sees string literals
  if ((value = popParam(querystring, "ylabel")) != null) {
    params.put("ylabel", stringify(value));
  }
  if ((value = popParam(querystring, "y2label")) != null) {
    params.put("y2label", stringify(value));
  }
  if ((value = popParam(querystring, "yformat")) != null) {
    params.put("format y", stringify(value));
  }
  if ((value = popParam(querystring, "y2format")) != null) {
    params.put("format y2", stringify(value));
  }
  if ((value = popParam(querystring, "xformat")) != null) {
    params.put("format x", stringify(value));
  }
  // log-scale options are flags: presence is all that matters
  if ((value = popParam(querystring, "ylog")) != null) {
    params.put("logscale y", "");
  }
  if ((value = popParam(querystring, "y2log")) != null) {
    params.put("logscale y2", "");
  }
  if ((value = popParam(querystring, "key")) != null) {
    params.put("key", value);
  }
  if ((value = popParam(querystring, "title")) != null) {
    params.put("title", stringify(value));
  }
  if ((value = popParam(querystring, "bgcolor")) != null) {
    params.put("bgcolor", value);
  }
  if ((value = popParam(querystring, "fgcolor")) != null) {
    params.put("fgcolor", value);
  }
  if ((value = popParam(querystring, "smooth")) != null) {
    params.put("smooth", value);
  }
  if ((value = popParam(querystring, "style")) != null) {
    params.put("style", value);
  }
  // "nokey" overrides any "key" value: a null entry disables the legend
  if ((value = popParam(querystring, "nokey")) != null) {
    params.put("key", null);
  }
  plot.setParams(params);
}
Applies the plot parameters from the query to the given plot .
7,271
/**
 * Helper method to write the metric name and timestamp prefix of an ASCII
 * output line: {@code "<metric> <seconds> "}.
 * @param writer Destination writer.
 * @param metric The metric name.
 * @param timestamp Timestamp in milliseconds; emitted as seconds.
 */
private static void printMetricHeader(final PrintWriter writer,
    final String metric, final long timestamp) {
  writer.print(metric);
  writer.print(' ');
  // convert millis to seconds for the ASCII output format
  writer.print(timestamp / 1000L);
  writer.print(' ');
}
Helper method to write metric name and timestamp .
7,272
/**
 * Locates the Gnuplot helper shell script, preferring an auto-installed
 * copy and falling back to searching the class path for the wrapper.
 * @return Absolute path of the helper script, or null when the Gnuplot
 * executable itself was not found.
 * @throws RuntimeException if the wrapper is missing from the class path,
 * or exists but is not a readable, executable file.
 */
private static String findGnuplotHelperScript() {
  if (!GnuplotInstaller.FOUND_GP) {
    LOG.warn("Skipping Gnuplot Shell Script Install since Gnuplot executable was not found");
    return null;
  }
  // install the invoker script on first use
  if (!GnuplotInstaller.GP_FILE.exists()) {
    GnuplotInstaller.installMyGnuPlot();
  }
  if (GnuplotInstaller.GP_FILE.exists()
      && GnuplotInstaller.GP_FILE.canExecute()) {
    LOG.info("Auto Installed Gnuplot Invoker at [{}]",
        GnuplotInstaller.GP_FILE.getAbsolutePath());
    return GnuplotInstaller.GP_FILE.getAbsolutePath();
  }
  // fall back to the wrapper script shipped on the class path
  final URL url = GraphHandler.class.getClassLoader().getResource(WRAPPER);
  if (url == null) {
    throw new RuntimeException("Couldn't find " + WRAPPER + " on the"
        + " CLASSPATH: " + System.getProperty("java.class.path"));
  }
  final String path = url.getFile();
  LOG.debug("Using Gnuplot wrapper at {}", path);
  final File file = new File(path);
  final String error;
  if (!file.exists()) {
    error = "non-existent";
  } else if (!file.canExecute()) {
    error = "non-executable";
  } else if (!file.canRead()) {
    error = "unreadable";
  } else {
    return path;
  }
  throw new RuntimeException("The " + WRAPPER + " found on the"
      + " CLASSPATH (" + path + ") is a " + error + " file... WTF?"
      + " CLASSPATH=" + System.getProperty("java.class.path"));
}
Iterate through the class path and look for the Gnuplot helper script .
7,273
/**
 * Parses a TSMeta object from the given column, optionally loading the
 * associated UIDMeta objects.
 * @param tsdb The TSDB to use for storage access.
 * @param column The column holding the serialized TSMeta JSON.
 * @param load_uidmetas Whether to also load the UIDMeta objects.
 * @return A deferred resolving to the TSMeta loaded from storage.
 * @throws IllegalArgumentException if the column value is empty.
 */
public static Deferred<TSMeta> parseFromColumn(final TSDB tsdb,
    final KeyValue column, final boolean load_uidmetas) {
  if (column.value() == null || column.value().length < 1) {
    throw new IllegalArgumentException("Empty column value");
  }
  final TSMeta parsed_meta = JSON.parseToObject(column.value(), TSMeta.class);
  // fall back to the row key when the JSON didn't carry the TSUID
  if (parsed_meta.tsuid == null || parsed_meta.tsuid.isEmpty()) {
    parsed_meta.tsuid = UniqueId.uidToString(column.key());
  }
  // re-fetch the meta from storage; only the TSUID of the parsed copy is
  // actually used from here on
  Deferred<TSMeta> meta =
      getFromStorage(tsdb, UniqueId.stringToUid(parsed_meta.tsuid));
  if (!load_uidmetas) {
    return meta;
  }
  return meta.addCallbackDeferring(new LoadUIDs(tsdb, parsed_meta.tsuid));
}
Parses a TSMeta object from the given column optionally loading the UIDMeta objects
7,274
/**
 * Determines whether a meta entry exists in storage for the given TSUID.
 * Used by the UID Manager tool to decide if a new TSUID entry must be
 * written. Only checks that something is stored in the proper column; does
 * not validate the stored data.
 * @param tsdb The TSDB to use for storage access.
 * @param tsuid The TSUID (as a hex string) to look up.
 * @return A deferred resolving to true if the column exists.
 */
public static Deferred<Boolean> metaExistsInStorage(final TSDB tsdb,
    final String tsuid) {
  final GetRequest get =
      new GetRequest(tsdb.metaTable(), UniqueId.stringToUid(tsuid));
  get.family(FAMILY);
  get.qualifier(META_QUALIFIER);

  /** Maps the raw row to a simple existence flag. */
  final class ExistsCB implements Callback<Boolean, ArrayList<KeyValue>> {
    public Boolean call(ArrayList<KeyValue> row) throws Exception {
      return row != null && !row.isEmpty() && row.get(0).value() != null;
    }
  }

  return tsdb.getClient().get(get).addCallback(new ExistsCB());
}
Determines if an entry exists in storage or not . This is used by the UID Manager tool to determine if we need to write a new TSUID entry or not . It will not attempt to verify if the stored data is valid just checks to see if something is stored in the proper column .
7,275
/**
 * Determines whether the counter column exists for the given TSUID. Used by
 * the UID Manager tool to decide if a new TSUID entry must be written. Only
 * checks that something is stored in the proper column; does not validate
 * the stored data.
 * @param tsdb The TSDB to use for storage access.
 * @param tsuid The raw TSUID bytes to look up.
 * @return A deferred resolving to true if the counter column exists.
 */
public static Deferred<Boolean> counterExistsInStorage(final TSDB tsdb,
    final byte[] tsuid) {
  final GetRequest get = new GetRequest(tsdb.metaTable(), tsuid);
  get.family(FAMILY);
  get.qualifier(COUNTER_QUALIFIER);

  /** Maps the raw row to a simple existence flag. */
  final class ExistsCB implements Callback<Boolean, ArrayList<KeyValue>> {
    public Boolean call(ArrayList<KeyValue> row) throws Exception {
      return row != null && !row.isEmpty() && row.get(0).value() != null;
    }
  }

  return tsdb.getClient().get(get).addCallback(new ExistsCB());
}
Determines if the counter column exists for the TSUID . This is used by the UID Manager tool to determine if we need to write a new TSUID entry or not . It will not attempt to verify if the stored data is valid just checks to see if something is stored in the proper column .
7,276
/**
 * Validates the query: required fields, unique metric/filter/expression
 * ids, nested POJO validation and expression variable resolution.
 * @throws IllegalArgumentException if any part of the query is invalid.
 */
public void validate() {
  if (time == null) {
    throw new IllegalArgumentException("missing time");
  }
  validatePOJO(time, "time");
  if (metrics == null || metrics.isEmpty()) {
    throw new IllegalArgumentException("missing or empty metrics");
  }
  // metric and expression ids share one variable namespace
  final Set<String> variable_ids = new HashSet<String>();
  for (final Metric metric : metrics) {
    if (!variable_ids.add(metric.getId())) {
      throw new IllegalArgumentException("duplicated metric id: "
          + metric.getId());
    }
  }
  // BUG FIX: filters was dereferenced without a null check even though
  // later code guards on it; treat a null list as "no filters"
  if (filters != null) {
    final Set<String> filter_ids = new HashSet<String>();
    for (final Filter filter : filters) {
      if (!filter_ids.add(filter.getId())) {
        throw new IllegalArgumentException("duplicated filter id: "
            + filter.getId());
      }
    }
  }
  // BUG FIX: same null-guard issue for expressions
  if (expressions != null) {
    for (final Expression expression : expressions) {
      if (!variable_ids.add(expression.getId())) {
        throw new IllegalArgumentException(
            "Duplicated variable or expression id: " + expression.getId());
      }
    }
  }
  validateCollection(metrics, "metric");
  if (filters != null) {
    validateCollection(filters, "filter");
  }
  validateFilters();
  if (expressions != null) {
    // validate the collection once (the original called this twice)
    validateCollection(expressions, "expression");
    // every variable referenced by an expression must resolve to a metric
    // or another expression
    for (final Expression exp : expressions) {
      if (exp.getVariables() == null) {
        throw new IllegalArgumentException("No variables found for an "
            + "expression?! " + JSON.serializeToString(exp));
      }
      for (final String var : exp.getVariables()) {
        if (!variable_ids.contains(var)) {
          throw new IllegalArgumentException("Expression [" + exp.getExpr()
              + "] was missing input " + var);
        }
      }
    }
  }
}
Validates the query
7,277
/**
 * Validates that every filter id referenced by a metric matches one of the
 * supplied filters.
 * @throws IllegalArgumentException if a metric references an unknown filter.
 */
private void validateFilters() {
  final Set<String> ids = new HashSet<String>();
  // BUG FIX: guard against NPE when no filters were supplied; metrics that
  // reference a filter id will still fail the lookup below
  if (filters != null) {
    for (final Filter filter : filters) {
      ids.add(filter.getId());
    }
  }
  for (final Metric metric : metrics) {
    final String filter_id = metric.getFilter();
    if (filter_id != null && !filter_id.isEmpty() && !ids.contains(filter_id)) {
      throw new IllegalArgumentException(String.format(
          "unrecognized filter id %s in metric %s", filter_id, metric.getId()));
    }
  }
}
Validates the filters making sure each metric has a filter
7,278
/**
 * Makes sure the ID is non-empty and contains only letters and digits, and
 * is not a single digit.
 * @param id The ID to validate.
 * @throws IllegalArgumentException if the ID is null, empty, contains an
 * illegal character, or is a single digit.
 */
public static void validateId(final String id) {
  if (id == null || id.isEmpty()) {
    throw new IllegalArgumentException("The ID cannot be null or empty");
  }
  for (final char c : id.toCharArray()) {
    if (!Character.isLetterOrDigit(c)) {
      throw new IllegalArgumentException("Invalid id (\"" + id
          + "\"): illegal character: " + c);
    }
  }
  // NOTE(review): only single-character numeric IDs are rejected here;
  // multi-character all-digit IDs (e.g. "42") pass — confirm intended.
  if (id.length() == 1 && Character.isDigit(id.charAt(0))) {
    throw new IllegalArgumentException("The ID cannot be an integer");
  }
}
Makes sure the ID has only letters and characters
7,279
/**
 * Loads JARs from the configured plugin path, if any. Called by
 * initializePlugins and also used to load startup plugins.
 * @param plugin_path Directory to load plugin JARs from; may be null/empty.
 * @throws RuntimeException if loading any plugin JAR fails.
 */
public static void loadPluginPath(final String plugin_path) {
  if (plugin_path == null || plugin_path.isEmpty()) {
    return;  // no plugin path configured, nothing to do
  }
  try {
    PluginLoader.loadJARs(plugin_path);
  } catch (Exception e) {
    LOG.error("Error loading plugins from plugin path: " + plugin_path, e);
    throw new RuntimeException(
        "Error loading plugins from plugin path: " + plugin_path, e);
  }
}
Called by initializePlugins also used to load startup plugins .
7,280
/**
 * Attempts to find the name for a unique identifier of the given type.
 * @param type The UID type (metric, tagk or tagv).
 * @param uid The raw UID bytes to look up.
 * @return A deferred resolving to the name.
 * @throws IllegalArgumentException if the UID is missing or the type is
 * not recognized.
 */
public Deferred<String> getUidName(final UniqueIdType type, final byte[] uid) {
  if (uid == null) {
    throw new IllegalArgumentException("Missing UID");
  }
  switch (type) {
    case METRIC:
      return metrics.getNameAsync(uid);
    case TAGK:
      return tag_names.getNameAsync(uid);
    case TAGV:
      return tag_values.getNameAsync(uid);
    default:
      throw new IllegalArgumentException("Unrecognized UID type");
  }
}
Attempts to find the name for a unique identifier given a type
7,281
/**
 * Verifies that the data and UID tables exist in HBase, plus the tree and
 * meta tables when tree processing or meta/real-time tracking is enabled.
 * @return A deferred that resolves when all required tables were confirmed.
 */
public Deferred<ArrayList<Object>> checkNecessaryTablesExist() {
  // up to four tables may need checking depending on the configuration
  final ArrayList<Deferred<Object>> checks =
      new ArrayList<Deferred<Object>>(4);
  checks.add(client.ensureTableExists(
      config.getString("tsd.storage.hbase.data_table")));
  checks.add(client.ensureTableExists(
      config.getString("tsd.storage.hbase.uid_table")));
  if (config.enable_tree_processing()) {
    checks.add(client.ensureTableExists(
        config.getString("tsd.storage.hbase.tree_table")));
  }
  if (config.enable_realtime_ts() || config.enable_realtime_uid()
      || config.enable_tsuid_incrementing()) {
    checks.add(client.ensureTableExists(
        config.getString("tsd.storage.hbase.meta_table")));
  }
  return Deferred.group(checks);
}
Verifies that the data and UID tables exist in HBase and optionally the tree and meta data tables if the user has enabled meta tracking or tree building
7,282
/**
 * Adds an encoded histogram data point to the TSDB.
 * @param metric Name of the metric the histogram belongs to.
 * @param timestamp Timestamp of the data point.
 * @param raw_data The encoded histogram blob.
 * @param tags Tag name/value pairs for the series.
 * @return A deferred resolving when the point has been stored, or wrapping
 * an IllegalArgumentException if the blob is too small to be valid.
 */
public Deferred<Object> addHistogramPoint(final String metric,
    final long timestamp, final byte[] raw_data,
    final Map<String, String> tags) {
  // reject blobs too small to be a valid encoded histogram
  if (raw_data == null || raw_data.length < MIN_HISTOGRAM_BYTES) {
    return Deferred.fromError(new IllegalArgumentException(
        "The histogram raw data is invalid: " + Bytes.pretty(raw_data)));
  }
  checkTimestampAndTags(metric, timestamp, raw_data, tags, (short) 0);
  final byte[] row = IncomingDataPoints.rowKeyTemplate(this, metric, tags);
  // histogram columns carry a dedicated qualifier prefix
  final byte[] qualifier =
      Internal.getQualifier(timestamp, HistogramDataPoint.PREFIX);
  return storeIntoDB(metric, timestamp, raw_data, tags, (short) 0, row,
      qualifier);
}
Adds an encoded Histogram data point in the TSDB .
7,283
/**
 * Given a prefix search, returns matching metric names.
 * @param search A prefix to search for.
 * @param max_results Maximum number of results to return.
 * @return The matching metric names.
 */
public List<String> suggestMetrics(final String search,
    final int max_results) {
  return metrics.suggest(search, max_results);
}
Given a prefix search returns matching metric names .
7,284
/**
 * Given a prefix search, returns matching tagk names.
 * @param search A prefix to search for.
 * @param max_results Maximum number of results to return.
 * @return The matching tag names.
 */
public List<String> suggestTagNames(final String search,
    final int max_results) {
  return tag_names.suggest(search, max_results);
}
Given a prefix search returns matching tagk names .
7,285
/**
 * Given a prefix search, returns matching tag values.
 * @param search A prefix to search for.
 * @param max_results Maximum number of results to return.
 * @return The matching tag values.
 */
public List<String> suggestTagValues(final String search,
    final int max_results) {
  return tag_values.suggest(search, max_results);
}
Given a prefix search returns matching tag values .
7,286
/**
 * Attempts to assign a UID to a name for the given type. Used by the
 * UniqueIdRpc call to generate IDs for new metrics, tagks or tagvs. The name
 * must pass validation; if it is already assigned a UID, this method throws
 * an exception carrying the existing UID. Otherwise the newly created UID is
 * returned.
 * @param type One of "metric", "tagk" or "tagv" (case-insensitive).
 * @param name The name to assign a UID to.
 * @return The UID assigned to the name.
 * @throws IllegalArgumentException if the name fails validation, the type is
 * unknown, or the name already has a UID.
 */
public byte[] assignUid(final String type, final String name) {
  Tags.validateString(type, name);
  // Resolve the UID table for the requested type once, instead of
  // duplicating the assign logic in every branch as before.
  // NOTE(review): assumes metrics/tag_names/tag_values are all UniqueId
  // instances — confirm against the field declarations.
  final UniqueId uid_table;
  final String type_lc = type.toLowerCase();
  if (type_lc.equals("metric")) {
    uid_table = this.metrics;
  } else if (type_lc.equals("tagk")) {
    uid_table = this.tag_names;
  } else if (type_lc.equals("tagv")) {
    uid_table = this.tag_values;
  } else {
    LOG.warn("Unknown type name: " + type);
    throw new IllegalArgumentException("Unknown type name");
  }
  try {
    final byte[] uid = uid_table.getId(name);
    // Reaching here means the name is already mapped to a UID.
    throw new IllegalArgumentException("Name already exists with UID: "
        + UniqueId.uidToString(uid));
  } catch (NoSuchUniqueName nsue) {
    // No mapping yet, so create (or fetch, if raced) the new UID.
    return uid_table.getOrCreateId(name);
  }
}
Attempts to assign a UID to a name for the given type. Used by the UniqueIdRpc call to generate IDs for new metrics, tagks or tagvs. The name must pass validation, and if it's already assigned a UID this method will throw an error with the proper UID. Otherwise, if it can create the UID, the new UID will be returned.
7,287
/**
 * Attempts to delete the given UID name mapping from the storage table as
 * well as the local cache.
 * @param type The UID type as a string; converted via
 * {@code UniqueId.stringToUniqueIdType}.
 * @param name The name whose mapping should be removed.
 * @return A deferred that completes when the deletion has finished.
 * @throws IllegalArgumentException if the type is not recognized.
 */
public Deferred<Object> deleteUidAsync(final String type, final String name) {
  final UniqueIdType uid_type = UniqueId.stringToUniqueIdType(type);
  if (uid_type == UniqueIdType.METRIC) {
    return metrics.deleteAsync(name);
  }
  if (uid_type == UniqueIdType.TAGK) {
    return tag_names.deleteAsync(name);
  }
  if (uid_type == UniqueIdType.TAGV) {
    return tag_values.deleteAsync(name);
  }
  throw new IllegalArgumentException("Unrecognized UID type: " + uid_type);
}
Attempts to delete the given UID name mapping from the storage table as well as the local cache .
7,288
/**
 * Attempts to rename a UID from an existing name to the given name. Used by
 * the UniqueIdRpc call to rename existing metrics, tagks or tagvs. Both
 * names must pass validation; if the old name has no UID, an
 * IllegalArgumentException is thrown. Returns normally on success.
 * @param type One of "metric", "tagk" or "tagv" (case-insensitive).
 * @param oldname The existing name to rename.
 * @param newname The name to rename it to.
 * @throws IllegalArgumentException if either name fails validation, the type
 * is unknown, or the old name does not exist.
 */
public void renameUid(final String type, final String oldname,
                      final String newname) {
  Tags.validateString(type, oldname);
  Tags.validateString(type, newname);
  // Resolve the UID table for the requested type once, instead of
  // duplicating the rename logic in every branch as before.
  // NOTE(review): assumes metrics/tag_names/tag_values are all UniqueId
  // instances — confirm against the field declarations.
  final UniqueId uid_table;
  final String type_lc = type.toLowerCase();
  if (type_lc.equals("metric")) {
    uid_table = this.metrics;
  } else if (type_lc.equals("tagk")) {
    uid_table = this.tag_names;
  } else if (type_lc.equals("tagv")) {
    uid_table = this.tag_values;
  } else {
    LOG.warn("Unknown type name: " + type);
    throw new IllegalArgumentException("Unknown type name");
  }
  try {
    // getId() acts as the existence check; rename() is kept inside the try
    // so a concurrent deletion surfaces as the same "does not exist" error.
    uid_table.getId(oldname);
    uid_table.rename(oldname, newname);
  } catch (NoSuchUniqueName nsue) {
    throw new IllegalArgumentException(
        "Name(\"" + oldname + "\") does not exist");
  }
}
Attempts to rename a UID from an existing name to the given name. Used by the UniqueIdRpc call to rename the name of existing metrics, tagks or tagvs. The name must pass validation. If the UID doesn't exist, the method will throw an error; a chained IllegalArgumentException is directly exposed to the caller. If the rename was successful, this method returns normally.
7,289
/**
 * Indexes the given timeseries meta object via the configured search plugin.
 * A no-op when no search plugin is configured; plugin failures are routed to
 * the shared {@code PluginError} errback rather than thrown.
 * @param meta The TSMeta to index.
 */
public void indexTSMeta(final TSMeta meta) {
  if (search == null) {
    return;
  }
  search.indexTSMeta(meta).addErrback(new PluginError());
}
Index the given timeseries meta object via the configured search plugin
7,290
/**
 * Deletes the timeseries meta object from the search index.
 * A no-op when no search plugin is configured; plugin failures are routed to
 * the shared {@code PluginError} errback rather than thrown.
 * @param tsuid The TSUID of the meta object to remove from the index.
 */
public void deleteTSMeta(final String tsuid) {
  if (search == null) {
    return;
  }
  search.deleteTSMeta(tsuid).addErrback(new PluginError());
}
Delete the timeseries meta object from the search index
7,291
/**
 * Indexes the given UID meta object via the configured search plugin.
 * A no-op when no search plugin is configured; plugin failures are routed to
 * the shared {@code PluginError} errback rather than thrown.
 * @param meta The UIDMeta to index.
 */
public void indexUIDMeta(final UIDMeta meta) {
  if (search == null) {
    return;
  }
  search.indexUIDMeta(meta).addErrback(new PluginError());
}
Index the given UID meta object via the configured search plugin
7,292
/**
 * Deletes the UID meta object from the search index.
 * A no-op when no search plugin is configured; plugin failures are routed to
 * the shared {@code PluginError} errback rather than thrown.
 * @param meta The UIDMeta to remove from the index.
 */
public void deleteUIDMeta(final UIDMeta meta) {
  if (search == null) {
    return;
  }
  search.deleteUIDMeta(meta).addErrback(new PluginError());
}
Delete the UID meta object from the search index
7,293
/**
 * Indexes the given annotation via the configured search plugin and, when a
 * real-time publisher is configured, publishes the annotation to it as well.
 * Each step is independently skipped when its plugin is absent; search
 * failures are routed to the shared {@code PluginError} errback.
 * @param note The annotation to index and/or publish.
 */
public void indexAnnotation(final Annotation note) {
  final boolean have_search = search != null;
  if (have_search) {
    search.indexAnnotation(note).addErrback(new PluginError());
  }
  final boolean have_publisher = rt_publisher != null;
  if (have_publisher) {
    rt_publisher.publishAnnotation(note);
  }
}
Index the given Annotation object via the configured search plugin
7,294
/**
 * Deletes the annotation object from the search index.
 * A no-op when no search plugin is configured; plugin failures are routed to
 * the shared {@code PluginError} errback rather than thrown.
 * @param note The annotation to remove from the index.
 */
public void deleteAnnotation(final Annotation note) {
  if (search == null) {
    return;
  }
  search.deleteAnnotation(note).addErrback(new PluginError());
}
Delete the annotation object from the search index
7,295
/**
 * Processes the TSMeta through all configured trees, if tree processing is
 * enabled in the configuration.
 * @param meta The TSMeta to run through the tree builders.
 * @return A deferred tree-processing result, or an immediate {@code false}
 * when tree processing is disabled.
 */
public Deferred<Boolean> processTSMetaThroughTrees(final TSMeta meta) {
  if (!config.enable_tree_processing()) {
    return Deferred.fromResult(false);
  }
  return TreeBuilder.processAllTrees(this, meta);
}
Processes the TSMeta through all of the trees if configured to do so
7,296
/**
 * Executes a search query using the configured search plugin.
 * @param query The query to execute.
 * @return A deferred holding the query with its results filled in.
 * @throws IllegalStateException if no search plugin has been enabled.
 */
public Deferred<SearchQuery> executeSearch(final SearchQuery query) {
  if (search != null) {
    return search.executeQuery(query);
  }
  throw new IllegalStateException(
      "Searching has not been enabled on this TSD");
}
Executes a search query using the search plugin
7,297
/**
 * Blocks while pre-fetching meta data for the data and UID tables, which can
 * improve performance, particularly with many regions and region servers.
 * On interruption the interrupt flag is restored and the method returns;
 * any other prefetch failure is logged and swallowed (best effort).
 */
public void preFetchHBaseMeta() {
  LOG.info("Pre-fetching meta data for all tables");
  final long started_at = System.currentTimeMillis();
  final ArrayList<Deferred<Object>> prefetches =
      new ArrayList<Deferred<Object>>(2);
  prefetches.add(client.prefetchMeta(table));
  prefetches.add(client.prefetchMeta(uidtable));
  try {
    Deferred.group(prefetches).join();
    final long elapsed = System.currentTimeMillis() - started_at;
    LOG.info("Fetched meta data for tables in " + elapsed + "ms");
  } catch (InterruptedException e) {
    LOG.error("Interrupted", e);
    // Preserve the caller's interrupt status.
    Thread.currentThread().interrupt();
    return;
  } catch (Exception e) {
    LOG.error("Failed to prefetch meta for our tables", e);
  }
}
Blocks while pre-fetching meta data from the data and UID tables so that performance improves, particularly with a large number of regions and region servers.
7,298
/**
 * Gets the entire given row from the data table (restricted to the standard
 * column family).
 * @param key The row key to fetch.
 * @return A deferred list of the row's cells.
 */
final Deferred<ArrayList<KeyValue>> get(final byte[] key) {
  final GetRequest request = new GetRequest(table, key, FAMILY);
  return client.get(request);
}
Gets the entire given row from the data table .
7,299
/**
 * Puts the given value into the data table under the standard column family.
 * @param key The row key to write to.
 * @param qualifier The column qualifier.
 * @param value The value to store.
 * @param timestamp The timestamp to write the cell with.
 * @return A deferred that completes when the put has been applied.
 */
final Deferred<Object> put(final byte[] key, final byte[] qualifier,
                           final byte[] value, long timestamp) {
  // Delegates request construction to RequestBuilder so config-driven
  // options (presumably durability/append settings — verify) apply uniformly.
  return client.put(RequestBuilder.buildPutRequest(config, table, key, FAMILY,
      qualifier, value, timestamp));
}
Puts the given value into the data table .