idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
29,500
/**
 * Builds a "not supported" status whose reason is the concatenation of the
 * reasons of the given unsupported sub-filter statuses, each followed by a
 * newline (including a trailing newline after the last one).
 */
public static FilterSupportStatus newCompositeNotSupported(List<FilterSupportStatus> unsupportedSubfilters) {
  StringBuilder reasons = new StringBuilder();
  for (FilterSupportStatus unsupported : unsupportedSubfilters) {
    reasons.append(unsupported.getReason()).append("\n");
  }
  return new FilterSupportStatus(false, reasons.toString());
}
Static factory for a not-supported status caused by sub-filters not being supported.
29,501
/**
 * Reads the client version from bigtable-version.properties on the classpath.
 * Falls back to a "dev-&lt;timestamp&gt;" version when the file is missing, the
 * property is absent, the property value is an unreplaced build placeholder
 * (starts with "$"), or the file cannot be read.
 */
private static String getVersion() {
  final String defaultVersion = "dev-" + System.currentTimeMillis();
  final String fileName = "bigtable-version.properties";
  final String versionProperty = "bigtable.version";
  // try-with-resources guarantees the properties stream is closed.
  try (InputStream stream = BigtableVersionInfo.class.getResourceAsStream(fileName)) {
    if (stream == null) {
      LOG.error("Could not load properties file bigtable-version.properties");
      return defaultVersion;
    }
    Properties properties = new Properties();
    properties.load(stream);
    String value = properties.getProperty(versionProperty);
    if (value == null) {
      LOG.error("%s not found in %s.", versionProperty, fileName);
    } else if (value.startsWith("$")) {
      // A leading '$' means the build did not substitute the version placeholder.
      LOG.info("%s property is not replaced.", versionProperty);
    } else {
      return value;
    }
  } catch (IOException e) {
    LOG.error("Error while trying to get user agent name from %s", e, fileName);
  }
  return defaultVersion;
}
Gets user agent from bigtable - version . properties . Returns a default dev user agent with current timestamp if not found .
29,502
/**
 * Looks up a table by its string name.
 *
 * <p>Compatibility shim: the HBase shell needs this in HBase 0.99.2; remove
 * once 1.0.0 is out.
 */
public Table getTable(String tableName) throws IOException {
  return getTable(TableName.valueOf(tableName));
}
This should not be used . The hbase shell needs this in hbase 0 . 99 . 2 . Remove this once 1 . 0 . 0 comes out .
29,503
/**
 * Returns the configuration keys on the column descriptor that are neither
 * supported, ignored, nor value-constrained — i.e. options we do not know
 * how to handle.
 */
public static List<String> getUnknownFeatures(HColumnDescriptor columnDescriptor) {
  List<String> unknown = new ArrayList<String>();
  for (Map.Entry<String, String> option : columnDescriptor.getConfiguration().entrySet()) {
    String optionKey = option.getKey();
    boolean recognized = SUPPORTED_OPTION_KEYS.contains(optionKey)
        || IGNORED_OPTION_KEYS.contains(optionKey)
        || SUPPORTED_OPTION_VALUES.containsKey(optionKey);
    if (!recognized) {
      unknown.add(optionKey);
    }
  }
  return unknown;
}
Build a list of configuration keys that we don't know how to handle.
29,504
/**
 * Builds a map of configuration entries on the column descriptor whose values
 * differ from the single value we support for that key.
 *
 * @param columnDescriptor the column family descriptor to inspect
 * @return map from option key to the unsupported value actually configured
 */
public static Map<String, String> getUnsupportedFeatures(HColumnDescriptor columnDescriptor) {
  Map<String, String> unsupportedConfiguration = new HashMap<String, String>();
  Map<String, String> configuration = columnDescriptor.getConfiguration();
  for (Map.Entry<String, String> entry : SUPPORTED_OPTION_VALUES.entrySet()) {
    String key = entry.getKey();
    // Hoisted: the original looked this value up three times per iteration.
    // A non-null value also implies containsKey(key), so that check is folded in.
    String actualValue = configuration.get(key);
    if (actualValue != null && !entry.getValue().equals(actualValue)) {
      unsupportedConfiguration.put(key, actualValue);
    }
  }
  return unsupportedConfiguration;
}
Build a Map of configuration keys and values describing configuration values we don't support.
29,505
/**
 * Refreshes the credentials and blocks up to the given timeout. On
 * interruption, execution failure, timeout, or any unexpected error it
 * returns an UNAUTHENTICATED HeaderCacheElement rather than throwing. The
 * refresh is traced under "Bigtable.CredentialsRefresh". Should not be
 * called while holding the refresh lock.
 */
private HeaderCacheElement syncRefresh(long timeout, TimeUnit timeUnit) {
  Span span = TRACER.spanBuilder("Bigtable.CredentialsRefresh").startSpan();
  try (Scope scope = TRACER.withSpan(span)) {
    return asyncRefresh().get(timeout, timeUnit);
  } catch (InterruptedException e) {
    LOG.warn("Interrupted while trying to refresh google credentials.", e);
    Status status = Status.UNAUTHENTICATED
        .withDescription("Authentication was interrupted.")
        .withCause(e);
    span.setStatus(StatusConverter.fromGrpcStatus(status));
    // Preserve the interrupt flag for callers further up the stack.
    Thread.currentThread().interrupt();
    return new HeaderCacheElement(status);
  } catch (ExecutionException e) {
    LOG.warn("ExecutionException while trying to refresh google credentials.", e);
    Status status = Status.UNAUTHENTICATED
        .withDescription("ExecutionException during Authentication.")
        .withCause(e);
    span.setStatus(StatusConverter.fromGrpcStatus(status));
    return new HeaderCacheElement(status);
  } catch (TimeoutException e) {
    LOG.warn("TimeoutException while trying to refresh google credentials.", e);
    Status status = Status.UNAUTHENTICATED
        .withDescription("TimeoutException during Authentication.")
        .withCause(e);
    span.setStatus(StatusConverter.fromGrpcStatus(status));
    return new HeaderCacheElement(status);
  } catch (Exception e) {
    // Typo fixed: "execption" -> "exception" in both the log and the status text.
    LOG.warn("Unexpected exception while trying to refresh google credentials.", e);
    Status status = Status.UNAUTHENTICATED
        .withDescription("Unexpected exception during Authentication.")
        .withCause(e);
    span.setStatus(StatusConverter.fromGrpcStatus(status));
    return new HeaderCacheElement(status);
  } finally {
    span.end();
  }
}
Refresh the credentials and block. Will return an error if the credentials haven't been refreshed. This method should not be called while holding the refresh lock.
29,506
/**
 * Refreshes the OAuth2 token asynchronously: returns the in-flight refresh
 * future if one exists, the cached header immediately if it is still Good,
 * and otherwise submits a new refresh task to the executor.
 */
Future<HeaderCacheElement> asyncRefresh() {
  LOG.trace("asyncRefresh");
  synchronized (lock) {
    try {
      if (futureToken != null) {
        // A refresh is already in flight; piggyback on it.
        return futureToken;
      }
      if (headerCache.getCacheState() == CacheState.Good) {
        return Futures.immediateFuture(headerCache);
      }
      Future<HeaderCacheElement> future = executor.submit(new Callable<HeaderCacheElement>() {
        public HeaderCacheElement call() {
          return updateToken();
        }
      });
      // Don't remember a future that already completed synchronously.
      if (!future.isDone()) {
        this.futureToken = future;
      }
      return future;
    } catch (RuntimeException e) {
      futureToken = null;
      LOG.warn("Got an unexpected exception while trying to refresh google credentials.", e);
      return Futures.immediateFuture(new HeaderCacheElement(
          Status.UNAUTHENTICATED
              .withDescription("Unexpected error trying to authenticate")
              .withCause(e)));
    }
  }
}
Refreshes the OAuth2 token asynchronously. This method will only start an async refresh if there isn't a currently running asynchronous refresh and the current token is not Good.
29,507
/**
 * Revokes the cached token, but only if it is still the given token; if a
 * newer token is cached, someone else already refreshed and revoking would
 * discard a valid token.
 */
void revokeUnauthToken(HeaderToken oldToken) {
  synchronized (lock) {
    if (headerCache.getToken() == oldToken) {
      LOG.warn("Got unauthenticated response from server, revoking the current token");
      headerCache = EMPTY_HEADER;
    } else {
      LOG.info("Skipping revoke, since the revoked token has already changed");
    }
  }
}
Clear the cache .
29,508
/**
 * Equality helper: casts {@code obj} to a ResultQueueEntry when it is
 * comparable to this entry (same runtime class and same type tag); returns
 * null otherwise.
 */
@SuppressWarnings("unchecked")
protected ResultQueueEntry<T> toResultQueueEntryForEquals(Object obj) {
  // instanceof is false for null, so the original's extra "obj == null"
  // check was unreachable and has been removed.
  if (!(obj instanceof ResultQueueEntry)) {
    return null;
  }
  ResultQueueEntry<T> other = (ResultQueueEntry<T>) obj;
  return type == other.type && getClass() == other.getClass() ? other : null;
}
This is a utility function for checking equality between this and another ResultQueueEntry .
29,509
/**
 * Extracts the single family name from the context's scan.
 *
 * @throws IllegalStateException if the scan does not have exactly one family
 */
public static String getSingleFamilyName(FilterAdapterContext context) {
  Preconditions.checkState(
      context.getScan().numFamilies() == 1,
      "Cannot getSingleFamilyName() unless there is exactly 1 family.");
  return Bytes.toString(context.getScan().getFamilies()[0]);
}
Extract a single family name from a FilterAdapterContext . Throws if there is not exactly 1 family present in the scan .
29,510
/**
 * Returns the cells of the row in the given key/value pair, substituting an
 * empty list when listCells() returns null. Logs a one-time warning on the
 * first empty row seen (possible HBase 0.94 serialization artifact).
 */
private List<Cell> checkEmptyRow(KV<ImmutableBytesWritable, Result> kv) {
  List<Cell> rowCells = kv.getValue().listCells();
  if (rowCells == null) {
    rowCells = Collections.emptyList();
  }
  if (rowCells.isEmpty() && !isEmptyRowWarned) {
    logger.warn("Encountered empty row. Was input file serialized by HBase 0.94?");
    isEmptyRowWarned = true;
  }
  return rowCells;
}
Warns about empty row on first occurrence only and replaces a null array with 0 - length one .
29,511
/**
 * Builds a CloudBigtableTableConfiguration for the import job from the
 * runtime options. Throttling is enabled iff the configured mutation
 * throttle latency is positive.
 */
public static CloudBigtableTableConfiguration BuildImportConfig(ImportOptions opts) {
  CloudBigtableTableConfiguration.Builder builder =
      new CloudBigtableTableConfiguration.Builder()
          .withProjectId(opts.getBigtableProject())
          .withInstanceId(opts.getBigtableInstanceId())
          .withTableId(opts.getBigtableTableId());
  if (opts.getBigtableAppProfileId() != null) {
    builder.withAppProfileId(opts.getBigtableAppProfileId());
  }
  // Typed ValueProvider<String> (was a raw ValueProvider): enabled iff latency > 0.
  ValueProvider<String> enableThrottling =
      ValueProvider.NestedValueProvider.of(
          opts.getMutationThrottleLatencyMs(),
          (Integer throttleMs) -> String.valueOf(throttleMs > 0));
  builder.withConfiguration(
      BigtableOptionsFactory.BIGTABLE_BUFFERED_MUTATOR_ENABLE_THROTTLING, enableThrottling);
  builder.withConfiguration(
      BigtableOptionsFactory.BIGTABLE_BUFFERED_MUTATOR_THROTTLING_THRESHOLD_MILLIS,
      ValueProvider.NestedValueProvider.of(opts.getMutationThrottleLatencyMs(), String::valueOf));
  return builder.build();
}
Builds CloudBigtableTableConfiguration from input runtime parameters for import job .
29,512
/**
 * Builds a CloudBigtableScanConfiguration for the export job from the
 * runtime options; the ReadRowsRequest is supplied lazily via a
 * RequestValueProvider.
 */
public static CloudBigtableScanConfiguration BuildExportConfig(ExportOptions options) {
  ValueProvider<ReadRowsRequest> request = new RequestValueProvider(options);
  CloudBigtableScanConfiguration.Builder configBuilder =
      new CloudBigtableScanConfiguration.Builder()
          .withProjectId(options.getBigtableProject())
          .withInstanceId(options.getBigtableInstanceId())
          .withTableId(options.getBigtableTableId())
          .withAppProfileId(options.getBigtableAppProfileId())
          .withRequest(request);
  return configBuilder.build();
}
Builds CloudBigtableScanConfiguration from input runtime parameters for export job .
29,513
/**
 * One-time setup for grpcz pages: registers the HBase-level span names (in
 * addition to whatever TracingUtilities.setupTracingConfig registers) so a
 * consistent set of Cloud Bigtable / HBase methods shows up on the page.
 */
public static void setupTracingConfig() {
  TracingUtilities.setupTracingConfig();
  List<String> descriptors = Arrays.asList(
      "BigtableTable.getTableDescriptor",
      "BigtableTable.exists",
      "BigtableTable.existsAll",
      "BigtableTable.batch",
      "BigtableTable.batchCallback",
      "BigtableTable.get",
      "BigtableTable.put",
      "BigtableTable.checkAndPut",
      "BigtableTable.delete",
      "BigtableTable.checkAndDelete",
      "BigtableTable.checkAndMutate",
      "BigtableTable.mutateRow",
      "BigtableTable.append",
      "BigtableTable.increment",
      "BigtableTable.incrementColumnValue");
  Tracing.getExportComponent().getSampledSpanStore().registerSpanNamesForCollection(descriptors);
}
This is a one time setup for grpcz pages . This adds all of the methods to the Tracing environment required to show a consistent set of methods relating to Cloud Bigtable and HBase methods on the grpcz page .
29,514
/**
 * Waits up to timeoutMs for one completed operation id and, if one arrives,
 * marks that operation complete. Returns without action on timeout.
 */
private void waitForCompletions(long timeoutMs) throws InterruptedException {
  Long completedOperation = this.completedOperationIds.pollFirst(timeoutMs, TimeUnit.MILLISECONDS);
  if (completedOperation != null) {
    markOperationComplete(completedOperation);
  }
}
Waits for a completion and then marks it as complete .
29,515
/**
 * Restores the named snapshot: if the snapshot's table no longer exists it is
 * recreated via cloneSnapshot; if it exists it must be disabled first.
 *
 * <p>NOTE(review): snapshotName is passed to Pattern.compile, so regex
 * metacharacters in the name affect matching — confirm that is intended.
 * NOTE(review): the disabled-table branch re-invokes this same
 * restoreSnapshot(String, boolean) signature with identical arguments —
 * confirm it resolves to a different overload elsewhere; as visible here it
 * looks like it could recurse indefinitely.
 */
public CompletableFuture<Void> restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot) {
  CompletableFuture<Void> future = new CompletableFuture<>();
  listSnapshots(Pattern.compile(snapshotName)).whenComplete((snapshotDescriptions, err) -> {
    if (err != null) {
      future.completeExceptionally(err);
      return;
    }
    final TableName tableName = snapshotExists(snapshotName, snapshotDescriptions);
    if (tableName == null) {
      future.completeExceptionally(new RestoreSnapshotException(
          "Unable to find the table name for snapshot=" + snapshotName));
      return;
    }
    tableExists(tableName).whenComplete((exists, err2) -> {
      if (err2 != null) {
        future.completeExceptionally(err2);
      } else if (!exists) {
        // Table is gone: restore by cloning the snapshot to a new table.
        completeConditionalOnFuture(future, cloneSnapshot(snapshotName, tableName));
      } else {
        isTableDisabled(tableName).whenComplete((disabled, err4) -> {
          if (err4 != null) {
            future.completeExceptionally(err4);
          } else if (!disabled) {
            future.completeExceptionally(new TableNotDisabledException(tableName));
          } else {
            completeConditionalOnFuture(future, restoreSnapshot(snapshotName, takeFailSafeSnapshot));
          }
        });
      }
    });
  });
  return future;
}
Restore the specified snapshot on the original table .
29,516
/**
 * Returns the table name of the first snapshot description whose name equals
 * snapshotName, or null when the list is null or has no match.
 */
private TableName snapshotExists(String snapshotName, List<SnapshotDescription> snapshotDescriptions) {
  if (snapshotDescriptions == null) {
    return null;
  }
  for (SnapshotDescription description : snapshotDescriptions) {
    if (description.getName().equals(snapshotName)) {
      return description.getTableName();
    }
  }
  return null;
}
Checks whether the named snapshot exists, returning its table name if found (null otherwise).
29,517
/**
 * Splits the delegate source and returns the splits in random order.
 *
 * <p>NOTE(review): Collections.shuffle mutates the list returned by
 * delegate.split(); if a delegate ever returns an unmodifiable list this
 * will throw UnsupportedOperationException — confirm delegates return
 * mutable lists.
 */
public List<? extends BoundedSource<T>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
  List<? extends BoundedSource<T>> splits = delegate.split(desiredBundleSizeBytes, options);
  Collections.shuffle(splits);
  return splits;
}
Shuffles the delegate source's splits.
29,518
/** Adapts the untyped HBase filter into a Bigtable filter via the typed adapter. */
public Filters.Filter adapt(FilterAdapterContext context, Filter hbaseFilter) throws IOException {
  T typedFilter = getTypedFilter(hbaseFilter);
  return adapter.adapt(context, typedFilter);
}
Adapt the untyped hbaseFilter instance into a RowFilter .
29,519
/**
 * Determines whether the untyped filter is supported by the typed adapter.
 * The precondition rejects filters of the wrong runtime class.
 */
public FilterSupportStatus isSupported(FilterAdapterContext context, Filter hbaseFilter) {
  Preconditions.checkArgument(isFilterAProperSublcass(hbaseFilter));
  return adapter.isFilterSupported(context, getTypedFilter(hbaseFilter));
}
Determine if the untyped filter is supported .
29,520
/**
 * Collects unsupported-status objects for the given filter into {@code statuses}.
 * The precondition rejects filters of the wrong runtime class.
 */
public void collectUnsupportedStatuses(FilterAdapterContext context, Filter filter, List<FilterSupportStatus> statuses) {
  Preconditions.checkArgument(isFilterAProperSublcass(filter));
  unsupportedStatusCollector.collectUnsupportedStatuses(context, unchecked(filter), statuses);
}
Collect unsupported status objects into the given list .
29,521
/**
 * One-time setup for grpcz pages: registers the span names of the Bigtable
 * data and table-admin gRPC services for sampled-span collection.
 */
public static void setupTracingConfig() {
  List<String> descriptors = new ArrayList<>();
  addDescriptor(descriptors, BigtableTableAdminGrpc.getServiceDescriptor());
  addDescriptor(descriptors, BigtableGrpc.getServiceDescriptor());
  Tracing.getExportComponent().getSampledSpanStore().registerSpanNamesForCollection(descriptors);
}
This is a one time setup for grpcz pages . This adds all of the methods to the Tracing environment required to show a consistent set of methods relating to Cloud Bigtable on the grpcz page . If HBase artifacts are present this will add tracing metadata for HBase methods .
29,522
/**
 * Adds the request's single row key to the batch keyed by its filter; the
 * returned future resolves when the batch response is received.
 *
 * @throws NullPointerException if query is null
 * @throws IllegalArgumentException if the query does not target exactly one
 *     non-empty row key
 */
public synchronized ApiFuture<FlatRow> add(Query query) {
  Preconditions.checkNotNull(query);
  ReadRowsRequest request = query.toProto(requestContext);
  Preconditions.checkArgument(request.getRows().getRowKeysCount() == 1);
  ByteString rowKey = request.getRows().getRowKeysList().get(0);
  Preconditions.checkArgument(!rowKey.equals(ByteString.EMPTY));
  final RowFilter filter = request.getFilter();
  // computeIfAbsent replaces the original get / null-check / put sequence.
  Batch batch = batches.computeIfAbsent(filter, Batch::new);
  return batch.addKey(rowKey);
}
Adds the key in the request to a batch read . The future will be resolved when the batch response is received .
29,523
/**
 * Submits every pending batch (split into sub-batches) to the thread pool
 * and clears the pending map. Does not wait for the submitted work.
 */
public void flush() {
  for (Batch pending : batches.values()) {
    for (Batch shard : pending.split()) {
      threadPool.submit(shard);
    }
  }
  batches.clear();
}
Sends all remaining requests to the server. This method does not block waiting for the requests to complete.
29,524
/**
 * Custom deserialization hook: restores default fields, then re-creates the
 * (possibly non-serializable) serialization instance from its class.
 *
 * @throws RuntimeException if the serialization class cannot be instantiated
 */
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
  in.defaultReadObject();
  try {
    serialization = serializationClass.newInstance();
  } catch (IllegalAccessException | InstantiationException e) {
    // Bug fix: the original dropped the underlying exception; keep it as the cause.
    throw new RuntimeException(
        "Failed to deserialize " + HadoopSerializationCoder.class.getSimpleName(), e);
  }
}
Populate a possibly unserializable serialization instance .
29,525
/**
 * Gets the serve-node count of the given cluster.
 *
 * @throws NullPointerException if the cluster cannot be found
 */
public int getClusterSize(String clusterId, String zoneId) {
  Cluster cluster = getCluster(clusterId, zoneId);
  // Guava formats the template lazily, only on failure (the original built
  // the message with String.format on every call, including the happy path).
  Preconditions.checkNotNull(cluster, "Cluster %s/%s was not found.", clusterId, zoneId);
  return cluster.getServeNodes();
}
Gets the serve node count of the cluster .
29,526
/**
 * Resizes the cluster identified by clusterId to newSize nodes.
 *
 * <p>NOTE(review): zoneId is unused in this body — confirm whether it should
 * participate in resolving the cluster name.
 */
public void setClusterSize(String clusterId, String zoneId, int newSize) throws InterruptedException {
  setClusterSize(instanceName.toClusterName(clusterId).getClusterName(), newSize);
}
Sets a cluster size to a specific size .
29,527
/**
 * Updates the named cluster's serve-node count and blocks (up to 60 seconds)
 * until the resize operation completes.
 *
 * @throws IllegalArgumentException if newSize is not positive
 */
private void setClusterSize(String clusterName, int newSize) throws InterruptedException {
  Preconditions.checkArgument(newSize > 0, "Cluster size must be > 0");
  logger.info("Updating cluster %s to size %d", clusterName, newSize);
  Operation operation = client.updateCluster(
      Cluster.newBuilder().setName(clusterName).setServeNodes(newSize).build());
  waitForOperation(operation.getName(), 60);
  logger.info("Done updating cluster %s.", clusterName);
}
Updates a specific cluster's serve-node count to the number specified.
29,528
/**
 * Polls the named long-running operation every 500ms until it completes or
 * maxSeconds elapses.
 *
 * @throws RuntimeException if the operation completed with an error
 * @throws IllegalStateException on a malformed response or on timeout
 */
public void waitForOperation(String operationName, int maxSeconds) throws InterruptedException {
  long endTimeMillis = TimeUnit.SECONDS.toMillis(maxSeconds) + System.currentTimeMillis();
  GetOperationRequest request = GetOperationRequest.newBuilder().setName(operationName).build();
  do {
    Thread.sleep(500);
    Operation response = client.getOperation(request);
    if (response.getDone()) {
      switch (response.getResultCase()) {
        case RESPONSE:
          return;
        case ERROR:
          throw new RuntimeException("Cluster could not be resized: " + response.getError());
        case RESULT_NOT_SET:
          throw new IllegalStateException(
              "System returned invalid response for Operation check: " + response);
      }
    }
  } while (System.currentTimeMillis() < endTimeMillis);
  throw new IllegalStateException(
      String.format("Waited %d seconds and cluster was not resized yet.", maxSeconds));
}
Waits for an operation like cluster resizing to complete .
29,529
/**
 * Applies the filter to the cell, returning the cell when it is included
 * (INCLUDE or INCLUDE_AND_NEXT_COL) and null when it is filtered out.
 * A null filter includes everything.
 */
public static Cell filterKv(Filter filter, Cell kv) throws IOException {
  if (filter != null) {
    Filter.ReturnCode code = filter.filterKeyValue(kv);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Filter returned:" + code + " for the key value:" + kv);
    }
    // Enum constants are singletons; == is the idiomatic comparison.
    if (code != Filter.ReturnCode.INCLUDE && code != Filter.ReturnCode.INCLUDE_AND_NEXT_COL) {
      return null;
    }
  }
  return kv;
}
Attempt to filter out the keyvalue
29,530
/**
 * Records the filter class and its constructor arguments in the job
 * configuration so it can be instantiated on import.
 */
public static void addFilterAndArguments(Configuration conf, Class<? extends Filter> clazz, List<String> filterArgs) {
  conf.set(Import.FILTER_CLASS_CONF_KEY, clazz.getName());
  // toArray(new String[0]) is the preferred idiom over pre-sizing the array.
  conf.setStrings(Import.FILTER_ARGS_CONF_KEY, filterArgs.toArray(new String[0]));
}
Add a Filter to be instantiated on import
29,531
/**
 * Builds credentials from a JSON input stream. Service-account credentials
 * are exchanged for a JWT token via getJwtToken; all other credential types
 * are scoped to the full set of Bigtable scopes.
 */
public static Credentials getInputStreamCredential(InputStream inputStream) throws IOException {
  GoogleCredentials credentials = GoogleCredentials.fromStream(inputStream, getHttpTransportFactory());
  if (credentials instanceof ServiceAccountCredentials) {
    return getJwtToken((ServiceAccountCredentials) credentials);
  }
  return credentials.createScoped(CLOUD_BIGTABLE_ALL_SCOPES);
}
Initializes OAuth2 application default credentials based on an inputStream .
29,532
/**
 * Creates a table via the admin client wrapper. Any failure — the broad
 * Throwable catch is deliberate — is funneled through
 * convertToTableExistsException for translation before being rethrown.
 */
protected void createTable(TableName tableName, CreateTableRequest request) throws IOException {
  try {
    tableAdminClientWrapper.createTable(request);
  } catch (Throwable throwable) {
    throw convertToTableExistsException(tableName, throwable);
  }
}
Creates a Table .
29,533
/** String-name overload kept for the HBase shell; delegates to the TableName variant. */
public void addColumn(String tableName, HColumnDescriptor column) throws IOException {
  addColumn(TableName.valueOf(tableName), column);
}
The HBase shell is switched to use the methods defined in the interface.
29,534
/** byte[]-name overload needed by the HBase shell; delegates to the TableName variant. */
public void snapshot(byte[] snapshotName, byte[] tableName) throws IOException, IllegalArgumentException {
  snapshot(snapshotName, TableName.valueOf(tableName));
}
This is needed for the hbase shell .
29,535
/**
 * Starts the initial execution of the RPC and returns its completion future.
 * May only be called once per operation.
 *
 * @throws IllegalStateException if the operation was already started
 */
public ListenableFuture<ResultT> getAsyncResult() {
  // Message added so a double-start failure is diagnosable.
  Preconditions.checkState(operationTimerContext == null, "The operation was already started");
  operationTimerContext = rpc.getRpcMetrics().timeOperation();
  run();
  return completionFuture;
}
Initial execution of the RPC .
29,536
/**
 * Blocks until all in-flight RPCs and retries complete, waking every
 * finishWaitMillis to log a warning when no success has been observed past
 * the deadline.
 */
public void awaitCompletion() throws InterruptedException {
  boolean performedWarning = false;
  while (hasInflightOperations()) {
    synchronized (signal) {
      // Re-check under the lock to avoid missing a completion notification.
      if (hasInflightOperations()) {
        signal.wait(finishWaitMillis);
      }
    }
    long now = clock.nanoTime();
    if (now >= noSuccessCheckDeadlineNanos) {
      logNoSuccessWarning(now);
      resetNoSuccessWarningDeadline();
      performedWarning = true;
    }
  }
  if (performedWarning) {
    // Pair the earlier warning with an explicit all-clear message.
    LOG.info("awaitCompletion() completed");
  }
}
Blocks until all outstanding RPCs and retries have completed
29,537
/**
 * Applies the idempotent retry settings and retryable status codes derived
 * from options to the builder's point-read (readRow) settings.
 */
private static void buildReadRowSettings(Builder builder, BigtableOptions options) {
  RetrySettings retrySettings =
      buildIdempotentRetrySettings(builder.readRowSettings().getRetrySettings(), options);
  builder.readRowSettings()
      .setRetrySettings(retrySettings)
      .setRetryableCodes(buildRetryCodes(options.getRetryOptions()));
}
To build default Retry settings for Point Read .
29,538
/**
 * Builds a filter matching a single cell value, by wrapping the
 * SingleColumnValueFilter's operator and comparator in a ValueFilter and
 * adapting that.
 */
private Filter createValueMatchFilter(FilterAdapterContext context, SingleColumnValueFilter filter) throws IOException {
  ValueFilter valueFilter = new ValueFilter(filter.getOperator(), filter.getComparator());
  return delegateAdapter.toFilter(context, valueFilter);
}
Emit a filter that will match against a single value .
29,539
/**
 * Handles a watchdog timeout (StreamWaitTimeoutException): retries the scan
 * using a dedicated timeout-retry counter, or gives up once
 * getMaxScanTimeoutRetries() attempts are exhausted.
 *
 * @throws IllegalArgumentException if the status cause is not a
 *     StreamWaitTimeoutException
 */
private void handleTimeoutError(Status status) {
  Preconditions.checkArgument(
      status.getCause() instanceof StreamWaitTimeoutException,
      "status is not caused by a StreamWaitTimeoutException");
  StreamWaitTimeoutException e = ((StreamWaitTimeoutException) status.getCause());
  // Close the per-RPC timer and count this attempt as failed.
  rpcTimerContext.close();
  failedCount++;
  int maxRetries = retryOptions.getMaxScanTimeoutRetries();
  if (retryOptions.enableRetries() && ++timeoutRetryCount <= maxRetries) {
    LOG.warn("The client could not get a response in %d ms. Retrying the scan.", e.getWaitTimeMs());
    // Timeouts track their own counter, so the status-based backoff restarts.
    resetStatusBasedBackoff();
    performRetry(0);
  } else {
    LOG.warn("The client could not get a response after %d tries, giving up.", timeoutRetryCount);
    rpc.getRpcMetrics().markFailure();
    finalizeStats(status);
    setException(getExhaustedRetriesException(status));
  }
}
Special retry handling for watchdog timeouts which uses its own fail counter .
29,540
/**
 * Records one throttling event: folds the duration into both the throttling
 * timer histogram and the cumulative throttled-time counter.
 */
void markThrottling(long throttlingDurationInNanos) {
  cumulativeThrottlingTimeNanos.addAndGet(throttlingDurationInNanos);
  throttlingTimer.update(throttlingDurationInNanos, TimeUnit.NANOSECONDS);
}
This method updates throttling statistics .
29,541
/**
 * Extracts every parameter (first capture group of PARAM_REGEX) found in the
 * pattern, in order of appearance.
 */
static List<String> parsePattern(String pattern) {
  List<String> found = new ArrayList<>(4);
  for (Matcher m = PARAM_REGEX.matcher(pattern); m.find(); ) {
    found.add(m.group(1));
  }
  return found;
}
Get the list of parameters from the given pattern .
29,542
/**
 * Converts each recognizable parameter into its ParameterFiller; parameters
 * that no parser recognizes are skipped silently.
 */
private static List<ParameterFiller> parseParameters(List<String> parameters) {
  List<ParameterFiller> fillers = new ArrayList<>(parameters.size());
  for (String candidate : parameters) {
    ParameterFiller filler = parseParameter(candidate);
    if (filler == null) {
      continue; // unrecognized parameter
    }
    fillers.add(filler);
  }
  return fillers;
}
Get the list of parameter fillers from the given parameters .
29,543
/**
 * Tries each specific parser in turn (date, level, tag, message) and returns
 * the first filler produced, or null when no parser recognizes the parameter.
 */
private static ParameterFiller parseParameter(String parameter) {
  String wrapped = "{" + parameter + "}";
  String trimmed = parameter.trim();
  ParameterFiller filler;
  if ((filler = parseDateParameter(wrapped, trimmed)) != null) {
    return filler;
  }
  if ((filler = parseLevelParameter(wrapped, trimmed)) != null) {
    return filler;
  }
  if ((filler = parseTagParameter(wrapped, trimmed)) != null) {
    return filler;
  }
  return parseMessageParameter(wrapped, trimmed);
}
Create a parameter filler if the given parameter is recognizable .
29,544
/**
 * Tries to build a DateFiller: a parameter of the form
 * "&lt;date-token&gt; &lt;format&gt;" uses the given format, the bare date token uses
 * DEFAULT_DATE_FORMAT, and anything else yields null.
 */
static DateFiller parseDateParameter(String wrappedParameter, String trimmedParameter) {
  if (trimmedParameter.startsWith(PARAMETER_DATE + " ")
      && trimmedParameter.length() > PARAMETER_DATE.length() + 1) {
    // Everything after "<date-token> " is the user-supplied date format.
    String dateFormat = trimmedParameter.substring(PARAMETER_DATE.length() + 1);
    return new DateFiller(wrappedParameter, trimmedParameter, dateFormat);
  } else if (trimmedParameter.equals(PARAMETER_DATE)) {
    return new DateFiller(wrappedParameter, trimmedParameter, DEFAULT_DATE_FORMAT);
  }
  return null;
}
Try to create a date filler if the given parameter is a date parameter .
29,545
/**
 * Tries to build a LevelFiller: the long level token yields a long-form
 * filler, the short token a short-form one, anything else null.
 */
static LevelFiller parseLevelParameter(String wrappedParameter, String trimmedParameter) {
  if (trimmedParameter.equals(PARAMETER_LEVEL_LONG)) {
    return new LevelFiller(wrappedParameter, trimmedParameter, true);
  }
  if (trimmedParameter.equals(PARAMETER_LEVEL_SHORT)) {
    return new LevelFiller(wrappedParameter, trimmedParameter, false);
  }
  return null;
}
Try to create a level filler if the given parameter is a level parameter .
29,546
/** Tries to build a TagFiller; null when the parameter is not the tag token. */
static TagFiller parseTagParameter(String wrappedParameter, String trimmedParameter) {
  return PARAMETER_TAG.equals(trimmedParameter)
      ? new TagFiller(wrappedParameter, trimmedParameter)
      : null;
}
Try to create a tag filler if the given parameter is a tag parameter .
29,547
/** Tries to build a MessageFiller; null when the parameter is not the message token. */
static MessageFiller parseMessageParameter(String wrappedParameter, String trimmedParameter) {
  return PARAMETER_MESSAGE.equals(trimmedParameter)
      ? new MessageFiller(wrappedParameter, trimmedParameter)
      : null;
}
Try to create a message filler if the given parameter is a message parameter .
29,548
/**
 * Initializes XLog: all levels in debug builds and none in release builds;
 * output goes to both logcat and a date-named file under external storage
 * ("xlogsample"), with a tag-blacklist interceptor applied globally.
 */
private void initXlog() {
  LogConfiguration config = new LogConfiguration.Builder()
      .logLevel(BuildConfig.DEBUG ? LogLevel.ALL : LogLevel.NONE)
      .tag(getString(R.string.global_tag))
      .addInterceptor(new BlacklistTagsFilterInterceptor("blacklist1", "blacklist2", "blacklist3"))
      .build();
  Printer androidPrinter = new AndroidPrinter();
  Printer filePrinter = new FilePrinter.Builder(
          new File(Environment.getExternalStorageDirectory(), "xlogsample").getPath())
      .fileNameGenerator(new DateFileNameGenerator())
      .flattener(new ClassicFlattener())
      .build();
  XLog.init(config, androidPrinter, filePrinter);
  // Retain a reference to the file printer for global access.
  globalFilePrinter = filePrinter;
}
Initialize XLog .
29,549
/**
 * Formats a JSON string with the globally configured JSON formatter.
 * Asserts that XLog has been initialized first.
 */
public static String formatJson(String json) {
  assertInitialization();
  return XLog.sLogConfiguration.jsonFormatter.format(json);
}
Format a JSON string using default JSON formatter .
29,550
/**
 * Formats an XML string with the globally configured XML formatter.
 * Asserts that XLog has been initialized first.
 */
public static String formatXml(String xml) {
  assertInitialization();
  return XLog.sLogConfiguration.xmlFormatter.format(xml);
}
Format an XML string using default XML formatter .
29,551
/**
 * Formats a throwable with the globally configured throwable formatter.
 * Asserts that XLog has been initialized first.
 */
public static String formatThrowable(Throwable throwable) {
  assertInitialization();
  return XLog.sLogConfiguration.throwableFormatter.format(throwable);
}
Format a throwable using default throwable formatter .
29,552
/**
 * Formats a thread with the globally configured thread formatter.
 * Asserts that XLog has been initialized first.
 */
public static String formatThread(Thread thread) {
  assertInitialization();
  return XLog.sLogConfiguration.threadFormatter.format(thread);
}
Format a thread using default thread formatter .
29,553
/**
 * Formats a stack trace with the globally configured stack-trace formatter.
 * Asserts that XLog has been initialized first.
 */
public static String formatStackTrace(StackTraceElement[] stackTrace) {
  assertInitialization();
  return XLog.sLogConfiguration.stackTraceFormatter.format(stackTrace);
}
Format a stack trace using default stack trace formatter .
29,554
/**
 * Adds a border around the given string segments with the globally configured
 * border formatter. Asserts that XLog has been initialized first.
 */
public static String addBorder(String[] segments) {
  assertInitialization();
  return XLog.sLogConfiguration.borderFormatter.format(segments);
}
Add border to string segments using default border formatter .
29,555
/** Prints a single chunk of a (possibly split) log message via android.util.Log. */
void printChunk(int logLevel, String tag, String msg) {
  android.util.Log.println(logLevel, tag, msg);
}
Print single chunk of log in new line .
29,556
/**
 * Formats the timestamp into a date-based file name using the thread-local
 * SimpleDateFormat, whose time zone is reset to the current default on every
 * call. The logLevel parameter is unused: names depend only on the date.
 */
public String generateFileName(int logLevel, long timestamp) {
  SimpleDateFormat sdf = mLocalDateFormat.get();
  sdf.setTimeZone(TimeZone.getDefault());
  return sdf.format(new Date(timestamp));
}
Generate a file name which represent a specific date .
29,557
/**
 * Prints an object in a new line, honoring the configured log-level
 * threshold. A registered ObjectFormatter takes precedence over toString();
 * null objects print as "null".
 */
private <T> void println(int logLevel, T object) {
  if (logLevel < logConfiguration.logLevel) {
    return; // below the configured threshold
  }
  String rendered;
  if (object == null) {
    rendered = "null";
  } else {
    ObjectFormatter<? super T> objectFormatter = logConfiguration.getObjectFormatter(object);
    rendered = (objectFormatter != null) ? objectFormatter.format(object) : object.toString();
  }
  printlnInternal(logLevel, rendered);
}
Print an object in a new line .
29,558
/**
 * Prints an array (deep string form) in a new line, honoring the configured
 * log-level threshold.
 */
private void println(int logLevel, Object[] array) {
  if (logLevel < logConfiguration.logLevel) {
    return;
  }
  printlnInternal(logLevel, Arrays.deepToString(array));
}
Print an array in a new line .
29,559
/**
 * Core print path: attaches optional thread and stack-trace info, runs the
 * interceptor chain (which may drop or rewrite the log), then hands the
 * flattened message — optionally bordered — to the printer.
 */
private void printlnInternal(int logLevel, String msg) {
  String tag = logConfiguration.tag;
  String thread = logConfiguration.withThread
      ? logConfiguration.threadFormatter.format(Thread.currentThread())
      : null;
  String stackTrace = logConfiguration.withStackTrace
      ? logConfiguration.stackTraceFormatter.format(
          StackTraceUtil.getCroppedRealStackTrack(
              new Throwable().getStackTrace(),
              logConfiguration.stackTraceOrigin,
              logConfiguration.stackTraceDepth))
      : null;
  if (logConfiguration.interceptors != null) {
    LogItem log = new LogItem(logLevel, tag, thread, stackTrace, msg);
    for (Interceptor interceptor : logConfiguration.interceptors) {
      log = interceptor.intercept(log);
      if (log == null) {
        // An interceptor returning null means "drop this log".
        return;
      }
      if (log.tag == null || log.msg == null) {
        throw new IllegalStateException(
            "Interceptor " + interceptor + " should not remove the tag or message of a log,"
                + " if you don't want to print this log,"
                + " just return a null when intercept.");
      }
    }
    // Interceptors may have rewritten any part of the log item.
    logLevel = log.level;
    tag = log.tag;
    thread = log.threadInfo;
    stackTrace = log.stackTraceInfo;
    msg = log.msg;
  }
  printer.println(logLevel, tag,
      logConfiguration.withBorder
          ? logConfiguration.borderFormatter.format(new String[] {thread, stackTrace, msg})
          : ((thread != null ? (thread + SystemCompat.lineSeparator) : "")
              + (stackTrace != null ? (stackTrace + SystemCompat.lineSeparator) : "")
              + msg));
}
Print a log in a new line internally .
29,560
/**
 * Formats a message: with a format string it delegates to String.format;
 * with a null format it joins the arguments with ", ".
 */
private String formatArgs(String format, Object... args) {
  if (format == null) {
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < args.length; i++) {
      if (i > 0) {
        joined.append(", ");
      }
      joined.append(args[i]);
    }
    return joined.toString();
  }
  return String.format(format, args);
}
Format a string with arguments .
29,561
// Strip XLog-internal frames from the stack trace, then cap the remainder at maxDepth.
public static StackTraceElement[] getCroppedRealStackTrack(StackTraceElement[] stackTrace, String stackTraceOrigin, int maxDepth) {
    StackTraceElement[] realStack = getRealStackTrack(stackTrace, stackTraceOrigin);
    return cropStackTrace(realStack, maxDepth);
}
Get the real stack trace and then crop it with a max depth .
29,562
// Return the caller-side portion of the stack trace: scan from the bottom for the
// deepest frame belonging to the XLog library (or the caller-specified origin) and
// drop that frame plus everything above it.
private static StackTraceElement[] getRealStackTrack(StackTraceElement[] stackTrace, String stackTraceOrigin) {
    int total = stackTrace.length;
    int skip = 0;
    for (int i = total - 1; i >= 0; i--) {
        String frameClass = stackTrace[i].getClassName();
        boolean libraryFrame = frameClass.startsWith(XLOG_STACK_TRACE_ORIGIN)
                || (stackTraceOrigin != null && frameClass.startsWith(stackTraceOrigin));
        if (libraryFrame) {
            skip = i + 1;
            break;
        }
    }
    StackTraceElement[] realStack = new StackTraceElement[total - skip];
    System.arraycopy(stackTrace, skip, realStack, 0, realStack.length);
    return realStack;
}
Get the real stack trace; all elements that come from the XLog library are dropped.
29,563
// Limit a stack trace to at most maxDepth frames; a non-positive maxDepth means "no limit".
private static StackTraceElement[] cropStackTrace(StackTraceElement[] callStack, int maxDepth) {
    int depth = (maxDepth > 0) ? Math.min(maxDepth, callStack.length) : callStack.length;
    // copyOf always allocates a fresh array, matching the original System.arraycopy behavior.
    return java.util.Arrays.copyOf(callStack, depth);
}
Crop the stack trace with a max depth .
29,564
// Explain to the user why the permission is needed. The positive button either
// opens the app's system settings page (when the user previously chose "never
// ask again") or re-triggers the in-app permission request.
private void showPermissionRequestDialog ( final boolean gotoSettings ) { new AlertDialog . Builder ( this ) . setTitle ( R . string . permission_request ) . setMessage ( R . string . permission_explanation ) . setNegativeButton ( android . R . string . cancel , null ) . setPositiveButton ( gotoSettings ? R . string . go_to_settings : R . string . allow , new DialogInterface . OnClickListener ( ) { public void onClick ( DialogInterface dialog , int which ) { if ( gotoSettings ) { startAppSettings ( ) ; } else { requestPermission ( ) ; } } } ) . show ( ) ; }
Show a dialog explaining to the user why the permission is needed.
29,565
// Let the user edit the log tag in a dialog, pre-filled with the current value.
// A blank (whitespace-only) entry is ignored so the tag can never become empty.
private void showChangeTagDialog ( ) { View view = getLayoutInflater ( ) . inflate ( R . layout . dialog_change_tag , null , false ) ; final EditText tagEditText = ( EditText ) view . findViewById ( R . id . tag ) ; tagEditText . setText ( tagView . getText ( ) ) ; new AlertDialog . Builder ( this ) . setTitle ( R . string . change_tag ) . setView ( view ) . setNegativeButton ( android . R . string . cancel , null ) . setPositiveButton ( android . R . string . ok , new DialogInterface . OnClickListener ( ) { public void onClick ( DialogInterface dialog , int which ) { String tag = tagEditText . getText ( ) . toString ( ) . trim ( ) ; if ( ! tag . isEmpty ( ) ) { tagView . setText ( tag ) ; } } } ) . show ( ) ; }
Show a dialog for the user to change the tag; empty text is not allowed.
29,566
// Build a Logger from the on-screen controls (tag, thread info, stack-trace depth,
// border) and print the sample MESSAGE at the level selected in the spinner.
// The file printer is attached only when the storage permission has been granted.
private void printLog ( ) { Logger . Builder builder = new Logger . Builder ( ) ; String tag = tagView . getText ( ) . toString ( ) ; if ( ! TextUtils . isEmpty ( tag ) ) { builder . tag ( tag ) ; } if ( threadInfo . isChecked ( ) ) { builder . t ( ) ; } else { builder . nt ( ) ; } if ( stackTraceInfo . isChecked ( ) ) { builder . st ( STACK_TRACE_DEPTHS [ stackTraceDepth . getSelectedItemPosition ( ) ] ) ; } else { builder . nst ( ) ; } if ( border . isChecked ( ) ) { builder . b ( ) ; } else { builder . nb ( ) ; } if ( hasPermission ) { builder . printers ( viewPrinter , new AndroidPrinter ( ) , XLogSampleApplication . globalFilePrinter ) ; } else { builder . printers ( viewPrinter , new AndroidPrinter ( ) ) ; } Logger logger = builder . build ( ) ; int levelPosition = levelView . getSelectedItemPosition ( ) ; int level = LEVELS [ levelPosition ] ; switch ( level ) { case LogLevel . VERBOSE : logger . v ( MESSAGE ) ; break ; case LogLevel . DEBUG : logger . d ( MESSAGE ) ; break ; case LogLevel . INFO : logger . i ( MESSAGE ) ; break ; case LogLevel . WARN : logger . w ( MESSAGE ) ; break ; case LogLevel . ERROR : logger . e ( MESSAGE ) ; break ; } }
Print the configured log .
29,567
// Write one flattened log line to the current log file. Handles file rotation:
// when the name generator produces a new name, the old writer is closed, stale
// files are cleaned, and the new file is opened; when the backup strategy says
// the current file is full, it is renamed to "<name>.bak" (replacing any previous
// backup) and a fresh file with the original name is opened. Open failures are
// silently dropped — the log line is lost rather than crashing the caller.
private void doPrintln ( long timeMillis , int logLevel , String tag , String msg ) { String lastFileName = writer . getLastFileName ( ) ; if ( lastFileName == null || fileNameGenerator . isFileNameChangeable ( ) ) { String newFileName = fileNameGenerator . generateFileName ( logLevel , System . currentTimeMillis ( ) ) ; if ( newFileName == null || newFileName . trim ( ) . length ( ) == 0 ) { throw new IllegalArgumentException ( "File name should not be empty." ) ; } if ( ! newFileName . equals ( lastFileName ) ) { if ( writer . isOpened ( ) ) { writer . close ( ) ; } cleanLogFilesIfNecessary ( ) ; if ( ! writer . open ( newFileName ) ) { return ; } lastFileName = newFileName ; } } File lastFile = writer . getFile ( ) ; if ( backupStrategy . shouldBackup ( lastFile ) ) { writer . close ( ) ; File backupFile = new File ( folderPath , lastFileName + ".bak" ) ; if ( backupFile . exists ( ) ) { backupFile . delete ( ) ; } lastFile . renameTo ( backupFile ) ; if ( ! writer . open ( lastFileName ) ) { return ; } } String flattenedLog = flattener . flatten ( timeMillis , logLevel , tag , msg ) . toString ( ) ; writer . appendLog ( flattenedLog ) ; }
Do the real job of writing log to file .
29,568
// Delete every file in the log folder that the clean strategy marks as stale.
// listFiles() returns null when the folder does not exist or cannot be read.
private void cleanLogFilesIfNecessary() {
    File[] logFiles = new File(folderPath).listFiles();
    if (logFiles == null) {
        return;
    }
    for (File logFile : logFiles) {
        if (cleanStrategy.shouldClean(logFile)) {
            logFile.delete();
        }
    }
}
Clean log files that the clean strategy marks for removal.
29,569
// Static factory: create an AttributesBuilder pre-loaded with the given arguments.
public static AttributesBuilder attributes(String[] arguments) {
    final AttributesBuilder builder = new AttributesBuilder();
    builder.arguments(arguments);
    return builder;
}
Creates attributes builder .
29,570
// Set a custom or otherwise unlisted attribute on the underlying Attributes
// object; returns this builder so calls can be chained fluently.
public AttributesBuilder attribute ( String attributeName , Object attributeValue ) { this . attributes . setAttribute ( attributeName , attributeValue ) ; return this ; }
Sets custom or unlisted attribute
29,571
// Register each given directory (by absolute path) as a template directory.
// NOTE(review): setTemplateDirs is called once per directory inside the loop —
// presumably the underlying setter accumulates rather than replaces; confirm
// against the Options implementation, otherwise only the last dir survives.
public OptionsBuilder templateDirs ( File ... templateDirs ) { for ( File templateDir : templateDirs ) { this . options . setTemplateDirs ( templateDir . getAbsolutePath ( ) ) ; } return this ; }
Sets template directories .
29,572
// Set a custom or otherwise unlisted option on the underlying Options object;
// returns this builder so calls can be chained fluently.
public OptionsBuilder option ( String option , Object value ) { this . options . setOption ( option , value ) ; return this ; }
Sets a custom or unlisted option .
29,573
// Enable the "toc2" attribute and record where the table of contents is placed.
public void setTableOfContents2 ( Placement placement ) { this . attributes . put ( TOC_2 , toAsciidoctorFlag ( true ) ) ; this . attributes . put ( TOC_POSITION , placement . getPosition ( ) ) ; }
Sets table of contents 2 attribute .
29,574
// Enable the "toc" attribute and record where the table of contents is placed.
public void setTableOfContents ( Placement placement ) { this . attributes . put ( TOC , toAsciidoctorFlag ( true ) ) ; this . attributes . put ( TOC_POSITION , placement . getPosition ( ) ) ; }
Sets if a table of contents should be rendered or not .
29,575
// "showtitle" and "notitle" are mutually exclusive flags: enable exactly one
// of them and remove the other so a single flag is ever present at a time.
public void setShowTitle(boolean showTitle) {
    String flagToEnable = showTitle ? SHOW_TITLE : NOTITLE;
    String flagToRemove = showTitle ? NOTITLE : SHOW_TITLE;
    this.attributes.put(flagToEnable, true);
    this.attributes.remove(flagToRemove);
}
Sets showtitle value as an alias for notitle!
29,576
// Find the index of the last path separator preceding the first wildcard in the
// glob expression; this splits the expression into a wildcard-free prefix and a
// globbed suffix. Returns -1 when no separator precedes the first '*' (or no
// separator exists at all).
private int findIndexOfUnglobbedPart(String globExpression) {
    int lastSeparator = -1;
    for (int pos = 0; pos < globExpression.length(); pos++) {
        char current = globExpression.charAt(pos);
        if (current == '/' || current == '\\') {
            lastSeparator = pos;
        } else if (current == '*') {
            // First wildcard: the unglobbed prefix ends at the last separator seen.
            return lastSeparator;
        }
    }
    return lastSeparator;
}
This method computes the index of the last separator that splits the given glob expression into a leading part not containing wildcards and a trailing part containing wildcards . The last part of the path will never be counted to the leading part .
29,577
// Walk the base directory and collect every file that meets the scan criteria.
public List<File> scan() {
    return walkDirectory(new File(baseDir));
}
Method that finds all files that meets some criteria .
29,578
// Reflectively set a (possibly private) field declared on `source` to `value`
// on the given target instance, inside a privileged action so it works under a
// SecurityManager. A NoSuchFieldException from the action is unwrapped and
// rethrown as-is; any other cause is rethrown as the RuntimeException it must be
// (the inner ClassCastException branch guards against a non-runtime cause, which
// should be impossible here).
public static void setFieldValue ( final Class < ? > source , final Object target , final String fieldName , final Object value ) throws NoSuchFieldException { try { AccessController . doPrivileged ( new PrivilegedExceptionAction < Void > ( ) { public Void run ( ) throws Exception { Field field = source . getDeclaredField ( fieldName ) ; if ( ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } field . set ( target , value ) ; return null ; } } ) ; } catch ( final PrivilegedActionException pae ) { final Throwable t = pae . getCause ( ) ; if ( t instanceof NoSuchFieldException ) { throw ( NoSuchFieldException ) t ; } else { try { throw ( RuntimeException ) t ; } catch ( final ClassCastException cce ) { throw new RuntimeException ( "Obtained unchecked Exception; this code should never be reached" , t ) ; } } } }
Set a single Field value
29,579
/**
 * Extract metadata (exposed methods, attributes and oneway method names) from
 * the daemon's result dict and store it on this proxy. The original code
 * repeated the same Object->HashSet conversion three times; it is factored
 * into {@link #toStringSet} with identical behavior (a null value leaves the
 * corresponding field untouched).
 *
 * @param result metadata dict as returned by the daemon
 * @throws PyroException when the remote object exposes no methods and no attributes
 */
private void _processMetadata(HashMap<String, Object> result) {
    this.pyroMethods = toStringSet(result.get("methods"), this.pyroMethods);
    this.pyroAttrs = toStringSet(result.get("attrs"), this.pyroAttrs);
    this.pyroOneway = toStringSet(result.get("oneways"), this.pyroOneway);
    if (pyroMethods.isEmpty() && pyroAttrs.isEmpty()) {
        throw new PyroException("remote object doesn't expose any methods or attributes");
    }
}

/**
 * Convert a metadata value to a set of strings: an Object[] is copied element
 * by element, any other non-null value goes through getSetOfStrings, and a
 * null value yields the supplied current set unchanged.
 */
private HashSet<String> toStringSet(Object value, HashSet<String> current) {
    if (value instanceof Object[]) {
        HashSet<String> set = new HashSet<String>();
        for (Object element : (Object[]) value) {
            set.add((String) element);
        }
        return set;
    }
    if (value != null) {
        return getSetOfStrings(value);
    }
    return current;
}
Extract meta data and store it in the relevant properties on the proxy . If no attribute or method is exposed at all throw an exception .
29,580
/**
 * Convert the given object into a HashSet of strings. The object must either
 * already be a HashSet or be some other Collection of strings, in which case
 * it is copied into a new HashSet.
 *
 * The original used the ClassCastException from the first cast as control
 * flow; an explicit instanceof check expresses the same logic without
 * exception-driven branching.
 */
@SuppressWarnings("unchecked")
protected HashSet<String> getSetOfStrings(Object strings) {
    if (strings instanceof HashSet) {
        return (HashSet<String>) strings;
    }
    Collection<String> list = (Collection<String>) strings;
    return new HashSet<String>(list);
}
Converts the given object into a set of strings . The object must either be a HashSet already or a different collection type .
29,581
// Call a method on the remote Pyro object this proxy stands for, waiting for
// and returning the result (delegates to internal_call with oneway=false-style
// defaults: no flags, check the response).
public Object call ( String method , Object ... arguments ) throws PickleException , PyroException , IOException { return internal_call ( method , null , 0 , true , arguments ) ; }
Call a method on the remote Pyro object this proxy is for .
29,582
// Read a remote attribute by invoking the special __getattr__ method on the
// remote object.
public Object getattr ( String attr ) throws PickleException , PyroException , IOException { return this . internal_call ( "__getattr__" , null , 0 , false , attr ) ; }
Get the value of a remote attribute .
29,583
// Set a remote attribute by invoking the special __setattr__ method on the
// remote object.
public void setattr ( String attr , Object value ) throws PickleException , PyroException , IOException { this . internal_call ( "__setattr__" , null , 0 , false , attr , value ) ; }
Set a new value on a remote attribute .
29,584
/**
 * Close the network connection of this proxy. If the proxy is used again it
 * will automatically reconnect.
 *
 * Fix: the original closed the three resources in a single try block, so an
 * IOException from sock_in.close() skipped closing sock_out and the socket and
 * left all three fields set (the proxy then believed it was still connected).
 * Each resource is now closed independently, best effort, and the fields are
 * always reset.
 */
public void close() {
    if (this.sock == null) {
        return;
    }
    try { this.sock_in.close(); } catch (IOException ignored) { /* best effort */ }
    try { this.sock_out.close(); } catch (IOException ignored) { /* best effort */ }
    try { this.sock.close(); } catch (IOException ignored) { /* best effort */ }
    this.sock = null;
    this.sock_in = null;
    this.sock_out = null;
}
Close the network connection of this Proxy . If you re - use the proxy it will automatically reconnect .
29,585
// Perform the Pyro protocol connection handshake with the daemon: send a
// MSG_CONNECT carrying the handshake data (plus the object id when metadata is
// enabled), then receive and validate the CONNECTOK/CONNECTFAIL response.
// On CONNECTOK with the meta-on-connect flag, the response contains both the
// remote metadata (processed into the proxy's method/attr sets) and the
// handshake payload, which is validated; any failure path closes the socket
// before throwing. A response in an unsupported serialization format is
// downgraded to a CONNECTFAIL with a placeholder reason string.
@ SuppressWarnings ( "unchecked" ) protected void _handshake ( ) throws IOException { PyroSerializer ser = PyroSerializer . getFor ( Config . SERIALIZER ) ; Map < String , Object > handshakedata = new HashMap < String , Object > ( ) ; handshakedata . put ( "handshake" , pyroHandshake ) ; if ( Config . METADATA ) handshakedata . put ( "object" , objectid ) ; byte [ ] data = ser . serializeData ( handshakedata ) ; int flags = Config . METADATA ? Message . FLAGS_META_ON_CONNECT : 0 ; Message msg = new Message ( Message . MSG_CONNECT , data , ser . getSerializerId ( ) , flags , sequenceNr , annotations ( ) , pyroHmacKey ) ; IOUtil . send ( sock_out , msg . to_bytes ( ) ) ; if ( Config . MSG_TRACE_DIR != null ) { Message . TraceMessageSend ( sequenceNr , msg . get_header_bytes ( ) , msg . get_annotations_bytes ( ) , msg . data ) ; } msg = Message . recv ( sock_in , new int [ ] { Message . MSG_CONNECTOK , Message . MSG_CONNECTFAIL } , pyroHmacKey ) ; responseAnnotations ( msg . annotations , msg . type ) ; Object handshake_response = "?" ; if ( msg . data != null ) { if ( ( msg . flags & Message . FLAGS_COMPRESSED ) != 0 ) { _decompressMessageData ( msg ) ; } try { ser = PyroSerializer . getFor ( msg . serializer_id ) ; handshake_response = ser . deserializeData ( msg . data ) ; } catch ( Exception x ) { msg . type = Message . MSG_CONNECTFAIL ; handshake_response = "<not available because unsupported serialization format>" ; } } if ( msg . type == Message . MSG_CONNECTOK ) { if ( ( msg . flags & Message . FLAGS_META_ON_CONNECT ) != 0 ) { HashMap < String , Object > response_dict = ( HashMap < String , Object > ) handshake_response ; HashMap < String , Object > metadata = ( HashMap < String , Object > ) response_dict . get ( "meta" ) ; _processMetadata ( metadata ) ; handshake_response = response_dict . get ( "handshake" ) ; try { validateHandshake ( handshake_response ) ; } catch ( IOException x ) { close ( ) ; throw x ; } } } else if ( msg . type == Message . 
MSG_CONNECTFAIL ) { close ( ) ; throw new PyroException ( "connection rejected, reason: " + handshake_response ) ; } else { close ( ) ; throw new PyroException ( "connect: invalid msg type " + msg . type + " received" ) ; } }
Perform the Pyro protocol connection handshake with the Pyro daemon .
29,586
// Parse a fixed-size message header into a Message (annotations and payload are
// read later). Layout: bytes 0-3 magic "PYRO", 4-5 protocol version, 6-7 message
// type, 8-9 flags, 10-11 sequence number, 12-15 data size (big-endian), 16-17
// serializer id, 18-19 annotations size, 22-23 checksum (bytes 20-21 are
// skipped here — presumably reserved/unused in this protocol version; confirm
// against the Pyro wire-format spec). The checksum is a 16-bit sum of the
// header fields plus CHECKSUM_MAGIC.
public static Message from_header ( byte [ ] header ) { if ( header == null || header . length != HEADER_SIZE ) throw new PyroException ( "header data size mismatch" ) ; if ( header [ 0 ] != 'P' || header [ 1 ] != 'Y' || header [ 2 ] != 'R' || header [ 3 ] != 'O' ) throw new PyroException ( "invalid message" ) ; int version = ( ( header [ 4 ] & 0xff ) << 8 ) | ( header [ 5 ] & 0xff ) ; if ( version != Config . PROTOCOL_VERSION ) throw new PyroException ( "invalid protocol version: " + version ) ; int msg_type = ( ( header [ 6 ] & 0xff ) << 8 ) | ( header [ 7 ] & 0xff ) ; int flags = ( ( header [ 8 ] & 0xff ) << 8 ) | ( header [ 9 ] & 0xff ) ; int seq = ( ( header [ 10 ] & 0xff ) << 8 ) | ( header [ 11 ] & 0xff ) ; int data_size = header [ 12 ] & 0xff ; data_size <<= 8 ; data_size |= header [ 13 ] & 0xff ; data_size <<= 8 ; data_size |= header [ 14 ] & 0xff ; data_size <<= 8 ; data_size |= header [ 15 ] & 0xff ; int serializer_id = ( ( header [ 16 ] & 0xff ) << 8 ) | ( header [ 17 ] & 0xff ) ; int annotations_size = ( ( header [ 18 ] & 0xff ) << 8 ) | ( header [ 19 ] & 0xff ) ; int checksum = ( ( header [ 22 ] & 0xff ) << 8 ) | ( header [ 23 ] & 0xff ) ; int actual_checksum = ( msg_type + version + data_size + annotations_size + flags + serializer_id + seq + CHECKSUM_MAGIC ) & 0xffff ; if ( checksum != actual_checksum ) throw new PyroException ( "header checksum mismatch" ) ; Message msg = new Message ( msg_type , serializer_id , flags , seq ) ; msg . data_size = data_size ; msg . annotations_size = annotations_size ; return msg ; }
Parses a message header . Does not yet process the annotations chunks and message data .
29,587
// Return the serializer for the given type. The pickle serializer is eagerly
// available; the serpent serializer is lazily created under a class-level lock
// (so it only loads if serpent.jar is on the classpath), with a version gate
// requiring serpent >= 1.23. A LinkageError (serpent.jar missing) is reported
// as a PyroException with the original error as cause.
public static PyroSerializer getFor ( Config . SerializerType type ) { switch ( type ) { case pickle : return pickleSerializer ; case serpent : { synchronized ( PyroSerializer . class ) { if ( serpentSerializer == null ) { try { serpentSerializer = new SerpentSerializer ( ) ; final String requiredSerpentVersion = "1.23" ; if ( compareLibraryVersions ( net . razorvine . serpent . LibraryVersion . VERSION , requiredSerpentVersion ) < 0 ) { throw new java . lang . RuntimeException ( "serpent version " + requiredSerpentVersion + " (or newer) is required" ) ; } return serpentSerializer ; } catch ( LinkageError x ) { throw new PyroException ( "serpent serializer unavailable" , x ) ; } } } return serpentSerializer ; } default : throw new IllegalArgumentException ( "unrecognised serializer type: " + type ) ; } }
Get the serializer for the given type; the serpent serializer is lazily loaded only if serpent.jar is available.
29,588
// Pickle an object graph into an in-memory buffer and return the raw bytes.
public byte[] dumps(Object o) throws PickleException, IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    dump(o, buffer);
    buffer.flush();
    return buffer.toByteArray();
}
Pickle a given object graph returning the result as a byte array .
29,589
// Pickle an object graph to the output stream: write the PROTO opcode and
// protocol version, recursively save the object, then the STOP opcode, and
// flush. The memo table is (re)created per call when memoization is enabled.
// A non-zero recursion counter afterwards indicates an internal bookkeeping bug.
public void dump ( Object o , OutputStream stream ) throws IOException , PickleException { out = stream ; recurse = 0 ; if ( useMemo ) memo = new HashMap < Integer , Memo > ( ) ; out . write ( Opcodes . PROTO ) ; out . write ( PROTOCOL ) ; save ( o ) ; memo = null ; out . write ( Opcodes . STOP ) ; out . flush ( ) ; if ( recurse != 0 ) throw new PickleException ( "recursive structure error, please report this problem" ) ; }
Pickle a given object graph writing the result to the output stream .
29,590
// Pickle a single object (callable recursively, also from custom picklers):
// guard against runaway recursion, emit NONE for null, otherwise try a memo
// lookup first and then the type-based dispatch; an unhandled type raises a
// PickleException. The recursion counter is decremented on every success path.
public void save ( Object o ) throws PickleException , IOException { recurse ++ ; if ( recurse > MAX_RECURSE_DEPTH ) throw new java . lang . StackOverflowError ( "recursion too deep in Pickler.save (>" + MAX_RECURSE_DEPTH + ")" ) ; if ( o == null ) { out . write ( Opcodes . NONE ) ; recurse -- ; return ; } Class < ? > t = o . getClass ( ) ; if ( lookupMemo ( t , o ) || dispatch ( t , o ) ) { recurse -- ; return ; } throw new PickleException ( "couldn't pickle object of type " + t ) ; }
Pickle a single object and write its pickle representation to the output stream . Normally this is used internally by the pickler but you can also utilize it from within custom picklers . This is handy if as part of the custom pickler you need to write a couple of normal objects such as strings or ints that are already supported by the pickler . This method can be called recursively to output sub - objects .
29,591
// Record the object in the memo table and emit a BINPUT/LONG_BINPUT opcode so
// later occurrences can be emitted as references. The key is either the value
// hash or the identity hash depending on valueCompare.
// NOTE(review): the map is keyed by the raw int hash, so two distinct objects
// with colliding hashes share one entry — presumably acceptable here since
// lookupMemo re-checks equality/identity before reusing an index; confirm.
protected void writeMemo ( Object obj ) throws IOException { if ( ! this . useMemo ) return ; int hash = valueCompare ? obj . hashCode ( ) : System . identityHashCode ( obj ) ; if ( ! memo . containsKey ( hash ) ) { int memo_index = memo . size ( ) ; memo . put ( hash , new Memo ( obj , memo_index ) ) ; if ( memo_index <= 0xFF ) { out . write ( Opcodes . BINPUT ) ; out . write ( ( byte ) memo_index ) ; } else { out . write ( Opcodes . LONG_BINPUT ) ; byte [ ] index_bytes = PickleUtils . integer_to_bytes ( memo_index ) ; out . write ( index_bytes , 0 , 4 ) ; } } }
Write the object to the memo table and output a memo write opcode. Only works for hashable objects.
29,592
// Check the memo table for a previously pickled object; when found, emit a
// BINGET (index <= 0xff) or LONG_BINGET (4-byte index) opcode referring to it
// and return true so the caller skips re-pickling. Primitives are never memoized.
private boolean lookupMemo(Class<?> objectType, Object obj) throws IOException {
    if (!this.useMemo || objectType.isPrimitive()) {
        return false;
    }
    int hash = valueCompare ? obj.hashCode() : System.identityHashCode(obj);
    if (!memo.containsKey(hash)) {
        return false;
    }
    // Verify the stored object really matches (value equality or identity),
    // guarding against hash collisions.
    boolean sameObject = valueCompare ? memo.get(hash).obj.equals(obj) : memo.get(hash).obj == obj;
    if (!sameObject) {
        return false;
    }
    int memoIndex = memo.get(hash).index;
    if (memoIndex <= 0xff) {
        out.write(Opcodes.BINGET);
        out.write((byte) memoIndex);
    } else {
        out.write(Opcodes.LONG_BINGET);
        byte[] indexBytes = PickleUtils.integer_to_bytes(memoIndex);
        out.write(indexBytes, 0, 4);
    }
    return true;
}
Check the memo table and output a memo lookup if the object is found
29,593
// Find a custom pickler for the given class: exact class match first, then any
// registered interface or (abstract) base class the type is assignable to.
// Returns null when no custom pickler applies.
protected IObjectPickler getCustomPickler(Class<?> t) {
    IObjectPickler exactMatch = customPicklers.get(t);
    if (exactMatch != null) {
        return exactMatch;
    }
    for (Entry<Class<?>, IObjectPickler> registration : customPicklers.entrySet()) {
        if (registration.getKey().isAssignableFrom(t)) {
            return registration.getValue();
        }
    }
    return null;
}
Get the custom pickler for the given class, to be able to pickle not just built-in collection types. A custom pickler is matched on the interface or abstract base class that the object implements or inherits from.
29,594
/**
 * Pretty-print the object directly to standard output as UTF-8, then flush.
 *
 * Uses the Charset overload of OutputStreamWriter instead of the charset-name
 * string, which removes the (impossible but declared) UnsupportedEncodingException
 * path. The writer is flushed but deliberately not closed — closing it would
 * close System.out.
 *
 * @param o the object to prettyprint
 * @throws IOException when writing to stdout fails
 */
public static void print(Object o) throws IOException {
    OutputStreamWriter writer = new OutputStreamWriter(System.out, java.nio.charset.StandardCharsets.UTF_8);
    print(o, writer, true);
    writer.flush();
}
Prettyprint directly to the standard output .
29,595
// Register an object constructor for a custom class, keyed by the fully
// qualified "module.classname" name used in the pickle stream.
public static void registerConstructor ( String module , String classname , IObjectConstructor constructor ) { objectConstructors . put ( module + "." + classname , constructor ) ; }
Register additional object constructors for custom classes .
29,596
// Read a pickled object from the stream: dispatch one opcode at a time until an
// opcode (STOP) produces a real value; NO_RETURN_VALUE is the sentinel meaning
// "keep reading". A -1 opcode byte means the stream ended mid-pickle.
public Object load ( InputStream stream ) throws PickleException , IOException { stack = new UnpickleStack ( ) ; input = stream ; while ( true ) { short key = PickleUtils . readbyte ( input ) ; if ( key == - 1 ) throw new IOException ( "premature end of file" ) ; Object value = dispatch ( key ) ; if ( value != NO_RETURN_VALUE ) { return value ; } } }
Read a pickled object representation from the given input stream .
29,597
// Release unpickling resources: clear the work stack and memo table (if any)
// and close the input stream, best effort.
public void close() {
    if (stack != null) {
        stack.clear();
    }
    if (memo != null) {
        memo.clear();
    }
    if (input == null) {
        return;
    }
    try {
        input.close();
    } catch (IOException ignored) {
        // closing is best effort; matches original swallow
    }
}
Close the unpickler and frees the resources such as the unpickle stack and memo table .
29,598
// Read one line of text from the stream, terminated by LF or end of stream.
// The LF itself is appended only when includeLF is true. EOF before any
// character was read is an error; EOF after some content returns that content.
public static String readline(InputStream input, boolean includeLF) throws IOException {
    StringBuilder line = new StringBuilder();
    int ch;
    while ((ch = input.read()) != -1) {
        if (ch != '\n' || includeLF) {
            line.append((char) ch);
        }
        if (ch == '\n') {
            return line.toString();
        }
    }
    if (line.length() == 0) {
        throw new IOException("premature end of file");
    }
    return line.toString();
}
read a line of text possibly including the terminating LF char
29,599
// Fill buffer[offset .. offset+length) from the stream, looping until all
// requested bytes have arrived; throws when the stream ends early.
public static void readbytes_into(InputStream input, byte[] buffer, int offset, int length) throws IOException {
    int position = offset;
    int remaining = length;
    while (remaining > 0) {
        int bytesRead = input.read(buffer, position, remaining);
        if (bytesRead == -1) {
            throw new IOException("expected more bytes in input stream");
        }
        position += bytesRead;
        remaining -= bytesRead;
    }
}
read a number of signed bytes into the specified location in an existing byte array