idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
39,200 | private void understandSchema ( String schema ) throws JSONException { JSONObject j1 = new JSONObject ( schema ) ; JSONArray fields = j1 . getJSONArray ( FIELDS ) ; String fieldName ; String fieldTypeValue ; Object recName ; for ( int k = 0 ; k < fields . length ( ) ; k ++ ) { if ( fields . get ( k ) == null ) { contin... | understand the schema so that we can parse the rest of the file |
39,201 | private void iterateAndPreparePuts ( JSONObject eventDetails , Put p , Hadoop2RecordType recType ) throws JSONException { Iterator < ? > keys = eventDetails . keys ( ) ; while ( keys . hasNext ( ) ) { String key = ( String ) keys . next ( ) ; processAllTypes ( p , recType , eventDetails , key ) ; } } | iterate over the event details and prepare puts |
39,202 | private String getKey ( String key ) throws IllegalArgumentException { String checkKey = JobHistoryKeys . HADOOP2_TO_HADOOP1_MAPPING . containsKey ( key ) ? JobHistoryKeys . HADOOP2_TO_HADOOP1_MAPPING . get ( key ) : key ; return ( JobHistoryKeys . valueOf ( checkKey ) . toString ( ) ) ; } | maintains compatibility between hadoop 1 . 0 keys and hadoop 2 . 0 keys . It also confirms that this key exists in JobHistoryKeys enum |
39,203 | private void populatePut ( Put p , byte [ ] family , String key , long value ) { byte [ ] valueBytes = null ; valueBytes = ( value != 0L ) ? Bytes . toBytes ( value ) : Constants . ZERO_LONG_BYTES ; byte [ ] qualifier = Bytes . toBytes ( getKey ( key ) . toLowerCase ( ) ) ; p . addColumn ( family , qualifier , valueByt... | populates a put for long values |
39,204 | byte [ ] getValue ( String key , int value ) { byte [ ] valueBytes = null ; Class < ? > clazz = JobHistoryKeys . KEY_TYPES . get ( JobHistoryKeys . valueOf ( key ) ) ; if ( clazz == null ) { throw new IllegalArgumentException ( " unknown key " + key + " encountered while parsing " + this . jobKey ) ; } if ( Long . clas... | gets the int values as ints or longs some keys in 2 . 0 are now int they were longs in 1 . 0 this will maintain compatiblity between 1 . 0 and 2 . 0 by casting those ints to long |
39,205 | public byte [ ] getTaskKey ( String prefix , String jobNumber , String fullId ) { String taskComponent = fullId ; if ( fullId == null ) { taskComponent = "" ; } else { String expectedPrefix = prefix + jobNumber + "_" ; if ( ( fullId . startsWith ( expectedPrefix ) ) && ( fullId . length ( ) > expectedPrefix . length ( ... | Returns the Task ID or Task Attempt ID stripped of the leading job ID appended to the job row key . |
39,206 | public byte [ ] getAMKey ( String prefix , String fullId ) { String taskComponent = prefix + fullId ; return taskKeyConv . toBytes ( new TaskKey ( this . jobKey , taskComponent ) ) ; } | Returns the AM Attempt id stripped of the leading job ID appended to the job row key . |
39,207 | public void printAllPuts ( List < Put > p ) { for ( Put p1 : p ) { Map < byte [ ] , List < KeyValue > > d = p1 . getFamilyMap ( ) ; for ( byte [ ] k : d . keySet ( ) ) { System . out . println ( " k " + Bytes . toString ( k ) ) ; } for ( List < KeyValue > lkv : d . values ( ) ) { for ( KeyValue kv : lkv ) { System . ou... | utitlity function for printing all puts |
39,208 | public void moveFlow ( FlowQueueKey oldKey , FlowQueueKey newKey ) throws DataException , IOException { byte [ ] oldRowKey = queueKeyConverter . toBytes ( oldKey ) ; Get get = new Get ( oldRowKey ) ; Table flowQueueTable = null ; try { flowQueueTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . FLOW... | Moves a flow_queue record from one row key to another . All Cells in the existing row will be written to the new row . This would primarily be used for transitioning a flow s data from one status to another . |
39,209 | public PaginatedResult < Flow > getPaginatedFlowsForStatus ( String cluster , Flow . Status status , int limit , String user , byte [ ] startRow ) throws IOException { List < Flow > flows = getFlowsForStatus ( cluster , status , limit + 1 , user , startRow ) ; PaginatedResult < Flow > result = new PaginatedResult < Flo... | Returns a page of flows for the given cluster and status |
39,210 | public JobFile track ( FileStatus jobFileStatus ) { String jobfileName = jobFileStatus . getPath ( ) . getName ( ) ; JobFile jobFile = new JobFile ( jobfileName ) ; if ( jobFile . isJobConfFile ( ) || jobFile . isJobHistoryFile ( ) ) { track ( jobFile . getJobid ( ) ) ; long modificationTimeMillis = jobFileStatus . get... | Converts a jobFileStatus to a JobFile and tracks the min and max modification times and JobIds . |
39,211 | boolean processRecords ( Configuration conf , Connection hbaseConnection , String cluster , int batchSize , int threadCount , String processFileSubstring ) throws IOException , InterruptedException , ClassNotFoundException , ExecutionException , RowKeyParseException { List < ProcessRecord > processRecords = getProcessR... | Pick up the ranges of jobs to process from ProcessRecords . Skip raw rows that have already been processed . |
39,212 | private boolean runJobs ( int threadCount , List < JobRunner > jobRunners ) throws InterruptedException , ExecutionException { ExecutorService execSvc = Executors . newFixedThreadPool ( threadCount ) ; if ( ( jobRunners == null ) || ( jobRunners . size ( ) == 0 ) ) { return true ; } boolean success = true ; try { List ... | Run the jobs and wait for all of them to complete . |
39,213 | private Scan createFlowScan ( byte [ ] rowPrefix , int limit , String version ) { Scan scan = new Scan ( ) ; scan . setStartRow ( rowPrefix ) ; scan . setCaching ( Math . min ( limit , defaultScannerCaching ) ) ; Filter prefixFilter = new WhileMatchFilter ( new PrefixFilter ( rowPrefix ) ) ; if ( version != null && ver... | creates a scan for flow data |
39,214 | public JobDetails getJobByJobID ( String cluster , String jobId ) throws IOException { return getJobByJobID ( cluster , jobId , false ) ; } | Returns a specific job s data by job ID . This version does not populate the job s task data . |
39,215 | private Scan getTaskScan ( JobKey jobKey ) { byte [ ] startKey = Bytes . add ( jobKeyConv . toBytes ( jobKey ) , Constants . SEP_BYTES ) ; Scan scan = new Scan ( ) ; scan . setStartRow ( startKey ) ; scan . setFilter ( new WhileMatchFilter ( new PrefixFilter ( startKey ) ) ) ; scan . setCaching ( 500 ) ; return scan ; ... | Returns a Scan instance to retrieve all the task rows for a given job from the job_history_task table . |
39,216 | public static Configuration parseConfiguration ( Map < byte [ ] , byte [ ] > keyValues ) { Configuration config = new Configuration ( false ) ; byte [ ] configPrefix = Bytes . add ( Constants . JOB_CONF_COLUMN_PREFIX_BYTES , Constants . SEP_BYTES ) ; for ( Map . Entry < byte [ ] , byte [ ] > entry : keyValues . entrySe... | Converts serialized configuration properties back in to a Configuration object . |
39,217 | public static CounterMap parseCounters ( byte [ ] prefix , Map < byte [ ] , byte [ ] > keyValues ) { CounterMap counterValues = new CounterMap ( ) ; byte [ ] counterPrefix = Bytes . add ( prefix , Constants . SEP_BYTES ) ; for ( Map . Entry < byte [ ] , byte [ ] > entry : keyValues . entrySet ( ) ) { byte [ ] key = ent... | Converts encoded key values back into counter objects . |
39,218 | public int removeJob ( JobKey key ) throws IOException { byte [ ] jobRow = jobKeyConv . toBytes ( key ) ; Table historyTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_TABLE ) ) ; historyTable . delete ( new Delete ( jobRow ) ) ; historyTable . close ( ) ; int deleteCount = 1 ; Scan taskSc... | Removes the job s row from the job_history table and all related task rows from the job_history_task table . |
39,219 | public static String getUserNameInConf ( Configuration jobConf ) throws IllegalArgumentException { String userName = jobConf . get ( Constants . USER_CONF_KEY_HADOOP2 ) ; if ( StringUtils . isBlank ( userName ) ) { userName = jobConf . get ( Constants . USER_CONF_KEY ) ; if ( StringUtils . isBlank ( userName ) ) { thro... | Get the user name from the job conf check for hadoop2 config param then hadoop1 |
39,220 | public static boolean contains ( Configuration jobConf , String name ) { if ( StringUtils . isNotBlank ( jobConf . get ( name ) ) ) { return true ; } else { return false ; } } | checks if the jobConf contains a certain parameter |
39,221 | public static String getQueueName ( Configuration jobConf ) { String hRavenQueueName = jobConf . get ( Constants . QUEUENAME_HADOOP2 ) ; if ( StringUtils . isBlank ( hRavenQueueName ) ) { hRavenQueueName = jobConf . get ( Constants . FAIR_SCHEDULER_POOLNAME_HADOOP1 ) ; if ( StringUtils . isBlank ( hRavenQueueName ) ) {... | retrieves the queue name from a hadoop conf looks for hadoop2 and hadoop1 settings |
39,222 | public List < VersionInfo > getDistinctVersions ( String cluster , String user , String appId ) throws IOException { Get get = new Get ( getRowKey ( cluster , user , appId ) ) ; List < VersionInfo > versions = Lists . newArrayList ( ) ; Long ts = 0L ; Table versionsTable = null ; try { versionsTable = hbaseConnection .... | Returns the list of distinct versions for the given application sorted in reverse chronological order |
39,223 | public boolean addVersion ( String cluster , String user , String appId , String version , long timestamp ) throws IOException { boolean updated = false ; byte [ ] rowKey = getRowKey ( cluster , user , appId ) ; byte [ ] versionCol = Bytes . toBytes ( version ) ; int attempts = 0 ; int maxAttempts = 3 ; boolean checkFo... | Adds an entry for the given version if it does not already exist . If the given timestamp is earlier than the currently stored timestamp for the version it will be updated . |
39,224 | public List < Flow > fetchFlowsWithConfig ( String cluster , String username , String batchDesc , String signature , int limit , String ... configProps ) throws IOException { LOG . info ( String . format ( "Fetching last %d matching jobs for cluster=%s, user.name=%s, " + "batch.desc=%s, pig.logical.plan.signature=%s" ,... | Fetches a list of flows that include jobs in that flow that include the specified configuration properties |
39,225 | public List < Flow > fetchFlowsWithConfig ( String cluster , String username , String batchDesc , String signature , int limit , List < String > flowResponseFilters , List < String > jobResponseFilters , List < String > configPropertyFields ) throws IOException { LOG . info ( String . format ( "Fetching last %d matchin... | Fetches a list of flows that include jobs in that flow that include the specified flow fields and job fields specified configuration properties |
39,226 | private StringBuilder buildFlowURL ( String cluster , String username , String batchDesc , String signature , int limit , List < String > flowResponseFilters , List < String > jobResponseFilters ) throws IOException { StringBuilder urlStringBuilder = new StringBuilder ( ) ; urlStringBuilder . append ( "http://" ) ; url... | builds up a StringBuilder with the parameters for the FLOW API |
39,227 | public List < TaskDetails > fetchTaskDetails ( String cluster , String jobId ) throws IOException { String urlString = String . format ( "http://%s/api/v1/tasks/%s/%s" , apiHostname , cluster , jobId ) ; return retrieveTaskDetailsFromUrl ( urlString ) ; } | Fetch details tasks of a given job . |
39,228 | public List < TaskDetails > fetchTaskDetails ( String cluster , String jobId , List < String > taskResponseFilters ) throws IOException { String taskFilters = StringUtil . buildParam ( "include" , taskResponseFilters ) ; String urlString = String . format ( "http://%s/api/v1/tasks/%s/%s?%s" , apiHostname , cluster , jo... | Fetch details tasks of a given job for the specified fields |
39,229 | private void aggreagteJobStats ( JobDetails jobDetails , byte [ ] rowKey , Context context , AggregationConstants . AGGREGATION_TYPE aggType ) throws IOException , InterruptedException { byte [ ] aggStatusCol = null ; switch ( aggType ) { case DAILY : aggStatusCol = AggregationConstants . JOB_DAILY_AGGREGATION_STATUS_C... | aggregate this job s stats only if re - aggregation is turned on OR aggreation is on AND job not already aggregated |
39,230 | private Put getMegaByteMillisPut ( Long mbMillis , JobKey jobKey ) { Put pMb = new Put ( jobKeyConv . toBytes ( jobKey ) ) ; pMb . addColumn ( Constants . INFO_FAM_BYTES , Constants . MEGABYTEMILLIS_BYTES , Bytes . toBytes ( mbMillis ) ) ; return pMb ; } | generates a put for the megabytemillis |
39,231 | Properties loadCostProperties ( Path cachePath , String machineType ) { Properties prop = new Properties ( ) ; InputStream inp = null ; try { inp = new FileInputStream ( cachePath . toString ( ) ) ; prop . load ( inp ) ; return prop ; } catch ( FileNotFoundException fnf ) { LOG . error ( "cost properties does not exist... | looks for cost file in distributed cache |
39,232 | private Double getJobCost ( Long mbMillis , Configuration currentConf ) { Double computeTco = 0.0 ; Long machineMemory = 0L ; Properties prop = null ; String machineType = currentConf . get ( Constants . HRAVEN_MACHINE_TYPE , "default" ) ; LOG . debug ( " machine type " + machineType ) ; try { Path [ ] cacheFiles = Dis... | calculates the cost of this job based on mbMillis machineType and cost details from the properties file |
39,233 | private Put getJobCostPut ( Double jobCost , JobKey jobKey ) { Put pJobCost = new Put ( jobKeyConv . toBytes ( jobKey ) ) ; pJobCost . addColumn ( Constants . INFO_FAM_BYTES , Constants . JOBCOST_BYTES , Bytes . toBytes ( jobCost ) ) ; return pJobCost ; } | generates a put for the job cost |
39,234 | public int compareTo ( JobDetails otherJob ) { if ( otherJob == null ) { return - 1 ; } return new CompareToBuilder ( ) . append ( this . jobKey , otherJob . getJobKey ( ) ) . toComparison ( ) ; } | Compares two JobDetails objects on the basis of their JobKey |
39,235 | Long getCounterValueAsLong ( final CounterMap counters , final String counterGroupName , final String counterName ) { Counter c1 = counters . getCounter ( counterGroupName , counterName ) ; if ( c1 != null ) { return c1 . getValue ( ) ; } else { return 0L ; } } | return a value for that counters from the NavigableMap as a Long |
39,236 | private HadoopVersion getHadoopVersionFromResult ( final JobHistoryKeys key , final NavigableMap < byte [ ] , byte [ ] > infoValues ) { byte [ ] value = infoValues . get ( JobHistoryKeys . KEYS_TO_BYTES . get ( key ) ) ; if ( value != null ) { String hv = Bytes . toString ( value ) ; return HadoopVersion . valueOf ( hv... | return an enum value from the NavigableMap for hadoop version |
39,237 | private byte [ ] readJobFile ( FileStatus fileStatus ) throws IOException { byte [ ] rawBytes = null ; FSDataInputStream fsdis = null ; try { long fileLength = fileStatus . getLen ( ) ; int fileLengthInt = ( int ) fileLength ; rawBytes = new byte [ fileLengthInt ] ; fsdis = hdfs . open ( fileStatus . getPath ( ) ) ; IO... | Get the raw bytes and the last modification millis for this JobFile |
39,238 | public static String cleanseToken ( String token ) { if ( token == null || token . length ( ) == 0 ) { return token ; } ; String cleansed = token . replaceAll ( SPACE , UNDERSCORE ) ; cleansed = cleansed . replaceAll ( Constants . SEP , UNDERSCORE ) ; return cleansed ; } | Takes a string token to be used as a key or qualifier and cleanses out reserved tokens . This operation is not symetrical . Logic is to replace all spaces and exclamation points with underscores . |
39,239 | public static String buildParam ( String paramName , List < String > paramArgs ) throws IOException { StringBuilder sb = new StringBuilder ( ) ; for ( String arg : paramArgs ) { if ( sb . length ( ) > 0 ) { sb . append ( "&" ) ; } sb . append ( paramName ) . append ( "=" ) . append ( URLEncoder . encode ( arg , "UTF-8"... | builds up a String with the parameters for the filtering of fields |
39,240 | public synchronized void seek ( long position ) throws IOException { if ( position < 0 || position >= count ) { throw new IOException ( "cannot seek position " + position + " as it is out of bounds" ) ; } pos = ( int ) position ; } | Seeks and sets position to the specified value . |
39,241 | protected JobDesc create ( QualifiedJobId qualifiedJobId , Configuration jobConf , String appId , String version , Framework framework , long submitTimeMillis ) { if ( null == qualifiedJobId ) { throw new IllegalArgumentException ( "Cannot create a JobKey from a null qualifiedJobId." ) ; } String userName = HadoopConfU... | Factory method to be used by subclasses . |
39,242 | protected String cleanAppId ( String appId ) { return ( appId != null ) ? StringUtil . cleanseToken ( appId ) : Constants . UNKNOWN ; } | Given a potential value for appId return a string that is safe to use in the jobKey |
39,243 | public void populate ( Result result ) { NavigableMap < byte [ ] , byte [ ] > infoValues = result . getFamilyMap ( HdfsConstants . DISK_INFO_FAM_BYTES ) ; this . fileCount += ByteUtil . getValueAsLong ( HdfsConstants . FILE_COUNT_COLUMN_BYTES , infoValues ) ; this . dirCount += ByteUtil . getValueAsLong ( HdfsConstants... | populates the hdfs stats by looking through the hbase result |
39,244 | private static void traverseDirs ( List < FileStatus > fileStatusesList , FileSystem hdfs , Path inputPath , JobFileModifiedRangePathFilter jobFileModifiedRangePathFilter ) throws IOException { FileStatus allFiles [ ] = hdfs . listStatus ( inputPath ) ; for ( FileStatus aFile : allFiles ) { if ( aFile . isDir ( ) ) { t... | Recursively traverses the dirs to get the list of files for a given path filtered as per the input path range filter |
39,245 | public static FileStatus [ ] listFiles ( boolean recurse , FileSystem hdfs , Path inputPath , JobFileModifiedRangePathFilter jobFileModifiedRangePathFilter ) throws IOException { if ( recurse ) { List < FileStatus > fileStatusesList = new ArrayList < FileStatus > ( ) ; traverseDirs ( fileStatusesList , hdfs , inputPath... | Gets the list of files for a given path filtered as per the input path range filter Can go into directories recursively |
39,246 | static String getJobIdFromPath ( Path aPath ) { String fileName = aPath . getName ( ) ; JobFile jf = new JobFile ( fileName ) ; String jobId = jf . getJobid ( ) ; if ( jobId == null ) { throw new ProcessingException ( "job id is null for " + aPath . toUri ( ) ) ; } return jobId ; } | extracts the job id from a Path |
39,247 | public static JobDescFactoryBase getFrameworkSpecificJobDescFactory ( Configuration jobConf ) { Framework framework = getFramework ( jobConf ) ; switch ( framework ) { case PIG : return PIG_JOB_DESC_FACTORY ; case SCALDING : return SCALDING_JOB_DESC_FACTORY ; default : return MR_JOB_DESC_FACTORY ; } } | get framework specific JobDescFactory based on configuration |
39,248 | public static String getCluster ( Configuration jobConf ) { String jobtracker = jobConf . get ( RESOURCE_MANAGER_KEY ) ; if ( jobtracker == null ) { jobtracker = jobConf . get ( JOBTRACKER_KEY ) ; } String cluster = null ; if ( jobtracker != null ) { int portIdx = jobtracker . indexOf ( ':' ) ; if ( portIdx > - 1 ) { j... | Returns the cluster that a give job was run on by mapping the jobtracker hostname to an identifier . |
39,249 | String stripAppId ( String origId ) { if ( origId == null || origId . isEmpty ( ) ) { return "" ; } Matcher m = stripBracketsPattern . matcher ( origId ) ; String cleanedAppId = m . replaceAll ( "" ) ; Matcher tailMatcher = stripSequencePattern . matcher ( cleanedAppId ) ; if ( tailMatcher . matches ( ) ) { cleanedAppI... | Strips out metadata in brackets to get a clean app name . There are multiple job name formats used by various frameworks . This method attempts to normalize these job names into a somewhat human readable appId format . |
39,250 | static long getFlowSubmitTimeMillis ( Configuration jobConf , long submitTimeMillis ) { long cascadingSubmitTimeMillis = jobConf . getLong ( Constants . CASCADING_RUN_CONF_KEY , 0 ) ; if ( cascadingSubmitTimeMillis == 0 ) { String flowId = jobConf . get ( Constants . CASCADING_FLOW_ID_CONF_KEY ) ; if ( flowId != null &... | Returns the flow submit time for this job or a computed substitute that will at least be consistent for all jobs in a flow . |
39,251 | private List < ProcessRecord > createFromResults ( ResultScanner scanner , int maxCount ) { if ( ( maxCount <= 0 ) || ( scanner == null ) ) { return new ArrayList < ProcessRecord > ( 0 ) ; } List < ProcessRecord > records = new ArrayList < ProcessRecord > ( ) ; for ( Result result : scanner ) { byte [ ] row = result . ... | Transform results pulled from a scanner and turn into a list of ProcessRecords . |
39,252 | public ProcessRecord setProcessState ( ProcessRecord processRecord , ProcessState newState ) throws IOException { Put put = new Put ( keyConv . toBytes ( processRecord . getKey ( ) ) ) ; put . addColumn ( Constants . INFO_FAM_BYTES , Constants . PROCESSING_STATE_COLUMN_BYTES , Bytes . toBytes ( newState . getCode ( ) )... | Set the process state for a given processRecord . |
39,253 | public int compareTo ( Flow otherFlow ) { if ( otherFlow == null ) { return - 1 ; } return new CompareToBuilder ( ) . append ( this . key , otherFlow . getFlowKey ( ) ) . toComparison ( ) ; } | Compares two Flow objects on the basis of their FlowKeys |
39,254 | public List < AppSummary > getNewApps ( JobHistoryService jhs , String cluster , String user , long startTime , long endTime , int limit ) throws IOException { byte [ ] startRow = null ; if ( StringUtils . isNotBlank ( user ) ) { startRow = ByteUtil . join ( Constants . SEP_BYTES , Bytes . toBytes ( cluster ) , Bytes .... | scans the app version table to look for jobs that showed up in the given time range creates the flow key that maps to these apps |
39,255 | public List < AppKey > createNewAppKeysFromResults ( Scan scan , long startTime , long endTime , int maxCount ) throws IOException { ResultScanner scanner = null ; List < AppKey > newAppsKeys = new ArrayList < AppKey > ( ) ; Table versionsTable = null ; try { Stopwatch timer = new Stopwatch ( ) . start ( ) ; int rowCou... | creates a list of appkeys from the hbase scan |
39,256 | private AppKey getNewAppKeyFromResult ( Result result , long startTime , long endTime ) throws IOException { byte [ ] rowKey = result . getRow ( ) ; byte [ ] [ ] keyComponents = ByteUtil . split ( rowKey , Constants . SEP_BYTES ) ; String cluster = Bytes . toString ( keyComponents [ 0 ] ) ; String user = Bytes . toStri... | constructs App key from the result set based on cluster user appId picks those results that satisfy the time range criteria |
39,257 | public boolean aggregateJobDetails ( JobDetails jobDetails , AggregationConstants . AGGREGATION_TYPE aggType ) { Table aggTable = null ; try { switch ( aggType ) { case DAILY : aggTable = hbaseConnection . getTable ( TableName . valueOf ( AggregationConstants . AGG_DAILY_TABLE ) ) ; break ; case WEEKLY : aggTable = hba... | creates a list of puts that aggregate the job details and stores in daily or weekly aggregation table |
39,258 | long getNumberRunsScratch ( Map < byte [ ] , byte [ ] > rawFamily ) { long numberRuns = 0L ; if ( rawFamily != null ) { numberRuns = rawFamily . size ( ) ; } if ( numberRuns == 0L ) { LOG . error ( "Number of runs in scratch column family can't be 0," + " if processing within TTL" ) ; throw new ProcessingException ( "N... | interprets the number of runs based on number of columns in raw col family |
39,259 | private Increment incrementAppSummary ( AppAggregationKey appAggKey , JobDetails jobDetails ) { Increment aggIncrement = new Increment ( aggConv . toBytes ( appAggKey ) ) ; aggIncrement . addColumn ( Constants . INFO_FAM_BYTES , AggregationConstants . TOTAL_MAPS_BYTES , jobDetails . getTotalMaps ( ) ) ; aggIncrement . ... | creates an Increment to aggregate job details |
39,260 | boolean updateQueue ( AppAggregationKey appAggKey , Table aggTable , JobDetails jobDetails ) throws IOException { byte [ ] rowKey = aggConv . toBytes ( appAggKey ) ; Get g = new Get ( rowKey ) ; g . addColumn ( AggregationConstants . INFO_FAM_BYTES , AggregationConstants . HRAVEN_QUEUE_BYTES ) ; Result r = aggTable . g... | updates the queue list for this app aggregation |
39,261 | boolean executeCheckAndPut ( Table aggTable , byte [ ] rowKey , byte [ ] existingValueBytes , byte [ ] newValueBytes , byte [ ] famBytes , byte [ ] colBytes ) throws IOException { Put put = new Put ( rowKey ) ; put . addColumn ( famBytes , colBytes , newValueBytes ) ; boolean statusCheckAndPut = aggTable . checkAndPut ... | method to execute an hbase checkAndPut operation |
39,262 | boolean incrNumberRuns ( List < Cell > column , Table aggTable , AppAggregationKey appAggKey ) throws IOException { long expectedValueBeforePut = 0L ; if ( column . size ( ) > 0 ) { try { expectedValueBeforePut = Bytes . toLong ( column . get ( 0 ) . getValue ( ) ) ; } catch ( NumberFormatException e ) { LOG . error ( ... | checks and increments the number of runs for this app aggregation . no need to retry since another map task may have updated it in the mean time |
39,263 | static void loadHadoopClustersProps ( String filename ) { Properties prop = new Properties ( ) ; if ( StringUtils . isBlank ( filename ) ) { filename = Constants . HRAVEN_CLUSTER_PROPERTIES_FILENAME ; } try { InputStream inp = Cluster . class . getResourceAsStream ( "/" + filename ) ; if ( inp == null ) { LOG . error (... | testing with different properties file names |
39,264 | public void load ( ResourceLocation resourceLocation ) { IResource res = Silenced . get ( ( ) -> Minecraft . getMinecraft ( ) . getResourceManager ( ) . getResource ( resourceLocation ) ) ; if ( res == null ) return ; GsonBuilder gsonBuilder = new GsonBuilder ( ) ; gsonBuilder . registerTypeAdapter ( AnimationImporter ... | Loads and reads the JSON . |
39,265 | public Multimap < String , Anim > deserializeAnim ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) throws JsonParseException { Multimap < String , Anim > anims = ArrayListMultimap . create ( ) ; JsonObject obj = json . getAsJsonObject ( ) ; TypeToken < ArrayList < Anim > > token = new TypeToken ... | Deserialize anims multimap . |
39,266 | public boolean onButtonPress ( MouseButton button ) { if ( ! isEnabled ( ) ) return false ; return parent != null ? parent . onButtonPress ( button ) : false ; } | On button press . |
39,267 | public boolean onButtonRelease ( MouseButton button ) { if ( ! isEnabled ( ) ) return false ; return parent != null ? parent . onButtonRelease ( button ) : false ; } | On button release . |
39,268 | public boolean onDoubleClick ( MouseButton button ) { if ( ! isEnabled ( ) ) return false ; return parent != null ? parent . onDoubleClick ( button ) : false ; } | On double click . |
39,269 | public boolean onDrag ( MouseButton button ) { if ( ! isEnabled ( ) ) return false ; return parent != null ? parent . onDrag ( button ) : false ; } | On drag . |
39,270 | public boolean onScrollWheel ( int delta ) { if ( ! isEnabled ( ) ) return false ; for ( IControlComponent c : controlComponents ) if ( c . onScrollWheel ( delta ) ) return true ; return parent != null && ! ( this instanceof IControlComponent ) ? parent . onScrollWheel ( delta ) : false ; } | On scroll wheel . |
39,271 | public void execute ( MinecraftServer server , ICommandSender sender , String [ ] params ) throws CommandException { if ( params . length == 0 ) throw new WrongUsageException ( "malisiscore.commands.usage" ) ; if ( ! parameters . contains ( params [ 0 ] ) ) throw new WrongUsageException ( "malisiscore.commands.usage" )... | Processes the command . |
39,272 | public static MethodNode findMethod ( ClassNode clazz , String name ) { for ( MethodNode method : clazz . methods ) { if ( method . name . equals ( name ) ) { return method ; } } return null ; } | Finds the method with the given name . If multiple methods with the same name exist the first one will be returned |
39,273 | public static AbstractInsnNode findInstruction ( MethodNode method , InsnList matches , int index ) { AbstractInsnNode node = method . instructions . get ( index ) ; AbstractInsnNode match = matches . getFirst ( ) ; while ( node != null ) { if ( insnEqual ( node , match ) ) { AbstractInsnNode m = match . getNext ( ) ; ... | Finds instruction a specific instruction list inside a method starting from the specified index . |
39,274 | public MBlockPos add ( int x , int y , int z ) { return new MBlockPos ( this . getX ( ) + x , this . getY ( ) + y , this . getZ ( ) + z ) ; } | Add the given coordinates to the coordinates of this BlockPos |
39,275 | public MBlockPos offset ( EnumFacing facing , int n ) { return new MBlockPos ( this . getX ( ) + facing . getFrontOffsetX ( ) * n , this . getY ( ) + facing . getFrontOffsetY ( ) * n , this . getZ ( ) + facing . getFrontOffsetZ ( ) * n ) ; } | Offsets this BlockPos n blocks in the given direction |
39,276 | public void setPickedItemStack ( ItemStack itemStack ) { pickedItemStack = checkNotNull ( itemStack ) ; owner . inventory . setItemStack ( itemStack ) ; } | Sets the currently picked itemStack . Update player inventory . |
39,277 | public boolean shouldEndDrag ( int button ) { if ( ! isDraggingItemStack ( ) ) return false ; if ( dragType == DRAG_TYPE_ONE || dragType == DRAG_TYPE_SPREAD ) return dragType == button && draggedSlots . size ( ) > 1 ; return dragType == DRAG_TYPE_PICKUP ; } | Checks if the dragging should end based on the mouse button clicked . |
39,278 | public boolean shouldResetDrag ( int button ) { if ( ! isDraggingItemStack ( ) ) return false ; if ( dragType == DRAG_TYPE_SPREAD ) return button == 1 && draggedSlots . size ( ) > 1 ; if ( dragType == DRAG_TYPE_ONE ) return button == 0 && draggedSlots . size ( ) > 1 ; return dragType != DRAG_TYPE_PICKUP ; } | Checks if the dragging should be reset based on the mouse button clicked . |
39,279 | public void detectAndSendChanges ( ) { playerInventoryCache . sendChanges ( ) ; inventoryCaches . values ( ) . forEach ( InventoryCache :: sendChanges ) ; pickedItemStackCache . update ( ) ; if ( pickedItemStackCache . hasChanged ( ) ) UpdateInventorySlotsMessage . updatePickedItemStack ( pickedItemStackCache . get ( )... | Sends all changes for base inventory player s inventory picked up itemStack and dragged itemStacks . |
39,280 | private ItemStack handleDropPickedStack ( boolean fullStack ) { ItemUtils . ItemStackSplitter iss = new ItemUtils . ItemStackSplitter ( pickedItemStack ) ; iss . split ( fullStack ? ItemUtils . FULL_STACK : 1 ) ; owner . dropItem ( iss . split , true ) ; setPickedItemStack ( iss . source ) ; return iss . source ; } | Drops one or the full itemStack currently picked up . |
39,281 | private ItemStack handleNormalClick ( MalisisSlot slot , boolean fullStack ) { if ( ! getPickedItemStack ( ) . isEmpty ( ) && ! slot . isItemValid ( pickedItemStack ) ) return getPickedItemStack ( ) ; if ( ! getPickedItemStack ( ) . isEmpty ( ) ) { if ( slot . isState ( PLAYER_INSERT | PLAYER_EXTRACT ) ) setPickedItemS... | Handles the normal left or right click . |
39,282 | private ItemStack handleShiftClick ( MalisisInventory inventory , MalisisSlot slot ) { ItemStack itemStack = transferSlotOutOfInventory ( inventory , slot ) ; slot . setItemStack ( itemStack ) ; slot . onSlotChanged ( ) ; return itemStack ; } | Handles shift clicking a slot . |
39,283 | private ItemStack handleHotbar ( MalisisInventory inventory , MalisisSlot hoveredSlot , int num ) { MalisisSlot hotbarSlot = getPlayerInventory ( ) . getSlot ( num ) ; if ( inventory == getPlayerInventory ( ) || hoveredSlot . getItemStack ( ) . isEmpty ( ) ) { if ( hoveredSlot . isState ( PLAYER_INSERT ) ) { ItemStack ... | Handles player pressing 1 - 9 key while hovering a slot . |
39,284 | private ItemStack handleDropSlot ( MalisisSlot hoveredSlot , boolean fullStack ) { ItemStack itemStack = hoveredSlot . getItemStack ( ) ; if ( itemStack . isEmpty ( ) || ! hoveredSlot . isState ( PLAYER_EXTRACT ) ) return itemStack ; ItemUtils . ItemStackSplitter iss = new ItemUtils . ItemStackSplitter ( hoveredSlot . ... | Drops itemStack from hovering slot . |
39,285 | private ItemStack handleDoubleClick ( MalisisInventory inventory , MalisisSlot slot , boolean shiftClick ) { if ( ! inventory . state . is ( PLAYER_EXTRACT ) ) return ItemStack . EMPTY ; if ( ! shiftClick && ! pickedItemStack . isEmpty ( ) ) { for ( int i = 0 ; i < 2 ; i ++ ) { for ( MalisisInventory inv : getInventori... | Handle double clicking on a slot . |
39,286 | private ItemStack handlePickBlock ( MalisisSlot slot ) { if ( slot . getItemStack ( ) . isEmpty ( ) || ! pickedItemStack . isEmpty ( ) ) return ItemStack . EMPTY ; ItemStack itemStack = ItemUtils . copy ( slot . getItemStack ( ) ) ; itemStack . setCount ( itemStack . getMaxStackSize ( ) ) ; setPickedItemStack ( itemSta... | Picks up the itemStack in the slot . |
39,287 | protected void resetDrag ( ) { if ( ! isDraggingItemStack ( ) ) return ; pickedItemStack = draggedItemStack . copy ( ) ; draggedSlots . forEach ( s -> s . setDraggedItemStack ( ItemStack . EMPTY ) ) ; draggedSlots . clear ( ) ; draggedItemStack = null ; dragType = - 1 ; } | Resets the dragging state . |
39,288 | public float getStringWidth ( String text , FontOptions options ) { if ( StringUtils . isEmpty ( text ) ) return 0 ; StringWalker walker = new StringWalker ( text , options ) ; walker . walkToEnd ( ) ; return walker . width ( ) ; } | Gets the rendering width of the text . |
39,289 | public float getStringHeight ( String text , FontOptions options ) { StringWalker walker = new StringWalker ( text , options ) ; walker . walkToEnd ( ) ; return walker . lineHeight ( ) ; } | Gets the rendering height of strings . |
39,290 | public float getMaxStringWidth ( List < String > strings , FontOptions options ) { float width = 0 ; for ( String str : strings ) width = Math . max ( width , getStringWidth ( str , options ) ) ; return width ; } | Gets max rendering width of an array of string . |
39,291 | public float getCharHeight ( char c , FontOptions options ) { return getCharData ( c ) . getCharHeight ( ) / fontGeneratorOptions . fontSize * ( options != null ? options . getFontScale ( ) : 1 ) * 9 ; } | Gets the rendering height of a character . |
/**
 * Determines the character position for a given X coordinate.
 * <p>
 * NOTE(review): this body looks truncated or broken — it unconditionally returns 0,
 * the {@code position} and {@code charOffset} parameters are never read, and the
 * constructed {@code StringWalker} result is discarded. Presumably the walker was
 * meant to walk to the requested coordinate and return its width — confirm against
 * the original implementation before relying on this method.
 *
 * @param str        the text being hit-tested
 * @param options    the font options used for measuring
 * @param position   the X coordinate to resolve (currently unused)
 * @param charOffset character offset into {@code str} (currently unused)
 * @return currently always 0 for non-empty strings
 */
public float getCharPosition(String str, FontOptions options, int position, int charOffset)
{
	if (StringUtils.isEmpty(str))
		return 0;
	str = processString(str, options);
	StringWalker walker = new StringWalker(str, options);
	walker.skipChars(true);
	// NOTE(review): walker is never walked and its width never returned — dead code?
	return 0;
}
39,293 | public void setFontOptions ( FontOptions fontOptions ) { checkNotNull ( fontOptions ) ; buildLines = this . fontOptions . isBold ( ) != fontOptions . isBold ( ) || this . fontOptions . getFontScale ( ) != fontOptions . getFontScale ( ) ; this . fontOptions = fontOptions ; } | Sets the font options to use to render . |
39,294 | private String resolveParameter ( String key ) { ICachedData < ? > o = parameters . get ( key ) ; if ( o == null ) return translated ? I18n . format ( key ) : key ; return Objects . toString ( o . get ( ) ) . replace ( "$" , "\\$" ) ; } | Resolve parameter values to use in the text . |
39,295 | private boolean hasParametersChanged ( ) { boolean changed = false ; for ( ICachedData < ? > data : parameters . values ( ) ) { data . update ( ) ; if ( data . hasChanged ( ) ) changed |= true ; } return changed ; } | Checks whether any parameter has changed . |
39,296 | public String applyParameters ( String str ) { Matcher matcher = pattern . matcher ( str ) ; StringBuffer sb = new StringBuffer ( ) ; while ( matcher . find ( ) ) matcher . appendReplacement ( sb , resolveParameter ( matcher . group ( "key" ) ) ) ; matcher . appendTail ( sb ) ; str = sb . toString ( ) ; return translat... | Applies parameters to the text . |
39,297 | public void render ( GuiRenderer renderer ) { update ( ) ; if ( StringUtils . isEmpty ( cache ) ) return ; int x = screenPosition . x ( ) ; int y = screenPosition . y ( ) ; int z = zIndex . getAsInt ( ) ; ClipArea area = null ; if ( parent instanceof UIComponent ) z += parent . getZIndex ( ) ; if ( parent instanceof IC... | Renders all the text based on its set position . |
39,298 | public static void register ( IMalisisMod mod , Object messageHandler ) { register ( mod , messageHandler . getClass ( ) , messageHandler ) ; } | Registers an object to handle mod messages . |
39,299 | @ SuppressWarnings ( "unchecked" ) public static < T > T message ( String modid , String messageName , Object ... data ) { if ( ! Loader . isModLoaded ( modid ) ) return null ; Collection < Pair < Object , Method > > messageList = messages . get ( modid + ":" + messageName ) ; if ( messageList . size ( ) == 0 ) { Malis... | Sends a message to the another mod . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.