idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
21,800 | public OperationBuilder parameters ( final List < Parameter > parameters ) { List < Parameter > source = nullToEmptyList ( parameters ) ; List < Parameter > destination = new ArrayList < > ( this . parameters ) ; ParameterMerger merger = new ParameterMerger ( destination , source ) ; this . parameters = new ArrayList < > ( merger . merged ( ) ) ; return this ; } | Updates the input parameters this operation needs |
21,801 | public ApiDescriptionBuilder operations ( List < Operation > operations ) { if ( operations != null ) { this . operations = operations . stream ( ) . sorted ( operationOrdering ) . collect ( toList ( ) ) ; } return this ; } | Updates the operations to the api operation |
21,802 | public static Predicate < RequestHandler > withMethodAnnotation ( final Class < ? extends Annotation > annotation ) { return input -> input . isAnnotatedWith ( annotation ) ; } | Predicate that matches RequestHandler with handlers methods annotated with given annotation |
21,803 | public static Predicate < RequestHandler > withClassAnnotation ( final Class < ? extends Annotation > annotation ) { return input -> declaringClass ( input ) . map ( annotationPresent ( annotation ) ) . orElse ( false ) ; } | Predicate that matches RequestHandler with given annotation on the declaring class of the handler method |
21,804 | public static Predicate < RequestHandler > basePackage ( final String basePackage ) { return input -> declaringClass ( input ) . map ( handlerPackage ( basePackage ) ) . orElse ( true ) ; } | Predicate that matches RequestHandler with given base package name for the class of the handler method . This predicate includes all request handlers matching the provided basePackage |
21,805 | private static String [ ] prepareDefaultConf ( ) throws IOException { final File templateFolder = new File ( "test/local-conf-templates" ) ; final File localConfFolder = new File ( "local/conf" ) ; if ( ! localConfFolder . exists ( ) ) { FileUtils . copyDirectory ( templateFolder , localConfFolder . getParentFile ( ) ) ; log . info ( "Copied local conf templates from " + templateFolder . getAbsolutePath ( ) ) ; } log . info ( "Using conf at " + localConfFolder . getAbsolutePath ( ) ) ; return new String [ ] { "-conf" , "local/conf" } ; } | To enable run out of the box for testing . |
21,806 | public < T > T transaction ( final SQLTransaction < T > operations ) throws SQLException { Connection conn = null ; try { conn = this . queryRunner . getDataSource ( ) . getConnection ( ) ; conn . setAutoCommit ( false ) ; final DatabaseTransOperator transOperator = new DatabaseTransOperator ( this . queryRunner , conn ) ; final T res = operations . execute ( transOperator ) ; conn . commit ( ) ; return res ; } catch ( final SQLException ex ) { logger . error ( "transaction failed" , ex ) ; if ( this . dbMetrics != null ) { this . dbMetrics . markDBFailTransaction ( ) ; } throw ex ; } finally { DbUtils . closeQuietly ( conn ) ; } } | Provide a way to allow users define custom SQL operations without relying on fixed SQL interface . The common use case is to group a sequence of SQL operations without commit every time . |
21,807 | private BasicTimeChecker getBasicTimeChecker ( final Map < String , ConditionChecker > checkers ) { for ( final ConditionChecker checker : checkers . values ( ) ) { if ( checker . getType ( ) . equals ( BasicTimeChecker . type ) ) { return ( BasicTimeChecker ) checker ; } } return null ; } | expirecheckers or triggerCheckers only have BasicTimeChecker today . This should be refactored in future . |
21,808 | public void setupHadoopJobProperties ( ) { if ( getJobProps ( ) . containsKey ( HADOOP_GLOBAL_OPTS ) ) { String hadoopGlobalOps = getJobProps ( ) . getString ( HADOOP_GLOBAL_OPTS ) ; if ( getJobProps ( ) . containsKey ( HADOOP_OPTS ) ) { String hadoopOps = getJobProps ( ) . getString ( HADOOP_OPTS ) ; getJobProps ( ) . put ( HADOOP_OPTS , String . format ( "%s %s" , hadoopOps , hadoopGlobalOps ) ) ; } else { getJobProps ( ) . put ( HADOOP_OPTS , hadoopGlobalOps ) ; } } } | Append HADOOP_GLOBAL_OPTS with HADOOP_OPTS in the given props |
21,809 | public static String formatDuration ( final long startTime , final long endTime ) { if ( startTime == - 1 ) { return "-" ; } final long durationMS ; if ( endTime == - 1 ) { durationMS = System . currentTimeMillis ( ) - startTime ; } else { durationMS = endTime - startTime ; } long seconds = durationMS / 1000 ; if ( seconds < 60 ) { return seconds + " sec" ; } long minutes = seconds / 60 ; seconds %= 60 ; if ( minutes < 60 ) { return minutes + "m " + seconds + "s" ; } long hours = minutes / 60 ; minutes %= 60 ; if ( hours < 24 ) { return hours + "h " + minutes + "m " + seconds + "s" ; } final long days = hours / 24 ; hours %= 24 ; return days + "d " + hours + "h " + minutes + "m" ; } | Format time period pair to Duration String |
21,810 | public static ReadablePeriod parsePeriodString ( final String periodStr ) { final ReadablePeriod period ; final char periodUnit = periodStr . charAt ( periodStr . length ( ) - 1 ) ; if ( periodStr . equals ( "null" ) || periodUnit == 'n' ) { return null ; } final int periodInt = Integer . parseInt ( periodStr . substring ( 0 , periodStr . length ( ) - 1 ) ) ; switch ( periodUnit ) { case 'y' : period = Years . years ( periodInt ) ; break ; case 'M' : period = Months . months ( periodInt ) ; break ; case 'w' : period = Weeks . weeks ( periodInt ) ; break ; case 'd' : period = Days . days ( periodInt ) ; break ; case 'h' : period = Hours . hours ( periodInt ) ; break ; case 'm' : period = Minutes . minutes ( periodInt ) ; break ; case 's' : period = Seconds . seconds ( periodInt ) ; break ; default : throw new IllegalArgumentException ( "Invalid schedule period unit '" + periodUnit ) ; } return period ; } | Parse Period String to a ReadablePeriod Object |
21,811 | public static String createPeriodString ( final ReadablePeriod period ) { String periodStr = "null" ; if ( period == null ) { return periodStr ; } if ( period . get ( DurationFieldType . years ( ) ) > 0 ) { final int years = period . get ( DurationFieldType . years ( ) ) ; periodStr = years + "y" ; } else if ( period . get ( DurationFieldType . months ( ) ) > 0 ) { final int months = period . get ( DurationFieldType . months ( ) ) ; periodStr = months + "M" ; } else if ( period . get ( DurationFieldType . weeks ( ) ) > 0 ) { final int weeks = period . get ( DurationFieldType . weeks ( ) ) ; periodStr = weeks + "w" ; } else if ( period . get ( DurationFieldType . days ( ) ) > 0 ) { final int days = period . get ( DurationFieldType . days ( ) ) ; periodStr = days + "d" ; } else if ( period . get ( DurationFieldType . hours ( ) ) > 0 ) { final int hours = period . get ( DurationFieldType . hours ( ) ) ; periodStr = hours + "h" ; } else if ( period . get ( DurationFieldType . minutes ( ) ) > 0 ) { final int minutes = period . get ( DurationFieldType . minutes ( ) ) ; periodStr = minutes + "m" ; } else if ( period . get ( DurationFieldType . seconds ( ) ) > 0 ) { final int seconds = period . get ( DurationFieldType . seconds ( ) ) ; periodStr = seconds + "s" ; } return periodStr ; } | Convert ReadablePeriod Object to string |
21,812 | private void loadPluginJobTypes ( final JobTypePluginSet plugins ) throws JobTypeManagerException { final File jobPluginsDir = new File ( this . jobTypePluginDir ) ; if ( ! jobPluginsDir . exists ( ) ) { logger . error ( "Job type plugin dir " + this . jobTypePluginDir + " doesn't exist. Will not load any external plugins." ) ; return ; } else if ( ! jobPluginsDir . isDirectory ( ) ) { throw new JobTypeManagerException ( "Job type plugin dir " + this . jobTypePluginDir + " is not a directory!" ) ; } else if ( ! jobPluginsDir . canRead ( ) ) { throw new JobTypeManagerException ( "Job type plugin dir " + this . jobTypePluginDir + " is not readable!" ) ; } Props commonPluginJobProps = null ; final File commonJobPropsFile = new File ( jobPluginsDir , COMMONCONFFILE ) ; if ( commonJobPropsFile . exists ( ) ) { logger . info ( "Common plugin job props file " + commonJobPropsFile + " found. Attempt to load." ) ; try { commonPluginJobProps = new Props ( this . globalProperties , commonJobPropsFile ) ; } catch ( final IOException e ) { throw new JobTypeManagerException ( "Failed to load common plugin job properties" + e . getCause ( ) ) ; } } else { logger . info ( "Common plugin job props file " + commonJobPropsFile + " not found. Using only globals props" ) ; commonPluginJobProps = new Props ( this . globalProperties ) ; } Props commonPluginLoadProps = null ; final File commonLoadPropsFile = new File ( jobPluginsDir , COMMONSYSCONFFILE ) ; if ( commonLoadPropsFile . exists ( ) ) { logger . info ( "Common plugin load props file " + commonLoadPropsFile + " found. Attempt to load." ) ; try { commonPluginLoadProps = new Props ( null , commonLoadPropsFile ) ; } catch ( final IOException e ) { throw new JobTypeManagerException ( "Failed to load common plugin loader properties" + e . getCause ( ) ) ; } } else { logger . info ( "Common plugin load props file " + commonLoadPropsFile + " not found. Using empty props." ) ; commonPluginLoadProps = new Props ( ) ; } plugins . 
setCommonPluginJobProps ( commonPluginJobProps ) ; plugins . setCommonPluginLoadProps ( commonPluginLoadProps ) ; for ( final File dir : jobPluginsDir . listFiles ( ) ) { if ( dir . isDirectory ( ) && dir . canRead ( ) ) { try { loadJobTypes ( dir , plugins ) ; } catch ( final Exception e ) { logger . error ( "Failed to load jobtype " + dir . getName ( ) + e . getMessage ( ) , e ) ; throw new JobTypeManagerException ( e ) ; } } } } | load Job Types from jobtype plugin dir |
21,813 | public List < ExecutableFlow > fetchRecentlyFinishedFlows ( final Duration maxAge ) throws ExecutorManagerException { return this . executionFlowDao . fetchRecentlyFinishedFlows ( maxAge ) ; } | maxAge indicates how long finished flows are shown in Recently Finished flow page . |
21,814 | private boolean isReady ( ) { if ( this . status != Status . READY ) { return false ; } for ( final Node parent : this . parents ) { if ( ! parent . status . isSuccessEffectively ( ) ) { return false ; } } return true ; } | Checks if the node is ready to run . |
21,815 | void markSuccess ( ) { assertRunningOrKilling ( ) ; changeStatus ( Status . SUCCESS ) ; for ( final Node child : this . children ) { child . runIfAllowed ( ) ; } this . dag . updateDagStatus ( ) ; } | Transitions the node to the success state . |
21,816 | void markFailed ( ) { assertRunningOrKilling ( ) ; changeStatus ( Status . FAILURE ) ; for ( final Node child : this . children ) { child . cancel ( ) ; } this . dag . updateDagStatus ( ) ; } | Transitions the node to the failure state . |
21,817 | void kill ( ) { assert ( this . dag . getStatus ( ) == Status . KILLING ) ; if ( this . status == Status . READY || this . status == Status . BLOCKED ) { changeStatus ( Status . CANCELED ) ; } else if ( this . status == Status . RUNNING ) { changeStatus ( Status . KILLING ) ; } } | Kills a node . |
21,818 | public static File getConfFile ( Props props , String workingDir , String fileName ) { File jobDir = new File ( workingDir , getDirName ( props ) ) ; if ( ! jobDir . exists ( ) ) { jobDir . mkdir ( ) ; } return new File ( jobDir , fileName ) ; } | Resolve the location of the file containing the configuration file . |
21,819 | public static String getDirName ( Props props ) { String dirSuffix = props . get ( CommonJobProperties . NESTED_FLOW_PATH ) ; if ( ( dirSuffix == null ) || ( dirSuffix . length ( ) == 0 ) ) { dirSuffix = props . get ( CommonJobProperties . JOB_ID ) ; if ( ( dirSuffix == null ) || ( dirSuffix . length ( ) == 0 ) ) { throw new RuntimeException ( "azkaban.flow.nested.path and azkaban.job.id were not set" ) ; } } return "_resources_" + dirSuffix . replace ( ':' , '_' ) ; } | For classpath reasons we ll put each link file in a separate directory . This must be called only after the job id has been inserted by the job . |
21,820 | public static String getPath ( Props props , String workingDir ) { return new File ( workingDir , getDirName ( props ) ) . toString ( ) ; } | Gets the path to the directory in which the generated links and Hadoop conf properties files are written . |
21,821 | public static void loadProp ( Props props , Configuration conf , String name ) { String prop = props . get ( name ) ; if ( prop != null ) { conf . set ( name , prop ) ; } } | Loads an Azkaban property into the Hadoop configuration . |
21,822 | private void refreshExecutors ( ) { final List < Pair < Executor , Future < ExecutorInfo > > > futures = new ArrayList < > ( ) ; for ( final Executor executor : this . activeExecutors . getAll ( ) ) { final Future < ExecutorInfo > fetchExecutionInfo = this . executorInfoRefresherService . submit ( ( ) -> this . apiGateway . callForJsonType ( executor . getHost ( ) , executor . getPort ( ) , "/serverStatistics" , null , ExecutorInfo . class ) ) ; futures . add ( new Pair < > ( executor , fetchExecutionInfo ) ) ; } boolean wasSuccess = true ; for ( final Pair < Executor , Future < ExecutorInfo > > refreshPair : futures ) { final Executor executor = refreshPair . getFirst ( ) ; executor . setExecutorInfo ( null ) ; try { final ExecutorInfo executorInfo = refreshPair . getSecond ( ) . get ( 5 , TimeUnit . SECONDS ) ; executor . setExecutorInfo ( executorInfo ) ; logger . info ( String . format ( "Successfully refreshed executor: %s with executor info : %s" , executor , executorInfo ) ) ; } catch ( final TimeoutException e ) { wasSuccess = false ; logger . error ( "Timed out while waiting for ExecutorInfo refresh" + executor , e ) ; } catch ( final Exception e ) { wasSuccess = false ; logger . error ( "Failed to update ExecutorInfo for executor : " + executor , e ) ; } if ( wasSuccess ) { this . lastSuccessfulExecutorInfoRefresh = System . currentTimeMillis ( ) ; } } } | Refresh Executor stats for all the active executors in this executorManager |
21,823 | public String getQueuedFlowIds ( ) { final List < Integer > allIds = new ArrayList < > ( ) ; getRunningFlowsIdsHelper ( allIds , this . queuedFlows . getAllEntries ( ) ) ; Collections . sort ( allIds ) ; return allIds . toString ( ) ; } | Get execution Ids of all non - dispatched flows |
21,824 | private void dispatch ( final ExecutionReference reference , final ExecutableFlow exflow , final Executor choosenExecutor ) throws ExecutorManagerException { exflow . setUpdateTime ( System . currentTimeMillis ( ) ) ; this . executorLoader . assignExecutor ( choosenExecutor . getId ( ) , exflow . getExecutionId ( ) ) ; try { this . apiGateway . callWithExecutable ( exflow , choosenExecutor , ConnectorParams . EXECUTE_ACTION ) ; } catch ( final ExecutorManagerException ex ) { logger . error ( "Rolling back executor assignment for execution id:" + exflow . getExecutionId ( ) , ex ) ; this . executorLoader . unassignExecutor ( exflow . getExecutionId ( ) ) ; throw new ExecutorManagerException ( ex ) ; } reference . setExecutor ( choosenExecutor ) ; this . runningExecutions . get ( ) . put ( exflow . getExecutionId ( ) , new Pair < > ( reference , exflow ) ) ; synchronized ( this . runningExecutions . get ( ) ) { this . runningExecutions . get ( ) . notifyAll ( ) ; } synchronized ( this ) { this . notifyAll ( ) ; } logger . info ( String . format ( "Successfully dispatched exec %d with error count %d" , exflow . getExecutionId ( ) , reference . getNumErrors ( ) ) ) ; } | Calls executor to dispatch the flow update db to assign the executor and in - memory state of executableFlow . |
21,825 | public void unsetExecutorIdForExecution ( final int executionId ) throws ExecutorManagerException { final String UNSET_EXECUTOR = "UPDATE execution_flows SET executor_id = null where exec_id = ?" ; final SQLTransaction < Integer > unsetExecutor = transOperator -> transOperator . update ( UNSET_EXECUTOR , executionId ) ; try { this . dbOperator . transaction ( unsetExecutor ) ; } catch ( final SQLException e ) { throw new ExecutorManagerException ( "Error unsetting executor id for execution " + executionId , e ) ; } } | set executor id to null for the execution id |
21,826 | public void run ( ) throws IOException { if ( this . isStarted ( ) || this . isComplete ( ) ) { throw new IllegalStateException ( "The process can only be used once." ) ; } final ProcessBuilder builder = new ProcessBuilder ( this . cmd ) ; builder . directory ( new File ( this . workingDir ) ) ; builder . environment ( ) . putAll ( this . env ) ; builder . redirectErrorStream ( true ) ; this . process = builder . start ( ) ; try { this . processId = processId ( this . process ) ; if ( this . processId == 0 ) { this . logger . debug ( "Spawned thread with unknown process id" ) ; } else { this . logger . debug ( "Spawned thread with process id " + this . processId ) ; } this . startupLatch . countDown ( ) ; final LogGobbler outputGobbler = new LogGobbler ( new InputStreamReader ( this . process . getInputStream ( ) , StandardCharsets . UTF_8 ) , this . logger , Level . INFO , 30 ) ; final LogGobbler errorGobbler = new LogGobbler ( new InputStreamReader ( this . process . getErrorStream ( ) , StandardCharsets . UTF_8 ) , this . logger , Level . ERROR , 30 ) ; outputGobbler . start ( ) ; errorGobbler . start ( ) ; int exitCode = - 1 ; try { exitCode = this . process . waitFor ( ) ; } catch ( final InterruptedException e ) { this . logger . info ( "Process interrupted. Exit code is " + exitCode , e ) ; } this . completeLatch . countDown ( ) ; outputGobbler . awaitCompletion ( 5000 ) ; errorGobbler . awaitCompletion ( 5000 ) ; if ( exitCode != 0 ) { throw new ProcessFailureException ( exitCode ) ; } } finally { IOUtils . closeQuietly ( this . process . getInputStream ( ) ) ; IOUtils . closeQuietly ( this . process . getOutputStream ( ) ) ; IOUtils . closeQuietly ( this . process . getErrorStream ( ) ) ; } } | Execute this process blocking until it has completed . |
21,827 | public boolean softKill ( final long time , final TimeUnit unit ) throws InterruptedException { checkStarted ( ) ; if ( this . processId != 0 && isStarted ( ) ) { try { if ( this . isExecuteAsUser ) { final String cmd = String . format ( "%s %s %s %d" , this . executeAsUserBinary , this . effectiveUser , KILL_COMMAND , this . processId ) ; Runtime . getRuntime ( ) . exec ( cmd ) ; } else { final String cmd = String . format ( "%s %d" , KILL_COMMAND , this . processId ) ; Runtime . getRuntime ( ) . exec ( cmd ) ; } return this . completeLatch . await ( time , unit ) ; } catch ( final IOException e ) { this . logger . error ( "Kill attempt failed." , e ) ; } return false ; } return false ; } | Attempt to kill the process waiting up to the given time for it to die |
21,828 | public void hardKill ( ) { checkStarted ( ) ; if ( isRunning ( ) ) { if ( this . processId != 0 ) { try { if ( this . isExecuteAsUser ) { final String cmd = String . format ( "%s %s %s -9 %d" , this . executeAsUserBinary , this . effectiveUser , KILL_COMMAND , this . processId ) ; Runtime . getRuntime ( ) . exec ( cmd ) ; } else { final String cmd = String . format ( "%s -9 %d" , KILL_COMMAND , this . processId ) ; Runtime . getRuntime ( ) . exec ( cmd ) ; } } catch ( final IOException e ) { this . logger . error ( "Kill attempt failed." , e ) ; } } this . process . destroy ( ) ; } } | Force kill this process |
21,829 | private int processId ( final java . lang . Process process ) { int processId = 0 ; try { final Field f = process . getClass ( ) . getDeclaredField ( "pid" ) ; f . setAccessible ( true ) ; processId = f . getInt ( process ) ; } catch ( final Throwable e ) { e . printStackTrace ( ) ; } return processId ; } | Attempt to get the process id for this process |
21,830 | public void render ( ) { try { this . response . setHeader ( "Content-type" , "text/html; charset=UTF-8" ) ; this . response . setCharacterEncoding ( "UTF-8" ) ; this . response . setContentType ( this . mimeType ) ; this . engine . mergeTemplate ( this . template , "UTF-8" , this . context , this . response . getWriter ( ) ) ; } catch ( final Exception e ) { throw new PageRenderException ( e ) ; } } | Renders the page in UTF - 8 |
21,831 | private void validateDepNameUniqueness ( final List < TriggerDependencyBean > dependencies ) { final Set < String > seen = new HashSet < > ( ) ; for ( final TriggerDependencyBean dep : dependencies ) { Preconditions . checkArgument ( seen . add ( dep . getName ( ) ) , String . format ( "duplicate dependency" + ".name %s found, dependency.name should be unique" , dep . getName ( ) ) ) ; } } | check uniqueness of dependency . name |
21,832 | private void validateNameAndTypeArePresent ( final List < TriggerDependencyBean > dependencies ) { for ( final TriggerDependencyBean dep : dependencies ) { Preconditions . checkNotNull ( dep . getName ( ) , "dependency name is required" ) ; Preconditions . checkNotNull ( dep . getType ( ) , "dependency type is required for " + dep . getName ( ) ) ; } } | validate name and type are present |
21,833 | public static void main ( final String [ ] args ) throws ParseException { final CommandLineParser parser = new DefaultParser ( ) ; if ( parser . parse ( createHelpOptions ( ) , args , true ) . hasOption ( HELP_KEY ) ) { new HelpFormatter ( ) . printHelp ( EncryptionCLI . class . getSimpleName ( ) , createOptions ( ) , true ) ; return ; } final CommandLine line = parser . parse ( createOptions ( ) , args ) ; final String passphraseKey = line . getOptionValue ( PASSPHRASE_KEY ) ; final String plainText = line . getOptionValue ( PLAINTEXT_KEY ) ; final String version = line . getOptionValue ( VERSION_KEY ) ; final ICrypto crypto = new Crypto ( ) ; final String cipheredText = crypto . encrypt ( plainText , passphraseKey , Version . fromVerString ( version ) ) ; System . out . println ( cipheredText ) ; } | Outputs ciphered text to STDOUT . |
21,834 | protected void registerFactorFilter ( final FactorFilter < T , V > filter ) { if ( null == filter ) { throw new IllegalArgumentException ( "unable to register factor filter. " + "The passed comaractor is null or has an invalid weight value." ) ; } this . factorFilterList . put ( filter . getFactorName ( ) , filter ) ; logger . debug ( String . format ( "Factor filter added for '%s'." , filter . getFactorName ( ) ) ) ; } | function to register a factorFilter to the internal Map for future reference . |
21,835 | public boolean filterTarget ( final T filteringTarget , final V referencingObject ) { logger . debug ( String . format ( "start filtering '%s' with factor filter for '%s'" , filteringTarget == null ? "(null)" : filteringTarget . toString ( ) , this . getName ( ) ) ) ; final Collection < FactorFilter < T , V > > filterList = this . factorFilterList . values ( ) ; boolean result = true ; for ( final FactorFilter < T , V > filter : filterList ) { result &= filter . filterTarget ( filteringTarget , referencingObject ) ; logger . debug ( String . format ( "[Factor: %s] filter result : %s " , filter . getFactorName ( ) , result ) ) ; if ( ! result ) { break ; } } logger . debug ( String . format ( "Final filtering result : %s " , result ) ) ; return result ; } | function to analyze the target item according to the reference object to decide whether the item should be filtered . |
21,836 | public static int getMaxConcurrentRunsForFlow ( String projectName , String flowName , int defaultMaxConcurrentRuns , Map < Pair < String , String > , Integer > maxConcurrentRunsFlowMap ) { return maxConcurrentRunsFlowMap . getOrDefault ( new Pair ( projectName , flowName ) , defaultMaxConcurrentRuns ) ; } | Get the maximum number of concurrent runs for the specified flow using the value in azkaban . concurrent . runs . oneflow . whitelist if explicitly specified for the flow and otherwise azkaban . max . concurrent . runs . oneflow or the default . |
21,837 | public static String [ ] partitionCommandLine ( final String command ) { final ArrayList < String > commands = new ArrayList < > ( ) ; int index = 0 ; StringBuffer buffer = new StringBuffer ( command . length ( ) ) ; boolean isApos = false ; boolean isQuote = false ; while ( index < command . length ( ) ) { final char c = command . charAt ( index ) ; switch ( c ) { case ' ' : if ( ! isQuote && ! isApos ) { final String arg = buffer . toString ( ) ; buffer = new StringBuffer ( command . length ( ) - index ) ; if ( arg . length ( ) > 0 ) { commands . add ( arg ) ; } } else { buffer . append ( c ) ; } break ; case '\'' : if ( ! isQuote ) { isApos = ! isApos ; } else { buffer . append ( c ) ; } break ; case '"' : if ( ! isApos ) { isQuote = ! isQuote ; } else { buffer . append ( c ) ; } break ; default : buffer . append ( c ) ; } index ++ ; } if ( buffer . length ( ) > 0 ) { final String arg = buffer . toString ( ) ; commands . add ( arg ) ; } return commands . toArray ( new String [ commands . size ( ) ] ) ; } | Splits the command into a unix like command line structure . Quotes and single quotes are treated as nested strings . |
21,838 | public static void applyDisabledJobs ( final List < DisabledJob > disabledJobs , final ExecutableFlowBase exflow ) { for ( final DisabledJob disabled : disabledJobs ) { if ( disabled . isEmbeddedFlow ( ) ) { final ExecutableNode node = exflow . getExecutableNode ( disabled . getName ( ) ) ; if ( node != null && node instanceof ExecutableFlowBase ) { applyDisabledJobs ( disabled . getChildren ( ) , ( ExecutableFlowBase ) node ) ; } } else { final ExecutableNode node = exflow . getExecutableNode ( disabled . getName ( ) ) ; if ( node != null ) { node . setStatus ( Status . DISABLED ) ; } } } } | Change job status to disabled in exflow if the job is in disabledJobs |
21,839 | public void closeMBeans ( ) { try { for ( final ObjectName name : registeredMBeans ) { getMbeanServer ( ) . unregisterMBean ( name ) ; logger . info ( "Jmx MBean " + name . getCanonicalName ( ) + " unregistered." ) ; } } catch ( final Exception e ) { logger . error ( "Failed to cleanup MBeanServer" , e ) ; } } | Close all registered MBeans |
21,840 | public Object getMBeanAttribute ( final ObjectName name , final String attribute ) { try { return getMbeanServer ( ) . getAttribute ( name , attribute ) ; } catch ( final Exception e ) { logger . error ( "Retrieve MBeanServer attribute Failure. " + "ObjectName = " + name . toString ( ) + ", " + "attribute = " + attribute , e ) ; return null ; } } | Get MBean Attribute |
21,841 | public Map < String , Object > getMBeanResult ( final String mbeanName ) { final Map < String , Object > ret = new HashMap < > ( ) ; try { final ObjectName name = new ObjectName ( mbeanName ) ; final MBeanInfo info = getMBeanInfo ( name ) ; final MBeanAttributeInfo [ ] mbeanAttrs = info . getAttributes ( ) ; final Map < String , Object > attributes = new TreeMap < > ( ) ; for ( final MBeanAttributeInfo attrInfo : mbeanAttrs ) { final Object obj = getMBeanAttribute ( name , attrInfo . getName ( ) ) ; attributes . put ( attrInfo . getName ( ) , obj ) ; } ret . put ( "attributes" , attributes ) ; } catch ( final Exception e ) { logger . error ( "Invalid MBean Name. name = " + mbeanName , e ) ; ret . put ( "error" , "'" + mbeanName + "' is not a valid mBean name" ) ; } return ret ; } | Get MBean Result |
21,842 | public Object eval ( ) { logger . info ( "Checking sla for execution " + this . execId ) ; final ExecutableFlow flow ; try { flow = this . executorLoader . fetchExecutableFlow ( this . execId ) ; } catch ( final ExecutorManagerException e ) { logger . error ( "Can't get executable flow." , e ) ; e . printStackTrace ( ) ; return true ; } return isSlaMissed ( flow ) ; } | return true to trigger sla action |
21,843 | private void handleChangeManagerStatusRequest ( final HttpServletRequest req , final Map < String , Object > ret , final boolean enableMetricManager ) { try { logger . info ( "Updating metric manager status" ) ; if ( ( enableMetricManager && MetricReportManager . isInstantiated ( ) ) || MetricReportManager . isAvailable ( ) ) { final MetricReportManager metricManager = MetricReportManager . getInstance ( ) ; if ( enableMetricManager ) { metricManager . enableManager ( ) ; } else { metricManager . disableManager ( ) ; } ret . put ( STATUS_PARAM , RESPONSE_SUCCESS ) ; } else { ret . put ( RESPONSE_ERROR , "MetricManager is not available" ) ; } } catch ( final Exception e ) { logger . error ( e ) ; ret . put ( RESPONSE_ERROR , e . getMessage ( ) ) ; } } | enable or disable metric Manager A disable will also purge all data from all metric emitters |
21,844 | private void handleChangeCleaningInterval ( final HttpServletRequest req , final Map < String , Object > ret ) { try { final long newInterval = getLongParam ( req , STATS_MAP_CLEANINGINTERVAL ) ; if ( MetricReportManager . isAvailable ( ) ) { final MetricReportManager metricManager = MetricReportManager . getInstance ( ) ; final InMemoryMetricEmitter memoryEmitter = extractInMemoryMetricEmitter ( metricManager ) ; memoryEmitter . setReportingInterval ( newInterval ) ; ret . put ( STATUS_PARAM , RESPONSE_SUCCESS ) ; } else { ret . put ( RESPONSE_ERROR , "MetricManager is not available" ) ; } } catch ( final Exception e ) { logger . error ( e ) ; ret . put ( RESPONSE_ERROR , e . getMessage ( ) ) ; } } | Update InMemoryMetricEmitter interval to maintain metric snapshots |
21,845 | private InMemoryMetricEmitter extractInMemoryMetricEmitter ( final MetricReportManager metricManager ) { InMemoryMetricEmitter memoryEmitter = null ; for ( final IMetricEmitter emitter : metricManager . getMetricEmitters ( ) ) { if ( emitter instanceof InMemoryMetricEmitter ) { memoryEmitter = ( InMemoryMetricEmitter ) emitter ; break ; } } return memoryEmitter ; } | Get InMemoryMetricEmitter if available else null |
21,846 | private void handleGetAllMMetricsName ( final HttpServletRequest req , final Map < String , Object > ret ) { if ( MetricReportManager . isAvailable ( ) ) { final MetricReportManager metricManager = MetricReportManager . getInstance ( ) ; final List < IMetric < ? > > result = metricManager . getAllMetrics ( ) ; if ( result . size ( ) == 0 ) { ret . put ( RESPONSE_ERROR , "No Metric being tracked" ) ; } else { final List < String > metricNames = new LinkedList < > ( ) ; for ( final IMetric < ? > metric : result ) { metricNames . add ( metric . getName ( ) ) ; } ret . put ( "data" , metricNames ) ; } } else { ret . put ( RESPONSE_ERROR , "MetricReportManager is not available" ) ; } } | Get all the metrics tracked by metric manager |
21,847 | private void handleChangeMetricInterval ( final HttpServletRequest req , final Map < String , Object > ret ) throws ServletException { try { final String metricName = getParam ( req , STATS_MAP_METRICNAMEPARAM ) ; final long newInterval = getLongParam ( req , STATS_MAP_REPORTINGINTERVAL ) ; if ( MetricReportManager . isAvailable ( ) ) { final MetricReportManager metricManager = MetricReportManager . getInstance ( ) ; final TimeBasedReportingMetric < ? > metric = ( TimeBasedReportingMetric < ? > ) metricManager . getMetricFromName ( metricName ) ; metric . updateInterval ( newInterval ) ; ret . put ( STATUS_PARAM , RESPONSE_SUCCESS ) ; } else { ret . put ( RESPONSE_ERROR , "MetricManager is not available" ) ; } } catch ( final Exception e ) { logger . error ( e ) ; ret . put ( RESPONSE_ERROR , e . getMessage ( ) ) ; } } | Update tracking interval for a given metrics |
21,848 | private static Props loadConfigurationFromAzkabanHome ( ) { final String azkabanHome = System . getenv ( "AZKABAN_HOME" ) ; if ( azkabanHome == null ) { logger . error ( "AZKABAN_HOME not set. Will try default." ) ; return null ; } if ( ! new File ( azkabanHome ) . isDirectory ( ) || ! new File ( azkabanHome ) . canRead ( ) ) { logger . error ( azkabanHome + " is not a readable directory." ) ; return null ; } final File confPath = new File ( azkabanHome , Constants . DEFAULT_CONF_PATH ) ; if ( ! confPath . exists ( ) || ! confPath . isDirectory ( ) || ! confPath . canRead ( ) ) { logger . error ( azkabanHome + " does not contain a readable conf directory." ) ; return null ; } return loadAzkabanConfigurationFromDirectory ( confPath ) ; } | Loads the Azkaban property file from the AZKABAN_HOME conf directory |
21,849 | public boolean closeClassLoader ( final ClassLoader cl ) throws ValidatorManagerException { boolean res = false ; if ( cl == null ) { return res ; } final Class classURLClassLoader = URLClassLoader . class ; Field f = null ; try { f = classURLClassLoader . getDeclaredField ( "ucp" ) ; } catch ( final NoSuchFieldException e ) { throw new ValidatorManagerException ( e ) ; } if ( f != null ) { f . setAccessible ( true ) ; Object obj = null ; try { obj = f . get ( cl ) ; } catch ( final IllegalAccessException e ) { throw new ValidatorManagerException ( e ) ; } if ( obj != null ) { final Object ucp = obj ; f = null ; try { f = ucp . getClass ( ) . getDeclaredField ( "loaders" ) ; } catch ( final NoSuchFieldException e ) { throw new ValidatorManagerException ( e ) ; } if ( f != null ) { f . setAccessible ( true ) ; ArrayList loaders = null ; try { loaders = ( ArrayList ) f . get ( ucp ) ; res = true ; } catch ( final IllegalAccessException e ) { throw new ValidatorManagerException ( e ) ; } for ( int i = 0 ; loaders != null && i < loaders . size ( ) ; i ++ ) { obj = loaders . get ( i ) ; f = null ; try { f = obj . getClass ( ) . getDeclaredField ( "jar" ) ; } catch ( final NoSuchFieldException e ) { throw new ValidatorManagerException ( e ) ; } if ( f != null ) { f . setAccessible ( true ) ; try { obj = f . get ( obj ) ; } catch ( final IllegalAccessException e ) { throw new ValidatorManagerException ( e ) ; } if ( obj instanceof JarFile ) { final JarFile jarFile = ( JarFile ) obj ; this . setJarFileNames2Close . add ( jarFile . getName ( ) ) ; try { jarFile . close ( ) ; } catch ( final IOException e ) { throw new ValidatorManagerException ( e ) ; } } } } } } } return res ; } | close jar files of cl |
21,850 | public boolean finalizeNativeLibs ( final ClassLoader cl ) throws ValidatorManagerException { boolean res = false ; final Class classClassLoader = ClassLoader . class ; java . lang . reflect . Field nativeLibraries = null ; try { nativeLibraries = classClassLoader . getDeclaredField ( "nativeLibraries" ) ; } catch ( final NoSuchFieldException e ) { throw new ValidatorManagerException ( e ) ; } if ( nativeLibraries == null ) { return res ; } nativeLibraries . setAccessible ( true ) ; Object obj = null ; try { obj = nativeLibraries . get ( cl ) ; } catch ( final IllegalAccessException e ) { throw new ValidatorManagerException ( e ) ; } if ( ! ( obj instanceof Vector ) ) { return res ; } res = true ; final Vector java_lang_ClassLoader_NativeLibrary = ( Vector ) obj ; for ( final Object lib : java_lang_ClassLoader_NativeLibrary ) { java . lang . reflect . Method finalize = null ; try { finalize = lib . getClass ( ) . getDeclaredMethod ( "finalize" , new Class [ 0 ] ) ; } catch ( final NoSuchMethodException e ) { throw new ValidatorManagerException ( e ) ; } if ( finalize != null ) { finalize . setAccessible ( true ) ; try { finalize . invoke ( lib , new Object [ 0 ] ) ; } catch ( final IllegalAccessException e ) { throw new ValidatorManagerException ( e ) ; } catch ( final InvocationTargetException e ) { throw new ValidatorManagerException ( e ) ; } } } return res ; } | finalize native libraries |
21,851 | public void sendEmail ( final List < String > emailList , final String subject , final String body ) { if ( emailList != null && ! emailList . isEmpty ( ) ) { final EmailMessage message = super . createEmailMessage ( subject , "text/html" , emailList ) ; message . setBody ( body ) ; sendEmail ( message , true , "email message " + body ) ; } } | Send an email to the specified email list |
21,852 | private void sendFailedUpdateEmail ( final Executor executor , final ExecutorManagerException exception , final MailCreator mailCreator , final ImmutableList < ExecutableFlow > flows ) { final EmailMessage message = this . messageCreator . createMessage ( ) ; final boolean mailCreated = mailCreator . createFailedUpdateMessage ( flows , executor , exception , message , this . azkabanName , this . scheme , this . clientHostname , this . clientPortNumber ) ; final List < Integer > executionIds = Lists . transform ( flows , ExecutableFlow :: getExecutionId ) ; sendEmail ( message , mailCreated , "failed update email message for executions " + executionIds ) ; } | Sends a single email about failed updates . |
21,853 | private void waitUntilFlowPreparationFinish ( ) throws InterruptedException { final Duration SLEEP_INTERVAL = Duration . ofSeconds ( 5 ) ; while ( this . preparingFlowCount . intValue ( ) != 0 ) { logger . info ( this . preparingFlowCount + " flow(s) is/are still being setup before complete " + "deactivation." ) ; Thread . sleep ( SLEEP_INTERVAL . toMillis ( ) ) ; } } | Wait until ongoing flow preparation work finishes . |
21,854 | private void configureFlowLevelMetrics ( final FlowRunner flowRunner ) { logger . info ( "Configuring Azkaban metrics tracking for flow runner object" ) ; if ( MetricReportManager . isAvailable ( ) ) { final MetricReportManager metricManager = MetricReportManager . getInstance ( ) ; flowRunner . addListener ( ( NumFailedFlowMetric ) metricManager . getMetricFromName ( NumFailedFlowMetric . NUM_FAILED_FLOW_METRIC_NAME ) ) ; } } | Configure Azkaban metrics tracking for a new flowRunner instance |
21,855 | public void shutdown ( ) { logger . warn ( "Shutting down FlowRunnerManager..." ) ; if ( this . azkabanProps . getBoolean ( ConfigurationKeys . AZKABAN_POLL_MODEL , false ) ) { this . pollingService . shutdown ( ) ; } this . executorService . shutdown ( ) ; boolean result = false ; while ( ! result ) { logger . info ( "Awaiting Shutdown. # of executing flows: " + getNumRunningFlows ( ) ) ; try { result = this . executorService . awaitTermination ( 1 , TimeUnit . MINUTES ) ; } catch ( final InterruptedException e ) { logger . error ( e ) ; } } logger . warn ( "Shutdown FlowRunnerManager complete." ) ; } | This shuts down the flow runner . The call is blocking and awaits execution of all jobs . |
21,856 | public void deleteExecutionDirectory ( ) { logger . warn ( "Deleting execution dir: " + this . executionDirectory . getAbsolutePath ( ) ) ; try { FileUtils . deleteDirectory ( this . executionDirectory ) ; } catch ( final IOException e ) { logger . error ( e ) ; } } | Deleting old execution directory to free disk space . |
21,857 | public static Props loadPluginProps ( final File pluginDir ) { if ( ! pluginDir . exists ( ) ) { logger . error ( "Error! Plugin path " + pluginDir . getPath ( ) + " doesn't exist." ) ; return null ; } if ( ! pluginDir . isDirectory ( ) ) { logger . error ( "The plugin path " + pluginDir + " is not a directory." ) ; return null ; } final File propertiesDir = new File ( pluginDir , "conf" ) ; if ( propertiesDir . exists ( ) && propertiesDir . isDirectory ( ) ) { final File propertiesFile = new File ( propertiesDir , "plugin.properties" ) ; final File propertiesOverrideFile = new File ( propertiesDir , "override.properties" ) ; if ( propertiesFile . exists ( ) ) { if ( propertiesOverrideFile . exists ( ) ) { return loadProps ( null , propertiesFile , propertiesOverrideFile ) ; } else { return loadProps ( null , propertiesFile ) ; } } else { logger . error ( "Plugin conf file " + propertiesFile + " not found." ) ; return null ; } } else { logger . error ( "Plugin conf path " + propertiesDir + " not found." ) ; return null ; } } | Load plugin properties |
21,858 | public static void loadPropsBySuffix ( final File jobPath , final Props props , final String ... suffixes ) { try { if ( jobPath . isDirectory ( ) ) { final File [ ] files = jobPath . listFiles ( ) ; if ( files != null ) { for ( final File file : files ) { loadPropsBySuffix ( file , props , suffixes ) ; } } } else if ( endsWith ( jobPath , suffixes ) ) { props . putAll ( new Props ( null , jobPath . getAbsolutePath ( ) ) ) ; } } catch ( final IOException e ) { throw new RuntimeException ( "Error loading schedule properties." , e ) ; } } | Load properties from the given path |
21,859 | public static boolean isVariableReplacementPattern ( final String value ) { final Matcher matcher = VARIABLE_REPLACEMENT_PATTERN . matcher ( value ) ; return matcher . matches ( ) ; } | Check if the prop value is a variable replacement pattern |
21,860 | private static String resolveVariableExpression ( final String value , final int last , final JexlEngine jexl ) { final int lastIndex = value . lastIndexOf ( "$(" , last ) ; if ( lastIndex == - 1 ) { return value ; } int bracketCount = 0 ; int nextClosed = lastIndex + 2 ; for ( ; nextClosed < value . length ( ) ; ++ nextClosed ) { if ( value . charAt ( nextClosed ) == '(' ) { bracketCount ++ ; } else if ( value . charAt ( nextClosed ) == ')' ) { bracketCount -- ; if ( bracketCount == - 1 ) { break ; } } } if ( nextClosed == value . length ( ) ) { throw new IllegalArgumentException ( "Expression " + value + " not well formed." ) ; } final String innerExpression = value . substring ( lastIndex + 2 , nextClosed ) ; Object result = null ; try { final Expression e = jexl . createExpression ( innerExpression ) ; result = e . evaluate ( new MapContext ( ) ) ; } catch ( final JexlException e ) { throw new IllegalArgumentException ( "Expression " + value + " not well formed. " + e . getMessage ( ) , e ) ; } if ( result == null ) { return value ; } final String newValue = value . substring ( 0 , lastIndex ) + result . toString ( ) + value . substring ( nextClosed + 1 ) ; return resolveVariableExpression ( newValue , lastIndex , jexl ) ; } | Function that looks for expressions to parse . It parses backwards to capture embedded expressions |
21,861 | public static String toJSONString ( final Props props , final boolean localOnly ) { final Map < String , String > map = toStringMap ( props , localOnly ) ; return JSONUtils . toJSON ( map ) ; } | Convert props to json string |
21,862 | public static Map < String , String > toStringMap ( final Props props , final boolean localOnly ) { final HashMap < String , String > map = new HashMap < > ( ) ; final Set < String > keyset = localOnly ? props . localKeySet ( ) : props . getKeySet ( ) ; for ( final String key : keyset ) { final String value = props . get ( key ) ; map . put ( key , value ) ; } return map ; } | Convert props to Map |
21,863 | public static Props fromJSONString ( final String json ) throws IOException { final Map < String , String > obj = ( Map < String , String > ) JSONUtils . parseJSONFromString ( json ) ; final Props props = new Props ( null , obj ) ; return props ; } | Convert json String to Prop Object |
21,864 | public static Props fromHierarchicalMap ( final Map < String , Object > propsMap ) { if ( propsMap == null ) { return null ; } final String source = ( String ) propsMap . get ( "source" ) ; final Map < String , String > propsParams = ( Map < String , String > ) propsMap . get ( "props" ) ; final Map < String , Object > parent = ( Map < String , Object > ) propsMap . get ( "parent" ) ; final Props parentProps = fromHierarchicalMap ( parent ) ; final Props props = new Props ( parentProps , propsParams ) ; props . setSource ( source ) ; return props ; } | Convert a hierarchical Map to Prop Object |
21,865 | public static Map < String , Object > toHierarchicalMap ( final Props props ) { final Map < String , Object > propsMap = new HashMap < > ( ) ; propsMap . put ( "source" , props . getSource ( ) ) ; propsMap . put ( "props" , toStringMap ( props , true ) ) ; if ( props . getParent ( ) != null ) { propsMap . put ( "parent" , toHierarchicalMap ( props . getParent ( ) ) ) ; } return propsMap ; } | Convert a Props object to a hierarchical Map |
21,866 | public static String getPropertyDiff ( Props oldProps , Props newProps ) { final StringBuilder builder = new StringBuilder ( "" ) ; if ( oldProps == null ) { oldProps = new Props ( ) ; } if ( newProps == null ) { newProps = new Props ( ) ; } final MapDifference < String , String > md = Maps . difference ( toStringMap ( oldProps , false ) , toStringMap ( newProps , false ) ) ; final Map < String , String > newlyCreatedProperty = md . entriesOnlyOnRight ( ) ; if ( newlyCreatedProperty != null && newlyCreatedProperty . size ( ) > 0 ) { builder . append ( "Newly created Properties: " ) ; newlyCreatedProperty . forEach ( ( k , v ) -> { builder . append ( "[ " + k + ", " + v + "], " ) ; } ) ; builder . append ( "\n" ) ; } final Map < String , String > deletedProperty = md . entriesOnlyOnLeft ( ) ; if ( deletedProperty != null && deletedProperty . size ( ) > 0 ) { builder . append ( "Deleted Properties: " ) ; deletedProperty . forEach ( ( k , v ) -> { builder . append ( "[ " + k + ", " + v + "], " ) ; } ) ; builder . append ( "\n" ) ; } final Map < String , MapDifference . ValueDifference < String > > diffProperties = md . entriesDiffering ( ) ; if ( diffProperties != null && diffProperties . size ( ) > 0 ) { builder . append ( "Modified Properties: " ) ; diffProperties . forEach ( ( k , v ) -> { builder . append ( "[ " + k + ", " + v . leftValue ( ) + "-->" + v . rightValue ( ) + "], " ) ; } ) ; } return builder . toString ( ) ; } | The difference between old and new Props
21,867 | private static FactorComparator < Executor > getNumberOfAssignedFlowComparator ( final int weight ) { return FactorComparator . create ( NUMOFASSIGNEDFLOW_COMPARATOR_NAME , weight , new Comparator < Executor > ( ) { public int compare ( final Executor o1 , final Executor o2 ) { final ExecutorInfo stat1 = o1 . getExecutorInfo ( ) ; final ExecutorInfo stat2 = o2 . getExecutorInfo ( ) ; final Integer result = 0 ; if ( statisticsObjectCheck ( stat1 , stat2 , NUMOFASSIGNEDFLOW_COMPARATOR_NAME ) ) { return result ; } return ( ( Integer ) stat1 . getRemainingFlowCapacity ( ) ) . compareTo ( stat2 . getRemainingFlowCapacity ( ) ) ; } } ) ; } | function defines the number of assigned flow comparator . |
21,868 | private static FactorComparator < Executor > getCpuUsageComparator ( final int weight ) { return FactorComparator . create ( CPUUSAGE_COMPARATOR_NAME , weight , new Comparator < Executor > ( ) { public int compare ( final Executor o1 , final Executor o2 ) { final ExecutorInfo stat1 = o1 . getExecutorInfo ( ) ; final ExecutorInfo stat2 = o2 . getExecutorInfo ( ) ; final int result = 0 ; if ( statisticsObjectCheck ( stat1 , stat2 , CPUUSAGE_COMPARATOR_NAME ) ) { return result ; } return ( ( Double ) stat2 . getCpuUsage ( ) ) . compareTo ( stat1 . getCpuUsage ( ) ) ; } } ) ; } | function defines the cpuUsage comparator . |
21,869 | private static FactorComparator < Executor > getLstDispatchedTimeComparator ( final int weight ) { return FactorComparator . create ( LSTDISPATCHED_COMPARATOR_NAME , weight , new Comparator < Executor > ( ) { public int compare ( final Executor o1 , final Executor o2 ) { final ExecutorInfo stat1 = o1 . getExecutorInfo ( ) ; final ExecutorInfo stat2 = o2 . getExecutorInfo ( ) ; final int result = 0 ; if ( statisticsObjectCheck ( stat1 , stat2 , LSTDISPATCHED_COMPARATOR_NAME ) ) { return result ; } return ( ( Long ) stat2 . getLastDispatchedTime ( ) ) . compareTo ( stat1 . getLastDispatchedTime ( ) ) ; } } ) ; } | function defines the last dispatched time comparator . |
21,870 | protected String parseResponse ( final HttpResponse response ) throws HttpResponseException , IOException { final StatusLine statusLine = response . getStatusLine ( ) ; final String responseBody = response . getEntity ( ) != null ? EntityUtils . toString ( response . getEntity ( ) ) : "" ; if ( statusLine . getStatusCode ( ) >= 300 ) { logger . error ( String . format ( "unable to parse response as the response status is %s" , statusLine . getStatusCode ( ) ) ) ; throw new HttpResponseException ( statusLine . getStatusCode ( ) , responseBody ) ; } return responseBody ; } | Implementing the parseResponse function to return de - serialized Json object . |
21,871 | private void prepareTokenFile ( final String user , final Credentials credentials , final File tokenFile , final Logger logger ) throws IOException { writeCredentialsToFile ( credentials , tokenFile , logger ) ; try { assignPermissions ( user , tokenFile , logger ) ; } catch ( final IOException e ) { tokenFile . delete ( ) ; throw e ; } } | Prepare token file . Writes credentials to a token file and sets appropriate permissions to keep the file secure |
21,872 | private void assignPermissions ( final String user , final File tokenFile , final Logger logger ) throws IOException { final List < String > changePermissionsCommand = Arrays . asList ( CHMOD , TOKEN_FILE_PERMISSIONS , tokenFile . getAbsolutePath ( ) ) ; int result = this . executeAsUser . execute ( System . getProperty ( "user.name" ) , changePermissionsCommand ) ; if ( result != 0 ) { throw new IOException ( "Unable to modify permissions. User: " + user ) ; } final List < String > changeOwnershipCommand = Arrays . asList ( CHOWN , user + ":" + GROUP_NAME , tokenFile . getAbsolutePath ( ) ) ; result = this . executeAsUser . execute ( "root" , changeOwnershipCommand ) ; if ( result != 0 ) { throw new IOException ( "Unable to set ownership. User: " + user ) ; } } | Uses execute - as - user binary to reassign file permissions to be readable only by that user . |
21,873 | private IMetaStoreClient createRetryingMetaStoreClient ( final HiveConf hiveConf ) throws MetaException { final HiveMetaHookLoader hookLoader = new HiveMetaHookLoader ( ) { public HiveMetaHook getHook ( final Table tbl ) throws MetaException { if ( tbl == null ) { return null ; } try { final HiveStorageHandler storageHandler = HiveUtils . getStorageHandler ( hiveConf , tbl . getParameters ( ) . get ( META_TABLE_STORAGE ) ) ; return storageHandler == null ? null : storageHandler . getMetaHook ( ) ; } catch ( final HiveException e ) { HadoopSecurityManager_H_2_0 . logger . error ( e . toString ( ) ) ; throw new MetaException ( "Failed to get storage handler: " + e ) ; } } } ; return RetryingMetaStoreClient . getProxy ( hiveConf , hookLoader , HiveMetaStoreClient . class . getName ( ) ) ; } | Method to create a metastore client that retries on failures |
21,874 | Driver provideHiveDriver ( ) { HiveConf hiveConf = provideHiveConf ( ) ; SessionState . start ( hiveConf ) ; return new Driver ( hiveConf ) ; } | Return a Driver that s connected to the real honest - to - goodness Hive |
21,875 | private void shutdown ( final Map < String , Object > respMap ) { try { logger . warn ( "Shutting down executor..." ) ; setActiveInternal ( false ) ; this . application . shutdown ( ) ; respMap . put ( ConnectorParams . STATUS_PARAM , ConnectorParams . RESPONSE_SUCCESS ) ; } catch ( final Exception e ) { logger . error ( e . getMessage ( ) , e ) ; respMap . put ( ConnectorParams . RESPONSE_ERROR , e . getMessage ( ) ) ; } } | Prepare the executor for shutdown . |
21,876 | public long getLastInsertId ( ) throws SQLException { long num = - 1 ; try { num = ( ( Number ) this . queryRunner . query ( this . conn , "SELECT LAST_INSERT_ID();" , new ScalarHandler < > ( 1 ) ) ) . longValue ( ) ; } catch ( final SQLException ex ) { logger . error ( "can not get last insertion ID" ) ; throw ex ; } return num ; } | returns the last id from a previous insert statement . Note that last insert and this operation should use the same connection . |
21,877 | protected void logJobProperties ( ) { if ( this . jobProps != null && this . jobProps . getBoolean ( JOB_DUMP_PROPERTIES_IN_LOG , false ) ) { try { final Map < String , String > flattenedProps = this . jobProps . getFlattened ( ) ; this . info ( "****** Job properties ******" ) ; this . info ( String . format ( "- Note : value is masked if property name ends with '%s'." , SENSITIVE_JOB_PROP_NAME_SUFFIX ) ) ; for ( final Map . Entry < String , String > entry : flattenedProps . entrySet ( ) ) { final String key = entry . getKey ( ) ; final String value = key . endsWith ( SENSITIVE_JOB_PROP_NAME_SUFFIX ) ? SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER : entry . getValue ( ) ; this . info ( String . format ( "%s=%s" , key , value ) ) ; } this . info ( "****** End Job properties ******" ) ; } catch ( final Exception ex ) { this . error ( "failed to log job properties " , ex ) ; } } } | prints the current Job props to the Job log . |
21,878 | public File [ ] initPropsFiles ( ) { final File [ ] files = new File [ 2 ] ; files [ 0 ] = createFlattenedPropsFile ( this . cwd ) ; this . jobProps . put ( ENV_PREFIX + JOB_PROP_ENV , files [ 0 ] . getAbsolutePath ( ) ) ; this . jobProps . put ( ENV_PREFIX + JOB_NAME_ENV , getId ( ) ) ; files [ 1 ] = createOutputPropsFile ( getId ( ) , this . cwd ) ; this . jobProps . put ( ENV_PREFIX + JOB_OUTPUT_PROP_FILE , files [ 1 ] . getAbsolutePath ( ) ) ; return files ; } | initialize temporary and final property file |
21,879 | public Map < String , String > getEnvironmentVariables ( ) { final Props props = getJobProps ( ) ; final Map < String , String > envMap = props . getMapByPrefix ( ENV_PREFIX ) ; return envMap ; } | Get Environment Variables from the Job Properties Table |
21,880 | public String getWorkingDirectory ( ) { final String workingDir = getJobProps ( ) . getString ( WORKING_DIR , this . jobPath ) ; return Utils . ifNull ( workingDir , "" ) ; } | Get Working Directory from Job Properties when it is presented . Otherwise the working directory is the jobPath |
21,881 | < T > T callForJsonType ( final String host , final int port , final String path , final List < Pair < String , String > > paramList , final Class < T > valueType ) throws IOException { final String responseString = callForJsonString ( host , port , path , paramList ) ; if ( null == responseString || responseString . length ( ) == 0 ) { return null ; } return new ObjectMapper ( ) . readValue ( responseString , valueType ) ; } | Call executor and parse the JSON response as an instance of the class given as an argument . |
21,882 | public static void hideUploadButtonWhenNeeded ( final Page page , final Session session , final UserManager userManager , final Boolean lockdownUploadProjects ) { final User user = session . getUser ( ) ; if ( lockdownUploadProjects && ! UserUtils . hasPermissionforAction ( userManager , user , Permission . Type . UPLOADPROJECTS ) ) { page . add ( "hideUploadProject" , true ) ; } } | Method hides the upload button for regular users from relevant pages when the property lockdown . upload . projects is set . The button is displayed for admin users and users with upload permissions . |
21,883 | public String createSlaMessage ( final ExecutableFlow flow ) { final int execId = flow . getExecutionId ( ) ; final String durationStr = durationToString ( this . duration ) ; switch ( this . type . getComponent ( ) ) { case FLOW : final String basicinfo = "SLA Alert: Your flow " + this . flowName + " failed to " + this . type . getStatus ( ) + " within " + durationStr + "<br/>" ; final String expected = "Here are details : <br/>" + "Flow " + this . flowName + " in execution " + execId + " is expected to FINISH within " + durationStr + " from " + fmt . print ( new DateTime ( flow . getStartTime ( ) ) ) + "<br/>" ; final String actual = "Actual flow status is " + flow . getStatus ( ) ; return basicinfo + expected + actual ; case JOB : return "SLA Alert: Your job " + this . jobName + " failed to " + this . type . getStatus ( ) + " within " + durationStr + " in execution " + execId ; default : return "Unrecognized SLA component type " + this . type . getComponent ( ) ; } } | Construct the message for the SLA . |
21,884 | public void cleanupProjectArtifacts ( final int projectId ) { if ( ! isCleanupPermitted ( ) ) { return ; } final Set < String > allResourceIds = findResourceIdsToDelete ( projectId ) ; if ( allResourceIds . size ( ) == 0 ) { return ; } log . warn ( String . format ( "Deleting project artifacts [id: %d]: %s" , projectId , allResourceIds ) ) ; allResourceIds . forEach ( this :: delete ) ; } | Remove all but last N artifacts as configured by AZKABAN_STORAGE_ARTIFACT_MAX_RETENTION |
21,885 | private boolean delete ( final String resourceId ) { final boolean isDeleted = this . storage . delete ( resourceId ) && removeDbEntry ( resourceId ) ; if ( ! isDeleted ) { log . info ( "Failed to delete resourceId: " + resourceId ) ; } return isDeleted ; } | Main Delete Utility . |
21,886 | protected boolean tieBreak ( final T object1 , final T object2 ) { if ( null == object2 ) { return true ; } if ( null == object1 ) { return false ; } return object1 . hashCode ( ) >= object2 . hashCode ( ) ; } | tieBreak method which will kick in when the comparator list generated an equality result for both sides . the tieBreak method will try best to make sure a stable result is returned . |
21,887 | protected void registerFactorComparator ( final FactorComparator < T > comparator ) { if ( null == comparator || Integer . MAX_VALUE - this . getTotalWeight ( ) < comparator . getWeight ( ) ) { throw new IllegalArgumentException ( "unable to register comparator." + " The passed comparator is null or has an invalid weight value." ) ; } this . factorComparatorList . put ( comparator . getFactorName ( ) , comparator ) ; logger . debug ( String . format ( "Factor comparator added for '%s'. Weight = '%s'" , comparator . getFactorName ( ) , comparator . getWeight ( ) ) ) ; } | function to register a factorComparator to the internal Map for future reference . |
21,888 | public int getTotalWeight ( ) { int totalWeight = 0 ; final Collection < FactorComparator < T > > allValues = this . factorComparatorList . values ( ) ; for ( final FactorComparator < T > item : allValues ) { if ( item != null ) { totalWeight += item . getWeight ( ) ; } } return totalWeight ; } | function returns the total weight of the registered comparators . |
21,889 | private List < Object > getFilePreviews ( final String [ ] fileList , final String locationFull , final IStreamProvider streamProvider , final boolean renderResultsAsHtml ) { final List < Object > files = new ArrayList < > ( ) ; InputStream csvInputStream = null ; try { for ( final String fileName : fileList ) { final Map < String , Object > file = new HashMap < > ( ) ; file . put ( "name" , fileName ) ; final String filePath = locationFull + "/" + fileName ; csvInputStream = streamProvider . getFileInputStream ( filePath ) ; final Scanner rowScanner = new Scanner ( csvInputStream , StandardCharsets . UTF_8 . toString ( ) ) ; final List < Object > lines = new ArrayList < > ( ) ; int lineNumber = 0 ; while ( rowScanner . hasNextLine ( ) && lineNumber < ReportalMailCreator . NUM_PREVIEW_ROWS ) { final String csvLine = rowScanner . nextLine ( ) ; final String [ ] data = csvLine . split ( "\",\"" ) ; final List < String > line = new ArrayList < > ( ) ; for ( final String item : data ) { String column = item . replace ( "\"" , "" ) ; if ( ! renderResultsAsHtml ) { column = StringEscapeUtils . escapeHtml ( column ) ; } line . add ( column ) ; } lines . add ( line ) ; lineNumber ++ ; } file . put ( "content" , lines ) ; if ( rowScanner . hasNextLine ( ) ) { file . put ( "hasMore" , true ) ; } files . add ( file ) ; rowScanner . close ( ) ; } } catch ( final Exception e ) { logger . debug ( "Error encountered while processing files in " + locationFull , e ) ; } finally { IOUtils . closeQuietly ( csvInputStream ) ; } return files ; } | Returns a list of file Objects that contain a name property with the file name a content property with the lines in the file and a hasMore property if the file contains more than NUM_PREVIEW_ROWS lines . |
21,890 | public synchronized boolean pauseJobIfPresent ( final String jobName , final String groupName ) throws SchedulerException { if ( ifJobExist ( jobName , groupName ) ) { this . scheduler . pauseJob ( new JobKey ( jobName , groupName ) ) ; return true ; } else { return false ; } } | Pause a job if it s present . |
21,891 | public synchronized boolean isJobPaused ( final String jobName , final String groupName ) throws SchedulerException { if ( ! ifJobExist ( jobName , groupName ) ) { throw new SchedulerException ( String . format ( "Job (job name %s, group name %s) doesn't " + "exist'" , jobName , groupName ) ) ; } final JobKey jobKey = new JobKey ( jobName , groupName ) ; final JobDetail jobDetail = this . scheduler . getJobDetail ( jobKey ) ; final List < ? extends Trigger > triggers = this . scheduler . getTriggersOfJob ( jobDetail . getKey ( ) ) ; for ( final Trigger trigger : triggers ) { final TriggerState triggerState = this . scheduler . getTriggerState ( trigger . getKey ( ) ) ; if ( TriggerState . PAUSED . equals ( triggerState ) ) { return true ; } } return false ; } | Check if job is paused . |
21,892 | public synchronized boolean unscheduleJob ( final String jobName , final String groupName ) throws SchedulerException { return this . scheduler . deleteJob ( new JobKey ( jobName , groupName ) ) ; } | Unschedule a job . |
21,893 | public synchronized boolean scheduleJobIfAbsent ( final String cronExpression , final QuartzJobDescription jobDescription ) throws SchedulerException { requireNonNull ( jobDescription , "jobDescription is null" ) ; if ( ifJobExist ( jobDescription . getJobName ( ) , jobDescription . getGroupName ( ) ) ) { logger . warn ( String . format ( "can not register existing job with job name: " + "%s and group name: %s" , jobDescription . getJobName ( ) , jobDescription . getGroupName ( ) ) ) ; return false ; } if ( ! CronExpression . isValidExpression ( cronExpression ) ) { throw new SchedulerException ( "The cron expression string <" + cronExpression + "> is not valid." ) ; } final JobDetail job = JobBuilder . newJob ( jobDescription . getJobClass ( ) ) . withIdentity ( jobDescription . getJobName ( ) , jobDescription . getGroupName ( ) ) . build ( ) ; job . getJobDataMap ( ) . putAll ( jobDescription . getContextMap ( ) ) ; final Trigger trigger = TriggerBuilder . newTrigger ( ) . withSchedule ( CronScheduleBuilder . cronSchedule ( cronExpression ) . withMisfireHandlingInstructionFireAndProceed ( ) ) . build ( ) ; this . scheduler . scheduleJob ( job , trigger ) ; logger . info ( "Quartz Schedule with jobDetail " + job . getDescription ( ) + " is registered." ) ; return true ; } | Only cron schedule register is supported . Since register might be called when concurrently uploading projects so synchronized is added to ensure thread safety . |
21,894 | private void getFlowsHelper ( final ArrayList < ExecutableFlow > flows , final Collection < Pair < ExecutionReference , ExecutableFlow > > collection ) { collection . stream ( ) . forEach ( ref -> flows . add ( ref . getSecond ( ) ) ) ; } | Helper method to get all flows from collection . |
21,895 | public List < Integer > getQueuedFlowIds ( ) { final List < Integer > allIds = new ArrayList < > ( ) ; try { getExecutionIdsHelper ( allIds , this . executorLoader . fetchQueuedFlows ( ) ) ; } catch ( final ExecutorManagerException e ) { this . logger . error ( "Failed to get queued flow ids." , e ) ; } return allIds ; } | Get execution ids of all non - dispatched flows from database . |
21,896 | public Collection < TriggerInstance > getRecentlyFinished ( final int limit ) { final String query = String . format ( SELECT_RECENTLY_FINISHED , limit ) ; try { return this . dbOperator . query ( query , new TriggerInstanceHandler ( SORT_MODE . SORT_ON_START_TIME_ASC ) ) ; } catch ( final SQLException ex ) { handleSQLException ( ex ) ; } return Collections . emptyList ( ) ; } | Retrieve recently finished trigger instances but flow trigger properties are not populated into the returned trigger instances for efficiency . Flow trigger properties will be retrieved only on request time . |
21,897 | public TriggerInstance getTriggerInstanceByFlowExecId ( final int flowExecId ) { if ( flowExecId == Constants . FAILED_EXEC_ID || flowExecId == Constants . UNASSIGNED_EXEC_ID ) { return null ; } TriggerInstance triggerInstance = null ; try { final Collection < TriggerInstance > res = this . dbOperator . query ( SELECT_EXECUTIONS_BY_EXEC_ID , new TriggerInstanceHandler ( SORT_MODE . SORT_ON_START_TIME_ASC ) , flowExecId ) ; triggerInstance = ! res . isEmpty ( ) ? res . iterator ( ) . next ( ) : null ; } catch ( final SQLException ex ) { handleSQLException ( ex ) ; } populateFlowTriggerProperties ( triggerInstance ) ; return triggerInstance ; } | Retrieve a trigger instance given a flow execution id . Flow trigger properties will also be populated into the returned trigger instance . If flow exec id is - 1 or - 2 then null will be returned . |
21,898 | public TriggerInstance getTriggerInstanceById ( final String triggerInstanceId ) { TriggerInstance triggerInstance = null ; try { final Collection < TriggerInstance > res = this . dbOperator . query ( SELECT_EXECUTIONS_BY_INSTANCE_ID , new TriggerInstanceHandler ( SORT_MODE . SORT_ON_START_TIME_ASC ) , triggerInstanceId ) ; triggerInstance = ! res . isEmpty ( ) ? res . iterator ( ) . next ( ) : null ; } catch ( final SQLException ex ) { handleSQLException ( ex ) ; } populateFlowTriggerProperties ( triggerInstance ) ; return triggerInstance ; } | Retrieve a trigger instance given an instance id . Flow trigger properties will also be populated into the returned trigger instance . |
21,899 | private static IPropertiesValidator getValidator ( GobblinPresets preset ) { Objects . requireNonNull ( preset ) ; switch ( preset ) { case MYSQL_TO_HDFS : return new MySqlToHdfsValidator ( ) ; case HDFS_TO_MYSQL : return new HdfsToMySqlValidator ( ) ; default : throw new UnsupportedOperationException ( "Preset " + preset + " is not supported" ) ; } } | Factory method that provides IPropertiesValidator based on preset in runtime . Using factory method pattern as it is expected to grow . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.