idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
22,000
/**
 * Authorizes HDFS access by logging in the configured principal via its
 * keytab; if the user is already logged in, the TGT is renewed. No-op when
 * security is disabled.
 *
 * @throws AzkabanException when the keytab login fails
 */
public void authorize() {
  if (this.isSecurityEnabled) {
    try {
      login(this.keytabPrincipal, this.keytabPath);
    } catch (final IOException e) {
      log.error(e);
      // Propagate the IOException as the cause instead of discarding it
      // (assumes AzkabanException has a (String, Throwable) constructor —
      // it does in Azkaban).
      throw new AzkabanException(String.format(
          "Error: Unable to authorize to Hadoop. Principal: %s Keytab: %s",
          this.keytabPrincipal, this.keytabPath), e);
    }
  }
}
API to authorize HDFS access. This logs in the configured user via the keytab. If the user is already logged in, it renews the TGT.
22,001
/**
 * Validates all job-callback related properties configured for the given job,
 * appending a message to {@code errors} for each violation found.
 *
 * @param jobName job whose callbacks are validated (used in log output)
 * @param serverProps server-wide settings providing the callback limits
 * @param jobProps the job's own properties containing the callback configs
 * @param errors collection that accumulates validation error messages
 * @return the total number of callbacks found across all job statuses
 */
public static int validate(final String jobName, final Props serverProps,
    final Props jobProps, final Collection<String> errors) {
  // Server-configured limits: max callbacks per status and max POST body size.
  final int maxNumCallback = serverProps.getInt(
      JobCallbackConstants.MAX_CALLBACK_COUNT_PROPERTY_KEY,
      JobCallbackConstants.DEFAULT_MAX_CALLBACK_COUNT);
  final int maxPostBodyLength = serverProps.getInt(
      MAX_POST_BODY_LENGTH_PROPERTY_KEY, DEFAULT_POST_BODY_LENGTH);
  int totalCallbackCount = 0;
  // Validate the callbacks declared for every job status value.
  for (final JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
    totalCallbackCount += validateBasedOnStatus(jobProps, errors, jobStatus,
        maxNumCallback, maxPostBodyLength);
  }
  if (logger.isDebugEnabled()) {
    logger.debug("Found " + totalCallbackCount + " job callbacks for job "
        + jobName);
  }
  return totalCallbackCount;
}
Make sure all the job callback related properties are valid
22,002
/**
 * Expands hive.aux.jars.path from a bare directory path into the list of
 * files inside it (as URIs), mimicking what the hive shell script would do.
 * Heuristic: a value ending in ".jar" is assumed already expanded and is
 * returned unchanged.
 * NOTE(review): the accompanying description says "no occurrence of .jar"
 * but the code checks endsWith(".jar") — confirm which heuristic is intended.
 *
 * @param original configured hive.aux.jars.path value; may be null
 * @return the expanded URI list, or the original value when no expansion applies
 * @throws IOException propagated from URI conversion of the listed files
 */
static String expandHiveAuxJarsPath(final String original) throws IOException {
  if (original == null || original.endsWith(".jar")) {
    return original;
  }
  final File[] files = new File(original).listFiles();
  if (files == null || files.length == 0) {
    // Not a readable directory, or an empty one — nothing to expand.
    return original;
  }
  return filesToURIString(files);
}
Normally hive.aux.jars.path is expanded from just being a path to the full list of files in the directory by the hive shell script. Since we normally won't be running from the script, it's up to us to do that work here. We use a heuristic: if there is no occurrence of .jar in the original, it needs expansion; otherwise it's already been done for us. Also surround the files with URI niceties.
22,003
/**
 * Validates that the given keys are either all present or all absent in
 * props.
 *
 * @throws IllegalArgumentException when only a proper subset of the keys exists
 */
public static void validateAllOrNone(Props props, String... keys) {
  Objects.requireNonNull(keys);
  int present = 0;
  for (String key : keys) {
    if (props.get(key) != null) {
      present++;
    }
  }
  // A partial set (some but not all) is the only invalid configuration.
  if (present > 0 && present < keys.length) {
    throw new IllegalArgumentException(
        "Either all of properties exist or none of them should exist for "
            + Arrays.toString(keys));
  }
}
Validates that either all of the keys exist or none of them exist.
22,004
/**
 * Validates that every given key is present in props: getString throws an
 * UndefinedPropertyException for any missing key.
 */
public static void validateAllNotEmpty(Props props, String... keys) {
  for (int i = 0; i < keys.length; i++) {
    props.getString(keys[i]);
  }
}
Validates that all of the given keys are present in props (getString throws for any missing key).
22,005
/**
 * Recursively deep-copies a Props chain: each ancestor is cloned first so
 * the copy preserves the full parent hierarchy, then the local entries are
 * copied over.
 */
private static Props copyNext(final Props source) {
  final Props parentCopy =
      source.getParent() == null ? null : copyNext(source.getParent());
  final Props copy = new Props(parentCopy);
  for (final String localKey : source.localKeySet()) {
    copy.put(localKey, source.get(localKey));
  }
  return copy;
}
Recursive Clone function of Props
22,006
/**
 * Checks the local map for the key first, then delegates to the parent
 * chain when not found locally.
 */
public boolean containsKey(final Object k) {
  if (this._current.containsKey(k)) {
    return true;
  }
  return this._parent != null && this._parent.containsKey(k);
}
Check key in current Props then search in parent
22,007
/**
 * Checks the local map for the value first, then delegates to the parent
 * chain when not found locally.
 */
public boolean containsValue(final Object value) {
  if (this._current.containsValue(value)) {
    return true;
  }
  return this._parent != null && this._parent.containsValue(value);
}
Check value in current Props then search in parent
22,008
/**
 * Returns the local value for the key when present; otherwise consults the
 * parent chain. Returns null when no ancestor defines the key.
 */
public String get(final Object key) {
  if (this._current.containsKey(key)) {
    return this._current.get(key);
  }
  return this._parent == null ? null : this._parent.get(key);
}
Return value if available in current Props otherwise return from parent
22,009
/**
 * Stores a Long under the key, serialized to its decimal string form.
 *
 * @return the previous string value for the key, if any
 */
public String put(final String key, final Long value) {
  final String serialized = value.toString();
  return this._current.put(key, serialized);
}
Put Long . Stores as String .
22,010
/**
 * Stores a Double under the key, serialized to its decimal string form.
 *
 * @return the previous string value for the key, if any
 */
public String put(final String key, final Double value) {
  final String serialized = value.toString();
  return this._current.put(key, serialized);
}
Put Double . Stores as String .
22,011
/**
 * Copies every visible property of p (local and inherited) into this Props.
 * A null argument is a no-op.
 */
public void putAll(final Props p) {
  if (p == null) {
    return;
  }
  for (final String name : p.getKeySet()) {
    this.put(name, p.get(name));
  }
}
Put all properties in the props into the current props . Will handle null p .
22,012
/**
 * Copies only p's locally-defined properties (not inherited ones) into this
 * Props.
 */
public void putLocal(final Props p) {
  for (final String localKey : p.localKeySet()) {
    this.put(localKey, p.get(localKey));
  }
}
Puts only the local props from p into the current properties
22,013
/**
 * Resolves the value of the key as a Class object.
 *
 * @throws UndefinedPropertyException if the key is absent
 * @throws IllegalArgumentException if the named class cannot be loaded
 */
public Class<?> getClass(final String key) {
  if (!containsKey(key)) {
    throw new UndefinedPropertyException("Missing required property '" + key + "'");
  }
  try {
    return Class.forName(get(key));
  } catch (final ClassNotFoundException e) {
    throw new IllegalArgumentException(e);
  }
}
Attempts to return the Class that corresponds to the Props value. If the class doesn't exist, an IllegalArgumentException will be thrown.
22,014
/**
 * Resolves the key as a Class, or returns defaultClass when the key is
 * absent.
 */
public Class<?> getClass(final String key, final Class<?> defaultClass) {
  return containsKey(key) ? getClass(key) : defaultClass;
}
Gets the class from the Props . If it doesn t exist it will return the defaultClass
22,015
/**
 * Returns the required string value for the key.
 *
 * @throws UndefinedPropertyException if the key is absent
 */
public String getString(final String key) {
  if (!containsKey(key)) {
    throw new UndefinedPropertyException("Missing required property '" + key + "'");
  }
  return get(key);
}
Gets the string from the Props. If it doesn't exist, throws an UndefinedPropertyException.
22,016
/**
 * Returns the value for the key split on the given separator. Missing keys
 * and blank values yield an empty list.
 * (The original's UndefinedPropertyException branch was unreachable dead
 * code: get() returns null for absent keys, which already hit the
 * empty-value early return, so the method never actually threw.)
 *
 * @param key property name to look up
 * @param sep regex separator passed to String.split
 * @return the split values, or an empty list for missing/blank values
 */
public List<String> getStringList(final String key, final String sep) {
  final String val = get(key);
  if (val == null || val.trim().length() == 0) {
    return Collections.emptyList();
  }
  return Arrays.asList(val.split(sep));
}
Returns a list of strings with the sep as the separator of the value
22,017
/**
 * Returns the comma-separated list value for the key, or defaultValue when
 * the key is absent.
 */
public List<String> getStringList(final String key, final List<String> defaultValue) {
  return containsKey(key) ? getStringList(key) : defaultValue;
}
Returns a list of strings with the comma as the separator of the value . If the value is null it ll return the defaultValue .
22,018
/**
 * Returns the value parsed as a long, or defaultValue when the key is
 * absent.
 *
 * @throws NumberFormatException if the stored value cannot be parsed
 */
public long getLong(final String name, final long defaultValue) {
  return containsKey(name) ? Long.parseLong(get(name)) : defaultValue;
}
Returns the long representation of the value . If the value is null then the default value is returned . If the value isn t a long then a parse exception will be thrown .
22,019
/**
 * Returns the required value parsed as a long.
 *
 * @throws UndefinedPropertyException if the key is absent
 * @throws NumberFormatException if the stored value cannot be parsed
 */
public long getLong(final String name) {
  if (!containsKey(name)) {
    throw new UndefinedPropertyException("Missing required property '" + name + "'");
  }
  return Long.parseLong(get(name));
}
Returns the long representation of the value . If the value is null then a UndefinedPropertyException will be thrown . If the value isn t a long then a parse exception will be thrown .
22,020
/**
 * Returns the required value parsed as a URI.
 *
 * @throws UndefinedPropertyException if the key is absent
 * @throws IllegalArgumentException if the value is not a valid URI; the
 *         underlying URISyntaxException is preserved as the cause
 */
public URI getUri(final String name) {
  if (containsKey(name)) {
    try {
      return new URI(get(name));
    } catch (final URISyntaxException e) {
      // Keep the original exception as the cause instead of dropping it.
      throw new IllegalArgumentException(e.getMessage(), e);
    }
  } else {
    throw new UndefinedPropertyException("Missing required property '" + name + "'");
  }
}
Returns the URI representation of the value. If the value is missing, an UndefinedPropertyException is thrown. If the value isn't a valid URI, an IllegalArgumentException will be thrown.
22,021
/**
 * Returns the URI value for the key, or defaultValue when the key is
 * absent.
 *
 * @throws IllegalArgumentException if a present value is not a valid URI
 */
public URI getUri(final String name, final URI defaultValue) {
  return containsKey(name) ? getUri(name) : defaultValue;
}
Returns the URI representation of the value. If the value is missing, the default value is returned. If the value isn't a valid URI, an IllegalArgumentException will be thrown.
22,022
/**
 * Returns the URI value for the key, falling back to the given
 * URI-formatted string default when the key is absent.
 *
 * @throws IllegalArgumentException if the default string is not a valid
 *         URI; the underlying URISyntaxException is preserved as the cause
 */
public URI getUri(final String name, final String defaultValue) {
  try {
    return getUri(name, new URI(defaultValue));
  } catch (final URISyntaxException e) {
    // Keep the original exception as the cause instead of dropping it.
    throw new IllegalArgumentException(e.getMessage(), e);
  }
}
Convert a URI - formatted string value to URI object
22,023
/**
 * Returns a java.util.Properties populated with both this Props' and its
 * parent's properties. Parent entries are inserted FIRST so that local
 * values shadow parent values on key collisions — the original inserted the
 * parent last, letting parent values overwrite local ones, which inverts
 * the shadowing every other accessor in this class implements.
 * NOTE(review): only the immediate parent is consulted, not grandparents —
 * confirm whether the full ancestor chain should be flattened here.
 */
public Properties toAllProperties() {
  Properties allProp = new Properties();
  if (_parent != null) {
    allProp.putAll(_parent.toProperties());
  }
  allProp.putAll(toProperties());
  return allProp;
}
Returns a java . util . Properties file populated with both current and parent properties .
22,024
/**
 * Returns all flattened properties sorted alphabetically by key.
 */
public Map<String, String> getFlattened() {
  // TreeMap's copy constructor performs the putAll and keeps keys sorted.
  return new TreeMap<>(getMapByPrefix(""));
}
Returns a new constructed map of all the flattened properties the item in the returned map is sorted alphabetically by the key value .
22,025
/**
 * Returns a de-duplicated map of all flattened properties whose keys start
 * with the given prefix; the prefix is stripped from the returned keys.
 * Local entries overwrite parent entries. A null prefix yields only the
 * parent's result (or an empty map at the root).
 */
public Map<String, String> getMapByPrefix(final String prefix) {
  final Map<String, String> values =
      (this._parent == null) ? new HashMap<>() : this._parent.getMapByPrefix(prefix);
  if (prefix == null) {
    return values;
  }
  for (final String localKey : this.localKeySet()) {
    // startsWith already returns false for keys shorter than the prefix,
    // so the original's explicit length() comparison was redundant.
    if (localKey != null && localKey.startsWith(prefix)) {
      values.put(localKey.substring(prefix.length()), get(localKey));
    }
  }
  return values;
}
Get a new de - duplicated map of all the flattened properties by given prefix . The prefix will be removed in the return map s keySet .
22,026
/**
 * Returns the union of this Props' local keys and every ancestor's keys.
 */
public Set<String> getKeySet() {
  final HashSet<String> allKeys = new HashSet<>(localKeySet());
  if (this._parent != null) {
    allKeys.addAll(this._parent.getKeySet());
  }
  return allKeys;
}
Returns a set of all keys including the parents
22,027
/**
 * Writes the given comment followed by every visible key/value pair to the
 * logger at INFO level.
 */
public void logProperties(final Logger logger, final String comment) {
  logger.info(comment);
  for (final String name : getKeySet()) {
    logger.info("  key=" + name + " value=" + get(name));
  }
}
Logs the property in the given logger
22,028
/**
 * Propagates a job status change to the BlockingStatus registered for the
 * job, if one exists; unknown job ids are ignored.
 */
protected synchronized void handleJobStatusChange(final String jobId, final Status status) {
  final BlockingStatus block = this.map.get(jobId);
  if (block == null) {
    return;
  }
  block.changeStatus(status);
}
Called when a job's status changes; updates the BlockingStatus registered for that job so waiting JobRunner listeners are unblocked.
22,029
/**
 * Returns the singleton MetricReportManager, creating it lazily on first
 * use via double-checked locking.
 * NOTE(review): double-checked locking is only safe if the {@code instance}
 * field is declared volatile — the declaration is not visible here, confirm
 * at the field's declaration site.
 */
public static MetricReportManager getInstance() {
  if (instance == null) {
    synchronized (MetricReportManager.class) {
      // Re-check under the lock: another thread may have won the race.
      if (instance == null) {
        logger.info("Instantiating MetricReportManager");
        instance = new MetricReportManager();
      }
    }
  }
  return instance;
}
Get a singleton object for Metric Manager
22,030
/**
 * Registers a metric with the manager unless a metric with the same name is
 * already tracked.
 *
 * @throws IllegalArgumentException if metric is null
 */
public void addMetric(final IMetric<?> metric) {
  if (metric == null) {
    throw new IllegalArgumentException("Cannot add a null metric");
  }
  if (getMetricFromName(metric.getName()) != null) {
    // Duplicate name — refuse to register a second instance.
    logger.error("Failed to add metric");
    return;
  }
  logger.debug(String.format("Adding %s metric in Metric Manager", metric.getName()));
  this.metrics.add(metric);
  metric.updateMetricManager(this);
}
Add a metric to be managed by Metric Manager
22,031
/**
 * Returns the tracked metric with the given name, or null when the name is
 * null or no such metric exists.
 */
public IMetric<?> getMetricFromName(final String name) {
  if (name == null) {
    return null;
  }
  for (final IMetric<?> candidate : this.metrics) {
    if (candidate.getName().equals(name)) {
      return candidate;
    }
  }
  return null;
}
Get metric object for a given metric name
22,032
/**
 * Disables the manager and asks every emitter to purge its buffered data.
 * A purge failure is logged per emitter and does not stop the others.
 * Calling this when already disabled only logs.
 */
public void disableManager() {
  logger.info("Disabling Metric Manager");
  if (!isManagerEnabled) {
    return;
  }
  isManagerEnabled = false;
  for (final IMetricEmitter emitter : this.metricEmitters) {
    try {
      emitter.purgeAllData();
    } catch (final MetricException ex) {
      logger.error("Failed to purge data ", ex);
    }
  }
}
Disable Metric Manager and ask all emitters to purge all available data .
22,033
/**
 * Returns history snapshots for the metric whose timestamps fall strictly
 * between from and to, then thins them either statistically (keep outliers)
 * or by even sampling. Entries older than the retention window are evicted
 * as a side effect.
 *
 * @param metricName name of the tracked metric
 * @param from exclusive lower bound of the time window
 * @param to exclusive upper bound of the time window
 * @param useStats true to keep statistical outliers, false for even sampling
 * @throws ClassCastException if snapshot values are not numeric (stats mode)
 */
public List<InMemoryHistoryNode> getMetrics(final String metricName, final Date from,
    final Date to, final Boolean useStats) throws ClassCastException {
  final LinkedList<InMemoryHistoryNode> selectedLists = new LinkedList<>();
  if (this.historyListMapping.containsKey(metricName)) {
    logger.debug("selecting snapshots within time frame");
    // Lock the per-metric list while scanning; it is mutated concurrently.
    synchronized (this.historyListMapping.get(metricName)) {
      for (final InMemoryHistoryNode node : this.historyListMapping.get(metricName)) {
        if (node.getTimestamp().after(from) && node.getTimestamp().before(to)) {
          selectedLists.add(node);
        }
        // Early break presumes insertion in time order — TODO confirm.
        if (node.getTimestamp().after(to)) {
          break;
        }
      }
    }
    if (useStats) {
      statBasedSelectMetricHistory(selectedLists);
    } else {
      generalSelectMetricHistory(selectedLists);
    }
  }
  // Opportunistically evict entries older than the retention window.
  cleanUsingTime(metricName, new Date());
  return selectedLists;
}
Get snapshots for a given metric at a given time
22,034
/**
 * Filters the selected history nodes in place, keeping only statistically
 * significant points: nodes within standardDeviationFactor standard
 * deviations of the mean are removed.
 *
 * @param selectedLists nodes to filter in place
 * @throws ClassCastException if a node value is not a Number
 */
private void statBasedSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists)
    throws ClassCastException {
  logger.debug("selecting snapshots which are far away from mean value");
  final DescriptiveStatistics descStats = getDescriptiveStatistics(selectedLists);
  // double primitives — the original's boxed Doubles caused needless
  // autoboxing on every loop comparison.
  final double mean = descStats.getMean();
  final double std = descStats.getStandardDeviation();
  final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
  while (ite.hasNext()) {
    final InMemoryHistoryNode currentNode = ite.next();
    final double value = ((Number) currentNode.getValue()).doubleValue();
    // Drop nodes inside the (mean - k*std, mean + k*std) band.
    if (value < mean + this.standardDeviationFactor * std
        && value > mean - this.standardDeviationFactor * std) {
      ite.remove();
    }
  }
}
filter snapshots using statistically significant points only
22,035
/**
 * Thins the selected snapshots in place down to roughly numInstances
 * entries by keeping evenly spaced elements across the list and removing
 * the rest. Lists already at or below the limit are untouched.
 */
private void generalSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists) {
  logger.debug("selecting snapshots evenly from across the time interval");
  if (selectedLists.size() > this.numInstances) {
    // Ideal spacing between consecutive kept elements.
    final double step = (double) selectedLists.size() / this.numInstances;
    long nextIndex = 0, currentIndex = 0, numSelectedInstances = 1;
    final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
    while (ite.hasNext()) {
      ite.next();
      if (currentIndex == nextIndex) {
        // Keep this element; compute the next index to keep, rounded to
        // nearest (floor of x + 0.5).
        nextIndex = (long) Math.floor(numSelectedInstances * step + 0.5);
        numSelectedInstances++;
      } else {
        ite.remove();
      }
      currentIndex++;
    }
  }
}
filter snapshots by evenly selecting points across the interval
22,036
/**
 * Evicts history entries for the metric whose age relative to
 * firstAllowedDate exceeds the configured time window. The window size is
 * copied under the manager lock so eviction only needs the per-metric list
 * lock.
 */
private void cleanUsingTime(final String metricName, final Date firstAllowedDate) {
  if (this.historyListMapping.containsKey(metricName)
      && this.historyListMapping.get(metricName) != null) {
    synchronized (this.historyListMapping.get(metricName)) {
      InMemoryHistoryNode firstNode = this.historyListMapping.get(metricName).peekFirst();
      long localCopyOfTimeWindow = 0;
      // Snapshot the (mutable) window size; avoids holding both locks
      // during the eviction loop.
      synchronized (this) {
        localCopyOfTimeWindow = this.timeWindow;
      }
      // Pop entries from the head while the head is older than the window.
      while (firstNode != null
          && TimeUnit.MILLISECONDS.toMillis(
              firstAllowedDate.getTime() - firstNode.getTimestamp().getTime())
          > localCopyOfTimeWindow) {
        this.historyListMapping.get(metricName).removeFirst();
        firstNode = this.historyListMapping.get(metricName).peekFirst();
      }
    }
  }
}
Remove snapshots to maintain reporting interval
22,037
/**
 * Polls every executor for status updates on its running flows and applies
 * them locally. Flows whose executor no longer exists, that have finished,
 * or that errored with an attached flow are collected and finalized at the
 * end. Progress is published via {@code updaterStage} for monitoring.
 */
@SuppressWarnings("unchecked")
public void updateExecutions() {
  this.updaterStage.set("Starting update all flows.");
  // Group running flows by the executor (possibly absent) they map to.
  final Map<Optional<Executor>, List<ExecutableFlow>> exFlowMap = getFlowToExecutorMap();
  final ArrayList<ExecutableFlow> finalizeFlows = new ArrayList<>();
  for (final Map.Entry<Optional<Executor>, List<ExecutableFlow>> entry
      : exFlowMap.entrySet()) {
    final Optional<Executor> executorOption = entry.getKey();
    if (!executorOption.isPresent()) {
      // No executor assigned — these flows cannot be updated, finalize them.
      for (final ExecutableFlow flow : entry.getValue()) {
        logger.warn("Finalizing execution " + flow.getExecutionId()
            + ". Executor id of this execution doesn't exist");
        finalizeFlows.add(flow);
      }
      continue;
    }
    final Executor executor = executorOption.get();
    this.updaterStage.set("Starting update flows on " + executor.getHost() + ":"
        + executor.getPort());
    Map<String, Object> results = null;
    try {
      // Ask the executor for the current state of its assigned flows.
      results = this.apiGateway.updateExecutions(executor, entry.getValue());
    } catch (final ExecutorManagerException e) {
      handleException(entry, executor, e, finalizeFlows);
    }
    if (results != null) {
      final List<Map<String, Object>> executionUpdates =
          (List<Map<String, Object>>) results.get(ConnectorParams.RESPONSE_UPDATED_FLOWS);
      for (final Map<String, Object> updateMap : executionUpdates) {
        try {
          final ExecutableFlow flow = updateExecution(updateMap);
          this.updaterStage.set("Updated flow " + flow.getExecutionId());
          if (ExecutionControllerUtils.isFinished(flow)) {
            finalizeFlows.add(flow);
          }
        } catch (final ExecutorManagerException e) {
          // Per-flow update failure: finalize the flow if we know which it is.
          final ExecutableFlow flow = e.getExecutableFlow();
          logger.error(e);
          if (flow != null) {
            logger.warn("Finalizing execution " + flow.getExecutionId());
            finalizeFlows.add(flow);
          }
        }
      }
    }
  }
  this.updaterStage.set("Finalizing " + finalizeFlows.size() + " error flows.");
  for (final ExecutableFlow flow : finalizeFlows) {
    this.executionFinalizer.finalizeFlow(flow,
        "Not running on the assigned executor (any more)", null);
  }
  this.updaterStage.set("Updated all active flows. Waiting for next round.");
}
Updates running executions .
22,038
/**
 * Returns a snapshot list of all registered topic names.
 */
public synchronized List<String> getTopicList() {
  return new ArrayList<>(this.topicEventMap.keySet());
}
Get a list of topics .
22,039
/**
 * Returns every regex registered under the topic that matches the payload.
 * An unknown topic yields an empty set.
 */
public synchronized Set<String> regexInTopic(final String topic, final String payload) {
  final Map<String, List<KafkaDependencyInstanceContext>> eventMap =
      this.topicEventMap.get(topic);
  if (eventMap == null) {
    return Collections.emptySet();
  }
  final Set<String> matched = new HashSet<>();
  for (final String regex : eventMap.keySet()) {
    final RegexKafkaDependencyMatcher matcher =
        new RegexKafkaDependencyMatcher(Pattern.compile(regex));
    if (matcher.isMatch(payload)) {
      matched.add(regex);
    }
  }
  return matched;
}
Return a set of pattern that matches with the payload .
22,040
/**
 * Returns the dependency contexts registered under the given topic and
 * event regex. Always returns a list — the original returned null when the
 * topic existed but the regex did not, contradicting its empty-list
 * contract for the unknown-topic case.
 */
public synchronized List<KafkaDependencyInstanceContext> getDepsByTopicAndEvent(
    final String topic, final String regex) {
  final Map<String, List<KafkaDependencyInstanceContext>> regexMap =
      this.topicEventMap.get(topic);
  if (regexMap != null) {
    final List<KafkaDependencyInstanceContext> deps = regexMap.get(regex);
    if (deps != null) {
      return deps;
    }
  }
  return Collections.emptyList();
}
Returns dependencies with topic and dependency s event regular expression match
22,041
/**
 * Creates a new event for the given runner and type after validating that
 * the event data is present.
 *
 * @throws NullPointerException if eventData is null
 */
public static Event create(final Object runner, final EventType type,
    final EventData eventData) throws NullPointerException {
  // checkNotNull returns its argument, so validation can be inlined.
  return new Event(runner, type,
      Preconditions.checkNotNull(eventData, "EventData was null"));
}
Creates a new event .
22,042
/**
 * Resolves the real client IP: the first entry of the X-Forwarded-For
 * header when present, otherwise the remote TCP address. A ":port" suffix
 * is stripped for host:port values; addresses with multiple colons (IPv6)
 * are returned intact — the original split on ':' unconditionally, which
 * mangled IPv6 addresses down to their first group.
 *
 * @param httpHeaders request headers (consulted for X-Forwarded-For)
 * @param remoteAddr address from the low-level TCP connection
 * @return the client IP without a port suffix
 */
public static String getRealClientIpAddr(final Map<String, String> httpHeaders,
    final String remoteAddr) {
  String clientIp = httpHeaders.getOrDefault(X_FORWARDED_FOR_HEADER, null);
  if (clientIp == null) {
    clientIp = remoteAddr;
  } else {
    // X-Forwarded-For is "client, proxy1, proxy2" — take the client entry
    // and trim the optional space after the comma.
    clientIp = clientIp.split(",")[0].trim();
  }
  // Strip a port only when there is exactly one colon (host:port). IPv6
  // literals contain multiple colons and must not be truncated.
  final int firstColon = clientIp.indexOf(':');
  if (firstColon >= 0 && firstColon == clientIp.lastIndexOf(':')) {
    clientIp = clientIp.substring(0, firstColon);
  }
  return clientIp;
}
Gets the actual client IP address inspecting the X - Forwarded - For HTTP header or using the provided remote IP address from the low level TCP connection from the client .
22,043
/**
 * Writes the given props into the node addressed by path inside the flow
 * YAML file, serializing the tree back in block style.
 *
 * @throws ProjectManagerException if the file cannot be written; the
 *         underlying IOException is preserved as the cause (the original
 *         discarded it)
 */
public static void setPropsInYamlFile(final String path, final File flowFile,
    final Props prop) {
  final DumperOptions options = new DumperOptions();
  options.setDefaultFlowStyle(FlowStyle.BLOCK);
  final NodeBean nodeBean = FlowLoaderUtils.setPropsInNodeBean(path, flowFile, prop);
  try (final BufferedWriter writer =
      Files.newBufferedWriter(flowFile.toPath(), StandardCharsets.UTF_8)) {
    new Yaml(options).dump(nodeBean, writer);
  } catch (final IOException e) {
    throw new ProjectManagerException(
        "Failed to set properties in flow file " + flowFile.getName(), e);
  }
}
Sets props in flow yaml file .
22,044
/**
 * Loads the flow YAML into a NodeBean tree and overrides the props of the
 * node addressed by path.
 *
 * @return the modified NodeBean, or null when the path cannot be resolved
 *         or the file fails to load
 */
public static NodeBean setPropsInNodeBean(final String path, final File flowFile,
    final Props prop) {
  final NodeBeanLoader loader = new NodeBeanLoader();
  try {
    final NodeBean nodeBean = loader.load(flowFile);
    final String[] pathList = path.split(Constants.PATH_DELIMITER);
    if (overridePropsInNodeBean(nodeBean, pathList, 0, prop)) {
      return nodeBean;
    }
    logger.error("Error setting props for " + path);
  } catch (final Exception e) {
    // Log the exception itself — the original dropped it, hiding the cause.
    logger.error("Failed to set props, error loading flow YAML file " + flowFile, e);
  }
  return null;
}
Sets props in node bean .
22,045
/**
 * Recursively walks the NodeBean tree along pathList; when the terminal
 * path element is reached, replaces that node's type (if provided) and
 * config with the given props.
 *
 * @return true once a node was overridden, false when the path does not match
 */
private static boolean overridePropsInNodeBean(final NodeBean nodeBean,
    final String[] pathList, final int idx, final Props prop) {
  if (idx >= pathList.length || !nodeBean.getName().equals(pathList[idx])) {
    return false;
  }
  if (idx == pathList.length - 1) {
    if (prop.containsKey(Constants.NODE_TYPE)) {
      nodeBean.setType(prop.get(Constants.NODE_TYPE));
    }
    final Map<String, String> config = prop.getFlattened();
    // The node type lives on the bean itself, not in its config map.
    config.remove(Constants.NODE_TYPE);
    nodeBean.setConfig(config);
    return true;
  }
  for (final NodeBean child : nodeBean.getNodes()) {
    if (overridePropsInNodeBean(child, pathList, idx + 1, prop)) {
      return true;
    }
  }
  return false;
}
Helper method to recursively find the node to override props .
22,046
/**
 * Loads the flow YAML file and returns the props of the node addressed by
 * path, or null when the node cannot be found or the file fails to load.
 */
public static Props getPropsFromYamlFile(final String path, final File flowFile) {
  final List<Props> collected = new ArrayList<>();
  final NodeBeanLoader loader = new NodeBeanLoader();
  try {
    final NodeBean rootBean = loader.load(flowFile);
    final String[] pathList = path.split(Constants.PATH_DELIMITER);
    if (findPropsFromNodeBean(rootBean, pathList, 0, collected)) {
      if (collected.isEmpty()) {
        logger.error("Error getting props for " + path);
      } else {
        return collected.get(0);
      }
    }
  } catch (final Exception e) {
    logger.error("Failed to get props, error loading flow YAML file. ", e);
  }
  return null;
}
Gets flow or job props from flow yaml file .
22,047
/**
 * Recursively searches the NodeBean tree along pathList; when the terminal
 * path element is found, that node's props are appended to propsList.
 *
 * @return true once the target node was located
 */
private static boolean findPropsFromNodeBean(final NodeBean nodeBean,
    final String[] pathList, final int idx, final List<Props> propsList) {
  if (idx >= pathList.length || !nodeBean.getName().equals(pathList[idx])) {
    return false;
  }
  if (idx == pathList.length - 1) {
    propsList.add(nodeBean.getProps());
    return true;
  }
  for (final NodeBean child : nodeBean.getNodes()) {
    if (findPropsFromNodeBean(child, pathList, idx + 1, propsList)) {
      return true;
    }
  }
  return false;
}
Helper method to recursively find props from node bean .
22,048
/**
 * Adds success/failure notification email lists from the props to the flow.
 * Addresses are lower-cased and de-duplicated; notify emails are added to
 * both lists.
 */
public static void addEmailPropsToFlow(final Flow flow, final Props prop) {
  // Collections.emptyList() is properly typed, unlike the raw
  // Collections.EMPTY_LIST the original used.
  final Set<String> successEmail = new HashSet<>();
  for (final String email : prop.getStringList(
      CommonJobProperties.SUCCESS_EMAILS, Collections.emptyList())) {
    successEmail.add(email.toLowerCase());
  }
  final Set<String> failureEmail = new HashSet<>();
  for (final String email : prop.getStringList(
      CommonJobProperties.FAILURE_EMAILS, Collections.emptyList())) {
    failureEmail.add(email.toLowerCase());
  }
  // Notify addresses are interested in both outcomes.
  for (final String email : prop.getStringList(
      CommonJobProperties.NOTIFY_EMAILS, Collections.emptyList())) {
    final String normalized = email.toLowerCase();
    successEmail.add(normalized);
    failureEmail.add(normalized);
  }
  flow.addFailureEmails(failureEmail);
  flow.addSuccessEmails(successEmail);
}
Adds email properties to a flow .
22,049
/**
 * Builds a ValidationReport carrying the given flow-loader error messages.
 */
public static ValidationReport generateFlowLoaderReport(final Set<String> errors) {
  final ValidationReport flowLoaderReport = new ValidationReport();
  flowLoaderReport.addErrorMsgs(errors);
  return flowLoaderReport;
}
Generate flow loader report validation report .
22,050
/**
 * Validates per-job memory settings (Xms/Xmx) against the configured limits
 * and runs job-callback validation, appending a message to {@code errors}
 * for each violation. Projects whitelisted for MemoryCheck are skipped.
 */
public static void checkJobProperties(final int projectId, final Props props,
    final Map<String, Props> jobPropsMap, final Set<String> errors) {
  if (ProjectWhitelist.isProjectWhitelisted(projectId,
      ProjectWhitelist.WhitelistType.MemoryCheck)) {
    return;
  }
  final MemConfValue maxXms = MemConfValue.parseMaxXms(props);
  final MemConfValue maxXmx = MemConfValue.parseMaxXmx(props);
  // Iterate entries directly instead of keySet() plus a get() per key.
  for (final Map.Entry<String, Props> entry : jobPropsMap.entrySet()) {
    final String jobName = entry.getKey();
    final Props jobProps = entry.getValue();
    final String xms = jobProps.getString(XMS, null);
    // Variable-replacement patterns (e.g. ${mem}) cannot be checked statically.
    if (xms != null && !PropsUtils.isVariableReplacementPattern(xms)
        && Utils.parseMemString(xms) > maxXms.getSize()) {
      errors.add(String.format(
          "%s: Xms value has exceeded the allowed limit (max Xms = %s)",
          jobName, maxXms.getString()));
    }
    final String xmx = jobProps.getString(XMX, null);
    if (xmx != null && !PropsUtils.isVariableReplacementPattern(xmx)
        && Utils.parseMemString(xmx) > maxXmx.getSize()) {
      errors.add(String.format(
          "%s: Xmx value has exceeded the allowed limit (max Xmx = %s)",
          jobName, maxXmx.getString()));
    }
    JobCallbackValidator.validate(jobName, props, jobProps, errors);
  }
}
Check job properties .
22,051
/**
 * Recursively deletes the directory. Null or nonexistent directories are
 * ignored; when deletion fails the directory is scheduled for removal on
 * JVM exit instead.
 */
public static void cleanUpDir(final File dir) {
  if (dir == null || !dir.exists()) {
    return;
  }
  try {
    FileUtils.deleteDirectory(dir);
  } catch (final IOException e) {
    logger.error("Failed to delete the directory", e);
    dir.deleteOnExit();
  }
}
Clean up the directory .
22,052
/**
 * Returns the active project with the given name, consulting the in-memory
 * cache first and falling back to the DB on a miss. Returns null when the
 * project does not exist or loading fails.
 */
public Project getProject(final String name) {
  final Project cached = this.projectsByName.get(name);
  if (cached != null) {
    return cached;
  }
  Project fetchedProject = null;
  try {
    fetchedProject = this.projectLoader.fetchProjectByName(name);
    if (fetchedProject != null) {
      logger.info("Project " + name + " not found in cache, fetched from DB.");
    } else {
      logger.info("No active project with name " + name + " exists in cache or DB.");
    }
  } catch (final ProjectManagerException e) {
    logger.error("Could not load project from store.", e);
  }
  return fetchedProject;
}
fetch active project by project name . Queries the cache first then db if not found
22,053
/**
 * Returns the project with the given id, consulting the in-memory cache of
 * active projects first and falling back to the DB on a miss. Returns null
 * when the project does not exist or loading fails.
 */
public Project getProject(final int id) {
  final Project cached = this.projectsById.get(id);
  if (cached != null) {
    return cached;
  }
  Project fetchedProject = null;
  try {
    fetchedProject = this.projectLoader.fetchProjectById(id);
  } catch (final ProjectManagerException e) {
    logger.error("Could not load project from store.", e);
  }
  return fetchedProject;
}
fetch active project from cache and inactive projects from db by project_id
22,054
/**
 * Purges stored data for all project versions strictly below
 * {@code version + 1} (i.e. up to and including the current version) and
 * records a PURGE event attributed to the deleter.
 * NOTE(review): the original description says "all project files ... for
 * all versions"; the call is cleanOlderProjectVersion(..., version + 1, ...)
 * — confirm whether future-uploaded versions are meant to survive.
 *
 * @param project the project whose versions are purged
 * @param deleter the user performing the purge, recorded in the event log
 * @return the same project instance
 */
public synchronized Project purgeProject(final Project project, final User deleter)
    throws ProjectManagerException {
  this.projectLoader.cleanOlderProjectVersion(project.getId(),
      project.getVersion() + 1, Collections.emptyList());
  this.projectLoader.postEvent(project, EventType.PURGE, deleter.getUserId(),
      String.format("Purged versions before %d", project.getVersion() + 1));
  return project;
}
Permanently delete all project files and properties data for all versions of a project and log event in project_events table
22,055
/**
 * Joins the strings with the delimiter appended after EVERY element,
 * including the last (e.g. ["a","b"] with "," yields "a,b,"). Kept for
 * plugins that must avoid Apache Commons; see join2 for the variant
 * without the trailing delimiter.
 *
 * @param list strings to join; an empty collection yields ""
 * @param delimiter separator appended after each element
 */
public static String join(final Collection<String> list, final String delimiter) {
  // StringBuilder: no need for StringBuffer's per-call synchronization here.
  final StringBuilder buffer = new StringBuilder();
  for (final String str : list) {
    buffer.append(str);
    buffer.append(delimiter);
  }
  return buffer.toString();
}
Use this when you don t want to include Apache Common s string for plugins .
22,056
/**
 * Joins the strings with the delimiter between elements only — no trailing
 * delimiter (e.g. ["a","b"] with "," yields "a,b").
 *
 * @param list strings to join; an empty collection yields ""
 * @param delimiter separator placed between consecutive elements
 */
public static String join2(final Collection<String> list, final String delimiter) {
  // String.join implements exactly this contract in the standard library,
  // replacing the manual StringBuffer + first-element flag loop.
  return String.join(delimiter, list);
}
Don t bother to add delimiter for last element
22,057
/**
 * Handles ajax requests for this servlet: "fetchallprojects" returns a
 * simplified view of every project, "fetchuserprojects" returns the
 * requesting user's projects. The result map is written back as JSON
 * (empty for unknown actions).
 */
private void handleAjaxAction(final HttpServletRequest req, final HttpServletResponse resp,
    final Session session, final ProjectManager manager)
    throws ServletException, IOException {
  final String ajaxName = getParam(req, "ajax");
  final HashMap<String, Object> ret = new HashMap<>();
  switch (ajaxName) {
    case "fetchallprojects": {
      final List<Project> allProjects = manager.getProjects();
      ret.put("projects", toSimplifiedProjects(allProjects));
      break;
    }
    case "fetchuserprojects":
      handleFetchUserProjects(req, session, manager, ret);
      break;
    default:
      // Unknown action: respond with an empty JSON object.
      break;
  }
  this.writeJSON(resp, ret);
}
Handles ajax requests for the ProjectServlet and writes a JSON response containing the requested project list.
22,058
/**
 * Renders the homepage users see after logging in. Hides the create-project
 * button when creation is locked down and the user lacks permission, and
 * shows the "all", "group", or (default) "personal" project listing based
 * on the request parameters.
 */
private void handlePageRender(final HttpServletRequest req, final HttpServletResponse resp,
    final Session session, final ProjectManager manager) {
  final User user = session.getUser();
  final Page page =
      newPage(req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
  final boolean mayCreate = !this.lockdownCreateProjects
      || UserUtils.hasPermissionforAction(this.userManager, user,
          Permission.Type.CREATEPROJECTS);
  if (!mayCreate) {
    page.add("hideCreateProject", true);
  }
  // Select which project listing to show.
  final String viewMode;
  final List<Project> projects;
  if (hasParam(req, "all")) {
    projects = manager.getProjects();
    viewMode = "all";
  } else if (hasParam(req, "group")) {
    projects = manager.getGroupProjects(user);
    viewMode = "group";
  } else {
    projects = manager.getUserProjects(user);
    viewMode = "personal";
  }
  page.add("viewProjects", viewMode);
  page.add("projects", projects);
  page.render();
}
Renders the user homepage that users see when they log in
22,059
/**
 * Schedules flows containing flow triggers for this project: for every
 * non-embedded flow whose latest uploaded .flow file defines a flow trigger,
 * registers a Quartz job keyed by the flow's group name.
 *
 * @param project the project whose flows are scanned
 * @param submitUser user recorded as the submitter on the scheduled job
 * @throws SchedulerException if Quartz registration fails
 * @throws IOException if the flow file cannot be read
 */
public void schedule(final Project project, final String submitUser)
    throws ProjectManagerException, IOException, SchedulerException {
  for (final Flow flow : project.getFlows()) {
    // Embedded (sub) flows are not scheduled directly.
    if (flow.isEmbeddedFlow()) {
      continue;
    }
    final String flowFileName = flow.getId() + ".flow";
    final int latestFlowVersion = this.projectLoader
        .getLatestFlowVersion(flow.getProjectId(), flow.getVersion(), flowFileName);
    // A non-positive version means no .flow file was uploaded for this flow.
    if (latestFlowVersion > 0) {
      final File tempDir = Files.createTempDir();
      final File flowFile;
      try {
        // Download the flow file into a temp dir to read its trigger section.
        flowFile = this.projectLoader
            .getUploadedFlowFile(project.getId(), project.getVersion(), flowFileName,
                latestFlowVersion, tempDir);
        final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile);
        if (flowTrigger != null) {
          // Context passed to the Quartz job when it fires.
          final Map<String, Object> contextMap = ImmutableMap
              .of(FlowTriggerQuartzJob.SUBMIT_USER, submitUser,
                  FlowTriggerQuartzJob.FLOW_TRIGGER, flowTrigger,
                  FlowTriggerQuartzJob.FLOW_ID, flow.getId(),
                  FlowTriggerQuartzJob.FLOW_VERSION, latestFlowVersion,
                  FlowTriggerQuartzJob.PROJECT_ID, project.getId());
          // scheduleJobIfAbsent returns false when the same job name/group
          // is already registered (duplicate registration attempt).
          final boolean scheduleSuccess = this.scheduler
              .scheduleJobIfAbsent(flowTrigger.getSchedule().getCronExpression(),
                  new QuartzJobDescription(FlowTriggerQuartzJob.class,
                      FlowTriggerQuartzJob.JOB_NAME, generateGroupName(flow), contextMap));
          if (scheduleSuccess) {
            logger.info("Successfully registered flow {}.{} to scheduler", project.getName(),
                flow.getId());
          } else {
            logger.info("Fail to register a duplicate flow {}.{} to scheduler",
                project.getName(), flow.getId());
          }
        }
      } catch (final SchedulerException | IOException ex) {
        logger.error("Error in registering flow {}.{}", project.getName(), flow.getId(), ex);
        throw ex;
      } finally {
        // Always remove the temp dir the flow file was downloaded into.
        FlowLoaderUtils.cleanUpDir(tempDir);
      }
    }
  }
}
Schedule flows containing flow triggers for this project .
22,060
/**
 * Retrieves the list of scheduled flow triggers from the quartz database.
 * Groups whose job details cannot be loaded contribute a null entry so
 * callers can see which group failed.
 *
 * @return one entry per Quartz job group (null on per-group failure), or an
 *     empty list if the scheduler itself cannot be queried
 */
public List<ScheduledFlowTrigger> getScheduledFlowTriggerJobs() {
  try {
    final Scheduler quartzScheduler = this.scheduler.getScheduler();
    final List<String> groupNames = quartzScheduler.getJobGroupNames();
    final List<ScheduledFlowTrigger> flowTriggerJobDetails = new ArrayList<>();
    for (final String groupName : groupNames) {
      final JobKey jobKey = new JobKey(FlowTriggerQuartzJob.JOB_NAME, groupName);
      ScheduledFlowTrigger scheduledFlowTrigger = null;
      try {
        final JobDetail job = quartzScheduler.getJobDetail(jobKey);
        final JobDataMap jobDataMap = job.getJobDataMap();
        final String flowId = jobDataMap.getString(FlowTriggerQuartzJob.FLOW_ID);
        final int projectId = jobDataMap.getInt(FlowTriggerQuartzJob.PROJECT_ID);
        final FlowTrigger flowTrigger =
            (FlowTrigger) jobDataMap.get(FlowTriggerQuartzJob.FLOW_TRIGGER);
        final String submitUser = jobDataMap.getString(FlowTriggerQuartzJob.SUBMIT_USER);
        final List<? extends Trigger> quartzTriggers = quartzScheduler.getTriggersOfJob(jobKey);
        final boolean isPaused = this.scheduler
            .isJobPaused(FlowTriggerQuartzJob.JOB_NAME, groupName);
        // Fetch the project once and reuse it for both the name lookup and
        // the flow lookup; the original code queried the project manager
        // twice for the same project id.
        final Project project = this.projectManager.getProject(projectId);
        final Flow flow = project.getFlow(flowId);
        scheduledFlowTrigger = new ScheduledFlowTrigger(projectId, project.getName(), flowId,
            flowTrigger, submitUser,
            quartzTriggers.isEmpty() ? null : quartzTriggers.get(0), isPaused,
            flow.isLocked());
      } catch (final Exception ex) {
        // Keep a null placeholder for this group and continue with the rest.
        logger.error("Unable to get flow trigger by job key {}", jobKey, ex);
        scheduledFlowTrigger = null;
      }
      flowTriggerJobDetails.add(scheduledFlowTrigger);
    }
    return flowTriggerJobDetails;
  } catch (final Exception ex) {
    logger.error("Unable to get scheduled flow triggers", ex);
    return new ArrayList<>();
  }
}
Retrieve the list of scheduled flow triggers from quartz database
22,061
/**
 * Unschedules all possible (non-embedded) flows of the project from the
 * quartz scheduler.
 *
 * @throws SchedulerException if unscheduling any flow fails
 */
public void unschedule(final Project project) throws SchedulerException {
  for (final Flow flow : project.getFlows()) {
    // Embedded flows are never scheduled directly, so skip them.
    if (flow.isEmbeddedFlow()) {
      continue;
    }
    try {
      final boolean removed = this.scheduler
          .unscheduleJob(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(flow));
      if (removed) {
        logger.info("Flow {}.{} unregistered from scheduler", project.getName(), flow.getId());
      }
    } catch (final SchedulerException e) {
      logger.error("Fail to unregister flow from scheduler {}.{}", project.getName(),
          flow.getId(), e);
      throw e;
    }
  }
}
Unschedule all possible flows in a project
22,062
/**
 * Fetches the names of all metrics tracked by the given executor and stores
 * them under "metricList" in {@code ret}, or an "error" entry on failure.
 */
private void handleGetAllMetricName(final int executorId, final HttpServletRequest req,
    final HashMap<String, Object> ret) throws IOException {
  try {
    final Map<String, Object> response = this.execManagerAdapter
        .callExecutorStats(executorId, ConnectorParams.STATS_GET_ALLMETRICSNAME,
            (Pair<String, String>[]) null);
    if (response.containsKey(ConnectorParams.RESPONSE_ERROR)) {
      ret.put("error", response.get(ConnectorParams.RESPONSE_ERROR).toString());
    } else {
      ret.put("metricList", response.get("data"));
    }
  } catch (final ExecutorManagerException e) {
    logger.error(e.getMessage(), e);
    ret.put("error", "Failed to fetch metric names for executor : " + executorId);
  }
}
Get all metrics tracked by the given executor
22,063
/**
 * Generic helper that performs the given {@code actionName} action against
 * the executor via the exec manager, copying either the returned status or
 * the error message into {@code ret}.
 */
private void handleChangeConfigurationRequest(final int executorId, final String actionName,
    final HttpServletRequest req, final HashMap<String, Object> ret)
    throws ServletException, IOException {
  try {
    final Map<String, Object> response = this.execManagerAdapter
        .callExecutorStats(executorId, actionName, getAllParams(req));
    if (response.containsKey(ConnectorParams.RESPONSE_ERROR)) {
      ret.put(ConnectorParams.RESPONSE_ERROR,
          response.get(ConnectorParams.RESPONSE_ERROR).toString());
    } else {
      ret.put(ConnectorParams.STATUS_PARAM, response.get(ConnectorParams.STATUS_PARAM));
    }
  } catch (final ExecutorManagerException ex) {
    logger.error(ex.getMessage(), ex);
    ret.put("error", "Failed to change config change");
  }
}
Generic helper that performs the given actionName action on the Azkaban exec server and records the resulting status or error.
22,064
/**
 * Parses all HTTP request parameters into (name, value) pairs, emitting one
 * pair per value for multi-valued parameters.
 *
 * @param req the incoming request
 * @return a flattened array of all parameter name/value pairs
 */
private Pair<String, String>[] getAllParams(final HttpServletRequest req) {
  final List<Pair<String, String>> allParams = new ArrayList<>();
  // getParameterMap is declared Map<String, String[]>; iterate with proper
  // generics instead of the raw Iterator/Map.Entry casts used previously.
  for (final Map.Entry<String, String[]> entry : req.getParameterMap().entrySet()) {
    for (final String value : entry.getValue()) {
      allParams.add(new Pair<>(entry.getKey(), value));
    }
  }
  return allParams.toArray(new Pair[allParams.size()]);
}
Parse all Http request params
22,065
/**
 * Creates a new node and adds it to the DagBuilder.
 *
 * @param name unique node name within this DAG
 * @param nodeProcessor processor attached to the node
 * @return the newly created node
 * @throws DagException if a node with the same name already exists
 */
public Node createNode(final String name, final NodeProcessor nodeProcessor) {
  checkIsBuilt();
  // Node names are the lookup keys, so they must be unique per builder.
  if (this.nameToNodeMap.containsKey(name)) {
    throw new DagException(String.format(
        "Node names in %s need to be unique. The name (%s) already exists.", this, name));
  }
  final Node newNode = new Node(name, nodeProcessor, this.dag);
  this.nameToNodeMap.put(name, newNode);
  return newNode;
}
Creates a new node and adds it to the DagBuilder .
22,066
/**
 * Checks if the builder contains nodes that form a circular dependency ring,
 * throwing a DagException that includes a sample of the offending nodes.
 */
private void checkCircularDependencies() {
  // Depth-first search over parents using three node "colors":
  //   toVisit  - not yet visited
  //   ongoing  - on the current DFS path (revisiting one of these => cycle)
  //   finished - fully explored and known cycle-free
  class CircularDependencyChecker {

    // Nodes that still need to be visited.
    private final Set<Node> toVisit = new HashSet<>(DagBuilder.this.nameToNodeMap.values());

    // Nodes that have been fully explored without finding a cycle.
    private final Set<Node> finished = new HashSet<>();

    // Nodes on the current DFS path.
    private final Set<Node> ongoing = new HashSet<>();

    // Nodes collected along one detected cycle, for the error message.
    private final List<Node> sampleCircularNodes = new ArrayList<>();

    private void check() {
      while (!this.toVisit.isEmpty()) {
        final Node node = removeOneNodeFromToVisitSet();
        if (checkNode(node)) {
          final String msg = String.format("Circular dependency detected. Sample: %s",
              this.sampleCircularNodes);
          throw new DagException(msg);
        }
      }
    }

    // Removes and returns an arbitrary node from the toVisit set.
    private Node removeOneNodeFromToVisitSet() {
      final Iterator<Node> iterator = this.toVisit.iterator();
      final Node node = iterator.next();
      iterator.remove();
      return node;
    }

    // Returns true if a cycle is reachable from this node via its parents.
    private boolean checkNode(final Node node) {
      if (this.finished.contains(node)) {
        return false;
      }
      if (this.ongoing.contains(node)) {
        // Reached a node already on the current path: cycle detected.
        this.sampleCircularNodes.add(node);
        return true;
      }
      this.toVisit.remove(node);
      this.ongoing.add(node);
      for (final Node parent : node.getParents()) {
        if (checkNode(parent)) {
          // Record the path back up so the sample lists the cycle members.
          this.sampleCircularNodes.add(node);
          return true;
        }
      }
      this.ongoing.remove(node);
      this.finished.add(node);
      return false;
    }
  }

  final CircularDependencyChecker checker = new CircularDependencyChecker();
  checker.check();
}
Checks if the builder contains nodes that form a circular dependency ring .
22,067
/**
 * If the current status of the execution is not one of the finished
 * statuses, marks the execution as failed in the DB, then alerts the user
 * (unless the DB update itself failed).
 *
 * @param reason human-readable reason included in the alert
 * @param originalError optional error whose stack trace is appended to the alert
 */
public static void finalizeFlow(final ExecutorLoader executorLoader,
    final AlerterHolder alerterHolder, final ExecutableFlow flow, final String reason,
    final Throwable originalError) {
  boolean alertUser = true;
  try {
    final ExecutableFlow dsFlow;
    if (isFinished(flow)) {
      dsFlow = flow;
    } else {
      // Re-fetch from the DB in case the stored copy already finished.
      dsFlow = executorLoader.fetchExecutableFlow(flow.getExecutionId());
      if (!isFinished(dsFlow)) {
        failEverything(dsFlow);
        executorLoader.updateExecutableFlow(dsFlow);
      }
    }
    if (flow.getEndTime() == -1) {
      flow.setEndTime(System.currentTimeMillis());
      // NOTE(review): the end time is set on 'flow' but the DB update is
      // issued on 'dsFlow', which may be a different object when the flow
      // was re-fetched above — confirm this asymmetry is intentional.
      executorLoader.updateExecutableFlow(dsFlow);
    }
  } catch (final ExecutorManagerException e) {
    // If persisting failed the final state is unknown, so skip alerting.
    alertUser = false;
    logger.error("Failed to finalize flow " + flow.getExecutionId() + ", do not alert user.",
        e);
  }
  if (alertUser) {
    alertUserOnFlowFinished(flow, alerterHolder, getFinalizeFlowReasons(reason, originalError));
  }
}
If the current status of the execution is not one of the finished statuses mark the execution as failed in the DB .
22,068
/**
 * When a flow is finished, alerts the user as configured in the execution
 * options: email alerts first, then an optional custom alerter selected via
 * the "alert.type" flow parameter. Alerter failures are logged, never thrown.
 *
 * @param extraReasons extra reason strings passed to failure alerts only
 */
public static void alertUserOnFlowFinished(final ExecutableFlow flow,
    final AlerterHolder alerterHolder, final String[] extraReasons) {
  final ExecutionOptions options = flow.getExecutionOptions();
  final Alerter mailAlerter = alerterHolder.get("email");
  if (flow.getStatus() != Status.SUCCEEDED) {
    // Failure path: send failure emails when any are configured.
    if (options.getFailureEmails() != null && !options.getFailureEmails().isEmpty()) {
      try {
        mailAlerter.alertOnError(flow, extraReasons);
      } catch (final Exception e) {
        logger.error("Failed to alert on error for execution " + flow.getExecutionId(), e);
      }
    }
    // Optional custom alerter chosen per-execution via "alert.type".
    if (options.getFlowParameters().containsKey("alert.type")) {
      final String alertType = options.getFlowParameters().get("alert.type");
      final Alerter alerter = alerterHolder.get(alertType);
      if (alerter != null) {
        try {
          alerter.alertOnError(flow, extraReasons);
        } catch (final Exception e) {
          logger.error("Failed to alert on error by " + alertType + " for execution "
              + flow.getExecutionId(), e);
        }
      } else {
        logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
      }
    }
  } else {
    // Success path: mirrors the failure path with the success callbacks.
    if (options.getSuccessEmails() != null && !options.getSuccessEmails().isEmpty()) {
      try {
        mailAlerter.alertOnSuccess(flow);
      } catch (final Exception e) {
        logger.error("Failed to alert on success for execution " + flow.getExecutionId(), e);
      }
    }
    if (options.getFlowParameters().containsKey("alert.type")) {
      final String alertType = options.getFlowParameters().get("alert.type");
      final Alerter alerter = alerterHolder.get(alertType);
      if (alerter != null) {
        try {
          alerter.alertOnSuccess(flow);
        } catch (final Exception e) {
          logger.error("Failed to alert on success by " + alertType + " for execution "
              + flow.getExecutionId(), e);
        }
      } else {
        logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
      }
    }
  }
}
When a flow is finished alert the user as is configured in the execution options .
22,069
/**
 * Alerts the user when the flow has encountered its first error, provided
 * the execution options request first-failure notification. Email alerting
 * happens first, followed by an optional custom alerter selected via the
 * "alert.type" flow parameter. Alerter failures are logged, never thrown.
 */
public static void alertUserOnFirstError(final ExecutableFlow flow,
    final AlerterHolder alerterHolder) {
  final ExecutionOptions options = flow.getExecutionOptions();
  if (!options.getNotifyOnFirstFailure()) {
    return;
  }
  logger.info("Alert on first error of execution " + flow.getExecutionId());
  final Alerter mailAlerter = alerterHolder.get("email");
  try {
    mailAlerter.alertOnFirstError(flow);
  } catch (final Exception e) {
    logger.error("Failed to send first error email." + e.getMessage(), e);
  }
  // Optional custom alerter chosen per-execution via "alert.type".
  if (!options.getFlowParameters().containsKey("alert.type")) {
    return;
  }
  final String alertType = options.getFlowParameters().get("alert.type");
  final Alerter alerter = alerterHolder.get(alertType);
  if (alerter == null) {
    logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
    return;
  }
  try {
    alerter.alertOnFirstError(flow);
  } catch (final Exception e) {
    logger.error("Failed to alert by " + alertType, e);
  }
}
Alert the user when the flow has encountered the first error .
22,070
/**
 * Builds the reasons used to finalize the flow: the caller-supplied reason,
 * plus the stack trace of {@code originalError} when one is provided.
 *
 * @param reason the primary reason string
 * @param originalError optional error whose stack trace is appended; may be null
 * @return the reasons as an array
 */
public static String[] getFinalizeFlowReasons(final String reason,
    final Throwable originalError) {
  // ArrayList instead of LinkedList: only appends and a final toArray here.
  final List<String> reasons = new ArrayList<>();
  reasons.add(reason);
  if (originalError != null) {
    reasons.add(ExceptionUtils.getStackTrace(originalError));
  }
  return reasons.toArray(new String[reasons.size()]);
}
Get the reasons to finalize the flow .
22,071
/**
 * Sets the flow status to failed and finalizes every node inside the flow:
 * terminal/disabled nodes are left alone, never-started (READY) nodes are
 * marked KILLING, and everything else is marked FAILED. Missing start/end
 * timestamps are backfilled with the current time.
 */
public static void failEverything(final ExecutableFlow exFlow) {
  final long time = System.currentTimeMillis();
  for (final ExecutableNode node : exFlow.getExecutableNodes()) {
    switch (node.getStatus()) {
      // Already-terminal or disabled nodes keep their status.
      case SUCCEEDED:
      case FAILED:
      case KILLED:
      case SKIPPED:
      case DISABLED:
        continue;
      case READY:
        // NOTE(review): nodes that never started are set to KILLING rather
        // than a terminal status — confirm something later transitions them.
        node.setStatus(Status.KILLING);
        break;
      default:
        // Anything in-flight is considered failed.
        node.setStatus(Status.FAILED);
        break;
    }
    // Backfill timestamps so downstream reporting sees sane values.
    if (node.getStartTime() == -1) {
      node.setStartTime(time);
    }
    if (node.getEndTime() == -1) {
      node.setEndTime(time);
    }
  }
  if (exFlow.getEndTime() == -1) {
    exFlow.setEndTime(time);
  }
  exFlow.setStatus(Status.FAILED);
}
Set the flow status to failed and fail every node inside the flow .
22,072
/**
 * Checks if the flow status is one of the finished states
 * (SUCCEEDED, FAILED or KILLED).
 *
 * @return true if the flow has finished
 */
public static boolean isFinished(final ExecutableFlow flow) {
  final Status status = flow.getStatus();
  return status == Status.SUCCEEDED
      || status == Status.FAILED
      || status == Status.KILLED;
}
Check if the flow status is finished .
22,073
/**
 * Entry point: starts the Azkaban executor server on Jetty. Loads the server
 * properties from the command-line args, builds the Guice injector and
 * launches the server.
 */
public static void main(final String[] args) throws Exception {
  // Route stdout/stderr through the logging framework.
  StdOutErrRedirect.redirectOutAndErrToLog();
  logger.info("Starting Jetty Azkaban Executor...");
  if (System.getSecurityManager() == null) {
    // Install an allow-all Policy before enabling a SecurityManager.
    // NOTE(review): the reason for enabling a SecurityManager with a fully
    // permissive policy is not evident from this file — confirm intent
    // (e.g. intercepting System.exit) before changing.
    Policy.setPolicy(new Policy() {
      public boolean implies(final ProtectionDomain domain, final Permission permission) {
        // Grant every permission.
        return true;
      }
    });
    System.setSecurityManager(new SecurityManager());
  }
  final Props props = AzkabanServer.loadProps(args);
  if (props == null) {
    logger.error("Azkaban Properties not loaded.");
    logger.error("Exiting Azkaban Executor Server...");
    return;
  }
  // Wire dependencies and expose the injector globally.
  final Injector injector = Guice.createInjector(
      new AzkabanCommonModule(props),
      new AzkabanExecServerModule());
  SERVICE_PROVIDER.setInjector(injector);
  launch(injector.getInstance(AzkabanExecutorServer.class));
}
Entry point that launches the Azkaban executor server using Jetty.
22,074
/**
 * Configures metric reporting as per azkaban.properties settings. No-op
 * unless "executor.metric.reports" is true. Each metric reads its reporting
 * interval from METRIC_INTERVAL + &lt;metric name&gt;, falling back to
 * METRIC_INTERVAL + "default".
 *
 * @throws MetricException if a metric cannot be registered
 */
private void configureMetricReports() throws MetricException {
  final Props props = getAzkabanProps();
  if (props != null && props.getBoolean("executor.metric.reports", false)) {
    logger.info("Starting to configure Metric Reports");
    final MetricReportManager metricManager = MetricReportManager.getInstance();
    // Metrics are emitted to an in-memory store.
    final IMetricEmitter metricEmitter = new InMemoryMetricEmitter(props);
    metricManager.addMetricEmitter(metricEmitter);
    logger.info("Adding number of failed flow metric");
    metricManager.addMetric(new NumFailedFlowMetric(metricManager, props
        .getInt(METRIC_INTERVAL + NumFailedFlowMetric.NUM_FAILED_FLOW_METRIC_NAME,
            props.getInt(METRIC_INTERVAL + "default"))));
    logger.info("Adding number of failed jobs metric");
    metricManager.addMetric(new NumFailedJobMetric(metricManager, props
        .getInt(METRIC_INTERVAL + NumFailedJobMetric.NUM_FAILED_JOB_METRIC_NAME,
            props.getInt(METRIC_INTERVAL + "default"))));
    logger.info("Adding number of running Jobs metric");
    metricManager.addMetric(new NumRunningJobMetric(metricManager, props
        .getInt(METRIC_INTERVAL + NumRunningJobMetric.NUM_RUNNING_JOB_METRIC_NAME,
            props.getInt(METRIC_INTERVAL + "default"))));
    logger.info("Adding number of running flows metric");
    // Flow-level metrics additionally need the runner manager as a source.
    metricManager.addMetric(new NumRunningFlowMetric(this.runnerManager, metricManager, props
        .getInt(METRIC_INTERVAL + NumRunningFlowMetric.NUM_RUNNING_FLOW_METRIC_NAME,
            props.getInt(METRIC_INTERVAL + "default"))));
    logger.info("Adding number of queued flows metric");
    metricManager.addMetric(new NumQueuedFlowMetric(this.runnerManager, metricManager, props
        .getInt(METRIC_INTERVAL + NumQueuedFlowMetric.NUM_QUEUED_FLOW_METRIC_NAME,
            props.getInt(METRIC_INTERVAL + "default"))));
    logger.info("Completed configuring Metric Reports");
  }
}
Configure Metric Reporting as per azkaban . properties settings
22,075
/**
 * Loads and instantiates a custom class named by the configuration key
 * CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY, passing the server properties to
 * its constructor. No-op when the property is not set.
 *
 * @throws IllegalStateException if the class cannot be loaded or instantiated
 */
private void loadCustomJMXAttributeProcessor(final Props props) {
  final String jmxAttributeEmitter = props.get(CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY);
  if (jmxAttributeEmitter == null) {
    logger.info("No value for property: " + CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY
        + " was found");
    return;
  }
  try {
    logger.info("jmxAttributeEmitter: " + jmxAttributeEmitter);
    // Look up the Properties-taking constructor explicitly instead of
    // blindly invoking getConstructors()[0] — the order of the array
    // returned by getConstructors() is unspecified.
    Class.forName(jmxAttributeEmitter)
        .getConstructor(java.util.Properties.class)
        .newInstance(props.toProperties());
  } catch (final Exception e) {
    logger.error("Encountered error while loading and instantiating "
        + jmxAttributeEmitter, e);
    throw new IllegalStateException("Encountered error while loading and instantiating "
        + jmxAttributeEmitter, e);
  }
}
Load a custom class which is provided by a configuration CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY .
22,076
/**
 * Gets the hostname: the explicitly configured server hostname when present
 * and non-empty, otherwise the canonical name of the local host.
 *
 * @return the hostname, or "unkownHost" if resolution fails
 */
public String getHost() {
  if (this.props.containsKey(ConfigurationKeys.AZKABAN_SERVER_HOST_NAME)) {
    final String hostName =
        this.props.getString(ConfigurationKeys.AZKABAN_SERVER_HOST_NAME);
    if (!StringUtils.isEmpty(hostName)) {
      return hostName;
    }
  }
  // Fallback value kept byte-identical (including the typo) for backward
  // compatibility with anything matching on it.
  String host = "unkownHost";
  try {
    host = InetAddress.getLocalHost().getCanonicalHostName();
  } catch (final Exception e) {
    // Log the exception itself so the root cause of the lookup failure is
    // visible (the original call dropped it).
    logger.error("Failed to fetch LocalHostName", e);
  }
  return host;
}
Get the hostname
22,077
/**
 * Gets the current server port, taken from the server's first connector.
 */
public int getPort() {
  final Connector[] connectors = this.server.getConnectors();
  // The server must expose at least one connector to have a port.
  checkState(connectors.length >= 1, "Server must have at least 1 connector");
  final Connector primary = connectors[0];
  return primary.getLocalPort();
}
Get the current server port
22,078
/**
 * Performs all configured trigger actions when the trigger condition is met.
 * Does nothing when the trigger has expired or the condition is not met.
 */
public void run() {
  if (isTriggerExpired()) {
    logger.info(this + " expired");
    return;
  }
  if (!this.triggerCondition.isMet()) {
    return;
  }
  logger.info("Condition " + this.triggerCondition.getExpression() + " met");
  for (final TriggerAction action : this.actions) {
    try {
      action.doAction();
    } catch (final Exception e) {
      // A failing action must not prevent the remaining actions from running.
      logger.error("Failed to do action " + action.getDescription() + " for execution "
          + azkaban.execapp.Trigger.this.execId, e);
    }
  }
}
Perform the action if trigger condition is met
22,079
/**
 * Fills the result set with the amount (MB) and percent of remaining system
 * memory on the server, parsed from /proc/meminfo via a bash call.
 * Free memory is computed as MemFree + Buffers + Cached + SwapCached.
 */
protected void fillRemainingMemoryPercent(final ExecutorInfo stats) {
  if (exists_Bash && exists_Cat && exists_Grep && exists_Meminfo) {
    try {
      // Grep exactly the five meminfo rows needed below.
      final ArrayList<String> output = Utils.runProcess("/bin/bash", "-c",
          "/bin/cat /proc/meminfo | grep -E \"^MemTotal:|^MemFree:|^Buffers:|^Cached:|^SwapCached:\"");
      long totalMemory = 0;
      long totalFreeMemory = 0;
      Long parsedResult = (long) 0;
      // A valid result has exactly one line per requested field.
      if (output.size() == 5) {
        for (final String result : output) {
          parsedResult = extractMemoryInfo("MemTotal", result);
          if (null != parsedResult) {
            totalMemory = parsedResult;
            continue;
          }
          parsedResult = extractMemoryInfo("MemFree", result);
          if (null != parsedResult) {
            totalFreeMemory += parsedResult;
            continue;
          }
          parsedResult = extractMemoryInfo("Buffers", result);
          if (null != parsedResult) {
            totalFreeMemory += parsedResult;
            continue;
          }
          parsedResult = extractMemoryInfo("SwapCached", result);
          if (null != parsedResult) {
            totalFreeMemory += parsedResult;
            continue;
          }
          parsedResult = extractMemoryInfo("Cached", result);
          if (null != parsedResult) {
            totalFreeMemory += parsedResult;
            continue;
          }
        }
      } else {
        logger.error(
            "failed to get total/free memory info as the bash call returned invalid result."
                + String.format(" Output from the bash call - %s ", output.toString()));
      }
      // /proc/meminfo reports kB; dividing by 1024 yields MB.
      stats.setRemainingMemoryInMB(totalFreeMemory / 1024);
      stats.setRemainingMemoryPercent(totalMemory == 0 ? 0
          : ((double) totalFreeMemory / (double) totalMemory) * 100);
    } catch (final Exception ex) {
      logger.error("failed fetch system memory info "
          + "as exception is captured when fetching result from bash call. Ex -"
          + ex.getMessage());
    }
  } else {
    // NOTE(review): this message mentions '/proc/loadavg' although the check
    // above is for /proc/meminfo — the message looks stale; confirm.
    logger.error(
        "failed fetch system memory info, one or more files from the following list are missing - "
            + "'/bin/bash'," + "'/bin/cat'," + "'/proc/loadavg'");
  }
}
fill the result set with the percent of the remaining system memory on the server .
22,080
/**
 * Calls the data providers to fill the statistics container, refreshing the
 * cached copy when {@code noCache} is requested or the cache has expired.
 */
protected synchronized void populateStatistics(final boolean noCache) {
  final boolean cacheExpired =
      System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds;
  if (!noCache && !cacheExpired) {
    // Cached copy is still fresh; nothing to do.
    return;
  }
  final ExecutorInfo stats = new ExecutorInfo();
  fillRemainingMemoryPercent(stats);
  fillRemainingFlowCapacityAndLastDispatchedTime(stats);
  fillCpuUsage(stats);
  cachedstats = stats;
  lastRefreshedTime = System.currentTimeMillis();
}
call the data providers to fill the returning data container for statistics data . This function refreshes the static cached copy of data in case if necessary .
22,081
/**
 * Fills the result set with the remaining flow capacity, the number of
 * currently assigned (running + queued) flows, and the last dispatch time.
 * Logs an error when the executor server has not been initialized yet.
 */
protected void fillRemainingFlowCapacityAndLastDispatchedTime(final ExecutorInfo stats) {
  final AzkabanExecutorServer server = AzkabanExecutorServer.getApp();
  if (server != null) {
    // Reuse the null-checked reference rather than calling getApp() a second
    // time as before, which could in principle observe a different value.
    final FlowRunnerManager runnerMgr = server.getFlowRunnerManager();
    // Capacity = configured maximum minus flows already running or queued.
    final int assignedFlows = runnerMgr.getNumRunningFlows() + runnerMgr.getNumQueuedFlows();
    stats.setRemainingFlowCapacity(runnerMgr.getMaxNumRunningFlows() - assignedFlows);
    stats.setNumberOfAssignedFlows(assignedFlows);
    stats.setLastDispatchedTime(runnerMgr.getLastFlowSubmittedTime());
  } else {
    logger.error("failed to get data for remaining flow capacity or LastDispatchedTime"
        + " as the AzkabanExecutorServer has yet been initialized.");
  }
}
fill the result set with the remaining flow capacity .
22,082
/**
 * Initializes the Hadoop proxy: reads the proxying/token flags from the
 * system properties, mirrors the proxy flag into the job properties, and
 * loads the Hadoop security manager when proxying is enabled.
 *
 * @throws RuntimeException if the security manager cannot be loaded
 */
public void init(Props sysProps, Props jobProps, final Logger logger) {
  shouldProxy = sysProps.getBoolean(HadoopSecurityManager.ENABLE_PROXYING, false);
  jobProps.put(HadoopSecurityManager.ENABLE_PROXYING, Boolean.toString(shouldProxy));
  obtainTokens = sysProps.getBoolean(HadoopSecurityManager.OBTAIN_BINARY_TOKEN, false);
  if (shouldProxy) {
    logger.info("Initiating hadoop security manager.");
    try {
      hadoopSecurityManager = HadoopJobUtils.loadHadoopSecurityManager(sysProps, logger);
    } catch (RuntimeException e) {
      // Log through the job logger (instead of printStackTrace) and keep the
      // original exception as the cause so the full stack trace survives.
      logger.error("Failed to get hadoop security manager!", e);
      throw new RuntimeException("Failed to get hadoop security manager!", e);
    }
  }
}
Initialize the Hadoop Proxy Object
22,083
/**
 * Sets up job properties when proxying is enabled: resolves the user to
 * proxy, obtains Hadoop tokens, and exposes the token file location to the
 * child process through its environment.
 *
 * @throws Exception if obtaining the tokens fails
 */
public void setupPropsForProxy(Props props, Props jobProps, final Logger logger)
    throws Exception {
  if (!isProxyEnabled()) {
    return;
  }
  userToProxy = jobProps.getString(HadoopSecurityManager.USER_TO_PROXY);
  logger.info("Need to proxy. Getting tokens.");
  tokenFile = HadoopJobUtils.getHadoopTokens(hadoopSecurityManager, props, logger);
  // The child process picks the token file up through this env variable.
  jobProps.put("env." + HADOOP_TOKEN_FILE_LOCATION, tokenFile.getAbsolutePath());
}
Setup Job Properties when the proxy is enabled
22,084
/**
 * Generates the JVM arguments that pass secure-proxy information to the
 * child process: the user to proxy and the binary-token flag. Returns an
 * empty string when proxying is disabled.
 */
public String getJVMArgument(Props sysProps, Props jobProps, final Logger logger) {
  if (!shouldProxy) {
    logger.info("Not setting up secure proxy info for child process");
    return "";
  }
  logger.info("Setting up secure proxy info for child process");
  final StringBuilder secure = new StringBuilder();
  secure.append(" -D").append(HadoopSecurityManager.USER_TO_PROXY).append('=')
      .append(jobProps.getString(HadoopSecurityManager.USER_TO_PROXY));
  // getString with a default never returns null, so the previous null check
  // on this value was dead; the token flag is always appended.
  final String extraToken =
      sysProps.getString(HadoopSecurityManager.OBTAIN_BINARY_TOKEN, "false");
  secure.append(" -D").append(HadoopSecurityManager.OBTAIN_BINARY_TOKEN).append('=')
      .append(extraToken);
  logger.info("Secure settings = " + secure);
  return secure.toString();
}
Generate JVM Proxy Secure Argument
22,085
/**
 * Cancels the Hadoop tokens obtained for the proxied user and deletes the
 * token file. No-op when no token file was obtained.
 */
public void cancelHadoopTokens(final Logger logger) {
  if (tokenFile == null) {
    return;
  }
  try {
    hadoopSecurityManager.cancelTokens(tokenFile, userToProxy, logger);
  } catch (final Exception e) {
    // The original had two catch blocks (HadoopSecurityManagerException and
    // Exception) with identical bodies; collapse them, and log the full
    // exception so the stack trace is preserved instead of cause + message.
    logger.error("Failed to cancel hadoop tokens", e);
  }
  if (tokenFile.exists()) {
    tokenFile.delete();
  }
}
Cancel Hadoop Tokens
22,086
/**
 * Kills all Hadoop jobs spawned by this job, identified by scanning the
 * job's log file. No-op when no token file was obtained (i.e. no proxied
 * Hadoop jobs could have been launched).
 */
public void killAllSpawnedHadoopJobs(Props jobProps, final Logger logger) {
  if (tokenFile == null) {
    // Without tokens nothing was launched under a proxy; nothing to kill.
    return;
  }
  final String jobLogPath = jobProps.getString(CommonJobProperties.JOB_LOG_FILE);
  logger.info("Log file path is: " + jobLogPath);
  HadoopJobUtils.proxyUserKillAllSpawnedHadoopJobs(jobLogPath, jobProps, tokenFile, logger);
}
Kill all Spawned Hadoop Jobs
22,087
/**
 * Updates the email notifications saved in the project's flow from the
 * project metadata ("notifications", "failureNotifications" and
 * "subscription"), then persists the flow.
 *
 * NOTE(review): only getFlows().get(0) is updated — this assumes
 * single-flow projects; confirm that holds for all callers.
 */
public static void updateProjectNotifications(final Project project, final ProjectManager pm)
    throws ProjectManagerException {
  final Flow flow = project.getFlows().get(0);
  // Success notifications: comma/semicolon/whitespace separated address list.
  final ArrayList<String> successEmails = new ArrayList<>();
  final String successNotifications = (String) project.getMetadata().get("notifications");
  final String[] successEmailSplit =
      successNotifications.split("\\s*,\\s*|\\s*;\\s*|\\s+");
  successEmails.addAll(Arrays.asList(successEmailSplit));
  // Failure notifications use the same separator conventions.
  final ArrayList<String> failureEmails = new ArrayList<>();
  final String failureNotifications =
      (String) project.getMetadata().get("failureNotifications");
  final String[] failureEmailSplit =
      failureNotifications.split("\\s*,\\s*|\\s*;\\s*|\\s+");
  failureEmails.addAll(Arrays.asList(failureEmailSplit));
  // Subscribers are appended to the success notification list.
  @SuppressWarnings("unchecked") final Map<String, String> subscription =
      (Map<String, String>) project.getMetadata().get("subscription");
  if (subscription != null) {
    successEmails.addAll(subscription.values());
  }
  // Drop blank entries produced by the splits above.
  final ArrayList<String> successEmailList = new ArrayList<>();
  for (final String email : successEmails) {
    if (!email.trim().isEmpty()) {
      successEmailList.add(email);
    }
  }
  final ArrayList<String> failureEmailList = new ArrayList<>();
  for (final String email : failureEmails) {
    if (!email.trim().isEmpty()) {
      failureEmailList.add(email);
    }
  }
  // Replace the flow's existing notification lists and persist.
  flow.getSuccessEmails().clear();
  flow.getFailureEmails().clear();
  flow.addSuccessEmails(successEmailList);
  flow.addFailureEmails(failureEmailList);
  pm.updateFlow(project, flow);
}
Updates the email notifications saved in the project s flow .
22,088
/**
 * Given a string containing multiple emails, splits it on the given regular
 * expression and returns the set of unique, non-empty entries.
 *
 * @param emailList raw address list; may be null (yields an empty set)
 * @param splitRegex separator regular expression
 * @return the unique non-empty emails
 */
public static Set<String> parseUniqueEmails(final String emailList, final String splitRegex) {
  final Set<String> uniqueEmails = new HashSet<>();
  if (emailList == null) {
    return uniqueEmails;
  }
  for (final String candidate : emailList.trim().split(splitRegex)) {
    // Splitting can yield empty tokens; drop them.
    if (!candidate.isEmpty()) {
      uniqueEmails.add(candidate);
    }
  }
  return uniqueEmails;
}
Given a string containing multiple emails splits it based on the given regular expression and returns a set containing the unique non - empty emails .
22,089
/**
 * Returns true if the given email is valid according to
 * InternetAddress.validate(), and false otherwise (including null input).
 */
public static boolean isValidEmailAddress(final String email) {
  if (email == null) {
    return false;
  }
  try {
    new InternetAddress(email).validate();
    return true;
  } catch (final AddressException ex) {
    // validate() signals an invalid address by throwing.
    return false;
  }
}
Returns true if the given email is valid and false otherwise .
22,090
/**
 * Given an email string, returns the domain part (everything after the first
 * '@') if one exists, and null otherwise (including null/empty input).
 */
public static String getEmailDomain(final String email) {
  if (email != null && !email.isEmpty()) {
    final int atSignIndex = email.indexOf('@');
    if (atSignIndex != -1) {
      return email.substring(atSignIndex + 1);
    }
  }
  return null;
}
Given an email string returns the domain part if it exists and null otherwise .
22,091
/**
 * If the current status of the execution is not one of the finished
 * statuses, marks the execution as failed in the DB; removes the execution
 * from the running-executions cache and alerts the user (unless the DB
 * update itself failed).
 */
public void finalizeFlow(final ExecutableFlow flow, final String reason,
    final Throwable originalError) {
  final int execId = flow.getExecutionId();
  boolean alertUser = true;
  // updaterStage records progress for monitoring/debugging of this updater.
  this.updaterStage.set("finalizing flow " + execId);
  try {
    final ExecutableFlow dsFlow;
    if (ExecutionControllerUtils.isFinished(flow)) {
      dsFlow = flow;
    } else {
      this.updaterStage.set("finalizing flow " + execId + " loading from db");
      // Re-fetch from the DB in case the stored copy already finished.
      dsFlow = this.executorLoader.fetchExecutableFlow(execId);
      if (!ExecutionControllerUtils.isFinished(dsFlow)) {
        this.updaterStage.set("finalizing flow " + execId + " failing the flow");
        ExecutionControllerUtils.failEverything(dsFlow);
        this.executorLoader.updateExecutableFlow(dsFlow);
      }
    }
    this.updaterStage.set("finalizing flow " + execId + " deleting active reference");
    if (flow.getEndTime() == -1) {
      flow.setEndTime(System.currentTimeMillis());
      // NOTE(review): the end time is set on 'flow' but persisted via
      // 'dsFlow', which may be a different object — confirm this is intended.
      this.executorLoader.updateExecutableFlow(dsFlow);
    }
    this.executorLoader.removeActiveExecutableReference(execId);
    this.updaterStage.set("finalizing flow " + execId + " cleaning from memory");
    this.runningExecutions.get().remove(execId);
  } catch (final ExecutorManagerException e) {
    // On DB errors the final state is unknown, so skip user alerting.
    alertUser = false;
    logger.error(e);
  }
  this.updaterStage.set("finalizing flow " + execId + " alerting and emailing");
  if (alertUser) {
    ExecutionControllerUtils.alertUserOnFlowFinished(flow, this.alerterHolder,
        ExecutionControllerUtils.getFinalizeFlowReasons(reason, originalError));
  }
}
If the current status of the execution is not one of the finished statuses marks the execution as failed in the DB . Removes the execution from the running executions cache .
22,092
/**
 * Converts a list of files to a list of file URLs. Files whose paths cannot
 * be represented as URLs are logged and skipped.
 */
private static ArrayList<URL> getUrls(File[] files) {
  final ArrayList<URL> urls = new ArrayList<>();
  for (final File entry : files) {
    try {
      urls.add(entry.toURI().toURL());
    } catch (final MalformedURLException e) {
      // Skip this file but keep converting the rest.
      logger.error("File is not convertible to URL.", e);
    }
  }
  return urls;
}
Convert a list of files to a list of files URLs
22,093
/**
 * Creates a flow loader based on whether the project directory contains a
 * valid project YAML file: YAML-defined projects get the YAML loader, all
 * others fall back to the classic directory flow loader.
 */
public FlowLoader createFlowLoader(final File projectDir) throws ProjectManagerException {
  return checkForValidProjectYamlFile(projectDir)
      ? new DirectoryYamlFlowLoader(this.props)
      : new DirectoryFlowLoader(this.props);
}
Creates flow loader based on project YAML file inside project directory .
22,094
/**
 * Loads the project validators configured in the validator XML file referenced by
 * {@code ValidatorConfigs.XML_FILE_PARAM}. Validator instances are created here rather
 * than in constructors because some validators maintain project-specific state; creating
 * them per call keeps them project-specific rather than global.
 *
 * <p>Silently returns (with a warning/error log) when the property or the file is
 * missing; throws when the XML cannot be parsed.
 *
 * @param props server properties containing the validator XML path
 * @param log   logger handed to each validator tag parser
 * @throws ValidatorManagerException if the XML parser cannot be created or the file is
 *         unreadable or not valid XML
 */
public void loadValidators ( final Props props , final Logger log ) {
  this . validators = new LinkedHashMap < > ( ) ;
  if ( ! props . containsKey ( ValidatorConfigs . XML_FILE_PARAM ) ) {
    logger . warn ( "Azkaban properties file does not contain the key " + ValidatorConfigs . XML_FILE_PARAM ) ;
    return ;
  }
  final String xmlPath = props . get ( ValidatorConfigs . XML_FILE_PARAM ) ;
  final File file = new File ( xmlPath ) ;
  if ( ! file . exists ( ) ) {
    logger . error ( "Azkaban validator configuration file " + xmlPath + " does not exist." ) ;
    return ;
  }
  final DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory . newInstance ( ) ;
  final DocumentBuilder builder ;
  try {
    builder = docBuilderFactory . newDocumentBuilder ( ) ;
  } catch ( final ParserConfigurationException e ) {
    throw new ValidatorManagerException ( "Exception while parsing validator xml. Document builder not created." , e ) ;
  }
  final Document doc ;
  try {
    doc = builder . parse ( file ) ;
  } catch ( final SAXException e ) {
    throw new ValidatorManagerException ( "Exception while parsing " + xmlPath + ". Invalid XML." , e ) ;
  } catch ( final IOException e ) {
    throw new ValidatorManagerException ( "Exception while parsing " + xmlPath + ". Error reading file." , e ) ;
  }
  // Use getDocumentElement() rather than getChildNodes().item(0): item(0) may be a
  // comment or DOCTYPE node, which would silently skip every <validator> entry.
  final Node azkabanValidators = doc . getDocumentElement ( ) ;
  final NodeList azkabanValidatorsList = azkabanValidators . getChildNodes ( ) ;
  for ( int i = 0 ; i < azkabanValidatorsList . getLength ( ) ; ++ i ) {
    final Node node = azkabanValidatorsList . item ( i ) ;
    if ( node . getNodeType ( ) == Node . ELEMENT_NODE && node . getNodeName ( ) . equals ( VALIDATOR_TAG ) ) {
      parseValidatorTag ( node , props , log ) ;
    }
  }
}
Instances of the validators are created here rather than in the constructors because some validators might need to maintain project-specific state. Instantiating the validators here ensures that the validator objects are project-specific rather than global.
22,095
/**
 * Helper to build a valid URI from its components.
 *
 * @param host   the target host
 * @param port   the target port
 * @param path   optional path; skipped when null or empty
 * @param isHttp true for "http", false for "https"
 * @param params optional query parameters, applied in order
 * @return the constructed URI
 * @throws IOException if the components do not form a syntactically valid URI
 */
@SafeVarargs // static method; the varargs array is only read, never stored or exposed
public static URI buildUri ( final String host , final int port , final String path , final boolean isHttp , final Pair < String , String > ... params ) throws IOException {
  final URIBuilder builder = new URIBuilder ( ) ;
  builder . setScheme ( isHttp ? "http" : "https" ) . setHost ( host ) . setPort ( port ) ;
  if ( null != path && ! path . isEmpty ( ) ) {
    builder . setPath ( path ) ;
  }
  if ( params != null ) {
    for ( final Pair < String , String > pair : params ) {
      builder . setParameter ( pair . getFirst ( ) , pair . getSecond ( ) ) ;
    }
  }
  try {
    return builder . build ( ) ;
  } catch ( final URISyntaxException e ) {
    // Surface as IOException so callers handle a single checked exception type.
    throw new IOException ( e ) ;
  }
}
Helper function to build a valid URI.
22,096
/**
 * Populates the request with the given parameters as a URL-encoded form body.
 * Returns the request unchanged when it is null or there are no parameters.
 *
 * @param request the request to populate (may be null)
 * @param params  name/value pairs to encode into the POST body (may be null or empty)
 * @return the same request instance, possibly with its entity set
 * @throws UnsupportedEncodingException if UTF-8 encoding is unavailable
 */
private static HttpEntityEnclosingRequestBase completeRequest ( final HttpEntityEnclosingRequestBase request , final List < Pair < String , String > > params ) throws UnsupportedEncodingException {
  if ( request == null || params == null || params . isEmpty ( ) ) {
    return request ;
  }
  final List < NameValuePair > formParams = params . stream ( )
      . map ( p -> new BasicNameValuePair ( p . getFirst ( ) , p . getSecond ( ) ) )
      . collect ( Collectors . toList ( ) ) ;
  request . setEntity ( new UrlEncodedFormEntity ( formParams , "UTF-8" ) ) ;
  return request ;
}
Helper function to populate the request with the given parameters as a URL-encoded form body.
22,097
/**
 * Performs an HTTP POST request against the given URI.
 *
 * @param uri    the target URI; when null, an error is logged and null is returned
 * @param params name/value pairs to send as the URL-encoded POST body
 * @return the parsed response, or null when the URI is null
 * @throws IOException if the request fails or the response cannot be read
 */
public T httpPost ( final URI uri , final List < Pair < String , String > > params ) throws IOException {
  if ( uri == null ) {
    logger . error ( " unable to perform httpPost as the passed uri is null." ) ;
    return null ;
  }
  return this . sendAndReturn ( completeRequest ( new HttpPost ( uri ) , params ) ) ;
}
Performs an HTTP POST request to the given URI.
22,098
/**
 * Dispatches the request over a fresh default HTTP client and returns the parsed
 * response. The client is closed before returning.
 *
 * @param request the request to execute
 * @return the response parsed via {@code parseResponse}
 * @throws IOException if the request fails or the response cannot be read
 */
protected T sendAndReturn ( final HttpUriRequest request ) throws IOException {
  try ( final CloseableHttpClient httpClient = HttpClients . createDefault ( ) ) {
    return this . parseResponse ( httpClient . execute ( request ) ) ;
  }
}
Dispatches the request and returns the parsed response.
22,099
/**
 * Logs the incoming request in a format close to the Apache access log format:
 * client IP, user id (or "-"), quoted request line (method, URI, query or "-",
 * protocol), then either the raw User-Agent or a "browser"/"not-browser" tag
 * depending on configuration.
 *
 * @param req     the request being logged
 * @param session the authenticated session, or null when unauthenticated
 */
private void logRequest ( final HttpServletRequest req , final Session session ) {
  final StringBuilder line = new StringBuilder ( ) ;
  line . append ( getRealClientIpAddr ( req ) ) . append ( " " ) ;
  if ( session == null || session . getUser ( ) == null ) {
    line . append ( " - " ) . append ( " " ) ;
  } else {
    line . append ( session . getUser ( ) . getUserId ( ) ) . append ( " " ) ;
  }
  line . append ( "\"" ) ;
  line . append ( req . getMethod ( ) ) . append ( " " ) ;
  line . append ( req . getRequestURI ( ) ) . append ( " " ) ;
  final String query = req . getQueryString ( ) ;
  // Query strings of illegal POST requests are suppressed from the log.
  if ( query != null && ! isIllegalPostRequest ( req ) ) {
    line . append ( query ) . append ( " " ) ;
  } else {
    line . append ( "-" ) . append ( " " ) ;
  }
  line . append ( req . getProtocol ( ) ) . append ( "\" " ) ;
  final String userAgent = req . getHeader ( "User-Agent" ) ;
  if ( this . shouldLogRawUserAgent ) {
    line . append ( userAgent ) ;
  } else {
    // Avoid logging the raw agent string; only record whether it looks like a browser.
    line . append ( StringUtils . isFromBrowser ( userAgent ) ? "browser" : "not-browser" ) ;
  }
  logger . info ( line . toString ( ) ) ;
}
Logs the incoming request; the format should be close to the Apache access log format.