Columns: idx (int64, 0-63k) · question (string, 53-5.28k chars) · target (string, 5-805 chars)
17,100
def heartbeat(queue_name, task_id, owner, message, index):
    task = _get_task_with_policy(queue_name, task_id, owner)
    if task.heartbeat_number > index:
        return False
    task.heartbeat = message
    task.heartbeat_number = index
    now = datetime.datetime.utcnow()
    timeout_delta = task.eta - task.last_lease
    task.eta = now + timeout_delta
    task.last_lease = now
    db.session.add(task)
    signals.task_updated.send(app, task=task)
    return True
Sets the heartbeat status of the task and extends its lease.
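Note the lease math: the new eta preserves the task's original timeout window rather than adding a fixed increment. A worked illustration with assumed times:
# Assume the task was leased at 12:00:00 with eta 12:01:00 (a 60s window).
# A heartbeat arriving at 12:00:45 yields:
#   timeout_delta = 12:01:00 - 12:00:00 = 60s
#   eta = 12:00:45 + 60s = 12:01:45, last_lease = 12:00:45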
17,101
def finish(queue_name, task_id, owner, error=False):
    task = _get_task_with_policy(queue_name, task_id, owner)
    if not task.status == WorkQueue.LIVE:
        logging.warning('Finishing already dead task. queue=%r, task_id=%r, '
                        'owner=%r, status=%r',
                        task.queue_name, task_id, owner, task.status)
        return False
    if not error:
        task.status = WorkQueue.DONE
    else:
        task.status = WorkQueue.ERROR
    task.finished = datetime.datetime.utcnow()
    db.session.add(task)
    signals.task_updated.send(app, task=task)
    return True
Marks a work item on a queue as finished.
17,102
def cancel(**kwargs):
    task_list = _query(**kwargs)
    for task in task_list:
        task.status = WorkQueue.CANCELED
        task.finished = datetime.datetime.utcnow()
        db.session.add(task)
    return len(task_list)
Cancels work items matching the given criteria.
17,103
def handle_add(queue_name):
    source = request.form.get('source', request.remote_addr, type=str)
    try:
        task_id = work_queue.add(
            queue_name,
            payload=request.form.get('payload', type=str),
            content_type=request.form.get('content_type', type=str),
            source=source,
            task_id=request.form.get('task_id', type=str))
    except work_queue.Error, e:
        return utils.jsonify_error(e)
    db.session.commit()
    logging.info('Task added: queue=%r, task_id=%r, source=%r',
                 queue_name, task_id, source)
    return flask.jsonify(task_id=task_id)
Adds a task to a queue.
17,104
def handle_lease(queue_name):
    owner = request.form.get('owner', request.remote_addr, type=str)
    try:
        task_list = work_queue.lease(
            queue_name,
            owner,
            request.form.get('count', 1, type=int),
            request.form.get('timeout', 60, type=int))
    except work_queue.Error, e:
        return utils.jsonify_error(e)
    if not task_list:
        return flask.jsonify(tasks=[])
    db.session.commit()
    task_ids = [t['task_id'] for t in task_list]
    logging.debug('Task leased: queue=%r, task_ids=%r, owner=%r',
                  queue_name, task_ids, owner)
    return flask.jsonify(tasks=task_list)
Leases a task from a queue.
17,105
def handle_heartbeat(queue_name):
    task_id = request.form.get('task_id', type=str)
    message = request.form.get('message', type=str)
    index = request.form.get('index', type=int)
    try:
        work_queue.heartbeat(
            queue_name,
            task_id,
            request.form.get('owner', request.remote_addr, type=str),
            message,
            index)
    except work_queue.Error, e:
        return utils.jsonify_error(e)
    db.session.commit()
    logging.debug('Task heartbeat: queue=%r, task_id=%r, message=%r, index=%d',
                  queue_name, task_id, message, index)
    return flask.jsonify(success=True)
Updates the heartbeat message for a task.
17,106
def handle_finish(queue_name):
    task_id = request.form.get('task_id', type=str)
    owner = request.form.get('owner', request.remote_addr, type=str)
    error = request.form.get('error', type=str) is not None
    try:
        work_queue.finish(queue_name, task_id, owner, error=error)
    except work_queue.Error, e:
        return utils.jsonify_error(e)
    db.session.commit()
    logging.debug('Task finished: queue=%r, task_id=%r, owner=%r, error=%r',
                  queue_name, task_id, owner, error)
    return flask.jsonify(success=True)
Marks a task on a queue as finished.
17,107
def view_all_work_queues():
    count_list = list(
        db.session.query(
            work_queue.WorkQueue.queue_name,
            work_queue.WorkQueue.status,
            func.count(work_queue.WorkQueue.task_id))
        .group_by(work_queue.WorkQueue.queue_name,
                  work_queue.WorkQueue.status))
    queue_dict = {}
    for name, status, count in count_list:
        queue_dict[(name, status)] = dict(
            name=name, status=status, count=count)
    max_created_list = list(
        db.session.query(
            work_queue.WorkQueue.queue_name,
            work_queue.WorkQueue.status,
            func.max(work_queue.WorkQueue.created))
        .group_by(work_queue.WorkQueue.queue_name,
                  work_queue.WorkQueue.status))
    for name, status, newest_created in max_created_list:
        queue_dict[(name, status)]['newest_created'] = newest_created
    min_eta_list = list(
        db.session.query(
            work_queue.WorkQueue.queue_name,
            work_queue.WorkQueue.status,
            func.min(work_queue.WorkQueue.eta))
        .group_by(work_queue.WorkQueue.queue_name,
                  work_queue.WorkQueue.status))
    for name, status, oldest_eta in min_eta_list:
        queue_dict[(name, status)]['oldest_eta'] = oldest_eta
    queue_list = list(queue_dict.values())
    queue_list.sort(key=lambda x: (x['name'], x['status']))
    context = dict(queue_list=queue_list)
    return render_template('view_work_queue_index.html', **context)
Page for viewing the index of all active work queues.
17,108
def manage_work_queue(queue_name):
    modify_form = forms.ModifyWorkQueueTaskForm()
    if modify_form.validate_on_submit():
        primary_key = (modify_form.task_id.data, queue_name)
        task = work_queue.WorkQueue.query.get(primary_key)
        if task:
            logging.info('Action: %s task_id=%r',
                         modify_form.action.data, modify_form.task_id.data)
            if modify_form.action.data == 'retry':
                task.status = work_queue.WorkQueue.LIVE
                task.lease_attempts = 0
                task.heartbeat = 'Retrying ...'
                db.session.add(task)
            else:
                db.session.delete(task)
            db.session.commit()
        else:
            logging.warning('Could not find task_id=%r to delete',
                            modify_form.task_id.data)
        return redirect(url_for('manage_work_queue', queue_name=queue_name))
    query = (
        work_queue.WorkQueue.query
        .filter_by(queue_name=queue_name)
        .order_by(work_queue.WorkQueue.created.desc()))
    status = request.args.get('status', '', type=str).lower()
    if status in work_queue.WorkQueue.STATES:
        query = query.filter_by(status=status)
    else:
        status = None
    item_list = list(query.limit(100))
    work_list = []
    for item in item_list:
        form = forms.ModifyWorkQueueTaskForm()
        form.task_id.data = item.task_id
        form.delete.data = True
        work_list.append((item, form))
    context = dict(
        queue_name=queue_name,
        status=status,
        work_list=work_list)
    return render_template('view_work_queue.html', **context)
Page for viewing the contents of a work queue.
17,109
def retryable_transaction(attempts=3, exceptions=(OperationalError,)):
    assert len(exceptions) > 0
    assert attempts > 0

    def wrapper(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            for i in xrange(attempts):
                try:
                    return f(*args, **kwargs)
                except exceptions, e:
                    if i == (attempts - 1):
                        raise
                    logging.warning(
                        'Retryable error in transaction on attempt %d. %s: %s',
                        i + 1, e.__class__.__name__, e)
                    db.session.rollback()
        return wrapped
    return wrapper
Decorator that retries a function when expected exceptions are raised.
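A usage sketch with a hypothetical helper; each OperationalError triggers a rollback and another attempt, and the fifth consecutive failure propagates:
@retryable_transaction(attempts=5)
def commit_task(task):  # hypothetical helper, not part of the corpus
    db.session.add(task)
    db.session.commit()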
17,110
def jsonify_assert(asserted, message, status_code=400):
    if asserted:
        return
    try:
        raise AssertionError(message)
    except AssertionError, e:
        stack = traceback.extract_stack()
        stack.pop()
        logging.error('Assertion failed: %s\n%s',
                      str(e), ''.join(traceback.format_list(stack)))
        abort(jsonify_error(e, status_code=status_code))
Asserts something is true, aborting the request if not.
17,111
def jsonify_error(message_or_exception, status_code=400):
    if isinstance(message_or_exception, Exception):
        message = '%s: %s' % (
            message_or_exception.__class__.__name__, message_or_exception)
    else:
        message = message_or_exception
    logging.debug('Returning status=%s, error message: %s',
                  status_code, message)
    response = jsonify(error=message)
    response.status_code = status_code
    return response
Returns a JSON payload that indicates the request had an error.
17,112
def ignore_exceptions(f):
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except:
            logging.exception("Ignoring exception in %r", f)
    return wrapped
Decorator that catches and ignores any exceptions raised by the wrapped function.
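A short usage sketch (hypothetical cleanup helper); the wrapped call logs any exception and returns None instead of raising:
@ignore_exceptions
def remove_temp_file(path):  # hypothetical helper
    os.remove(path)

remove_temp_file('/tmp/does-not-exist')  # OSError is logged, None returned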
17,113
def timesince(when):
    if not when:
        return ''
    now = datetime.datetime.utcnow()
    if now > when:
        diff = now - when
        suffix = 'ago'
    else:
        diff = when - now
        suffix = 'from now'
    periods = (
        (diff.days / 365, 'year', 'years'),
        (diff.days / 30, 'month', 'months'),
        (diff.days / 7, 'week', 'weeks'),
        (diff.days, 'day', 'days'),
        (diff.seconds / 3600, 'hour', 'hours'),
        (diff.seconds / 60, 'minute', 'minutes'),
        (diff.seconds, 'second', 'seconds'),
    )
    for period, singular, plural in periods:
        if period:
            return '%d %s %s' % (
                period, singular if period == 1 else plural, suffix)
    return 'now'
Returns a string representing time since or time until the given datetime.
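Illustrative inputs and outputs, assuming a fixed current UTC time (expected values shown as comments):
now = datetime.datetime.utcnow()
timesince(now - datetime.timedelta(days=400))               # '1 year ago'
timesince(now - datetime.timedelta(hours=3))                # '3 hours ago'
timesince(now + datetime.timedelta(minutes=5, seconds=1))   # '5 minutes from now'
timesince(None)                                             # ''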
17,114
def human_uuid():
    return base64.b32encode(
        hashlib.sha1(uuid.uuid4().bytes).digest()).lower().strip('=')
Returns a good UUID for using as a human-readable string.
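The result is a 32-character lowercase base32 string (a SHA-1 digest is 160 bits, which base32 encodes without '=' padding). An illustrative, randomly chosen value:
human_uuid()  # e.g. 'mfqgcztbonswg5djn5xgk43vmzxgc3tf' (random each call)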
17,115
def get_deployment_timestamp():
    if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
        version_id = os.environ.get('CURRENT_VERSION_ID')
        major_version, timestamp = version_id.split('.', 1)
        return timestamp
    return 'test'
Returns a unique string representing the current deployment.
17,116
def real_main(new_url=None, baseline_url=None, upload_build_id=None,
              upload_release_name=None):
    coordinator = workers.get_coordinator()
    fetch_worker.register(coordinator)
    coordinator.start()
    item = UrlPairDiff(
        new_url,
        baseline_url,
        upload_build_id,
        upload_release_name=upload_release_name,
        heartbeat=workers.PrintWorkflow)
    item.root = True
    coordinator.input_queue.put(item)
    coordinator.wait_one()
    coordinator.stop()
    coordinator.join()
Runs the url_pair_diff.
17,117
def fetch_internal(item, request):
    from flask import make_response
    from werkzeug.test import EnvironBuilder
    from dpxdt.server import app

    environ_base = {
        'REMOTE_ADDR': '127.0.0.1',
    }
    data = request.get_data()
    if data and not isinstance(data, str):
        data = ''.join(list(data))
    builder = EnvironBuilder(
        path=request.get_selector(),
        base_url='%s://%s' % (request.get_type(), request.get_host()),
        method=request.get_method(),
        data=data,
        headers=request.header_items(),
        environ_base=environ_base)
    with app.request_context(builder.get_environ()):
        response = make_response(app.dispatch_request())
        LOGGER.info('"%s" %s via internal routing',
                    request.get_selector(), response.status_code)
        item.status_code = response.status_code
        item.content_type = response.mimetype
        if item.result_path:
            with open(item.result_path, 'wb') as result_file:
                for piece in response.iter_encoded():
                    result_file.write(piece)
        else:
            item.data = response.get_data()
    return item
Fetches the given request by using the local Flask context.
17,118
def fetch_normal(item, request):
    try:
        conn = urllib2.urlopen(request, timeout=item.timeout_seconds)
    except urllib2.HTTPError, e:
        conn = e
    except (urllib2.URLError, ssl.SSLError), e:
        item.status_code = 400
        return item
    try:
        item.status_code = conn.getcode()
        item.content_type = conn.info().gettype()
        if item.result_path:
            with open(item.result_path, 'wb') as result_file:
                shutil.copyfileobj(conn, result_file)
        else:
            item.data = conn.read()
    except socket.timeout, e:
        item.status_code = 400
        return item
    finally:
        conn.close()
    return item
Fetches the given request over HTTP.
17,119
def json(self):
    if self._data_json:
        return self._data_json
    if not self.data or self.content_type != 'application/json':
        return None
    self._data_json = json.loads(self.data)
    return self._data_json
Returns de-JSONed data, or None if it's a different content type.
17,120
def maybe_imgur(self, path):
    if not FLAGS.imgur_client_id:
        return path
    im = pyimgur.Imgur(FLAGS.imgur_client_id)
    uploaded_image = im.upload_image(path)
    return '%s %s' % (path, uploaded_image.link)
Uploads a file to imgur if requested via command line flags.
17,121
def real_main(release_url=None, tests_json_path=None, upload_build_id=None,
              upload_release_name=None):
    coordinator = workers.get_coordinator()
    fetch_worker.register(coordinator)
    coordinator.start()
    data = open(FLAGS.tests_json_path).read()
    tests = load_tests(data)
    item = DiffMyImages(
        release_url,
        tests,
        upload_build_id,
        upload_release_name,
        heartbeat=workers.PrintWorkflow)
    item.root = True
    coordinator.input_queue.put(item)
    coordinator.wait_one()
    coordinator.stop()
    coordinator.join()
Runs diff_my_images.
17,122
def clean_url(url, force_scheme=None):
    url = str(url)
    url_parts = urlparse.urlparse(url)
    path_parts = []
    for part in url_parts.path.split('/'):
        if part == '.':
            continue
        elif part == '..':
            if path_parts:
                path_parts.pop()
        else:
            path_parts.append(part)
    url_parts = list(url_parts)
    if force_scheme:
        url_parts[0] = force_scheme
    url_parts[2] = '/'.join(path_parts)
    if FLAGS.keep_query_string == False:
        url_parts[4] = ''
        url_parts[5] = ''
    if not url_parts[2]:
        url_parts[2] = '/'
    return urlparse.urlunparse(url_parts)
Cleans the given URL.
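Illustrative calls, assuming FLAGS.keep_query_string is False; relative path segments collapse and the query string and fragment are dropped:
clean_url('http://example.com/a/./b/../c?x=1#frag')
# -> 'http://example.com/a/c'
clean_url('//example.com/page', force_scheme='https')
# -> 'https://example.com/page'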
17,123
def extract_urls(url, data, unescape=HTMLParser.HTMLParser().unescape):
    parts = urlparse.urlparse(url)
    prefix = '%s://%s' % (parts.scheme, parts.netloc)
    accessed_dir = os.path.dirname(parts.path)
    if not accessed_dir.endswith('/'):
        accessed_dir += '/'
    for pattern, replacement in REPLACEMENT_REGEXES:
        fixed = replacement % {
            'base': prefix,
            'accessed_dir': accessed_dir,
        }
        data = re.sub(pattern, fixed, data)
    result = set()
    for match in re.finditer(MAYBE_HTML_URL_REGEX, data):
        found_url = unescape(match.groupdict()['absurl'])
        found_url = clean_url(found_url, force_scheme=parts[0])
        result.add(found_url)
    return result
Extracts the URLs from an HTML document.
17,124
def prune_urls(url_set, start_url, allowed_list, ignored_list):
    result = set()
    for url in url_set:
        allowed = False
        for allow_url in allowed_list:
            if url.startswith(allow_url):
                allowed = True
                break
        if not allowed:
            continue
        ignored = False
        for ignore_url in ignored_list:
            if url.startswith(ignore_url):
                ignored = True
                break
        if ignored:
            continue
        prefix, suffix = (url.rsplit('.', 1) + [''])[:2]
        if suffix.lower() in IGNORE_SUFFIXES:
            continue
        result.add(url)
    return result
Prunes URLs that should be ignored.
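A worked example, assuming IGNORE_SUFFIXES contains 'png': a URL survives only if it matches an allowed prefix, matches no ignored prefix, and does not end in an ignored file suffix:
urls = set([
    'http://example.com/page',
    'http://example.com/static/logo.png',
    'http://example.com/private/admin',
    'http://other.com/page',
])
prune_urls(urls, 'http://example.com/',
           allowed_list=['http://example.com/'],
           ignored_list=['http://example.com/private/'])
# -> set(['http://example.com/page'])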
17,125
def real_main(start_url=None, ignore_prefixes=None, upload_build_id=None,
              upload_release_name=None):
    coordinator = workers.get_coordinator()
    fetch_worker.register(coordinator)
    coordinator.start()
    item = SiteDiff(
        start_url=start_url,
        ignore_prefixes=ignore_prefixes,
        upload_build_id=upload_build_id,
        upload_release_name=upload_release_name,
        heartbeat=workers.PrintWorkflow)
    item.root = True
    coordinator.input_queue.put(item)
    coordinator.wait_one()
    coordinator.stop()
    coordinator.join()
Runs the site_diff.
17,126
def render_or_send(func, message):
    if request.endpoint != func.func_name:
        mail.send(message)
    if current_user.is_authenticated() and current_user.superuser:
        return render_template('debug_email.html', message=message)
Renders an email message for debugging, or actually sends it.
17,127
def send_ready_for_review(build_id, release_name, release_number):
    build = models.Build.query.get(build_id)
    if not build.send_email:
        logging.debug(
            'Not sending ready for review email because build does not have '
            'email enabled. build_id=%r', build.id)
        return
    ops = operations.BuildOps(build_id)
    release, run_list, stats_dict, _ = ops.get_release(
        release_name, release_number)
    if not run_list:
        logging.debug(
            'Not sending ready for review email because there are '
            'no runs. build_id=%r, release_name=%r, release_number=%d',
            build.id, release.name, release.number)
        return
    title = '%s: %s - Ready for review' % (build.name, release.name)
    email_body = render_template(
        'email_ready_for_review.html',
        build=build,
        release=release,
        run_list=run_list,
        stats_dict=stats_dict)
    recipients = []
    if build.email_alias:
        recipients.append(build.email_alias)
    else:
        for user in build.owners:
            recipients.append(user.email_address)
    if not recipients:
        logging.debug(
            'Not sending ready for review email because there are no '
            'recipients. build_id=%r, release_name=%r, release_number=%d',
            build.id, release.name, release.number)
        return
    message = Message(title, recipients=recipients)
    message.html = email_body
    logging.info('Sending ready for review email for build_id=%r, '
                 'release_name=%r, release_number=%d to %r',
                 build.id, release.name, release.number, recipients)
    return render_or_send(send_ready_for_review, message)
Sends an email indicating that the release is ready for review.
17,128
def homepage():
    if current_user.is_authenticated():
        if not login_fresh():
            logging.debug('User needs a fresh token')
            abort(login.needs_refresh())
        auth.claim_invitations(current_user)
    build_list = operations.UserOps(current_user.get_id()).get_builds()
    return render_template(
        'home.html',
        build_list=build_list,
        show_video_and_promo_text=app.config['SHOW_VIDEO_AND_PROMO_TEXT'])
Renders the homepage.
17,129
def new_build():
    form = forms.BuildForm()
    if form.validate_on_submit():
        build = models.Build()
        form.populate_obj(build)
        build.owners.append(current_user)
        db.session.add(build)
        db.session.flush()
        auth.save_admin_log(build, created_build=True, message=build.name)
        db.session.commit()
        operations.UserOps(current_user.get_id()).evict()
        logging.info('Created build via UI: build_id=%r, name=%r',
                     build.id, build.name)
        return redirect(url_for('view_build', id=build.id))
    return render_template('new_build.html', build_form=form)
Page for creating or editing a build.
17,130
def view_build():
    build = g.build
    page_size = min(request.args.get('page_size', 10, type=int), 50)
    offset = request.args.get('offset', 0, type=int)
    ops = operations.BuildOps(build.id)
    has_next_page, candidate_list, stats_counts = ops.get_candidates(
        page_size, offset)
    release_dict = {}
    created_dict = {}
    run_stats_dict = {}
    for candidate in candidate_list:
        release_list = release_dict.setdefault(candidate.name, [])
        release_list.append(candidate)
        max_created = created_dict.get(candidate.name, candidate.created)
        created_dict[candidate.name] = max(candidate.created, max_created)
        run_stats_dict[candidate.id] = dict(
            runs_total=0,
            runs_complete=0,
            runs_successful=0,
            runs_failed=0,
            runs_baseline=0,
            runs_pending=0)
    for release_list in release_dict.itervalues():
        release_list.sort(key=lambda x: x.number, reverse=True)
    release_age_list = [
        (value, key) for key, value in created_dict.iteritems()]
    release_age_list.sort(reverse=True)
    release_name_list = [key for _, key in release_age_list]
    for candidate_id, status, count in stats_counts:
        stats_dict = run_stats_dict[candidate_id]
        for key in ops.get_stats_keys(status):
            stats_dict[key] += count
    return render_template(
        'view_build.html',
        build=build,
        release_name_list=release_name_list,
        release_dict=release_dict,
        run_stats_dict=run_stats_dict,
        has_next_page=has_next_page,
        current_offset=offset,
        next_offset=offset + page_size,
        last_offset=max(0, offset - page_size),
        page_size=page_size)
Page for viewing all releases in a build.
17,131
def view_release():
    build = g.build
    if request.method == 'POST':
        form = forms.ReleaseForm(request.form)
    else:
        form = forms.ReleaseForm(request.args)
    form.validate()
    ops = operations.BuildOps(build.id)
    release, run_list, stats_dict, approval_log = ops.get_release(
        form.name.data, form.number.data)
    if not release:
        abort(404)
    if request.method == 'POST':
        decision_states = (
            models.Release.REVIEWING,
            models.Release.RECEIVING,
            models.Release.PROCESSING)
        if form.good.data and release.status in decision_states:
            release.status = models.Release.GOOD
            auth.save_admin_log(build, release_good=True, release=release)
        elif form.bad.data and release.status in decision_states:
            release.status = models.Release.BAD
            auth.save_admin_log(build, release_bad=True, release=release)
        elif form.reviewing.data and release.status in (
                models.Release.GOOD, models.Release.BAD):
            release.status = models.Release.REVIEWING
            auth.save_admin_log(build, release_reviewing=True, release=release)
        else:
            logging.warning(
                'Bad state transition for name=%r, number=%r, form=%r',
                release.name, release.number, form.data)
            abort(400)
        db.session.add(release)
        db.session.commit()
        ops.evict()
        return redirect(url_for(
            'view_release',
            id=build.id,
            name=release.name,
            number=release.number))
    form.good.data = True
    form.bad.data = True
    form.reviewing.data = True
    return render_template(
        'view_release.html',
        build=build,
        release=release,
        run_list=run_list,
        release_form=form,
        approval_log=approval_log,
        stats_dict=stats_dict)
Page for viewing all test runs in a release.
17,132
def _get_artifact_context(run, file_type):
    sha1sum = None
    image_file = False
    log_file = False
    config_file = False
    if request.path == '/image':
        image_file = True
        if file_type == 'before':
            sha1sum = run.ref_image
        elif file_type == 'diff':
            sha1sum = run.diff_image
        elif file_type == 'after':
            sha1sum = run.image
        else:
            abort(400)
    elif request.path == '/log':
        log_file = True
        if file_type == 'before':
            sha1sum = run.ref_log
        elif file_type == 'diff':
            sha1sum = run.diff_log
        elif file_type == 'after':
            sha1sum = run.log
        else:
            abort(400)
    elif request.path == '/config':
        config_file = True
        if file_type == 'before':
            sha1sum = run.ref_config
        elif file_type == 'after':
            sha1sum = run.config
        else:
            abort(400)
    return image_file, log_file, config_file, sha1sum
Gets the artifact details for the given run and file_type.
17,133
def get_coordinator():
    workflow_queue = Queue.Queue()
    complete_queue = Queue.Queue()
    coordinator = WorkflowThread(workflow_queue, complete_queue)
    coordinator.register(WorkflowItem, workflow_queue)
    return coordinator
Creates a coordinator and returns it.
17,134
def _print_repr(self, depth):
    if depth <= 0:
        return '%s.%s#%d' % (
            self.__class__.__module__, self.__class__.__name__, id(self))
    return '%s.%s(%s)#%d' % (
        self.__class__.__module__,
        self.__class__.__name__,
        self._print_tree(self._get_dict_for_repr(), depth - 1),
        id(self))
Print this WorkItem to the given stack depth.
17,135
def error(self):
    for item in self:
        if isinstance(item, WorkItem) and item.error:
            return item.error
    return None
Returns the error for this barrier and all work items, if any.
17,136
def outstanding(self):
    done_count = 0
    for item in self:
        if not self.wait_any and item.fire_and_forget:
            done_count += 1
        elif item.done:
            done_count += 1
    if self.wait_any and done_count > 0:
        return False
    if done_count == len(self):
        return False
    return True
Returns whether or not this barrier has pending work.
17,137
def get_item(self):
    if self.was_list:
        result = ResultList()
        for item in self:
            if isinstance(item, WorkflowItem):
                if item.done and not item.error:
                    result.append(item.result)
                else:
                    result.append(item)
            else:
                result.append(item)
        return result
    else:
        return self[0]
Returns the item to send back into the workflow generator.
17,138
def start(self):
    assert not self.interrupted
    for thread in self.worker_threads:
        thread.start()
    WorkerThread.start(self)
Starts the coordinator thread and all related worker threads.
17,139
def stop(self):
    if self.interrupted:
        return
    for thread in self.worker_threads:
        thread.interrupted = True
    self.interrupted = True
Stops the coordinator thread and all related threads.
17,140
def join(self):
    for thread in self.worker_threads:
        thread.join()
    WorkerThread.join(self)
Joins the coordinator thread and all worker threads.
17,141
def wait_one(self):
    while True:
        try:
            item = self.output_queue.get(True, self.polltime)
        except Queue.Empty:
            continue
        except KeyboardInterrupt:
            LOGGER.debug('Exiting')
            return
        else:
            item.check_result()
            return
Waits until this worker has finished one work item or died.
17,142
def superuser_required(f):
    @functools.wraps(f)
    @login_required
    def wrapped(*args, **kwargs):
        if not (current_user.is_authenticated() and current_user.superuser):
            abort(403)
        return f(*args, **kwargs)
    return wrapped
Requires the requestor to be a super user.
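A usage sketch on a hypothetical Flask route; login_required redirects anonymous users, and authenticated non-superusers receive a 403:
@app.route('/admin/operations')  # hypothetical route
@superuser_required
def admin_operations():
    return render_template('admin_operations.html')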
17,143
def can_user_access_build(param_name):
    build_id = (
        request.args.get(param_name, type=int) or
        request.form.get(param_name, type=int) or
        request.json[param_name])
    if not build_id:
        logging.debug('Build ID in param_name=%r was missing', param_name)
        abort(400)
    ops = operations.UserOps(current_user.get_id())
    build, user_is_owner = ops.owns_build(build_id)
    if not build:
        logging.debug('Could not find build_id=%r', build_id)
        abort(404)
    if current_user.is_authenticated() and not user_is_owner:
        ops.evict()
        claim_invitations(current_user)
        build, user_is_owner = ops.owns_build(build_id)
    if not user_is_owner:
        if current_user.is_authenticated() and current_user.superuser:
            pass
        elif request.method != 'GET':
            logging.debug('No way to log in user via modifying request')
            abort(403)
        elif build.public:
            pass
        elif current_user.is_authenticated():
            logging.debug('User does not have access to this build')
            abort(flask.Response('You cannot access this build', 403))
        else:
            logging.debug('Redirecting user to login to get build access')
            abort(login.unauthorized())
    elif not login_fresh():
        logging.debug('User login is old; forcing refresh')
        abort(login.needs_refresh())
    return build
Determines if the current user can access the build ID in the request.
17,144
def build_access_required(function_or_param_name):
    def get_wrapper(param_name, f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            g.build = can_user_access_build(param_name)
            if not utils.is_production():
                time.sleep(0.5)
            return f(*args, **kwargs)
        return wrapped

    if isinstance(function_or_param_name, basestring):
        return lambda f: get_wrapper(function_or_param_name, f)
    else:
        return get_wrapper('id', function_or_param_name)
Decorator ensures user has access to the build ID in the request.
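The decorator supports both bare and parameterized use; a sketch of the two forms on hypothetical routes (the bare form reads the build ID from the 'id' request parameter):
@app.route('/build')
@build_access_required              # looks up request param 'id'
def view_build_page():
    return render_template('view_build.html', build=g.build)

@app.route('/api_keys')
@build_access_required('build_id')  # looks up request param 'build_id'
def api_keys_page():
    return render_template('view_api_keys.html', build=g.build)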
17,145
def _get_api_key_ops():
    auth_header = request.authorization
    if not auth_header:
        logging.debug('API request lacks authorization header')
        abort(flask.Response(
            'API key required', 401,
            {'WWW-Authenticate': 'Basic realm="API key required"'}))
    return operations.ApiKeyOps(auth_header.username, auth_header.password)
Gets the operations.ApiKeyOps instance for the current request.
17,146
def current_api_key():
    if app.config.get('IGNORE_AUTH'):
        return models.ApiKey(
            id='anonymous_superuser',
            secret='',
            superuser=True)
    ops = _get_api_key_ops()
    api_key = ops.get()
    logging.debug('Authenticated as API key=%r', api_key.id)
    return api_key
Determines the API key for the current request.
17,147
def can_api_key_access_build(param_name):
    build_id = (
        request.args.get(param_name, type=int) or
        request.form.get(param_name, type=int) or
        request.json[param_name])
    utils.jsonify_assert(build_id, 'build_id required')
    if app.config.get('IGNORE_AUTH'):
        api_key = models.ApiKey(
            id='anonymous_superuser',
            secret='',
            superuser=True)
        build = models.Build.query.get(build_id)
        utils.jsonify_assert(build is not None, 'build must exist', 404)
    else:
        ops = _get_api_key_ops()
        api_key, build = ops.can_access_build(build_id)
    return api_key, build
Determines if the current API key can access the build in the request.
17,148
def build_api_access_required(f):
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        g.api_key, g.build = can_api_key_access_build('build_id')
        return f(*args, **kwargs)
    return wrapped
Decorator ensures the API key has access to the build ID in the request.
17,149
def superuser_api_key_required(f):
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        api_key = current_api_key()
        g.api_key = api_key
        utils.jsonify_assert(
            api_key.superuser,
            'API key=%r must be a super user' % api_key.id,
            403)
        return f(*args, **kwargs)
    return wrapped
Decorator ensures only superuser API keys can request this function.
17,150
def manage_api_keys():
    build = g.build
    create_form = forms.CreateApiKeyForm()
    if create_form.validate_on_submit():
        api_key = models.ApiKey()
        create_form.populate_obj(api_key)
        api_key.id = utils.human_uuid()
        api_key.secret = utils.password_uuid()
        save_admin_log(build, created_api_key=True, message=api_key.id)
        db.session.add(api_key)
        db.session.commit()
        logging.info('Created API key=%r for build_id=%r',
                     api_key.id, build.id)
        return redirect(url_for('manage_api_keys', build_id=build.id))
    create_form.build_id.data = build.id
    api_key_query = (
        models.ApiKey.query
        .filter_by(build_id=build.id)
        .order_by(models.ApiKey.created.desc())
        .limit(1000))
    revoke_form_list = []
    for api_key in api_key_query:
        form = forms.RevokeApiKeyForm()
        form.id.data = api_key.id
        form.build_id.data = build.id
        form.revoke.data = True
        revoke_form_list.append((api_key, form))
    return render_template(
        'view_api_keys.html',
        build=build,
        create_form=create_form,
        revoke_form_list=revoke_form_list)
Page for viewing and creating API keys.
17,151
def revoke_api_key():
    build = g.build
    form = forms.RevokeApiKeyForm()
    if form.validate_on_submit():
        api_key = models.ApiKey.query.get(form.id.data)
        if api_key.build_id != build.id:
            logging.debug('User does not have access to API key=%r',
                          api_key.id)
            abort(403)
        api_key.active = False
        save_admin_log(build, revoked_api_key=True, message=api_key.id)
        db.session.add(api_key)
        db.session.commit()
        ops = operations.ApiKeyOps(api_key.id, api_key.secret)
        ops.evict()
    return redirect(url_for('manage_api_keys', build_id=build.id))
Form submission handler for revoking API keys.
17,152
def claim_invitations(user):
    invitation_user_id = '%s:%s' % (
        models.User.EMAIL_INVITATION, user.email_address)
    invitation_user = models.User.query.get(invitation_user_id)
    if invitation_user:
        invited_build_list = list(invitation_user.builds)
        if not invited_build_list:
            return
        db.session.add(user)
        logging.debug('Found %d build admin invitations for id=%r, user=%r',
                      len(invited_build_list), invitation_user_id, user)
        for build in invited_build_list:
            build.owners.remove(invitation_user)
            if not build.is_owned_by(user.id):
                build.owners.append(user)
                logging.debug('Claiming invitation for build_id=%r', build.id)
                save_admin_log(build, invite_accepted=True)
            else:
                logging.debug('User already owner of build. '
                              'id=%r, build_id=%r', user.id, build.id)
            db.session.add(build)
        db.session.delete(invitation_user)
        db.session.commit()
        db.session.add(current_user)
Claims any pending invitations for the given user's email address.
17,153
def manage_admins():
    build = g.build
    db.session.add(build)
    db.session.refresh(build)
    add_form = forms.AddAdminForm()
    if add_form.validate_on_submit():
        invitation_user_id = '%s:%s' % (
            models.User.EMAIL_INVITATION, add_form.email_address.data)
        invitation_user = models.User.query.get(invitation_user_id)
        if not invitation_user:
            invitation_user = models.User(
                id=invitation_user_id,
                email_address=add_form.email_address.data)
            db.session.add(invitation_user)
        db.session.add(build)
        db.session.add(invitation_user)
        db.session.refresh(build, lockmode='update')
        build.owners.append(invitation_user)
        save_admin_log(build, invited_new_admin=True,
                       message=invitation_user.email_address)
        db.session.commit()
        logging.info('Added user=%r as owner to build_id=%r',
                     invitation_user.id, build.id)
        return redirect(url_for('manage_admins', build_id=build.id))
    add_form.build_id.data = build.id
    revoke_form_list = []
    for user in build.owners:
        form = forms.RemoveAdminForm()
        form.user_id.data = user.id
        form.build_id.data = build.id
        form.revoke.data = True
        revoke_form_list.append((user, form))
    return render_template(
        'view_admins.html',
        build=build,
        add_form=add_form,
        revoke_form_list=revoke_form_list)
Page for viewing and managing build admins.
17,154
def revoke_admin():
    build = g.build
    form = forms.RemoveAdminForm()
    if form.validate_on_submit():
        user = models.User.query.get(form.user_id.data)
        if not user:
            logging.debug('User being revoked admin access does not exist. '
                          'id=%r, build_id=%r', form.user_id.data, build.id)
            abort(400)
        if user == current_user:
            logging.debug('User trying to remove themself as admin. '
                          'id=%r, build_id=%r', user.id, build.id)
            abort(400)
        db.session.add(build)
        db.session.add(user)
        db.session.refresh(build, lockmode='update')
        db.session.refresh(user, lockmode='update')
        user_is_owner = build.owners.filter_by(id=user.id)
        if not user_is_owner:
            logging.debug('User being revoked admin access is not owner. '
                          'id=%r, build_id=%r.', user.id, build.id)
            abort(400)
        build.owners.remove(user)
        save_admin_log(build, revoked_admin=True, message=user.email_address)
        db.session.commit()
        operations.UserOps(user.get_id()).evict()
    return redirect(url_for('manage_admins', build_id=build.id))
Form submission handler for revoking admin access to a build.
17,155
def save_admin_log(build, **kwargs):
    message = kwargs.pop('message', None)
    release = kwargs.pop('release', None)
    run = kwargs.pop('run', None)
    if not len(kwargs) == 1:
        raise TypeError('Must specify a LOG_TYPE argument')
    log_enum = kwargs.keys()[0]
    log_type = getattr(models.AdminLog, log_enum.upper(), None)
    if not log_type:
        raise TypeError('Bad log_type argument: %s' % log_enum)
    if current_user.is_anonymous():
        user_id = None
    else:
        user_id = current_user.get_id()
    log = models.AdminLog(
        build_id=build.id,
        log_type=log_type,
        message=message,
        user_id=user_id)
    if release:
        log.release_id = release.id
    if run:
        log.run_id = run.id
        log.release_id = run.release_id
    db.session.add(log)
Saves an action to the admin log.
17,156
def view_admin_log():
    build = g.build
    log_list = (
        models.AdminLog.query
        .filter_by(build_id=build.id)
        .order_by(models.AdminLog.created.desc())
        .all())
    return render_template(
        'view_admin_log.html',
        build=build,
        log_list=log_list)
Page for viewing the log of admin activity.
17,157
def verify_binary(flag_name, process_args=None):
    if process_args is None:
        process_args = []
    path = getattr(FLAGS, flag_name)
    if not path:
        logging.error('Flag %r not set' % flag_name)
        sys.exit(1)
    with open(os.devnull, 'w') as dev_null:
        try:
            subprocess.check_call(
                [path] + process_args,
                stdout=dev_null,
                stderr=subprocess.STDOUT)
        except:
            logging.exception('--%s binary at path %r does not work',
                              flag_name, path)
            sys.exit(1)
Exits the program if the binary from the given flag doesn't run.
17,158
def create_release():
    build = g.build
    release_name = request.form.get('release_name')
    utils.jsonify_assert(release_name, 'release_name required')
    url = request.form.get('url')
    # Fixed: the original asserted release_name here despite the 'url
    # required' message; assert the url itself.
    utils.jsonify_assert(url, 'url required')
    release = models.Release(
        name=release_name,
        url=url,
        number=1,
        build_id=build.id)
    last_candidate = (
        models.Release.query
        .filter_by(build_id=build.id, name=release_name)
        .order_by(models.Release.number.desc())
        .first())
    if last_candidate:
        release.number += last_candidate.number
        if last_candidate.status == models.Release.PROCESSING:
            canceled_task_count = work_queue.cancel(
                release_id=last_candidate.id)
            logging.info('Canceling %d tasks for previous attempt '
                         'build_id=%r, release_name=%r, release_number=%d',
                         canceled_task_count, build.id,
                         last_candidate.name, last_candidate.number)
            last_candidate.status = models.Release.BAD
            db.session.add(last_candidate)
    db.session.add(release)
    db.session.commit()
    signals.release_updated_via_api.send(app, build=build, release=release)
    logging.info('Created release: build_id=%r, release_name=%r, url=%r, '
                 'release_number=%d', build.id, release.name, url,
                 release.number)
    return flask.jsonify(
        success=True,
        build_id=build.id,
        release_name=release.name,
        release_number=release.number,
        url=url)
Creates a new release candidate for a build.
17,159
def _check_release_done_processing(release):
    if release.status != models.Release.PROCESSING:
        logging.info('Release not in processing state yet: build_id=%r, '
                     'name=%r, number=%d',
                     release.build_id, release.name, release.number)
        return False
    query = models.Run.query.filter_by(release_id=release.id)
    for run in query:
        if run.status == models.Run.NEEDS_DIFF:
            return False
        if run.ref_config and not run.ref_image:
            return False
        if run.config and not run.image:
            return False
    logging.info('Release done processing, now reviewing: build_id=%r, '
                 'name=%r, number=%d',
                 release.build_id, release.name, release.number)
    build_id = release.build_id
    release_name = release.name
    release_number = release.number

    @utils.after_this_request
    def send_notification_email(response):
        emails.send_ready_for_review(build_id, release_name, release_number)

    release.status = models.Release.REVIEWING
    db.session.add(release)
    return True
Moves a release candidate to reviewing if all runs are done.
17,160
def _get_release_params():
    release_name = request.form.get('release_name')
    utils.jsonify_assert(release_name, 'release_name required')
    release_number = request.form.get('release_number', type=int)
    utils.jsonify_assert(release_number is not None,
                         'release_number required')
    return release_name, release_number
Gets the release params from the current request.
17,161
def _find_last_good_run(build):
    run_name = request.form.get('run_name', type=str)
    utils.jsonify_assert(run_name, 'run_name required')
    last_good_release = (
        models.Release.query
        .filter_by(build_id=build.id, status=models.Release.GOOD)
        .order_by(models.Release.created.desc())
        .first())
    last_good_run = None
    if last_good_release:
        logging.debug('Found last good release for: build_id=%r, '
                      'release_name=%r, release_number=%d',
                      build.id, last_good_release.name,
                      last_good_release.number)
        last_good_run = (
            models.Run.query
            .filter_by(release_id=last_good_release.id, name=run_name)
            .first())
        if last_good_run:
            logging.debug('Found last good run for: build_id=%r, '
                          'release_name=%r, release_number=%d, '
                          'run_name=%r',
                          build.id, last_good_release.name,
                          last_good_release.number, last_good_run.name)
    return last_good_release, last_good_run
Finds the last good release and run for a build.
17,162
def find_run():
    build = g.build
    last_good_release, last_good_run = _find_last_good_run(build)
    if last_good_run:
        return flask.jsonify(
            success=True,
            build_id=build.id,
            release_name=last_good_release.name,
            release_number=last_good_release.number,
            run_name=last_good_run.name,
            url=last_good_run.url,
            image=last_good_run.image,
            log=last_good_run.log,
            config=last_good_run.config)
    return utils.jsonify_error('Run not found')
Finds the last good run of the given name for a release.
17,163
def _get_or_create_run(build):
    release_name, release_number = _get_release_params()
    run_name = request.form.get('run_name', type=str)
    utils.jsonify_assert(run_name, 'run_name required')
    release = (
        models.Release.query
        .filter_by(build_id=build.id, name=release_name,
                   number=release_number)
        .first())
    utils.jsonify_assert(release, 'release does not exist')
    run = (
        models.Run.query
        .filter_by(release_id=release.id, name=run_name)
        .first())
    if not run:
        logging.info('Created run: build_id=%r, release_name=%r, '
                     'release_number=%d, run_name=%r',
                     build.id, release.name, release.number, run_name)
        run = models.Run(
            release_id=release.id,
            name=run_name,
            status=models.Run.DATA_PENDING)
        db.session.add(run)
        db.session.flush()
    return release, run
Gets a run for a build, or creates it if it does not exist.
17,164
def _enqueue_capture(build, release, run, url, config_data, baseline=False):
    try:
        config_dict = json.loads(config_data)
    except Exception, e:
        abort(utils.jsonify_error(e))
    config_dict['targetUrl'] = url
    config_data = json.dumps(config_dict)
    config_artifact = _save_artifact(build, config_data, 'application/json')
    db.session.add(config_artifact)
    db.session.flush()
    suffix = ''
    if baseline:
        suffix = ':baseline'
    task_id = '%s:%s%s' % (run.id, hashlib.sha1(url).hexdigest(), suffix)
    logging.info('Enqueueing capture task=%r, baseline=%r', task_id, baseline)
    work_queue.add(
        constants.CAPTURE_QUEUE_NAME,
        payload=dict(
            build_id=build.id,
            release_name=release.name,
            release_number=release.number,
            run_name=run.name,
            url=url,
            config_sha1sum=config_artifact.id,
            baseline=baseline,
        ),
        build_id=build.id,
        release_id=release.id,
        run_id=run.id,
        source='request_run',
        task_id=task_id)
    if baseline:
        run.ref_url = url
        run.ref_config = config_artifact.id
    else:
        run.url = url
        run.config = config_artifact.id
Enqueues a task to run a capture process.
17,165
def request_run():
    build = g.build
    current_release, current_run = _get_or_create_run(build)
    current_url = request.form.get('url', type=str)
    config_data = request.form.get('config', default='{}', type=str)
    utils.jsonify_assert(current_url, 'url to capture required')
    utils.jsonify_assert(config_data, 'config document required')
    config_artifact = _enqueue_capture(
        build, current_release, current_run, current_url, config_data)
    ref_url = request.form.get('ref_url', type=str)
    ref_config_data = request.form.get('ref_config', type=str)
    utils.jsonify_assert(
        bool(ref_url) == bool(ref_config_data),
        'ref_url and ref_config must both be specified or not specified')
    if ref_url and ref_config_data:
        ref_config_artifact = _enqueue_capture(
            build, current_release, current_run, ref_url, ref_config_data,
            baseline=True)
    else:
        _, last_good_run = _find_last_good_run(build)
        if last_good_run:
            current_run.ref_url = last_good_run.url
            current_run.ref_image = last_good_run.image
            current_run.ref_log = last_good_run.log
            current_run.ref_config = last_good_run.config
    db.session.add(current_run)
    db.session.commit()
    signals.run_updated_via_api.send(
        app, build=build, release=current_release, run=current_run)
    return flask.jsonify(
        success=True,
        build_id=build.id,
        release_name=current_release.name,
        release_number=current_release.number,
        run_name=current_run.name,
        url=current_run.url,
        config=current_run.config,
        ref_url=current_run.ref_url,
        ref_config=current_run.ref_config)
Requests a new run for a release candidate.
17,166
def runs_done():
    build = g.build
    release_name, release_number = _get_release_params()
    release = (
        models.Release.query
        .filter_by(build_id=build.id, name=release_name,
                   number=release_number)
        .with_lockmode('update')
        .first())
    utils.jsonify_assert(release, 'Release does not exist')
    release.status = models.Release.PROCESSING
    db.session.add(release)
    _check_release_done_processing(release)
    db.session.commit()
    signals.release_updated_via_api.send(app, build=build, release=release)
    logging.info('Runs done for release: build_id=%r, release_name=%r, '
                 'release_number=%d',
                 build.id, release.name, release.number)
    results_url = url_for(
        'view_release',
        id=build.id,
        name=release.name,
        number=release.number,
        _external=True)
    return flask.jsonify(success=True, results_url=results_url)
Marks a release candidate as having all runs reported.
17,167
def _save_artifact(build, data, content_type):
    sha1sum = hashlib.sha1(data).hexdigest()
    artifact = models.Artifact.query.filter_by(id=sha1sum).first()
    if artifact:
        logging.debug('Upload already exists: artifact_id=%r', sha1sum)
    else:
        logging.info('Upload received: artifact_id=%r, content_type=%r',
                     sha1sum, content_type)
        artifact = models.Artifact(
            id=sha1sum,
            content_type=content_type,
            data=data)
        _artifact_created(artifact)
    artifact.owners.append(build)
    return artifact
Saves an artifact to the DB and returns it.
17,168
def upload():
    build = g.build
    utils.jsonify_assert(
        len(request.files) == 1, 'Need exactly one uploaded file')
    file_storage = request.files.values()[0]
    data = file_storage.read()
    content_type, _ = mimetypes.guess_type(file_storage.filename)
    artifact = _save_artifact(build, data, content_type)
    db.session.add(artifact)
    db.session.commit()
    return flask.jsonify(
        success=True,
        build_id=build.id,
        sha1sum=artifact.id,
        content_type=content_type)
Uploads an artifact referenced by a run.
17,169
def _get_artifact_response(artifact):
    response = flask.Response(
        artifact.data, mimetype=artifact.content_type)
    response.cache_control.public = True
    response.cache_control.max_age = 8640000
    response.set_etag(artifact.id)
    return response
Gets the response object for the given artifact.
17,170
def download():
    try:
        build = auth.can_user_access_build('build_id')
    except HTTPException:
        logging.debug('User access to artifact failed. Trying API key.')
        _, build = auth.can_api_key_access_build('build_id')
    sha1sum = request.args.get('sha1sum', type=str)
    if not sha1sum:
        logging.debug('Artifact sha1sum=%r not supplied', sha1sum)
        abort(404)
    artifact = models.Artifact.query.get(sha1sum)
    if not artifact:
        logging.debug('Artifact sha1sum=%r does not exist', sha1sum)
        abort(404)
    build_id = request.args.get('build_id', type=int)
    if not build_id:
        logging.debug('build_id missing for artifact sha1sum=%r', sha1sum)
        abort(404)
    is_owned = artifact.owners.filter_by(id=build_id).first()
    if not is_owned:
        logging.debug('build_id=%r not owner of artifact sha1sum=%r',
                      build_id, sha1sum)
        abort(403)

    @utils.after_this_request
    def no_session(response):
        if 'Set-Cookie' in response.headers:
            del response.headers['Set-Cookie']

    if not utils.is_production():
        time.sleep(1.5)
    if request.if_none_match and request.if_none_match.contains(sha1sum):
        response = flask.Response(status=304)
        return response
    return _get_artifact_response(artifact)
Downloads an artifact by its content hash.
17,171
def evict(self):
    logging.debug('Evicting cache for %r', self.cache_key)
    _clear_version_cache(self.cache_key)
    self.versioned_cache_key = None
Evicts all caches related to these operations.
17,172
def sort_run(run):
    if run.status in models.Run.DIFF_NEEDED_STATES:
        return (0, run.name)
    return (1, run.name)
Sort key function for runs within a release.
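A usage sketch: runs that still need a diff sort first, and runs sort alphabetically by name within each group (run_list is a hypothetical list of Run objects):
run_list.sort(key=sort_run)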
17,173
def parse(obj, required_properties=None, additional_properties=None,
          ignore_optional_property_errors=None):
    if not (required_properties is additional_properties is
            ignore_optional_property_errors is None):
        with parsing(
                required_properties=required_properties,
                additional_properties=additional_properties,
                ignore_optional_property_errors=ignore_optional_property_errors):
            return parse(obj)
    validator = None
    if isinstance(obj, Validator):
        validator = obj
    elif inspect.isclass(obj) and issubclass(obj, Validator):
        validator = obj()
    else:
        try:
            validator = _NAMED_VALIDATORS[obj]
        except (KeyError, TypeError):
            for factory in _VALIDATOR_FACTORIES:
                validator = factory(obj)
                if validator is not None:
                    break
        else:
            if inspect.isclass(validator) and issubclass(validator, Validator):
                _NAMED_VALIDATORS[obj] = validator = validator()
    if not isinstance(validator, Validator):
        raise SchemaError("%r cannot be parsed as a Validator" % obj)
    return validator
Try to parse the given obj as a validator instance.
17,174
def parsing(**kwargs):
    from .validators import Object
    with _VALIDATOR_FACTORIES_LOCK:
        old_values = {}
        for key, value in iteritems(kwargs):
            if value is not None:
                attr = key.upper()
                old_values[key] = getattr(Object, attr)
                setattr(Object, attr, value)
        try:
            yield
        finally:
            for key, value in iteritems(kwargs):
                if value is not None:
                    setattr(Object, key.upper(), old_values[key])
Context manager for overriding the default validator parsing rules for the following code block.
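A usage sketch (assuming the valideer-style API shown above); inside the block, object schemas require all listed properties and reject extras:
with parsing(required_properties=True, additional_properties=False):
    validator = parse({'name': 'string', 'age': 'integer'})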
17,175
def register(name, validator):
    if not isinstance(validator, Validator):
        raise TypeError("Validator instance expected, %s given"
                        % validator.__class__)
    _NAMED_VALIDATORS[name] = validator
Register a validator instance under the given name.
17,176
def accepts(**schemas):
    validate = parse(schemas).validate

    @decorator
    def validating(func, *args, **kwargs):
        validate(inspect.getcallargs(func, *args, **kwargs), adapt=False)
        return func(*args, **kwargs)
    return validating
Create a decorator for validating function parameters.
17,177
def returns(schema):
    validate = parse(schema).validate

    @decorator
    def validating(func, *args, **kwargs):
        ret = func(*args, **kwargs)
        validate(ret, adapt=False)
        return ret
    return validating
Create a decorator for validating function return value.
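A usage sketch combining the two decorators above (valideer-style named string schemas assumed); invalid arguments or return values raise a validation error:
@accepts(a='number', b='number')
@returns('number')
def add(a, b):
    return a + b

add(1, 2)      # -> 3
add(1, 'two')  # raises a validation error for argument b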
17,178
def adapts(**schemas):
    validate = parse(schemas).validate

    @decorator
    def adapting(func, *args, **kwargs):
        adapted = validate(
            inspect.getcallargs(func, *args, **kwargs), adapt=True)
        argspec = inspect.getargspec(func)
        if argspec.varargs is argspec.keywords is None:
            return func(**adapted)
        adapted_varargs = adapted.pop(argspec.varargs, ())
        adapted_keywords = adapted.pop(argspec.keywords, {})
        if not adapted_varargs:
            if adapted_keywords:
                adapted.update(adapted_keywords)
            return func(**adapted)
        adapted_posargs = [adapted[arg] for arg in argspec.args]
        adapted_posargs.extend(adapted_varargs)
        return func(*adapted_posargs, **adapted_keywords)
    return adapting
Create a decorator for validating and adapting function parameters.
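A usage sketch; unlike accepts, the function body receives the adapted (converted) values. This assumes the library's AdaptTo adapter validator:
@adapts(n=AdaptTo(int))
def square(n):
    return n * n

square('5')  # -> 25; the string '5' is adapted to the int 5 before the call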
17,179
def get_checksum_metadata_tag(self):
    if not self._checksums:
        print("Warning: No checksums have been computed for this file.")
    return {str(_hash_name): str(_hash_value)
            for _hash_name, _hash_value in self._checksums.items()}
Returns a map of checksum values keyed by the name of the hashing function that produced them.
17,180
def compute_checksum(self):
    if self._filename.startswith("s3://"):
        print("Warning: Did not perform client-side checksumming for file "
              "in S3. To be implemented.")
    else:
        checksumCalculator = self.ChecksumCalculator(self._filename)
        self._checksums = checksumCalculator.compute()
Calculates checksums for a given file.
17,181
def upload_files(self, file_paths, file_size_sum=0, dcp_type="data",
                 target_filename=None, use_transfer_acceleration=True,
                 report_progress=False, sync=True):
    self._setup_s3_agent_for_file_upload(
        file_count=len(file_paths),
        file_size_sum=file_size_sum,
        use_transfer_acceleration=use_transfer_acceleration)
    pool = ThreadPool()
    if report_progress:
        print("\nStarting upload of %s files to upload area %s"
              % (len(file_paths), self.uuid))
    for file_path in file_paths:
        pool.add_task(self._upload_file, file_path,
                      target_filename=target_filename,
                      use_transfer_acceleration=use_transfer_acceleration,
                      report_progress=report_progress,
                      sync=sync)
    pool.wait_for_completion()
    if report_progress:
        number_of_errors = len(self.s3agent.failed_uploads)
        if number_of_errors == 0:
            print("Completed upload of %d files to upload area %s\n"
                  % (self.s3agent.file_upload_completed_count, self.uuid))
        else:
            error = "\nThe following files failed:"
            for k, v in self.s3agent.failed_uploads.items():
                error += "\n%s: [Exception] %s" % (k, v)
            error += ("\nPlease retry or contact an hca administrator "
                      "at data-help@humancellatlas.org for help.\n")
            raise UploadException(error)
Takes a list of file paths and other optional args, and uploads the files in parallel.
17,182
def validation_status(self, filename):
    return self.upload_service.api_client.validation_status(
        area_uuid=self.uuid, filename=filename)
Get status and results of the latest validation job for a file.
17,183
def _item_exists_in_bucket(self, bucket, key, checksums):
    try:
        obj = self.target_s3.meta.client.head_object(Bucket=bucket, Key=key)
        # Fixed: the original called obj.containsKey('Metadata'), which is
        # not a Python dict method; use the 'in' operator instead.
        if obj and 'Metadata' in obj:
            if obj['Metadata'] == checksums:
                return True
    except ClientError:
        return False
Returns true if the key already exists in the given bucket and the client-side checksums match the file's checksums; false otherwise.
17,184
def upload_to_cloud ( file_handles , staging_bucket , replica , from_cloud = False ) : s3 = boto3 . resource ( "s3" ) file_uuids = [ ] key_names = [ ] abs_file_paths = [ ] if from_cloud : file_uuids , key_names = _copy_from_s3 ( file_handles [ 0 ] , s3 ) else : destination_bucket = s3 . Bucket ( staging_bucket ) for raw_fh in file_handles : file_size = os . path . getsize ( raw_fh . name ) multipart_chunksize = s3_multipart . get_s3_multipart_chunk_size ( file_size ) tx_cfg = TransferConfig ( multipart_threshold = s3_multipart . MULTIPART_THRESHOLD , multipart_chunksize = multipart_chunksize ) with ChecksummingBufferedReader ( raw_fh , multipart_chunksize ) as fh : file_uuid = str ( uuid . uuid4 ( ) ) key_name = "{}/{}" . format ( file_uuid , os . path . basename ( fh . raw . name ) ) destination_bucket . upload_fileobj ( fh , key_name , Config = tx_cfg , ExtraArgs = { 'ContentType' : _mime_type ( fh . raw . name ) , } ) sums = fh . get_checksums ( ) metadata = { "hca-dss-s3_etag" : sums [ "s3_etag" ] , "hca-dss-sha1" : sums [ "sha1" ] , "hca-dss-sha256" : sums [ "sha256" ] , "hca-dss-crc32c" : sums [ "crc32c" ] , } s3 . meta . client . put_object_tagging ( Bucket = destination_bucket . name , Key = key_name , Tagging = dict ( TagSet = encode_tags ( metadata ) ) ) file_uuids . append ( file_uuid ) key_names . append ( key_name ) abs_file_paths . append ( fh . raw . name ) return file_uuids , key_names , abs_file_paths
Upload files to the cloud staging bucket .
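get_s3_multipart_chunk_size is imported from elsewhere; a plausible sketch of what such a helper does, assuming it only has to keep the part count under S3's hard limit of 10,000 parts per upload (the base chunk size below is a placeholder):

MAX_PARTS = 10000                   # S3 allows at most 10,000 parts per upload
BASE_CHUNK_SIZE = 64 * 1024 * 1024  # hypothetical starting chunk size

def multipart_chunk_size(file_size):
    # Double the chunk size until the whole file fits in MAX_PARTS parts.
    chunk_size = BASE_CHUNK_SIZE
    while file_size / chunk_size > MAX_PARTS:
        chunk_size *= 2
    return chunk_size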
17,185
def download ( self , bundle_uuid , replica , version = "" , download_dir = "" , metadata_files = ( '*' , ) , data_files = ( '*' , ) , num_retries = 10 , min_delay_seconds = 0.25 ) : errors = 0 with concurrent . futures . ThreadPoolExecutor ( self . threads ) as executor : futures_to_dss_file = { executor . submit ( task ) : dss_file for dss_file , task in self . _download_tasks ( bundle_uuid , replica , version , download_dir , metadata_files , data_files , num_retries , min_delay_seconds ) } for future in concurrent . futures . as_completed ( futures_to_dss_file ) : dss_file = futures_to_dss_file [ future ] try : future . result ( ) except Exception as e : errors += 1 logger . warning ( 'Failed to download file %s version %s from replica %s' , dss_file . uuid , dss_file . version , dss_file . replica , exc_info = e ) if errors : raise RuntimeError ( '{} file(s) failed to download' . format ( errors ) )
Download a bundle and save it to the local filesystem as a directory .
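The error-handling pattern above — map each future back to its input, count failures as they complete, and raise once at the end — stands on its own; a stripped-down sketch (task labels and worker count are placeholders):

import concurrent.futures

def run_all(tasks):
    # tasks: mapping of label -> zero-argument callable
    errors = 0
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        futures = {executor.submit(task): label for label, task in tasks.items()}
        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()  # re-raises any exception from the task
            except Exception:
                errors += 1
                print("task {} failed".format(futures[future]))
    if errors:
        raise RuntimeError("{} task(s) failed".format(errors))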
17,186
def _download_file ( self , dss_file , dest_path , num_retries = 10 , min_delay_seconds = 0.25 ) : directory , _ = os . path . split ( dest_path ) if directory : try : os . makedirs ( directory ) except OSError as e : if e . errno != errno . EEXIST : raise with atomic_write ( dest_path , mode = "wb" , overwrite = True ) as fh : if dss_file . size == 0 : return download_hash = self . _do_download_file ( dss_file , fh , num_retries , min_delay_seconds ) if download_hash . lower ( ) != dss_file . sha256 . lower ( ) : logger . error ( "%s" , "File {}: GET FAILED. Checksum mismatch." . format ( dss_file . uuid ) ) raise ValueError ( "Expected sha256 {} Received sha256 {}" . format ( dss_file . sha256 . lower ( ) , download_hash . lower ( ) ) )
Attempt to download the data . If a retryable exception occurs we wait a bit and retry again . The delay increases each time we fail and decreases each time we successfully read a block . We set a quota for the number of failures that goes up with every successful block read and down with each failure .
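A minimal sketch of the delay/quota scheme this docstring describes, detached from HTTP specifics (read_block is a placeholder for one unit of work, and IOError stands in for the retryable exceptions):

import time

def read_all_with_adaptive_retries(read_block, num_retries=10, min_delay=0.25):
    # read_block() returns False when the stream is exhausted (an assumption).
    delay, retries_left = min_delay, num_retries
    while True:
        try:
            if not read_block():
                return
            # Success: earn retry budget back and shrink the delay.
            retries_left = min(retries_left + 1, num_retries)
            delay = max(delay / 2, min_delay)
        except IOError:
            if retries_left <= 0:
                raise
            time.sleep(delay)
            delay *= 2        # failure: back off harder next time
            retries_left -= 1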
17,187
def _do_download_file ( self , dss_file , fh , num_retries , min_delay_seconds ) : hasher = hashlib . sha256 ( ) delay = min_delay_seconds retries_left = num_retries while True : try : response = self . get_file . _request ( dict ( uuid = dss_file . uuid , version = dss_file . version , replica = dss_file . replica ) , stream = True , headers = { 'Range' : "bytes={}-" . format ( fh . tell ( ) ) } , ) try : if not response . ok : logger . error ( "%s" , "File {}: GET FAILED." . format ( dss_file . uuid ) ) logger . error ( "%s" , "Response: {}" . format ( response . text ) ) break consume_bytes = int ( fh . tell ( ) ) server_start = 0 content_range_header = response . headers . get ( 'Content-Range' , None ) if content_range_header is not None : cre = re . compile ( "bytes (\d+)-(\d+)" ) mo = cre . search ( content_range_header ) if mo is not None : server_start = int ( mo . group ( 1 ) ) consume_bytes -= server_start assert consume_bytes >= 0 if server_start > 0 and consume_bytes == 0 : logger . info ( "%s" , "File {}: Resuming at {}." . format ( dss_file . uuid , server_start ) ) elif consume_bytes > 0 : logger . info ( "%s" , "File {}: Resuming at {}. Dropping {} bytes to match" . format ( dss_file . uuid , server_start , consume_bytes ) ) while consume_bytes > 0 : bytes_to_read = min ( consume_bytes , 1024 * 1024 ) content = response . iter_content ( chunk_size = bytes_to_read ) chunk = next ( content ) if chunk : consume_bytes -= len ( chunk ) for chunk in response . iter_content ( chunk_size = 1024 * 1024 ) : if chunk : fh . write ( chunk ) hasher . update ( chunk ) retries_left = min ( retries_left + 1 , num_retries ) delay = max ( delay / 2 , min_delay_seconds ) break finally : response . close ( ) except ( ChunkedEncodingError , ConnectionError , ReadTimeout ) : if retries_left > 0 : logger . info ( "%s" , "File {}: GET FAILED. Attempting to resume." . format ( dss_file . uuid ) ) time . sleep ( delay ) delay *= 2 retries_left -= 1 continue raise return hasher . hexdigest ( )
Abstracts away the complications of downloading a file : handles retries and delays , and computes its hash .
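The resume logic above hinges on the HTTP Range request header and the Content-Range response header; a minimal sketch of a resumable fetch with requests (the URL is a placeholder, and the real method above additionally reconciles a mismatched server start offset):

import requests

def resume_download(url, fh):
    # Ask the server to continue from the bytes we already have on disk.
    headers = {"Range": "bytes={}-".format(fh.tell())}
    response = requests.get(url, headers=headers, stream=True)
    response.raise_for_status()  # 206 Partial Content when the Range is honored
    for chunk in response.iter_content(chunk_size=1024 * 1024):
        if chunk:
            fh.write(chunk)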
17,188
def _write_output_manifest ( self , manifest , filestore_root ) : output = os . path . basename ( manifest ) fieldnames , source_manifest = self . _parse_manifest ( manifest ) if 'file_path' not in fieldnames : fieldnames . append ( 'file_path' ) with atomic_write ( output , overwrite = True ) as f : delimiter = b'\t' if USING_PYTHON2 else '\t' writer = csv . DictWriter ( f , fieldnames , delimiter = delimiter , quoting = csv . QUOTE_NONE ) writer . writeheader ( ) for row in source_manifest : row [ 'file_path' ] = self . _file_path ( row [ 'file_sha256' ] , filestore_root ) writer . writerow ( row ) if os . path . isfile ( output ) : logger . warning ( 'Overwriting manifest %s' , output ) logger . info ( 'Rewrote manifest %s with additional column containing path to downloaded files.' , output )
Adds the file path column to the manifest and writes the copy to the current directory . If the original manifest is in the current directory it is overwritten with a warning .
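A standalone sketch of the same rewrite — read a TSV manifest, append a column, and write it back out (the column name and path layout are placeholders, and this version is Python 3 only):

import csv

def add_file_path_column(in_path, out_path, filestore_root):
    with open(in_path) as src, open(out_path, "w", newline="") as dst:
        reader = csv.DictReader(src, delimiter="\t")
        fieldnames = reader.fieldnames + ["file_path"]
        writer = csv.DictWriter(dst, fieldnames, delimiter="\t", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for row in reader:
            # Placeholder layout: files are stored under their sha256.
            row["file_path"] = filestore_root + "/" + row["file_sha256"]
            writer.writerow(row)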
17,189
def hardlink ( source , link_name ) : if sys . version_info < ( 3 , ) and platform . system ( ) == 'Windows' : import ctypes create_hard_link = ctypes . windll . kernel32 . CreateHardLinkW create_hard_link . argtypes = [ ctypes . c_wchar_p , ctypes . c_wchar_p , ctypes . c_void_p ] create_hard_link . restype = ctypes . wintypes . BOOL res = create_hard_link ( link_name , source , None ) if res == 0 : raise ctypes . WinError ( ) else : try : os . link ( source , link_name ) except OSError as e : if e . errno != errno . EEXIST : raise else : source_stat = os . stat ( source ) dest_stat = os . stat ( link_name ) if source_stat . st_dev != dest_stat . st_dev or source_stat . st_ino != dest_stat . st_ino : raise
Create a hardlink in a portable way
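Usage is a single call; the same st_dev/st_ino comparison used in the EEXIST branch also works on its own to confirm two paths refer to the same file (file names below are placeholders):

import os

hardlink("data.bin", "copy-of-data.bin")
src, dst = os.stat("data.bin"), os.stat("copy-of-data.bin")
assert (src.st_dev, src.st_ino) == (dst.st_dev, dst.st_ino)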
17,190
def request_with_retries_on_post_search ( self , session , url , query , json_input , stream , headers ) : status_code = 500 if '/v1/search' in url : retry_count = 10 else : retry_count = 1 while status_code in ( 500 , 502 , 503 , 504 ) and retry_count > 0 : try : retry_count -= 1 res = session . request ( self . http_method , url , params = query , json = json_input , stream = stream , headers = headers , timeout = self . client . timeout_policy ) status_code = res . status_code except SwaggerAPIException : if retry_count > 0 : pass else : raise return res
Submit a request , retrying on server errors only when it is a POST search request .
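The same retry-on-5xx loop, detached from the Swagger client (endpoint and retry budget are placeholders; the real method above also retries on SwaggerAPIException):

import requests

def post_with_retries(url, json_body, retries=10):
    response = None
    for _ in range(max(retries, 1)):
        response = requests.post(url, json=json_body)
        if response.status_code not in (500, 502, 503, 504):
            break  # anything that is not a transient server error ends the loop
    return response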
17,191
def refresh_swagger ( self ) : try : os . remove ( self . _get_swagger_filename ( self . swagger_url ) ) except EnvironmentError as e : logger . warning ( os . strerror ( e . errno ) ) else : self . __init__ ( )
Manually refresh the swagger document . This can help resolve errors communicating with the API .
17,192
def add_area ( self , uri ) : if uri . area_uuid not in self . _config . upload . areas : self . _config . upload . areas [ uri . area_uuid ] = { 'uri' : uri . uri } self . save ( )
Record information about a new Upload Area
17,193
def select_area ( self , area_uuid ) : self . _config . upload . current_area = area_uuid self . save ( )
Update the current area to be the area with this UUID .
17,194
def create_area ( self , area_uuid ) : response = self . _make_request ( 'post' , path = "/area/{id}" . format ( id = area_uuid ) , headers = { 'Api-Key' : self . auth_token } ) return response . json ( )
Create an Upload Area
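_make_request wraps an ordinary HTTP call; a sketch of the equivalent request with plain requests (endpoint, UUID and token are all placeholders):

import requests

api = "https://upload.example.org/v1"               # placeholder endpoint
area_uuid = "deadbeef-dead-dead-dead-deaddeadbeef"  # placeholder UUID
response = requests.post("{}/area/{}".format(api, area_uuid),
                         headers={"Api-Key": "my-token"})  # placeholder token
response.raise_for_status()
print(response.json())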
17,195
def area_exists ( self , area_uuid ) : response = requests . head ( self . _url ( path = "/area/{id}" . format ( id = area_uuid ) ) ) return response . ok
Check if an Upload Area exists
17,196
def delete_area ( self , area_uuid ) : self . _make_request ( 'delete' , path = "/area/{id}" . format ( id = area_uuid ) , headers = { 'Api-Key' : self . auth_token } ) return True
Delete an Upload Area
17,197
def credentials ( self , area_uuid ) : response = self . _make_request ( "post" , path = "/area/{uuid}/credentials" . format ( uuid = area_uuid ) ) return response . json ( )
Get AWS credentials required to directly upload files to Upload Area in S3
17,198
def file_upload_notification ( self , area_uuid , filename ) : url_safe_filename = urlparse . quote ( filename ) path = ( "/area/{area_uuid}/{filename}" . format ( area_uuid = area_uuid , filename = url_safe_filename ) ) response = self . _make_request ( 'post' , path = path ) return response . ok
Notify Upload Service that a file has been placed in an Upload Area
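urlparse.quote makes the filename safe to embed in the URL path; for example (assuming urlparse refers to urllib.parse or its six alias):

from urllib import parse as urlparse

print(urlparse.quote("my file (1).fastq.gz"))  # -> my%20file%20%281%29.fastq.gz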
17,199
def files_info ( self , area_uuid , file_list ) : path = "/area/{uuid}/files_info" . format ( uuid = area_uuid ) file_list = [ urlparse . quote ( filename ) for filename in file_list ] response = self . _make_request ( 'put' , path = path , json = file_list ) return response . json ( )
Get information about a list of files in an Upload Area .
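A usage sketch, assuming client is an instance of the API class above (the UUID and filenames are placeholders):

info = client.files_info("deadbeef-dead-dead-dead-deaddeadbeef",
                         ["sample 1.fastq.gz", "sample 2.fastq.gz"])
for entry in info:
    print(entry)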