idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
5,800
def _format_level_2 ( rows , list_embeds , embed_many ) :
    """Collapse joined rows into unique rows with grouped embed values.

    :param rows: list of dicts produced by the join (one dict per joined row);
        each must have an 'id' key, and embedded values must themselves be
        dicts with an 'id' key.
    :param list_embeds: names of the embedded fields to group; dotted names
        ('a.b') are nested under the 'a' key of the result row.
    :param embed_many: dict mapping embed name -> bool; True means the embed
        aggregates into a list, False means a single value.
    :returns: list of deduplicated rows with their embeds attached.
    """
    def _uniqify_list ( list_of_dicts ) :
        # Deduplicate dicts by their 'id', preserving first-seen order.
        result = [ ]
        set_ids = set ( )
        for v in list_of_dicts :
            if v [ 'id' ] in set_ids :
                continue
            set_ids . add ( v [ 'id' ] )
            result . append ( v )
        return result
    # Pass 1: accumulate embed values per row id.
    row_ids_to_embed_values = { }
    for row in rows :
        if row [ 'id' ] not in row_ids_to_embed_values :
            row_ids_to_embed_values [ row [ 'id' ] ] = { }
        for embd in list_embeds :
            if embd not in row :
                continue
            if embd not in row_ids_to_embed_values [ row [ 'id' ] ] :
                # First occurrence: wrap in a list when the embed is to-many.
                if embed_many [ embd ] :
                    row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] = [ row [ embd ] ]
                else :
                    row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] = row [ embd ]
            else :
                # Subsequent occurrences only accumulate for to-many embeds.
                if embed_many [ embd ] :
                    row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] . append ( row [ embd ] )
        for embd in list_embeds :
            if embd in row_ids_to_embed_values [ row [ 'id' ] ] :
                embed_values = row_ids_to_embed_values [ row [ 'id' ] ] [ embd ]
                if isinstance ( embed_values , list ) :
                    # Joins repeat values; keep each embedded dict only once.
                    row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] = _uniqify_list ( embed_values )
            else :
                # Embed absent from this row: default to {} (to-one) or [] (to-many).
                row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] = { }
                if embed_many [ embd ] :
                    row_ids_to_embed_values [ row [ 'id' ] ] [ embd ] = [ ]
    # Pass 2: emit one output row per unique id, attaching the grouped embeds.
    result = [ ]
    seen = set ( )
    for row in rows :
        if row [ 'id' ] in seen :
            continue
        seen . add ( row [ 'id' ] )
        new_row = { }
        for field in row :
            if field not in list_embeds :
                new_row [ field ] = row [ field ]
        row_ids_to_embed_values_keys = list ( row_ids_to_embed_values [ new_row [ 'id' ] ] . keys ( ) )
        # Sorting makes 'a' come before 'a.b', so the parent dict exists
        # before a dotted embed is nested under it.
        row_ids_to_embed_values_keys . sort ( )
        for embd in list_embeds :
            if embd in row_ids_to_embed_values_keys :
                if '.' in embd :
                    prefix , suffix = embd . split ( '.' , 1 )
                    new_row [ prefix ] [ suffix ] = row_ids_to_embed_values [ new_row [ 'id' ] ] [ embd ]
                else :
                    new_row [ embd ] = row_ids_to_embed_values [ new_row [ 'id' ] ] [ embd ]
            else :
                # No values collected at all: still expose an empty placeholder.
                new_row_embd_value = { }
                if embed_many [ embd ] :
                    new_row_embd_value = [ ]
                if '.' in embd :
                    prefix , suffix = embd . split ( '.' , 1 )
                    new_row [ prefix ] [ suffix ] = new_row_embd_value
                else :
                    new_row [ embd ] = new_row_embd_value
        result . append ( new_row )
    return result
From the _format_level_1 function we get a list of rows. Because of the joins, there are as many rows as the join result.
5,801
def common_values_dict ( ) :
    """Build the base values dict shared by every create method.

    Returns a dict with a fresh id, an etag and identical
    created_at/updated_at UTC timestamps.
    """
    timestamp = datetime . datetime . utcnow ( ) . isoformat ( )
    etag = utils . gen_etag ( )
    return {
        'id' : utils . gen_uuid ( ) ,
        'created_at' : timestamp ,
        'updated_at' : timestamp ,
        'etag' : etag ,
    }
Build a basic values object used in every create method .
5,802
def get_identity ( identity ) :
    """Return some information about the currently authenticated identity."""
    payload = {
        'identity' : {
            'id' : identity . id ,
            'etag' : identity . etag ,
            'name' : identity . name ,
            'fullname' : identity . fullname ,
            'email' : identity . email ,
            'timezone' : identity . timezone ,
            'teams' : _encode_dict ( identity . teams ) ,
        }
    }
    return flask . Response (
        json . dumps ( payload ) ,
        200 ,
        headers = { 'ETag' : identity . etag } ,
        content_type = 'application/json' )
Returns some information about the currently authenticated identity
5,803
def get_issues_by_resource ( resource_id , table ) :
    """Get all issues attached to a job or a component.

    For a job, issues come both from the job's components and from the job
    itself; for any other table, only issues attached to the component are
    returned. Each issue row is enriched with live data pulled from its
    tracker (GitHub or Bugzilla).
    """
    v1_utils . verify_existence_and_get ( resource_id , table )
    if table . name == 'jobs' :
        JJI = models . JOIN_JOBS_ISSUES
        JJC = models . JOIN_JOBS_COMPONENTS
        JCI = models . JOIN_COMPONENTS_ISSUES
        # Issues attached to the components of the job.
        j1 = sql . join (
            _TABLE ,
            sql . join (
                JCI ,
                JJC ,
                sql . and_ (
                    JCI . c . component_id == JJC . c . component_id ,
                    JJC . c . job_id == resource_id ,
                ) ,
            ) ,
            _TABLE . c . id == JCI . c . issue_id ,
        )
        query = sql . select ( [ _TABLE ] ) . select_from ( j1 )
        rows = flask . g . db_conn . execute ( query )
        rows = [ dict ( row ) for row in rows ]
        # Issues attached directly to the job.
        j2 = sql . join (
            _TABLE ,
            JJI ,
            sql . and_ (
                _TABLE . c . id == JJI . c . issue_id ,
                JJI . c . job_id == resource_id ) )
        query2 = sql . select ( [ _TABLE ] ) . select_from ( j2 )
        rows2 = flask . g . db_conn . execute ( query2 )
        rows += [ dict ( row ) for row in rows2 ]
    else :
        # Issues attached to a component.
        JCI = models . JOIN_COMPONENTS_ISSUES
        query = ( sql . select ( [ _TABLE ] )
                  . select_from ( JCI . join ( _TABLE ) )
                  . where ( JCI . c . component_id == resource_id ) )
        rows = flask . g . db_conn . execute ( query )
        rows = [ dict ( row ) for row in rows ]
    for row in rows :
        # NOTE(review): if a row's tracker is neither 'github' nor
        # 'bugzilla', l_tracker is undefined (or stale from the previous
        # iteration) when dump() is called — confirm trackers are limited
        # to these two values.
        if row [ 'tracker' ] == 'github' :
            l_tracker = github . Github ( row [ 'url' ] )
        elif row [ 'tracker' ] == 'bugzilla' :
            l_tracker = bugzilla . Bugzilla ( row [ 'url' ] )
        row . update ( l_tracker . dump ( ) )
    return flask . jsonify ( { 'issues' : rows , '_meta' : { 'count' : len ( rows ) } } )
Get all issues for a specific job .
5,804
def unattach_issue ( resource_id , issue_id , table ) :
    """Detach an issue from a job or a component.

    Raises DCIConflict when no association row was deleted.
    """
    v1_utils . verify_existence_and_get ( issue_id , _TABLE )
    if table . name == 'jobs' :
        join_table = models . JOIN_JOBS_ISSUES
        where_clause = sql . and_ (
            join_table . c . job_id == resource_id ,
            join_table . c . issue_id == issue_id )
    else :
        join_table = models . JOIN_COMPONENTS_ISSUES
        where_clause = sql . and_ (
            join_table . c . component_id == resource_id ,
            join_table . c . issue_id == issue_id )
    result = flask . g . db_conn . execute (
        join_table . delete ( ) . where ( where_clause ) )
    if not result . rowcount :
        raise dci_exc . DCIConflict ( '%s_issues' % table . name , issue_id )
    return flask . Response ( None , 204 , content_type = 'application/json' )
Unattach an issue from a specific job .
5,805
def attach_issue ( resource_id , table , user_id ) :
    """Attach an issue to a job or a component.

    Raises DCICreationConflict when the association already exists.
    """
    data = schemas . issue . post ( flask . request . json )
    issue = _get_or_create_issue ( data )
    if table . name == 'jobs' :
        join_table = models . JOIN_JOBS_ISSUES
    else :
        join_table = models . JOIN_COMPONENTS_ISSUES
    # e.g. 'jobs' -> 'job_id'
    key = '%s_id' % table . name [ 0 : - 1 ]
    values = { 'user_id' : user_id , 'issue_id' : issue [ 'id' ] , key : resource_id }
    try :
        flask . g . db_conn . execute ( join_table . insert ( ) . values ( values ) )
    except sa_exc . IntegrityError :
        raise dci_exc . DCICreationConflict ( join_table . name , '%s, issue_id' % key )
    result = json . dumps ( { 'issue' : dict ( issue ) } )
    return flask . Response ( result , 201 , content_type = 'application/json' )
Attach an issue to a specific job .
5,806
def collect_staticroot_removal ( app , blueprints ) :
    """Filter out blueprints whose static folder is the collect static root.

    Only blueprints that have a static folder different from the
    Flask-Collect static root are kept.
    """
    root = app . extensions [ 'collect' ] . static_root
    kept = [ ]
    for blueprint in blueprints :
        if blueprint . has_static_folder and blueprint . static_folder != root :
            kept . append ( blueprint )
    return kept
Remove Flask-Collect's static root folder from the list of blueprints.
5,807
def get_all_jobstates ( user , job_id ) :
    """Get all jobstates of a given job.

    Non-admin, non-read-only users must belong to (or be a parent team of)
    the job's team; otherwise Unauthorized is raised.
    """
    args = schemas . args ( flask . request . args . to_dict ( ) )
    job = v1_utils . verify_existence_and_get ( job_id , models . JOBS )
    if user . is_not_super_admin ( ) and user . is_not_read_only_user ( ) :
        if ( job [ 'team_id' ] not in user . teams_ids
                and job [ 'team_id' ] not in user . child_teams_ids ) :
            raise dci_exc . Unauthorized ( )
    query = v1_utils . QueryBuilder ( _TABLE , args , _JS_COLUMNS )
    # Restrict to the jobstates of this job only.
    query . add_extra_condition ( _TABLE . c . job_id == job_id )
    nb_rows = query . get_number_of_rows ( )
    rows = query . execute ( fetchall = True )
    rows = v1_utils . format_result ( rows , _TABLE . name , args [ 'embed' ] , _EMBED_MANY )
    return flask . jsonify ( { 'jobstates' : rows , '_meta' : { 'count' : nb_rows } } )
Get all jobstates .
5,808
def create_client ( access_token ) :
    """Create the dci client in the Keycloak dci-test realm.

    Treats both 201 (created) and 409 (already exists) as success.

    :param access_token: admin bearer token used to authenticate the call.
    :raises Exception: on any other HTTP status.
    """
    # NOTE(review): `client_data` is not defined in this function — it is
    # presumably a module-level constant; confirm it exists at import time.
    url = 'http://keycloak:8080/auth/admin/realms/dci-test/clients'
    r = requests . post ( url , data = json . dumps ( client_data ) , headers = get_auth_headers ( access_token ) )
    # 409 means the client already exists, which is fine for idempotent setup.
    if r . status_code in ( 201 , 409 ) :
        print ( 'Keycloak client dci created successfully.' )
    else :
        raise Exception ( 'Error while creating Keycloak client dci:\nstatus code %s\n' 'error: %s' % ( r . status_code , r . content ) )
Create the dci client in the dci-test realm.
5,809
def create_user_dci ( access_token ) :
    """Create the dci user (username dci, password dci) in the dci-test realm.

    Treats both 201 (created) and 409 (already exists) as success.
    """
    user_data = {
        'username' : 'dci' ,
        'email' : 'dci@distributed-ci.io' ,
        'enabled' : True ,
        'emailVerified' : True ,
        'credentials' : [ { 'type' : 'password' , 'value' : 'dci' } ] ,
    }
    response = requests . post (
        'http://keycloak:8080/auth/admin/realms/dci-test/users' ,
        data = json . dumps ( user_data ) ,
        headers = get_auth_headers ( access_token ) )
    if response . status_code in ( 201 , 409 ) :
        print ( 'Keycloak user dci created successfully.' )
    else :
        raise Exception ( 'Error while creating user dci:\nstatus code %s\n' 'error: %s' % ( response . status_code , response . content ) )
Create a dci user. username = dci, password = dci, email = dci@distributed-ci.io
5,810
def _serializeBooleans ( params ) : serialized = { } for name , value in params . items ( ) : if value is True : value = 'true' elif value is False : value = 'false' serialized [ name ] = value return serialized for k , v in params . items ( ) : if isinstance ( v , bool ) : params [ k ] = str ( v ) . lower ( )
Convert all booleans to lowercase strings
5,811
def request ( self , method , url , parameters = dict ( ) ) :
    """Requests wrapper for the Pingdom API.

    :param method: HTTP verb ('GET', 'POST', 'PUT' or 'DELETE').
    :param url: path appended to self.url.
    :param parameters: request parameters; booleans are serialized first.
        NOTE(review): mutable default argument — harmless here because it
        is rebound immediately, but worth confirming it is never mutated.
    :returns: the requests Response object.
    :raises Exception: on an unknown HTTP verb.
    """
    parameters = self . _serializeBooleans ( parameters )
    headers = { 'App-Key' : self . apikey }
    if self . accountemail :
        headers . update ( { 'Account-Email' : self . accountemail } )
    # GET/DELETE send parameters in the query string, POST/PUT in the body.
    if method . upper ( ) == 'GET' :
        response = requests . get ( self . url + url , params = parameters , auth = ( self . username , self . password ) , headers = headers )
    elif method . upper ( ) == 'POST' :
        response = requests . post ( self . url + url , data = parameters , auth = ( self . username , self . password ) , headers = headers )
    elif method . upper ( ) == 'PUT' :
        response = requests . put ( self . url + url , data = parameters , auth = ( self . username , self . password ) , headers = headers )
    elif method . upper ( ) == 'DELETE' :
        response = requests . delete ( self . url + url , params = parameters , auth = ( self . username , self . password ) , headers = headers )
    else :
        raise Exception ( "Invalid method in pingdom request" )
    # Track the API rate-limit headers returned by Pingdom.
    self . shortlimit = response . headers . get ( 'Req-Limit-Short' , self . shortlimit )
    self . longlimit = response . headers . get ( 'Req-Limit-Long' , self . longlimit )
    if response . status_code != 200 :
        sys . stderr . write ( 'ERROR from %s: %d' % ( response . url , response . status_code ) )
        sys . stderr . write ( 'Returned data: %s\n' % response . json ( ) )
        response . raise_for_status ( )
    return response
Requests wrapper function
5,812
def getChecks ( self , ** parameters ) :
    """Pull all checks from Pingdom as PingdomCheck instances.

    Unknown keyword arguments are reported on stderr but still forwarded.
    """
    valid_keys = ( 'limit' , 'offset' , 'tags' )
    for key in parameters :
        if key not in valid_keys :
            sys . stderr . write ( '%s not a valid argument for getChecks()\n' % key )
    checks = self . request ( 'GET' , 'checks' , parameters ) . json ( ) [ 'checks' ]
    return [ PingdomCheck ( self , check ) for check in checks ]
Pulls all checks from pingdom
5,813
def getCheck ( self , checkid ) :
    """Return a detailed description of the check identified by *checkid*."""
    stub = { 'id' : checkid }
    check = PingdomCheck ( self , stub )
    check . getDetails ( )
    return check
Returns a detailed description of a specified check .
5,814
def probes ( self , ** kwargs ) :
    """Return a list of all Pingdom probe servers.

    Unknown keyword arguments are reported on stderr but still forwarded.
    """
    allowed = ( 'limit' , 'offset' , 'onlyactive' , 'includedeleted' )
    for key in kwargs :
        if key not in allowed :
            sys . stderr . write ( "'%s' is not a valid argument of probes()\n" % key )
    return self . request ( "GET" , "probes" , kwargs ) . json ( ) [ 'probes' ]
Returns a list of all Pingdom probe servers
5,815
def traceroute ( self , host , probeid ) :
    """Run a traceroute to *host* from the given Pingdom probe."""
    params = { 'host' : host , 'probeid' : probeid }
    return self . request ( 'GET' , 'traceroute' , params ) . json ( ) [ 'traceroute' ]
Perform a traceroute to a specified target from a specified Pingdom probe .
5,816
def getContacts ( self , ** kwargs ) :
    """Return every notification contact as a PingdomContact instance.

    Unknown keyword arguments are reported on stderr but still forwarded.
    """
    for key in kwargs :
        if key not in ( 'limit' , 'offset' ) :
            sys . stderr . write ( "'%s' is not a valid argument of getContacts()\n" % key )
    contacts = self . request ( "GET" , "notification_contacts" , kwargs ) . json ( ) [ 'contacts' ]
    return [ PingdomContact ( self , contact ) for contact in contacts ]
Returns a list of all contacts .
5,817
def modifyContacts ( self , contactids , paused ) :
    """Pause or unpause the given notification contacts.

    :returns: the API confirmation message.
    """
    payload = { 'contactids' : contactids , 'paused' : paused }
    response = self . request ( "PUT" , "notification_contacts" , payload )
    return response . json ( ) [ 'message' ]
Modifies a list of contacts .
5,818
def getEmailReports ( self ) :
    """Return all configured email reports as PingdomEmailReport instances."""
    subscriptions = self . request ( 'GET' , 'reports.email' ) . json ( ) [ 'subscriptions' ]
    return [ PingdomEmailReport ( self , sub ) for sub in subscriptions ]
Returns a list of PingdomEmailReport instances .
5,819
def newEmailReport ( self , name , ** kwargs ) :
    """Create a new email report.

    :param name: report name.
    :param kwargs: optional settings — checkid, frequency, contactids,
        additionalemails; unknown keys are reported on stderr but still
        forwarded.
    :returns: the API confirmation message.
    """
    valid_keys = ( 'checkid' , 'frequency' , 'contactids' , 'additionalemails' )
    for key in kwargs :
        if key not in valid_keys :
            sys . stderr . write ( "'%s'" % key + ' is not a valid argument ' + 'of newEmailReport()\n' )
    parameters = { 'name' : name }
    # Fix: dict.iteritems() is Python 2-only; update() is equivalent.
    parameters . update ( kwargs )
    return self . request ( 'POST' , 'reports.email' , parameters ) . json ( ) [ 'message' ]
Creates a new email report
5,820
def getSharedReports ( self ) :
    """Return all shared (banner) reports as PingdomSharedReport instances."""
    banners = self . request ( 'GET' , 'reports.shared' ) . json ( ) [ 'shared' ] [ 'banners' ]
    return [ PingdomSharedReport ( self , banner ) for banner in banners ]
Returns a list of PingdomSharedReport instances
5,821
def download ( url , directory , filename = None ) :
    """Download *url* into *directory* and return the local filepath.

    If the target file already exists it is not downloaded again. When
    *filename* is not given it is derived from the last path segment of
    the url.
    """
    if not filename :
        filename = os . path . split ( url ) [ 1 ]
    directory = os . path . expanduser ( directory )
    ensure_directory ( directory )
    filepath = os . path . join ( directory , filename )
    if os . path . isfile ( filepath ) :
        return filepath
    print ( 'Download' , filepath )
    with urlopen ( url ) as response , open ( filepath , 'wb' ) as target :
        shutil . copyfileobj ( response , target )
    return filepath
Download a file and return its filename on the local file system . If the file is already there it will not be downloaded again . The filename is derived from the url if not provided . Return the filepath .
5,822
def ensure_directory ( directory ) :
    """Create the missing directories along *directory* (like mkdir -p).

    A pre-existing directory is not an error; any other OSError is
    re-raised.
    """
    directory = os . path . expanduser ( directory )
    try :
        os . makedirs ( directory )
    except OSError as e :
        if e . errno != errno . EEXIST :
            # Fix: bare `raise` preserves the original traceback
            # (the original `raise e` re-raised from this frame).
            raise
Create the directories along the provided directory path that do not exist .
5,823
def _process_validation_function_s ( validation_func , auto_and_wrapper = True ) :
    """Normalize the various user-facing validation-function syntaxes.

    Accepts a single callable, a (callable, help_msg) or
    (callable, failure_type) tuple, a list mixing those forms (nested
    lists become an and_), and returns either a single combined callable
    (default) or the normalized list when auto_and_wrapper is False.

    :raises ValueError: when validation_func is None or an empty list.
    :raises TypeError: on any non-compliant entry.
    """
    if validation_func is None :
        raise ValueError ( 'mandatory validation_func is None' )
    elif not isinstance ( validation_func , list ) :
        # Single entry: process it like a one-element list.
        validation_func = [ validation_func ]
    elif len ( validation_func ) == 0 :
        raise ValueError ( 'provided validation_func list is empty' )
    final_list = [ ]
    for v in validation_func :
        # as_function may convert supported non-callable forms into callables.
        v = as_function ( v )
        if isinstance ( v , tuple ) :
            if len ( v ) == 2 :
                if isinstance ( v [ 1 ] , str ) :
                    # (callable, help message)
                    final_list . append ( _failure_raiser ( v [ 0 ] , help_msg = v [ 1 ] ) )
                elif isinstance ( v [ 1 ] , type ) and issubclass ( v [ 1 ] , WrappingFailure ) :
                    # (callable, failure type to raise)
                    final_list . append ( _failure_raiser ( v [ 0 ] , failure_type = v [ 1 ] ) )
                else :
                    raise TypeError ( 'base validation function(s) not compliant with the allowed syntax. Base validation' ' function(s) can be {}. Found [{}].' . format ( supported_syntax , str ( v ) ) )
            else :
                raise TypeError ( 'base validation function(s) not compliant with the allowed syntax. Base validation' ' function(s) can be {}. Found [{}].' . format ( supported_syntax , str ( v ) ) )
        elif callable ( v ) :
            final_list . append ( v )
        elif isinstance ( v , list ) :
            # A nested list means "all of these must pass".
            final_list . append ( and_ ( * v ) )
        else :
            raise TypeError ( 'base validation function(s) not compliant with the allowed syntax. Base validation' ' function(s) can be {}. Found [{}].' . format ( supported_syntax , str ( v ) ) )
    if auto_and_wrapper :
        return and_ ( * final_list )
    else :
        return final_list
This function handles the various ways that users may enter validation functions so as to output a single callable method . Setting auto_and_wrapper to False allows callers to get a list of callables instead .
5,824
def pop_kwargs ( kwargs , names_with_defaults , allow_others = False ) :
    """Extract optional arguments from *kwargs*, applying defaults.

    :param kwargs: dict to pop from (mutated in place).
    :param names_with_defaults: sequence of (name, default) pairs.
    :param allow_others: when False, raise ValueError if kwargs still
        contains entries after extraction.
    :returns: the single extracted value when exactly one name was
        requested, otherwise the list of extracted values.
    """
    extracted = [ kwargs . pop ( name , default_ ) for name , default_ in names_with_defaults ]
    if not allow_others and len ( kwargs ) > 0 :
        raise ValueError ( "Unsupported arguments: %s" % kwargs )
    return extracted [ 0 ] if len ( names_with_defaults ) == 1 else extracted
Internal utility method to extract optional arguments from kwargs .
5,825
def get_details ( self ) :
    """Build a detailed message listing validation successes and failures.

    The failing value is echoed in the message unless one of the failures
    is itself a WrappingFailure/CompositionFailure (which already embeds
    the value).
    """
    need_to_print_value = True
    failures_for_print = OrderedDict ( )
    for validator , failure in self . failures . items ( ) :
        name = get_callable_name ( validator )
        if isinstance ( failure , Exception ) :
            if isinstance ( failure , WrappingFailure ) or isinstance ( failure , CompositionFailure ) :
                # These failure types already describe the value; avoid
                # repeating it in the outer message.
                need_to_print_value = False
            failures_for_print [ name ] = '{exc_type}: {msg}' . format ( exc_type = type ( failure ) . __name__ , msg = str ( failure ) )
        else :
            # Non-exception outcome (e.g. a falsy return value).
            failures_for_print [ name ] = str ( failure )
    if need_to_print_value :
        value_str = ' for value [{val}]' . format ( val = self . wrong_value )
    else :
        value_str = ''
    # Render the failures dict with repr'd keys/values for readability.
    key_values_str = [ repr ( key ) + ': ' + repr ( val ) for key , val in failures_for_print . items ( ) ]
    failures_for_print_str = '{' + ', ' . join ( key_values_str ) + '}'
    msg = '{what}{possibly_value}. Successes: {success} / Failures: {fails}' '' . format ( what = self . get_what ( ) , possibly_value = value_str , success = self . successes , fails = failures_for_print_str )
    return msg
Overrides the base method in order to give details on the various successes and failures
5,826
def play_all_validators ( self , validators , value ) :
    """Run every validator on *value* and collect successes and failures.

    :returns: a (successes, failures) tuple where successes is a list of
        validator names and failures maps each failing validator to its
        outcome (return value or raised exception).
    """
    successes = [ ]
    failures = OrderedDict ( )
    for validator in validators :
        name = get_callable_name ( validator )
        try :
            outcome = validator ( value )
            if result_is_success ( outcome ) :
                successes . append ( name )
            else :
                failures [ validator ] = outcome
        except Exception as exc :
            failures [ validator ] = exc
    return successes , failures
Utility method to play all the provided validators on the provided value and output the successes and failures.
5,827
def gen_secret ( length = 64 ) :
    """Generate a random alphanumeric secret of the given length.

    Uses random.SystemRandom (OS entropy), so the result is suitable for
    security-sensitive use.
    """
    rng = random . SystemRandom ( )
    alphabet = string . ascii_letters + string . digits
    return '' . join ( rng . choice ( alphabet ) for _ in range ( length ) )
Generates a secret of given length
5,828
def _tokenize ( cls , sentence ) :
    """Split *sentence* into tokens while preserving tags intact.

    Yields tokens produced by cls._split for the text between tags, and
    each tag (as matched by cls._regex_tag) as a single token.
    """
    while True :
        match = cls . _regex_tag . search ( sentence )
        if not match :
            # No more tags: split and emit the remainder, then stop.
            yield from cls . _split ( sentence )
            return
        # Text before the tag is split normally.
        chunk = sentence [ : match . start ( ) ]
        yield from cls . _split ( chunk )
        # The tag itself is emitted verbatim as one token.
        tag = match . group ( 0 )
        yield tag
        # Continue after the tag (chunk ends exactly where the tag starts).
        sentence = sentence [ ( len ( chunk ) + len ( tag ) ) : ]
Split a sentence while preserving tags .
5,829
def dump ( self ) :
    """Serialize the issue's tracked fields into a plain dict."""
    field_names = (
        'title' , 'issue_id' , 'reporter' , 'assignee' , 'status' ,
        'product' , 'component' , 'created_at' , 'updated_at' ,
        'closed_at' , 'status_code' ,
    )
    return { name : getattr ( self , name ) for name in field_names }
Return the issue's fields as a plain dict.
5,830
def is_product_owner ( self , team_id ) :
    """Return True when the user owns the product for *team_id*.

    Super admins always pass; otherwise the team must be one of the
    user's child teams.
    """
    if self . is_super_admin ( ) :
        return True
    return uuid . UUID ( str ( team_id ) ) in self . child_teams_ids
Ensure the user is a PRODUCT_OWNER .
5,831
def is_in_team ( self , team_id ) :
    """Return True when the user belongs to *team_id* (or a child team).

    Super admins always pass.
    """
    if self . is_super_admin ( ) :
        return True
    team_uuid = uuid . UUID ( str ( team_id ) )
    return team_uuid in self . teams or team_uuid in self . child_teams_ids
Test if user is in team
5,832
def is_remoteci ( self , team_id = None ) :
    """Return True when the resource has the REMOTECI role.

    Without *team_id* the resource's own flag is returned; with a team,
    the role recorded for that team must be 'REMOTECI'.
    """
    if team_id is None :
        return self . _is_remoteci
    team_uuid = uuid . UUID ( str ( team_id ) )
    if team_uuid not in self . teams_ids :
        return False
    return self . teams [ team_uuid ] [ 'role' ] == 'REMOTECI'
Ensure the resource has the role REMOTECI.
5,833
def is_feeder ( self , team_id = None ) :
    """Return True when the resource has the FEEDER role.

    Without *team_id* the resource's own flag is returned; with a team,
    the role recorded for that team must be 'FEEDER'.
    """
    if team_id is None :
        return self . _is_feeder
    team_uuid = uuid . UUID ( str ( team_id ) )
    if team_uuid not in self . teams_ids :
        return False
    return self . teams [ team_uuid ] [ 'role' ] == 'FEEDER'
Ensure the resource has the role FEEDER.
5,834
def delete ( self , destroy = True ) :
    """Delete this node from the owning document.

    :param destroy: forwarded to the adapter as destroy_node.
    :returns: the removed node wrapped, or None when nothing was removed.
    """
    parent = self . adapter . get_node_parent ( self . impl_node )
    removed = self . adapter . remove_node_child ( parent , self . impl_node , destroy_node = destroy )
    if removed is None :
        return None
    return self . adapter . wrap_node ( removed , None , self . adapter )
Delete this node from the owning document .
5,835
def xpath ( self , xpath , ** kwargs ) :
    """Perform an XPath query on the current node.

    List/tuple results are wrapped element-wise; scalar results are
    wrapped directly.
    """
    outcome = self . adapter . xpath_on_node ( self . impl_node , xpath , ** kwargs )
    if isinstance ( outcome , ( list , tuple ) ) :
        return [ self . _maybe_wrap_node ( item ) for item in outcome ]
    return self . _maybe_wrap_node ( outcome )
Perform an XPath query on the current node .
5,836
def set_attributes ( self , attr_obj = None , ns_uri = None , ** attr_dict ) :
    """Add or update this element's attributes.

    Attributes may be given as a mapping/object (*attr_obj*) and/or as
    keyword arguments; both are forwarded to the adapter helper.
    """
    self . _set_element_attributes (
        self . impl_node , attr_obj = attr_obj , ns_uri = ns_uri , ** attr_dict )
Add or update this element s attributes where attributes can be specified in a number of ways .
5,837
def set_ns_prefix ( self , prefix , ns_uri ) :
    """Define a namespace prefix as shorthand for *ns_uri* in element names."""
    self . _add_ns_prefix_attr ( self . impl_node , prefix , ns_uri )
Define a namespace prefix that will serve as shorthand for the given namespace URI in element names .
5,838
def add_element ( self , name , ns_uri = None , attributes = None , text = None , before_this_element = False ) :
    """Add a new child element to this element.

    :param name: element name; may carry a prefix, and a '}' in the name
        indicates a namespace-qualified name handled by the adapter.
    :param ns_uri: optional namespace URI for the new element; when
        omitted the default namespace in scope is used.
    :param attributes: optional attributes to set on the new element.
    :param text: optional text content added as a child text node.
    :param before_this_element: when True, insert the new element before
        this one (as a sibling) instead of appending it as a child.
    :returns: the wrapped new element.
    """
    prefix , local_name , node_ns_uri = self . adapter . get_ns_info_from_node_name ( name , self . impl_node )
    if prefix :
        qname = u'%s:%s' % ( prefix , local_name )
    else :
        qname = local_name
    # Resolve the namespace: explicit ns_uri wins, otherwise fall back to
    # the default namespace in scope at this node.
    if node_ns_uri is None :
        if ns_uri is None :
            node_ns_uri = self . adapter . get_ns_uri_for_prefix ( self . impl_node , None )
        else :
            node_ns_uri = ns_uri
    child_elem = self . adapter . new_impl_element ( qname , node_ns_uri , parent = self . impl_node )
    # Record the namespace as an xmlns attribute when the element has no
    # prefix but a qualified name, or when an explicit ns_uri was given.
    if not prefix and '}' in name :
        self . _set_element_attributes ( child_elem , { 'xmlns' : node_ns_uri } , ns_uri = self . XMLNS_URI )
    elif ns_uri is not None :
        self . _set_element_attributes ( child_elem , { 'xmlns' : ns_uri } , ns_uri = self . XMLNS_URI )
    if attributes is not None :
        self . _set_element_attributes ( child_elem , attr_obj = attributes )
    if text is not None :
        self . _add_text ( child_elem , text )
    if before_this_element :
        # Insert as a preceding sibling of this element.
        self . adapter . add_node_child ( self . adapter . get_node_parent ( self . impl_node ) , child_elem , before_sibling = self . impl_node )
    else :
        self . adapter . add_node_child ( self . impl_node , child_elem )
    return self . adapter . wrap_node ( child_elem , self . adapter . impl_document , self . adapter )
Add a new child element to this element with an optional namespace definition . If no namespace is provided the child will be assigned to the default namespace .
5,839
def add_text ( self , text ) :
    """Add a text node to this element.

    Non-string values are coerced with str() before being added.
    """
    # Fix: `basestring`/`unicode` are Python 2-only names and raise
    # NameError on Python 3 (sibling code already uses py3-only syntax).
    if not isinstance ( text , str ) :
        text = str ( text )
    self . _add_text ( self . impl_node , text )
Add a text node to this element .
5,840
def add_instruction ( self , target , data ) :
    """Add a processing-instruction node to this element."""
    self . _add_instruction ( self . impl_node , target , data )
Add an instruction node to this element .
5,841
def filter ( self , local_name = None , name = None , ns_uri = None , node_type = None , filter_fn = None , first_only = False ) :
    """Apply filters to the set of nodes in this list.

    :param local_name: keep nodes whose local_name matches.
    :param name: keep nodes whose name matches.
    :param ns_uri: keep nodes whose ns_uri matches.
    :param node_type: an int type code (checked via n.is_type) or a class
        (checked via exact class match).
    :param filter_fn: custom predicate; when given, the other criteria
        are ignored.
    :param first_only: return only the first match (or None).
    :returns: a NodeList of matches, or a single node/None when
        first_only is True.
    """
    if filter_fn is None :
        def filter_fn ( n ) :
            if node_type is not None :
                if isinstance ( node_type , int ) :
                    if not n . is_type ( node_type ) :
                        return False
                elif n . __class__ != node_type :
                    return False
            if name is not None and n . name != name :
                return False
            if local_name is not None and n . local_name != local_name :
                return False
            if ns_uri is not None and n . ns_uri != ns_uri :
                return False
            return True
    # Fix: on Python 3 the builtin filter() returns a lazy iterator, which
    # is neither subscriptable (nodelist[0]) nor usefully truthy — an empty
    # iterator is still truthy. Materialize the matches as a list instead.
    matches = [ n for n in self if filter_fn ( n ) ]
    if first_only :
        return matches [ 0 ] if matches else None
    else :
        return NodeList ( matches )
Apply filters to the set of nodes in this list .
5,842
def get_all_analytics ( user , job_id ) :
    """Get all analytics of a given job.

    Non-admin, non-read-only users only see analytics belonging to their
    own teams.
    """
    args = schemas . args ( flask . request . args . to_dict ( ) )
    v1_utils . verify_existence_and_get ( job_id , models . JOBS )
    query = v1_utils . QueryBuilder ( _TABLE , args , _A_COLUMNS )
    if user . is_not_super_admin ( ) and not user . is_read_only_user ( ) :
        # Restrict visibility to the caller's teams.
        query . add_extra_condition ( _TABLE . c . team_id . in_ ( user . teams_ids ) )
    query . add_extra_condition ( _TABLE . c . job_id == job_id )
    nb_rows = query . get_number_of_rows ( )
    rows = query . execute ( fetchall = True )
    rows = v1_utils . format_result ( rows , _TABLE . name )
    return flask . jsonify ( { 'analytics' : rows , '_meta' : { 'count' : nb_rows } } )
Get all analytics of a job .
5,843
def get_analytic ( user , job_id , anc_id ) :
    """Get one analytic of a job; the caller must be in the analytic's team."""
    v1_utils . verify_existence_and_get ( job_id , models . JOBS )
    analytic = dict ( v1_utils . verify_existence_and_get ( anc_id , _TABLE ) )
    if not user . is_in_team ( analytic [ 'team_id' ] ) :
        raise dci_exc . Unauthorized ( )
    return flask . jsonify ( { 'analytic' : analytic } )
Get an analytic .
5,844
def retrieve_info ( self ) :
    """Query the Bugzilla XML API and populate the issue's fields.

    Silently returns without touching any field when the scheme is not
    http(s) or the ticket id cannot be found in the URL's query string.
    """
    scheme = urlparse ( self . url ) . scheme
    netloc = urlparse ( self . url ) . netloc
    query = urlparse ( self . url ) . query
    if scheme not in ( 'http' , 'https' ) :
        return
    # Extract the ticket id from the query string (e.g. '?id=1234').
    for item in query . split ( '&' ) :
        if 'id=' in item :
            ticket_id = item . split ( '=' ) [ 1 ]
            break
    else :
        # No id= parameter found: nothing to retrieve.
        return
    bugzilla_url = '%s://%s/%s%s' % ( scheme , netloc , _URI_BASE , ticket_id )
    result = requests . get ( bugzilla_url )
    self . status_code = result . status_code
    if result . status_code == 200 :
        tree = ElementTree . fromstring ( result . content )
        # Each field is the text of the corresponding <bug> child element.
        self . title = tree . findall ( "./bug/short_desc" ) . pop ( ) . text
        self . issue_id = tree . findall ( "./bug/bug_id" ) . pop ( ) . text
        self . reporter = tree . findall ( "./bug/reporter" ) . pop ( ) . text
        self . assignee = tree . findall ( "./bug/assigned_to" ) . pop ( ) . text
        self . status = tree . findall ( "./bug/bug_status" ) . pop ( ) . text
        self . product = tree . findall ( "./bug/product" ) . pop ( ) . text
        self . component = tree . findall ( "./bug/component" ) . pop ( ) . text
        self . created_at = tree . findall ( "./bug/creation_ts" ) . pop ( ) . text
        self . updated_at = tree . findall ( "./bug/delta_ts" ) . pop ( ) . text
        try :
            # cf_last_closed is absent while the bug is still open.
            self . closed_at = ( tree . findall ( "./bug/cf_last_closed" ) . pop ( ) . text )
        except IndexError :
            pass
Query Bugzilla API to retrieve the needed infos .
5,845
def is_in ( allowed_values ) :
    """Generate a validation function checking membership in *allowed_values*.

    The returned function returns True on success and raises
    NotInAllowedValues otherwise.
    """
    def is_in_allowed_values ( x ) :
        if x not in allowed_values :
            raise NotInAllowedValues ( wrong_value = x , allowed_values = allowed_values )
        return True
    is_in_allowed_values . __name__ = 'is_in_{}' . format ( allowed_values )
    return is_in_allowed_values
Values in validation_function generator . Returns a validation_function to check that x is in the provided set of allowed values
5,846
def is_subset ( reference_set ) :
    """Generate a validation function checking that x is a subset of
    *reference_set*.

    The returned function returns True on success and raises NotSubset
    (carrying the unsupported elements) otherwise.
    """
    def is_subset_of ( x ) :
        unsupported = x - reference_set
        if unsupported :
            raise NotSubset ( wrong_value = x , reference_set = reference_set , unsupported = unsupported )
        return True
    is_subset_of . __name__ = 'is_subset_of_{}' . format ( reference_set )
    return is_subset_of
Is subset validation_function generator . Returns a validation_function to check that x is a subset of reference_set
5,847
def contains ( ref_value ) :
    """Generate a validation function checking that ref_value is in x.

    The returned function returns True on success and raises
    DoesNotContainValue otherwise.
    """
    def contains_ref_value ( x ) :
        if ref_value not in x :
            raise DoesNotContainValue ( wrong_value = x , ref_value = ref_value )
        return True
    contains_ref_value . __name__ = 'contains_{}' . format ( ref_value )
    return contains_ref_value
Contains validation_function generator . Returns a validation_function to check that ref_value in x
5,848
def is_superset ( reference_set ) :
    """Generate a validation function checking that x is a superset of
    *reference_set*.

    The returned function returns True on success and raises NotSuperset
    (carrying the missing elements) otherwise.
    """
    def is_superset_of ( x ) :
        missing = reference_set - x
        if missing :
            raise NotSuperset ( wrong_value = x , reference_set = reference_set , missing = missing )
        return True
    is_superset_of . __name__ = 'is_superset_of_{}' . format ( reference_set )
    return is_superset_of
Is superset validation_function generator . Returns a validation_function to check that x is a superset of reference_set
5,849
def on_all_ ( * validation_func ) :
    """Generate a validator applying *validation_func* to every element.

    The provided validation functions are combined with an and_ (via
    _process_validation_function_s); the returned function iterates over
    its input and raises InvalidItemInSequence for the first element that
    fails or raises.
    """
    validation_function_func = _process_validation_function_s ( list ( validation_func ) )
    def on_all_val ( x ) :
        for idx , x_elt in enumerate ( x ) :
            try :
                res = validation_function_func ( x_elt )
            except Exception as e :
                # Wrap exceptions raised by the inner validator.
                raise InvalidItemInSequence ( wrong_value = x_elt , wrapped_func = validation_function_func , validation_outcome = e )
            if not result_is_success ( res ) :
                # Wrap falsy (non-success) return values too.
                raise InvalidItemInSequence ( wrong_value = x_elt , wrapped_func = validation_function_func , validation_outcome = res )
        return True
    on_all_val . __name__ = 'apply_<{}>_on_all_elts' . format ( get_callable_name ( validation_function_func ) )
    return on_all_val
Generates a validation_function for collection inputs where each element of the input will be validated against the validation_functions provided . For convenience a list of validation_functions can be provided and will be replaced with an and_ .
5,850
def get_jobs_events_from_sequence ( user , sequence ) :
    """Get all the jobs events starting from a given sequence number.

    Only super admins may call this endpoint; supports sort, limit and
    offset query arguments.
    """
    args = schemas . args ( flask . request . args . to_dict ( ) )
    if user . is_not_super_admin ( ) :
        raise dci_exc . Unauthorized ( )
    query = sql . select ( [ models . JOBS_EVENTS ] ) . select_from (
        models . JOBS_EVENTS . join (
            models . JOBS ,
            models . JOBS . c . id == models . JOBS_EVENTS . c . job_id ) ) . where (
        _TABLE . c . id >= sequence )
    sort_list = v1_utils . sort_query ( args [ 'sort' ] , _JOBS_EVENTS_COLUMNS , default = 'created_at' )
    query = v1_utils . add_sort_to_query ( query , sort_list )
    if args . get ( 'limit' , None ) :
        query = query . limit ( args . get ( 'limit' ) )
    if args . get ( 'offset' , None ) :
        query = query . offset ( args . get ( 'offset' ) )
    rows = flask . g . db_conn . execute ( query ) . fetchall ( )
    query_nb_rows = sql . select ( [ func . count ( models . JOBS_EVENTS . c . id ) ] )
    nb_rows = flask . g . db_conn . execute ( query_nb_rows ) . scalar ( )
    # Fix: was `json.jsonify(...)` — the stdlib json module has no jsonify;
    # this raised AttributeError at runtime. flask.jsonify is intended.
    return flask . jsonify ( { 'jobs_events' : rows , '_meta' : { 'count' : nb_rows } } )
Get all the jobs events from a given sequence number .
5,851
def get_node_text ( self , node ) :
    """Return the concatenated value of all Text children of *node*.

    Returns None when the node has no Text children at all.
    """
    parts = [
        child . nodeValue
        for child in self . get_node_children ( node )
        if child . nodeType == xml . dom . Node . TEXT_NODE
    ]
    return u'' . join ( parts ) if parts else None
Return the concatenated value of all text node children of this element.
5,852
def set_node_text ( self , node , text ) :
    """Set *text* as the sole Text child of *node*.

    All existing Text children are removed first; when *text* is None the
    node is simply left without Text children.
    """
    for child in self . get_node_children ( node ) :
        if child . nodeType == xml . dom . Node . TEXT_NODE :
            self . remove_node_child ( node , child , True )
    if text is not None :
        self . add_node_child ( node , self . new_impl_text ( text ) )
Set text value as sole Text child node of element ; any existing Text nodes are removed
5,853
def _get_contents ( self ) :
    """Expand each glob pattern from the parent bundle into matching paths."""
    paths = [ ]
    for pattern in super ( GlobBundle , self ) . _get_contents ( ) :
        paths . extend ( glob . glob ( pattern ) )
    return paths
Create strings from glob strings .
5,854
def get_none_policy_text ( none_policy , verbose = False ) :
    """Return a description of a NonePolicy / NoneArgPolicy value.

    :param none_policy: one of the NonePolicy or NoneArgPolicy members.
    :param verbose: when True return a full sentence, otherwise the
        short policy name.
    :raises ValueError: for an unknown policy value.
    """
    if none_policy is NonePolicy . SKIP :
        return "accept None without performing validation" if verbose else 'SKIP'
    elif none_policy is NonePolicy . FAIL :
        return "fail on None without performing validation" if verbose else 'FAIL'
    elif none_policy is NonePolicy . VALIDATE :
        return "validate None as any other values" if verbose else 'VALIDATE'
    elif none_policy is NoneArgPolicy . SKIP_IF_NONABLE_ELSE_FAIL :
        return "accept None without validation if the argument is optional, otherwise fail on None" if verbose else 'SKIP_IF_NONABLE_ELSE_FAIL'
    elif none_policy is NoneArgPolicy . SKIP_IF_NONABLE_ELSE_VALIDATE :
        # Note: the two adjacent string literals below are implicitly
        # concatenated into a single message.
        return "accept None without validation if the argument is optional, otherwise validate None as any other " "values" if verbose else 'SKIP_IF_NONABLE_ELSE_VALIDATE'
    else :
        raise ValueError ( 'Invalid none_policy ' + str ( none_policy ) )
Returns a user - friendly description of a NonePolicy taking into account NoneArgPolicy
5,855
def _add_none_handler ( validation_callable , none_policy ) :
    """Wrap *validation_callable* according to the requested None policy.

    SKIP wraps it so None is accepted without validation, FAIL wraps it
    so None is rejected, VALIDATE returns it unchanged.
    """
    if none_policy is NonePolicy . VALIDATE :
        return validation_callable
    if none_policy is NonePolicy . SKIP :
        return _none_accepter ( validation_callable )
    if none_policy is NonePolicy . FAIL :
        return _none_rejecter ( validation_callable )
    raise ValueError ( 'Invalid none_policy : ' + str ( none_policy ) )
Adds a wrapper or nothing around the provided validation_callable depending on the selected policy
5,856
def create_manually(cls, validation_function_name, var_name, var_value, validation_outcome=None, help_msg=None,
                    append_details=True, **kw_context_args):
    """Create an instance of this error type without a pre-existing Validator.

    A dummy validation function carrying *validation_function_name* is
    created so that a throwaway Validator can build the error object.
    """
    def _dummy(x):
        pass
    _dummy.__name__ = validation_function_name
    # a throwaway validator is enough to drive error construction
    validator = Validator(_dummy, error_type=cls, help_msg=help_msg, **kw_context_args)
    return validator._create_validation_error(var_name, var_value, validation_outcome,
                                              error_type=cls, help_msg=help_msg,
                                              **kw_context_args)
Creates an instance without using a Validator .
5,857
def get_variable_str(self):
    """Return ``name=value``, or only the name/value when the other part
    is missing.

    An empty string value is rendered as ``''``; values whose string form
    exceeds ``__max_str_length_displayed__`` are omitted entirely.
    """
    name_part = self.var_name if self.var_name is not None else ''
    value_part = str(self.var_value)
    if not value_part:
        value_part = "''"
    elif len(value_part) > self.__max_str_length_displayed__:
        value_part = ''
    separator = '=' if (name_part and value_part) else ''
    return name_part + separator + value_part
Utility method to get the variable value or var_name = value if name is not None . Note that values with large string representations will not get printed
5,858
def _create_validation_error(self, name, value, validation_outcome=None, error_type=None, help_msg=None,
                             **kw_context_args):
    """Build the validation error instance that will ultimately be raised.

    Falls back to the validator's own error_type/help_msg when not given.
    If error_type is not already a TypeError/ValueError subclass, a new
    type is created dynamically that also inherits from TypeError or
    ValueError depending on the kind of the validation outcome.
    """
    error_type = error_type or self.error_type
    help_msg = help_msg or self.help_msg
    # merge the validator-level context with the call-specific context
    ctx = copy(self.kw_context_args)
    ctx.update(kw_context_args)
    name = self._get_name_for_errors(name)
    if issubclass(error_type, TypeError) or issubclass(error_type, ValueError):
        # already categorized: use as-is
        new_error_type = error_type
    else:
        # pick TypeError/ValueError based on what the inner validator raised
        additional_type = None
        if isinstance(validation_outcome, Exception):
            if is_error_of_type(validation_outcome, TypeError):
                additional_type = TypeError
            elif is_error_of_type(validation_outcome, ValueError):
                additional_type = ValueError
        if additional_type is None:
            # default categorization
            additional_type = ValueError
        new_error_type = add_base_type_dynamically(error_type, additional_type)
    return new_error_type(validator=self, var_value=value, var_name=name,
                          validation_outcome=validation_outcome,
                          help_msg=help_msg, **ctx)
The function doing the final error raising .
5,859
def is_valid(self, value):
    """Return True when *value* passes validation, False otherwise.

    Any exception raised during validation is silently caught and
    reported as a failure.
    """
    try:
        return result_is_success(self.main_function(value))
    except Exception:
        return False
Validates the provided value and returns a boolean indicating success or failure . Any Exception happening in the validation process will be silently caught .
5,860
def create_tags(user):
    """Create a tag.

    Returns a 201 JSON response with the created tag; raises DCIConflict
    when a tag with the same name already exists.
    """
    values = {'id': utils.gen_uuid(),
              'created_at': datetime.datetime.utcnow().isoformat()}
    values.update(schemas.tag.post(flask.request.json))
    with flask.g.db_conn.begin():
        # uniqueness-by-name check inside the transaction
        where_clause = sql.and_(_TABLE.c.name == values['name'])
        query = sql.select([_TABLE.c.id]).where(where_clause)
        if flask.g.db_conn.execute(query).fetchone():
            raise dci_exc.DCIConflict('Tag already exists', values)
        query = _TABLE.insert().values(**values)
        flask.g.db_conn.execute(query)
    result = json.dumps({'tag': values})
    return flask.Response(result, 201, content_type='application/json')
Create a tag .
5,861
def get_tags(user):
    """Return every tag plus a total row count in the _meta section."""
    args = schemas.args(flask.request.args.to_dict())
    builder = v1_utils.QueryBuilder(_TABLE, args, _T_COLUMNS)
    total = builder.get_number_of_rows()
    tag_rows = builder.execute(fetchall=True)
    tag_rows = v1_utils.format_result(tag_rows, _TABLE.name)
    return flask.jsonify({'tags': tag_rows, '_meta': {'count': total}})
Get all tags .
5,862
def load_entrypoint(self, entry_point_group):
    """Register every entry point of *entry_point_group* into the environment."""
    entry_points = pkg_resources.iter_entry_points(entry_point_group)
    for entry_point in entry_points:
        self.env.register(entry_point.name, entry_point.load())
Load entrypoint .
5,863
def reject():
    """Send a 401 response that triggers HTTP basic auth in the client.

    The body is a JSON document describing the failure; the
    WWW-Authenticate header makes browsers prompt for credentials.
    """
    # BUG FIX: the two implicitly-concatenated literals were missing the
    # separating space, producing "...that URL.Please login...".
    auth_message = ('Could not verify your access level for that URL. '
                    'Please login with proper credentials.')
    auth_message = json.dumps({'_status': 'Unauthorized',
                               'message': auth_message})
    headers = {'WWW-Authenticate': 'Basic realm="Login required"'}
    return flask.Response(auth_message, 401, headers=headers,
                          content_type='application/json')
Sends a 401 reject response that enables basic auth .
5,864
def modify(self, **kwargs):
    """Modify this contact via the Pingdom API.

    Unknown keyword arguments are reported on stderr but still sent.
    """
    valid_keys = ['email', 'cellphone', 'countrycode', 'countryiso',
                  'defaultsmsprovider', 'directtwitter', 'twitteruser',
                  'name']
    for key in kwargs:
        if key not in valid_keys:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of <PingdomContact>.modify()\n')
    response = self.pingdom.request('PUT',
                                    'notification_contacts/%s' % self.id,
                                    kwargs)
    return response.json()['message']
Modify a contact .
5,865
def _check(user, topic):
    """Return True when an export-controlled topic is accessible to the user.

    Access is granted to the product team and all teams under it.
    """
    if not topic['export_control']:
        return False
    product = v1_utils.verify_existence_and_get(topic['product_id'],
                                                models.PRODUCTS)
    if user.is_in_team(product['team_id']):
        return True
    return product['team_id'] in user.parent_teams_ids
If the topic has its export_control set to True then all the teams under the product team can access the topic's resources.
5,866
def get_stream_or_content_from_request(request):
    """Return request.data if the stream was already consumed, else the
    raw stream, so the proper content is stored either way."""
    if not request.stream.tell():
        logger.info('Storing file content using request stream.')
        return request.stream
    logger.info('Request stream already consumed. '
                'Storing file content using in-memory data.')
    return request.data
Ensure the proper content is uploaded .
5,867
def gen_etag():
    """Generate a random etag: the MD5 hexdigest of a fresh uuid salt."""
    salt = gen_uuid()
    # normalize the salt to the bytes type md5 expects on this Python
    if six.PY2:
        salt = salt.decode('utf-8')
    elif six.PY3:
        salt = salt.encode('utf-8')
    digest = hashlib.md5()
    digest.update(salt)
    return digest.hexdigest()
Generate random etag based on MD5 .
5,868
def delete(self):
    """Delete this shared (email) report via the Pingdom API."""
    resp = self.pingdom.request('DELETE', 'reports.shared/%s' % self.id)
    return resp.json()['message']
Delete this email report
5,869
def get_all_components(user, topic_id):
    """Get all non-archived components of a topic.

    Non super-admin users only see components whose export_control flag
    is set.
    """
    args = schemas.args(flask.request.args.to_dict())
    query = v1_utils.QueryBuilder(_TABLE, args, _C_COLUMNS)
    query.add_extra_condition(sql.and_(_TABLE.c.topic_id == topic_id,
                                       _TABLE.c.state != 'archived'))
    nb_rows = query.get_number_of_rows()
    rows = query.execute(fetchall=True)
    rows = v1_utils.format_result(rows, _TABLE.name, args['embed'],
                                  _EMBED_MANY)
    if user.is_not_super_admin():
        # regular users are restricted to exported components
        rows = [row for row in rows if row['export_control']]
    return flask.jsonify({'components': rows, '_meta': {'count': nb_rows}})
Get all components of a topic .
5,870
def get_component_types_from_topic(topic_id, db_conn=None):
    """Return the component_types list stored on the given topic."""
    db_conn = db_conn or flask.g.db_conn
    query = sql.select([models.TOPICS]).where(models.TOPICS.c.id == topic_id)
    row = db_conn.execute(query).fetchone()
    topic = dict(row)
    return topic['component_types']
Returns the component types of a topic .
5,871
def get_component_types(topic_id, remoteci_id, db_conn=None):
    """Return (component_types, rconfiguration) for a remoteci in a topic.

    Component types defined on the remoteci configuration take precedence
    over the ones defined on the topic itself.
    """
    db_conn = db_conn or flask.g.db_conn
    rconfiguration = remotecis.get_remoteci_configuration(topic_id,
                                                         remoteci_id,
                                                         db_conn=db_conn)
    use_rconfiguration = (rconfiguration is not None and
                          rconfiguration['component_types'] is not None)
    if use_rconfiguration:
        component_types = rconfiguration['component_types']
    else:
        component_types = get_component_types_from_topic(topic_id,
                                                         db_conn=db_conn)
    return component_types, rconfiguration
Returns either the topic's component types or the rconfiguration's component types.
5,872
def get_last_components_by_type(component_types, topic_id, db_conn=None):
    """For each component type of a topic, get the id of the most recent
    active, exported component.

    :raises dci_exc.DCIException: (412) when a type has no matching
        component, or when the resulting ids contain a duplicate.
    """
    db_conn = db_conn or flask.g.db_conn
    schedule_components_ids = []
    for ct in component_types:
        where_clause = sql.and_(models.COMPONENTS.c.type == ct,
                                models.COMPONENTS.c.topic_id == topic_id,
                                models.COMPONENTS.c.export_control == True,
                                models.COMPONENTS.c.state == 'active')
        # newest first; only the first row is consumed
        query = (sql.select([models.COMPONENTS.c.id])
                 .where(where_clause)
                 .order_by(sql.desc(models.COMPONENTS.c.created_at)))
        cmpt_id = db_conn.execute(query).fetchone()
        if cmpt_id is None:
            msg = 'Component of type "%s" not found or not exported.' % ct
            raise dci_exc.DCIException(msg, status_code=412)
        cmpt_id = cmpt_id[0]
        if cmpt_id in schedule_components_ids:
            msg = ('Component types %s malformed: type %s duplicated.' %
                   (component_types, ct))
            raise dci_exc.DCIException(msg, status_code=412)
        schedule_components_ids.append(cmpt_id)
    return schedule_components_ids
For each component type of a topic get the last one .
5,873
def verify_and_get_components_ids(topic_id, components_ids, component_types,
                                  db_conn=None):
    """Verify the provided component ids against the topic.

    Each id must exist, belong to *topic_id*, be active and exported, and
    cover a distinct component type.

    :raises dci_exc.DCIException: (412) on any violation.
    """
    db_conn = db_conn or flask.g.db_conn
    if len(components_ids) != len(component_types):
        msg = 'The number of component ids does not match the number ' \
              'of component types %s' % component_types
        raise dci_exc.DCIException(msg, status_code=412)
    # verify that the provided ids cover each type at most once
    schedule_component_types = set()
    for c_id in components_ids:
        where_clause = sql.and_(models.COMPONENTS.c.id == c_id,
                                models.COMPONENTS.c.topic_id == topic_id,
                                models.COMPONENTS.c.export_control == True,
                                models.COMPONENTS.c.state == 'active')
        query = (sql.select([models.COMPONENTS]).where(where_clause))
        cmpt = db_conn.execute(query).fetchone()
        if cmpt is None:
            msg = 'Component id %s not found or not exported' % c_id
            raise dci_exc.DCIException(msg, status_code=412)
        cmpt = dict(cmpt)
        if cmpt['type'] in schedule_component_types:
            msg = ('Component types malformed: type %s duplicated.' %
                   cmpt['type'])
            raise dci_exc.DCIException(msg, status_code=412)
        schedule_component_types.add(cmpt['type'])
    return components_ids
Process some verifications of the provided components ids .
5,874
def retrieve_tags_from_component(user, c_id):
    """List all tags attached to component *c_id*."""
    join_table = models.JOIN_COMPONENTS_TAGS
    query = (sql.select([models.TAGS])
             .select_from(join_table.join(models.TAGS))
             .where(join_table.c.component_id == c_id))
    result = flask.g.db_conn.execute(query)
    return flask.jsonify({'tags': result,
                          '_meta': {'count': result.rowcount}})
Retrieve all tags attached to a component .
5,875
def add_tag_for_component(user, c_id):
    """Attach a tag to component *c_id*; returns the association as 201."""
    v1_utils.verify_existence_and_get(c_id, _TABLE)
    association = tags.add_tag_to_resource({'component_id': c_id},
                                           models.JOIN_COMPONENTS_TAGS)
    return flask.Response(json.dumps(association), 201,
                          content_type='application/json')
Add a tag on a specific component .
5,876
def delete_tag_for_component(user, c_id, tag_id):
    """Detach tag *tag_id* from component *c_id*; returns an empty 204.

    :raises dci_exc.DCICreationConflict: on integrity errors.
    """
    # BUG FIX: the original combined the two clauses with the Python
    # `and` operator, which does not build a SQL AND condition; use
    # sql.and_() as delete_tag_from_job() does for jobs.
    query = _TABLE_TAGS.delete().where(
        sql.and_(_TABLE_TAGS.c.tag_id == tag_id,
                 _TABLE_TAGS.c.component_id == c_id))
    try:
        flask.g.db_conn.execute(query)
    except sa_exc.IntegrityError:
        raise dci_exc.DCICreationConflict(_TABLE_TAGS.c.tag_id, 'tag_id')
    return flask.Response(None, 204, content_type='application/json')
Delete a tag on a specific component .
5,877
def should_be_hidden_as_cause(exc):
    """Return True when *exc* is a type-check failure that should not be
    displayed as the cause of an error."""
    from valid8.validation_lib.types import HasWrongType, IsWrongType
    return isinstance(exc, HasWrongType) or isinstance(exc, IsWrongType)
Used everywhere to decide if some exception type should be displayed or hidden as the cause of an error
5,878
def _failure_raiser(validation_callable, failure_type=None, help_msg=None, **kw_context_args):
    """Wrap *validation_callable* so that a failed outcome raises
    *failure_type* (or WrappingFailure) instead of being returned.

    :raises ValueError: if both failure_type and help_msg are provided.
    """
    if failure_type is not None and help_msg is not None:
        raise ValueError('Only one of failure_type and help_msg can be set at the same time')
    validation_callable = as_function(validation_callable)

    def raiser(x):
        # an exception from the inner validator counts as a failed outcome
        try:
            outcome = validation_callable(x)
        except Exception as e:
            outcome = e
        if result_is_success(outcome):
            return
        exc_type = failure_type or WrappingFailure
        raise exc_type(wrapped_func=validation_callable, wrong_value=x,
                       validation_outcome=outcome, help_msg=help_msg,
                       **kw_context_args)

    raiser.__name__ = get_callable_name(validation_callable)
    return raiser
Wraps the provided validation function so that in case of failure it raises the given failure_type or a WrappingFailure with the given help message .
5,879
def _none_accepter(validation_callable):
    """Wrap a validator so that None is accepted silently.

    None short-circuits to True; any other value goes through the
    wrapped validator unchanged.
    """
    def accept_none(x):
        if x is None:
            return True
        return validation_callable(x)

    accept_none.__name__ = 'skip_on_none({})'.format(get_callable_name(validation_callable))
    return accept_none
Wraps the given validation callable to accept None values silently . When a None value is received by the wrapper it is not passed to the validation_callable and instead this function will return True . When any other value is received the validation_callable is called as usual .
5,880
def _none_rejecter(validation_callable):
    """Wrap a validator so that None is rejected outright.

    None raises ValueIsNone; any other value goes through the wrapped
    validator unchanged.
    """
    def reject_none(x):
        if x is None:
            raise ValueIsNone(wrong_value=x)
        return validation_callable(x)

    reject_none.__name__ = 'reject_none({})'.format(get_callable_name(validation_callable))
    return reject_none
Wraps the given validation callable to reject None values . When a None value is received by the wrapper it is not passed to the validation_callable and instead this function will raise a WrappingFailure . When any other value is received the validation_callable is called as usual .
5,881
def get_help_msg(self, dotspace_ending=False, **kwargs):
    """Return the help message formatted with the given context.

    Placeholders whose context value is too long to display are replaced
    by a "(too big for display)" marker. Returns '' when no help_msg is
    defined.

    :raises HelpMsgFormattingException: when the template references a
        variable missing from the context.
    """
    context = self.get_context_for_help_msgs(kwargs)
    if self.help_msg is not None and len(self.help_msg) > 0:
        context = copy(context)
        try:
            help_msg = self.help_msg
            # collect all {var} placeholders present in the template
            variables = re.findall("{\S+}", help_msg)
            for v in set(variables):
                v = v[1:-1]
                if v in context and len(str(context[v])) > self.__max_str_length_displayed__:
                    # oversized value: swap the placeholder for a marker
                    new_name = '@@@@' + v + '@@@@'
                    help_msg = help_msg.replace('{' + v + '}', '{' + new_name + '}')
                    context[new_name] = "(too big for display)"
            help_msg = help_msg.format(**context)
        except KeyError as e:
            raise HelpMsgFormattingException(self.help_msg, e, context)
        if dotspace_ending:
            return end_with_dot_space(help_msg)
        else:
            return help_msg
    else:
        return ''
The method used to get the formatted help message according to kwargs . By default it returns the help_msg attribute whether it is defined at the instance level or at the class level .
5,882
def get_details(self):
    """Return a detail string describing the wrapped function and its
    outcome (exception raised or non-success value returned)."""
    if isinstance(self.validation_outcome, Exception):
        if isinstance(self.validation_outcome, Failure):
            # a Failure already describes the value; avoid repeating it
            end_str = ''
        else:
            end_str = ' for value [{value}]'.format(value=self.wrong_value)
        contents = 'Function [{wrapped}] raised [{exception}: {details}]{end}.' \
                   ''.format(wrapped=get_callable_name(self.wrapped_func),
                             exception=type(self.validation_outcome).__name__,
                             details=self.validation_outcome, end=end_str)
    else:
        contents = 'Function [{wrapped}] returned [{result}] for value [{value}].' \
                   ''.format(wrapped=get_callable_name(self.wrapped_func),
                             result=self.validation_outcome,
                             value=self.wrong_value)
    return contents
Overrides the method in Failure so as to add a few details about the wrapped function and outcome
5,883
def get_context_for_help_msgs(self, context_dict):
    """Return a copy of *context_dict* where wrapped_func is replaced by
    its printable name."""
    ctx = copy(context_dict)
    ctx['wrapped_func'] = get_callable_name(ctx['wrapped_func'])
    return ctx
We override this method from HelpMsgMixIn to replace wrapped_func with its name
5,884
def decorate_several_with_validation(func, _out_=None, none_policy=None, **validation_funcs):
    """Apply decorate_with_validation once for the output (_out_) and once
    per named argument in *validation_funcs*."""
    if _out_ is not None:
        func = decorate_with_validation(func, _OUT_KEY, _out_,
                                        none_policy=none_policy)
    for arg_name, arg_validators in validation_funcs.items():
        func = decorate_with_validation(func, arg_name, arg_validators,
                                        none_policy=none_policy)
    return func
This method is equivalent to applying decorate_with_validation once for each of the provided arguments of the function func as well as output _out_ . validation_funcs keyword arguments are validation functions for each arg name .
5,885
def _get_final_none_policy_for_validator(is_nonable, none_policy):
    """Resolve a (possibly argument-dependent) NoneArgPolicy into a
    concrete NonePolicy for this parameter.

    :raises ValueError: for unknown policies.
    """
    if none_policy in {NonePolicy.VALIDATE, NonePolicy.SKIP, NonePolicy.FAIL}:
        return none_policy
    if none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE:
        return NonePolicy.SKIP if is_nonable else NonePolicy.VALIDATE
    if none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_FAIL:
        return NonePolicy.SKIP if is_nonable else NonePolicy.FAIL
    raise ValueError('Invalid none policy: ' + str(none_policy))
Depending on none_policy and of the fact that the target parameter is nonable or not returns a corresponding NonePolicy
5,886
def decorate_with_validators(func, func_signature=None, **validators):
    """Decorate *func* with the provided input/output Validator objects.

    Keys of *validators* are argument names (plus _OUT_KEY for the return
    value); values are a Validator or a list of Validators. If *func* is
    already a validating wrapper, the new validators are merged into its
    registry instead of wrapping again.
    """
    # normalize every entry to a list of validators
    for arg_name, validator in validators.items():
        if not isinstance(validator, list):
            validators[arg_name] = [validator]
    if hasattr(func, '__wrapped__') and hasattr(func.__wrapped__, '__validators__'):
        # already wrapped: merge into the existing registry and return as-is
        for arg_name, validator in validators.items():
            for v in validator:
                if arg_name in func.__wrapped__.__validators__:
                    func.__wrapped__.__validators__[arg_name].append(v)
                else:
                    func.__wrapped__.__validators__[arg_name] = [v]
        return func
    else:
        if hasattr(func, '__validators__'):
            # a foreign __validators__ attribute would be clobbered: refuse
            raise ValueError('Function ' + str(func) + ' already has a defined __validators__ attribute, valid8 '
                             'decorators can not be applied on it')
        else:
            try:
                func.__validators__ = validators
            except AttributeError:
                raise ValueError("Error - Could not add validators list to function '%s'" % func)
        func_signature = func_signature or signature(func)

        @wraps(func)
        def validating_wrapper(*args, **kwargs):
            # validate each input, call the function, then validate output
            apply_on_each_func_args_sig(func, args, kwargs, func_signature,
                                        func_to_apply=_assert_input_is_valid,
                                        func_to_apply_params_dict=func.__validators__)
            res = func(*args, **kwargs)
            if _OUT_KEY in func.__validators__:
                for validator in func.__validators__[_OUT_KEY]:
                    validator.assert_valid(res)
            return res

        return validating_wrapper
Utility method to decorate the provided function with the provided input and output Validator objects . Since this method takes Validator objects as argument it is for advanced users .
5,887
def generate_nonce_timestamp():
    """Return a unique 41-byte nonce built from a global counter, a uuid4
    and 30 random bytes from botan's RNG."""
    global count
    random_bytes = botan.rng().get(30)
    unique_id = uuid.uuid4().bytes
    counter_bytes = bytes(str(count).encode('utf-8'))
    nonce = (counter_bytes + unique_id + random_bytes)[:41]
    count += 1
    return nonce
Generate unique nonce with counter uuid and rng .
5,888
def schedule_jobs(user):
    """Dispatch a new job to the calling remoteci.

    Validates that the remoteci and topic(s) are active and that the
    caller's team may use the topic(s). With dry_run set, only the
    component ids that would be used are returned (no job is created).
    """
    values = schemas.job_schedule.post(flask.request.json)
    values.update({
        'id': utils.gen_uuid(),
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'status': 'new',
        'remoteci_id': user.id,
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get('HTTP_CLIENT_VERSION'),
    })
    topic_id = values.pop('topic_id')
    topic_id_secondary = values.pop('topic_id_secondary')
    components_ids = values.pop('components_ids')
    remoteci = v1_utils.verify_existence_and_get(user.id, models.REMOTECIS)
    if remoteci['state'] != 'active':
        message = 'RemoteCI "%s" is disabled.' % remoteci['id']
        raise dci_exc.DCIException(message, status_code=412)
    topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
    if topic['state'] != 'active':
        msg = 'Topic %s:%s not active.' % (topic_id, topic['name'])
        raise dci_exc.DCIException(msg, status_code=412)
    v1_utils.verify_team_in_topic(user, topic_id)
    if topic_id_secondary:
        topic_secondary = v1_utils.verify_existence_and_get(topic_id_secondary,
                                                            models.TOPICS)
        if topic_secondary['state'] != 'active':
            # NOTE(review): the message uses the primary topic's name here;
            # possibly topic_secondary['name'] was intended — confirm.
            msg = 'Topic %s:%s not active.' % (topic_id_secondary,
                                               topic['name'])
            raise dci_exc.DCIException(msg, status_code=412)
        v1_utils.verify_team_in_topic(user, topic_id_secondary)
    dry_run = values.pop('dry_run')
    if dry_run:
        # dry run: report which components would be scheduled, create no job
        component_types = components.get_component_types_from_topic(topic_id)
        components_ids = components.get_last_components_by_type(component_types,
                                                                topic_id)
        return flask.Response(json.dumps({'components_ids': components_ids,
                                          'job': None}),
                              201, content_type='application/json')
    # a remoteci runs at most one job at a time
    remotecis.kill_existing_jobs(remoteci['id'])
    values = _build_job(topic_id, remoteci, components_ids, values,
                        topic_id_secondary=topic_id_secondary)
    return flask.Response(json.dumps({'job': values}), 201,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
Dispatch jobs to remotecis .
5,889
def create_new_update_job_from_an_existing_job(user, job_id):
    """Create a new job in the same topic as *job_id*, associated with the
    latest components of that topic.

    :raises dci_exc.Unauthorized: if the user is not in the original
        job's team.
    """
    values = {'id': utils.gen_uuid(),
              'created_at': datetime.datetime.utcnow().isoformat(),
              'updated_at': datetime.datetime.utcnow().isoformat(),
              'etag': utils.gen_etag(),
              'status': 'new'}
    original_job_id = job_id
    original_job = v1_utils.verify_existence_and_get(original_job_id,
                                                     models.JOBS)
    if not user.is_in_team(original_job['team_id']):
        raise dci_exc.Unauthorized()
    # reuse the original job's remoteci and topic
    remoteci_id = str(original_job['remoteci_id'])
    remoteci = v1_utils.verify_existence_and_get(remoteci_id,
                                                 models.REMOTECIS)
    values.update({'remoteci_id': remoteci_id})
    topic_id = str(original_job['topic_id'])
    v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
    values.update({
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get('HTTP_CLIENT_VERSION'),
    })
    values = _build_job(topic_id, remoteci, [], values,
                        update_previous_job_id=original_job_id)
    return flask.Response(json.dumps({'job': values}), 201,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
Create a new job in the same topic as the job_id provided and associate the latest components of this topic .
5,890
def create_new_upgrade_job_from_an_existing_job(user):
    """Create a new job in the next topic of the provided job's topic.

    :raises dci_exc.Unauthorized: if the user is not in the original
        job's team.
    :raises dci_exc.DCIException: if the topic has no next topic.
    """
    values = schemas.job_upgrade.post(flask.request.json)
    values.update({'id': utils.gen_uuid(),
                   'created_at': datetime.datetime.utcnow().isoformat(),
                   'updated_at': datetime.datetime.utcnow().isoformat(),
                   'etag': utils.gen_etag(),
                   'status': 'new'})
    original_job_id = values.pop('job_id')
    original_job = v1_utils.verify_existence_and_get(original_job_id,
                                                     models.JOBS)
    if not user.is_in_team(original_job['team_id']):
        raise dci_exc.Unauthorized()
    # reuse the original job's remoteci; the topic moves to its successor
    remoteci_id = str(original_job['remoteci_id'])
    remoteci = v1_utils.verify_existence_and_get(remoteci_id,
                                                 models.REMOTECIS)
    values.update({'remoteci_id': remoteci_id})
    topic_id = str(original_job['topic_id'])
    topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
    values.update({
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get('HTTP_CLIENT_VERSION'),
    })
    next_topic_id = topic['next_topic_id']
    if not next_topic_id:
        raise dci_exc.DCIException(
            "topic %s does not contains a next topic" % topic_id)
    values = _build_job(next_topic_id, remoteci, [], values,
                        previous_job_id=original_job_id)
    return flask.Response(json.dumps({'job': values}), 201,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
Create a new job in the next topic of the topic of the provided job_id .
5,891
def get_all_jobs(user, topic_id=None):
    """Get all non-archived jobs, optionally restricted to a topic.

    Regular users only see jobs belonging to their teams or child teams.
    """
    args = schemas.args(flask.request.args.to_dict())
    query = v1_utils.QueryBuilder(_TABLE, args, _JOBS_COLUMNS)
    if user.is_not_super_admin() and not user.is_read_only_user():
        # restrict to the user's own and child teams
        query.add_extra_condition(
            sql.or_(_TABLE.c.team_id.in_(user.teams_ids),
                    _TABLE.c.team_id.in_(user.child_teams_ids)))
    if topic_id is not None:
        query.add_extra_condition(_TABLE.c.topic_id == topic_id)
    query.add_extra_condition(_TABLE.c.state != 'archived')
    nb_rows = query.get_number_of_rows()
    rows = query.execute(fetchall=True)
    rows = v1_utils.format_result(rows, _TABLE.name, args['embed'],
                                  _EMBED_MANY)
    return flask.jsonify({'jobs': rows, '_meta': {'count': nb_rows}})
Get all jobs .
5,892
def update_job_by_id(user, job_id):
    """Update a job (optimistic locking via the If-Match etag).

    A status change records a new jobstate and, for final statuses, a
    job event.

    :raises dci_exc.Unauthorized: if the user is not in the job's team.
    :raises dci_exc.DCIConflict: when the etag does not match.
    """
    if_match_etag = utils.check_and_get_etag(flask.request.headers)
    values = schemas.job.put(flask.request.json)
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    job = dict(job)
    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()
    status = values.get('status')
    if status and job.get('status') != status:
        # track the status transition
        jobstates.insert_jobstate(user, {'status': status,
                                         'job_id': job_id})
        if status in models.FINAL_STATUSES:
            jobs_events.create_event(job_id, status, job['topic_id'])
    where_clause = sql.and_(_TABLE.c.etag == if_match_etag,
                            _TABLE.c.id == job_id)
    values['etag'] = utils.gen_etag()
    query = (_TABLE.update()
             .returning(*_TABLE.columns)
             .where(where_clause)
             .values(**values))
    result = flask.g.db_conn.execute(query)
    if not result.rowcount:
        # etag mismatch: someone updated the job concurrently
        raise dci_exc.DCIConflict('Job', job_id)
    return flask.Response(json.dumps({'job': result.fetchone()}), 200,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
Update a job
5,893
def get_all_results_from_jobs(user, j_id):
    """Get all test results attached to job *j_id*.

    :raises dci_exc.Unauthorized: if the user is neither in the job's
        team nor a read-only user.
    """
    job = v1_utils.verify_existence_and_get(j_id, _TABLE)
    if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
        raise dci_exc.Unauthorized()
    query = sql.select([models.TESTS_RESULTS]) \
        .where(models.TESTS_RESULTS.c.job_id == job['id'])
    all_tests_results = flask.g.db_conn.execute(query).fetchall()
    results = []
    for test_result in all_tests_results:
        test_result = dict(test_result)
        # flatten the DB row into the public result shape
        results.append({'filename': test_result['name'],
                        'name': test_result['name'],
                        'total': test_result['total'],
                        'failures': test_result['failures'],
                        'errors': test_result['errors'],
                        'skips': test_result['skips'],
                        'time': test_result['time'],
                        'regressions': test_result['regressions'],
                        'successfixes': test_result['successfixes'],
                        'success': test_result['success'],
                        'file_id': test_result['file_id']})
    return flask.jsonify({'results': results,
                          '_meta': {'count': len(results)}})
Get all results from job .
5,894
def get_tags_from_job(user, job_id):
    """List tags attached to a job; the caller must be in the job's team
    or be a read-only user."""
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
        raise dci_exc.Unauthorized()
    join_table = models.JOIN_JOBS_TAGS
    query = (sql.select([models.TAGS])
             .select_from(join_table.join(models.TAGS))
             .where(join_table.c.job_id == job_id))
    result = flask.g.db_conn.execute(query)
    return flask.jsonify({'tags': result,
                          '_meta': {'count': result.rowcount}})
Retrieve all tags attached to a job .
5,895
def add_tag_to_job(user, job_id):
    """Attach a tag to a job owned by one of the caller's teams."""
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()
    association = tags.add_tag_to_resource({'job_id': job_id},
                                           models.JOIN_JOBS_TAGS)
    return flask.Response(json.dumps(association), 201,
                          content_type='application/json')
Add a tag to a job .
5,896
def delete_tag_from_job(user, job_id, tag_id):
    """Detach tag *tag_id* from job *job_id*; returns an empty 204.

    :raises dci_exc.Unauthorized: if the user is not in the job's team.
    :raises dci_exc.DCICreationConflict: on integrity errors.
    """
    _JJT = models.JOIN_JOBS_TAGS
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()
    v1_utils.verify_existence_and_get(tag_id, models.TAGS)
    query = _JJT.delete().where(sql.and_(_JJT.c.tag_id == tag_id,
                                         _JJT.c.job_id == job_id))
    try:
        flask.g.db_conn.execute(query)
    except sa_exc.IntegrityError:
        raise dci_exc.DCICreationConflict('tag', 'tag_id')
    return flask.Response(None, 204, content_type='application/json')
Delete a tag from a job .
5,897
def _is_node_an_element(self, node):
    """Return True if *node* is an ElementTree Element.

    This can be tricky to determine when the cElementTree implementation
    is used, since its elements are not instances of BaseET.Element; in
    that case fall back to duck-typing (makeelement + string tag).
    """
    if isinstance(node, BaseET.Element):
        return True
    if hasattr(node, 'makeelement') and isinstance(node.tag, basestring):
        return True
    # BUG FIX: previously fell off the end and returned None implicitly;
    # return an explicit boolean (still falsy, so callers are unaffected).
    return False
Return True if the given node is an ElementTree Element a fact that can be tricky to determine if the cElementTree implementation is used .
5,898
def get_to_purge_archived_resources(user, table):
    """List the archived entries of *table* awaiting purge.

    Only super admins may call this endpoint.
    """
    if user.is_not_super_admin():
        raise dci_exc.Unauthorized()
    archived = get_archived_resources(table)
    return flask.jsonify({table.name: archived,
                          '_meta': {'count': len(archived)}})
List the entries to be purged from the database .
5,899
def purge_archived_resources(user, table):
    """Delete every archived entry of *table*; returns an empty 204.

    Only super admins may call this endpoint.
    """
    if user.is_not_super_admin():
        raise dci_exc.Unauthorized()
    # sql.and_() with a single clause was redundant; the condition can be
    # passed to where() directly — the emitted SQL is identical.
    query = table.delete().where(table.c.state == 'archived')
    flask.g.db_conn.execute(query)
    return flask.Response(None, 204, content_type='application/json')
Remove the entries to be purged from the database .