idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,800
def lookup(self, path, must_be_leaf=False):
    """Look up a part of the color scheme by dotted *path*.

    :param path: dot-separated key path, e.g. ``'tokens.comment'``.
    :param must_be_leaf: when True, assert that the resolved value is a
        string (an actual color), not a nested mapping.
    :returns: the value stored under *path* in ``self.color_scheme``.
    :raises Exception: if an intermediate key on the path is missing.
    """
    assert (type(path) == type(self.name))
    d = self.color_scheme
    tokens = path.split('.')
    for t in tokens[:-1]:
        d = d.get(t)
        if d is None:
            # BUG FIX: the original raised with an unfilled '%s' placeholder.
            raise Exception("Path '%s' not found." % path)
    if must_be_leaf:
        assert (type(d[tokens[-1]]) == type(self.name))
    return d[tokens[-1]]
Looks up a part of the color scheme . If used for looking up colors must_be_leaf should be True .
12,801
def resolve_pid(fetched_pid):
    """Retrieve the real PID given a fetched PID."""
    provider = fetched_pid.provider.pid_provider
    return PersistentIdentifier.get(
        pid_type=fetched_pid.pid_type,
        pid_value=fetched_pid.pid_value,
        pid_provider=provider,
    )
Retrieve the real PID given a fetched PID .
12,802
def ordered(self, ord='desc'):
    """Order the query result on the relation indexes.

    :param ord: sort direction, ``'asc'`` or ``'desc'``.
    :returns: the query ordered accordingly.
    :raises ValueError: if *ord* is not a valid direction.
    """
    if ord not in ('asc', 'desc',):
        # BUG FIX: a bare ``raise`` with no active exception raises
        # RuntimeError('No active exception to re-raise'); raise a
        # meaningful error instead.
        raise ValueError("ord must be 'asc' or 'desc', got %r" % (ord,))
    ord_f = getattr(PIDRelation.index, ord)()
    return self.order_by(ord_f)
Order the query result on the relations indexes .
12,803
def status(self, status_in):
    """Filter the PIDs based on their status."""
    statuses = [status_in, ] if isinstance(status_in, PIDStatus) else status_in
    return self.filter(self._filtered_pid_class.status.in_(statuses))
Filter the PIDs based on their status .
12,804
def _resolved_pid(self):
    """Resolve ``self.pid`` if it is a fetched PID."""
    pid = self.pid
    if isinstance(pid, PersistentIdentifier):
        return pid
    return resolve_pid(pid)
Resolve self . pid if it is a fetched pid .
12,805
def _get_child_relation(self, child_pid):
    """Retrieve the relation between this node and a child PID."""
    query = PIDRelation.query.filter_by(
        parent=self._resolved_pid,
        child=child_pid,
        relation_type=self.relation_type.id,
    )
    return query.one()
Retrieve the relation between this node and a child PID .
12,806
def _check_child_limits(self, child_pid):
    """Check that inserting *child_pid* stays within the configured limits.

    A limit of None means "unbounded".

    :raises PIDRelationConsistencyError: if adding the child would exceed
        ``self.max_children`` on this parent, or would give the child more
        than ``self.max_parents`` parents.
    """
    if self.max_children is not None and self.children.count() >= self.max_children:
        raise PIDRelationConsistencyError(
            "Max number of children is set to {}.".format(self.max_children))
    if self.max_parents is not None and PIDRelation.query.filter_by(
            child=child_pid,
            relation_type=self.relation_type.id).count() >= self.max_parents:
        raise PIDRelationConsistencyError(
            "This pid already has the maximum number of parents.")
Check that inserting a child is within the limits .
12,807
def _connected_pids(self, from_parent=True):
    """Follow a relationship to find connected PIDs.

    :param from_parent: when True walk parent->children, otherwise
        child->parents.
    :returns: a ``PIDQuery`` over the connected ``PersistentIdentifier`` s.
    """
    to_pid = aliased(PersistentIdentifier, name='to_pid')
    # Pick which side of the relation we start from and which we resolve to.
    if from_parent:
        to_relation = PIDRelation.child_id
        from_relation = PIDRelation.parent_id
    else:
        to_relation = PIDRelation.parent_id
        from_relation = PIDRelation.child_id
    query = PIDQuery(
        [to_pid], db.session(), _filtered_pid_class=to_pid
    ).join(PIDRelation, to_pid.id == to_relation)
    if isinstance(self.pid, PersistentIdentifier):
        # Real PID: filter directly on its primary key.
        query = query.filter(from_relation == self.pid.id)
    else:
        # Fetched PID: join on (pid_value, pid_type) instead of the id.
        from_pid = aliased(PersistentIdentifier, name='from_pid')
        query = query.join(from_pid, from_pid.id == from_relation).filter(
            from_pid.pid_value == self.pid.pid_value,
            from_pid.pid_type == self.pid.pid_type,
        )
    return query
Follow a relationship to find connected PIDs .
12,808
def insert_child(self, child_pid):
    """Add the given PID to the list of children PIDs.

    :param child_pid: PID (or fetched PID, which is resolved first).
    :returns: the created ``PIDRelation``.
    :raises PIDRelationConsistencyError: if the relation already exists or
        a child/parent limit would be exceeded.
    """
    self._check_child_limits(child_pid)
    try:
        # Nested transaction so a failed insert rolls back cleanly.
        with db.session.begin_nested():
            if not isinstance(child_pid, PersistentIdentifier):
                child_pid = resolve_pid(child_pid)
            return PIDRelation.create(
                self._resolved_pid, child_pid, self.relation_type.id, None)
    except IntegrityError:
        raise PIDRelationConsistencyError("PID Relation already exists.")
Add the given PID to the list of children PIDs .
12,809
def index(self, child_pid):
    """Return the index of *child_pid* in the relation."""
    if not isinstance(child_pid, PersistentIdentifier):
        child_pid = resolve_pid(child_pid)
    rel = PIDRelation.query.filter_by(
        parent=self._resolved_pid,
        child=child_pid,
        relation_type=self.relation_type.id,
    ).one()
    return rel.index
Index of the child in the relation .
12,810
def is_last_child(self, child_pid):
    """Determine whether *child_pid* is the latest version of a resource."""
    last = self.last_child
    return last is not None and last == child_pid
Determine if pid is the latest version of a resource .
12,811
def last_child(self):
    """Get the latest child PID (the one with the highest index)."""
    indexed_children = self.children.filter(PIDRelation.index.isnot(None))
    return indexed_children.ordered().first()
Get the latest PID as pointed by the Head PID .
12,812
def next_child(self, child_pid):
    """Get the next child PID in the PID relation, or None."""
    relation = self._get_child_relation(child_pid)
    if relation.index is None:
        return None
    later = self.children.filter(PIDRelation.index > relation.index)
    return later.ordered(ord='asc').first()
Get the next child PID in the PID relation .
12,813
def insert_child(self, child_pid, index=-1):
    """Insert a new child into a PID concept at *index*.

    :param child_pid: PID (or fetched PID, which is resolved first).
    :param index: target position; ``-1`` (or None) appends at the end.
    :raises PIDRelationConsistencyError: if the relation already exists
        or a child/parent limit would be exceeded.
    """
    self._check_child_limits(child_pid)
    if index is None:
        index = -1
    try:
        with db.session.begin_nested():
            if not isinstance(child_pid, PersistentIdentifier):
                child_pid = resolve_pid(child_pid)
            # Existing children of this relation type, in index order.
            child_relations = self._resolved_pid.child_relations.filter(
                PIDRelation.relation_type == self.relation_type.id).order_by(
                    PIDRelation.index).all()
            relation_obj = PIDRelation.create(
                self._resolved_pid, child_pid, self.relation_type.id, None)
            if index == -1:
                child_relations.append(relation_obj)
            else:
                child_relations.insert(index, relation_obj)
            # Re-number all siblings so indexes stay dense (0..n-1).
            for idx, c in enumerate(child_relations):
                c.index = idx
    except IntegrityError:
        raise PIDRelationConsistencyError("PID Relation already exists.")
Insert a new child into a PID concept .
12,814
def asset(url=None):
    """Generate the path to a static asset.

    Uses the configured base path (``ASSETS_PATH``) so assets can be moved
    to a CDN without changing templates; ``ASSETS_VERSION`` is appended as
    a ``v`` query argument for cache busting.

    :param url: asset path relative to the static root.
    :returns: the asset URL, optionally versioned.
    """
    # BUG FIX: the default url=None crashed on .lstrip(); treat it as ''.
    url = (url or '').lstrip('/')
    assets_path = app.config.get('ASSETS_PATH')
    if not assets_path:
        url_for = app.jinja_env.globals.get('url_for')
        url = url_for('static', filename=url)
    else:
        assets_path = assets_path.rstrip('/')
        url = assets_path + '/' + url
    version = app.config.get('ASSETS_VERSION')
    if not version:
        return url
    # Use '&' when the URL already carries a query string.
    sign = '?'
    if sign in url:
        sign = '&'
    pattern = '{url}{sign}v{version}'
    return pattern.format(url=url, sign=sign, version=version)
Asset helper Generates path to a static asset based on configuration base path and support for versioning . Will easily allow you to move your assets away to a CDN without changing templates . Versioning allows you to cache your asset changes forever by the webserver .
12,815
def pick_a_model_randomly(models: List[Any]) -> Any:
    """Naive picker: return one of *models* chosen uniformly at random."""
    try:
        chosen = random.choice(models)
    except IndexError as err:
        raise ModelPickerException(cause=err)
    return chosen
Naive picking function return one of the models chosen randomly .
12,816
def link(origin=None, rel=None, value=None, attributes=None, source=None):
    """Action function generator: create link(s) based on the context's
    current link or on the provided parameters.

    :param origin: literal, list or callable(ctx); None falls back to the
        origin slot of ``ctx.current_link`` (same for *rel* and *value*).
    :param attributes: extra attributes dict for the created link(s).
    :param source: optional pattern action function producing contexts
        whose current links are copied to the output model.
    """
    attributes = attributes or {}

    def _link(ctx):
        if source:
            if not callable(source):
                raise ValueError('Link source must be a pattern action function')
            contexts = source(ctx)
            for ctx in contexts:
                ctx.output_model.add(
                    ctx.current_link[ORIGIN],
                    ctx.current_link[RELATIONSHIP],
                    ctx.current_link[TARGET],
                    attributes)
            return
        (o, r, v, a) = ctx.current_link
        # Normalize each slot to a list, defaulting to the current link slot.
        _origin = origin(ctx) if callable(origin) else origin
        o_list = [o] if _origin is None else (
            _origin if isinstance(_origin, list) else [_origin])
        _rel = rel(ctx) if callable(rel) else rel
        r_list = [r] if _rel is None else (
            _rel if isinstance(_rel, list) else [_rel])
        _value = value(ctx) if callable(value) else value
        v_list = [v] if _value is None else (
            _value if isinstance(_value, list) else [_value])
        # NOTE(review): _attributes is computed but the closure's
        # `attributes` is what gets added below — confirm which is intended.
        _attributes = attributes(ctx) if callable(attributes) else attributes
        for (o, r, v, a) in [(o, r, v, a) for o in o_list for r in r_list for v in v_list]:
            ctx.output_model.add(o, r, v, attributes)
        return
    return _link
Action function generator to create a link based on the context s current link or on provided parameters
12,817
def attr(aid):
    """Action function generator: fetch attribute *aid* from the current link."""
    def _attr(ctx):
        attributes = ctx.current_link[ATTRIBUTES]
        return attributes.get(aid)
    return _attr
Action function generator to retrieve an attribute from the current link
12,818
def values(*rels):
    """Action function generator: compute a list of relationships.

    Each item in *rels* may be a literal or a callable taking the context.
    """
    def _values(ctx):
        return [r(ctx) if callable(r) else r for r in rels]
    return _values
Action function generator to compute a set of relationships from criteria
12,819
def foreach(origin=None, rel=None, target=None, attributes=None):
    """Action function generator: expand the current link into the cartesian
    product of the given origin/rel/target/attributes values.

    Each parameter may be a literal, a list, or a callable taking the
    context; None falls back to the corresponding slot of
    ``ctx.current_link``.
    """
    def _foreach(ctx):
        _origin = origin(ctx) if callable(origin) else origin
        _rel = rel(ctx) if callable(rel) else rel
        _target = target(ctx) if callable(target) else target
        _attributes = attributes(ctx) if callable(attributes) else attributes
        (o, r, t, a) = ctx.current_link
        # Normalize each slot to a list, defaulting to the current link slot.
        o = [o] if _origin is None else (_origin if isinstance(_origin, list) else [_origin])
        r = [r] if _rel is None else (_rel if isinstance(_rel, list) else [_rel])
        t = [t] if _target is None else (_target if isinstance(_target, list) else [_target])
        a = [a] if _attributes is None else (_attributes if isinstance(_attributes, list) else [_attributes])
        return [ctx.copy(current_link=(curr_o, curr_r, curr_t, curr_a))
                for (curr_o, curr_r, curr_t, curr_a) in itertools.product(o, r, t, a)]
    return _foreach
Action function generator to compute a combination of links
12,820
def res(arg):
    """Action function generator: convert *arg* into an IRI ref.

    *arg* may be a literal or a callable taking the context.
    """
    def _res(ctx):
        _arg = arg(ctx) if callable(arg) else arg
        # BUG FIX: the computed _arg was ignored; the original passed the
        # raw (possibly callable) arg to I().
        return I(_arg)
    return _res
Convert the argument into an IRI ref
12,821
def static_singleton(*args, **kwargs):
    """STATIC Singleton decorator.

    The decorated class is instantiated at most once, with the arguments
    passed into the decorator; subsequent uses return the cached instance.
    """
    def __static_singleton_wrapper(cls):
        try:
            return __singleton_instances[cls]
        except KeyError:
            instance = cls(*args, **kwargs)
            __singleton_instances[cls] = instance
            return instance
    return __static_singleton_wrapper
STATIC Singleton Design Pattern Decorator Class is initialized with arguments passed into the decorator .
12,822
def get_method_documentation(method):
    """Use ``inspect`` to build a help dictionary for *method*.

    :param method: a bound/unbound method (its first argument, e.g.
        ``self``, is excluded from the reported parameters).
    :returns: a dict with ``name``, ``friendly_name`` and, when present,
        ``parameters`` (required/optional) and ``help`` parsed from the
        docstring (summary, ``:param:`` entries and ``:returns:``).
    """
    # getargspec was removed in Python 3.11; getfullargspec returns a
    # compatible structure for the fields used here (.args, .defaults).
    try:
        from inspect import getfullargspec as _argspec
    except ImportError:  # pragma: no cover - legacy Python fallback
        from inspect import getargspec as _argspec
    result = {
        'name': method.__name__,
        'friendly_name': ' '.join(
            [name.capitalize() for name in method.__name__.split('_')]),
    }
    arg_specs = _argspec(method)
    arguments = {}
    if not arg_specs.defaults:
        if len(arg_specs.args[1:]) > 0:
            arguments['required'] = list(arg_specs.args[1:])
    else:
        n_defaults = len(arg_specs.defaults)
        if len(arg_specs.args[1:-n_defaults]):
            arguments['required'] = list(arg_specs.args[1:-n_defaults])
        arguments['optional'] = {}
        for i in range(n_defaults):
            arguments['optional'][arg_specs.args[-n_defaults + i]] = \
                arg_specs.defaults[i]
    if arguments != {}:
        result['parameters'] = arguments
    doc = method.__doc__.strip() if method.__doc__ else ''
    # BUG FIX: the original tested and searched method.__doc__ directly,
    # which raised TypeError when __doc__ was None and mixed stripped /
    # unstripped offsets; use the normalized `doc` consistently.
    if ':' in doc:
        text = doc
        doc = {'summary': text[0:text.find(' :')].strip()}
        params = re.findall(r":param ([^\s]*): (.*)\n", text)
        if len(params) > 0:
            doc['parameters'] = {}
            for param in params:
                doc['parameters'][param[0]] = param[1].strip()
        regex = re.compile(r":returns:(.*)", re.MULTILINE | re.DOTALL)
        returns = regex.search(text)
        if returns and returns.group(0):
            doc['return'] = returns.group(0).replace(
                ':returns:', '').replace('\n ', '\n').strip()
    if doc != '':
        result['help'] = doc
    return result
This function uses inspect to retrieve information about a method .
12,823
def sort_dictionary_list(dict_list, sort_key):
    """Sort a list of dictionaries in place by ``d[sort_key]``.

    Returns the (possibly empty or None) list unchanged in identity.
    """
    if dict_list:
        dict_list.sort(key=itemgetter(sort_key))
    return dict_list
sorts a list of dictionaries based on the value of the sort_key
12,824
def safe_info(self, dic=None):
    """Return the public information of the object.

    Keys starting with ``'_'`` are dropped; serializable objects, dicts and
    lists are converted recursively.
    """
    # NOTE(review): `dic != {}` is always True when dic is None, so the
    # second clause is dead; an explicitly-passed empty dict is used as-is.
    if dic is None and dic != {}:
        dic = self.to_dict()
    output = {}
    for (key, value) in dic.items():
        if key[0] != '_':  # skip private keys
            if isinstance(value, SerializableObject):
                output[key] = value.safe_info()
            elif isinstance(value, dict):
                output[key] = self.safe_info(dic=value)
            elif isinstance(value, list):
                output[key] = []
                for f in value:
                    if isinstance(f, SerializableObject):
                        output[key].append(f.safe_info())
                    elif isinstance(f, dict):
                        output[key].append(self.safe_info(dic=f))
                    else:
                        output[key].append(f)
            else:
                output[key] = value
    return output
Returns public information of the object
12,825
def run(host='0.0.0.0', port=5000, reload=True, debug=True):
    """Run the development server."""
    from werkzeug.serving import run_simple
    application = bootstrap.get_app()
    return run_simple(
        hostname=host,
        port=port,
        application=application,
        use_reloader=reload,
        use_debugger=debug,
    )
Run development server
12,826
def shell():
    """Start an application-aware shell (IPython when available)."""
    app = bootstrap.get_app()
    context = dict(app=app)
    app.app_context().push()
    if importlib.util.find_spec("IPython"):
        from IPython import embed
        embed(user_ns=context)
    else:
        import code
        code.interact(local=context)
Start application - aware shell
12,827
def _push(self, title, view, class_name, is_class, **kwargs):
    """Push navigation data for *view* onto the menu stack.

    :param title: nav title for the entry.
    :param view: the view function or view class method.
    :param class_name: name of the owning view class.
    :param is_class: True when pushing the class itself (its index entry).
    """
    set_view_attr(view, "title", title, cls_name=class_name)
    module_name = view.__module__
    method_name = view.__name__
    _endpoint = build_endpoint_route_name(
        view, "index" if is_class else method_name, class_name)
    endpoint = kwargs.pop("endpoint", _endpoint)
    kwargs.setdefault("endpoint_kwargs", {})
    order = kwargs.pop("order", 0)
    _nav_tags = get_view_attr(view, "nav_tags", ["default"], cls_name=class_name)
    tags = kwargs.pop("tags", _nav_tags)
    if not isinstance(tags, list):
        _ = tags
        tags = [_]
    kwargs["tags"] = tags
    visible = kwargs.pop("visible", [True])
    if not isinstance(visible, list):
        visible = [visible]
    # An explicit nav_visible=False on the view overrides everything.
    if get_view_attr(view, "nav_visible", cls_name=class_name) is False:
        visible = False
    kwargs["view"] = view
    kwargs["visible"] = visible
    kwargs["active"] = False
    kwargs["key"] = class_name
    if is_class:
        kwargs["endpoint"] = endpoint
        kwargs["has_subnav"] = True
    else:
        kwargs["has_subnav"] = False
    # NOTE(review): this update unconditionally resets has_subnav to False,
    # overwriting the True just set for classes — confirm intended.
    kwargs.update({
        "order": order,
        "has_subnav": False,
        "title": title,
        "endpoint": endpoint,
    })
    self._title_map[endpoint] = title
    # NOTE(review): path uses method_name for classes and class_name for
    # methods — looks inverted but is preserved as written.
    path = "%s.%s" % (module_name, method_name if is_class else class_name)
    attach_to = kwargs.pop("attach_to", [])
    if not attach_to:
        attach_to.append(path)
    for path in attach_to:
        if path not in self.MENU:
            self.MENU[path] = {
                "title": None,
                "endpoint": None,
                "endpoint_kwargs": {},
                "order": None,
                "subnav": [],
                "kwargs": {},
            }
        if is_class:
            self.MENU[path]["title"] = title
            self.MENU[path]["order"] = order
            self.MENU[path]["kwargs"] = kwargs
        else:
            self.MENU[path]["subnav"].append(kwargs)
Push nav data stack
12,828
def render(self):
    """Render the menu into a list of entries sorted by their order."""
    menu_list = []
    menu_index = 0
    # Work on a deep copy so request-specific state (active/visible) does
    # not leak into the shared MENU definition.
    for _, menu in copy.deepcopy(self.MENU).items():
        subnav = []
        menu["kwargs"]["_id"] = str(menu_index)
        menu["kwargs"]["active"] = False
        if "visible" in menu["kwargs"]:
            menu["kwargs"]["visible"] = self._test_visibility(menu["kwargs"]["visible"])
        for s in menu["subnav"]:
            if s["title"]:
                s["title"] = self._get_title(s["title"])
            if s["endpoint"] == request.endpoint:
                # Mark both the subnav entry and its parent menu active.
                s["active"] = True
                menu["kwargs"]["active"] = True
            s["visible"] = self._test_visibility(s["visible"])
            menu_index += 1
            s["_id"] = str(menu_index)
            subnav.append(s)
        _kwargs = menu["kwargs"]
        if menu["title"]:
            _kwargs.update({
                "subnav": self._sort(subnav),
                "order": menu["order"],
                "title": self._get_title(menu["title"]),
            })
            menu_list.append(_kwargs)
        else:
            # Entries without a title are flattened into the top level.
            menu_list += subnav
        menu_index += 1
    return self._sort(menu_list)
Render the menu into a sorted by order multi dict
12,829
def add_qtl_to_marker(marker, qtls):
    """Append to *marker* the number of QTLs found for it.

    A QTL matches when its last field equals the marker's name (first
    field). The count is appended as a string and the list returned.
    """
    matches = sum(1 for qtl in qtls if qtl[-1] == marker[0])
    marker.append(str(matches))
    return marker
Add the number of QTLs found for a given marker .
12,830
def add_qtl_to_map(qtlfile, mapfile, outputfile='map_with_qtls.csv'):
    """Annotate each marker of a genetic map with its number of significant
    QTLs and write the resulting matrix to *outputfile*."""
    qtl_list = read_input_file(qtlfile, ',')
    map_list = read_input_file(mapfile, ',')
    map_list[0].append('# QTLs')
    markers = [map_list[0]]  # header row
    qtl_cnt = 0
    for marker in map_list[1:]:
        annotated = add_qtl_to_marker(marker, qtl_list[1:])
        markers.append(annotated)
        qtl_cnt += int(annotated[-1])
    LOG.info('- %s markers processed in %s' % (len(markers), mapfile))
    LOG.info('- %s QTLs located in the map: %s' % (qtl_cnt, outputfile))
    write_matrix(outputfile, markers)
This function adds to a genetic map for each marker the number of significant QTLs found .
12,831
def send_command(self, data, read_delay=None):
    """Write *data* to the port, optionally wait, and return the response."""
    self._write(data)
    if read_delay:
        time.sleep(read_delay)
    return self._read()
Write data to the port and return the response form it
12,832
def serialize_relations(pid):
    """Serialize the child and parent relations for *pid* into a dict."""
    data = {}
    relations = PIDRelation.get_child_relations(pid).all()
    for relation in relations:
        rel_cfg = resolve_relation_type_config(relation.relation_type)
        dump_relation(rel_cfg.api(relation.parent), rel_cfg, pid, data)
    parent_relations = PIDRelation.get_parent_relations(pid).all()
    # De-duplicate configs so each parent relation type is dumped only once.
    # NOTE(review): here the relation object itself (not .relation_type) is
    # passed to resolve_relation_type_config — confirm it accepts both.
    rel_cfgs = set([resolve_relation_type_config(p) for p in parent_relations])
    for rel_cfg in rel_cfgs:
        dump_relation(rel_cfg.api(pid), rel_cfg, pid, data)
    return data
Serialize the relations for given PID .
12,833
def dump_relation(api, rel_cfg, pid, data):
    """Dump one relation into *data* under the relation config's name.

    Does nothing when the config declares no schema.
    """
    schema_class = rel_cfg.schema
    if schema_class is None:
        return
    schema = schema_class()
    schema.context['pid'] = pid
    result, errors = schema.dump(api)
    data.setdefault(rel_cfg.name, []).append(result)
Dump a specific relation to a data dict .
12,834
def add_item(self, url, title=None, selection=None, jsonp=None,
             redirect=None, response_info=False):
    """Add a new item to an Instapaper account.

    Returns the raw response for jsonp requests; otherwise a tuple of
    (status, status text) — extended with the stored title and location
    when *response_info* is True.
    """
    parameters = {
        'username': self.user,
        'password': self.password,
        'url': url,
    }
    if title is None:
        parameters['auto-title'] = 1
    else:
        parameters['title'] = title
    for key, val in (('selection', selection), ('redirect', redirect),
                     ('jsonp', jsonp)):
        if val is not None:
            parameters[key] = val
    status, headers = self._query(self.addurl, parameters)
    if jsonp is not None:
        return status
    statustxt = self.add_status_codes[int(status)]
    if response_info:
        return (int(status), statustxt, headers['title'], headers['location'])
    return (int(status), statustxt)
Method to add a new item to a instapaper account
12,835
def _query(self, url=None, params=""):
    """Query *url* with POST *params*; return ``(status, headers)``.

    Python 2 code (urllib/urllib2). For jsonp requests the status is the
    raw response body; otherwise it is the HTTP status code.

    :raises NoUrlError: when no URL is provided.
    """
    if url is None:
        raise NoUrlError("No URL was provided.")
    headers = {'location': None, 'title': None}
    headerdata = urllib.urlencode(params)
    try:
        request = urllib2.Request(url, headerdata)
        response = urllib2.urlopen(request)
        if 'jsonp' in params:
            status = response.read()
        else:
            status = response.getcode()
        info = response.info()
        try:
            headers['location'] = info['Content-Location']
        except KeyError:
            pass
        try:
            headers['title'] = info['X-Instapaper-Title']
        except KeyError:
            pass
        return (status, headers)
    except urllib2.HTTPError as exception:
        # HTTP errors still produce a jsonp-style body when requested.
        if 'jsonp' in params:
            return ('%s({"status":%d})' % (params['jsonp'], exception.code), headers)
        else:
            return (exception.code, headers)
    except IOError as exception:
        # NOTE(review): plain IOError has no .code attribute — this handler
        # would itself raise AttributeError; confirm the intended type.
        return (exception.code, headers)
method to query a URL with the given parameters
12,836
def cors(*args, **kwargs):
    """Wrapper around flask-cors ``cross_origin`` that also acts on classes."""
    def decorator(fn):
        cors_fn = flask_cors.cross_origin(automatic_options=False, *args, **kwargs)
        if not inspect.isclass(fn):
            return cors_fn(fn)
        # For classes, wrap every member and return the class itself.
        apply_function_to_members(fn, cors_fn)
        return fn
    return decorator
A wrapper around flask - cors cross_origin to also act on classes
12,837
def get_residue_mapping(self):
    """Return a residue mapping between the sequences ONLY IF there are
    exactly two; this restriction keeps the code simple. Returns None
    otherwise.
    """
    if len(self.sequence_ids) == 2:
        if not self.alignment_output:
            self.align()
        assert (self.alignment_output)
        # NOTE(review): indexes 1 and 2 with len()==2 imply sequence_ids
        # is a 1-based mapping (dict), not a list — confirm.
        return self._create_residue_map(
            self._get_alignment_lines(),
            self.sequence_ids[1],
            self.sequence_ids[2])
    else:
        return None
Returns a mapping between the sequences ONLY IF there are exactly two . This restriction makes the code much simpler .
12,838
def realign(self, cut_off, chains_to_skip=set()):
    """Change the cut-off and rerun the alignment pipeline.

    Much quicker than building a new PDBUniParcSequenceAligner object as
    the UniParcEntry creation etc. in the constructor is not repeated.

    NOTE(review): the mutable default ``chains_to_skip=set()`` is shared
    across calls; safe only while it is never mutated here.
    """
    if cut_off != self.cut_off:
        self.cut_off = cut_off
        # Invalidate cached per-chain results for chains being realigned.
        for c in self.chains:
            if c not in chains_to_skip:
                self.clustal_matches[c] = None
                self.substring_matches[c] = None
                if self.alignment.get(c):
                    del self.alignment[c]
                if self.seqres_to_uniparc_sequence_maps.get(c):
                    del self.seqres_to_uniparc_sequence_maps[c]
        self._align_with_clustal(chains_to_skip=chains_to_skip)
        self._align_with_substrings(chains_to_skip=chains_to_skip)
        self._check_alignments(chains_to_skip=chains_to_skip)
        self._get_residue_mapping(chains_to_skip=chains_to_skip)
Alter the cut - off and run alignment again . This is much quicker than creating a new PDBUniParcSequenceAligner object as the UniParcEntry creation etc . in the constructor does not need to be repeated .
12,839
def _determine_representative_chains(self):
    """Quotient the chains into equivalence classes of identical sequence.

    Python 2 code (iteritems). Populates ``self.equivalence_fiber``
    (representative chain id -> set of member chain ids) and
    ``self.representative_chains``.
    """
    equivalence_fiber = {}
    matched_chains = set()
    for chain_id, equivalent_chains in self.identical_sequences.iteritems():
        matched_chains.add(chain_id)
        equivalent_chain_ids = set()
        for equivalent_chain in equivalent_chains:
            # Expect ids like '<pdb_id>_<chain>' or '<pdb_id>:<chain>'.
            assert (len(equivalent_chain) == 6)
            assert ((equivalent_chain[:5] == '%s_' % self.pdb_id) or
                    (equivalent_chain[:5] == '%s:' % self.pdb_id))
            equivalent_chain_ids.add(equivalent_chain[5])
        found = False
        for equivalent_chain_id in equivalent_chain_ids:
            if equivalence_fiber.get(equivalent_chain_id):
                found = True
                # Sanity check: the existing class must equal this one.
                assert (equivalence_fiber[equivalent_chain_id] ==
                        equivalent_chain_ids.union(set([chain_id])))
                break
        if not found:
            equivalence_fiber[chain_id] = set(equivalent_chain_ids)
            equivalence_fiber[chain_id].add(chain_id)
    # Chains with no identical partner become singleton classes.
    for c in self.chains:
        if c not in matched_chains:
            equivalence_fiber[c] = set([c])
    self.equivalence_fiber = equivalence_fiber
    self.representative_chains = equivalence_fiber.keys()
Quotient the chains to get equivalence classes of chains . These will be used for the actual mapping .
12,840
def _get_uniparc_sequences_through_uniprot_ACs(self, mapping_pdb_id, uniprot_ACs, cache_dir):
    """Get the UniParc entries associated with UniProt accession numbers.

    Python 2 code (iteritems).

    :returns: ``{mapping_pdb_id: [UniParcEntry, ...]}``.
    """
    # Map UniProt ACs to UniParc IDs via the UniProt mapping service.
    m = uniprot_map('ACC', 'UPARC', uniprot_ACs, cache_dir=cache_dir)
    UniParcIDs = []
    for _, v in m.iteritems():
        UniParcIDs.extend(v)
    mapping = {mapping_pdb_id: []}
    for UniParcID in UniParcIDs:
        entry = UniParcEntry(UniParcID, cache_dir=cache_dir)
        mapping[mapping_pdb_id].append(entry)
    return mapping
Get the UniParc sequences associated with the UniProt accession number .
12,841
def _align_with_substrings(self, chains_to_skip=set()):
    """Simple substring-based matching of chain FASTA sequences against
    the UniParc sequences.

    For each representative chain, self.substring_matches[c] maps
    uniparc_id -> number of residues trimmed from each end of the FASTA
    sequence before a substring match was found (0, 3, 5 or 7).
    """
    for c in self.representative_chains:
        if c not in chains_to_skip:
            fasta_sequence = self.fasta[c]
            substring_matches = {}
            for uniparc_id, uniparc_sequence in sorted(self.uniparc_sequences.iteritems()):
                uniparc_sequence = str(uniparc_sequence)
                idx = uniparc_sequence.find(fasta_sequence)
                if idx != -1:
                    substring_matches[uniparc_id] = 0
                elif len(fasta_sequence) > 30:
                    # Retry with trimmed ends to tolerate ragged termini.
                    idx = uniparc_sequence.find(fasta_sequence[5:-5])
                    if idx != -1:
                        substring_matches[uniparc_id] = 5
                    else:
                        idx = uniparc_sequence.find(fasta_sequence[7:-7])
                        if idx != -1:
                            substring_matches[uniparc_id] = 7
                elif len(fasta_sequence) > 15:
                    idx = uniparc_sequence.find(fasta_sequence[3:-3])
                    if idx != -1:
                        substring_matches[uniparc_id] = 3
            self.substring_matches[c] = substring_matches
    # NOTE(review): leftover debug output below — consider removing.
    colortext.pcyan('*' * 100)
    pprint.pprint(self.substring_matches)
    if self.restrict_to_uniparc_values:
        for c in self.representative_chains:
            # NOTE(review): comparing a set against 0 with '>' — in Python 2
            # this is always True; probably meant len(...) > 0.
            if set(map(str, self.substring_matches[c].keys())).intersection(set(self.restrict_to_uniparc_values)) > 0:
                restricted_matches = dict((str(k), self.substring_matches[c][k]) for k in self.substring_matches[c].keys() if str(k) in self.restrict_to_uniparc_values)
                if len(restricted_matches) != len(self.substring_matches[c]):
                    removed_matches = sorted(set(self.substring_matches[c].keys()).difference(set(restricted_matches)))
                    self.substring_matches[c] = restricted_matches
    # Copy each representative's matches onto its equivalent chains.
    for c_1, related_chains in self.equivalence_fiber.iteritems():
        for c_2 in related_chains:
            self.substring_matches[c_2] = self.substring_matches[c_1]
Simple substring - based matching
12,842
def _get_residue_mapping(self, chains_to_skip=set()):
    """Create SEQRES->UniParc residue maps for each representative chain,
    then copy them onto all equivalent chains.

    Python 2 code (iteritems).
    """
    for c in self.representative_chains:
        if c not in chains_to_skip:
            if self.alignment.get(c):
                uniparc_entry = self.get_uniparc_object(c)
                sa = SequenceAligner()
                sa.add_sequence(c, self.fasta[c])
                sa.add_sequence(uniparc_entry.UniParcID, uniparc_entry.sequence)
                sa.align()
                residue_mapping, residue_match_mapping = sa.get_residue_mapping()
                s = PDBUniParcSequenceMap()
                assert (sorted(residue_mapping.keys()) == sorted(residue_match_mapping.keys()))
                for k, v in residue_mapping.iteritems():
                    s.add(k, (uniparc_entry.UniParcID, v), residue_match_mapping[k])
                self.seqres_to_uniparc_sequence_maps[c] = s
            else:
                # No alignment for this chain: store an empty map.
                self.seqres_to_uniparc_sequence_maps[c] = PDBUniParcSequenceMap()
    for c_1, related_chains in self.equivalence_fiber.iteritems():
        for c_2 in related_chains:
            if self.seqres_to_uniparc_sequence_maps.get(c_1):
                self.seqres_to_uniparc_sequence_maps[c_2] = self.seqres_to_uniparc_sequence_maps[c_1]
Creates a mapping between the residues of the chains and the associated UniParc entries .
12,843
def get_corresponding_chains(self, from_pdb_id, from_chain_id, to_pdb_id):
    """Return the sorted chains of *to_pdb_id* mapped from the given chain.

    Should be called after get_mutations; missing keys yield [].
    """
    by_chain = self.chain_map.get(from_pdb_id, {})
    by_target = by_chain.get(from_chain_id, {})
    return sorted(by_target.get(to_pdb_id, []))
Should be called after get_mutations .
12,844
def get_chain_mutations(self, pdb_id_1, chain_1, pdb_id_2, chain_2):
    """Return the mutations from pdb_id_1/chain_1 to pdb_id_2/chain_2.

    :returns: a list of PDBMutationPair, each pairing a SEQRES
        ChainMutation with the corresponding ATOM ChainMutation (or None
        when the residue has no ATOM record).
    :raises Exception: when a SEQRES or ATOM sequence is missing.
    """
    p1 = self.add_pdb(pdb_id_1)
    p2 = self.add_pdb(pdb_id_2)
    sifts_1, pdb_1 = p1['sifts'], p1['pdb']
    sifts_2, pdb_2 = p2['sifts'], p2['pdb']
    seqres_to_atom_sequence_maps_1 = sifts_1.seqres_to_atom_sequence_maps.get(chain_1, {})
    seqres_1, atom_1 = pdb_1.seqres_sequences.get(chain_1), pdb_1.atom_sequences.get(chain_1)
    seqres_2, atom_2 = pdb_2.seqres_sequences.get(chain_2), pdb_2.atom_sequences.get(chain_2)
    if not seqres_1:
        raise Exception('No SEQRES sequence for chain {0} of {1}.'.format(chain_1, pdb_1))
    if not atom_1:
        raise Exception('No ATOM sequence for chain {0} of {1}.'.format(chain_1, pdb_1))
    if not seqres_2:
        raise Exception('No SEQRES sequence for chain {0} of {1}.'.format(chain_2, pdb_2))
    if not atom_2:
        raise Exception('No ATOM sequence for chain {0} of {1}.'.format(chain_2, pdb_2))
    seqres_str_1 = str(seqres_1)
    seqres_str_2 = str(seqres_2)
    # Align the two SEQRES sequences and record the residue-level map.
    sa = SequenceAligner()
    sa.add_sequence('{0}_{1}'.format(pdb_id_1, chain_1), seqres_str_1)
    sa.add_sequence('{0}_{1}'.format(pdb_id_2, chain_2), seqres_str_2)
    sa.align()
    seqres_residue_mapping, seqres_match_mapping = sa.get_residue_mapping()
    seqres_sequence_map = SequenceMap()
    assert (sorted(seqres_residue_mapping.keys()) == sorted(seqres_match_mapping.keys()))
    for k, v in seqres_residue_mapping.iteritems():
        seqres_sequence_map.add(k, v, seqres_match_mapping[k])
    self.seqres_sequence_maps[(pdb_id_1, chain_1)][(pdb_id_2, chain_2)] = seqres_sequence_map
    mutations = []
    clustal_symbols = SubstitutionScore.clustal_symbols
    for seqres_res_id, v in seqres_match_mapping.iteritems():
        # '*' marks an identical position; anything else is a mutation.
        if clustal_symbols[v.clustal] != '*':
            seqres_wt_residue = seqres_1[seqres_res_id]
            seqres_mutant_residue = seqres_2[seqres_residue_mapping[seqres_res_id]]
            atom_res_id = None
            atom_chain_res_id = seqres_to_atom_sequence_maps_1.get(seqres_res_id)
            try:
                if atom_chain_res_id:
                    assert (atom_chain_res_id[0] == chain_1)
                    atom_residue = atom_1[atom_chain_res_id]
                    atom_res_id = atom_chain_res_id[1:]
                    assert (atom_residue.ResidueAA == seqres_wt_residue.ResidueAA)
                    assert (atom_residue.ResidueID == atom_res_id)
            except:
                # NOTE(review): bare except — consistency failures for a
                # non-'X' wildtype residue are re-raised; 'X' residues are
                # silently treated as having no ATOM record.
                atom_res_id = None
                if seqres_wt_residue.ResidueAA != 'X':
                    raise
            seqres_mutation = ChainMutation(seqres_wt_residue.ResidueAA, seqres_res_id, seqres_mutant_residue.ResidueAA, Chain=chain_1)
            atom_mutation = None
            if atom_res_id:
                atom_mutation = ChainMutation(seqres_wt_residue.ResidueAA, atom_res_id, seqres_mutant_residue.ResidueAA, Chain=chain_1)
            mutations.append(PDBMutationPair(seqres_mutation, atom_mutation))
    return mutations
Returns a list of tuples each containing a SEQRES Mutation object and an ATOM Mutation object representing the mutations from pdb_id_1 chain_1 to pdb_id_2 chain_2 .
12,845
def get_mapping_from_db3_file(db_path):
    """Read a Rosetta SQLite3 .db3 file and return the PDB->pose residue
    mapping.

    NOTE(review): both ``cursor().execute()`` calls are missing their SQL
    statement argument — this cannot run as-is; the query strings were
    presumably lost. TODO: restore the residue-query SQL.
    """
    import sqlite3
    conn = sqlite3.connect(db_path)
    results = conn.cursor().execute()
    rosetta_residue_ids = []
    mapping = {}
    for r in results:
        # Key: chain + 4-char right-justified residue number + insert code.
        mapping["%s%s%s" % (r[0], str(r[1]).rjust(4), r[2])] = {
            'pose_residue_id': r[4], 'name3': r[5], 'res_type': r[6]}
        rosetta_residue_ids.append(r[4])
    raw_residue_list = [r for r in conn.cursor().execute()]
    # Sanity check: every raw residue id was seen in the mapping query.
    assert (sorted([r[0] for r in raw_residue_list]) == sorted(rosetta_residue_ids))
    return mapping
Does the work of reading the Rosetta SQLite3 . db3 file to retrieve the mapping
12,846
def add_company_quarter(self, company_name, quarter_name, dt, calendar_id='notices'):
    """Add a "<company> <quarter> Quarter begins" event to the calendar.

    :param company_name: company the quarter belongs to.
    :param quarter_name: Spring/Summer/Fall/Winter (case-insensitive).
    :param dt: a ``date`` object for the quarter start.
    :param calendar_id: key into ``self.configured_calendar_ids``.
    :returns: True if the event was added, False if it already existed.
    """
    assert (calendar_id in self.configured_calendar_ids.keys())
    # NOTE(review): calendarId is computed but the insert below looks the
    # id up again — this local is unused.
    calendarId = self.configured_calendar_ids[calendar_id]
    quarter_name = quarter_name.title()
    quarter_numbers = {'Spring': 1, 'Summer': 2, 'Fall': 3, 'Winter': 4}
    assert (quarter_name in quarter_numbers.keys())
    # Search a window from the day before to two days after for duplicates.
    start_time = datetime(year=dt.year, month=dt.month, day=dt.day, hour=0, minute=0, second=0, tzinfo=self.timezone) + timedelta(days=-1)
    end_time = start_time + timedelta(days=3, seconds=-1)
    summary = '%s %s Quarter begins' % (company_name, quarter_name)
    events = self.get_events(start_time.isoformat(), end_time.isoformat(), ignore_cancelled=True)
    for event in events:
        if event.summary.find(summary) != -1:
            # An event with this summary already exists; do not duplicate.
            return False
    event_body = {
        'summary': summary,
        'description': summary,
        'start': {'date': dt.isoformat(), 'timeZone': self.timezone_string},
        'end': {'date': dt.isoformat(), 'timeZone': self.timezone_string},
        'status': 'confirmed',
        'gadget': {
            'display': 'icon',
            'iconLink': 'https://guybrush.ucsf.edu/images/Q%d_32.png' % quarter_numbers[quarter_name],
            'title': summary,
        },
        'extendedProperties': {
            'shared': {
                'event_type': '%s quarter' % company_name,
                'quarter_name': quarter_name
            }
        }
    }
    colortext.warning('\n%s\n' % pprint.pformat(event_body))
    created_event = self.service.events().insert(calendarId=self.configured_calendar_ids[calendar_id], body=event_body).execute()
    return True
Adds a company_name quarter event to the calendar . dt should be a date object . Returns True if the event was added .
12,847
def create_space(self):
    """Set up a new table space for the first time."""
    cursor = self._conn.cursor()
    cursor.executescript(SQL_MODEL)
    self._conn.commit()
    cursor.close()
    return
Set up a new table space for the first time
12,848
def drop_space(self):
    """Dismantle an existing table space by running the drop script."""
    cursor = self._conn.cursor()
    cursor.executescript(DROP_SQL_MODEL)
    self._conn.commit()
    cursor.close()
Dismantle an existing table space
12,849
def eval(self, text):
    """Respond to text entered by the user.

    Parses the text into sentences and interprets each one, echoing the
    parsed sentence to the terminal when echo mode is on.
    """
    prog = Program(text, echo=self.echo, transforms=self.transforms)
    sentences = prog.gen_sentences(prog.gen_tokens(), self.aliases)
    for sent in sentences:
        if self.echo:
            self.terminal.debug(str(sent))
        prog.interpret(sent, self.commands)
Respond to text entered by the user .
12,850
def interact(self):
    """Get a command from the user and respond to it.

    Lines are accumulated until eval() accepts them; a ValueError is
    ignored and another line is read. Any other error is reported and
    ends the interaction; KeyboardInterrupt propagates to the caller.
    """
    buffered = ""
    for line in self.read():
        buffered += line
        try:
            self.eval(buffered)
        except ValueError:
            # Keep accumulating lines.
            pass
        except KeyboardInterrupt as e:
            raise e
        except:
            # Report anything else and stop this interaction.
            self.terminal.error(traceback.format_exc())
            break
        else:
            break
Get a command from the user and respond to it .
12,851
def serve_forever(self, banner=None):
    """Handle one interaction at a time until shutdown.

    :param banner: optional text printed once before the loop starts.

    KeyboardInterrupt aborts the current interaction and prints a
    newline so the next prompt starts fresh; SystemExit stops serving.
    """
    if banner:
        print(banner)
    while True:
        try:
            self.interact()
        except KeyboardInterrupt:
            # Bug fix: a bare `print` statement is a no-op expression in
            # Python 3; call print() to actually emit the newline.
            print()
        except SystemExit:
            break
Handle one interaction at a time until shutdown .
12,852
def process_results(output_dir, config):
    """Process results and report where the output page was written."""
    print('\nanalyzing results...\n')
    if output_results(output_dir, config):
        print('created: %s/results.html\n' % output_dir)
    else:
        print('results cannot be processed')
Process results and output them
12,853
def copy_config(project_path, output_dir):
    """Copy the project's config.json file into the output directory."""
    source = os.path.join(project_path, 'config.json')
    destination = os.path.join(output_dir, 'config.json')
    shutil.copy(source, destination)
Copy current config file to output directory
12,854
def start_hq(output_dir, config, topic, is_master=True, **kwargs):
    """Instantiate, run and tear down a HQ.

    When running as master, waits for the configured minimum number of
    turrets (default 1) before starting the run.
    """
    hq_class = get_hq_class(config.get('hq_class'))
    hq = hq_class(output_dir, config, topic, **kwargs)
    hq.setup()
    if is_master:
        hq.wait_turrets(config.get("min_turrets", 1))
    hq.run()
    hq.tear_down()
Start a HQ
12,855
def generate_output_path(args, project_path):
    """Build the default results directory path, timestamped to the microsecond.

    Note: ``args`` is accepted for interface compatibility but unused.
    """
    microseconds = datetime.now().microsecond
    stamp = time.strftime('%Y.%m.%d_%H.%M.%S', time.localtime())
    dirname = 'results_{}_{}'.format(stamp, str(microseconds))
    return os.path.join(project_path, 'results', dirname)
Generate default output directory
12,856
def run(args):
    """Start an oct project from parsed CLI arguments.

    Configures the project, prepares the output directory and stats,
    runs the HQ, and optionally processes results at the end.
    """
    kwargs = vars(args)
    # 'func' is the argparse dispatch target; it must not leak into start_hq.
    if 'func' in kwargs:
        del kwargs['func']
    project_path = kwargs.pop('project_path')
    config = configure(project_path, kwargs.get('config_file'))
    # Fall back to a generated, timestamped directory when none was given.
    output_dir = kwargs.pop('output_dir', None) or generate_output_path(args, project_path)
    stats_handler.init_stats(output_dir, config)
    # Random topic unless the user pinned one on the command line.
    topic = args.publisher_channel or uuid.uuid4().hex
    print("External publishing topic is %s" % topic)
    start_hq(output_dir, config, topic, **kwargs)
    if not args.no_results:
        process_results(output_dir, config)
    copy_config(project_path, output_dir)
    print('done.\n')
Start an oct project
12,857
def guest_access(func):
    """Guest access decorator.

    When public profiles are disabled in config, the request must come
    from an authenticated user (401 otherwise) who owns the profile in
    kwargs['id'] (403 otherwise).
    """
    from functools import wraps

    @wraps(func)  # fix: preserve the wrapped view's name/docs for Flask routing & debugging
    def decorated(*_, **kwargs):
        public_profiles = current_app.config['USER_PUBLIC_PROFILES']
        if not public_profiles:
            if not current_user.is_authenticated:
                abort(401)
            elif current_user.id != kwargs['id']:
                abort(403)
        return func(**kwargs)
    return decorated
Guest access decorator. Checks whether the public-profiles option is enabled in config and restricts access to profile pages accordingly.
12,858
def only_owner(func):
    """Only-owner decorator.

    Restricts access to the view to the profile owner: 401 when the
    request is unauthenticated, 403 when authenticated as another user.
    """
    from functools import wraps

    @wraps(func)  # fix: preserve the wrapped view's name/docs for Flask routing & debugging
    def decorated(*_, **kwargs):
        id = kwargs['id']
        if not current_user.is_authenticated:
            abort(401)
        elif current_user.id != id:
            abort(403)
        return func(**kwargs)
    return decorated
Only owner decorator Restricts access to view ony to profile owner
12,859
def load(config_file):
    """Process and load a config file.

    The file is rendered as a template with the current environment
    variables available as template variables, then parsed as YAML and
    wrapped in a Config object.
    """
    with open(config_file, "r") as f:
        template = Template(f.read())
    rendered = template.render(**dict(os.environ))
    # Fix: yaml.load() without an explicit Loader is unsafe on untrusted
    # input and raises TypeError on PyYAML >= 6; safe_load only builds
    # plain Python types.
    return Config(yaml.safe_load(rendered))
Processes and loads config file .
12,860
def get_term_by_year_and_quarter(year, quarter):
    """Return a uw_sws.models.Term for the given year and quarter."""
    url = "{}/{},{}.json".format(term_res_url_prefix, year, quarter.lower())
    response = get_resource(url)
    return _json_to_term_model(response)
Returns a uw_sws . models . Term object for the passed year and quarter .
12,861
def get_current_term():
    """Return a uw_sws.models.Term for the current term.

    If the current term's grade submission deadline has already passed,
    the next term is returned instead.
    """
    term = _json_to_term_model(get_resource("{}/current.json".format(term_res_url_prefix)))
    if datetime.now() > term.grade_submission_deadline:
        return get_next_term()
    return term
Returns a uw_sws . models . Term object for the current term .
12,862
def get_term_before(aterm):
    """Return a uw_sws.models.Term for the term preceding ``aterm``."""
    # Index -1 wraps around to the last entry ('autumn'), in which case
    # the previous term belongs to the previous calendar year.
    quarter = QUARTER_SEQ[QUARTER_SEQ.index(aterm.quarter) - 1]
    year = aterm.year - 1 if quarter == "autumn" else aterm.year
    return get_term_by_year_and_quarter(year, quarter)
Returns a uw_sws . models . Term object for the term before the term given .
12,863
def get_term_after(aterm):
    """Return a uw_sws.models.Term for the term following ``aterm``."""
    if aterm.quarter == "autumn":
        # Autumn is the last quarter in the sequence; wrap to the first.
        quarter = QUARTER_SEQ[0]
    else:
        quarter = QUARTER_SEQ[QUARTER_SEQ.index(aterm.quarter) + 1]
    year = aterm.year + 1 if quarter == "winter" else aterm.year
    return get_term_by_year_and_quarter(year, quarter)
Returns a uw_sws . models . Term object for the term after the term given .
12,864
def get_term_by_date(date):
    """Return the Term containing the given datetime.date.

    Walks the year's quarters from latest to earliest until one starts
    on or before ``date``; falls back to the previous year's autumn when
    even winter starts too late. Finally returns the following term if
    it has already begun. (A dead trailing ``pass`` was removed.)
    """
    year = date.year
    term = None
    for quarter in ('autumn', 'summer', 'spring', 'winter'):
        term = get_term_by_year_and_quarter(year, quarter)
        if date >= term.first_day_quarter:
            break
    if date < term.first_day_quarter:
        # Even winter starts after `date`: the date belongs to the
        # previous year's autumn term (or a term after it).
        term = get_term_by_year_and_quarter(year - 1, 'autumn')
    term_after = get_term_after(term)
    if term_after.first_day_quarter > date:
        return term
    return term_after
Returns a term for the datetime . date object given .
12,865
def logging_feature(app):
    """Register logging on the given Flask application.

    Emails unhandled exceptions to admins when configured (and not in
    debug/testing mode), and logs to file outside of testing.
    """
    app.logger.setLevel(logging.INFO)
    wants_email = app.config.get('LOGGING_EMAIL_EXCEPTIONS_TO_ADMINS')
    if wants_email and not (app.debug or app.testing):
        app.logger.addHandler(mail_logger(app))
    if not app.testing:
        app.logger.addHandler(file_logger(app))
Add logging Accepts flask application and registers logging functionality within it
12,866
async def rtm(self) -> AsyncIterator[Event]:
    """Connect to the realtime event API and start yielding events.

    Bootstraps identity/team/channel/user/group state from the
    ``rtm.start`` payload, then streams events over the websocket until
    a "goodbye" event is received.
    """
    response = cast(RTMStart, await self.api("rtm.start"))
    # Non-recursive generation keeps nested payload data as plain values.
    self.me = Auto.generate(response.self_, "Me", recursive=False)
    self.team = Auto.generate(response.team, "Team", recursive=False)
    self.channels.fill(Channel.build(item) for item in response.channels)
    self.users.fill(User.build(item) for item in response.users)
    self.groups.fill(Group.build(item) for item in response.groups)
    log.debug(f"received {len(self.users)} users, {len(self.channels)} channels " f"and {len(self.groups)} groups from rtm.start")
    async with self.session.ws_connect(response["url"]) as ws:
        async for msg in ws:
            event: Event = Event.generate(msg.json(), recursive=False)
            # The server signals imminent disconnect with "goodbye".
            if event.type == "goodbye":
                break
            yield event
Connect to the realtime event API and start yielding events .
12,867
def _apply ( self , method_name , * args , ** kwargs ) : return [ getattr ( member , method_name ) ( * args , ** kwargs ) for member in self . forms ]
Call method_name with args and kwargs on each member .
12,868
def html_id(self, field_name, form=None):
    """Return the html ID for ``field_name``.

    Uses this form unless another ``form`` is supplied.
    """
    target = self if form is None else form
    return target.auto_id % (target.add_prefix(field_name),)
Return the html ID for the given field_name .
12,869
def save(self):
    """Save the changes to the instance and any related objects.

    Plain forms are saved first without committing, the instance is
    persisted, then m2m/related data is saved, and finally any nested
    formsets are committed.
    :return: the saved instance.
    """
    for form in self._forms:
        if isinstance(form, BaseForm):
            form.save(commit=False)
    self.instance.save()
    # NOTE(review): this loop reads self.forms while the others read
    # self._forms -- presumably equivalent views; confirm upstream.
    for form in self.forms:
        if isinstance(form, BaseForm):
            if hasattr(form, 'save_m2m'):
                form.save_m2m()
            if hasattr(form, 'save_related'):
                form.save_related()
    for form in self._forms:
        if isinstance(form, BaseFormSet):
            form.save(commit=True)
    return self.instance
Save the changes to the instance and any related objects .
12,870
def make_csv(self):
    """Render this report element as pipe-delimited CSV into ``self.csv``."""
    import csv
    # StringIO lives in different modules on Python 2 vs Python 3.
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    out = StringIO()
    writer = csv.writer(out, delimiter='|', lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
    if self.function == 'total':
        writer.writerows(self.results)
    elif self.function == 'top':
        # Header row, then one row per result: value | comma-joined items.
        rows = [['Value', self.headers.strip('"')]]
        if self.results[0] is not None:
            for res in self.results:
                if res is not None:
                    rows.append(tuple([res[0], ','.join(res[1])]))
        writer.writerows(rows)
    elif self.function == 'table':
        # Header row comes from the comma-separated headers spec.
        rows = [[header.strip('"') for header in re.split('\s*,\s*', self.headers)]]
        for res in sorted(self.results, key=lambda x: x[0]):
            row = list(res[:-1])
            # The last column holds multiple values; format at most 10.
            lastcol = get_fmt_results(res[-1], limit=10)
            if lastcol[-1][0] == '[' and lastcol[-1][-1] == ']':
                # A trailing "[...]" marker goes after the joined values.
                row.append(u'{0} {1}'.format(u', '.join(lastcol[:-1]), lastcol[-1]))
            else:
                row.append(u', '.join(lastcol))
            rows.append(row)
        writer.writerows(rows)
    self.csv = out.getvalue()
Get the text representation of a report element as csv .
12,871
def make(self, apps):
    """Make subreport items from results.

    Collects 'total'/'top'/'table' report data from each app (ordered by
    app priority, then name) into this subreport, then post-processes
    'top' items: sorts them descending by value and formats units.
    """
    for (appname, app) in sorted(apps.items(), key=lambda x: (x[1].priority, x[0])):
        logger.info('Getting report results from %r', appname)
        for report_data in app.report_data:
            # Only pick up data destined for this subreport.
            if report_data.subreport != self.name:
                continue
            if report_data.function == 'total':
                for opt in report_data:
                    match = report_data.parse_report_data(opt)
                    cond = match.group('condition')
                    valfld = match.group('valfld')
                    unit = match.group('unit')
                    itemtitle = match.group('fields').strip('"')
                    total = report_data.rules[opt].total_events(cond, valfld)
                    if total == 0:
                        continue
                    if unit is not None:
                        # Scale the value to a human-readable unit.
                        total, unit = get_value_unit(total, unit, 'T')
                        total = '{0} {1}'.format(total, unit)
                    else:
                        total = str(total)
                    report_data.results.append(tuple([total, itemtitle]))
            elif report_data.function == 'top':
                k = int(report_data.topnum)
                for opt in report_data:
                    match = report_data.parse_report_data(opt)
                    valfld = match.group('valfld')
                    field = match.group('fields')
                    # Use the maximum unless an additional-results flag is set.
                    usemax = match.group('add2res') is None
                    toplist = report_data.rules[opt].top_events(k, valfld, usemax, field)
                    report_data.results.extend(toplist)
            elif report_data.function == 'table':
                cols = len(re.split('\s*,\s*', report_data.headers))
                for opt in report_data:
                    match = report_data.parse_report_data(opt)
                    cond = match.group('condition')
                    fields = re.split('\s*,\s*', match.group('fields'))
                    tablelist = report_data.rules[opt].list_events(cond, cols, fields)
                    report_data.results.extend(tablelist)
            # Only keep report data that actually produced results.
            if report_data.results:
                self.report_data.append(report_data)
    for report_data in self.report_data:
        if report_data.function == 'top':
            # Highest values first.
            report_data.results = sorted(report_data.results, key=lambda x: x[0], reverse=True)
            # Use the first unit spec found among the options (if any).
            unit = None
            for opt in report_data:
                match = report_data.parse_report_data(opt)
                unit = match.group('unit')
                if unit is not None:
                    break
            # NOTE(review): res[0] is assigned in place, so results entries
            # are assumed to be mutable sequences here -- confirm upstream.
            for res in report_data.results:
                if unit is not None:
                    v, u = get_value_unit(res[0], unit, 'T')
                    res[0] = '{0} {1}'.format(v, u)
                else:
                    res[0] = str(res[0])
Make subreport items from results .
12,872
def make_format(self, fmt, width):
    """Render every populated report item in the requested format.

    ``fmt`` may be None or 'text' (plain text at the given width),
    'html', or 'csv'. Does nothing when there is no report data.
    """
    if not self.report_data:
        return
    for item in self.report_data:
        if not item.results:
            continue
        if fmt is None or fmt == 'text':
            item.make_text(width)
        elif fmt == 'html':
            item.make_html()
        elif fmt == 'csv':
            item.make_csv()
Make subreport text in a specified format
12,873
def compact_tables(self):
    """Merge identical 'table' report items of this subreport into one.

    Two table items that compare equal are merged by extending the first
    one's results with the second's; the duplicates are then removed.
    """
    items_to_del = set()
    for i in range(len(self.report_data)):
        # Skip items already scheduled for deletion.
        if i in items_to_del:
            continue
        # Prefix check: matches function names starting with 'table'.
        if self.report_data[i].function[0:5] == 'table':
            for j in range(i + 1, len(self.report_data)):
                if self.report_data[j].function[0:5] == 'table':
                    if self.report_data[i] == self.report_data[j]:
                        logger.debug('Merge of 2 identical report tables: {0}'.format(self.report_data[i].title))
                        items_to_del.add(j)
                        self.report_data[i].results.extend(self.report_data[j].results)
    if items_to_del:
        # Pop from the highest index down so earlier indexes stay valid.
        for i in reversed(sorted(items_to_del, key=lambda x: x)):
            self.report_data.pop(i)
Compact report items of type table with the same results type. Report items of type table in the same subreport are merged into one. The data are ordered by the first column.
12,874
def make(self, apps):
    """Create the report from application results.

    Builds every subreport from ``apps`` and then compacts duplicate
    tables within each of them.
    """
    for sub in self.subreports:
        logger.debug('Make subreport "{0}"'.format(sub.name))
        sub.make(apps)
    for sub in self.subreports:
        sub.compact_tables()
Create the report from application results
12,875
def get_report_parts(self, apps, formats):
    """Make report item texts in each requested format.

    :param apps: mapping of app name -> app object with match counts.
    :param formats: iterable of 'text', 'html', 'csv' (None means
        terminal text sized to the current terminal width).
    :return: list of report parts, one per page/table.
    """
    for fmt in formats:
        # Fixed width for file output, terminal width for screen output.
        width = 100 if fmt is not None else tui.get_terminal_size()[0]
        for sr in self.subreports:
            sr.make_format(fmt, width)
    logger.debug('Build a map for arguments and run\'s statistics ...')
    value_mapping = {
        'title': self.title,
        'patterns': ', '.join([repr(pattern) for pattern in self.args.patterns]) or None,
        'pattern_files': ', '.join(self.args.pattern_files) or None,
        'hosts': ', '.join(self.args.hosts) or None,
        # Only apps that actually matched something are listed.
        'apps': u', '.join([u'%s(%d)' % (app.name, app.matches) for app in apps.values() if app.matches > 0]),
        'version': __version__
    }
    filters = []
    for flt in self.args.filters:
        filters.append(' AND '.join(['%s=%r' % (k, v.pattern) for k, v in flt.items()]))
    if filters:
        value_mapping['filters'] = ' OR '.join(['(%s)' % item for item in filters])
    else:
        # NOTE(review): `filters` is empty in this branch, so this
        # conditional expression always yields None.
        value_mapping['filters'] = filters[0] if filters else None
    value_mapping.update(self.stats)
    report = []
    for fmt in formats:
        if fmt == 'text':
            logger.info('appends a text page report')
            report.append(self.make_text_page(value_mapping))
        elif fmt == 'html':
            logger.info('appends a html page report')
            report.append(self.make_html_page(value_mapping))
        elif fmt == 'csv':
            logger.info('extends with a list of csv subreports')
            report.extend(self.make_csv_tables())
    return report
Make report item texts in a specified format .
12,876
def set_stats(self, run_stats):
    """Set run statistics for the report.

    Stores a copy of ``run_stats`` with 'files' and 'extra_tags' joined
    into comma-separated strings and a 'tot_files' count added.
    """
    stats = dict(run_stats)
    stats['files'] = ', '.join(stats['files'])
    stats['tot_files'] = len(run_stats['files'])
    stats['extra_tags'] = ', '.join(stats['extra_tags'])
    self.stats = stats
Set run statistics for the report .
12,877
def make_html_page(self, valumap):
    """Build the report as an html page using the template page from file.

    :param valumap: mapping of template placeholders to values; a
        'subreports' entry is added here.
    :return: a TextPart wrapping the rendered page.
    """
    logger.info('Making an html report using template %r.', self.html_template)
    # Fix: a context manager guarantees the file is closed even on error.
    with open(self.html_template) as fh:
        template = fh.read()
    parts = []
    for sr in self.subreports:
        report_data = [item.html for item in sr.report_data if item.html]
        if report_data:
            # NOTE(review): only {1} (sr.reptext) is rendered; sr.title
            # ({0}) is passed but unused -- confirm whether intended.
            parts.append('\n<h2>{1}</h2>\n'.format(sr.title, sr.reptext))
            parts.extend(report_data)
            parts.append('\n<hr/>')
    valumap['subreports'] = '\n'.join(parts)
    html_page = Template(template).safe_substitute(valumap)
    return TextPart(fmt='html', text=html_page, ext='html')
Builds the report as html page using the template page from file .
12,878
def make_text_page(self, valumap):
    """Build the report as a text page using the template page from file.

    :param valumap: mapping of template placeholders to values; a
        'subreports' entry is added here.
    :return: a TextPart wrapping the rendered page.
    """
    logger.info('Making a text report page using template %r.', self.text_template)
    # Fix: a context manager guarantees the file is closed even on error.
    with open(self.text_template) as fh:
        template = fh.read()
    parts = []
    for sr in self.subreports:
        report_data = [item.text for item in sr.report_data if item.text]
        if report_data:
            # Banner: the title framed by asterisk rules sized to its length.
            parts.append('\n{1}\n***** {0} *****\n{1}'.format(sr.title, '*' * (len(sr.title) + 12)))
            parts.extend(report_data)
    valumap['subreports'] = '\n'.join(parts)
    text_page = Template(template).safe_substitute(valumap)
    return TextPart(fmt='text', text=text_page, ext='txt')
Builds the report as text page using the template page from file .
12,879
def make_csv_tables(self):
    """Build the report as a list of csv tables with titles."""
    logger.info('Generate csv report tables')
    return [
        TextPart(fmt='csv', text=data_item.csv, ext='csv')
        for sr in self.subreports
        for data_item in sr.report_data
    ]
Builds the report as a list of csv tables with titles .
12,880
def fetch_items(self):
    """Run the query for the current page's slice of items."""
    skip = self.per_page * (self.page - 1)
    return self._query.limit(self.per_page).offset(skip).all()
Fetch items Performs a query to retrieve items based on current query and pagination settings .
12,881
def next_page(self):
    """Advance to the next page and refresh items.

    :return: False when already on the last page (no-op), True otherwise.
    """
    if self.is_last_page():
        return False
    self.page = self.page + 1
    self.items = self.fetch_items()
    return True
Next page. Uses the query object to fetch the next slice of items, unless on the last page, in which case it does nothing.
12,882
def previous_page(self):
    """Go back to the previous page and refresh items.

    :return: False when already on the first page (no-op), True otherwise.
    """
    if self.is_first_page():
        return False
    self.page = self.page - 1
    self.items = self.fetch_items()
    return True
Previous page Uses query object to fetch previous slice of items unless on first page in which case does nothing
12,883
def _make_spec_file(self):
    """Customize the generated RPM spec file, inserting a %config section."""
    spec_file = setuptools.command.bdist_rpm.bdist_rpm._make_spec_file(self)
    spec_file.extend([
        '%config(noreplace) /etc/lograptor/lograptor.conf',
        '%config(noreplace) /etc/lograptor/report_template.*',
        '%config(noreplace) /etc/lograptor/conf.d/*.conf',
    ])
    return spec_file
Customize spec file inserting %config section
12,884
def user_save_event(user):
    """Log a persist (update/save) event for a user entity."""
    current_app.logger.info(
        'User ({}){} updated/saved'.format(user.id, user.email)
    )
Handle persist event for user entities
12,885
def user_got_role_event(user, role):
    """Log that a user was granted a new role."""
    template = 'User ({}){} got new role [{}]'
    current_app.logger.info(template.format(user.id, user.email, role.handle))
User got new role
12,886
def generate_hash(self, length=30):
    """Return a random alphanumeric string of the given length.

    Uses SystemRandom, i.e. OS-provided entropy.
    """
    import random
    import string
    alphabet = string.ascii_letters + string.digits
    pick = random.SystemRandom().choice
    return ''.join(pick(alphabet) for _ in range(length))
Generate random string of given length
12,887
def gravatar(self, size):
    """Return the gravatar URL for this user's email at the given pixel size."""
    digest = md5(self.email.encode('utf-8')).hexdigest()
    return 'http://www.gravatar.com/avatar/{}?d=mm&s={}'.format(digest, size)
Get url to gravatar
12,888
def is_locked(self):
    """Report whether the account is currently locked.

    A lock whose deadline has already passed is cleared via
    unlock_account() before returning False.
    """
    if not self.locked_until:
        return False
    if self.locked_until >= datetime.datetime.utcnow():
        return True
    # Lock period elapsed: clear it.
    self.unlock_account()
    return False
Is locked? Checks locking and possibly unlocks upon timeout if account was previously locked .
12,889
def lock_account(self, minutes=30):
    """Lock the user account until ``minutes`` from now (UTC)."""
    self.locked_until = datetime.datetime.utcnow() + datetime.timedelta(minutes=minutes)
Lock user account for a period
12,890
def increment_failed_logins(self):
    """Bump the failed-logins counter; lock the account at the limit.

    Once the limit is reached the counter is reset and the account is
    locked for 30 minutes.
    """
    if not self.failed_logins:
        self.failed_logins = 1
        return
    if self.failed_login_limit_reached():
        self.reset_login_counter()
        self.lock_account(30)
        return
    self.failed_logins += 1
Increment failed logins counter
12,891
def failed_login_limit_reached(self):
    """Return True when failed logins have hit the limit (10)."""
    limit = 10
    return bool(self.failed_logins and self.failed_logins >= limit)
A boolean method to check for failed login limit being reached
12,892
def email_secure(self):
    """Return the email obfuscated for display (e.g. 'j**e@host').

    The first and last characters of the local part are kept; the
    alphanumeric characters in between are replaced with '*'. Local
    parts of one or two characters are masked entirely. Returns '' when
    no email is set.
    """
    import re
    email = self._email
    if not email:
        return ''
    address, host = email.split('@')
    if len(address) <= 2:
        return ('*' * len(address)) + '@' + host
    # Fix: the class was [a-zA-z0-9]; the 'A-z' range also matched the
    # punctuation between 'Z' and 'a' ([ \ ] ^ _ and backtick).
    obfuscated = re.sub(r'[a-zA-Z0-9]', '*', address[1:-1])
    return address[:1] + obfuscated + address[-1:] + '@' + host
Obfuscated email used for display
12,893
def email(self, email):
    """Set the (lowercased) email and require re-confirmation.

    The first address ever set goes to _email directly; subsequent
    changes are staged in email_new until confirmed.
    """
    if email == self.email:
        return
    email = email.lower()
    if self._email is None:
        self._email = email
    else:
        self.email_new = email
    self.require_email_confirmation()
Set email and generate confirmation
12,894
def require_email_confirmation(self):
    """Mark the email unconfirmed and issue a fresh 24h confirmation link."""
    self.email_confirmed = False
    self.email_link = self.generate_hash(50)
    self.email_link_expires = datetime.datetime.utcnow() + datetime.timedelta(hours=24)
Mark email as unconfirmed
12,895
def cancel_email_change(self):
    """Discard a pending email change and restore confirmed state."""
    if not self.email_new:
        return
    self.email_new = None
    self.email_confirmed = True
    self.email_link = None
    self.email_link_expires = None
Cancel email change for new users and roll back data
12,896
def email_link_expired(self, now=None):
    """Return True when the email confirmation link has expired.

    ``now`` may be supplied for testing; defaults to the current UTC time.
    """
    reference = now if now else datetime.datetime.utcnow()
    return self.email_link_expires < reference
Check if email link expired
12,897
def password(self, password):
    """Hash the given value with passlib and store it as the password."""
    from boiler.user.util.passlib import passlib_context
    self._password = passlib_context.encrypt(str(password))
Encode a string and set as password
12,898
def verify_password(self, password):
    """Check a candidate string against the stored password hash."""
    if self.password is None:
        return False
    from boiler.user.util.passlib import passlib_context
    return passlib_context.verify(str(password), self.password)
Verify a given string for being valid password
12,899
def generate_password_link(self):
    """Issue a password reset link token valid for 24 hours."""
    self.password_link = self.generate_hash(50)
    self.password_link_expires = datetime.datetime.utcnow() + datetime.timedelta(hours=24)
Generates a link to reset password