idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
9,100
def key_description(self):
    """Return a human-readable description of the key."""
    vk, scan, flags = self._get_key_info()
    if not vk:
        return "%s" % self.key
    # Prefer the symbolic name when the virtual key code is known.
    if vk in CODE_NAMES:
        return CODE_NAMES[vk]
    return "VK %d" % vk
Return a description of the key
9,101
def _get_key_info(self):
    """Virtual keys have extended flag set.

    Keys 33-46 (navigation/system) and 91-93 (windows/app keys) need
    KEYEVENTF_EXTENDEDKEY when the event is synthesized.
    """
    is_extended = 33 <= self.key <= 46 or 91 <= self.key <= 93
    flags = KEYEVENTF_EXTENDEDKEY if is_extended else 0
    return self.key, MapVirtualKey(self.key, 0), flags
Virtual keys have extended flag set
9,102
def _get_key_info(self):
    """Derive vk and scan code for an escaped (non-Unicode) key action.

    EscapedKeyAction doesn't send the key as Unicode, so the vk and scan
    code are generated differently from the plain KeyAction.
    """
    vkey_scan = LoByte(VkKeyScan(self.key))
    return (vkey_scan, MapVirtualKey(vkey_scan, 0), 0)
EscapedKeyAction doesn't send it as Unicode, and the vk and scan code are generated differently
9,103
def setup ( self ) : self . template = self . _generate_inline_policy ( ) if self . dry_run is not True : self . client = self . _get_client ( ) username = self . _get_username_for_key ( ) policy_document = self . _generate_inline_policy ( ) self . _attach_inline_policy ( username , policy_document ) pass
Method runs the plugin attaching policies to the user in question
9,104
def _get_policies ( self ) : username = self . _get_username_for_key ( ) policies = self . client . list_user_policies ( UserName = username ) return policies
Returns all the policy names for a given user
9,105
def _get_username_for_key ( self ) : response = self . client . get_access_key_last_used ( AccessKeyId = self . compromised_resource [ 'access_key_id' ] ) username = response [ 'UserName' ] return username
Find the user for a given access key
9,106
def _generate_inline_policy(self):
    """Render the deny-sts-before-time policy from its jinja template."""
    template_name = self._locate_file('deny-sts-before-time.json.j2')
    # FIX: context manager guarantees the handle closes even if read() raises
    # (was an explicit open()/read()/close() sequence).
    with open(template_name) as template_file:
        template_contents = template_file.read()
    jinja_template = Template(template_contents)
    return jinja_template.render(before_date=self._get_date())
Renders a policy from a jinja template
9,107
def _attach_inline_policy(self, username, policy_document):
    """Attach the revocation policy document to the given user."""
    response = self.client.put_user_policy(
        UserName=username,
        PolicyName="threatresponse-temporal-key-revocation",
        PolicyDocument=policy_document,
    )
    logger.info(
        'An inline policy has been attached for'
        ' {u} revoking sts tokens.'.format(u=username)
    )
    return response
Attaches the policy to the user
9,108
def _locate_file ( self , pattern , root = os . path . dirname ( 'revokests_key.py' ) ) : for path , dirs , files in os . walk ( os . path . abspath ( root ) ) : for filename in fnmatch . filter ( files , pattern ) : return os . path . join ( path , filename )
Locate all files matching supplied filename pattern in and below
9,109
def generate_tsv_pep_protein_quants(fns):
    """Yield (file basename, header, quant) for each protein quant.

    Unlike generate_tsv_lines_multifile, the input files may each carry a
    different header, so the header is re-read per file and yielded with
    every quant line.
    """
    for fn in fns:
        base = os.path.basename(fn)
        header = get_tsv_header(fn)
        for pquant in generate_split_tsv_lines(fn, header):
            yield base, header, pquant
Unlike generate_tsv_lines_multifile this generates tsv lines from multiple files that may have different headers . Yields fn header as well as quant data for each protein quant
9,110
def mzmlfn_kronikfeature_generator(mzmlfns, kronikfns):
    """Yield (spectra-file basename, kronik feature) tuples."""
    for mzmlfn, kronikfn in zip(mzmlfns, kronikfns):
        base = os.path.basename(mzmlfn)
        for quant_el in generate_kronik_feats(kronikfn):
            yield base, quant_el
Generates tuples of spectra filename and corresponding output features from kronik
9,111
def generate_split_tsv_lines(fn, header):
    """Yield a {header-field: value} dict for every PSM line in *fn*."""
    for line in generate_tsv_psms_line(fn):
        yield dict(zip(header, line.strip().split('\t')))
Returns dicts with header - keys and psm statistic values
9,112
def get_proteins_from_psm(line):
    """Return the list of proteins reported by Mzid2TSV for a PSM line.

    Strips the "(pre=X post=Y)" annotations from each entry; when unrolled
    lines are given this returns the single protein from the line.
    """
    proteins = line[mzidtsvdata.HEADER_PROTEIN].split(';')
    outproteins = []
    for protein in proteins:
        # FIX: raw string -- '\(' in a non-raw literal is an invalid escape
        # sequence (DeprecationWarning, SyntaxError in future Pythons).
        prepost_protein = re.sub(r'\(pre=.*post=.*\)', '', protein).strip()
        outproteins.append(prepost_protein)
    return outproteins
From a line return list of proteins reported by Mzid2TSV . When unrolled lines are given this returns the single protein from the line .
9,113
def aug_sysargv(cmdstr):
    """DEBUG FUNC: extend sys.argv as if *cmdstr* had been typed on the CLI."""
    import shlex
    sys.argv.extend(shlex.split(cmdstr))
DEBUG FUNC modify argv to look like you ran a command
9,114
def get_module_verbosity_flags(*labels):
    """Check the standard CLI flags enabling module-specific verbosity.

    Returns (verbose_module, veryverbose_module); verbose_module is forced
    to 2 when the very-verbose variant is requested.
    """
    verb_prefixes = ['--verbose-', '--verb', '--verb-']
    vverb_prefixes = ['--veryverbose-', '--veryverb', '--veryverb-']
    verbose_flags = tuple(
        prefix + lbl for prefix, lbl in itertools.product(verb_prefixes, labels))
    veryverbose_flags = tuple(
        prefix + lbl for prefix, lbl in itertools.product(vverb_prefixes, labels))
    veryverbose_module = get_argflag(veryverbose_flags) or VERYVERBOSE
    verbose_module = (get_argflag(verbose_flags) or veryverbose_module or VERBOSE)
    if veryverbose_module:
        verbose_module = 2
    return verbose_module, veryverbose_module
Checks for standard flags for enabling module-specific verbosity
9,115
def get_argflag(argstr_, default=False, help_='', return_specified=None,
                need_prefix=True, return_was_specified=False, argv=None,
                debug=None, **kwargs):
    """Check if the command line has a flag or its corresponding noflag.

    Also honors UTOOL_* environment variables set to TRUE/ON, which are
    folded into a copy of argv before matching.
    """
    if argv is None:
        argv = sys.argv
    assert isinstance(default, bool), 'default must be boolean'
    argstr_list = meta_util_iter.ensure_iterable(argstr_)
    # Register the flag for --help style introspection.
    _register_arg(argstr_list, bool, default, help_)
    parsed_val = default
    was_specified = False
    if debug is None:
        debug = DEBUG
    import os
    for key, val in os.environ.items():
        key = key.upper()
        sentinal = 'UTOOL_'
        if key.startswith(sentinal):
            flag = '--' + key[len(sentinal):].lower().replace('_', '-')
            if val.upper() in ['TRUE', 'ON']:
                pass
            elif val.upper() in ['FALSE', 'OFF']:
                continue
            else:
                continue
            # Work on a copy so the caller's argv is untouched.
            new_argv = [flag]
            argv = argv[:] + new_argv
            if debug:
                print('ENV SPECIFIED COMMAND LINE')
                print('argv.extend(new_argv=%r)' % (new_argv,))
    for argstr in argstr_list:
        if not (argstr.find('--') == 0 or (argstr.find('-') == 0 and len(argstr) == 2)):
            raise AssertionError('Invalid argstr: %r' % (argstr,))
        if not need_prefix:
            noprefix = argstr.replace('--', '')
            if noprefix in argv:
                parsed_val = True
                was_specified = True
                break
        noarg = argstr.replace('--', '--no')
        if argstr in argv:
            parsed_val = True
            was_specified = True
            break
        elif noarg in argv:
            parsed_val = False
            was_specified = True
            break
        elif argstr + '=True' in argv:
            parsed_val = True
            was_specified = True
            break
        elif argstr + '=False' in argv:
            parsed_val = False
            was_specified = True
            break
    if return_specified is None:
        return_specified = return_was_specified
    if return_specified:
        return parsed_val, was_specified
    else:
        return parsed_val
Checks if the commandline has a flag or a corresponding noflag
9,116
def get_arg_dict(argv=None, prefix_list=['--'], type_hints={}):
    """Yet another way for parsing args into a {name: value} dict.

    NOTE(review): the flattened source contained a bare ``r`` token where a
    raw docstring used to be -- a NameError at runtime; restored here as a
    proper docstring.
    """
    if argv is None:
        argv = sys.argv
    arg_dict = {}

    def startswith_prefix(arg):
        return any([arg.startswith(prefix) for prefix in prefix_list])

    def argx_has_value(argv, argx):
        # A value either follows '=' in the same token or is the next token.
        if argv[argx].find('=') > -1:
            return True
        if argx + 1 < len(argv) and not startswith_prefix(argv[argx + 1]):
            return True
        return False

    def get_arg_value(argv, argx, argname):
        if argv[argx].find('=') > -1:
            return '='.join(argv[argx].split('=')[1:])
        else:
            type_ = type_hints.get(argname, None)
            if type_ is None:
                return argv[argx + 1]
            else:
                return parse_arglist_hack(argx, argv=argv)

    for argx in range(len(argv)):
        arg = argv[argx]
        for prefix in prefix_list:
            if arg.startswith(prefix):
                argname = arg[len(prefix):]
                if argx_has_value(argv, argx):
                    if arg.find('=') > -1:
                        argname = arg[len(prefix):arg.find('=')]
                    argvalue = get_arg_value(argv, argx, argname)
                    arg_dict[argname] = argvalue
                else:
                    # A bare flag with no value is treated as boolean True.
                    arg_dict[argname] = True
                break
    return arg_dict
r Yet another way for parsing args
9,117
def argv_flag_dec(*argin, **kwargs):
    """Decorator controlling program flow based on sys.argv.

    The decorated function does not execute without its corresponding
    flag; usable bare (@argv_flag_dec) or parameterized with kwargs.
    """
    kwargs = kwargs.copy()
    kwargs['default'] = kwargs.get('default', False)
    from utool import util_decor

    @util_decor.ignores_exc_tb(outer_wrapper=False)
    def wrap_argv_flag_dec(func):
        return __argv_flag_dec(func, **kwargs)

    assert len(argin) < 2, 'specify 0 or 1 args'
    if len(argin) == 1 and util_type.is_funclike(argin[0]):
        # Called bare: argin[0] is the function itself.
        return wrap_argv_flag_dec(argin[0])
    else:
        return wrap_argv_flag_dec
Decorators which control program flow based on sys . argv the decorated function does not execute without its corresponding flag
9,118
def __argv_flag_dec(func, default=False, quiet=QUIET, indent=False):
    """Wrap *func* so it only runs when its command line flag is present.

    The flag names are derived from the function name (a leading 'no' is
    stripped, underscores become dashes).
    """
    from utool import util_decor
    flagname = meta_util_six.get_funcname(func)
    if flagname.find('no') == 0:
        flagname = flagname[2:]
    flags = ('--' + flagname.replace('_', '-'), '--' + flagname,)

    @util_decor.ignores_exc_tb(outer_wrapper=False)
    def guard_wrapper(*args, **kwargs):
        from utool import util_print
        default_ = kwargs.pop('default', default)
        alias_flags = kwargs.pop('alias_flags', [])
        is_flagged = (get_argflag(flags, default_) or
                      get_argflag('--print-all') or
                      any([get_argflag(_) for _ in alias_flags]))
        # An explicit kwarg overrides whatever the command line said.
        if flagname in kwargs:
            is_flagged = kwargs.pop(flagname)
        if is_flagged:
            func_label = flags[0].replace('--', '').replace('print-', '')
            print('\n+ --- ' + func_label + ' ')
            use_indent = indent is not False
            if indent is True:
                indent_ = '[%s]' % func_label
            else:
                indent_ = indent
            with util_print.Indenter(indent_, enabled=use_indent):
                ret = func(*args, **kwargs)
            print('L ' + func_label + ' \n')
            return ret
        else:
            PRINT_DISABLED_FLAGDEC = not get_argflag(
                '--noinform', help_='does not print disabled flag decorators')
            if not quiet and PRINT_DISABLED_FLAGDEC:
                print('~~~ %s ~~~' % flags[0])

    meta_util_six.set_funcname(guard_wrapper, meta_util_six.get_funcname(func))
    return guard_wrapper
Logic for controlling if a function gets called based on command line
9,119
def get_argv_tail(scriptname, prefer_main=None, argv=None):
    """Get the rest of the arguments after a script has been invoked.

    Hack: accounts for ``python -m`` style invocations.
    NOTE(review): restores the raw docstring lost as a stray bare ``r``
    token (a NameError at runtime) in the flattened source.
    """
    if argv is None:
        argv = sys.argv
    import utool as ut
    modname = ut.get_argval('-m', help_='specify module name to profile',
                            argv=argv)
    if modname is not None:
        # python -m <modname>: substitute the resolved module path for the
        # script and keep everything after the module name.
        modpath = ut.get_modpath(modname, prefer_main=prefer_main)
        argvx = argv.index(modname) + 1
        argv_tail = [modpath] + argv[argvx:]
    else:
        try:
            argvx = argv.index(scriptname)
        except ValueError:
            # Fall back to substring match (absolute vs relative paths).
            for argvx, arg in enumerate(argv):
                if scriptname in arg:
                    break
        argv_tail = argv[(argvx + 1):]
    return argv_tail
r gets the rest of the arguments after a script has been invoked hack . accounts for python - m scripts .
9,120
def get_cmdline_varargs(argv=None):
    """Return positional args after the scriptname and before the first
    argument starting with '-' on the command line."""
    if argv is None:
        argv = sys.argv
    # An empty scriptname means there is nothing to skip over.
    start = 0 if argv[0] == '' else 1
    end = start
    for pos in range(start, len(argv)):
        if argv[pos].startswith('-'):
            end = pos
            break
    else:
        end = len(argv)
    return argv[start:end]
Returns positional args specified directly after the scriptname and before any args starting with - on the commandline .
9,121
def argval(key, default=None, type=None, smartcast=True, return_exists=False,
           argv=None):
    """Alias for get_argval.

    When no type is given, infers it from a defaultable default value
    (tuple, list, int, float).
    """
    defaultable_types = (tuple, list, int, float)
    if type is None and isinstance(default, defaultable_types):
        type = builtins.type(default)
    return get_argval(key, type_=type, default=default,
                      return_was_specified=return_exists,
                      smartcast=smartcast, argv=argv)
alias for get_argval
9,122
def plot_real_feature(df, feature_name, bins=50, figsize=(15, 15)):
    """Plot the distribution of a real-valued feature conditioned by target.

    Draws the overall distribution, per-target-class distributions, and
    box/violin plots of *feature_name* against the binary ``target`` column.
    """
    ix_negative_target = df[df.target == 0].index
    ix_positive_target = df[df.target == 1].index
    plt.figure(figsize=figsize)
    ax_overall_dist = plt.subplot2grid((3, 2), (0, 0), colspan=2)
    ax_target_conditional_dist = plt.subplot2grid((3, 2), (1, 0), colspan=2)
    ax_botplot = plt.subplot2grid((3, 2), (2, 0))
    ax_violin_plot = plt.subplot2grid((3, 2), (2, 1))
    ax_overall_dist.set_title('Distribution of {}'.format(feature_name),
                              fontsize=16)
    # FIX: honor the *bins* parameter (was hard-coded to 50 here while the
    # two conditional plots below already used it).
    sns.distplot(df[feature_name], bins=bins, ax=ax_overall_dist)
    sns.distplot(df.loc[ix_positive_target][feature_name], bins=bins,
                 ax=ax_target_conditional_dist, label='Positive Target')
    sns.distplot(df.loc[ix_negative_target][feature_name], bins=bins,
                 ax=ax_target_conditional_dist, label='Negative Target')
    ax_target_conditional_dist.legend(loc='upper right', prop={'size': 14})
    sns.boxplot(y=feature_name, x='target', data=df, ax=ax_botplot)
    sns.violinplot(y=feature_name, x='target', data=df, ax=ax_violin_plot)
    plt.show()
Plot the distribution of a real - valued feature conditioned by the target .
9,123
def plot_pair(df, feature_name_1, feature_name_2, kind='scatter', alpha=0.01,
              **kwargs):
    """Scatter two features against one another via a seaborn jointplot
    (which also reports the Pearson correlation coefficient)."""
    plt.figure()
    sns.jointplot(feature_name_1, feature_name_2, df,
                  alpha=alpha, kind=kind, **kwargs)
    plt.show()
Plot a scatterplot of two features against one another and calculate Pearson correlation coefficient .
9,124
def plot_feature_correlation_heatmap(df, features, font_size=9,
                                     figsize=(15, 15), save_filename=None):
    """Plot a correlation heatmap between every feature pair (plus target).

    Masks the upper triangle; optionally saves the figure to
    *save_filename* at 300 dpi.
    """
    features = features[:]  # copy so the caller's list is not mutated
    features += ['target']
    mcorr = df[features].corr()
    # FIX: np.bool was deprecated and removed in NumPy 1.24; the builtin
    # bool is the correct dtype here.
    mask = np.zeros_like(mcorr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    fig = plt.figure(figsize=figsize)
    heatmap = sns.heatmap(mcorr, mask=mask, cmap=cmap, square=True,
                          annot=True, fmt='0.2f',
                          annot_kws={'size': font_size},)
    heatmap.tick_params(axis='both', which='major', labelsize=font_size)
    heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
    heatmap.set_xticklabels(features, rotation=90)
    heatmap.set_yticklabels(reversed(features))
    plt.show()
    if save_filename is not None:
        fig.savefig(save_filename, dpi=300)
Plot a correlation heatmap between every feature pair .
9,125
def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
    """Scatterplot matrix for *features*, colored by the target column.

    Optionally downsamples the frame first to keep the pairplot responsive.
    """
    if downsample_frac:
        df = df.sample(frac=downsample_frac)
    plt.figure(figsize=figsize)
    sns.pairplot(df[features], hue='target')
    plt.show()
Plot a scatterplot matrix for a list of features colored by target value .
9,126
def process_nested_tags(self, node, tag=''):
    """Process child tags, dispatching through the tag parse table."""
    t = node.ltag if tag == '' else tag.lower()
    for child in node.children:
        # Push the child onto the ancestry stack for error reporting
        # while it is being parsed.
        self.xml_node_stack = [child] + self.xml_node_stack
        ctagl = child.ltag
        if ctagl in self.tag_parse_table and ctagl in self.valid_children[t]:
            self.tag_parse_table[ctagl](child)
        else:
            self.parse_component_by_typename(child, child.tag)
        self.xml_node_stack = self.xml_node_stack[1:]
Process child tags .
9,127
def parse(self, xmltext):
    """Parse a string containing LEMS XML text."""
    xml = LEMSXMLNode(xe.XML(xmltext))
    if xml.ltag not in ('lems', 'neuroml'):
        raise ParseError('<Lems> expected as root element (or even <neuroml>), found: {0}'.format(xml.ltag))
    self.process_nested_tags(xml)
Parse a string containing LEMS XML text .
9,128
def raise_error(self, message, *params, **key_params):
    """Raise a ParseError whose message includes the XML ancestry path.

    FIX: the original reversed self.xml_node_stack in place and attempted
    to reverse it back AFTER the raise statement -- unreachable code that
    left the stack permanently reversed. Iterate a reversed copy instead
    so the stack is never mutated.
    """
    s = 'Parser error in '
    stack = list(reversed(self.xml_node_stack))
    if len(stack) > 1:
        node = stack[0]
        s += '<{0}'.format(node.tag)
        if 'name' in node.lattrib:
            s += ' name=\"{0}\"'.format(node.lattrib['name'])
        if 'id' in node.lattrib:
            s += ' id=\"{0}\"'.format(node.lattrib['id'])
        s += '>'
        for node in stack[1:]:
            s += '.<{0}'.format(node.tag)
            if 'name' in node.lattrib:
                s += ' name=\"{0}\"'.format(node.lattrib['name'])
            if 'id' in node.lattrib:
                s += ' id=\"{0}\"'.format(node.lattrib['id'])
            s += '>'
    s += ':\n ' + message
    raise ParseError(s, *params, **key_params)
Raise a parse error .
9,129
def parse_component_by_typename(self, node, type_):
    """Parse a component defined directly by its (tag) type name."""
    # id and type fall back to the tag name when not given explicitly;
    # note an explicit 'type' attribute overrides the type_ parameter.
    id_ = node.lattrib['id'] if 'id' in node.lattrib else node.tag
    type_ = node.lattrib['type'] if 'type' in node.lattrib else node.tag
    component = Component(id_, type_)
    if self.current_component:
        component.set_parent_id(self.current_component.id)
        self.current_component.add_child(component)
    else:
        self.model.add_component(component)
    for key in node.attrib:
        if key.lower() not in ['id', 'type']:
            component.set_parameter(key, node.attrib[key])
    # Recurse with this component as the current parent, then restore.
    old_component = self.current_component
    self.current_component = component
    self.process_nested_tags(node, 'component')
    self.current_component = old_component
Parses components defined directly by component name .
9,130
def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None):
    """Chain the xmltag generator over multiple input files."""
    return itertools.chain.from_iterable(
        generate_xmltags(fn, tag, ignore_tags, ns) for fn in input_files)
Calls xmltag generator for multiple files .
9,131
def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags):
    """Yield stringified XML for every element with *tag* across files."""
    elements = generate_tags_multiple_files(input_files, tag, ignore_tags, ns)
    for element in elements:
        yield formatting.string_and_clear(element, ns)
Creates stringified xml output of elements with certain tag .
9,132
def generate_xmltags(fn, returntag, ignore_tags, ns=None):
    """Base generator for percolator/mzML/mzIdentML XML output elements.

    Yields elements whose tag equals *returntag*; elements named in
    *ignore_tags* are cleared as soon as the parser finishes them, which
    bounds memory use on large files.
    """
    xmlns = create_namespace(ns)
    wanted = '{0}{1}'.format(xmlns, returntag)
    ns_ignore = ['{0}{1}'.format(xmlns, x) for x in ignore_tags]
    for ac, el in etree.iterparse(fn):
        if el.tag == wanted:
            yield el
        elif el.tag in ns_ignore:
            formatting.clear_el(el)
Base generator for percolator xml psm peptide protein output as well as for mzML mzIdentML . ignore_tags are the ones that are cleared when met by parser .
9,133
def add_component_type(self, component_type):
    """Add a component type to the model, sanitizing ':' in its name."""
    name = component_type.name
    if ':' in name:
        # Colons would break downstream name lookups; replace in place.
        name = name.replace(':', '_')
        component_type.name = name
    self.component_types[name] = component_type
Adds a component type to the model .
9,134
def add(self, child):
    """Add a typed child object to the model.

    Dispatches on the child's class; raises ModelError for unsupported
    types.
    """
    # (class, handler) pairs checked in the original priority order.
    dispatch = (
        (Include, self.add_include),
        (Dimension, self.add_dimension),
        (Unit, self.add_unit),
        (ComponentType, self.add_component_type),
        (Component, self.add_component),
        (FatComponent, self.add_fat_component),
        (Constant, self.add_constant),
    )
    for cls, handler in dispatch:
        if isinstance(child, cls):
            handler(child)
            return
    raise ModelError('Unsupported child element')
Adds a typed child object to the model .
9,135
def include_file(self, path, include_dirs=[]):
    """Include a file into the current model.

    Tries *path* directly, then each include directory; a file is parsed
    at most once (tracked in self.included_files). A missing file raises
    when self.fail_on_missing_includes is set, else is logged in debug
    mode.
    """
    if not self.include_includes:
        return
    if self.debug:
        print("------------------ Including a file: %s" % path)
    inc_dirs = include_dirs if include_dirs else self.include_dirs
    parser = LEMSFileParser(self, inc_dirs, self.include_includes)
    if os.access(path, os.F_OK):
        if path not in self.included_files:
            # FIX: close the handle deterministically (was open(...).read()
            # which leaked the file object until GC).
            with open(path) as f:
                parser.parse(f.read())
            self.included_files.append(path)
            return
        else:
            if self.debug:
                print("Already included: %s" % path)
            return
    else:
        for inc_dir in inc_dirs:
            new_path = (inc_dir + '/' + path)
            if os.access(new_path, os.F_OK):
                if new_path not in self.included_files:
                    with open(new_path) as f:
                        parser.parse(f.read())
                    self.included_files.append(new_path)
                    return
                else:
                    if self.debug:
                        print("Already included: %s" % path)
                    return
    msg = 'Unable to open ' + path
    if self.fail_on_missing_includes:
        raise Exception(msg)
    elif self.debug:
        print(msg)
Includes a file into the current model .
9,136
def import_from_file(self, filepath):
    """Import a model from a file, adding its directory to the include
    search path."""
    inc_dirs = self.include_directories + [dirname(filepath)]
    parser = LEMSFileParser(self, inc_dirs, self.include_includes)
    with open(filepath) as f:
        parser.parse(f.read())
Import a model from a file .
9,137
def export_to_dom(self):
    """Export this model to a minidom DOM document."""
    namespaces = ('xmlns="http://www.neuroml.org/lems/%s" '
                  'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
                  'xsi:schemaLocation="http://www.neuroml.org/lems/%s %s"')
    namespaces = namespaces % (self.target_lems_version,
                               self.target_lems_version,
                               self.schema_location)
    # Assemble the document as a list of fragments, then join once.
    parts = ['<Lems %s>' % namespaces]
    parts.extend(include.toxml() for include in self.includes)
    parts.extend('<Target component="{0}"/>'.format(target)
                 for target in self.targets)
    parts.extend(dimension.toxml() for dimension in self.dimensions)
    parts.extend(unit.toxml() for unit in self.units)
    parts.extend(constant.toxml() for constant in self.constants)
    parts.extend(component_type.toxml()
                 for component_type in self.component_types)
    parts.extend(component.toxml() for component in self.components)
    parts.append('</Lems>')
    return minidom.parseString(''.join(parts))
Exports this model to a DOM .
9,138
def export_to_file(self, filepath, level_prefix=' '):
    """Export this model to a file as pretty-printed XML."""
    xmldom = self.export_to_dom()
    xmlstr = xmldom.toprettyxml(level_prefix, '\n',)
    # FIX: context manager closes the file even if write() raises
    # (was an explicit open()/write()/close() sequence).
    with open(filepath, 'w') as f:
        f.write(xmlstr)
Exports this model to a file .
9,139
# Resolves references in this model: resolves every component type, fattens
# each plain component, and registers numeric values for constants.
# NOTE(review): flattened source -- the nesting of the trailing
# 'for c in ct.constants' loop is ambiguous here ('ct' is the loop variable
# of the earlier component-type loop, so as written it only visits the LAST
# component type's constants); left byte-identical pending confirmation
# against the upstream pylems sources.
def resolve ( self ) : model = self . copy ( ) for ct in model . component_types : model . resolve_component_type ( ct ) for c in model . components : if c . id not in model . fat_components : model . add ( model . fatten_component ( c ) ) for c in ct . constants : c2 = c . copy ( ) c2 . numeric_value = model . get_numeric_value ( c2 . value , c2 . dimension ) model . add ( c2 ) return model
Resolves references in this model .
9,140
def resolve_component_type(self, component_type):
    """Resolve the 'extends' chain of *component_type* in place.

    Recursively resolves the base type first, merges it into this type,
    unions the type sets, and clears the extends marker.
    """
    if component_type.extends:
        try:
            base_ct = self.component_types[component_type.extends]
        except KeyError:
            # FIX: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit; only a missing key is expected.
            raise ModelError("Component type '{0}' trying to extend unknown component type '{1}'",
                             component_type.name, component_type.extends)
        self.resolve_component_type(base_ct)
        self.merge_component_types(component_type, base_ct)
        component_type.types = set.union(component_type.types, base_ct.types)
        component_type.extends = None
Resolves references in the specified component type .
9,141
# Merge various maps in the given component type from a base component type:
# parameters are merged field-wise, then every map/list of the type (and of
# its dynamics, structure and simulation sub-objects) is merged via
# merge_maps/merge_lists.
# NOTE(review): flattened source -- inside the parameter loop it is ambiguous
# whether 'p.dimension = basep.dimension' sits inside the 'if p.fixed:'
# branch or always executes; left byte-identical pending confirmation.
def merge_component_types ( self , ct , base_ct ) : for parameter in base_ct . parameters : if parameter . name in ct . parameters : p = ct . parameters [ parameter . name ] basep = base_ct . parameters [ parameter . name ] if p . fixed : p . value = p . fixed_value p . dimension = basep . dimension else : ct . parameters [ parameter . name ] = base_ct . parameters [ parameter . name ] merge_maps ( ct . properties , base_ct . properties ) merge_maps ( ct . derived_parameters , base_ct . derived_parameters ) merge_maps ( ct . index_parameters , base_ct . index_parameters ) merge_maps ( ct . constants , base_ct . constants ) merge_maps ( ct . exposures , base_ct . exposures ) merge_maps ( ct . requirements , base_ct . requirements ) merge_maps ( ct . component_requirements , base_ct . component_requirements ) merge_maps ( ct . instance_requirements , base_ct . instance_requirements ) merge_maps ( ct . children , base_ct . children ) merge_maps ( ct . texts , base_ct . texts ) merge_maps ( ct . links , base_ct . links ) merge_maps ( ct . paths , base_ct . paths ) merge_maps ( ct . event_ports , base_ct . event_ports ) merge_maps ( ct . component_references , base_ct . component_references ) merge_maps ( ct . attachments , base_ct . attachments ) merge_maps ( ct . dynamics . state_variables , base_ct . dynamics . state_variables ) merge_maps ( ct . dynamics . derived_variables , base_ct . dynamics . derived_variables ) merge_maps ( ct . dynamics . conditional_derived_variables , base_ct . dynamics . conditional_derived_variables ) merge_maps ( ct . dynamics . time_derivatives , base_ct . dynamics . time_derivatives ) merge_maps ( ct . dynamics . kinetic_schemes , base_ct . dynamics . kinetic_schemes ) merge_lists ( ct . structure . event_connections , base_ct . structure . event_connections ) merge_lists ( ct . structure . child_instances , base_ct . structure . child_instances ) merge_lists ( ct . structure . multi_instantiates , base_ct . structure . 
multi_instantiates ) merge_maps ( ct . simulation . runs , base_ct . simulation . runs ) merge_maps ( ct . simulation . records , base_ct . simulation . records ) merge_maps ( ct . simulation . event_records , base_ct . simulation . event_records ) merge_maps ( ct . simulation . data_displays , base_ct . simulation . data_displays ) merge_maps ( ct . simulation . data_writers , base_ct . simulation . data_writers ) merge_maps ( ct . simulation . event_writers , base_ct . simulation . event_writers )
Merge various maps in the given component type from a base component type .
9,142
# Resolve simulation specifications: builds Run/Record/EventRecord/
# DataDisplay/DataWriter/EventWriter objects from the component type's
# simulation section, resolving references through the fat component's
# parameters, paths and texts, and adds each to fc.simulation.
# NOTE(review): flattened source -- the exact nesting of the try/except
# blocks relative to the surrounding for loops is ambiguous, and the bare
# 'except:' clauses would also swallow KeyboardInterrupt; left
# byte-identical pending confirmation against the upstream pylems sources.
def resolve_simulation ( self , fc , ct ) : for run in ct . simulation . runs : try : run2 = Run ( fc . component_references [ run . component ] . referenced_component , run . variable , fc . parameters [ run . increment ] . numeric_value , fc . parameters [ run . total ] . numeric_value ) except : raise ModelError ( "Unable to resolve simulation run parameters in component '{0}'" , fc . id ) fc . simulation . add ( run2 ) for record in ct . simulation . records : try : record2 = Record ( fc . paths [ record . quantity ] . value , fc . parameters [ record . scale ] . numeric_value if record . scale else 1 , fc . texts [ record . color ] . value if record . color else '#000000' ) except : raise ModelError ( "Unable to resolve simulation record parameters in component '{0}'" , fc . id ) fc . simulation . add ( record2 ) for event_record in ct . simulation . event_records : try : event_record2 = EventRecord ( fc . paths [ event_record . quantity ] . value , fc . texts [ event_record . eventPort ] . value ) except : raise ModelError ( "Unable to resolve simulation event_record parameters in component '{0}'" , fc . id ) fc . simulation . add ( event_record2 ) for dd in ct . simulation . data_displays : try : dd2 = DataDisplay ( fc . texts [ dd . title ] . value , '' ) if 'timeScale' in fc . parameters : dd2 . timeScale = fc . parameters [ 'timeScale' ] . numeric_value except : raise ModelError ( "Unable to resolve simulation display parameters in component '{0}'" , fc . id ) fc . simulation . add ( dd2 ) for dw in ct . simulation . data_writers : try : path = '.' if fc . texts [ dw . path ] and fc . texts [ dw . path ] . value : path = fc . texts [ dw . path ] . value dw2 = DataWriter ( path , fc . texts [ dw . file_name ] . value ) except : raise ModelError ( "Unable to resolve simulation writer parameters in component '{0}'" , fc . id ) fc . simulation . add ( dw2 ) for ew in ct . simulation . event_writers : try : path = '.' if fc . texts [ ew . path ] and fc . 
texts [ ew . path ] . value : path = fc . texts [ ew . path ] . value ew2 = EventWriter ( path , fc . texts [ ew . file_name ] . value , fc . texts [ ew . format ] . value ) except : raise ModelError ( "Unable to resolve simulation writer parameters in component '{0}'" , fc . id ) fc . simulation . add ( ew2 )
Resolve simulation specifications .
9,143
def get_numeric_value(self, value_str, dimension=None):
    """Get the numeric value for a parameter value specification.

    Takes the longest numeric prefix of *value_str* as the magnitude and
    the remainder as a unit symbol, resolved against self.units (checking
    the dimension when one is given).
    """
    # Shrink the candidate prefix until it parses as a float.
    n = None
    i = len(value_str)
    while n is None:
        try:
            part = value_str[0:i]
            nn = float(part)
            n = nn
            s = value_str[i:]
        except ValueError:
            i = i - 1
    number = n
    sym = s
    if sym == '':
        return number
    if sym not in self.units:
        raise SimBuildError("Unknown unit symbol '{0}'. Known: {1}",
                            sym, self.units)
    unit = self.units[sym]
    if dimension:
        if dimension != unit.dimension and dimension != '*':
            raise SimBuildError("Unit symbol '{0}' cannot "
                                "be used for dimension '{1}'", sym, dimension)
    else:
        dimension = unit.dimension
    return (number * (10 ** unit.power) * unit.scale) + unit.offset
Get the numeric value for a parameter value specification .
9,144
def start_msstitch(exec_drivers, sysargs):
    """Dispatch an msstitch command.

    Given all drivers of the executable, builds the argument parser,
    parses the command line options and runs the selected driver.
    """
    parser = populate_parser(exec_drivers)
    namespace = parser.parse_args(sysargs[1:])
    namespace.func(**vars(namespace))
Passed all drivers of executable checks which command is passed to the executable and then gets the options for a driver parses them from command line and runs the driver
9,145
def merged(*dicts, **kwargs):
    """Merge dictionaries left to right; later keys (and kwargs) overwrite.

    The result takes the type of the first positional argument; with no
    positional arguments an empty Struct is returned.
    """
    if not dicts:
        return Struct()
    combined = dict()
    for mapping in dicts:
        combined.update(mapping)
    combined.update(kwargs)
    return type(dicts[0])(**combined)
Merge dictionaries . Later keys overwrite .
9,146
def order_derived_parameters(component):
    """Return derived-parameter names ordered so dependencies come first.

    A parameter is emitted once no other pending parameter name occurs
    (as a substring) in its value expression; raises SimBuildError when
    no progress is made within the sweep limit.
    """
    if len(component.derived_parameters) == 0:
        return []
    ordering = []
    pending = []
    for dp in component.derived_parameters:
        pending.append(dp.name)
    maxcount = 5
    count = maxcount
    while count > 0 and pending != []:
        count = count - 1
        for name in pending:
            value = component.derived_parameters[name].value
            # Substring test: does any other pending name appear in value?
            blocked = any(other != name and other in value
                          for other in pending)
            if not blocked:
                ordering.append(name)
                del pending[pending.index(name)]
                count = maxcount
                break
    if count == 0:
        raise SimBuildError(("Unable to find ordering for derived "
                             "parameter in component '{0}'").format(component))
    return ordering
Finds ordering of derived_parameters .
9,147
# Finds an evaluation ordering of derived_variables (and conditional derived
# variables) in a regime: variables with no expression go first (dvsnoexp),
# the rest are emitted once no other pending variable appears in their
# expression tree(s); raises SimBuildError when no progress is made within
# the sweep limit.
# NOTE(review): flattened source -- the nesting of the found-flag loops and
# the placement of the final 'if count == 0' raise are ambiguous here; left
# byte-identical pending confirmation against the upstream pylems sources.
def order_derived_variables ( regime ) : ordering = [ ] dvs = [ ] dvsnoexp = [ ] maxcount = 5 for dv in regime . derived_variables : if dv . expression_tree == None : dvsnoexp . append ( dv . name ) else : dvs . append ( dv . name ) for dv in regime . conditional_derived_variables : if len ( dv . cases ) == 0 : dvsnoexp . append ( dv . name ) else : dvs . append ( dv . name ) count = maxcount while count > 0 and dvs != [ ] : count = count - 1 for dv1 in dvs : if dv1 in regime . derived_variables : dv = regime . derived_variables [ dv1 ] else : dv = regime . conditional_derived_variables [ dv1 ] found = False if isinstance ( dv , DerivedVariable ) : exp_tree = dv . expression_tree for dv2 in dvs : if dv1 != dv2 and is_var_in_exp_tree ( dv2 , exp_tree ) : found = True else : for case in dv . cases : for dv2 in dvs : if dv1 != dv2 and ( is_var_in_exp_tree ( dv2 , case . condition_expression_tree ) or is_var_in_exp_tree ( dv2 , case . value_expression_tree ) ) : found = True if not found : ordering . append ( dv1 ) del dvs [ dvs . index ( dv1 ) ] count = maxcount break if count == 0 : raise SimBuildError ( ( "Unable to find ordering for derived " "variables in regime '{0}'" ) . format ( regime . name ) ) return dvsnoexp + ordering
Finds ordering of derived_variables .
9,148
def build(self):
    """Build the simulation's runnable components from the model targets."""
    self.sim = Simulation()
    for component_id in self.model.targets:
        if component_id not in self.model.components:
            raise SimBuildError("Unable to find target component '{0}'",
                                component_id)
        fat = self.model.fat_components[component_id]
        self.sim.add_runnable(self.build_runnable(fat))
    return self.sim
Build the simulation components from the model .
9,149
def build_event_connections ( self , component , runnable , structure ) : if self . debug : print ( "\n++++++++ Calling build_event_connections of %s with runnable %s, parent %s" % ( component . id , runnable . id , runnable . parent ) ) for ec in structure . event_connections : if self . debug : print ( ec . toxml ( ) ) source = runnable . parent . resolve_path ( ec . from_ ) target = runnable . parent . resolve_path ( ec . to ) if ec . receiver : receiver_template = self . build_runnable ( ec . receiver , target ) receiver = receiver_template . copy ( ) receiver . id = "{0}__{1}__" . format ( component . id , receiver_template . id ) if ec . receiver_container : target . add_attachment ( receiver , ec . receiver_container ) target . add_child ( receiver_template . id , receiver ) target = receiver else : source = runnable . resolve_path ( ec . from_ ) target = runnable . resolve_path ( ec . to ) source_port = ec . source_port target_port = ec . target_port if not source_port : if len ( source . event_out_ports ) == 1 : source_port = source . event_out_ports [ 0 ] else : raise SimBuildError ( ( "No source event port " "uniquely identifiable" " in '{0}'" ) . format ( source . id ) ) if not target_port : if len ( target . event_in_ports ) == 1 : target_port = target . event_in_ports [ 0 ] else : raise SimBuildError ( ( "No destination event port " "uniquely identifiable " "in '{0}'" ) . format ( target ) ) if self . debug : print ( "register_event_out_callback\n Source: %s, %s (port: %s) \n -> %s, %s (port: %s)" % ( source , id ( source ) , source_port , target , id ( target ) , target_port ) ) source . register_event_out_callback ( source_port , lambda : target . inc_event_in ( target_port ) )
Adds event connections to a runnable component based on the structure specifications in the component model .
9,150
def build_structure ( self , component , runnable , structure ) : if self . debug : print ( "\n++++++++ Calling build_structure of %s with runnable %s, parent %s" % ( component . id , runnable . id , runnable . parent ) ) for ch in structure . child_instances : child_runnable = self . build_runnable ( ch . referenced_component , runnable ) runnable . add_child ( child_runnable . id , child_runnable ) runnable . add_child_typeref ( ch . component , child_runnable ) for mi in structure . multi_instantiates : template = self . build_runnable ( mi . component , runnable ) for i in range ( mi . number ) : instance = template . copy ( ) instance . id = "{0}__{1}__{2}" . format ( component . id , template . id , i ) runnable . array . append ( instance ) for fe in structure . for_eachs : self . build_foreach ( component , runnable , fe ) self . build_event_connections ( component , runnable , structure )
Adds structure to a runnable component based on the structure specifications in the component model .
9,151
def build_foreach ( self , component , runnable , foreach , name_mappings = { } ) : if self . debug : print ( "\n++++++++ Calling build_foreach of %s with runnable %s, parent %s, name_mappings: %s" % ( component . id , runnable . id , runnable . parent , name_mappings ) ) target_array = runnable . resolve_path ( foreach . instances ) for target_runnable in target_array : if self . debug : print ( "Applying contents of for_each to %s, as %s" % ( target_runnable . id , foreach . as_ ) ) name_mappings [ foreach . as_ ] = target_runnable for fe2 in foreach . for_eachs : target_array2 = runnable . resolve_path ( fe2 . instances ) for target_runnable2 in target_array2 : name_mappings [ fe2 . as_ ] = target_runnable2 self . build_foreach ( component , runnable , fe2 , name_mappings ) for ec in foreach . event_connections : source = name_mappings [ ec . from_ ] target = name_mappings [ ec . to ] source_port = ec . source_port target_port = ec . target_port if not source_port : if len ( source . event_out_ports ) == 1 : source_port = source . event_out_ports [ 0 ] else : raise SimBuildError ( ( "No source event port " "uniquely identifiable" " in '{0}'" ) . format ( source . id ) ) if not target_port : if len ( target . event_in_ports ) == 1 : target_port = target . event_in_ports [ 0 ] else : raise SimBuildError ( ( "No destination event port " "uniquely identifiable " "in '{0}'" ) . format ( target ) ) if self . debug : print ( "register_event_out_callback\n Source: %s, %s (port: %s) \n -> %s, %s (port: %s)" % ( source , id ( source ) , source_port , target , id ( target ) , target_port ) ) source . register_event_out_callback ( source_port , lambda : target . inc_event_in ( target_port ) )
Iterate over ForEach constructs and process nested elements .
9,152
def process_simulation_specs ( self , component , runnable , simulation ) : for run in simulation . runs : cid = run . component . id + '_' + component . id target = self . build_runnable ( run . component , runnable , cid ) self . sim . add_runnable ( target ) self . current_record_target = target target . configure_time ( run . increment , run . total )
Process simulation - related aspects to a runnable component based on the dynamics specifications in the component model .
9,153
def build_expression_from_tree ( self , runnable , regime , tree_node ) : component_type = self . model . component_types [ runnable . component . type ] dynamics = component_type . dynamics if tree_node . type == ExprNode . VALUE : if tree_node . value [ 0 ] . isalpha ( ) : if tree_node . value == 't' : return 'self.time_completed' elif tree_node . value in component_type . requirements : var_prefix = 'self' v = tree_node . value r = runnable while ( v not in r . instance_variables and v not in r . derived_variables ) : var_prefix = '{0}.{1}' . format ( var_prefix , 'parent' ) r = r . parent if r == None : raise SimBuildError ( "Unable to resolve required " "variable '{0}'" . format ( v ) ) return '{0}.{1}' . format ( var_prefix , v ) elif ( tree_node . value in dynamics . derived_variables or ( regime is not None and tree_node . value in regime . derived_variables ) ) : return 'self.{0}' . format ( tree_node . value ) else : return 'self.{0}_shadow' . format ( tree_node . value ) else : return tree_node . value elif tree_node . type == ExprNode . FUNC1 : pattern = '({0}({1}))' func = self . convert_func ( tree_node . func ) if 'random.uniform' in func : pattern = '({0}(0,{1}))' return pattern . format ( func , self . build_expression_from_tree ( runnable , regime , tree_node . param ) ) else : return '({0}) {1} ({2})' . format ( self . build_expression_from_tree ( runnable , regime , tree_node . left ) , self . convert_op ( tree_node . op ) , self . build_expression_from_tree ( runnable , regime , tree_node . right ) )
Recursively builds a Python expression from a parsed expression tree .
9,154
def build_event_handler ( self , runnable , regime , event_handler ) : if isinstance ( event_handler , OnCondition ) : return self . build_on_condition ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnEvent ) : return self . build_on_event ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnStart ) : return self . build_on_start ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnEntry ) : return self . build_on_entry ( runnable , regime , event_handler ) else : return [ ]
Build event handler code .
9,155
def build_on_condition ( self , runnable , regime , on_condition ) : on_condition_code = [ ] on_condition_code += [ 'if {0}:' . format ( self . build_expression_from_tree ( runnable , regime , on_condition . expression_tree ) ) ] for action in on_condition . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_condition_code += [ ' ' + line ] return on_condition_code
Build OnCondition event handler code .
9,156
def build_on_event ( self , runnable , regime , on_event ) : on_event_code = [ ] if self . debug : on_event_code += [ 'print("Maybe handling something for %s ("+str(id(self))+")")' % ( runnable . id ) , 'print("EICs ("+str(id(self))+"): "+str(self.event_in_counters))' ] on_event_code += [ 'count = self.event_in_counters[\'{0}\']' . format ( on_event . port ) , 'while count > 0:' , ' print(" Handling event")' if self . debug else '' , ' count -= 1' ] for action in on_event . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_event_code += [ ' ' + line ] on_event_code += [ 'self.event_in_counters[\'{0}\'] = 0' . format ( on_event . port ) , ] return on_event_code
Build OnEvent event handler code .
9,157
def build_on_start ( self , runnable , regime , on_start ) : on_start_code = [ ] for action in on_start . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_start_code += [ line ] return on_start_code
Build OnStart start handler code .
9,158
def build_on_entry ( self , runnable , regime , on_entry ) : on_entry_code = [ ] on_entry_code += [ 'if self.current_regime != self.last_regime:' ] on_entry_code += [ ' self.last_regime = self.current_regime' ] for action in on_entry . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_entry_code += [ ' ' + line ] return on_entry_code
Build OnEntry start handler code .
9,159
def build_action ( self , runnable , regime , action ) : if isinstance ( action , StateAssignment ) : return self . build_state_assignment ( runnable , regime , action ) if isinstance ( action , EventOut ) : return self . build_event_out ( action ) if isinstance ( action , Transition ) : return self . build_transition ( action ) else : return [ 'pass' ]
Build event handler action code .
9,160
def build_state_assignment ( self , runnable , regime , state_assignment ) : return [ 'self.{0} = {1}' . format ( state_assignment . variable , self . build_expression_from_tree ( runnable , regime , state_assignment . expression_tree ) ) ]
Build state assignment code .
9,161
def build_event_out ( self , event_out ) : event_out_code = [ 'if "{0}" in self.event_out_callbacks:' . format ( event_out . port ) , ' for c in self.event_out_callbacks[\'{0}\']:' . format ( event_out . port ) , ' c()' ] return event_out_code
Build event out code .
9,162
def build_reduce_code ( self , result , select , reduce ) : select = select . replace ( '/' , '.' ) select = select . replace ( ' ' , '' ) if reduce == 'add' : reduce_op = '+' acc_start = 0 else : reduce_op = '*' acc_start = 1 bits = re . split ( '\[.*\]' , select ) seps = re . findall ( '\[.*\]' , select ) code = [ 'self.{0} = {1}' . format ( result , acc_start ) ] code += [ 'self.{0}_shadow = {1}' . format ( result , acc_start ) ] code += [ 'try:' ] if len ( bits ) == 1 : target = select code += [ ' self.{0} = self.{1}' . format ( result , target ) ] code += [ ' self.{0}_shadow = self.{1}' . format ( result , target ) ] elif len ( bits ) == 2 : sep = seps [ 0 ] [ 1 : - 1 ] if sep == '*' : array = bits [ 0 ] ref = bits [ 1 ] code += [ ' acc = {0}' . format ( acc_start ) ] code += [ ' for o in self.{0}:' . format ( array ) ] code += [ ' acc = acc {0} o{1}' . format ( reduce_op , ref ) ] code += [ ' self.{0} = acc' . format ( result ) ] code += [ ' self.{0}_shadow = acc' . format ( result ) ] else : bits2 = sep . split ( '=' ) if len ( bits2 ) > 1 : array = bits [ 0 ] ref = bits [ 1 ] code += [ ' acc = {0}' . format ( acc_start ) ] code += [ ' for o in self.{0}:' . format ( array ) ] code += [ ' if o.{0} == {1}:' . format ( bits2 [ 0 ] , bits2 [ 1 ] ) ] code += [ ' acc = acc {0} o{1}' . format ( reduce_op , ref ) ] code += [ ' self.{0} = acc' . format ( result ) ] code += [ ' self.{0}_shadow = acc' . format ( result ) ] else : raise SimbuildError ( "Invalid reduce target - '{0}'" . format ( select ) ) else : raise SimbuildError ( "Invalid reduce target - '{0}'" . format ( select ) ) code += [ 'except:' ] code += [ ' pass' ] return code
Builds a reduce operation on the selected target range .
9,163
def add_recording_behavior ( self , component , runnable ) : simulation = component . simulation for rec in simulation . records : rec . id = runnable . id self . current_record_target . add_variable_recorder ( self . current_data_output , rec )
Adds recording - related dynamics to a runnable component based on the dynamics specifications in the component model .
9,164
def check_static_member_vars ( class_ , fpath = None , only_init = True ) : import utool as ut if isinstance ( class_ , six . string_types ) : classname = class_ if fpath is None : raise Exception ( 'must specify fpath' ) else : if not isinstance ( class_ , type ) : class_instance = class_ class_ = class_instance . __class__ classname = class_ . __name__ if fpath is None : module = ut . get_module_from_class ( class_ ) fpath = ut . get_modpath ( module ) sourcecode = ut . readfrom ( fpath ) import redbaron baron = redbaron . RedBaron ( sourcecode ) for node in baron : if node . type == 'class' and node . name == classname : classnode = node break def find_parent_method ( node ) : par = node . parent_find ( 'def' ) if par is not None and par . parent is not None : if par . parent . type == 'class' : return par else : return find_parent_method ( par ) class_methods = [ ] for node in classnode : if node . type == 'def' : if only_init : if node . name == '__init__' : class_methods . append ( node ) else : class_methods . append ( node ) class_vars = [ ] self_vars = [ ] for method_node in class_methods : self_var = method_node . arguments [ 0 ] . dumps ( ) self_vars . append ( self_var ) for assign in method_node . find_all ( 'assignment' ) : if assign . target . dumps ( ) . startswith ( self_var + '.' ) : class_vars . append ( assign . target . value [ 1 ] . dumps ( ) ) static_attrs = ut . unique ( class_vars ) return static_attrs if False : self_var = self_vars [ 0 ] complex_cases = [ ] simple_cases = [ ] all_self_ref = classnode . find_all ( 'name_' , value = re . compile ( '.*' + self_var + '\\.*' ) ) for x in all_self_ref : if x . parent . type == 'def_argument' : continue if x . parent . type == 'atomtrailers' : atom = x . parent if ut . depth ( atom . fst ( ) ) <= 3 : simple_cases . append ( atom ) else : complex_cases . append ( atom ) accessed_attrs = [ ] for x in simple_cases : if x . value [ 0 ] . dumps ( ) == self_var : attr = x . value [ 1 ] . 
dumps ( ) accessed_attrs . append ( attr ) accessed_attrs = ut . unique ( accessed_attrs ) ut . setdiff ( accessed_attrs , class_vars )
class_ can either be live object or a classname
9,165
def get_funcnames_from_modpath ( modpath , include_methods = True ) : import utool as ut if True : import jedi source = ut . read_from ( modpath ) definition_list = jedi . names ( source ) funcname_list = [ definition . name for definition in definition_list if definition . type == 'function' ] if include_methods : classdef_list = [ definition for definition in definition_list if definition . type == 'class' ] defined_methods = ut . flatten ( [ definition . defined_names ( ) for definition in classdef_list ] ) funcname_list += [ method . name for method in defined_methods if method . type == 'function' and not method . name . startswith ( '_' ) ] else : import redbaron sourcecode = ut . read_from ( modpath ) baron = redbaron . RedBaron ( sourcecode ) funcname_list = [ node . name for node in baron . find_all ( 'def' , recursive = include_methods ) if not node . name . startswith ( '_' ) ] return funcname_list
Get all functions defined in module
9,166
def help_members ( obj , use_other = False ) : r import utool as ut attrnames = dir ( obj ) attr_list = [ getattr ( obj , attrname ) for attrname in attrnames ] attr_types = ut . lmap ( ut . type_str , map ( type , attr_list ) ) unique_types , groupxs = ut . group_indices ( attr_types ) type_to_items = ut . dzip ( unique_types , ut . apply_grouping ( attr_list , groupxs ) ) type_to_itemname = ut . dzip ( unique_types , ut . apply_grouping ( attrnames , groupxs ) ) memtypes = [ 'instancemethod' ] func_mems = ut . dict_subset ( type_to_items , memtypes , [ ] ) func_list = ut . flatten ( func_mems . values ( ) ) defsig_list = [ ] num_unbound_args_list = [ ] num_args_list = [ ] for func in func_list : argspec = ut . get_func_argspec ( func ) args = argspec . args unbound_args = get_unbound_args ( argspec ) defsig = ut . func_defsig ( func ) defsig_list . append ( defsig ) num_unbound_args_list . append ( len ( unbound_args ) ) num_args_list . append ( len ( args ) ) group = ut . hierarchical_group_items ( defsig_list , [ num_unbound_args_list , num_args_list ] ) print ( repr ( obj ) ) print ( ut . repr3 ( group , strvals = True ) ) if use_other : other_mems = ut . delete_keys ( type_to_items . copy ( ) , memtypes ) other_mems_attrnames = ut . dict_subset ( type_to_itemname , other_mems . keys ( ) ) named_other_attrs = ut . dict_union_combine ( other_mems_attrnames , other_mems , lambda x , y : list ( zip ( x , y ) ) ) print ( ut . repr4 ( named_other_attrs , nl = 2 , strvals = True ) )
r Inspects members of a class
9,167
def is_defined_by_module ( item , module , parent = None ) : flag = False if isinstance ( item , types . ModuleType ) : if not hasattr ( item , '__file__' ) : try : import utool as ut name = ut . get_modname_from_modpath ( module . __file__ ) flag = name in str ( item ) except : flag = False else : item_modpath = os . path . realpath ( dirname ( item . __file__ ) ) mod_fpath = module . __file__ . replace ( '.pyc' , '.py' ) if not mod_fpath . endswith ( '__init__.py' ) : flag = False else : modpath = os . path . realpath ( dirname ( mod_fpath ) ) modpath = modpath . replace ( '.pyc' , '.py' ) flag = item_modpath . startswith ( modpath ) elif hasattr ( item , '_utinfo' ) : orig_func = item . _utinfo [ 'orig_func' ] flag = is_defined_by_module ( orig_func , module , parent ) else : if isinstance ( item , staticmethod ) : item = item . __func__ try : func_globals = meta_util_six . get_funcglobals ( item ) func_module_name = func_globals [ '__name__' ] if func_module_name == 'line_profiler' : valid_names = dir ( module ) if parent is not None : valid_names += dir ( parent ) if item . func_name in valid_names : if len ( item . func_name ) > 6 : flag = True elif func_module_name == module . __name__ : flag = True except AttributeError : if hasattr ( item , '__module__' ) : flag = item . __module__ == module . __name__ return flag
Check if item is directly defined by a module . This check may be prone to errors .
9,168
def is_bateries_included ( item ) : flag = False if hasattr ( item , '__call__' ) and hasattr ( item , '__module__' ) : if item . __module__ is not None : module = sys . modules [ item . __module__ ] if module == builtins : flag = True elif hasattr ( module , '__file__' ) : flag = LIB_PATH == dirname ( module . __file__ ) return flag
Returns if a value is a python builtin function
9,169
def dummy_func ( arg1 , arg2 , arg3 = None , arg4 = [ 1 , 2 , 3 ] , arg5 = { } , ** kwargs ) : foo = kwargs . get ( 'foo' , None ) bar = kwargs . pop ( 'bar' , 4 ) foo2 = kwargs [ 'foo2' ] foobar = str ( foo ) + str ( bar ) + str ( foo2 ) return foobar
test func for kwargs parseing
9,170
def get_docstr ( func_or_class ) : import utool as ut try : docstr_ = func_or_class . func_doc except AttributeError : docstr_ = func_or_class . __doc__ if docstr_ is None : docstr_ = '' docstr = ut . unindent ( docstr_ ) return docstr
Get the docstring from a live object
9,171
def find_funcs_called_with_kwargs ( sourcecode , target_kwargs_name = 'kwargs' ) : r import ast sourcecode = 'from __future__ import print_function\n' + sourcecode pt = ast . parse ( sourcecode ) child_funcnamess = [ ] debug = False or VERYVERB_INSPECT if debug : print ( '\nInput:' ) print ( 'target_kwargs_name = %r' % ( target_kwargs_name , ) ) print ( '\nSource:' ) print ( sourcecode ) import astor print ( '\nParse:' ) print ( astor . dump ( pt ) ) class KwargParseVisitor ( ast . NodeVisitor ) : def visit_FunctionDef ( self , node ) : if debug : print ( '\nVISIT FunctionDef node = %r' % ( node , ) ) print ( 'node.args.kwarg = %r' % ( node . args . kwarg , ) ) if six . PY2 : kwarg_name = node . args . kwarg else : if node . args . kwarg is None : kwarg_name = None else : kwarg_name = node . args . kwarg . arg if kwarg_name != target_kwargs_name : ast . NodeVisitor . generic_visit ( self , node ) def visit_Call ( self , node ) : if debug : print ( '\nVISIT Call node = %r' % ( node , ) ) if isinstance ( node . func , ast . Attribute ) : try : funcname = node . func . value . id + '.' + node . func . attr except AttributeError : funcname = None elif isinstance ( node . func , ast . Name ) : funcname = node . func . id else : raise NotImplementedError ( 'do not know how to parse: node.func = %r' % ( node . func , ) ) if six . PY2 : kwargs = node . kwargs kwargs_name = None if kwargs is None else kwargs . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) else : if node . keywords : for kwargs in node . keywords : if kwargs . arg is None : if hasattr ( kwargs . value , 'id' ) : kwargs_name = kwargs . value . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . 
append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) ast . NodeVisitor . generic_visit ( self , node ) try : KwargParseVisitor ( ) . visit ( pt ) except Exception : raise pass return child_funcnamess
r Finds functions that are called with the keyword kwargs variable
9,172
def get_func_argspec ( func ) : if hasattr ( func , '_utinfo' ) : argspec = func . _utinfo [ 'orig_argspec' ] return argspec if isinstance ( func , property ) : func = func . fget try : argspec = inspect . getargspec ( func ) except Exception : argspec = inspect . getfullargspec ( func ) return argspec
wrapper around inspect . getargspec but takes into account utool decorators
9,173
def parse_func_kwarg_keys ( func , with_vals = False ) : sourcecode = get_func_sourcecode ( func , strip_docstr = True , strip_comments = True ) kwkeys = parse_kwarg_keys ( sourcecode , with_vals = with_vals ) return kwkeys
hacky inference of kwargs keys
9,174
def get_func_kwargs ( func , recursive = True ) : import utool as ut argspec = ut . get_func_argspec ( func ) if argspec . defaults is None : header_kw = { } else : header_kw = dict ( zip ( argspec . args [ : : - 1 ] , argspec . defaults [ : : - 1 ] ) ) if argspec . keywords is not None : header_kw . update ( dict ( ut . recursive_parse_kwargs ( func ) ) ) return header_kw
func = ibeis . run_experiment
9,175
def argparse_funckw ( func , defaults = { } , ** kwargs ) : import utool as ut funckw_ = ut . get_funckw ( func , recursive = True ) funckw_ . update ( defaults ) funckw = ut . argparse_dict ( funckw_ , ** kwargs ) return funckw
allows kwargs to be specified on the commandline from testfuncs
9,176
def _qt_set_leaf_data ( self , qvar ) : if VERBOSE_PREF : print ( '' ) print ( '+--- [pref.qt_set_leaf_data]' ) print ( '[pref.qt_set_leaf_data] qvar = %r' % qvar ) print ( '[pref.qt_set_leaf_data] _intern.name=%r' % self . _intern . name ) print ( '[pref.qt_set_leaf_data] _intern.type_=%r' % self . _intern . get_type ( ) ) print ( '[pref.qt_set_leaf_data] type(_intern.value)=%r' % type ( self . _intern . value ) ) print ( '[pref.qt_set_leaf_data] _intern.value=%r' % self . _intern . value ) if self . _tree . parent is None : raise Exception ( '[Pref.qtleaf] Cannot set root preference' ) if self . qt_is_editable ( ) : new_val = '[Pref.qtleaf] BadThingsHappenedInPref' if self . _intern . value == PrefNode : raise Exception ( '[Pref.qtleaf] Qt can only change leafs' ) elif self . _intern . value is None : def cast_order ( var , order = [ bool , int , float , six . text_type ] ) : for type_ in order : try : ret = type_ ( var ) return ret except Exception : continue new_val = cast_order ( six . text_type ( qvar ) ) self . _intern . get_type ( ) if isinstance ( self . _intern . value , bool ) : print ( 'qvar = %r' % ( qvar , ) ) new_val = util_type . smart_cast ( qvar , bool ) print ( 'new_val = %r' % ( new_val , ) ) elif isinstance ( self . _intern . value , int ) : new_val = int ( qvar ) elif self . _intern . get_type ( ) in util_type . VALID_FLOAT_TYPES : new_val = float ( qvar ) elif isinstance ( self . _intern . value , six . string_types ) : new_val = six . text_type ( qvar ) elif isinstance ( self . _intern . value , PrefChoice ) : new_val = six . text_type ( qvar ) if new_val . upper ( ) == 'NONE' : new_val = None else : try : type_ = self . _intern . get_type ( ) if type_ is not None : new_val = type_ ( six . text_type ( qvar ) ) else : new_val = six . text_type ( qvar ) except Exception : raise NotImplementedError ( ( '[Pref.qtleaf] Unknown internal type. ' 'type(_intern.value) = %r, ' '_intern.get_type() = %r, ' ) % type ( self . _intern . value ) , self . 
_intern . get_type ( ) ) if isinstance ( new_val , six . string_types ) : if new_val . lower ( ) == 'none' : new_val = None elif new_val . lower ( ) == 'true' : new_val = True elif new_val . lower ( ) == 'false' : new_val = False if VERBOSE_PREF : print ( '---' ) print ( '[pref.qt_set_leaf_data] new_val=%r' % new_val ) print ( '[pref.qt_set_leaf_data] type(new_val)=%r' % type ( new_val ) ) print ( 'L _ [pref.qt_set_leaf_data]' ) return self . _tree . parent . pref_update ( self . _intern . name , new_val ) return 'PrefNotEditable'
Sets backend data using QVariants
9,177
def toggle ( self , key ) : val = self [ key ] assert isinstance ( val , bool ) , 'key[%r] = %r is not a bool' % ( key , val ) self . pref_update ( key , not val )
Toggles a boolean key
9,178
def change_combo_val ( self , new_val ) : choice_obj = self . _intern . value assert isinstance ( self . _intern . value , PrefChoice ) , 'must be a choice' return choice_obj . get_tuple ( )
Checks to see if a selection is a valid index or choice of a combo preference
9,179
def iteritems ( self ) : for ( key , val ) in six . iteritems ( self . __dict__ ) : if key in self . _printable_exclude : continue yield ( key , val )
Wow this class is messed up . I had to overwrite items when moving to python3 just because I haden t called it yet
9,180
def to_dict ( self , split_structs_bit = False ) : pref_dict = { } struct_dict = { } for ( key , val ) in six . iteritems ( self ) : if split_structs_bit and isinstance ( val , Pref ) : struct_dict [ key ] = val continue pref_dict [ key ] = val if split_structs_bit : return ( pref_dict , struct_dict ) return pref_dict
Converts prefeters to a dictionary . Children Pref can be optionally separated
9,181
def save ( self ) : fpath = self . get_fpath ( ) if fpath in [ '' , None ] : if self . _tree . parent is not None : if VERBOSE_PREF : print ( '[pref.save] Can my parent save me?' ) return self . _tree . parent . save ( ) if VERBOSE_PREF : print ( '[pref.save] I cannot be saved. I have no parents.' ) return False with open ( fpath , 'wb' ) as f : print ( '[pref] Saving to ' + fpath ) pref_dict = self . to_dict ( ) pickle . dump ( pref_dict , f , protocol = 2 ) return True
Saves prefs to disk in dict format
9,182
def load ( self ) : if VERBOSE_PREF : print ( '[pref.load()]' ) fpath = self . get_fpath ( ) try : with open ( fpath , 'rb' ) as f : if VERBOSE_PREF : print ( 'load: %r' % fpath ) pref_dict = pickle . load ( f ) except EOFError as ex1 : util_dbg . printex ( ex1 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) raise except ImportError as ex2 : util_dbg . printex ( ex2 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) raise if not util_type . is_dict ( pref_dict ) : raise Exception ( 'Preference file is corrupted' ) self . add_dict ( pref_dict ) return True
Read pref dict stored on disk . Overwriting current values .
9,183
def full_name ( self ) : if self . _tree . parent is None : return self . _intern . name return self . _tree . parent . full_name ( ) + '.' + self . _intern . name
returns name all the way up the tree
9,184
def pref_update ( self , key , new_val ) : print ( 'Update and save pref from: %s=%r, to: %s=%r' % ( key , six . text_type ( self [ key ] ) , key , six . text_type ( new_val ) ) ) self . __setattr__ ( key , new_val ) return self . save ( )
Changes a preference value and saves it to disk
9,185
def __get_permissions ( self , res , ** kwargs ) : response = res . _ ( ** kwargs ) return response . get ( 'permissions' , None )
This call returns current login user s permissions .
9,186
def inject_all_external_modules ( self , classname = None , allow_override = 'override+warn' , strict = True ) : if classname is None : classname = self . __class__ . __name__ NEW = True if NEW : classkey_list = [ key for key in __CLASSTYPE_ATTRIBUTES__ if key [ 0 ] == classname ] else : injected_modules = get_injected_modules ( classname ) classkey_list = [ module . CLASS_INJECT_KEY for module in injected_modules ] for classkey in classkey_list : inject_instance ( self , classkey = classkey , allow_override = allow_override , strict = False ) for classkey in classkey_list : postinject_instance ( self , classkey = classkey )
dynamically injects registered module methods into a class instance
9,187
def decorate_class_method ( func , classkey = None , skipmain = False ) : global __CLASSTYPE_ATTRIBUTES__ assert classkey is not None , 'must specify classkey' __CLASSTYPE_ATTRIBUTES__ [ classkey ] . append ( func ) return func
Will inject all decorated function as methods of classkey
9,188
def decorate_postinject ( func , classkey = None , skipmain = False ) : global __CLASSTYPE_POSTINJECT_FUNCS__ assert classkey is not None , 'must specify classkey' __CLASSTYPE_POSTINJECT_FUNCS__ [ classkey ] . append ( func ) return func
Will perform func with argument self after inject_instance is called on classkey
9,189
def inject_func_as_method ( self , func , method_name = None , class_ = None , allow_override = False , allow_main = False , verbose = True , override = None , force = False ) : if override is not None : allow_override = override if method_name is None : method_name = get_funcname ( func ) if force : allow_override = True allow_main = True old_method = getattr ( self , method_name , None ) new_method = func . __get__ ( self , self . __class__ ) if old_method is not None : old_im_func = get_method_func ( old_method ) new_im_func = get_method_func ( new_method ) if not allow_main and old_im_func is not None and ( get_funcglobals ( old_im_func ) [ '__name__' ] != '__main__' and get_funcglobals ( new_im_func ) [ '__name__' ] == '__main__' ) : if True or VERBOSE_CLASS : print ( '[util_class] skipping re-inject of %r from __main__' % method_name ) return if old_method is new_method or old_im_func is new_im_func : return elif allow_override is False : raise AssertionError ( 'Overrides are not allowed. Already have method_name=%r' % ( method_name ) ) elif allow_override == 'warn' : print ( 'WARNING: Overrides are not allowed. Already have method_name=%r. Skipping' % ( method_name ) ) return elif allow_override == 'override+warn' : print ( 'WARNING: Overrides are allowed, but dangerous. method_name=%r.' % ( method_name ) ) print ( 'old_method = %r, im_func=%s' % ( old_method , str ( old_im_func ) ) ) print ( 'new_method = %r, im_func=%s' % ( new_method , str ( new_im_func ) ) ) print ( get_funcglobals ( old_im_func ) [ '__name__' ] ) print ( get_funcglobals ( new_im_func ) [ '__name__' ] ) del old_method setattr ( self , method_name , new_method )
Injects a function into an object as a method
9,190
def inject_func_as_unbound_method ( class_ , func , method_name = None ) : if method_name is None : method_name = get_funcname ( func ) setattr ( class_ , method_name , func )
This is actually quite simple
9,191
def reloading_meta_metaclass_factory(BASE_TYPE=type):
    """Build a metaclass derived from ``BASE_TYPE`` whose classes all get a
    ``rrr`` attribute bound to :func:`reload_class`.  (hack for pyqt)
    """
    class ReloadingMetaclass2(BASE_TYPE):
        def __init__(metaself, name, bases, dct):
            # Let the base metaclass do the real class construction first.
            BASE_TYPE.__init__(metaself, name, bases, dct)
            # Then expose the reload hook on every created class.
            metaself.rrr = reload_class
    return ReloadingMetaclass2
hack for pyqt
9,192
def reload_class(self, verbose=True, reload_module=True):
    """Special class reloading function, often injected as ``rrr`` of classes.

    Reloads the modules defining ``self``'s class and all its (non-object)
    base classes, then rebinds the instance's methods to the freshly
    reloaded class objects.

    Args:
        self: instance whose class hierarchy should be reloaded.
        verbose (bool/int): verbosity; values > 1 print extra hook messages.
        reload_module (bool): when False, skip the module reload itself and
            only rebind methods.

    Raises:
        re-raises any exception encountered during reloading, after
        printing diagnostics via ``ut.printex``.
    """
    import utool as ut
    verbose = verbose or VERBOSE_CLASS
    classname = self.__class__.__name__
    try:
        modname = self.__class__.__module__
        if verbose:
            print('[class] reloading ' + classname + ' from ' + modname)
        # Optional pre-reload hook on the instance.
        if hasattr(self, '_on_reload'):
            if verbose > 1:
                print('[class] calling _on_reload for ' + classname)
            self._on_reload()
        elif verbose > 1:
            print('[class] ' + classname + ' does not have an _on_reload function')

        # Collect base classes depth-first (bases before the class itself),
        # excluding ``object``.  NOTE(review): the recursion passes itself
        # as an argument instead of closing over its own name; also note
        # the 'clases' spelling — kept as-is.
        def find_base_clases(_class, find_base_clases=None):
            class_list = []
            for _baseclass in _class.__bases__:
                parents = find_base_clases(_baseclass, find_base_clases)
                class_list.extend(parents)
            if _class is not object:
                class_list.append(_class)
            return class_list

        head_class = self.__class__
        class_list = find_base_clases(head_class, find_base_clases)
        # HashComparable2 is deliberately never reloaded.
        ignore = {HashComparable2}
        class_list = [_class for _class in class_list if _class not in ignore]
        for _class in class_list:
            if verbose:
                print('[class] reloading parent ' + _class.__name__ + ' from ' + _class.__module__)
            if _class.__module__ == '__main__':
                # Classes defined in __main__: reload the module under its
                # real (importable) name instead.
                main_module_ = sys.modules[_class.__module__]
                main_modname = ut.get_modname_from_modpath(main_module_.__file__)
                module_ = sys.modules[main_modname]
            else:
                module_ = sys.modules[_class.__module__]
            if hasattr(module_, 'rrr'):
                # Module provides its own reload hook; prefer it.
                if reload_module:
                    module_.rrr(verbose=verbose)
            else:
                if reload_module:
                    import imp
                    if verbose:
                        print('[class] reloading ' + _class.__module__ + ' with imp')
                    try:
                        imp.reload(module_)
                    except (ImportError, AttributeError):
                        # Fall back to re-executing the source file directly.
                        print('[class] fallback reloading ' + _class.__module__ + ' with imp')
                        imp.load_source(module_.__name__, module_.__file__)
            # Rebind this instance's methods to the freshly loaded class.
            _newclass = getattr(module_, _class.__name__)
            reload_class_methods(self, _newclass, verbose=verbose)
        # Optional post-reload hook on the instance.
        if hasattr(self, '_initialize_self'):
            if verbose > 1:
                print('[class] calling _initialize_self for ' + classname)
            self._initialize_self()
        elif verbose > 1:
            print('[class] ' + classname + ' does not have an _initialize_self function')
    except Exception as ex:
        ut.printex(ex, 'Error Reloading Class', keys=[
            'modname',
            'module',
            'class_',
            'class_list',
            'self',
        ])
        raise
Special class reloading function. This function is often injected as ``rrr`` of classes
9,193
def reload_class_methods(self, class_, verbose=True):
    """Rebind every method of ``class_`` onto the instance ``self``.

    Also swaps the instance's ``__class__`` to ``class_`` so the object is
    fully migrated to the (typically freshly reloaded) class.
    """
    if verbose:
        print('[util_class] Reloading self=%r as class_=%r' % (self, class_))
    self.__class__ = class_
    for attrname in dir(class_):
        attr = getattr(class_, attrname)
        # Only re-inject things that are actually methods.
        if isinstance(attr, types.MethodType):
            inject_func_as_method(self, attr, class_=class_,
                                  allow_override=True, verbose=verbose)
rebinds all class methods
9,194
def remove_private_obfuscation(self):
    """Expose name-mangled private attributes under their double-underscore
    names, so they can be used as they appear in the class source.
    Useful when playing with IPython.
    """
    prefix = '_' + self.__class__.__name__ + '__'
    mangled = [name for name in dir(self) if name.startswith(prefix)]
    for name in mangled:
        # '_ClassName__attr' -> '__attr'
        setattr(self, name.replace(prefix, '__'), getattr(self, name))
Removes the Python name-mangling of class privates so they can be accessed as they appear in the class source. Useful when playing with IPython.
9,195
def create_peptidequant_lookup(fns, pqdb, poolnames, pepseq_colnr, ms1_qcolpattern=None, isobqcolpattern=None, psmnrpattern=None, fdrcolpattern=None, pepcolpattern=None):
    """Calls lower level function to create a peptide quant lookup.

    Pairs each peptide-table column pattern with the pqdb method that
    stores that kind of data, then delegates the actual parsing/storing.
    """
    pattern_store_pairs = [
        (ms1_qcolpattern, pqdb.store_precursor_quants),
        (fdrcolpattern, pqdb.store_fdr),
        (pepcolpattern, pqdb.store_pep),
    ]
    patterns = [pat for pat, _ in pattern_store_pairs]
    storefuns = [fun for _, fun in pattern_store_pairs]
    create_pep_protein_quant_lookup(fns, pqdb, poolnames, pepseq_colnr,
                                    patterns, storefuns, isobqcolpattern,
                                    psmnrpattern)
Calls lower level function to create a peptide quant lookup
9,196
def create_proteinquant_lookup(fns, pqdb, poolnames, protacc_colnr, ms1_qcolpattern=None, isobqcolpattern=None, psmnrpattern=None, probcolpattern=None, fdrcolpattern=None, pepcolpattern=None):
    """Calls lower level function to create a protein quant lookup.

    Pairs each protein-table column pattern with the pqdb method that
    stores that kind of data, then delegates the actual parsing/storing.
    """
    pattern_store_pairs = [
        (ms1_qcolpattern, pqdb.store_precursor_quants),
        (probcolpattern, pqdb.store_probability),
        (fdrcolpattern, pqdb.store_fdr),
        (pepcolpattern, pqdb.store_pep),
    ]
    patterns = [pat for pat, _ in pattern_store_pairs]
    storefuns = [fun for _, fun in pattern_store_pairs]
    create_pep_protein_quant_lookup(fns, pqdb, poolnames, protacc_colnr,
                                    patterns, storefuns, isobqcolpattern,
                                    psmnrpattern)
Calls lower level function to create a protein quant lookup
9,197
def create_pep_protein_quant_lookup(fns, pqdb, poolnames, featcolnr, patterns, storefuns, isobqcolpattern=None, psmnrpattern=None):
    """Does the work when creating peptide and protein quant lookups.

    Loops through the (pattern, store-function) pairs, resolving each
    pattern to a single column per file and storing the matching data.
    Finally handles the isobaric quant columns (with optional PSM-count
    columns) if an isobaric pattern was given.
    """
    tablefn_map = create_tablefn_map(fns, pqdb, poolnames)
    feat_map = pqdb.get_feature_map()
    for pattern, storefun in zip(patterns, storefuns):
        if pattern is None:
            continue
        colmap = get_colmap(fns, pattern, single_col=True)
        if colmap:
            store_single_col_data(fns, tablefn_map, feat_map, storefun,
                                  featcolnr, colmap)
    # No isobaric pattern means no isobaric lookup at all.
    if isobqcolpattern is None:
        return
    isocolmap = get_colmap(fns, isobqcolpattern, antipattern=psmnrpattern)
    psmcolmap = get_colmap(fns, psmnrpattern) if psmnrpattern is not None else False
    create_isobaric_quant_lookup(fns, tablefn_map, feat_map, pqdb,
                                 featcolnr, isocolmap, psmcolmap)
Does the work when creating peptide and protein quant lookups . This loops through storing options and parses columns passing on to the storing functions
9,198
def store_single_col_data(fns, prottable_id_map, pacc_map, pqdbmethod, protacc_colnr, colmap):
    """General method to store single column data from protein tables in lookup.

    Streams quant rows from the TSV files, translates the accession to its
    db id, and flushes to ``pqdbmethod`` in batches of roughly 10000 rows.
    """
    buffered = []
    for fn, header, pquant in tsvreader.generate_tsv_pep_protein_quants(fns):
        feat_id = pacc_map[pquant[header[protacc_colnr]]]
        buffered.append((feat_id, prottable_id_map[fn], pquant[colmap[fn]]))
        if len(buffered) > 10000:
            pqdbmethod(buffered)
            buffered = []
    # Flush whatever is left after the last file.
    pqdbmethod(buffered)
General method to store single column data from protein tables in lookup
9,199
def map_psmnrcol_to_quantcol(quantcols, psmcols, tablefn_map):
    """Yield tuples of (table id, isobaric quant column) per file, extended
    with the matching number-of-PSM column when ``psmcols`` is provided.
    """
    for fn, qcollist in quantcols.items():
        table_id = tablefn_map[fn]
        if not psmcols:
            for qcol in qcollist:
                yield (table_id, qcol)
        else:
            for qcol, psmcol in zip(qcollist, psmcols[fn]):
                yield (table_id, qcol, psmcol)
This function yields tuples of table filename isobaric quant column and if necessary number - of - PSM column