idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
54,400
def on_exit(self):
    """Prompt whether to save when the user closes the application, then exit."""
    answer = messagebox.askyesnocancel(
        "Exit", "Do you want to save as you quit the application?")
    if answer is None:
        # Cancel was pressed: abort the exit entirely.
        return
    if answer:
        self.save()
    self.quit()
    self.destroy()
When you click to exit, this function is called and prompts whether to save before quitting.
54,401
def make_gui(self):
    """Set up the general structure of the GUI; the first function called."""
    # The option controls live in their own top-level window; closing it
    # routes through on_exit so the user is prompted to save.
    self.option_window = Toplevel()
    self.option_window.protocol("WM_DELETE_WINDOW", self.on_exit)
    self.canvas_frame = tk.Frame(self, height=500)
    self.option_frame = tk.Frame(self.option_window, height=300)
    self.canvas_frame.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
    self.option_frame.pack(side=tk.RIGHT, fill=None, expand=False)
    self.make_options_frame()
    self.make_canvas_frame()
    # Single-color controls start disabled (three-color is the default mode).
    self.disable_singlecolor()
Sets up the general structure of the GUI; this is the first function called.
54,402
def make_options_frame(self):
    """Build the notebook holding the Configure and Classify tabs."""
    self.tab_frame = ttk.Notebook(self.option_frame, width=800)
    self.tab_configure = tk.Frame(self.tab_frame)
    self.tab_classify = tk.Frame(self.tab_frame)
    # Populate both tabs before adding them to the notebook.
    self.make_configure_tab()
    self.make_classify_tab()
    self.tab_frame.add(self.tab_configure, text="Configure")
    self.tab_frame.add(self.tab_classify, text="Classify")
    self.tab_frame.pack(fill=tk.BOTH, expand=True)
make the frame that allows for configuration and classification
54,403
def disable_multicolor(self):
    """Swap from the multicolor image to the single color image.

    Greys out and disables every three-color control, then re-enables the
    single-color controls in the single-color theme.
    """
    for color in ['red', 'green', 'blue']:
        self.multicolorscales[color].config(state=tk.DISABLED, bg='grey')
        self.multicolorframes[color].config(bg='grey')
        self.multicolorlabels[color].config(bg='grey')
        for group in (self.multicolordropdowns,
                      self.multicolorminscale,
                      self.multicolormaxscale):
            group[color].config(bg='grey', state=tk.DISABLED)
    theme = self.single_color_theme
    self.singlecolorframe.config(bg=theme)
    self.singlecolorlabel.config(bg=theme)
    for widget in (self.singlecolorscale, self.singlecolordropdown,
                   self.singlecolorminscale, self.singlecolormaxscale):
        widget.config(state=tk.NORMAL, bg=theme)
swap from the multicolor image to the single color image
54,404
def update_button_action(self):
    """Refresh the data preview when the Update button is clicked."""
    mode = self.mode.get()
    if mode == 3:
        self.configure_threecolor_image()
    elif mode == 1:
        self.configure_singlecolor_image()
    else:
        raise ValueError("mode can only be singlecolor or threecolor")
    self.imageplot.set_data(self.image)
    if mode == 1:
        # Single-channel data is displayed with a grayscale colormap.
        self.imageplot.set_cmap('gist_gray')
    self.fig.canvas.draw_idle()
When the update button is clicked, refreshes the data preview.
54,405
def make_configure_tab(self):
    """Initial set up of the Configure tab: mode selection and update button."""
    modeframe = tk.Frame(self.tab_configure)
    self.mode = tk.IntVar()
    # Pass the bound methods directly: the original lambdas were redundant
    # wrappers around zero-argument method calls.
    singlecolor = tk.Radiobutton(modeframe, text="Single color",
                                 variable=self.mode, value=1,
                                 command=self.disable_multicolor)
    multicolor = tk.Radiobutton(modeframe, text="Three color",
                                variable=self.mode, value=3,
                                command=self.disable_singlecolor)
    self.mode.set(3)  # default to three-color mode
    singlecolor.pack(side=tk.LEFT)
    multicolor.pack(side=tk.LEFT)
    updatebutton = tk.Button(master=modeframe, text="Update",
                             command=self.update_button_action)
    updatebutton.pack(side=tk.RIGHT)
    modeframe.grid(row=0, column=0)
    self.setup_multicolor()
    self.setup_singlecolor()
initial set up of configure tab
54,406
def make_classify_tab(self):
    """Initial set up of the classification tab."""
    self.pick_frame = tk.Frame(self.tab_classify)
    self.pick_frame2 = tk.Frame(self.tab_classify)
    self.solar_class_var = tk.IntVar()
    self.solar_class_var.set(0)  # no class selected initially
    buttonnum = 0
    frame = [self.pick_frame, self.pick_frame2]
    # Alternate the class buttons between the two columns.
    for text, value in self.config.solar_classes:
        b = tk.Radiobutton(frame[buttonnum % 2], text=text,
                           variable=self.solar_class_var, value=value,
                           background=self.config.solar_colors[text],
                           indicatoron=0, width=50, height=2,
                           command=self.change_class)
        b.pack(fill=tk.BOTH, expand=1)
        buttonnum += 1
    self.pick_frame.grid(row=0, column=0, rowspan=5,
                         sticky=tk.W + tk.E + tk.N + tk.S)
    self.pick_frame2.grid(row=0, column=1, rowspan=5,
                          sticky=tk.W + tk.E + tk.N + tk.S)
    undobutton = tk.Button(master=self.tab_classify, text="Undo",
                           command=self.undobutton_action)
    undobutton.grid(row=6, column=0, columnspan=2, sticky=tk.W + tk.E)
initial set up of classification tab
54,407
def setup_singlecolor(self):
    """Initial setup of single color options and variables."""
    self.singlecolorframe = tk.Frame(self.tab_configure,
                                     bg=self.single_color_theme)
    # Channels come from the loaded data products.
    channel_choices = sorted(list(self.data.keys()))
    self.singlecolorlabel = tk.Label(self.singlecolorframe, text="single",
                                     bg=self.single_color_theme, width=10)
    self.singlecolorvar = tk.StringVar()
    self.singlecolorpower = tk.DoubleVar()
    self.singlecolormin = tk.DoubleVar()
    self.singlecolormax = tk.DoubleVar()
    self.singlecolordropdown = tk.OptionMenu(self.singlecolorframe,
                                             self.singlecolorvar,
                                             *channel_choices)
    # Power (contrast) slider, bounded by the configured range.
    self.singlecolorscale = tk.Scale(
        self.singlecolorframe, variable=self.singlecolorpower,
        orient=tk.HORIZONTAL,
        from_=self.config.ranges['single_color_power_min'],
        bg=self.single_color_theme,
        to_=self.config.ranges['single_color_power_max'],
        resolution=self.config.ranges['single_color_power_resolution'],
        length=200)
    # Lower clip slider: 0 .. configured vmin.
    self.singlecolorminscale = tk.Scale(
        self.singlecolorframe, variable=self.singlecolormin,
        orient=tk.HORIZONTAL, from_=0,
        bg=self.single_color_theme,
        to_=self.config.ranges['single_color_vmin'],
        resolution=self.config.ranges['single_color_vresolution'],
        length=200)
    # Upper clip slider: configured vmax .. 100.
    self.singlecolormaxscale = tk.Scale(
        self.singlecolorframe, variable=self.singlecolormax,
        orient=tk.HORIZONTAL,
        from_=self.config.ranges['single_color_vmax'],
        bg=self.single_color_theme, to_=100,
        resolution=self.config.ranges['single_color_vresolution'],
        length=200)
    # Seed the controls from the configured defaults.
    self.singlecolorvar.set(self.config.products_map[self.config.default['single']])
    self.singlecolorpower.set(self.config.default['single_power'])
    self.singlecolormin.set(0)
    self.singlecolormax.set(100)
    self.singlecolordropdown.config(bg=self.single_color_theme, width=10)
    self.singlecolorlabel.pack(side=tk.LEFT)
    self.singlecolorscale.pack(side=tk.RIGHT)
    self.singlecolormaxscale.pack(side=tk.RIGHT)
    self.singlecolorminscale.pack(side=tk.RIGHT)
    self.singlecolordropdown.pack()
    self.singlecolorframe.grid(row=4, columnspan=5, rowspan=1)
initial setup of single color options and variables
54,408
def undobutton_action(self):
    """Revert the thematic map to the previous state when Undo is clicked."""
    if len(self.history) <= 1:
        # Always keep at least the initial state.
        return
    old = self.history.pop(-1)
    self.selection_array = old
    self.mask.set_data(old)
    self.fig.canvas.draw_idle()
when undo is clicked revert the thematic map to the previous state
54,409
def change_class(self):
    """Update the toolbar "Draw" label to show the newly selected class name."""
    self.toolbarcenterframe.config(
        text="Draw: {}".format(
            self.config.solar_class_name[self.solar_class_var.get()]))
on changing the classification label update the draw text
54,410
def values(self):
    """Collect the current parameter values from the UI widgets.

    Returns the updated ``self.vals`` dict.
    """
    ui = self.ui
    self.vals['nfft'] = ui.nfftSpnbx.value()
    # Window names are normalized to lower case.
    self.vals['window'] = str(ui.windowCmbx.currentText()).lower()
    self.vals['overlap'] = ui.overlapSpnbx.value()
    return self.vals
Gets the parameter values
54,411
def main():
    """Parse the command-line args and call run()."""
    parser = argparse.ArgumentParser(
        description='A pipeline that generates analysis pipelines.')
    parser.add_argument('input', nargs='?',
                        help='A valid metapipe configuration file.')
    parser.add_argument('-o', '--output',
                        help='An output destination. If none is provided, the '
                             'results will be printed to stdout.',
                        default=sys.stdout)
    parser.add_argument('-t', '--temp',
                        help='A desired metapipe binary file. This is used to store '
                             'temp data between generation and execution. '
                             '(Default: "%(default)s")',
                        default='.metapipe')
    parser.add_argument('-s', '--shell',
                        help='The path to the shell to be used when executing the '
                             'pipeline. (Default: "%(default)s)"',
                        default='/bin/bash')
    parser.add_argument('-r', '--run',
                        help='Run the pipeline as soon as it\'s ready.',
                        action='store_true')
    parser.add_argument('-n', '--name',
                        help='A name for the pipeline.',
                        default='')
    parser.add_argument('-j', '--job-type',
                        help='The destination for calculations (i.e. local, a PBS '
                             'queue on a cluster, etc).\nOptions: {}. '
                             '(Default: "%(default)s)"'.format(JOB_TYPES.keys()),
                        default='local')
    parser.add_argument('-p', '--max-jobs',
                        help='The maximum number of concurrent jobs allowed. '
                             'Defaults to maximum available cores.',
                        default=None)
    parser.add_argument('--report-type',
                        help='The output report type. By default metapipe will '
                             'print updates to the console. \nOptions: {}. '
                             '(Default: "%(default)s)"'.format(QUEUE_TYPES.keys()),
                        default='text')
    parser.add_argument('-v', '--version',
                        help='Displays the current version of the application.',
                        action='store_true')
    args = parser.parse_args()
    # --version short-circuits before any config file is read.
    if args.version:
        print('Version: {}'.format(__version__))
        sys.exit(0)
    try:
        with open(args.input) as f:
            config = f.read()
    except IOError:
        print('No valid config file found.')
        return -1
    run(config, args.max_jobs, args.output, args.job_type, args.report_type,
        args.shell, args.temp, args.run)
Parses the command-line args and calls run.
54,412
def run(config, max_jobs, output=sys.stdout, job_type='local',
        report_type='text', shell='/bin/bash', temp='.metapipe',
        run_now=False):
    """Create the metapipe based on the provided input.

    Args:
        config: raw configuration text to parse.
        max_jobs: max concurrent jobs; None means use all available cores.
        output: output path, or an already-open file object (default stdout).
        job_type: key into JOB_TYPES selecting the execution backend.
        report_type: key into QUEUE_TYPES selecting the progress reporter.
        shell: shell used to execute the generated script.
        temp: path of the pickled pipeline binary.
        run_now: submit the pipeline for execution immediately.

    Raises:
        SyntaxError: if the config file cannot be parsed.
    """
    if max_jobs is None:
        max_jobs = cpu_count()
    parser = Parser(config)
    try:
        command_templates = parser.consume()
    except ValueError as e:
        raise SyntaxError('Invalid config file. \n%s' % e)
    options = '\n'.join(parser.global_options)
    queue_type = QUEUE_TYPES[report_type]
    pipeline = Runtime(command_templates, queue_type, JOB_TYPES, job_type,
                       max_jobs)
    template = env.get_template('output_script.tmpl.sh')
    with open(temp, 'wb') as f:
        pickle.dump(pipeline, f, 2)
    script = template.render(shell=shell, temp=os.path.abspath(temp),
                             options=options)
    if run_now:
        output = output if output != sys.stdout else PIPELINE_ALIAS
        submit_job = make_submit_job(shell, output, job_type)
        submit_job.submit()
    # ``output`` may be a path or an already-open file object (e.g. stdout).
    # BUG FIX: the original called f.close() unconditionally, raising a
    # NameError when open() failed with TypeError and ``f`` was never bound.
    try:
        opened = open(output, 'w')
    except TypeError:
        # Already file-like: write to it and leave it open for the caller.
        output.write(script)
    else:
        try:
            opened.write(script)
        finally:
            opened.close()
Create the metapipe based on the provided input .
54,413
def make_submit_job(shell, output, job_type):
    """Prep the metapipe main job to be submitted.

    Builds a Command that executes ``shell output`` and wraps it in the job
    object for ``job_type``, calling make() so it is ready to submit.
    """
    run_cmd = [shell, output]
    submit_command = Command(alias=PIPELINE_ALIAS, cmds=run_cmd)
    submit_job = get_job(submit_command, job_type)
    submit_job.make()
    return submit_job
Preps the metapipe main job to be submitted .
54,414
def yaml(modules_to_register: Optional[Iterable[Any]] = None,
         classes_to_register: Optional[Iterable[Any]] = None) -> ruamel.yaml.YAML:
    """Create a YAML object for loading a YAML configuration.

    Args:
        modules_to_register: Modules whose classes should be registered
            with the YAML object. Default: None.
        classes_to_register: Additional classes to register. Default: None.

    Returns:
        A round-trip ruamel YAML object with numpy-array and class support.
    """
    # FIX: the parameters default to None, so the annotations must be
    # Optional[...] (they previously claimed a plain Iterable).
    yaml = ruamel.yaml.YAML(typ="rt")
    # Teach the object to (de)serialize numpy arrays via the !numpy_array tag.
    yaml.representer.add_representer(np.ndarray, numpy_to_yaml)
    yaml.constructor.add_constructor("!numpy_array", numpy_from_yaml)
    yaml = register_module_classes(yaml=yaml, modules=modules_to_register)
    yaml = register_classes(yaml=yaml, classes=classes_to_register)
    return yaml
Create a YAML object for loading a YAML configuration .
54,415
def register_classes(yaml: ruamel.yaml.YAML,
                     classes: Optional[Iterable[Any]] = None) -> ruamel.yaml.YAML:
    """Register externally defined classes with the YAML object."""
    for cls in classes or []:
        logger.debug(f"Registering class {cls} with YAML")
        yaml.register_class(cls)
    return yaml
Register externally defined classes .
54,416
def register_module_classes(yaml: ruamel.yaml.YAML,
                            modules: Optional[Iterable[Any]] = None) -> ruamel.yaml.YAML:
    """Register all classes in the given modules with the YAML object."""
    classes_to_register = set()
    for module in modules or []:
        # inspect.getmembers yields (name, obj) pairs; keep only the classes.
        classes_to_register.update(
            member for _, member in inspect.getmembers(module, inspect.isclass))
    return register_classes(yaml=yaml, classes=classes_to_register)
Register all classes in the given modules with the YAML object .
54,417
def numpy_to_yaml(representer: Representer, data: np.ndarray) -> Sequence[Any]:
    """Write a numpy array to YAML.

    The array is stored as a plain list under the ``!numpy_array`` tag so it
    can be reconstructed on load.
    """
    as_list = data.tolist()
    return representer.represent_sequence("!numpy_array", as_list)
Write a numpy array to YAML .
54,418
def numpy_from_yaml(constructor: Constructor,
                    data: ruamel.yaml.nodes.SequenceNode) -> np.ndarray:
    """Read a ``!numpy_array`` sequence node from YAML back into a numpy array."""
    values = [constructor.construct_object(child) for child in data.value]
    logger.debug(f"{data}, {values}")
    return np.array(values)
Read an array from YAML to numpy .
54,419
def enum_to_yaml(cls: Type[T_EnumToYAML], representer: Representer,
                 data: T_EnumToYAML) -> ruamel.yaml.nodes.ScalarNode:
    """Encode an enum member as a YAML scalar.

    The scalar is tagged with the enum class name so the matching decoding
    hook can find the right class on load.
    """
    tag = f"!{cls.__name__}"
    return representer.represent_scalar(tag, f"{str(data)}")
Encodes YAML representation .
54,420
def enum_from_yaml(cls: Type[T_EnumFromYAML], constructor: Constructor,
                   node: ruamel.yaml.nodes.ScalarNode) -> T_EnumFromYAML:
    """Decode a YAML scalar back into a member of the enum class ``cls``.

    The scalar value stores the member name, so the enum is indexed by name.
    """
    return cls[node.value]
Decode YAML representation .
54,421
def _get_current_ids ( self , source = True , meta = True , spectra = True , spectra_annotation = True ) : c = self . c if source : c . execute ( 'SELECT max(id) FROM library_spectra_source' ) last_id_origin = c . fetchone ( ) [ 0 ] if last_id_origin : self . current_id_origin = last_id_origin + 1 else : self . current_id_origin = 1 if meta : c . execute ( 'SELECT max(id) FROM library_spectra_meta' ) last_id_meta = c . fetchone ( ) [ 0 ] if last_id_meta : self . current_id_meta = last_id_meta + 1 else : self . current_id_meta = 1 if spectra : c . execute ( 'SELECT max(id) FROM library_spectra' ) last_id_spectra = c . fetchone ( ) [ 0 ] if last_id_spectra : self . current_id_spectra = last_id_spectra + 1 else : self . current_id_spectra = 1 if spectra_annotation : c . execute ( 'SELECT max(id) FROM library_spectra_annotation' ) last_id_spectra_annotation = c . fetchone ( ) [ 0 ] if last_id_spectra_annotation : self . current_id_spectra_annotation = last_id_spectra_annotation + 1 else : self . current_id_spectra_annotation = 1
Get the current id for each table in the database
54,422
def _update_libdata(self, line):
    """Update the library meta data from the current line being parsed."""
    # Comment lines can embed quoted meta/compound details; parse each quote.
    if re.match('^Comment.*$', line, re.IGNORECASE):
        comments = re.findall('"([^"]*)"', line)
        for c in comments:
            self._parse_meta_info(c)
            self._parse_compound_info(c)
    self._parse_meta_info(line)
    self._parse_compound_info(line)
    # Reaching the peak/annotation section means the meta block is complete:
    # flush the collected info and reset the per-record state.
    if self.collect_meta and (re.match('^Num Peaks(.*)$', line, re.IGNORECASE)
                              or re.match('^PK\$PEAK:(.*)', line, re.IGNORECASE)
                              or re.match('^PK\$ANNOTATION(.*)', line, re.IGNORECASE)):
        self._store_compound_info()
        self._store_meta_info()
        self.meta_info = get_blank_dict(self.meta_regex)
        self.compound_info = get_blank_dict(self.compound_regex)
        self.other_names = []
        self.collect_meta = False
    # This exact header means the peak list has no extra columns to keep.
    if re.match('^PK\$PEAK: m/z int\. rel\.int\.$', line, re.IGNORECASE):
        self.ignore_additional_spectra_info = True
    if re.match('^Num Peaks(.*)$', line, re.IGNORECASE) or \
            re.match('^PK\$PEAK:(.*)', line, re.IGNORECASE):
        self.start_spectra = True
        return
    elif re.match('^PK\$ANNOTATION(.*)', line, re.IGNORECASE):
        self.start_spectra_annotation = True
        # Record the column order so annotation lines can be indexed by name.
        match = re.match('^PK\$ANNOTATION:(.*)', line, re.IGNORECASE)
        columns = match.group(1)
        cl = columns.split()
        self.spectra_annotation_indexes = {i: cl.index(i) for i in cl}
        return
    if self.start_spectra_annotation:
        self._parse_spectra_annotation(line)
    if self.start_spectra:
        self._parse_spectra(line)
Update the library meta data from the current line being parsed
54,423
def _store_compound_info(self):
    """Finalize the compound_info dict for the current record and queue it.

    Progressively weaker identifiers (pubchem cid, smiles, name) are tried
    to resolve an InChI key, falling back to a generated UNKNOWN_ id.
    """
    # Collect alternative names, excluding the primary one.
    other_name_l = [name for name in self.other_names
                    if name != self.compound_info['name']]
    self.compound_info['other_names'] = ' <#> '.join(other_name_l)
    if not self.compound_info['inchikey_id']:
        self._set_inchi_pcc(self.compound_info['pubchem_id'], 'cid', 0)
    if not self.compound_info['inchikey_id']:
        self._set_inchi_pcc(self.compound_info['smiles'], 'smiles', 0)
    if not self.compound_info['inchikey_id']:
        self._set_inchi_pcc(self.compound_info['name'], 'name', 0)
    if not self.compound_info['inchikey_id']:
        # Could not resolve the compound at all: synthesize a unique key.
        print('WARNING, cant get inchi key for ', self.compound_info)
        print(self.meta_info)
        print('#########################')
        self.compound_info['inchikey_id'] = 'UNKNOWN_' + str(uuid.uuid4())
    # Backfill the pubchem id from the inchikey if it is still missing.
    if not self.compound_info['pubchem_id'] and self.compound_info['inchikey_id']:
        self._set_inchi_pcc(self.compound_info['inchikey_id'], 'inchikey', 0)
    if not self.compound_info['name']:
        self.compound_info['name'] = 'unknown name'
    # Only queue compounds not already seen (deduplicated by inchikey).
    if not self.compound_info['inchikey_id'] in self.compound_ids:
        self.compound_info_all.append(
            tuple(self.compound_info.values())
            + (str(datetime.datetime.now()), str(datetime.datetime.now()),))
        self.compound_ids.append(self.compound_info['inchikey_id'])
Update the compound_info dictionary with the current chunk of compound details
54,424
def _store_meta_info(self):
    """Finalize the meta_info dict for the current record and queue it.

    Derives the precursor m/z from the exact mass when missing, and the
    polarity from the trailing sign of the precursor type.
    """
    if not self.meta_info['precursor_mz'] and self.meta_info['precursor_type'] \
            and self.compound_info['exact_mass']:
        self.meta_info['precursor_mz'] = get_precursor_mz(
            float(self.compound_info['exact_mass']),
            self.meta_info['precursor_type'])
    if not self.meta_info['polarity']:
        # e.g. "[M+H]+" -> positive, "[M-H]-" -> negative
        m = re.search('^\[.*\](\-|\+)', self.meta_info['precursor_type'],
                      re.IGNORECASE)
        if m:
            polarity = m.group(1).strip()
            if polarity == '+':
                self.meta_info['polarity'] = 'positive'
            elif polarity == '-':
                self.meta_info['polarity'] = 'negative'
    if not self.meta_info['accession']:
        self.meta_info['accession'] = 'unknown accession'
    # Row layout: id, <meta columns...>, source id, inchikey foreign key.
    self.meta_info_all.append(
        (str(self.current_id_meta),)
        + tuple(self.meta_info.values())
        + (str(self.current_id_origin), self.compound_info['inchikey_id'],))
Update the meta dictionary with the current chunk of meta data details
54,425
def _parse_spectra_annotation ( self , line ) : if re . match ( '^PK\$NUM_PEAK(.*)' , line , re . IGNORECASE ) : self . start_spectra_annotation = False return saplist = line . split ( ) sarow = ( self . current_id_spectra_annotation , float ( saplist [ self . spectra_annotation_indexes [ 'm/z' ] ] ) if 'm/z' in self . spectra_annotation_indexes else None , saplist [ self . spectra_annotation_indexes [ 'tentative_formula' ] ] if 'tentative_formula' in self . spectra_annotation_indexes else None , float ( saplist [ self . spectra_annotation_indexes [ 'mass_error(ppm)' ] ] ) if 'mass_error(ppm)' in self . spectra_annotation_indexes else None , self . current_id_meta ) self . spectra_annotation_all . append ( sarow ) self . current_id_spectra_annotation += 1
Parse and store the spectral annotation details
54,426
def _parse_spectra ( self , line ) : if line in [ '\n' , '\r\n' , '//\n' , '//\r\n' , '' , '//' ] : self . start_spectra = False self . current_id_meta += 1 self . collect_meta = True return splist = line . split ( ) if len ( splist ) > 2 and not self . ignore_additional_spectra_info : additional_info = '' . join ( map ( str , splist [ 2 : len ( splist ) ] ) ) else : additional_info = '' srow = ( self . current_id_spectra , float ( splist [ 0 ] ) , float ( splist [ 1 ] ) , additional_info , self . current_id_meta ) self . spectra_all . append ( srow ) self . current_id_spectra += 1
Parse and store the spectral details
54,427
def _set_inchi_pcc(self, in_str, pcp_type, elem):
    """Query PubChem for a compound and fill in inchikey and related details.

    Args:
        in_str: search term (cid, smiles, name or inchikey); falsy skips lookup.
        pcp_type: the pubchempy namespace to search ``in_str`` in.
        elem: index of the PubChem hit to use.

    Returns:
        0 when the lookup is skipped or fails; None otherwise.
    """
    if not in_str:
        return 0
    try:
        pccs = pcp.get_compounds(in_str, pcp_type)
    # FIX: the original repeated four identical except blocks; a single
    # clause with an exception tuple is equivalent and clearer.
    except (pcp.BadRequestError, pcp.TimeoutError, pcp.ServerError,
            URLError, BadStatusLine) as e:
        # Network/API failures are reported but never fatal: the caller
        # falls back to other identifiers.
        print(e)
        return 0
    if pccs:
        pcc = pccs[elem]
        self.compound_info['inchikey_id'] = pcc.inchikey
        self.compound_info['pubchem_id'] = pcc.cid
        self.compound_info['molecular_formula'] = pcc.molecular_formula
        self.compound_info['molecular_weight'] = pcc.molecular_weight
        self.compound_info['exact_mass'] = pcc.exact_mass
        self.compound_info['smiles'] = pcc.canonical_smiles
        if len(pccs) > 1:
            print('WARNING, multiple compounds for ', self.compound_info)
Check pubchem compounds via API for both an inchikey and any available compound details
54,428
def _get_other_names ( self , line ) : m = re . search ( self . compound_regex [ 'other_names' ] [ 0 ] , line , re . IGNORECASE ) if m : self . other_names . append ( m . group ( 1 ) . strip ( ) )
Parse and extract any other names that might be recorded for the compound
54,429
def _parse_meta_info(self, line):
    """Extract meta data from a line by applying every meta_regex pattern."""
    # Fixed ms level / polarity supplied at construction are applied first;
    # note a matching regex on this line can still overwrite them below.
    if self.mslevel:
        self.meta_info['ms_level'] = self.mslevel
    if self.polarity:
        self.meta_info['polarity'] = self.polarity
    for key, regexes in six.iteritems(self.meta_regex):
        for regex in regexes:
            match = re.search(regex, line, re.IGNORECASE)
            if match:
                self.meta_info[key] = match.group(1).strip()
Parse and extract all meta data by looping through the dictionary of meta_info regexes.
54,430
def _parse_compound_info(self, line):
    """Extract compound data from a line by applying every compound_regex pattern."""
    for key, regexes in six.iteritems(self.compound_regex):
        for regex in regexes:
            # Once a field is populated, later patterns never overwrite it.
            if self.compound_info[key]:
                continue
            match = re.search(regex, line, re.IGNORECASE)
            if match:
                self.compound_info[key] = match.group(1).strip()
    self._get_other_names(line)
Parse and extract all compound data by looping through the dictionary of compound_info regexes.
54,431
def insert_data(self, remove_data=False, db_type='sqlite'):
    """Insert data stored in the current chunk of parsing into the database.

    Args:
        remove_data: clear the in-memory buffers after inserting.
        db_type: target database flavour passed through to insert_query_m.
    """
    if self.update_source:
        # NOTE(review): this INSERT is built with str.format rather than
        # bound parameters; the values are internal, but parameterizing
        # would be safer.
        import msp2db
        self.c.execute(
            "INSERT INTO library_spectra_source (id, name, parsing_software) VALUES"
            " ({a}, '{b}', 'msp2db-v{c}')".format(a=self.current_id_origin,
                                                  b=self.source,
                                                  c=msp2db.__version__))
        self.conn.commit()
    if self.compound_info_all:
        self.compound_info_all = _make_sql_compatible(self.compound_info_all)
        cn = ', '.join(self.compound_info.keys()) + ',created_at,updated_at'
        insert_query_m(self.compound_info_all, columns=cn, conn=self.conn,
                       table='metab_compound', db_type=db_type)
    self.meta_info_all = _make_sql_compatible(self.meta_info_all)
    cn = 'id,' + ', '.join(self.meta_info.keys()) + ',library_spectra_source_id, inchikey_id'
    insert_query_m(self.meta_info_all, columns=cn, conn=self.conn,
                   table='library_spectra_meta', db_type=db_type)
    cn = "id, mz, i, other, library_spectra_meta_id"
    insert_query_m(self.spectra_all, columns=cn, conn=self.conn,
                   table='library_spectra', db_type=db_type)
    if self.spectra_annotation_all:
        cn = "id, mz, tentative_formula, mass_error, library_spectra_meta_id"
        insert_query_m(self.spectra_annotation_all, columns=cn, conn=self.conn,
                       table='library_spectra_annotation', db_type=db_type)
    if remove_data:
        # Reset the buffers for the next chunk and refresh the id counters.
        self.meta_info_all = []
        self.spectra_all = []
        self.spectra_annotation_all = []
        self.compound_info_all = []
        self._get_current_ids(source=False)
Insert data stored in the current chunk of parsing into the selected database
54,432
def line(line_def, **kwargs):
    """Highlight the character following each '@' marker in the line definition."""
    def _bold(match):
        # Drop the '@' and wrap the marked character in bold parentheses.
        return "(%s)" % ansi.aformat(match.group()[1:], attrs=["bold", ])
    return ansi.aformat(re.sub('@.?', _bold, line_def), **kwargs)
Highlights a character in the line
54,433
def try_and_error(*funcs):
    """Combine multiple validation functions into one.

    The returned validator tries each function in order and returns the
    result of the first that does not raise ValueError/TypeError. If all
    fail, the last error is re-raised.
    """
    def validate(value):
        last_error = None
        for func in funcs:
            try:
                return func(value)
            except (ValueError, TypeError) as err:
                last_error = err
        raise last_error
    return validate
Apply multiple validation functions
54,434
def validate_text(value):
    """Validate a text formatoption.

    Accepts a single string (placed at the default position), a single tuple,
    or a list of tuples of the form ``(x, y, s[, trans, params])``.

    Returns:
        A list of 5-tuples ``(x, y, s, trans, params)``.

    Raises:
        ValueError: if the value (or any element) has the wrong shape.
    """
    possible_transform = ['axes', 'fig', 'data']
    validate_transform = ValidateInStrings('transform', possible_transform,
                                           True)
    tests = [validate_float, validate_float, validate_str,
             validate_transform, dict]
    if isinstance(value, six.string_types):
        xpos, ypos = rcParams['texts.default_position']
        return [(xpos, ypos, value, 'axes', {'ha': 'right'})]
    elif isinstance(value, tuple):
        value = [value]
    try:
        value = list(value)[:]
    except TypeError:
        raise ValueError("Value must be string or list of tuples!")
    for i, val in enumerate(value):
        try:
            val = tuple(val)
        except TypeError:
            raise ValueError(
                "Text must be an iterable of the form "
                "(x, y, s[, trans, params])!")
        if len(val) < 3:
            raise ValueError(
                "Text tuple must at least be like [x, y, s], with floats x, "
                "y and string s!")
        elif len(val) == 3 or isinstance(val[3], dict):
            # Insert the default transform (and empty params) when omitted.
            val = list(val)
            val.insert(3, 'data')
            if len(val) == 4:
                val += [{}]
            val = tuple(val)
        if len(val) > 5:
            raise ValueError(
                "Text tuple must not be longer then length 5. It can be "
                "like (x, y, s[, trans, params])!")
        # BUG FIX: the original stored a *generator expression* here, so the
        # per-element validators were never executed and the element was not
        # a tuple; materialize it so validation actually runs.
        value[i] = tuple(validate(x) for validate, x in zip(tests, val))
    return value
Validate a text formatoption
54,435
def validate_none(b):
    """Validate that None was given (the strings 'none'/'None' also count)."""
    candidate = b.lower() if isinstance(b, six.string_types) else b
    if candidate is None or candidate == 'none':
        return None
    raise ValueError('Could not convert "%s" to None' % candidate)
Validate that None is given
54,436
def validate_axiscolor(value):
    """Validate a dictionary containing axiscolor definitions.

    ``value`` may be a mapping from sides ('right'/'left'/'top'/'bottom') to
    colors (or None), or a single scalar that is applied to all four sides.
    """
    validate = try_and_error(validate_none, validate_color)
    possible_keys = {'right', 'left', 'top', 'bottom'}
    try:
        value = dict(value)
        false_keys = set(value) - possible_keys
        if false_keys:
            raise ValueError("Wrong keys (%s)!" % (', '.join(false_keys)))
        for key, val in value.items():
            value[key] = validate(val)
    # FIX: the original used a bare ``except:`` which also swallowed
    # KeyboardInterrupt/SystemExit; only the errors dict()/validate raise
    # are meant to trigger the scalar fallback.
    except (ValueError, TypeError):
        # Not a valid mapping: treat as a single color for all sides.
        value = dict(zip(possible_keys, repeat(validate(value))))
    return value
Validate a dictionary containing axiscolor definitions
54,437
def validate_cbarpos(value):
    """Validate a colorbar position specification.

    Returns a set of known position codes; unknown codes are warned about
    and dropped.
    """
    patt = 'sh|sv|fl|fr|ft|fb|b|r'
    if value is True:
        value = {'b'}
    elif not value:
        value = set()
    elif isinstance(value, six.string_types):
        for s in re.finditer('[^%s]+' % patt, value):
            warn("Unknown colorbar position %s!" % s.group(), RuntimeWarning)
        value = set(re.findall(patt, value))
    else:
        value = validate_stringset(value)
        # BUG FIX: the original removed from ``value`` while a generator was
        # still iterating it, raising "set changed size during iteration".
        # Collect the rejects into a list first.
        for s in [s for s in value if not re.match(patt, s)]:
            warn("Unknown colorbar position %s!" % s)
            value.remove(s)
    return value
Validate a colorbar position
54,438
def validate_cmap(val):
    """Validate a colormap: a registered name or a Colormap instance."""
    from matplotlib.colors import Colormap
    try:
        return validate_str(val)
    except ValueError:
        if isinstance(val, Colormap):
            return val
        raise ValueError("Could not find a valid colormap!")
Validate a colormap
54,439
def validate_cmaps(cmaps):
    """Validate a dictionary of color lists and add reversed ('_r') variants.

    Raises:
        ValueError: propagated from the key/color-list validators.
    """
    cmaps = {validate_str(key): validate_colorlist(val)
             for key, val in cmaps}
    # BUG FIX: iterate over a snapshot — setdefault adds the '_r' entries
    # while the loop runs, and mutating the dict being iterated raises
    # "dictionary changed size during iteration".
    for key, val in list(six.iteritems(cmaps)):
        cmaps.setdefault(key + '_r', val[::-1])
    return cmaps
Validate a dictionary of color lists
54,440
def validate_lineplot(value):
    """Validate the value for the LinePlotter.plot formatoption.

    Accepts None, a single string, or an iterable of None/strings.
    """
    if value is None:
        return value
    if isinstance(value, six.string_types):
        return six.text_type(value)
    value = list(value)
    for i, v in enumerate(value):
        if isinstance(v, six.string_types):
            value[i] = six.text_type(v)
        elif v is not None:
            raise ValueError('Expected None or string, found %s' % (v,))
    return value
Validate the value for the LinePlotter . plot formatoption
54,441
def visit_GpxModel(self, gpx_model, *args, **kwargs):
    """Render a GpxModel as a single JSON-ready ordered structure."""
    result = OrderedDict()
    # Scalar attributes first, then the list-valued ones, in output order.
    for name in ('creator', 'metadata'):
        self.optional_attribute_scalar(result, gpx_model, name, None)
    for name in ('waypoints', 'routes', 'tracks', 'extensions'):
        self.optional_attribute_list(result, gpx_model, name, None)
    return result
Render a GPXModel as a single JSON structure .
54,442
def visit_Metadata(self, metadata, *args, **kwargs):
    """Render GPX metadata as a single JSON-ready ordered structure."""
    result = OrderedDict()
    # (attribute, is_list) pairs in the required output order.
    fields = (('name', False), ('description', False), ('author', False),
              ('copyright', False), ('links', True), ('time', False),
              ('keywords', False), ('bounds', False), ('extensions', True))
    for name, is_list in fields:
        putter = (self.optional_attribute_list if is_list
                  else self.optional_attribute_scalar)
        putter(result, metadata, name, None)
    return result
Render GPX Metadata as a single JSON structure .
54,443
def swap_default(mode, equation, symbol_names, default, **kwargs):
    """Substitute defaults for symbols not supplied as keyword arguments.

    Args:
        mode: 'subs', 'limit', or 'limit_default' — chooses whether explicit
            values and defaults are applied by substitution or by a limit.
        equation: sympy equation or equality to transform.
        symbol_names: iterable of symbol names to process.
        default: value used for symbols with no keyword override.
        **kwargs: explicit per-symbol values; iterable values are skipped.

    Returns:
        The transformed equation.

    Raises:
        ValueError: if ``mode`` is not one of the recognized modes.
    """
    if mode == 'subs':
        swap_f = default_swap_f = _subs
    elif mode == 'limit':
        swap_f, default_swap_f = _limit, _subs
    elif mode == 'limit_default':
        swap_f, default_swap_f = _subs, _limit
    else:
        # FIX: the original raised a bare ValueError() with no message.
        raise ValueError("mode must be 'subs', 'limit' or 'limit_default', "
                         "got %r" % (mode,))
    result = equation
    for s in symbol_names:
        if s in kwargs:
            # Iterable overrides are left untouched (handled elsewhere).
            if isinstance(kwargs[s], Iterable):
                continue
            result = swap_f(result, s, kwargs[s])
        else:
            result = default_swap_f(result, s, default)
    return result
Given a sympy equation or equality along with a list of symbol names substitute the specified default value for each symbol for which a value is not provided through a keyword argument .
54,444
def z_transfer_functions():
    r"""Return symbolic transfer functions of the measured RMS voltage.

    One equality per control board hardware version, indexed by version
    number in a pandas Series.
    """
    V1, V2, Z1, Z2 = sp.symbols('V1 V2 Z1 Z2')
    xfer_funcs = pd.Series(
        [sp.Eq(V2 / Z2, V1 / (Z1 + Z2)),
         sp.Eq(V2 / V1, Z2 / Z1)],
        index=[1, 2])
    xfer_funcs.index.name = 'Hardware version'
    return xfer_funcs
Return a symbolic equality representation of the transfer function of RMS voltage measured by either control board analog feedback circuit.
54,445
def has_option(section, name):
    """Wrapper around ConfigParser's has_option method.

    Reads CONFIG_LOCATIONS with default working_dir/debug values applied.
    """
    cfg = ConfigParser.SafeConfigParser({"working_dir": "/tmp", "debug": "0"})
    cfg.read(CONFIG_LOCATIONS)
    return cfg.has_option(section, name)
Wrapper around ConfigParser s has_option method .
54,446
def get(section, name):
    """Wrapper around ConfigParser's get method.

    Reads CONFIG_LOCATIONS with default working_dir/debug values and strips
    surrounding quotes from the returned value.
    """
    cfg = ConfigParser.SafeConfigParser({"working_dir": "/tmp", "debug": "0"})
    cfg.read(CONFIG_LOCATIONS)
    val = cfg.get(section, name)
    # Config values may be quoted with either quote style; remove both.
    return val.strip("'").strip('"')
Wrapper around ConfigParser s get method .
54,447
def make_key(table_name, objid):
    """Create an object key for storage."""
    key = datastore.Key()
    elem = key.path_element.add()
    elem.kind = table_name
    # Datastore key names are strings regardless of the id's type.
    elem.name = str(objid)
    return key
Create an object key for storage .
54,448
def extract_entity(found):
    """Copy a found entity's string properties into a plain dict."""
    return {prop.name: prop.value.string_value
            for prop in found.entity.property}
Copy found entity to a dict .
54,449
def read_rec(table_name, objid):
    """Generator that yields keyed recs from the store."""
    req = datastore.LookupRequest()
    req.key.extend([make_key(table_name, objid)])
    for found in datastore.lookup(req).found:
        yield extract_entity(found)
Generator that yields keyed recs from store .
54,450
def read_by_indexes(table_name, index_name_values=None):
    """Generator yielding entities of kind ``table_name`` matching the
    given ``(name, value)`` equality filters.

    Paginates through result batches via the end cursor, and raises
    ``ValueError`` after too many consecutive empty batches to avoid
    spinning forever on a misbehaving backend.
    """
    req = datastore.RunQueryRequest()
    query = req.query
    query.kind.add().name = table_name
    if not index_name_values:
        index_name_values = []
    for name, val in index_name_values:
        # NOTE(review): each iteration writes to the same property_filter
        # message, so only the last (name, val) pair appears to take
        # effect — confirm against the datastore API.
        queryFilter = query.filter.property_filter
        queryFilter.property.name = name
        queryFilter.operator = datastore.PropertyFilter.EQUAL
        queryFilter.value.string_value = str(val)
    loop_its = 0
    have_more = True
    while have_more:
        resp = datastore.run_query(req)
        found_something = False
        for found in resp.batch.entity_result:
            yield extract_entity(found)
            found_something = True
        if not found_something:
            # Count consecutive empty batches; too many means trouble.
            loop_its += 1
            if loop_its > 5:
                raise ValueError("Exceeded the excessive query threshold")
        if resp.batch.more_results != datastore.QueryResultBatch.NOT_FINISHED:
            have_more = False
        else:
            have_more = True
            # Resume the next batch from where this one stopped.
            end_cursor = resp.batch.end_cursor
            query.start_cursor.CopyFrom(end_cursor)
Index reader .
54,451
def delete_table(table_name):
    """Delete every record of kind ``table_name`` (mainly for testing)."""
    doomed_keys = [make_key(table_name, rec['id'])
                   for rec in read_by_indexes(table_name, [])]
    with DatastoreTransaction() as tx:
        tx.get_commit_req().mutation.delete.extend(doomed_keys)
Mainly for testing .
54,452
def get_commit_req(self):
    """Lazily build and return the commit request bound to this
    transaction."""
    if not self.commit_req:
        request = datastore.CommitRequest()
        request.transaction = self.tx
        self.commit_req = request
    return self.commit_req
Lazy commit request getter .
54,453
def call(command, stdin=None, stdout=subprocess.PIPE, env=os.environ,
         cwd=None, shell=False, output_log_level=logging.INFO,
         sensitive_info=False):
    """Run ``command`` in a subprocess and return ``(returncode, output)``.

    ``command`` is split on whitespace (quote-aware) unless ``shell`` is
    true, in which case it is passed to the shell verbatim.  stderr is
    merged into stdout.  ``sensitive_info`` suppresses logging of the
    command line and stdin.

    Raises CommandMissingException when the executable cannot be found
    (non-shell mode), and re-raises OSError after logging when the child
    could not be started.
    """
    if not sensitive_info:
        logger.debug("calling command: %s" % command)
    else:
        logger.debug("calling command with sensitive information")
    try:
        args = command if shell else whitespace_smart_split(command)
        kw = {}
        if not shell and not which(args[0], cwd=cwd):
            raise CommandMissingException(args[0])
        if shell:
            kw['shell'] = True
        process = subprocess.Popen(args,
                                   stdin=subprocess.PIPE,
                                   stdout=stdout,
                                   stderr=subprocess.STDOUT,
                                   env=env, cwd=cwd, **kw)
        output = process.communicate(input=stdin)[0]
        if output is not None:
            try:
                logger.log(output_log_level, output.decode('utf-8'))
            except UnicodeDecodeError:
                # Best effort: skip logging output that isn't valid UTF-8.
                pass
        return (process.returncode, output)
    except OSError:
        e = sys.exc_info()[1]
        if not sensitive_info:
            logger.exception("Error running command: %s" % command)
            logger.error("Root directory: %s" % cwd)
            if stdin:
                logger.error("stdin: %s" % stdin)
        raise e
Run a command in a subprocess with quote - aware argument splitting , logging of output , and explicit error handling ; returns the return code and captured output .
54,454
def whitespace_smart_split(command):
    """Split ``command`` on spaces, keeping double-quoted runs (quotes
    included) as single tokens; a backslash escapes a quote inside a
    quoted run.  Consecutive spaces produce empty tokens."""
    pieces = []
    current = ""
    in_quotes = False
    escaped = False
    for ch in command:
        if ch == '"':
            current += ch
            if in_quotes:
                if not escaped:
                    in_quotes = False
                escaped = False
            else:
                in_quotes = True
        elif in_quotes:
            escaped = (ch == '\\')
            current += ch
        elif ch == ' ':
            pieces.append(current)
            current = ""
        else:
            current += ch
    if current != "":
        pieces.append(current)
    return pieces
Split a command by whitespace taking care to not split on whitespace within quotes .
54,455
def sync(self):
    """Execute the steps required to bring the feature to its desired
    state, logging start and finish around the phase handler.

    The phase (install/remove/...) is derived from the formula instance,
    and the handler method of the same name is invoked on self.
    """
    phase = _get_phase(self._formula_instance)
    self.logger.info("%s %s..." % (phase.verb.capitalize(), self.feature_name))
    message = "...finished %s %s." % (phase.verb, self.feature_name)
    result = getattr(self, phase.name)()
    # Install/remove always log completion at INFO; other phases only do
    # so when the handler reports that it actually did something.
    if result or phase in (PHASE.INSTALL, PHASE.REMOVE):
        self.logger.info(message)
    else:
        self.logger.debug(message)
    return result
execute the steps required to have the feature end with the desired state .
54,456
def isloaded(self, name):
    """Check whether the hook module(s) named by ``name`` are loaded.

    ``None`` always matches; a string matches a single loaded module
    name; any other iterable matches when every name in it is loaded;
    anything else returns False.
    """
    if name is None:
        return True
    if isinstance(name, str):
        return name in [hook.__module__ for hook in self]
    if isinstance(name, Iterable):
        return set(name).issubset([hook.__module__ for hook in self])
    return False
Checks if given hook module has been loaded
54,457
def hook(self, function, dependencies=None):
    """Try to load a hook, deferring it until its dependencies are loaded.

    Parameters
    ----------
    function : callable
        The hook to register; its ``__deps__`` attribute records its
        dependencies (only stamped the first time it is seen).
    dependencies : None, str, or iterable of str
        Module names that must be loaded before the hook activates.

    Raises
    ------
    TypeError
        If ``dependencies`` is not None, a string, or an iterable.
    """
    if not isinstance(dependencies, (Iterable, type(None), str)):
        raise TypeError("Invalid list of dependencies provided!")
    if not hasattr(function, "__deps__"):
        function.__deps__ = dependencies
    if self.isloaded(function.__deps__):
        self.append(function)
    else:
        self._later.append(function)
    # FIX: iterate over a snapshot — the original removed elements from
    # self._later while iterating it, which skips entries.
    for ext in list(self._later):
        if self.isloaded(ext.__deps__):
            self._later.remove(ext)
            self.hook(ext)
Tries to load a hook
54,458
def parse_from_json(json_str):
    """Parse a Unified Uploader message, returning a MarketOrderList or
    MarketHistoryList instance.

    Raises
    ------
    ParseError
        If the JSON is malformed, required keys are missing or have the
        wrong type, or the upload type is unknown.
    """
    try:
        message_dict = json.loads(json_str)
    except ValueError:
        raise ParseError("Mal-formed JSON input.")
    upload_keys = message_dict.get('uploadKeys', False)
    if upload_keys is False:
        raise ParseError(
            "uploadKeys does not exist. At minimum, an empty array is required.")
    elif not isinstance(upload_keys, list):
        raise ParseError("uploadKeys must be an array object.")
    # FIX: a missing resultType previously escaped as a raw KeyError
    # instead of the documented ParseError.
    try:
        upload_type = message_dict['resultType']
    except KeyError:
        raise ParseError("resultType key is missing.")
    try:
        if upload_type == 'orders':
            return orders.parse_from_dict(message_dict)
        elif upload_type == 'history':
            return history.parse_from_dict(message_dict)
        else:
            raise ParseError(
                'Unified message has unknown upload_type: %s' % upload_type)
    except TypeError as exc:
        raise ParseError(exc.message)
Given a Unified Uploader message parse the contents and return a MarketOrderList or MarketHistoryList instance .
54,459
def encode_to_json(order_or_history):
    """Encode a MarketOrderList or MarketHistoryList to JSON and return
    the result; any other type raises."""
    if isinstance(order_or_history, MarketOrderList):
        return orders.encode_to_json(order_or_history)
    if isinstance(order_or_history, MarketHistoryList):
        return history.encode_to_json(order_or_history)
    raise Exception("Must be one of MarketOrderList or MarketHistoryList.")
Given an order or history entry encode it to JSON and return .
54,460
def add(self, classifier, threshold, begin=None, end=None):
    """Append a new strong classifier, built from the weak machines of
    ``classifier`` in range [begin, end), to the cascade with the given
    threshold, then refresh the feature indices."""
    start = 0 if begin is None else begin
    stop = len(classifier.weak_machines) if end is None else end
    strong = bob.learn.boosting.BoostedMachine()
    for index in range(start, stop):
        strong.add_weak_machine(classifier.weak_machines[index],
                                classifier.weights[index])
    self.cascade.append(strong)
    self.thresholds.append(threshold)
    self._indices()
Adds a new strong classifier with the given threshold to the cascade .
54,461
def create_from_boosted_machine(self, boosted_machine, classifiers_per_round,
                                classification_thresholds=-5.):
    """Create this cascade from the given boosted machine by splitting off
    strong classifiers with ``classifiers_per_round`` weak classifiers
    each.

    ``classification_thresholds`` may be a single number (applied to
    every stage) or a list with one threshold per stage.
    """
    # Stage boundaries into the flat weak-machine list; include the final
    # partial stage when the total isn't an exact multiple.
    indices = list(range(0, len(boosted_machine.weak_machines),
                         classifiers_per_round))
    if indices[-1] != len(boosted_machine.weak_machines):
        indices.append(len(boosted_machine.weak_machines))
    self.cascade = []
    self.indices = []
    for i in range(len(indices) - 1):
        machine = bob.learn.boosting.BoostedMachine()
        for index in range(indices[i], indices[i + 1]):
            machine.add_weak_machine(boosted_machine.weak_machines[index],
                                     boosted_machine.weights[index, 0])
        self.cascade.append(machine)
    if isinstance(classification_thresholds, (int, float)):
        self.thresholds = [classification_thresholds] * len(self.cascade)
    else:
        self.thresholds = classification_thresholds
Creates this cascade from the given boosted machine by simply splitting off strong classifiers that have classifiers_per_round weak classifiers .
54,462
def save(self, hdf5):
    """Save this cascade — thresholds, one group per stage classifier,
    and the feature extractor — into the given HDF5 file."""
    hdf5.set("Thresholds", self.thresholds)
    for index, stage in enumerate(self.cascade):
        group = "Classifier_%d" % (index + 1)
        hdf5.create_group(group)
        hdf5.cd(group)
        stage.save(hdf5)
        hdf5.cd("..")
    hdf5.create_group("FeatureExtractor")
    hdf5.cd("FeatureExtractor")
    self.extractor.save(hdf5)
    hdf5.cd("..")
Saves this cascade into the given HDF5 file .
54,463
def load(self, hdf5):
    """Load this cascade — thresholds, stage classifiers, and the feature
    extractor — from the given HDF5 file, then rebuild the indices."""
    self.thresholds = hdf5.read("Thresholds")
    self.cascade = []
    for index in range(len(self.thresholds)):
        group = "Classifier_%d" % (index + 1)
        hdf5.cd(group)
        self.cascade.append(bob.learn.boosting.BoostedMachine(hdf5))
        hdf5.cd("..")
    hdf5.cd("FeatureExtractor")
    self.extractor = FeatureExtractor(hdf5)
    hdf5.cd("..")
    self._indices()
Loads this cascade from the given HDF5 file .
54,464
def check(ctx, repository, config):
    """Check commits: store a Repo on the click context for subcommands."""
    ctx.obj = Repo(repository=repository, config=config)
Check commits .
54,465
def message(obj, commit='HEAD', skip_merge_commits=False):
    """Check the messages of the commits reachable from ``commit``.

    Walks commits with pygit2 when available, falling back to GitPython;
    prints a (optionally colored) report per commit and aborts when any
    message has errors.  Returns 2 when no git backend is available.
    """
    from ..kwalitee import check_message
    options = obj.options
    repository = obj.repository
    if options.get('colors') is not False:
        colorama.init(autoreset=True)
        reset = colorama.Style.RESET_ALL
        yellow = colorama.Fore.YELLOW
        green = colorama.Fore.GREEN
        red = colorama.Fore.RED
    else:
        reset = yellow = green = red = ''
    try:
        # pygit2 commits expose the sha as `.oid`.
        sha = 'oid'
        commits = _pygit2_commits(commit, repository)
    except ImportError:
        try:
            # GitPython commits expose it as `.hexsha`.
            sha = 'hexsha'
            commits = _git_commits(commit, repository)
        except ImportError:
            click.echo('To use this feature, please install pygit2. '
                       'GitPython will also work but is not recommended '
                       '(python <= 2.7 only).',
                       file=sys.stderr)
            return 2
    template = '{0}commit {{commit.{1}}}{2}\n\n'.format(yellow, sha, reset)
    template += '{message}{errors}'
    count = 0
    # Indentation prefix for commit messages.
    # NOTE(review): the literal below may have been wider originally;
    # whitespace inside strings was collapsed by tokenization — confirm.
    ident = ' '
    re_line = re.compile('^', re.MULTILINE)
    for commit in commits:
        if skip_merge_commits and _is_merge_commit(commit):
            continue
        message = commit.message
        errors = check_message(message, **options)
        message = re.sub(re_line, ident, message)
        if errors:
            count += 1
            errors.insert(0, red)
        else:
            errors = [green, 'Everything is OK.']
        errors.append(reset)
        click.echo(template.format(commit=commit,
                                   message=message.encode('utf-8'),
                                   errors='\n'.join(errors)))
    # Abort with a non-zero exit when at least one commit had errors.
    if min(count, 1):
        raise click.Abort
Check the messages of the commits .
54,466
def get_obj_subcmds(obj):
    """Collect command callables exposed as attributes of ``obj``.

    Returns an OrderedDict mapping command name -> callable for every
    public, callable attribute recognized as a command (see ``is_cmd``).
    A trailing underscore in an attribute name is stripped from the
    command name (used to dodge Python keywords).
    """
    subcmds = []
    for label in dir(obj.__class__):
        if label.startswith("_"):
            continue
        # Skip properties: getattr on them could execute arbitrary code,
        # and they are not commands anyway.
        if isinstance(getattr(obj.__class__, label, False), property):
            continue
        rvalue = getattr(obj, label)
        if not callable(rvalue) or not is_cmd(rvalue):
            continue
        # Python 2 bound-method plumbing attributes are not commands.
        if isinstance(obj, types.MethodType) and \
                label in ("im_func", "im_self", "im_class"):
            continue
        command_name = getattr(rvalue, "command_name",
                               label[:-1] if label.endswith("_") else label)
        subcmds.append((command_name, rvalue))
    return OrderedDict(subcmds)
Fetch command actions from the callable attributes of the given object .
54,467
def get_module_resources(mod):
    """Yield names of probed sibling sub-modules of the given module.

    Sub-modules are files named ``<modbasename>_*.py`` next to ``mod``'s
    file; when that file does not exist on disk (e.g. zip/egg install),
    pkg_resources is used to list candidates instead.
    """
    path = os.path.dirname(os.path.realpath(mod.__file__))
    prefix = kf.basename(mod.__file__, (".py", ".pyc"))
    if not os.path.exists(mod.__file__):
        import pkg_resources
        for resource_name in pkg_resources.resource_listdir(mod.__name__, ''):
            if resource_name.startswith("%s_" % prefix) and \
                    resource_name.endswith(".py"):
                module_name, _ext = os.path.splitext(
                    kf.basename(resource_name))
                yield module_name
    for f in glob.glob(os.path.join(path, '%s_*.py' % prefix)):
        module_name, _ext = os.path.splitext(kf.basename(f))
        yield module_name
Return probed sub module names from given module
54,468
def get_mod_subcmds(mod):
    """Fetch sub-commands attached to ``mod`` plus ``Command`` objects
    found in same-directory ``<mod>_*.py`` modules.

    Raises ValueError when a module command name collides with an
    already-defined object command.
    """
    subcmds = get_obj_subcmds(mod)
    path = os.path.dirname(os.path.realpath(mod.__file__))
    if mod.__package__ is None:
        # Allow the relative imports below even when run as a script.
        sys.path.insert(0, os.path.dirname(path))
        mod.__package__ = kf.basename(path)
    for module_name in get_module_resources(mod):
        try:
            mod = importlib.import_module(".%s" % module_name,
                                          mod.__package__)
        except ImportError as e:
            msg.warn("%r could not be loaded: %s"
                     % (module_name, e.message))
            continue
        except IOError as e:
            print("%s" % module_name)
            raise
        if hasattr(mod, "Command") and is_cmd(mod.Command):
            obj = mod.Command
            if obj.__doc__ is None:
                msg.warn("Missing doc string for command from "
                         "module %s" % module_name)
                continue
            if isinstance(obj, type):
                # Classes get instantiated; instances are used as-is.
                obj = obj()
            # Command name is the part after '<prefix>_'.
            name = module_name.split("_", 1)[1]
            if name in subcmds:
                raise ValueError(
                    "Module command %r conflicts with already defined object "
                    "command." % name)
            subcmds[name] = obj
    return subcmds
Fetch command actions from sibling Python modules located in the same directory as the given module .
54,469
def get_help(obj, env, subcmds):
    """Interpolate and return the complete help text of ``obj``.

    Builds the ``%(actions)s``, ``%(actions_help)s``, ``%(std_usage)s``
    and ``%(std_options)s`` substitutions from ``env`` and the
    sub-command table, then applies them to the object's docstring.
    Exits the process on interpolation failure.
    """
    doc = txt.dedent(obj.__doc__ or "")
    env = env.copy()
    doc = doc.strip()
    if not re.search(r"^usage:\s*$", doc,
                     flags=re.IGNORECASE | re.MULTILINE):
        # NOTE(review): dedent() is called with no argument here; the
        # default usage template text appears to have been lost in this
        # copy — confirm against upstream.
        doc += txt.dedent()
    help_line = (" %%-%ds %%s"
                 % (max([5] + [len(a) for a in subcmds]), ))
    env["actions"] = "\n".join(
        help_line % (name,
                     get_help(subcmd, subcmd_env(env, name),
                              {}).split("\n")[0])
        for name, subcmd in subcmds.items())
    env["actions_help"] = "" if not env["actions"] else (
        "ACTION could be one of:\n\n"
        "%(actions)s\n\n"
        "See '%(surcmd)s help ACTION' for more information "
        "on a specific command." % env)
    if "%(std_usage)s" in doc:
        env["std_usage"] = txt.indent(
            ("%(surcmd)s --help\n"
             "%(surcmd)s --version"
             + (("\n%(surcmd)s help [COMMAND]"
                 "\n%(surcmd)s ACTION [ARGS...]") if subcmds else ""))
            % env,
            _find_prefix(doc, "%(std_usage)s"),
            first="")
    if "%(std_options)s" in doc:
        env["std_options"] = txt.indent(
            "--help Show this screen.\n"
            "--version Show version.",
            _find_prefix(doc, "%(std_options)s"),
            first="")
    if subcmds and "%(actions_help)s" not in doc:
        doc += "\n\n%(actions_help)s"
    try:
        output = doc % env
    except KeyError as e:
        msg.err("Doc interpolation of %s needed missing key %r"
                % (aformat(env["surcmd"], attrs=["bold", ]),
                   e.args[0]))
        exit(1)
    except Exception as e:
        msg.err("Documentation of %s is not valid. Please check it:\n%s"
                % (aformat(env["surcmd"], attrs=["bold", ]),
                   doc))
        exit(1)
    return output
Interpolate complete help doc of given object
54,470
def get_calling_prototype(acallable):
    """Return the actual calling prototype ``(args, defaults)`` of the
    given callable.

    ``args`` is the list of positional argument names a caller provides
    (any bound ``self`` excluded); ``defaults`` is the tuple of default
    values aligned to the tail of ``args``.

    Raises SyntaxError when the callable uses ``*args``/``**kwargs``,
    and ValueError for callables that are neither functions, methods,
    nor instances defining ``__call__``.
    """
    assert callable(acallable)
    # NOTE(review): inspect.getargspec was removed in Python 3.11; this
    # code predates that — getfullargspec is the modern replacement.
    if inspect.ismethod(acallable) or inspect.isfunction(acallable):
        args, vargs, vkwargs, defaults = inspect.getargspec(acallable)
    elif not inspect.isfunction(acallable) and \
            hasattr(acallable, "__call__"):
        args, vargs, vkwargs, defaults = \
            inspect.getargspec(acallable.__call__)
        # Drop the 'self' of the bound __call__ method.
        args = args[1:]
    else:
        raise ValueError("Hum, %r is a callable, but not a function/method, "
                         "nor a instance with __call__ arg..." % acallable)
    if vargs or vkwargs:
        raise SyntaxError("variable *arg or **kwarg are not supported.")
    if is_bound(acallable):
        # Drop the bound 'self'/'cls' argument.
        args = args[1:]
    if defaults is None:
        defaults = ()
    return args, defaults
Returns actual working calling prototype
54,471
def initialize(self):
    """Create the environment root directory tree (root plus bin, lib,
    include) and an empty manifest file if they don't already exist."""
    if not os.path.exists(self.root_dir):
        os.makedirs(self.root_dir)
    assert os.path.isdir(self.root_dir), \
        "%s is not a directory! Please move or remove it." % self.root_dir
    for subdir in ["bin", "lib", "include"]:
        subdir_path = os.path.join(self.root_dir, subdir)
        if not os.path.exists(subdir_path):
            os.makedirs(subdir_path)
    if not os.path.exists(self.manifest_path):
        # Touch an empty manifest file.
        open(self.manifest_path, "w+").close()
    self.new = False
Generate the root directory tree if it doesn 't already exist
54,472
def finalize(self):
    """Close any open rc/env file handles held by this directory."""
    for handle in (self.rc_file, self.env_file):
        if handle:
            handle.close()
finalize any open file handles
54,473
def remove(self):
    """Close open rc/env handles and delete the sprinter directory tree."""
    for handle in (self.rc_file, self.env_file):
        if handle:
            handle.close()
    shutil.rmtree(self.root_dir)
Removes the sprinter directory if it exists
54,474
def symlink_to_bin(self, name, path):
    """Symlink the object at ``path`` as ``name`` inside the bin folder,
    marking it user-readable and user-executable."""
    self.__symlink_dir("bin", name, path)
    # Ensure the linked target is executable/readable by the owner.
    os.chmod(os.path.join(self.root_dir, "bin", name),
             os.stat(path).st_mode | stat.S_IXUSR | stat.S_IRUSR)
Symlink an object at path to name in the bin folder .
54,475
def remove_feature(self, feature_name):
    """Remove a feature from the environment root folder: first its
    bin/lib symlinks, then its install directory if present."""
    self.clear_feature_symlinks(feature_name)
    if os.path.exists(self.install_directory(feature_name)):
        self.__remove_path(self.install_directory(feature_name))
Remove an feature from the environment root folder .
54,476
def clear_feature_symlinks(self, feature_name):
    """Remove bin/lib symlinks that resolve into the feature's install
    directory."""
    logger.debug("Clearing feature symlinks for %s" % feature_name)
    feature_path = self.install_directory(feature_name)
    for d in ('bin', 'lib'):
        if os.path.exists(os.path.join(self.root_dir, d)):
            for link in os.listdir(os.path.join(self.root_dir, d)):
                path = os.path.join(self.root_dir, d, link)
                # Only drop links that point into this feature's dir.
                if feature_path in os.path.realpath(path):
                    getattr(self, 'remove_from_%s' % d)(link)
Clear the symlinks for a feature in the symlinked path
54,477
def add_to_env(self, content):
    """Append ``content`` (plus a newline) to the env script, lazily
    opening the handle on first use.

    Raises DirectoryException when the directory was initialized without
    rewrite_config.
    """
    if not self.rewrite_config:
        raise DirectoryException(
            "Error! Directory was not intialized w/ rewrite_config.")
    if not self.env_file:
        # Lazily open the env script handle on first write.
        self.env_path, self.env_file = self.__get_env_handle(self.root_dir)
    self.env_file.write("%s\n" % content)
add content to the env script .
54,478
def add_to_rc(self, content):
    """Append ``content`` (plus a newline) to the rc script, lazily
    opening the handle on first use.

    Raises DirectoryException when the directory was initialized without
    rewrite_config.
    """
    if not self.rewrite_config:
        raise DirectoryException(
            "Error! Directory was not intialized w/ rewrite_config.")
    if not self.rc_file:
        # Lazily open the rc script handle on first write.
        self.rc_path, self.rc_file = self.__get_rc_handle(self.root_dir)
    self.rc_file.write("%s\n" % content)
add content to the rc script .
54,479
def add_to_gui(self, content):
    """Append ``content`` (plus a newline) to the gui script, lazily
    opening the handle on first use.

    Raises DirectoryException when the directory was initialized without
    rewrite_config.
    """
    if not self.rewrite_config:
        raise DirectoryException(
            "Error! Directory was not intialized w/ rewrite_config.")
    if not self.gui_file:
        # Lazily open the gui script handle on first write.
        self.gui_path, self.gui_file = self.__get_gui_handle(self.root_dir)
    self.gui_file.write("%s\n" % content)
add content to the gui script .
54,480
def __remove_path(self, path):
    """Remove a filesystem object: symlink, directory tree, or file.

    If the current working directory is the removed path, chdir to the
    temp directory so later relative operations don't fail.

    Raises DirectoryException when the OS-level removal fails.
    """
    curpath = os.path.abspath(os.curdir)
    if not os.path.exists(path):
        logger.warn("Attempted to remove a non-existent path %s" % path)
        return
    try:
        # Check islink first: isdir() follows symlinks.
        if os.path.islink(path):
            os.unlink(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.unlink(path)
        if curpath == path:
            os.chdir(tempfile.gettempdir())
    except OSError:
        logger.error("Unable to remove object at path %s" % path)
        raise DirectoryException("Unable to remove object at path %s" % path)
Remove an object
54,481
def __get_rc_handle(self, root_dir):
    """Return ``(path, handle)`` for the environment's rc file, writing a
    fresh preamble that sources the env file."""
    rc_path = os.path.join(root_dir, '.rc')
    env_path = os.path.join(root_dir, '.env')
    handle = open(rc_path, "w+")
    handle.write(source_template % (env_path, env_path))
    return (rc_path, handle)
get the filepath and filehandle to the rc file for the environment
54,482
def __symlink_dir(self, dir_name, name, path):
    """Symlink ``path`` as ``name`` inside ``dir_name`` under the root,
    replacing an existing symlink; a pre-existing non-symlink is left
    alone with a warning."""
    target_dir = os.path.join(self.root_dir, dir_name)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    target_path = os.path.join(self.root_dir, dir_name, name)
    logger.debug("Attempting to symlink %s to %s..." % (path, target_path))
    if os.path.exists(target_path):
        if not os.path.islink(target_path):
            logger.warn("%s is not a symlink! please remove it manually."
                        % target_path)
            return
        os.remove(target_path)
    os.symlink(path, target_path)
Symlink an object at path to name in the dir_name folder . remove it if it already exists .
54,483
def list_docs(self, options=None):
    """Return the API response listing previously created documents.

    ``options`` must be a dict; recognized keys include ``page``,
    ``per_page``, and ``raise_exception_on_failure``.

    Raises DocumentListingFailure on non-200 responses when
    raise_exception_on_failure is set.
    """
    if options is None:
        raise ValueError("Please pass in an options dict")
    merged = {
        "page": 1,
        "per_page": 100,
        "raise_exception_on_failure": False,
        "user_credentials": self.api_key,
    }
    merged.update(options)
    raise_on_failure = merged.pop("raise_exception_on_failure")
    resp = requests.get("%sdocs" % (self._url), params=merged,
                        timeout=self._timeout)
    if raise_on_failure and resp.status_code != 200:
        raise DocumentListingFailure(resp.content, resp.status_code)
    return resp
Return list of previously created documents .
54,484
def status(self, status_id, raise_exception_on_failure=False):
    """Return the status of a generation job.

    On HTTP 200, returns the parsed JSON status (with ``download_key``
    added once the job is completed); otherwise the raw response.

    Raises DocumentStatusFailure on non-200 responses when
    raise_exception_on_failure is set.
    """
    query = {"output": "json", "user_credentials": self.api_key}
    resp = requests.get("%sstatus/%s" % (self._url, status_id),
                        params=query, timeout=self._timeout)
    if raise_exception_on_failure and resp.status_code != 200:
        raise DocumentStatusFailure(resp.content, resp.status_code)
    if resp.status_code != 200:
        return resp
    as_json = json.loads(resp.content)
    if as_json["status"] == "completed":
        as_json["download_key"] = _get_download_key(as_json["download_url"])
    return as_json
Return the status of the generation job .
54,485
def download(self, download_key, raise_exception_on_failure=False):
    """Download and return the response for the file identified by
    ``download_key``.

    Raises DocumentDownloadFailure on non-200 responses when
    raise_exception_on_failure is set.
    """
    query = {"output": "json", "user_credentials": self.api_key}
    resp = requests.get("%sdownload/%s" % (self._url, download_key),
                        params=query, timeout=self._timeout)
    if raise_exception_on_failure and resp.status_code != 200:
        raise DocumentDownloadFailure(resp.content, resp.status_code)
    return resp
Download the file represented by the download_key .
54,486
def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject,
                                             desired_type: Type[Any],
                                             logger: Logger) -> Dict[str, Any]:
    """Build one parsing plan per child file of a multifile collection.

    Inspects ``desired_type`` to find the base type expected for items
    of the collection and relies on the parser finder to build a parsing
    plan for each child, returned keyed by child name.

    Raises FolderAndFilesStructureError when a tuple type's arity does
    not match the number of children on disk.
    """
    n_children = len(obj_on_fs.get_multifile_children())
    subtypes, key_type = _extract_collection_base_type(desired_type)
    if isinstance(subtypes, tuple):
        # Fixed-arity tuple: one subtype per child; counts must match.
        if n_children != len(subtypes):
            raise FolderAndFilesStructureError.create_for_multifile_tuple(
                obj_on_fs, len(subtypes),
                len(obj_on_fs.get_multifile_children()))
    else:
        # Homogeneous collection: same subtype for every child.
        subtypes = [subtypes] * n_children
    children_plan = OrderedDict()
    # Children are iterated in sorted name order so they line up with
    # tuple subtypes deterministically.
    for (child_name, child_fileobject), child_typ \
            in zip(sorted(obj_on_fs.get_multifile_children().items()),
                   subtypes):
        t, child_parser = \
            self.parser_finder.build_parser_for_fileobject_and_desiredtype(
                child_fileobject, child_typ, logger)
        children_plan[child_name] = \
            child_parser.create_parsing_plan(t, child_fileobject, logger,
                                             _main_call=False)
    return children_plan
Simply inspects the required type to find the base type expected for items of the collection and relies on the ParserFinder to find the parsing plan
54,487
def plot_stat_summary(df, fig=None):
    """Plot stats grouped by test capacitor load _and_ frequency.

    Draws a 3x2 grid: for each of RMSE%, cv%, and bias%, a colormap on
    the left and a histogram of the stat's distribution on the right.

    Parameters
    ----------
    df : DataFrame with the measurement data fed to ``calculate_stats``.
    fig : matplotlib figure, optional; a new 8x8in figure is created when
        omitted.
    """
    if fig is None:
        fig = plt.figure(figsize=(8, 8))
    grid = GridSpec(3, 2)
    stats = calculate_stats(df, groupby=['test_capacitor',
                                         'frequency']).dropna()
    for i, stat in enumerate(['RMSE %', 'cv %', 'bias %']):
        axis = fig.add_subplot(grid[i, 0])
        axis.set_title(stat)
        plot_colormap(stats, stat, axis=axis, fig=fig)
        axis = fig.add_subplot(grid[i, 1])
        axis.set_title(stat)
        try:
            axis.hist(stats[stat].values, bins=50)
        except AttributeError:
            # FIX: this was a Python 2 print statement — a syntax error
            # under Python 3, which other code in this file requires.
            print(stats[stat].describe())
    fig.tight_layout()
Plot stats grouped by test capacitor load _and_ frequency .
54,488
def load_manifest(raw_manifest, namespace=None, **kwargs):
    """Wrapper that builds a Manifest from various sources: an existing
    parser instance, or a source (path/URL) interpreted by
    ``_load_manifest_interpret_source``."""
    if isinstance(raw_manifest, configparser.RawConfigParser):
        # Already a parser — wrap it directly.
        return Manifest(raw_manifest)
    manifest = create_configparser()
    if not manifest.has_section('config'):
        manifest.add_section('config')
    _load_manifest_interpret_source(manifest, raw_manifest, **kwargs)
    return Manifest(manifest, namespace=namespace)
wrapper method which generates the manifest from various sources
54,489
def _load_manifest_from_url(manifest, url, verify_certificate=True,
                            username=None, password=None):
    """Load a URL body into ``manifest``, using HTTP basic auth when
    username and password are given.

    Raises ManifestException when the request fails.
    """
    try:
        if username and password:
            manifest_file_handler = StringIO(
                lib.authenticated_get(username, password, url,
                                      verify=verify_certificate
                                      ).decode("utf-8"))
        else:
            manifest_file_handler = StringIO(
                lib.cleaned_request('get', url,
                                    verify=verify_certificate).text)
        manifest.readfp(manifest_file_handler)
    except requests.exceptions.RequestException:
        logger.debug("", exc_info=True)
        error_message = sys.exc_info()[1]
        raise ManifestException(
            "There was an error retrieving {0}!\n {1}".format(
                url, str(error_message)))
load a url body into a manifest
54,490
def _load_manifest_from_file ( manifest , path ) : path = os . path . abspath ( os . path . expanduser ( path ) ) if not os . path . exists ( path ) : raise ManifestException ( "Manifest does not exist at {0}!" . format ( path ) ) manifest . read ( path ) if not manifest . has_option ( 'config' , 'source' ) : manifest . set ( 'config' , 'source' , str ( path ) )
load manifest from file
54,491
def formula_sections(self):
    """Return all formula sections, ordered by the dependency tree when
    one exists, otherwise in manifest order (excluding 'config')."""
    if self.dtree is not None:
        return self.dtree.order
    return [section for section in self.manifest.sections()
            if section != "config"]
Return all sections related to a formula re - ordered according to the depends section .
54,492
def is_affirmative(self, section, option):
    """Return True if the section/option combo exists and is set to a
    truthy value."""
    return self.has_option(section, option) and \
        lib.is_affirmative(self.get(section, option))
Return true if the section option combo exists and it is set to a truthy value .
54,493
def write(self, file_handle):
    """Write the current manifest state to ``file_handle``, persisting
    input values and the namespace into [config] first."""
    for k, v in self.inputs.write_values().items():
        self.set('config', k, v)
    self.set('config', 'namespace', self.namespace)
    self.manifest.write(file_handle)
write the current state to a file manifest
54,494
def get_context_dict(self):
    """Return a context dict of the desired state for template
    substitution.

    Keys are ``section:option`` for manifest values and ``config:<name>``
    for input values; every key additionally gets a ``|escaped`` variant
    holding a regex-escaped copy of its value.
    """
    context_dict = {}
    for s in self.sections():
        for k, v in self.manifest.items(s):
            context_dict["%s:%s" % (s, k)] = v
    for k, v in self.inputs.values().items():
        context_dict["config:{0}".format(k)] = v
    context_dict.update(self.additional_context_variables.items())
    context_dict.update(dict([("%s|escaped" % k, re.escape(str(v) or ""))
                              for k, v in context_dict.items()]))
    return context_dict
return a context dict of the desired state
54,495
def get(self, section, key, default=MANIFEST_NULL_KEY):
    """Return the manifest value for section/key, or ``default`` when the
    option is missing and a default was supplied."""
    if default is not MANIFEST_NULL_KEY and \
            not self.manifest.has_option(section, key):
        return default
    return self.manifest.get(section, key)
Returns the value if it exist or default if default is set
54,496
def __parse_namespace(self):
    """Parse the namespace: from the explicit [config] value, or derived
    from the manifest's source location; None when neither exists."""
    if self.manifest.has_option('config', 'namespace'):
        return self.manifest.get('config', 'namespace')
    elif self.manifest.has_option('config', 'source'):
        # Derive the namespace from the source URL/path.
        return NAMESPACE_REGEX.search(
            self.manifest.get('config', 'source')).groups()[0]
    else:
        logger.warn('Could not parse namespace implicitely')
        return None
Parse the namespace from various sources
54,497
def __generate_dependency_tree(self):
    """Build the DependencyTree for formula sections from their
    'depends' options.

    Raises ManifestException when the dependency graph is invalid.
    """
    dependency_dict = {}
    for s in self.manifest.sections():
        if s != "config":
            if self.manifest.has_option(s, 'depends'):
                # 'depends' entries may be separated by newlines or commas.
                dependency_list = [d.strip() for d
                                   in re.split('\n|,',
                                               self.manifest.get(s,
                                                                 'depends'))]
                dependency_dict[s] = dependency_list
            else:
                dependency_dict[s] = []
    try:
        return DependencyTree(dependency_dict)
    except DependencyTreeException:
        dte = sys.exc_info()[1]
        raise ManifestException(
            "Dependency tree for manifest is invalid! %s" % str(dte))
Generate the dependency tree object
54,498
def __substitute_objects(self, value, context_dict):
    """Recursively interpolate ``context_dict`` into ``value``.

    Dicts are processed value-by-value; strings go through ``%`` mapping
    interpolation (unresolvable keys are logged and the string returned
    untouched); everything else is returned as-is.
    """
    if type(value) == dict:
        return dict([(k, self.__substitute_objects(v, context_dict))
                     for k, v in value.items()])
    elif type(value) == str:
        try:
            return value % context_dict
        except KeyError:
            e = sys.exc_info()[1]
            logger.warn("Could not specialize %s! Error: %s" % (value, e))
            return value
    else:
        return value
recursively substitute value with the context_dict
54,499
def __setup_inputs(self):
    """Set up the Inputs object: gather every section's 'inputs'
    declaration, then seed declared inputs with values from [config]."""
    input_object = Inputs()
    for section in self.manifest.sections():
        if self.has_option(section, 'inputs'):
            input_object.add_inputs_from_inputstring(
                self.get(section, 'inputs'))
    for key, value in self.items('config'):
        # FIX: the original tested `is_input(s)` — the stale section
        # variable left over from the previous loop — instead of the
        # config key being iterated.
        if input_object.is_input(key):
            input_object.set_input(key, value)
    return input_object
Setup the inputs object