idx | question | target |
|---|---|---|
12,600 | def _save_potentials(self, directory): print('saving potentials') digits = int(np.ceil(np.log10(self.configs.configs.shape[0]))) for i in range(0, self.configs.configs.shape[0]): pot_data = self.get_potential(i) filename_raw = 'pot{0:0' + '{0}'.format(digits) + '}.dat' filename = directory + os.sep + filename_raw.format(i + 1) nodes = self.grid.nodes['sorted'][:, 1:3] all_data = np.hstack((nodes, pot_data[0][:, np.newaxis], pot_data[1][:, np.newaxis])) with open(filename, 'wb') as fid: np.savetxt(fid, all_data) | save potentials to a directory |
12,601 | def clear_measurements(self): mid_list = self.assignments.get('measurements', None) if mid_list is not None: for mid in mid_list: self.configs.delete_measurements(mid=mid) self.assignments['measurements'] = None | Forget any previous measurements |
12,602 | def measurements(self): mid = self.assignments.get('measurements', None) if mid is None: return_value = self.model(voltages=True, sensitivities=False, potentials=False) if return_value is None: print('cannot model') return cids = self.assignments['measurements'] measurements = np.vstack((self.configs.measurements[cids[0]], self.configs.measurements[cids[1]])).T return measurements | Return the measurements associated with this instance. |
12,603 | def _read_sensitivities(self, sens_dir): if self.assignments['sensitivities'] is not None: print('Sensitivities already imported. Will not overwrite!') return else: self.assignments['sensitivities'] = {} sens_files = sorted(glob(sens_dir + os.sep + 'sens*.dat')) for nr, filename in enumerate(sens_files): with open(filename, 'r') as fid: metadata = np.fromstring(fid.readline().strip(), sep=' ', count=2) meta_re = metadata[0] meta_im = metadata[1] sens_data = np.loadtxt(fid) cids = self.parman.add_data(sens_data[:, 2:4], [meta_re, meta_im]) self.assignments['sensitivities'][nr] = cids | import sensitivities from a directory |
12,604 | def _read_potentials(self, pot_dir): if self.assignments['potentials'] is not None: print('Potentials already imported. Will not overwrite!') return else: self.assignments['potentials'] = {} pot_files = sorted(glob(pot_dir + os.sep + 'pot*.dat')) for nr, filename in enumerate(pot_files): with open(filename, 'r') as fid: pot_data = np.loadtxt(fid) nids = self.nodeman.add_data(pot_data[:, 2:4]) self.assignments['potentials'][nr] = nids | import potentials from a directory |
12,605 | def get_potential(self, config_nr): if self.assignments['potentials'] is None: self._check_state() if self.can_model: self.model(potentials=True) nids = self.assignments['potentials'][config_nr] pot_data = [self.nodeman.nodevals[nid] for nid in nids] return pot_data | Return potential data for a given measurement configuration. |
12,606 | def get_sensitivity(self, config_nr): if self.assignments['sensitivities'] is None: self._check_state() if self.can_model: self.model(sensitivities=True) cids = self.assignments['sensitivities'][config_nr] sens_data = [self.parman.parsets[cid] for cid in cids] meta_data = [self.parman.metadata[cid] for cid in cids] return sens_data, meta_data | Return a sensitivity, as well as corresponding metadata, for a given measurement configuration. Indices start at zero. |
12,607 | def read_voltages(self, voltage_file): measurements_raw = np.loadtxt(voltage_file, skiprows=1) measurements = np.atleast_2d(measurements_raw) A = (measurements[:, 0] / 1e4).astype(int) B = (measurements[:, 0] % 1e4).astype(int) M = (measurements[:, 1] / 1e4).astype(int) N = (measurements[:, 1] % 1e4).astype(int) ABMN = np.vstack((A, B, M, N)).T if self.configs.configs is None: self.configs.configs = ABMN else: if not np.all(ABMN == self.configs.configs): for nr, (old_config, new_config) in enumerate(zip(self.configs.configs, ABMN)): if np.all(old_config == new_config): continue current_electrodes_are_equal = np.all(old_config[0:2] == new_config[0:2]) voltage_electrodes_are_switched = np.all(old_config[2:4] == new_config[4:1:-1]) if (current_electrodes_are_equal and voltage_electrodes_are_switched): if len(self.configs.measurements.keys()) > 0: raise Exception('need to switch electrode polarity, but ' + 'there are already measurements stored for ' + 'the old configuration!') else: self.configs.configs[nr, :] = new_config else: raise Exception('There was an error matching configurations of ' + 'voltages with configurations already imported') mid_mag = self.configs.add_measurements(measurements[:, 2]) mid_pha = self.configs.add_measurements(measurements[:, 3]) self.assignments['measurements'] = [mid_mag, mid_pha] | import voltages from a volt.dat file; electrode pairs are packed as A*1e4 + B (see the decoding sketch after the table) |
12,608 | def model(self, voltages=True, sensitivities=False, potentials=False, output_directory=None, silent=False): self._check_state() if self.can_model: if output_directory is not None: if not os.path.isdir(output_directory): os.makedirs(output_directory) tempdir = output_directory self._model(voltages, sensitivities, potentials, tempdir) else: raise IOError('output directory already exists: {0}'.format(output_directory)) else: with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir: self._model(voltages, sensitivities, potentials, tempdir, silent=silent) return 1 else: print('Sorry, not all required information to model are present') print('Check:') print('1) configurations present: self.configs.configs') print('2) is a model present') return None | Forward model the tomodir and read in the results |
12,609 | def _invert(self, tempdir, catch_output=True, **kwargs): nr_cores = kwargs.get('cores', 2) print('attempting inversion in directory: {0}'.format(tempdir)) pwd = os.getcwd() os.chdir(tempdir) self.save_to_tomodir('.') os.chdir('exe') binary = CRBin.get('CRTomo') print('Using binary: {0}'.format(binary)) print('calling CRTomo') env_omp = os.environ.get('OMP_NUM_THREADS', '') os.environ['OMP_NUM_THREADS'] = '{0}'.format(nr_cores) if catch_output: subprocess.check_output(binary, shell=True, stderr=subprocess.STDOUT) else: subprocess.call(binary, shell=True) os.environ['OMP_NUM_THREADS'] = env_omp print('finished') os.chdir(pwd) self.read_inversion_results(tempdir) | Internal function that runs an inversion using CRTomo. |
12,610 | def invert(self, output_directory=None, catch_output=True, **kwargs): self._check_state() if self.can_invert: if output_directory is not None: if not os.path.isdir(output_directory): os.makedirs(output_directory) tempdir = output_directory self._invert(tempdir, catch_output, **kwargs) else: raise IOError('output directory already exists: {0}'.format(output_directory)) else: with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir: self._invert(tempdir, catch_output, **kwargs) return 0 else: print('Sorry, no measurements present, cannot invert yet') return 1 | Invert this instance and import the result files |
12,611 | def read_inversion_results(self, tomodir): self._read_inversion_results(tomodir) self._read_inv_ctr(tomodir) self._read_resm_m(tomodir) self._read_eps_ctr(tomodir) | Import inversion results from a tomodir into this instance |
12,612 | def plot_eps_data_hist(self, dfs): if 'datum' in dfs[0]: dc_inv = True else: dc_inv = False nr_y = len(dfs) size_y = 5 / 2.54 * nr_y if dc_inv: nr_x = 1 else: nr_x = 3 size_x = 15 / 2.54 fig, axes = plt.subplots(nr_y, nr_x, figsize=(size_x, size_y)) axes = np.atleast_2d(axes) df = dfs[0] if dc_inv: ax = axes[0, 0] ax.hist(df['datum'] / df['eps_r'], 100) ax.set_xlabel(r'$-log(|R|) / \epsilon_r$') ax.set_ylabel(r'count') else: ax = axes[0, 0] ax.hist(df['-log(|R|)'] / df['eps'], 100) ax.set_xlabel(r'$-log(|R|)$') ax.set_ylabel(r'count') ax = axes[0, 1] ax.hist(df['-log(|R|)'] / df['eps_r'], 100) ax.set_xlabel(r'$-log(|R|) / \epsilon_r$') ax.set_ylabel(r'count') ax = axes[0, 2] phase_data = df['-Phase(rad)'] / df['eps_p'] if not np.all(np.isinf(phase_data) | np.isnan(phase_data)): ax.hist(phase_data, 100) ax.set_xlabel(r'$-\phi[rad] / \epsilon_p$') ax.set_ylabel(r'count') for it, df in enumerate(dfs[1:]): ax = axes[1 + it, 0] ax.hist(df['psi'], 100) rms = np.sqrt(1 / df['psi'].shape[0] * np.sum(df['psi'] ** 2)) ax.axvline(rms, color='k', linestyle='dashed') ax.set_title('iteration: {0}'.format(it)) ax.set_xlabel('psi') ax.set_ylabel(r'count') ax = axes[1 + it, 1] Rdat = df['Re(d)'] Rmod = df['Re(f(m))'] ax.scatter(Rdat, Rmod) ax.set_xlabel(r'$log(R_{data}~[\Omega])$') ax.set_ylabel(r'$log(R_{mod}~[\Omega])$') ax = axes[1 + it, 2] phidat = df['Im(d)'] phimod = df['Im(f(m))'] ax.scatter(phidat, phimod) ax.set_xlabel(r'$\phi_{data}~[mrad]$') ax.set_ylabel(r'$\phi_{mod}~[mrad]$') fig.tight_layout() fig.savefig('eps_plot_hist.png', dpi=300) | Plot histograms of data residuals and data error weighting |
12,613 | def _read_eps_ctr(tomodir): epsctr_file = tomodir + os.sep + 'inv' + os.sep + 'eps.ctr' if not os.path.isfile(epsctr_file): print('eps.ctr not found: {0}'.format(epsctr_file)) print(os.getcwd()) return 1 with open(epsctr_file, 'r') as fid: lines = fid.readlines() group = itertools.groupby(lines, lambda x: x == '\n') dfs = [] for x in group: if not x[0]: data = [y for y in x[1]] if data[0].startswith('IT') or data[0].startswith('PIT'): del data[0] data[0] = data[0].replace('-Phase (rad)', '-Phase(rad)') tfile = StringIO(''.join(data)) df = pd.read_csv(tfile, delim_whitespace=True, na_values=['Infinity']) dfs.append(df) return dfs | Parse a CRTomo eps.ctr file. |
12,614 | def _read_resm_m(self, tomodir): resm_file = tomodir + os.sep + 'inv' + os.sep + 'res_m.diag' if not os.path.isfile(resm_file): print('res_m.diag not found: {0}'.format(resm_file)) print(os.getcwd()) return 1 with open(resm_file, 'rb') as fid: first_line = fid.readline().strip() header_raw = np.fromstring(first_line, count=4, sep=' ') subdata = np.genfromtxt(fid) print(subdata.shape) pid = self.parman.add_data(subdata[:, 0]) self.assignments['resm'] = pid | Read in the resolution matrix of an inversion |
12,615 | def register_forward_model(self, pid_mag, pid_pha): self.register_magnitude_model(pid_mag) self.register_phase_model(pid_pha) | Register parameter sets as the forward models for magnitude and phase |
12,616 | def register_magnitude_model(self, pid): if self.assignments['forward_model'] is None: self.assignments['forward_model'] = [None, None] self.assignments['forward_model'][0] = pid | Set a given parameter model to the forward magnitude model |
12,617 | def register_phase_model(self, pid): if self.assignments['forward_model'] is None: self.assignments['forward_model'] = [None, None] self.assignments['forward_model'][1] = pid | Set a given parameter model to the forward phase model |
12,618 | def add_homogeneous_model(self, magnitude, phase=0): if self.assignments['forward_model'] is not None: print('model already set, will overwrite') magnitude_model = np.ones(self.grid.nr_of_elements) * magnitude phase_model = np.ones(self.grid.nr_of_elements) * phase pid_mag = self.parman.add_data(magnitude_model) pid_pha = self.parman.add_data(phase_model) self.assignments['forward_model'] = [pid_mag, pid_pha] return pid_mag, pid_pha | Add a homogeneous resistivity model to the tomodir. This is useful for synthetic measurements. |
12,619 | def show_parset(self, pid): fig, ax = plt.subplots() self.plot.plot_elements_to_ax(pid, ax=ax) return fig, ax | Plot a given parameter set |
12,620 | def cythonize(*args, **kwargs): global cythonize from Cython.Build import cythonize return cythonize(*args, **kwargs) | Dirty hack: only import cythonize at the time you use it. |
12,621 | def response(self, url): resp = requests.get(url).content return self.parseresponse(resp) | Grab an API response. |
12,622 | def errorhandle(self, resp): if self.format == 'json': parsed = xmltodict.parse(resp) errors = parsed[self.RESPONSE_TOKEN][self.ERROR_TOKEN] if type(errors) is list and len(errors) > 1: messages = ", ".join([" ".join(["{}: {}".format(k, v) for k, v in e.items()]) for e in errors]) else: overlimit = any('transaction limit' in msg.lower() for msg in errors.values()) if overlimit: raise APILimitExceeded("This API key has used up its daily quota of calls.") else: messages = " ".join(["{}: {}".format(k, v) for k, v in errors.items()]) elif self.format == 'xml': import xml.etree.ElementTree as ET errors = ET.fromstring(resp).findall(self.ERROR_TOKEN) messages = ", ".join(err.find('msg').text for err in errors) else: raise ValueError("Invalid API response format specified: {}.".format(self.format)) raise BustimeError("API returned: {}".format(messages)) | Parse API error responses and raise appropriate exceptions. |
12,623 | def parseresponse(self, resp): if sys.version_info.major > 2: resp = resp.decode('utf-8') if self.RESPONSE_TOKEN not in resp: raise BustimeError("The Bustime API returned an invalid response: {}".format(resp)) elif self.ERROR_TOKEN in resp: return self.errorhandle(resp) else: if self.format == 'json': return xmltodict.parse(resp)[self.RESPONSE_TOKEN] elif self.format == 'xml': return resp | Parse an API response. |
12,624 | def get_stack_refs(refs: list): refs = list(refs) refs.reverse() stack_refs = [] last_stack = None while refs: ref = refs.pop() if last_stack is not None and re.compile(r'v[0-9][a-zA-Z0-9-]*$').match(ref): stack_refs.append(StackReference(last_stack, ref)) else: try: with open(ref) as fd: data = yaml.safe_load(fd) ref = data['SenzaInfo']['StackName'] except (OSError, IOError): pass if refs: version = refs.pop() else: version = None stack_refs.append(StackReference(ref, version)) last_stack = ref return stack_refs | Returns a list of stack references with name and version. |
12,625 | def instance_for_arguments(self, arguments): model_instance = ModelInstance() for prior_model_tuple in self.prior_model_tuples: setattr(model_instance, prior_model_tuple.name, prior_model_tuple.prior_model.instance_for_arguments(arguments)) return model_instance | Creates a ModelInstance which has an attribute and class instance corresponding to every PriorModel attributed to this instance. |
12,626 | def mapper_from_partial_prior_arguments(self, arguments): original_prior_dict = {prior: prior for prior in self.priors} return self.mapper_from_prior_arguments({**original_prior_dict, **arguments}) | Creates a new model mapper from a dictionary mapping existing priors to new priors, keeping existing priors where no mapping is provided. |
12,627 | def mapper_from_prior_arguments(self, arguments): mapper = copy.deepcopy(self) for prior_model_tuple in self.prior_model_tuples: setattr(mapper, prior_model_tuple.name, prior_model_tuple.prior_model.gaussian_prior_model_for_arguments(arguments)) return mapper | Creates a new model mapper from a dictionary mapping existing priors to new priors. |
12,628 | def mapper_from_gaussian_tuples(self, tuples, a=None, r=None): prior_tuples = self.prior_tuples_ordered_by_id prior_class_dict = self.prior_class_dict arguments = {} for i, prior_tuple in enumerate(prior_tuples): prior = prior_tuple.prior cls = prior_class_dict[prior] mean = tuples[i][0] if a is not None and r is not None: raise exc.PriorException("Width of new priors cannot be both relative and absolute.") if a is not None: width_type = "a" value = a elif r is not None: width_type = "r" value = r else: width_type, value = conf.instance.prior_width.get_for_nearest_ancestor(cls, prior_tuple.name) if width_type == "r": width = value * mean elif width_type == "a": width = value else: raise exc.PriorException("Prior widths must be relative 'r' or absolute 'a' e.g. a, 1.0") if isinstance(prior, GaussianPrior): limits = (prior.lower_limit, prior.upper_limit) else: limits = conf.instance.prior_limit.get_for_nearest_ancestor(cls, prior_tuple.name) arguments[prior] = GaussianPrior(mean, max(tuples[i][1], width), *limits) return self.mapper_from_prior_arguments(arguments) | Creates a new model mapper from a list of floats describing the mean values of gaussian priors. The widths of the new priors are taken from the width_config. The new gaussian priors must be provided in the same order as the priors associated with the model. |
12,629 | def info(self): info = [] for prior_model_name, prior_model in self.prior_model_tuples: info.append(prior_model.name + '\n') info.extend([f"{prior_model_name}_{item}" for item in prior_model.info]) return '\n'.join(info) | Use the priors that make up the model_mapper to generate information on each parameter of the overall model. |
12,630 | def push(config, force=False): repo = config.repo active_branch = repo.active_branch if active_branch.name == "master": error_out("Can't commit when on the master branch. You really ought to do work in branches.") state = read(config.configfile) if not state.get("FORK_NAME"): info_out("Can't help you push the commit. Please run: gg config --help") return 0 try: repo.remotes[state["FORK_NAME"]] except IndexError: error_out("There is no remote called '{}'".format(state["FORK_NAME"])) destination = repo.remotes[state["FORK_NAME"]] if force: pushed, = destination.push(force=True) info_out(pushed.summary) else: pushed, = destination.push() if (pushed.flags & git.remote.PushInfo.REJECTED or pushed.flags & git.remote.PushInfo.REMOTE_REJECTED): error_out('The push was rejected ("{}")'.format(pushed.summary), False) try_force_push = input("Try to force push? [Y/n] ").lower().strip() if try_force_push not in ("no", "n"): pushed, = destination.push(force=True) info_out(pushed.summary) else: return 0 | Create and push the current branch. |
12,631 | def chkpath(path): if os.path.exists(path): return path else: msg = "{0} does not exist.".format(path) raise argparse.ArgumentTypeError(msg) | Checks if a path exists. |
12,632 | def readin_volt(filename): with open(filename, 'r') as fid: content = np.loadtxt(fid, skiprows=1, usecols=[0, 1, 2]) volt = content[:, 2] elecs = content[:, 0:2] return elecs, volt | Read in measurement data from a volt.dat file and return electrodes and measured resistance. |
12,633 | def save_volt(elecs, volt, filename): content = np.column_stack((elecs, volt, np.zeros(len(volt)))) with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%i %i %f %f') | Save the values in volt-format. |
12,634 | def main(): options = handle_options() elecs, d_obs = readin_volt(options.d_obs) elecs, d_est = readin_volt(options.d_est) elecs, d_estTC = readin_volt(options.d_estTC) volt_corr = calc_correction(d_obs, d_est, d_estTC) save_volt(elecs, volt_corr, options.output) | Function to remove temperature effect from field data |
12,635 | def recursive_update(default, custom): if not isinstance(default, dict) or not isinstance(custom, dict): raise TypeError('Params of recursive_update should be dicts') for key in custom: if isinstance(custom[key], dict) and isinstance(default.get(key), dict): default[key] = recursive_update(default[key], custom[key]) else: default[key] = custom[key] return default | Return a dict merged from default and custom |
12,636 | def cleanup(config, searchstring, force=False): repo = config.repo branches_ = list(find(repo, searchstring)) if not branches_: error_out("No branches found") elif len(branches_) > 1: error_out("More than one branch found.{}".format("\n\t".join([""] + [x.name for x in branches_]))) assert len(branches_) == 1 branch_name = branches_[0].name active_branch = repo.active_branch if branch_name == active_branch.name: error_out("Can't clean up the current active branch.") upstream_remote = None fork_remote = None state = read(config.configfile) origin_name = state.get("ORIGIN_NAME", "origin") for remote in repo.remotes: if remote.name == origin_name: upstream_remote = remote break if not upstream_remote: error_out("No remote called {!r} found".format(origin_name)) repo.heads.master.checkout() upstream_remote.pull(repo.heads.master) merged_branches = [x.strip() for x in repo.git.branch("--merged").splitlines() if x.strip() and not x.strip().startswith("*")] was_merged = branch_name in merged_branches certain = was_merged or force if not certain: certain = (input("Are you certain {} is actually merged? [Y/n] ".format(branch_name)).lower().strip() != "n") if not certain: return 1 if was_merged: repo.git.branch("-d", branch_name) else: repo.git.branch("-D", branch_name) fork_remote = None state = read(config.configfile) for remote in repo.remotes: if remote.name == state.get("FORK_NAME"): fork_remote = remote break if fork_remote: fork_remote.push(":" + branch_name) info_out("Remote branch on fork deleted too.") | Deletes a found branch locally and remotely. |
12,637 | def calc_correction(temp, mag, add=False, T_std=10, m=0.021): if mag.shape[1] == 3: if add: data_x = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 0] data_y = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 1] data_z = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: data_x = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 0] data_y = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 1] data_z = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: if add: data_i = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag return data_i else: data_std = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag return data_std | Function to add or subtract the temperature effect to/from given data. The function can be called from Python scripts. For application via the command line in a file system, use the script td_correct_temperature.py. The data is taken and given in Ohmm. See the worked correction example after the table. |
12,638 | def save_mag_to_file(mag, filename, rhofile): if rhofile: null = np.zeros(len(mag)) if mag.shape[1] == 3: null = np.column_stack((null, null, null, null)) result = np.column_stack((mag, null)) with open(filename, 'w') as fid: fid.write('{0}\n'.format(mag.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(result), fmt='%f') else: with open('inv/rho00.mag', 'r') as fid: coor = np.loadtxt(fid, skiprows=1, usecols=[0, 1]) if mag.shape[1] == 3: logx = [math.log(d, 10) for d in mag[:, 0]] logy = [math.log(d, 10) for d in mag[:, 1]] logz = [math.log(d, 10) for d in mag[:, 2]] mag_log = np.column_stack((logx, logy, logz)) else: mag_log = [math.log(d, 10) for d in mag] content = np.column_stack((coor[:, 0], coor[:, 1], mag_log)) with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%f') | Save the values in rho- or mag-format. |
12,639 | def main(): options = handle_options() tempdata = readin_temp(options.temp_file) magdata = readin_rho(options.filename, options.rhofile, aniso=options.aniso) mag_corr = calc_correction(temp=tempdata, mag=magdata, add=options.add, T_std=options.T_std, m=options.m) save_mag_to_file(mag_corr, options.output, options.rhofile) | Function to add or subtract the temperature effect to/from data in a tomodir |
12,640 | def singular(plural): if plural.endswith('ies'): return plural[:-3] + 'y' if plural.endswith('s'): return plural[:-1] raise ValueError('unknown plural form %r' % (plural,)) | Take a plural English word and turn it into singular |
12,641 | def plot_single_configuration(self, config_nr, sens_file): indices = elem.load_column_file_to_elements_advanced(sens_file, [2, 3], False, False) elem.plt_opt.title = '' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = r'fill' elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax(indices[0], ax, scale='asinh', no_cb=False) ax.scatter(self.sens_centers[config_nr, 0], self.sens_centers[config_nr, 1], marker='*', s=50, color='w', edgecolors='w') self.color_electrodes(config_nr, ax) sensf = sens_file.split('sens')[-1] sensf = sensf.split('.')[0] out = 'sens_center_' + sensf + '.png' fig.savefig(out, bbox_inches='tight', dpi=300) fig.clf() plt.close(fig) | plot sensitivity distribution with center of mass for a single configuration. The electrodes used are colored. |
12,642 | def plot_sens_center(self, frequency=2): try: colors = np.loadtxt(self.volt_file, skiprows=1) except IOError: print('IOError opening {0}'.format(self.volt_file)) exit() if len(colors.shape) > 1: print('Artificial or Multi frequency data') colors = colors[:, frequency].flatten() colors = colors[~np.isnan(colors)] elem.load_elem_file(self.elem_file) elem.load_elec_file(self.elec_file) nr_elements = len(elem.element_type_list[0]) elem.element_data = np.zeros((nr_elements, 1)) * np.nan elem.plt_opt.title = ' ' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = self.cblabel elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax(0, ax, scale='linear', no_cb=True) ax.scatter(self.sens_centers[:, 0], self.sens_centers[:, 1], c=colors, s=100, edgecolors='none') cb_pos = mpl_get_cb_bound_next_to_plot(ax) ax1 = fig.add_axes(cb_pos, frame_on=True) cmap = mpl.cm.jet_r norm = mpl.colors.Normalize(vmin=np.nanmin(colors), vmax=np.nanmax(colors)) mpl.colorbar.ColorbarBase(ax1, cmap=cmap, norm=norm, orientation='vertical') fig.savefig(self.output_file, bbox_inches='tight', dpi=300) | plot sensitivity center distribution for all configurations in config.dat. The centers of mass are colored by the data given in volt_file. |
12,643 | def color_electrodes(self, config_nr, ax): electrodes = self.configs[~np.isnan(self.configs).any(1)] electrodes = electrodes.astype(int) conf = [] for dim in range(0, electrodes.shape[1]): c = electrodes[config_nr, dim] a = np.round(c / 10000) - 1 b = np.mod(c, 10000) - 1 conf.append(a) conf.append(b) Ex, Ez = elem.get_electrodes() color = ['#ffed00', '#ffed00', '#ff0000', '#ff0000'] ax.scatter(Ex[conf], Ez[conf], c=color, marker='s', s=60, clip_on=False, edgecolors='k') | Color the electrodes used in a specific configuration. Voltage electrodes are yellow; current electrodes are red. |
12,644 | def compute_sens(self, elem_file, elec_file, configs): CRMod_config = CRMod.config() if self.options.sink is not None: print('2D mode with sink {0}'.format(self.options.sink)) CRMod_config['2D'] = 0 CRMod_config['fictitious_sink'] = 'T' CRMod_config['sink_node'] = self.options.sink CRMod_config['write_sens'] = 'T' CRMod_instance = CRMod.CRMod(CRMod_config) CRMod_instance.elemfile = elem_file CRMod_instance.elecfile = elec_file CRMod_instance.configdata = configs resistivity = 100 with open(elem_file, 'r') as fid: fid.readline() elements = int(fid.readline().strip().split()[1]) rhodata = '{0}\n'.format(elements) for i in range(0, elements): rhodata += '{0} 0\n'.format(resistivity) CRMod_instance.rhodata = rhodata CRMod_instance.run_in_tempdir() volt_file = CRMod_instance.volt_file sens_files = CRMod_instance.sens_files return sens_files, volt_file, CRMod_instance.temp_dir | Compute the sensitivities for the given input data. A CRMod instance is called to create the sensitivity files. |
12,645 | def compute_center_of_mass(self, filename): sens = np.loadtxt(filename, skiprows=1) X = sens[:, 0] Z = sens[:, 1] C = sens[:, 2] x_center = 0 z_center = 0 sens_sum = 0 for i in range(0, C.shape[0]): if self.weight == 0: weight = C[i] if self.weight == 1: weight = np.abs(C[i]) if self.weight == 2: weight = np.log10(np.abs(C[i])) if self.weight == 3: weight = np.sqrt(np.abs(C[i])) x_center += X[i] * weight z_center += Z[i] * weight sens_sum += weight x_center /= sens_sum z_center /= sens_sum return (x_center, z_center) | Center of mass is computed using the sensitivity data output from CRMod. Data weights can be applied using command-line options. See the weighted-centroid sketch after the table. |
12,646 | def handle_cmd_options(): parser = OptionParser() parser.add_option("-s", "--silent", action="store_true", dest="silent", help="print any warnings", default=False) (options, args) = parser.parse_args() return options, args | Get the options from the command line. |
12,647 | def move(fname, folder, options): if os.path.isfile(fname): shutil.move(fname, folder) else: if options.silent is False: print('{0} missing'.format(fname)) | Move a file to a directory if it exists |
12,648 | def print_tb(tb, limit=None, file=None): if file is None: file = sys.stderr if limit is None: if hasattr(sys, 'tracebacklimit'): limit = sys.tracebacklimit file.write('\n'.join(format_tb(tb, limit)) + '\n') | Print up to limit stack trace entries from the traceback tb. |
12,649 | def print_exception(etype, value, tb, limit=None, file=None, chain=True): import traceback if file is None: file = sys.stderr if tb: file.write('Traceback (most recent call last):\n') print_tb(tb, limit, file) lines = traceback.format_exception_only(etype, value) for line in lines: file.write(line) | Print exception up to limit stack trace entries from tb to file. |
12,650 | def construct(self, data): occurrences = {} main_occurrences = {} for pkg in data["data"]["dependencies"]: package = pkg["package"] for item in pkg["dependencies"]: dep = item["name"] if package != ".": deps = map(lambda l: "%s/%s" % (package, l), item["location"]) else: deps = item["location"] if dep not in occurrences: occurrences[dep] = deps else: occurrences[dep] = occurrences[dep] + deps self.occurrences = occurrences for main in data["data"]["main"]: filename = main["filename"] for dep in main["dependencies"]: if dep not in main_occurrences: main_occurrences[dep] = [filename] else: main_occurrences[dep].append(filename) self.main_occurrences = main_occurrences self.test_directories = sorted(map(lambda l: l["test"], data["data"]["tests"])) self.provided_packages = sorted(data["data"]["packages"]) imported_packages = [] imported_native_packages = [] for path in occurrences: try: self.ipparser.parse(path) except ValueError: continue if self.ipparser.isNative(): imported_native_packages.append(path) else: imported_packages.append(path) self.imported_packages = sorted(imported_packages) self.imported_native_packages = sorted(imported_native_packages) self.main_packages = map(lambda l: l["filename"], data["data"]["main"]) | Construct info about a project from an artefact |
12,651 | def join_import_from(self, import_spec): if not self.isroot and not self.ispkg: parent = self.name.rpartition('.')[0] else: parent = self.name return join_import_from(import_spec, parent) | Joins a relative import like `from .foo import bar` with this module as its parent module. If the module is not a root module or package root, it will be joined with the package root. |
12,652 | def is_tomodir(directory): if os.path.isdir(directory): if (os.path.isdir(directory + "/exe") and os.path.isdir(directory + "/config") and os.path.isdir(directory + "/rho") and os.path.isdir(directory + "/inv") and os.path.isdir(directory + "/mod")): return True else: return False else: return False | Check if the supplied directory is a tomodir |
12,653 | def td_is_finished(tomodir): if not is_tomodir(tomodir): raise Exception('Supplied directory is not a tomodir!') if (os.path.isfile(tomodir + os.sep + 'config/config.dat') and os.path.isfile(tomodir + os.sep + 'rho/rho.dat') and os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crmod.cfg') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): crmod_is_finished = True else: crmod_is_finished = False if (os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crtomo.cfg') and os.path.isfile(tomodir + os.sep + 'inv/inv.ctr') and os.path.isfile(tomodir + os.sep + 'inv/run.ctr') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): with open(tomodir + os.sep + 'inv/run.ctr', 'r') as fid: lines = fid.readlines() crtomo_is_finished = False for line in lines[-5:]: test_line = line.strip() regex = re.compile('CPU') result = regex.match(test_line) if result is not None: crtomo_is_finished = True else: crtomo_is_finished = False return crmod_is_finished, crtomo_is_finished | Return the state of modeling and inversion for a given tomodir. The result does not take into account sensitivities or potentials as optionally generated by CRMod. |
12,654 | def is_sipdir(directory): is_sipdir = True if not os.path.isfile(directory + os.sep + 'frequencies.dat'): is_sipdir = False if not os.path.isdir(directory + os.sep + 'invmod'): is_sipdir = False return is_sipdir | Simple check if the supplied directory is a SIP directory. |
12,655 | def sipdir_is_finished(sipdir): if not is_sipdir(sipdir): raise Exception('Directory is not a valid SIP directory!') subdirs_raw = sorted(glob.glob(sipdir + os.sep + 'invmod' + os.sep + '*')) subdirs = [x for x in subdirs_raw if os.path.isdir(x)] crmod_finished = True crtomo_finished = True for subdir in subdirs: subcrmod, subcrtomo = td_is_finished(subdir) if not subcrmod: crmod_finished = False if not subcrtomo: crtomo_finished = False return crmod_finished, crtomo_finished | Return the state of modeling and inversion for a given SIP dir. The result does not take into account sensitivities or potentials as optionally generated by CRMod. |
12,656 | def enable_neutron_hack(self, os_username, os_password, os_project_id, os_auth_url): self.yum_install(['python-neutronclient']) self.send_file(pkg_data_filename('static', 'ovb_fix_neutron_addr'), '/usr/local/bin/ovb_fix_neutron_addr', unix_mode=0o755) content = unit = 'ovb_fix_neutron_addr.service' self.create_file('/usr/lib/systemd/system/%s' % unit, content.format(os_username=os_username, os_password=protect_password(os_password), os_project_id=os_project_id, os_auth_url=os_auth_url)) self.run('systemctl enable %s' % unit) self.run('systemctl start %s' % unit) | Enable the neutron hack on the undercloud. |
12,657 | def patch_ironic_ramdisk(self): tmpdir = self.run('mktemp -d')[0].rstrip('\n') self.run('cd {tmpdir}; zcat /home/stack/ironic-python-agent.initramfs| cpio -id'.format(tmpdir=tmpdir)) self.send_file(pkg_data_filename('static', 'ironic-wipefs.patch'), '/tmp/ironic-wipefs.patch') self.run('cd {tmpdir}; patch -p0 < /tmp/ironic-wipefs.patch'.format(tmpdir=tmpdir)) self.run('cd {tmpdir}; find . | cpio --create --format=newc > /home/stack/ironic-python-agent.initramfs'.format(tmpdir=tmpdir)) | Clean the disk before flushing the new image. |
12,658 | def show_menu(title, options, default=None, height=None, width=None, multiselect=False, precolored=False): plugins = [FilterPlugin()] if any(isinstance(opt, OptionGroup) for opt in options): plugins.append(OptionGroupPlugin()) if title: plugins.append(TitlePlugin(title)) if precolored: plugins.append(PrecoloredPlugin()) menu = Termenu(options, default=default, height=height, width=width, multiselect=multiselect, plugins=plugins) return menu.show() | Shows an interactive menu in the terminal. |
12,659 | def pluggable(method): def wrapped(self, *args, **kwargs): if hasattr(self, "_plugins"): return getattr(self._plugins[-1], method.__name__)(*args, **kwargs) else: return method(self, *args, **kwargs) wrapped.original = method return wrapped | Mark a class method as extendable with plugins. |
12,660 | def register_plugin(host, plugin): class OriginalMethods(object): def __getattr__(self, name): return lambda *args, **kwargs: getattr(host, name).original(host, *args, **kwargs) if not hasattr(host, "_plugins"): host._plugins = [OriginalMethods()] plugin.parent = host._plugins[-1] plugin.host = host host._plugins.append(plugin) | Register a plugin with a host object. |
12,661 | def chdir(self, path=None): if path is None: self._cwd = None return if not stat.S_ISDIR(self.stat(path).st_mode): raise SFTPError(errno.ENOTDIR, "%s: %s" % (os.strerror(errno.ENOTDIR), path)) self._cwd = b(self.normalize(path)) | Change the current directory of this SFTP session. Since SFTP doesn't really have the concept of a current working directory, this is emulated by Paramiko. Once you use this method to set a working directory, all operations on this SFTPClient object will be relative to that path. You can pass in None to stop using a current working directory. |
12,662 | def get_int(self): byte = self.get_bytes(1) if byte == max_byte: return util.inflate_long(self.get_binary()) byte += self.get_bytes(3) return struct.unpack('>I', byte)[0] | Fetch an int from the stream. |
12,663 | def set_logger(name, level='INFO', fmt=None, datefmt=None, propagate=1, remove_handlers=False): logger = logging.getLogger(name) logger.setLevel(getattr(logging, level)) logger.propagate = propagate if remove_handlers: logger.handlers = [] return handler = None for h in logger.handlers: if isinstance(h, logging.StreamHandler): handler = h break if not handler: handler = logging.StreamHandler() logger.addHandler(handler) formatter_kwgs = {} for i in ('fmt', 'datefmt'): if locals()[i] is not None: formatter_kwgs[i] = locals()[i] handler.setFormatter(BaseFormatter(**formatter_kwgs)) | This function will clear the previous handlers and set only one handler, which will be a StreamHandler, for the logger. |
12,664 | def format(self, record): self._format_record(record) record_dict = {} for k, v in record.__dict__.items(): if isinstance(k, str): k = decode_(k, 'utf8') if isinstance(v, str): v = decode_(v, 'utf8', 'replace') record_dict[k] = v if 'color' in self.fmt or 'end_color' in self.fmt: record_dict['color'], record_dict['end_color'] = _color(record.levelno) log = self.ufmt % record_dict if record.exc_text: if log[-1:] != '\n': log += '\n' log += decode_(record.exc_text, 'utf8', 'replace') log = log.replace('\n', '\n' + self.tab) return log | Return the formatted log record as unicode |
12,665 | def list(self, source_ids=None, seniority="all", stage=None, date_start="1494539999", date_end=TIMESTAMP_NOW, filter_id=None, page=1, limit=30, sort_by='ranking', filter_reference=None, order_by=None): query_params = {} query_params["date_end"] = _validate_timestamp(date_end, "date_end") query_params["date_start"] = _validate_timestamp(date_start, "date_start") if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) query_params["limit"] = _validate_limit(limit) query_params["page"] = _validate_page(page) query_params["seniority"] = _validate_seniority(seniority) query_params["sort_by"] = _validate_sort_by(sort_by) query_params["source_ids"] = json.dumps(_validate_source_ids(source_ids)) query_params["stage"] = _validate_stage(stage) query_params["order_by"] = order_by response = self.client.get("profiles", query_params) return response.json() | Retrieve all profiles that match the query parameters. |
12,666 | def add(self, source_id=None, file_path=None, profile_reference="", timestamp_reception=None, training_metadata=[]): data = {} data["source_id"] = _validate_source_id(source_id) data["profile_reference"] = _validate_profile_reference(profile_reference) data["timestamp_reception"] = _validate_timestamp(timestamp_reception, "timestamp_reception") data["training_metadata"] = _validate_training_metadata(training_metadata) files = _get_file_metadata(file_path, profile_reference) response = None with open(file_path, 'rb') as in_file: files = (files[0], in_file, files[2]) response = self.client.post("profile", data=data, files={"file": files}) return response.json() | Add a profile resume to a source id. |
12,667 | def addList(self, source_id, dir_path, is_recurcive=False, timestamp_reception=None, training_metadata=[]): if not path.isdir(dir_path): raise ValueError(dir_path + ' is not a directory') files_to_send = _get_files_from_dir(dir_path, is_recurcive) succeed_upload = {} failed_upload = {} for file_path in files_to_send: try: resp = self.add(source_id=source_id, file_path=file_path, profile_reference="", timestamp_reception=timestamp_reception, training_metadata=training_metadata) if resp['code'] != 200 and resp['code'] != 201: failed_upload[file_path] = ValueError('Invalid response: ' + str(resp)) else: succeed_upload[file_path] = resp except BaseException as e: failed_upload[file_path] = e result = {'success': succeed_upload, 'fail': failed_upload} return result | Add all profiles from a given directory. |
12,668 | def get(self, source_id=None, profile_id=None, profile_reference=None): query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) response = self.client.get('profile', query_params) return response.json() | Retrieve the profile information associated with profile id. |
12,669 | def set(self, source_id=None, profile_id=None, filter_id=None, stage=None, profile_reference=None, filter_reference=None): data = {} data["source_id"] = _validate_source_id(source_id) if profile_id: data["profile_id"] = _validate_profile_id(profile_id) if filter_id: data["filter_id"] = _validate_filter_id(filter_id) if profile_reference: data["profile_reference"] = _validate_profile_reference(profile_reference) if filter_reference: data["filter_reference"] = _validate_filter_reference(filter_reference) data["stage"] = _validate_stage(stage) response = self.client.patch('profile/stage', data=data) return response.json() | Edit the profile stage given a filter. |
12,670 | def get(self, source_id=None, profile_id=None, profile_reference=None, filter_id=None, filter_reference=None): query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) response = self.client.get('profile/revealing', query_params) return response | Retrieve the interpretability information. |
12,671 | def check(self, profile_data, training_metadata=[]): data = {"profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata)} response = self.client.post("profile/json/check", data=data) return response.json() | Use the api to check whether the profile_data is valid. |
12,672 | def add(self, source_id, profile_data, training_metadata=[], profile_reference=None, timestamp_reception=None): data = {"source_id": _validate_source_id(source_id), "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), "profile_reference": profile_reference} if timestamp_reception is not None: data['timestamp_reception'] = _validate_timestamp(timestamp_reception, 'timestamp_reception') response = self.client.post("profile/json", data=data) return response.json() | Use the api to add a new profile using profile_data. |
12,673 | def md5sum(self, f): m = hashlib.md5() with open(f, 'rb') as fh: while 1: chunk = fh.read(BUF_SIZE) if not chunk: break m.update(chunk) return m.hexdigest() | md5sums a file, returning the hex digest |
12,674 | def iterdupes(self, compare=None, filt=None): if not compare: compare = self.md5sum seen_siz = {} seen_sum = {} size_func = lambda x: os.stat(x).st_size for (fsize, f) in self.iteritems(want_dirs=False, func=size_func, filt=filt): if fsize not in seen_siz: seen_siz[fsize] = f continue else: if seen_siz[fsize]: chksum = compare(seen_siz[fsize]) if chksum in seen_sum: yield (chksum, seen_siz[fsize]) else: seen_sum[chksum] = seen_siz[fsize] seen_siz[fsize] = None chksum = compare(f) if chksum in seen_sum: if seen_sum[chksum]: yield (chksum, seen_sum[chksum]) seen_sum[chksum] = None yield (chksum, f) else: seen_sum[chksum] = f | Streaming item iterator with low-overhead duplicate file detection |
12,675 | def objects_to_root(objects: List) -> Root: def _to_tree(objs: Iterable) -> Dict: path_tree = {} for obj in objs: is_dir = obj.key.endswith('/') chunks = [chunk for chunk in obj.key.split('/') if chunk] chunk_count = len(chunks) tmp = path_tree for i, chunk in enumerate(chunks): is_last_chunk = i == chunk_count - 1 if is_last_chunk and not is_dir: tmp[chunk] = obj else: if chunk not in tmp: tmp[chunk] = {} tmp = tmp[chunk] return path_tree def _to_entity(key: str, value: Union[Dict, Any]) -> Entity: if isinstance(value, dict): return Directory(key, {key_: _to_entity(key_, value_) for key_, value_ in value.items()}) return File(pathlib.PurePath(value.key).name, value.size, value.e_tag.strip('"')) tree = _to_tree(objects) return Root({pathlib.PurePath(key).name: _to_entity(key, value) for key, value in tree.items()}) | Convert a list of s3 ObjectSummaries into a directory tree. |
12,676 | def _delete(self, paths: Iterable[str]) -> None: for chunk in util.chunk(paths, self._MAX_DELETES_PER_REQUEST): keys = list([self._prefix + key for key in chunk]) logger.info('Deleting %d objects (%s)', len(keys), ', '.join(keys)) response = self._bucket.delete_objects(Delete={'Objects': [{'Key': key} for key in keys], 'Quiet': True}) logger.debug('Delete objects response: %s', response) | Delete a collection of paths from S3. |
12,677 | def _upload(self, items: Iterable[Tuple[str, str]]) -> None: for src, key in items: logger.info(f'Uploading {src} to {key}') mimetype, _ = mimetypes.guess_type(src) if mimetype is None: logger.warning(f'Could not guess MIME type for {src}') mimetype = 'application/octet-stream' logger.debug(f'Deduced MIME type: {mimetype}') self._bucket.upload_file(src, key, ExtraArgs={'ContentType': mimetype}) | Upload a collection of paths to S3. |
12,678 | def rotmat(alpha): R = np.array(((np.cos(alpha), -np.sin(alpha)), (np.sin(alpha), np.cos(alpha)))) return R | Rotate around the z-axis |
12,679 | def apply_async(self, args=None, kwargs=None, **options): result = super(LoggedTask, self).apply_async(args=args, kwargs=kwargs, **options) log.info('Task {}[{}] submitted with arguments {}, {}'.format(self.name, result.id, args, kwargs)) return result | Emit a log statement when the task is submitted. |
12,680 | def on_retry(self, exc, task_id, args, kwargs, einfo): super(LoggedTask, self).on_retry(exc, task_id, args, kwargs, einfo) log.warning('[{}] retried due to {}'.format(task_id, getattr(einfo, 'traceback', None))) | Capture the exception that caused the task to be retried, if any. |
12,681 | def on_failure(self, exc, task_id, args, kwargs, einfo): log.error('[{}] failed due to {}'.format(task_id, getattr(einfo, 'traceback', None))) super(LoggedTask, self).on_failure(exc, task_id, args, kwargs, einfo) | Capture the exception that caused the task to fail, if any. |
12,682 | def nodes_to_object(self, node, object): "Map all child nodes to one object's attributes" for n in list(node): self.node_to_object(n, object) | Map all child nodes to one object's attributes |
12,683 | def node_to_object(self, node, object): "Map a single node to one object's attributes" attribute = self.to_lower(node.tag) attribute = "_yield" if attribute == "yield" else attribute try: valueString = node.text or "" value = float(valueString) except ValueError: value = node.text try: setattr(object, attribute, value) except AttributeError: sys.stderr.write("Attribute <%s> not supported." % attribute) | Map a single node to one object's attributes |
12,684 | def parse(self, xml_file): "Get a list of parsed recipes from BeerXML input" recipes = [] with open(xml_file, "rt") as f: tree = ElementTree.parse(f) for recipeNode in tree.iter(): if self.to_lower(recipeNode.tag) != "recipe": continue recipe = Recipe() recipes.append(recipe) for recipeProperty in list(recipeNode): tag_name = self.to_lower(recipeProperty.tag) if tag_name == "fermentables": for fermentable_node in list(recipeProperty): fermentable = Fermentable() self.nodes_to_object(fermentable_node, fermentable) recipe.fermentables.append(fermentable) elif tag_name == "yeasts": for yeast_node in list(recipeProperty): yeast = Yeast() self.nodes_to_object(yeast_node, yeast) recipe.yeasts.append(yeast) elif tag_name == "hops": for hop_node in list(recipeProperty): hop = Hop() self.nodes_to_object(hop_node, hop) recipe.hops.append(hop) elif tag_name == "miscs": for misc_node in list(recipeProperty): misc = Misc() self.nodes_to_object(misc_node, misc) recipe.miscs.append(misc) elif tag_name == "style": style = Style() recipe.style = style self.nodes_to_object(recipeProperty, style) elif tag_name == "mash": for mash_node in list(recipeProperty): mash = Mash() recipe.mash = mash if self.to_lower(mash_node.tag) == "mash_steps": for mash_step_node in list(mash_node): mash_step = MashStep() self.nodes_to_object(mash_step_node, mash_step) mash.steps.append(mash_step) else: self.nodes_to_object(mash_node, mash) else: self.node_to_object(recipeProperty, recipe) return recipes | Get a list of parsed recipes from BeerXML input |
12,685 | def to_lower(self, string): "Helper function to transform strings to lower case" value = None try: value = string.lower() except AttributeError: value = "" finally: return value | Helper function to transform strings to lower case |
12,686 | def _to_dot_key(cls, section, key=None): if key: return (NON_ALPHA_NUM.sub('_', section.lower()), NON_ALPHA_NUM.sub('_', key.lower())) else: return NON_ALPHA_NUM.sub('_', section.lower()) | Return the section and key in dot notation format. |
12,687 | def save(self, target_file=None, as_template=False): self._read_sources() if not target_file: if not self._last_source: raise AttributeError('Target file is required when last source is not set during instantiation') target_file = self._last_source output = str(self) if as_template: output_tmpl = [] for line in output.split('\n'): if line and not line.startswith('#'): line = '# %s' % line output_tmpl.append(line) output = '\n'.join(output_tmpl) with open(target_file, 'w') as fp: fp.write(output) | Save the config |
12,688 | def _parse_extra(self, fp): comment = '' section = '' fp.seek(0) for line in fp: line = line.rstrip() if not line: if comment: comment += '\n' continue if line.startswith('#'): comment += line + '\n' continue if line.startswith('['): section = line.strip('[]') self._add_dot_key(section) if comment: self._comments[section] = comment.rstrip() elif CONFIG_KEY_RE.match(line): key = line.split('=', 1)[0].strip() self._add_dot_key(section, key) if comment: self._comments[(section, key)] = comment.rstrip() comment = '' if comment: self._comments[self.LAST_COMMENT_KEY] = comment | Parse and store the config comments, and create maps for dot notation lookup |
12,689 |
def _typed_value(self, value):
    if value not in self._value_cache:
        new_value = value
        if is_int(value):
            new_value = int(value)
        elif is_float(value):
            new_value = float(value)
        elif is_bool(value):
            new_value = to_bool(value)
        elif is_none(value):
            new_value = None
        self._value_cache[value] = new_value
    return self._value_cache[value]
| Transform string value to an actual data type of the same value.
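Editor's note on the row above: _typed_value depends on is_int / is_float / is_bool / is_none / to_bool helpers that the dump does not include. The stand-ins below are plausible reconstructions, not the source library's code; note that checking int before float preserves the coercion order used in _typed_value:

    def is_int(value):
        # True when the string parses as an integer
        try:
            int(value)
            return True
        except (TypeError, ValueError):
            return False

    def is_float(value):
        try:
            float(value)
            return True
        except (TypeError, ValueError):
            return False

    def is_bool(value):
        return value.lower() in ('true', 'false', 'yes', 'no', 'on', 'off')

    def is_none(value):
        return value.lower() == 'none'

    def to_bool(value):
        return value.lower() in ('true', 'yes', 'on')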
12,690 |
def add_section(self, section, comment=None):
    self._read_sources()
    if self._to_dot_key(section) in self._dot_keys:
        raise DuplicateSectionError(section)
    self._parser.add_section(section)
    self._add_dot_key(section)
    if comment:
        self._set_comment(section, comment)
| Add a section
12,691 |
def _set_comment(self, section, comment, key=None):
    if '\n' in comment:
        comment = '\n# '.join(comment.split('\n'))
    comment = '# ' + comment
    if key:
        self._comments[(section, key)] = comment
    else:
        self._comments[section] = comment
| Set a comment for section or key
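Editor's note on rows 12,686-12,691: together they sketch a comment-preserving config wrapper with dot-notation lookup. The NON_ALPHA_NUM regex used by _to_dot_key is not shown in the dump; a minimal runnable sketch with an assumed pattern:

    import re

    # assumed pattern; the source library's definition may differ
    NON_ALPHA_NUM = re.compile(r'[^A-Za-z0-9]')

    # section and key names are normalized into attribute-friendly dot keys
    print(NON_ALPHA_NUM.sub('_', 'Web Server'.lower()))   # -> web_server
    print(NON_ALPHA_NUM.sub('_', 'max-clients'.lower()))  # -> max_clients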
12,692 |
def _sample_actions(self,
                    state: Sequence[tf.Tensor]) -> Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]:
    default = self.compiler.compile_default_action(self.batch_size)
    bound_constraints = self.compiler.compile_action_bound_constraints(state)
    action = self._sample_action(bound_constraints, default)
    n, action, checking = self._check_preconditions(state, action, bound_constraints, default)
    return action, n, checking
| Returns sampled action fluents and tensors related to the sampling.
12,693 |
def _check_preconditions(self,
                         state: Sequence[tf.Tensor],
                         action: Sequence[tf.Tensor],
                         bound_constraints: Dict[str, Constraints],
                         default: Sequence[tf.Tensor]) -> Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]:

    def condition(i, a, checking):
        # loop while at least one batch entry still fails its preconditions
        not_checking = tf.reduce_any(tf.logical_not(checking))
        return not_checking

    def body(i, a, checking):
        new_action = []
        new_sampled_action = self._sample_action(bound_constraints, default)
        new_preconds_checking = self.compiler.compile_action_preconditions_checking(
            state, new_sampled_action)
        for action_fluent, new_sampled_action_fluent in zip(a, new_sampled_action):
            # keep fluents that already passed; replace the rest with fresh samples
            new_action_fluent = tf.where(checking, action_fluent, new_sampled_action_fluent)
            new_action.append(new_action_fluent)
        new_action = tuple(new_action)
        new_checking = tf.logical_or(checking, new_preconds_checking)
        return (i + 1, new_action, new_checking)

    i0 = tf.constant(0)
    preconds_checking = self.compiler.compile_action_preconditions_checking(state, action)
    return tf.while_loop(condition, body, loop_vars=[i0, action, preconds_checking])
| Samples action fluents until all preconditions are satisfied.
12,694 |
def _sample_action(self,
                   constraints: Dict[str, Constraints],
                   default: Sequence[tf.Tensor],
                   prob: float = 0.3) -> Sequence[tf.Tensor]:
    ordering = self.compiler.rddl.domain.action_fluent_ordering
    dtypes = map(rddl2tf.utils.range_type_to_dtype, self.compiler.rddl.action_range_type)
    sizes = self.compiler.rddl.action_size  # renamed from `size` to avoid shadowing the loop variable
    action = []
    for name, dtype, size, default_value in zip(ordering, dtypes, sizes, default):
        action_fluent = self._sample_action_fluent(name, dtype, size, constraints, default_value, prob)
        action.append(action_fluent)
    return tuple(action)
| Samples action fluents respecting the given bound constraints.
12,695 |
def _sample_action_fluent(self,
                          name: str,
                          dtype: tf.DType,
                          size: Sequence[int],
                          constraints: Dict[str, Constraints],
                          default_value: tf.Tensor,
                          prob: float) -> tf.Tensor:
    shape = [self.batch_size] + list(size)
    if dtype == tf.float32:
        bounds = constraints.get(name)
        if bounds is None:
            low, high = -self.MAX_REAL_VALUE, self.MAX_REAL_VALUE
            dist = tf.distributions.Uniform(low=low, high=high)
            sampled_fluent = dist.sample(shape)
        else:
            low, high = bounds
            batch = (low is not None and low.batch) or (high is not None and high.batch)
            low = tf.cast(low.tensor, tf.float32) if low is not None else -self.MAX_REAL_VALUE
            high = tf.cast(high.tensor, tf.float32) if high is not None else self.MAX_REAL_VALUE
            dist = tf.distributions.Uniform(low=low, high=high)
            if batch:
                sampled_fluent = dist.sample()
            elif isinstance(low, tf.Tensor) or isinstance(high, tf.Tensor):
                if (low + high).shape.as_list() == list(size):
                    sampled_fluent = dist.sample([self.batch_size])
                else:
                    raise ValueError('bounds are not compatible with action fluent.')
            else:
                sampled_fluent = dist.sample(shape)
    elif dtype == tf.int32:
        logits = [1.0] * self.MAX_INT_VALUE
        dist = tf.distributions.Categorical(logits=logits, dtype=tf.int32)
        sampled_fluent = dist.sample(shape)
    elif dtype == tf.bool:
        probs = 0.5
        dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool)
        sampled_fluent = dist.sample(shape)
    # `probs=` is passed by keyword: tf.distributions.Bernoulli takes `logits`
    # as its first positional argument, so passing `prob` positionally (as the
    # flattened original did) would treat it as a logit rather than a probability.
    select_default = tf.distributions.Bernoulli(probs=prob, dtype=tf.bool).sample(self.batch_size)
    action_fluent = tf.where(select_default, default_value, sampled_fluent)
    return action_fluent
| Samples the action fluent with given name, dtype, and size.
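Editor's note on rows 12,692-12,695: they implement batched rejection sampling — keep the batch entries that already satisfy the preconditions, resample the rest. A minimal NumPy sketch of the same pattern (the function names and the toy validity predicate are mine, not from the source):

    import numpy as np

    def resample_until_valid(sample_fn, is_valid, max_iters=100):
        # mirrors the tf.while_loop in _check_preconditions above
        batch = sample_fn()
        ok = is_valid(batch)
        for _ in range(max_iters):
            if ok.all():
                break
            fresh = sample_fn()
            batch = np.where(ok[:, None], batch, fresh)  # keep accepted rows
            ok = ok | is_valid(fresh)
        return batch

    # toy usage: 2-D actions in [-1, 1] whose L1 norm must not exceed 1
    rng = np.random.default_rng(0)
    sample = lambda: rng.uniform(-1.0, 1.0, size=(8, 2))
    valid = lambda a: np.abs(a).sum(axis=1) <= 1.0
    actions = resample_until_valid(sample, valid)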
12,696 |
def UnitToLNode(u: Unit,
                node: Optional[LNode] = None,
                toL: Optional[dict] = None,
                optimizations=[]) -> LNode:
    if toL is None:
        toL = {}
    if node is None:
        root = LNode(name=u._name, originObj=u, node2lnode=toL)
    else:
        root = node
    stmPorts = {}
    netCtx = NetCtxs(root)
    # recurse into subunits
    for su in u._units:
        n = root.addNode(name=su._name, originObj=su)
        UnitToLNode(su, n, toL, optimizations)
    # add nodes for statements and ports
    for stm in u._ctx.statements:
        n = addStmAsLNode(root, stm, stmPorts, netCtx)
    for intf in u._interfaces:
        addPort(root, intf)
    # render the content of statement nodes
    for stm in u._ctx.statements:
        n = toL.get(stm, None)
        if n is not None:
            if isinstance(n, VirtualLNode):
                p = None
            else:
                p = stmPorts[n]
            r = StatementRenderer(n, toL, p, netCtx)
            r.renderContent()
    # collect drivers/endpoints of visible signals into nets
    for s in u._ctx.signals:
        if not s.hidden:
            net, _ = netCtx.getDefault(s)
            for e in s.endpoints:
                if isinstance(e, PortItem):
                    net.addEndpoint(toL[e])
            for d in s.drivers:
                if isinstance(d, PortItem):
                    net.addDriver(toL[d])
    netCtx.applyConnections(root)
    for opt in optimizations:
        opt(root)
    isRootOfWholeGraph = root.parent is None
    if not isRootOfWholeGraph:
        # connect interface ports to ports on the enclosing node
        for intf in u._interfaces:
            ext_p = toL[originObjOfPort(intf)].parentNode
            nodePort = addPortToLNode(root, intf)
            if intf._direction == INTF_DIRECTION.SLAVE:
                src = nodePort
                dst = ext_p.addPort("", PortType.INPUT, PortSide.WEST)
            else:
                src = ext_p.addPort("", PortType.OUTPUT, PortSide.EAST)
                dst = nodePort
            root.addEdge(src, dst, name=repr(intf), originObj=intf)
    return root
| Build LNode instance from Unit instance
12,697 |
def configure(self, rhsm=None, repositories=None):
    if rhsm is not None:
        self.rhsm_register(rhsm)
    if repositories is not None:
        self.enable_repositories(repositories)
    self.create_stack_user()
    self.deploy_hypervisor()
| This method will configure the host0 and run the hypervisor.
12,698 |
def deploy_hypervisor(self):
    self.yum_install([
        'libvirt-daemon-driver-nwfilter', 'libvirt-client',
        'libvirt-daemon-config-network', 'libvirt-daemon-driver-nodedev',
        'libvirt-daemon-kvm', 'libvirt-python',
        'libvirt-daemon-config-nwfilter', 'libvirt-glib', 'libvirt-daemon',
        'libvirt-daemon-driver-storage', 'libvirt',
        'libvirt-daemon-driver-network', 'libvirt-devel', 'libvirt-gobject',
        'libvirt-daemon-driver-secret', 'libvirt-daemon-driver-qemu',
        'libvirt-daemon-driver-interface', 'libguestfs-tools',
        'virt-install', 'genisoimage', 'openstack-tripleo',
        'instack-undercloud'])
    self.run('sed -i "s,#auth_unix_rw,auth_unix_rw," /etc/libvirt/libvirtd.conf')
    self.run('systemctl start libvirtd')
    self.run('systemctl status libvirtd')
    self.install_base_packages()
    self.clean_system()
    self.yum_update()
| Install the libvirtd and instack-undercloud packages.
12,699 |
def build_undercloud_on_libvirt(self, image_path, rhsm=None, repositories=[]):
    self.run('sysctl net.ipv4.ip_forward=1')
    self.fetch_image(path=image_path, dest='/home/stack/guest_image.qcow2', user='stack')
    self.run("LIBGUESTFS_BACKEND=direct virt-customize -a /home/stack/guest_image.qcow2 --run-command 'echo MTU=\"1400\" >> /etc/sysconfig/network-scripts/ifcfg-eth0'")
    env = Environment()
    env.loader = FileSystemLoader(pkg_data_filename('template'))
    template = env.get_template('virt-setup-env.j2')
    self.run('mkdir -p /home/stack/DIB', user='stack')
    self.run('cp -v /etc/yum.repos.d/*.repo /home/stack/DIB', user='stack')
    dib_yum_repo_conf = self.run('find /home/stack/DIB -type f', user='stack')[0].split()
    virt_setup_template = {
        'dib_yum_repo_conf': dib_yum_repo_conf,
        'node': {'count': 2, 'mem': 6144, 'cpu': 2},
        'undercloud_node_mem': 8192,
        'guest_image_name': '/home/stack/guest_image.qcow2'}
    if rhsm is not None:
        virt_setup_template['rhsm'] = {
            'login': rhsm.get('login'),
            'password': rhsm.get('password', os.environ.get('RHN_PW')),
            'pool_id': rhsm.get('pool_id', ''),
            'repositories': [i['name'] for i in repositories if i['type'] == 'rhsm_channel']}
    virt_setup_env = template.render(virt_setup_template)
    self.create_file('virt-setup-env', virt_setup_env, user='stack')
    self.run('virsh destroy instack', ignore_error=True)
    self.run('virsh undefine instack --remove-all-storage', ignore_error=True)
    self.run('source virt-setup-env; instack-virt-setup', user='stack')
    undercloud_ip = self.run('/sbin/ip n | grep $(tripleo get-vm-mac instack) | awk \'{print $1;}\'', user='stack')[0]
    assert undercloud_ip, 'undercloud should have an IP'
    undercloud = Undercloud(hostname=undercloud_ip, via_ip=self.hostname, user='root', key_filename=self._key_filename)
    return undercloud
| Build the Undercloud by using instack-virt-setup script.
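Editor's note on the row above: build_undercloud_on_libvirt renders a virt-setup-env file from a Jinja2 template before running instack-virt-setup. A runnable sketch of just that templating step, with a DictLoader standing in for the real virt-setup-env.j2 (whose contents are not shown in the dump; the template body here is illustrative):

    from jinja2 import Environment, DictLoader

    env = Environment(loader=DictLoader({
        'virt-setup-env.j2': 'export NODE_COUNT={{ node.count }}\n'
                             'export NODE_MEM={{ node.mem }}\n'}))
    template = env.get_template('virt-setup-env.j2')
    # same keys as the virt_setup_template dict in the row above
    print(template.render({'node': {'count': 2, 'mem': 6144}}))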