idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
12,600 | def _save_potentials ( self , directory ) : print ( 'saving potentials' ) digits = int ( np . ceil ( np . log10 ( self . configs . configs . shape [ 0 ] ) ) ) for i in range ( 0 , self . configs . configs . shape [ 0 ] ) : pot_data = self . get_potential ( i ) filename_raw = 'pot{0:0' + '{0}' . format ( digits ) + '}.d... | save potentials to a directory |
12,601 | def clear_measurements ( self ) : mid_list = self . assignments . get ( 'measurements' , None ) if mid_list is not None : for mid in mid_list : self . configs . delete_measurements ( mid = mid ) self . assignments [ 'measurements' ] = None | Forget any previous measurements |
12,602 | def measurements ( self ) : mid = self . assignments . get ( 'measurements' , None ) if mid is None : return_value = self . model ( voltages = True , sensitivities = False , potentials = False , ) if return_value is None : print ( 'cannot model' ) return cids = self . assignments [ 'measurements' ] measurements = np . ... | Return the measurements associated with this instance . |
12,603 | def _read_sensitivities ( self , sens_dir ) : if self . assignments [ 'sensitivities' ] is not None : print ( 'Sensitivities already imported. Will not overwrite!' ) return else : self . assignments [ 'sensitivities' ] = { } sens_files = sorted ( glob ( sens_dir + os . sep + 'sens*.dat' ) ) for nr , filename in enumera... | import sensitivities from a directory |
12,604 | def _read_potentials ( self , pot_dir ) : if self . assignments [ 'potentials' ] is not None : print ( 'Potentials already imported. Will not overwrite!' ) return else : self . assignments [ 'potentials' ] = { } pot_files = sorted ( glob ( pot_dir + os . sep + 'pot*.dat' ) ) for nr , filename in enumerate ( pot_files )... | import potentials from a directory |
12,605 | def get_potential ( self , config_nr ) : if self . assignments [ 'potentials' ] is None : self . _check_state ( ) if self . can_model : self . model ( potentials = True ) nids = self . assignments [ 'potentials' ] [ config_nr ] pot_data = [ self . nodeman . nodevals [ nid ] for nid in nids ] return pot_data | Return potential data for a given measurement configuration . |
12,606 | def get_sensitivity ( self , config_nr ) : if self . assignments [ 'sensitivities' ] is None : self . _check_state ( ) if self . can_model : self . model ( sensitivities = True ) cids = self . assignments [ 'sensitivities' ] [ config_nr ] sens_data = [ self . parman . parsets [ cid ] for cid in cids ] meta_data = [ sel... | return a sensitivity as well as corresponding metadata for a given measurement configuration . Indices start at zero . |
12,607 | def read_voltages ( self , voltage_file ) : measurements_raw = np . loadtxt ( voltage_file , skiprows = 1 , ) measurements = np . atleast_2d ( measurements_raw ) A = ( measurements [ : , 0 ] / 1e4 ) . astype ( int ) B = ( measurements [ : , 0 ] % 1e4 ) . astype ( int ) M = ( measurements [ : , 1 ] / 1e4 ) . astype ( in... | import voltages from a volt . dat file |
12,608 | def model ( self , voltages = True , sensitivities = False , potentials = False , output_directory = None , silent = False , ) : self . _check_state ( ) if self . can_model : if output_directory is not None : if not os . path . isdir ( output_directory ) : os . makedirs ( output_directory ) tempdir = output_directory s... | Forward model the tomodir and read in the results |
12,609 | def _invert ( self , tempdir , catch_output = True , ** kwargs ) : nr_cores = kwargs . get ( 'cores' , 2 ) print ( 'attempting inversion in directory: {0}' . format ( tempdir ) ) pwd = os . getcwd ( ) os . chdir ( tempdir ) self . save_to_tomodir ( '.' ) os . chdir ( 'exe' ) binary = CRBin . get ( 'CRTomo' ) print ( 'U... | Internal function than runs an inversion using CRTomo . |
12,610 | def invert ( self , output_directory = None , catch_output = True , ** kwargs ) : self . _check_state ( ) if self . can_invert : if output_directory is not None : if not os . path . isdir ( output_directory ) : os . makedirs ( output_directory ) tempdir = output_directory self . _invert ( tempdir , catch_output , ** kw... | Invert this instance and import the result files |
12,611 | def read_inversion_results ( self , tomodir ) : self . _read_inversion_results ( tomodir ) self . _read_inv_ctr ( tomodir ) self . _read_resm_m ( tomodir ) self . _read_eps_ctr ( tomodir ) | Import inversion results from a tomodir into this instance |
12,612 | def plot_eps_data_hist ( self , dfs ) : if 'datum' in dfs [ 0 ] : dc_inv = True else : dc_inv = False nr_y = len ( dfs ) size_y = 5 / 2.54 * nr_y if dc_inv : nr_x = 1 else : nr_x = 3 size_x = 15 / 2.54 fig , axes = plt . subplots ( nr_y , nr_x , figsize = ( size_x , size_y ) ) axes = np . atleast_2d ( axes ) df = dfs [... | Plot histograms of data residuals and data error weighting |
12,613 | def _read_eps_ctr ( tomodir ) : epsctr_file = tomodir + os . sep + 'inv' + os . sep + 'eps.ctr' if not os . path . isfile ( epsctr_file ) : print ( 'eps.ctr not found: {0}' . format ( epsctr_file ) ) print ( os . getcwd ( ) ) return 1 with open ( epsctr_file , 'r' ) as fid : lines = fid . readlines ( ) group = itertool... | Parse a CRTomo eps . ctr file . |
12,614 | def _read_resm_m ( self , tomodir ) : resm_file = tomodir + os . sep + 'inv' + os . sep + 'res_m.diag' if not os . path . isfile ( resm_file ) : print ( 'res_m.diag not found: {0}' . format ( resm_file ) ) print ( os . getcwd ( ) ) return 1 with open ( resm_file , 'rb' ) as fid : first_line = fid . readline ( ) . strip... | Read in the resolution matrix of an inversion |
12,615 | def register_forward_model ( self , pid_mag , pid_pha ) : self . register_magnitude_model ( pid_mag ) self . register_phase_model ( pid_pha ) | Register parameter sets as the forward models for magnitude and phase |
12,616 | def register_magnitude_model ( self , pid ) : if self . assignments [ 'forward_model' ] is None : self . assignments [ 'forward_model' ] = [ None , None ] self . assignments [ 'forward_model' ] [ 0 ] = pid | Set a given parameter model to the forward magnitude model |
12,617 | def register_phase_model ( self , pid ) : if self . assignments [ 'forward_model' ] is None : self . assignments [ 'forward_model' ] = [ None , None ] self . assignments [ 'forward_model' ] [ 1 ] = pid | Set a given parameter model to the forward phase model |
12,618 | def add_homogeneous_model ( self , magnitude , phase = 0 ) : if self . assignments [ 'forward_model' ] is not None : print ( 'model already set, will overwrite' ) magnitude_model = np . ones ( self . grid . nr_of_elements ) * magnitude phase_model = np . ones ( self . grid . nr_of_elements ) * phase pid_mag = self . pa... | Add a homogeneous resistivity model to the tomodir . This is useful for synthetic measurements . |
12,619 | def show_parset ( self , pid ) : fig , ax = plt . subplots ( ) self . plot . plot_elements_to_ax ( pid , ax = ax ) return fig , ax | Plot a given parameter set |
12,620 | def cythonize ( * args , ** kwargs ) : global cythonize from Cython . Build import cythonize return cythonize ( * args , ** kwargs ) | dirty hack only import cythonize at the time you use it . |
12,621 | def response ( self , url ) : resp = requests . get ( url ) . content return self . parseresponse ( resp ) | Grab an API response . |
12,622 | def errorhandle ( self , resp ) : if self . format == 'json' : parsed = xmltodict . parse ( resp ) errors = parsed [ self . RESPONSE_TOKEN ] [ self . ERROR_TOKEN ] if type ( errors ) is list and len ( errors ) > 1 : messages = ", " . join ( [ " " . join ( [ "{}: {}" . format ( k , v ) for k , v in e . items ( ) ] ) for... | Parse API error responses and raise appropriate exceptions . |
12,623 | def parseresponse ( self , resp ) : if sys . version_info . major > 2 : resp = resp . decode ( 'utf-8' ) if self . RESPONSE_TOKEN not in resp : raise BustimeError ( "The Bustime API returned an invalid response: {}" . format ( resp ) ) elif self . ERROR_TOKEN in resp : return self . errorhandle ( resp ) else : if self ... | Parse an API response . |
12,624 | def get_stack_refs ( refs : list ) : refs = list ( refs ) refs . reverse ( ) stack_refs = [ ] last_stack = None while refs : ref = refs . pop ( ) if last_stack is not None and re . compile ( r'v[0-9][a-zA-Z0-9-]*$' ) . match ( ref ) : stack_refs . append ( StackReference ( last_stack , ref ) ) else : try : with open ( ... | Returns a list of stack references with name and version . |
12,625 | def instance_for_arguments ( self , arguments ) : model_instance = ModelInstance ( ) for prior_model_tuple in self . prior_model_tuples : setattr ( model_instance , prior_model_tuple . name , prior_model_tuple . prior_model . instance_for_arguments ( arguments ) ) return model_instance | Creates a ModelInstance which has an attribute and class instance corresponding to every PriorModel attributed to this instance . |
12,626 | def mapper_from_partial_prior_arguments ( self , arguments ) : original_prior_dict = { prior : prior for prior in self . priors } return self . mapper_from_prior_arguments ( { ** original_prior_dict , ** arguments } ) | Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors keeping existing priors where no mapping is provided . |
12,627 | def mapper_from_prior_arguments ( self , arguments ) : mapper = copy . deepcopy ( self ) for prior_model_tuple in self . prior_model_tuples : setattr ( mapper , prior_model_tuple . name , prior_model_tuple . prior_model . gaussian_prior_model_for_arguments ( arguments ) ) return mapper | Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors . |
12,628 | def mapper_from_gaussian_tuples ( self , tuples , a = None , r = None ) : prior_tuples = self . prior_tuples_ordered_by_id prior_class_dict = self . prior_class_dict arguments = { } for i , prior_tuple in enumerate ( prior_tuples ) : prior = prior_tuple . prior cls = prior_class_dict [ prior ] mean = tuples [ i ] [ 0 ]... | Creates a new model mapper from a list of floats describing the mean values of gaussian priors . The widths \ of the new priors are taken from the width_config . The new gaussian priors must be provided in the same \ order as the priors associated with model . |
12,629 | def info ( self ) : info = [ ] for prior_model_name , prior_model in self . prior_model_tuples : info . append ( prior_model . name + '\n' ) info . extend ( [ f"{prior_model_name}_{item}" for item in prior_model . info ] ) return '\n' . join ( info ) | Use the priors that make up the model_mapper to generate information on each parameter of the overall model . |
12,630 | def push ( config , force = False ) : repo = config . repo active_branch = repo . active_branch if active_branch . name == "master" : error_out ( "Can't commit when on the master branch. " "You really ought to do work in branches." ) state = read ( config . configfile ) if not state . get ( "FORK_NAME" ) : info_out ( "... | Create push the current branch . |
12,631 | def chkpath ( path ) : if os . path . exists ( path ) : return path else : msg = "{0} does not exist." . format ( path ) raise argparse . ArgumentTypeError ( msg ) | Checks if a path exists . |
12,632 | def readin_volt ( filename ) : with open ( filename , 'r' ) as fid : content = np . loadtxt ( fid , skiprows = 1 , usecols = [ 0 , 1 , 2 ] ) volt = content [ : , 2 ] elecs = content [ : , 0 : 2 ] return elecs , volt | Read in measurement data from a volt . dat file and return electrodes and measured resistance . |
12,633 | def save_volt ( elecs , volt , filename ) : content = np . column_stack ( ( elecs , volt , np . zeros ( len ( volt ) ) ) ) with open ( filename , 'w' ) as fid : fid . write ( '{0}\n' . format ( content . shape [ 0 ] ) ) with open ( filename , 'ab' ) as fid : np . savetxt ( fid , np . array ( content ) , fmt = '%i %i %f... | Save the values in volt - format . |
12,634 | def main ( ) : options = handle_options ( ) elecs , d_obs = readin_volt ( options . d_obs ) elecs , d_est = readin_volt ( options . d_est ) elecs , d_estTC = readin_volt ( options . d_estTC ) volt_corr = calc_correction ( d_obs , d_est , d_estTC , ) save_volt ( elecs , volt_corr , options . output , ) | Function to remove temperature effect from field data |
12,635 | def recursive_update ( default , custom ) : if not isinstance ( default , dict ) or not isinstance ( custom , dict ) : raise TypeError ( 'Params of recursive_update should be dicts' ) for key in custom : if isinstance ( custom [ key ] , dict ) and isinstance ( default . get ( key ) , dict ) : default [ key ] = recursiv... | Return a dict merged from default and custom |
12,636 | def cleanup ( config , searchstring , force = False ) : repo = config . repo branches_ = list ( find ( repo , searchstring ) ) if not branches_ : error_out ( "No branches found" ) elif len ( branches_ ) > 1 : error_out ( "More than one branch found.{}" . format ( "\n\t" . join ( [ "" ] + [ x . name for x in branches_ ]... | Deletes a found branch locally and remotely . |
12,637 | def calc_correction ( temp , mag , add = False , T_std = 10 , m = 0.021 ) : if mag . shape [ 1 ] == 3 : if add : data_x = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag [ : , 0 ] data_y = ( m * ( T_std - 25 ) + 1 ) / ( m * ( temp - 25 ) + 1 ) * mag [ : , 1 ] data_z = ( m * ( T_std - 25 ) + 1 ) / ( m * ( t... | Function to add or substract the temperature effect to given data . The function can be called in python scripts . For application via command line in a file system use the script td_correct_temperature . py . The data is taken and given in Ohmm . |
12,638 | def save_mag_to_file ( mag , filename , rhofile ) : if rhofile : null = np . zeros ( len ( mag ) ) if mag . shape [ 1 ] == 3 : null = np . column_stack ( ( null , null , null , null ) ) result = np . column_stack ( ( mag , null ) ) with open ( filename , 'w' ) as fid : fid . write ( '{0}\n' . format ( mag . shape [ 0 ]... | Save the values in rho - or mag - format . |
12,639 | def main ( ) : options = handle_options ( ) tempdata = readin_temp ( options . temp_file ) magdata = readin_rho ( options . filename , options . rhofile , aniso = options . aniso ) mag_corr = calc_correction ( temp = tempdata , mag = magdata , add = options . add , T_std = options . T_std , m = options . m , ) save_mag... | Function to add or substract the temperature effect to data in a tomodir |
12,640 | def singular ( plural ) : if plural . endswith ( 'ies' ) : return plural [ : - 3 ] + 'y' if plural . endswith ( 's' ) : return plural [ : - 1 ] raise ValueError ( 'unknown plural form %r' % ( plural , ) ) | Take a plural English word and turn it into singular |
12,641 | def plot_single_configuration ( self , config_nr , sens_file ) : indices = elem . load_column_file_to_elements_advanced ( sens_file , [ 2 , 3 ] , False , False ) elem . plt_opt . title = '' elem . plt_opt . reverse = True elem . plt_opt . cbmin = - 1 elem . plt_opt . cbmax = 1 elem . plt_opt . cblabel = r'fill' elem . ... | plot sensitivity distribution with center of mass for a single configuration . The electrodes used are colored . |
12,642 | def plot_sens_center ( self , frequency = 2 ) : try : colors = np . loadtxt ( self . volt_file , skiprows = 1 ) except IOError : print ( 'IOError opening {0}' . format ( volt_file ) ) exit ( ) if ( len ( colors . shape ) > 1 ) : print ( 'Artificial or Multi frequency data' ) colors = colors [ : , frequency ] . flatten ... | plot sensitivity center distribution for all configurations in config . dat . The centers of mass are colored by the data given in volt_file . |
12,643 | def color_electrodes ( self , config_nr , ax ) : electrodes = np . loadtxt ( options . config_file , skiprows = 1 ) electrodes = self . configs [ ~ np . isnan ( self . configs ) . any ( 1 ) ] electrodes = electrodes . astype ( int ) conf = [ ] for dim in range ( 0 , electrodes . shape [ 1 ] ) : c = electrodes [ config_... | Color the electrodes used in specific configuration . Voltage electrodes are yellow Current electrodes are red ?! |
12,644 | def compute_sens ( self , elem_file , elec_file , configs ) : CRMod_config = CRMod . config ( ) if self . options . sink is not None : print ( '2D mode with sink {0}' . format ( self . options . sink ) ) CRMod_config [ '2D' ] = 0 CRMod_config [ 'fictitious_sink' ] = 'T' CRMod_config [ 'sink_node' ] = self . options . s... | Compute the sensitivities for the given input data . A CRMod instance is called to create the sensitivity files . |
12,645 | def compute_center_of_mass ( self , filename ) : sens = np . loadtxt ( filename , skiprows = 1 ) X = sens [ : , 0 ] Z = sens [ : , 1 ] C = sens [ : , 2 ] x_center = 0 z_center = 0 sens_sum = 0 for i in range ( 0 , C . shape [ 0 ] ) : if ( self . weight == 0 ) : weight = ( C [ i ] ) if ( self . weight == 1 ) : weight = ... | Center of mass is computed using the sensitivity data output from CRMod Data weights can be applied using command line options |
12,646 | def handle_cmd_options ( ) : parser = OptionParser ( ) parser . add_option ( "-s" , "--silent" , action = "store_true" , dest = "silent" , help = "print any warnings" , default = False ) ( options , args ) = parser . parse_args ( ) return options , args | Get the options from the command line . |
12,647 | def move ( fname , folder , options ) : if os . path . isfile ( fname ) : shutil . move ( fname , folder ) else : if options . silent is False : print ( '{0} missing' . format ( fname ) ) | Move file to dir if existing |
12,648 | def print_tb ( tb , limit = None , file = None ) : if file is None : file = sys . stderr if limit is None : if hasattr ( sys , 'tracebacklimit' ) : limit = sys . tracebacklimit file . write ( '\n' . join ( format_tb ( tb , limit ) ) + '\n' ) | Print up to limit stack trace entries from the traceback tb . |
12,649 | def print_exception ( etype , value , tb , limit = None , file = None , chain = True ) : import traceback if file is None : file = sys . stderr if tb : file . write ( 'Traceback (most recent call last):\n' ) print_tb ( tb , limit , file ) lines = traceback . format_exception_only ( etype , value ) for line in lines : f... | Print exception up to limit stack trace entries from tb to file . |
12,650 | def construct ( self , data ) : occurrences = { } main_occurrences = { } for pkg in data [ "data" ] [ "dependencies" ] : package = pkg [ "package" ] for item in pkg [ "dependencies" ] : dep = item [ "name" ] if package != "." : deps = map ( lambda l : "%s/%s" % ( package , l ) , item [ "location" ] ) else : deps = item... | Construct info about a project from artefact |
12,651 | def join_import_from ( self , import_spec ) : if not self . isroot and not self . ispkg : parent = self . name . rpartition ( '.' ) [ 0 ] else : parent = self . name return join_import_from ( import_spec , parent ) | Joins a relative import like from . foo import bar with this module as its parent module . If the module is not a root module or package root it will be joined with the package root . |
12,652 | def is_tomodir ( directory ) : if os . path . isdir ( directory ) : if ( os . path . isdir ( directory + "/exe" ) and os . path . isdir ( directory + "/config" ) and os . path . isdir ( directory + "/rho" ) and os . path . isdir ( directory + "/inv" ) and os . path . isdir ( directory + "/mod" ) ) : return True else : ... | Check if the supplied directory is a tomodir |
12,653 | def td_is_finished ( tomodir ) : if not is_tomodir ( tomodir ) : raise Exception ( 'Supplied directory is not a tomodir!' ) if ( os . path . isfile ( tomodir + os . sep + 'config/config.dat' ) and os . path . isfile ( tomodir + os . sep + 'rho/rho.dat' ) and os . path . isfile ( tomodir + os . sep + 'grid/elem.dat' ) a... | Return the state of modeling and inversion for a given tomodir . The result does not take into account sensitivities or potentials as optionally generated by CRMod . |
12,654 | def is_sipdir ( directory ) : is_sipdir = True if ( not os . path . isfile ( directory + os . sep + 'frequencies.dat' ) ) : is_sipdir = False if ( not os . path . isdir ( directory + os . sep + 'invmod' ) ) : is_sipdir = False return is_sipdir | Simple check if the supplied directory is a SIP directory . |
12,655 | def sipdir_is_finished ( sipdir ) : if not is_sipdir ( sipdir ) : raise Exception ( 'Directory is not a valid SIP directory!' ) subdirs_raw = sorted ( glob . glob ( sipdir + os . sep + 'invmod' + os . sep + '*' ) ) subdirs = [ x for x in subdirs_raw if os . path . isdir ( x ) ] crmod_finished = True crtomo_finished = T... | Return the state of modeling and inversion for a given SIP dir . The result does not take into account sensitivities or potentials as optionally generated by CRMod . |
12,656 | def enable_neutron_hack ( self , os_username , os_password , os_project_id , os_auth_url ) : self . yum_install ( [ 'python-neutronclient' ] ) self . send_file ( pkg_data_filename ( 'static' , 'ovb_fix_neutron_addr' ) , '/usr/local/bin/ovb_fix_neutron_addr' , unix_mode = 0o755 ) content = unit = 'ovb_fix_neutron_addr.s... | Enable the neutron hack on the undercloud . |
12,657 | def patch_ironic_ramdisk ( self ) : tmpdir = self . run ( 'mktemp -d' ) [ 0 ] . rstrip ( '\n' ) self . run ( 'cd {tmpdir}; zcat /home/stack/ironic-python-agent.initramfs| cpio -id' . format ( tmpdir = tmpdir ) ) self . send_file ( pkg_data_filename ( 'static' , 'ironic-wipefs.patch' ) , '/tmp/ironic-wipefs.patch' ) sel... | Clean the disk before flushing the new image . |
12,658 | def show_menu ( title , options , default = None , height = None , width = None , multiselect = False , precolored = False ) : plugins = [ FilterPlugin ( ) ] if any ( isinstance ( opt , OptionGroup ) for opt in options ) : plugins . append ( OptionGroupPlugin ( ) ) if title : plugins . append ( TitlePlugin ( title ) ) ... | Shows an interactive menu in the terminal . |
12,659 | def pluggable ( method ) : def wrapped ( self , * args , ** kwargs ) : if hasattr ( self , "_plugins" ) : return getattr ( self . _plugins [ - 1 ] , method . __name__ ) ( * args , ** kwargs ) else : return method ( self , * args , ** kwargs ) wrapped . original = method return wrapped | Mark a class method as extendable with plugins . |
12,660 | def register_plugin ( host , plugin ) : class OriginalMethods ( object ) : def __getattr__ ( self , name ) : return lambda * args , ** kwargs : getattr ( host , name ) . original ( host , * args , ** kwargs ) if not hasattr ( host , "_plugins" ) : host . _plugins = [ OriginalMethods ( ) ] plugin . parent = host . _plug... | Register a plugin with a host object . Some |
12,661 | def chdir ( self , path = None ) : if path is None : self . _cwd = None return if not stat . S_ISDIR ( self . stat ( path ) . st_mode ) : raise SFTPError ( errno . ENOTDIR , "%s: %s" % ( os . strerror ( errno . ENOTDIR ) , path ) ) self . _cwd = b ( self . normalize ( path ) ) | Change the current directory of this SFTP session . Since SFTP doesn t really have the concept of a current working directory this is emulated by Paramiko . Once you use this method to set a working directory all operations on this . SFTPClient object will be relative to that path . You can pass in None to stop using a... |
12,662 | def get_int ( self ) : byte = self . get_bytes ( 1 ) if byte == max_byte : return util . inflate_long ( self . get_binary ( ) ) byte += self . get_bytes ( 3 ) return struct . unpack ( '>I' , byte ) [ 0 ] | Fetch an int from the stream . |
12,663 | def set_logger ( name , level = 'INFO' , fmt = None , datefmt = None , propagate = 1 , remove_handlers = False ) : logger = logging . getLogger ( name ) logger . setLevel ( getattr ( logging , level ) ) logger . propagate = propagate if remove_handlers : logger . handlers = [ ] return handler = None for h in logger . h... | This function will clear the previous handlers and set only one handler which will only be StreamHandler for the logger . |
12,664 | def format ( self , record ) : self . _format_record ( record ) record_dict = { } for k , v in record . __dict__ . items ( ) : if isinstance ( k , str ) : k = decode_ ( k , 'utf8' ) if isinstance ( v , str ) : v = decode_ ( v , 'utf8' , 'replace' ) record_dict [ k ] = v if 'color' in self . fmt or 'end_color' in self .... | return log in unicode |
12,665 | def list ( self , source_ids = None , seniority = "all" , stage = None , date_start = "1494539999" , date_end = TIMESTAMP_NOW , filter_id = None , page = 1 , limit = 30 , sort_by = 'ranking' , filter_reference = None , order_by = None ) : query_params = { } query_params [ "date_end" ] = _validate_timestamp ( date_end ,... | Retreive all profiles that match the query param . |
12,666 | def add ( self , source_id = None , file_path = None , profile_reference = "" , timestamp_reception = None , training_metadata = [ ] ) : data = { } data [ "source_id" ] = _validate_source_id ( source_id ) data [ "profile_reference" ] = _validate_profile_reference ( profile_reference ) data [ "timestamp_reception" ] = _... | Add a profile resume to a sourced id . |
12,667 | def addList ( self , source_id , dir_path , is_recurcive = False , timestamp_reception = None , training_metadata = [ ] ) : if not path . isdir ( dir_path ) : raise ValueError ( dir_path + ' is not a directory' ) files_to_send = _get_files_from_dir ( dir_path , is_recurcive ) succeed_upload = { } failed_upload = { } fo... | Add all profile from a given directory . |
12,668 | def get ( self , source_id = None , profile_id = None , profile_reference = None ) : query_params = { } query_params [ "source_id" ] = _validate_source_id ( source_id ) if profile_id : query_params [ "profile_id" ] = _validate_profile_id ( profile_id ) if profile_reference : query_params [ "profile_reference" ] = _vali... | Retrieve the profile information associated with profile id . |
12,669 | def set ( self , source_id = None , profile_id = None , filter_id = None , stage = None , profile_reference = None , filter_reference = None ) : data = { } data [ "source_id" ] = _validate_source_id ( source_id ) if profile_id : data [ "profile_id" ] = _validate_profile_id ( profile_id ) if filter_id : data [ "filter_i... | Edit the profile stage given a filter . |
12,670 | def get ( self , source_id = None , profile_id = None , profile_reference = None , filter_id = None , filter_reference = None ) : query_params = { } query_params [ "source_id" ] = _validate_source_id ( source_id ) if profile_id : query_params [ "profile_id" ] = _validate_profile_id ( profile_id ) if profile_reference :... | Retrieve the interpretability information . |
12,671 | def check ( self , profile_data , training_metadata = [ ] ) : data = { "profile_json" : _validate_dict ( profile_data , "profile_data" ) , "training_metadata" : _validate_training_metadata ( training_metadata ) , } response = self . client . post ( "profile/json/check" , data = data ) return response . json ( ) | Use the api to check weither the profile_data are valid . |
12,672 | def add ( self , source_id , profile_data , training_metadata = [ ] , profile_reference = None , timestamp_reception = None ) : data = { "source_id" : _validate_source_id ( source_id ) , "profile_json" : _validate_dict ( profile_data , "profile_data" ) , "training_metadata" : _validate_training_metadata ( training_meta... | Use the api to add a new profile using profile_data . |
12,673 | def md5sum ( self , f ) : m = hashlib . md5 ( ) fh = open ( f , 'r' ) while 1 : chunk = fh . read ( BUF_SIZE ) if not chunk : break m . update ( chunk ) fh . close ( ) return m . hexdigest ( ) | md5sums a file returning the hex digest |
12,674 | def iterdupes ( self , compare = None , filt = None ) : if not compare : compare = self . md5sum seen_siz = { } seen_sum = { } size_func = lambda x : os . stat ( x ) . st_size for ( fsize , f ) in self . iteritems ( want_dirs = False , func = size_func , filt = filt ) : if fsize not in seen_siz : seen_siz [ fsize ] = f... | streaming item iterator with low overhead duplicate file detection |
12,675 | def objects_to_root ( objects : List ) -> Root : def _to_tree ( objs : Iterable ) -> Dict : path_tree = { } for obj in objs : is_dir = obj . key . endswith ( '/' ) chunks = [ chunk for chunk in obj . key . split ( '/' ) if chunk ] chunk_count = len ( chunks ) tmp = path_tree for i , chunk in enumerate ( chunks ) : is_l... | Convert a list of s3 ObjectSummaries into a directory tree . |
12,676 | def _delete ( self , paths : Iterable [ str ] ) -> None : for chunk in util . chunk ( paths , self . _MAX_DELETES_PER_REQUEST ) : keys = list ( [ self . _prefix + key for key in chunk ] ) logger . info ( 'Deleting %d objects (%s)' , len ( keys ) , ', ' . join ( keys ) ) response = self . _bucket . delete_objects ( Dele... | Delete a collection of paths from S3 . |
12,677 | def _upload ( self , items : Iterable [ Tuple [ str , str ] ] ) -> None : for src , key in items : logger . info ( f'Uploading {src} to {key}' ) mimetype , _ = mimetypes . guess_type ( src ) if mimetype is None : logger . warning ( f'Could not guess MIME type for {src}' ) mimetype = 'application/octet-stream' logger . ... | Upload a collection of paths to S3 . |
12,678 | def rotmat ( alpha ) : R = np . array ( ( ( np . cos ( alpha ) , - np . sin ( alpha ) ) , ( np . sin ( alpha ) , np . cos ( alpha ) ) ) ) return R | Rotate around z - axis |
12,679 | def apply_async ( self , args = None , kwargs = None , ** options ) : result = super ( LoggedTask , self ) . apply_async ( args = args , kwargs = kwargs , ** options ) log . info ( 'Task {}[{}] submitted with arguments {}, {}' . format ( self . name , result . id , args , kwargs ) ) return result | Emit a log statement when the task is submitted . |
12,680 | def on_retry ( self , exc , task_id , args , kwargs , einfo ) : super ( LoggedTask , self ) . on_retry ( exc , task_id , args , kwargs , einfo ) log . warning ( '[{}] retried due to {}' . format ( task_id , getattr ( einfo , 'traceback' , None ) ) ) | Capture the exception that caused the task to be retried if any . |
12,681 | def on_failure ( self , exc , task_id , args , kwargs , einfo ) : log . error ( '[{}] failed due to {}' . format ( task_id , getattr ( einfo , 'traceback' , None ) ) ) super ( LoggedTask , self ) . on_failure ( exc , task_id , args , kwargs , einfo ) | Capture the exception that caused the task to fail if any . |
12,682 | def nodes_to_object ( self , node , object ) : "Map all child nodes to one object's attributes" for n in list ( node ) : self . node_to_object ( n , object ) | Map all child nodes to one object s attributes |
12,683 | def node_to_object ( self , node , object ) : "Map a single node to one object's attributes" attribute = self . to_lower ( node . tag ) attribute = "_yield" if attribute == "yield" else attribute try : valueString = node . text or "" value = float ( valueString ) except ValueError : value = node . text try : setattr ( ... | Map a single node to one object s attributes |
12,684 | def parse ( self , xml_file ) : "Get a list of parsed recipes from BeerXML input" recipes = [ ] with open ( xml_file , "rt" ) as f : tree = ElementTree . parse ( f ) for recipeNode in tree . iter ( ) : if self . to_lower ( recipeNode . tag ) != "recipe" : continue recipe = Recipe ( ) recipes . append ( recipe ) for rec... | Get a list of parsed recipes from BeerXML input |
12,685 | def to_lower ( self , string ) : "Helper function to transform strings to lower case" value = None try : value = string . lower ( ) except AttributeError : value = "" finally : return value | Helper function to transform strings to lower case |
12,686 | def _to_dot_key ( cls , section , key = None ) : if key : return ( NON_ALPHA_NUM . sub ( '_' , section . lower ( ) ) , NON_ALPHA_NUM . sub ( '_' , key . lower ( ) ) ) else : return NON_ALPHA_NUM . sub ( '_' , section . lower ( ) ) | Return the section and key in dot notation format . |
12,687 | def save ( self , target_file = None , as_template = False ) : self . _read_sources ( ) if not target_file : if not self . _last_source : raise AttributeError ( 'Target file is required when last source is not set during instantiation' ) target_file = self . _last_source output = str ( self ) if as_template : output_tm... | Save the config |
12,688 | def _parse_extra ( self , fp ) : comment = '' section = '' fp . seek ( 0 ) for line in fp : line = line . rstrip ( ) if not line : if comment : comment += '\n' continue if line . startswith ( '#' ) : comment += line + '\n' continue if line . startswith ( '[' ) : section = line . strip ( '[]' ) self . _add_dot_key ( sec... | Parse and store the config comments and create maps for dot notion lookup |
12,689 | def _typed_value ( self , value ) : if value not in self . _value_cache : new_value = value if is_int ( value ) : new_value = int ( value ) elif is_float ( value ) : new_value = float ( value ) elif is_bool ( value ) : new_value = to_bool ( value ) elif is_none ( value ) : new_value = None self . _value_cache [ value ]... | Transform string value to an actual data type of the same value . |
def add_section(self, section, comment=None):
    """Add a new config section, optionally with a leading comment.

    :param section: name of the section to add
    :param comment: optional comment text to attach above the section
    :raises DuplicateSectionError: if a section with the same dot-notation
        key already exists
    """
    # Make sure any lazily-loaded sources are parsed before checking
    # for duplicates.
    self._read_sources()
    # Duplicate detection is done on the normalized (dot-notation) key,
    # so 'My Section' and 'my_section' collide.
    if self._to_dot_key(section) in self._dot_keys:
        raise DuplicateSectionError(section)
    self._parser.add_section(section)
    self._add_dot_key(section)
    # NOTE: falsy comments (None, '') are deliberately skipped.
    if comment:
        self._set_comment(section, comment)
12,691 | def _set_comment ( self , section , comment , key = None ) : if '\n' in comment : comment = '\n# ' . join ( comment . split ( '\n' ) ) comment = '# ' + comment if key : self . _comments [ ( section , key ) ] = comment else : self . _comments [ section ] = comment | Set a comment for section or key |
12,692 | def _sample_actions ( self , state : Sequence [ tf . Tensor ] ) -> Tuple [ Sequence [ tf . Tensor ] , tf . Tensor , tf . Tensor ] : default = self . compiler . compile_default_action ( self . batch_size ) bound_constraints = self . compiler . compile_action_bound_constraints ( state ) action = self . _sample_action ( b... | Returns sampled action fluents and tensors related to the sampling . |
12,693 | def _check_preconditions ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] , bound_constraints : Dict [ str , Constraints ] , default : Sequence [ tf . Tensor ] ) -> Tuple [ tf . Tensor , Sequence [ tf . Tensor ] , tf . Tensor ] : def condition ( i , a , checking ) : not_checking = tf . redu... | Samples action fluents until all preconditions are satisfied . |
12,694 | def _sample_action ( self , constraints : Dict [ str , Constraints ] , default : Sequence [ tf . Tensor ] , prob : float = 0.3 ) -> Sequence [ tf . Tensor ] : ordering = self . compiler . rddl . domain . action_fluent_ordering dtypes = map ( rddl2tf . utils . range_type_to_dtype , self . compiler . rddl . action_range_... | Samples action fluents respecting the given bound constraints . |
12,695 | def _sample_action_fluent ( self , name : str , dtype : tf . DType , size : Sequence [ int ] , constraints : Dict [ str , Constraints ] , default_value : tf . Tensor , prob : float ) -> tf . Tensor : shape = [ self . batch_size ] + list ( size ) if dtype == tf . float32 : bounds = constraints . get ( name ) if bounds i... | Samples the action fluent with given name dtype and size . |
12,696 | def UnitToLNode ( u : Unit , node : Optional [ LNode ] = None , toL : Optional [ dict ] = None , optimizations = [ ] ) -> LNode : if toL is None : toL = { } if node is None : root = LNode ( name = u . _name , originObj = u , node2lnode = toL ) else : root = node stmPorts = { } netCtx = NetCtxs ( root ) for su in u . _u... | Build LNode instance from Unit instance |
def configure(self, rhsm=None, repositories=None):
    """Configure host0 and run the hypervisor.

    Optionally registers with RHSM and enables extra repositories first,
    then creates the stack user and deploys the hypervisor.
    """
    optional_steps = (
        (rhsm, self.rhsm_register),
        (repositories, self.enable_repositories),
    )
    for value, handler in optional_steps:
        if value is not None:
            handler(value)
    self.create_stack_user()
    self.deploy_hypervisor()
12,698 | def deploy_hypervisor ( self ) : self . yum_install ( [ 'libvirt-daemon-driver-nwfilter' , 'libvirt-client' , 'libvirt-daemon-config-network' , 'libvirt-daemon-driver-nodedev' , 'libvirt-daemon-kvm' , 'libvirt-python' , 'libvirt-daemon-config-nwfilter' , 'libvirt-glib' , 'libvirt-daemon' , 'libvirt-daemon-driver-storag... | Install the libvirtd and instack - undercloud packages . |
12,699 | def build_undercloud_on_libvirt ( self , image_path , rhsm = None , repositories = [ ] ) : self . run ( 'sysctl net.ipv4.ip_forward=1' ) self . fetch_image ( path = image_path , dest = '/home/stack/guest_image.qcow2' , user = 'stack' ) self . run ( "LIBGUESTFS_BACKEND=direct virt-customize -a /home/stack/guest_image.qc... | Build the Undercloud by using instack - virt - setup script . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.