idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,600
def _sequence_map ( cls , seq , path ) : if not any ( path ) : return seq result = [ ] for item in seq : try : result . append ( cls . __resolve_path ( item , path ) ) except ( KeyError , IndexError ) : pass return result
Apply a kwargsql expression to every item of a sequence and return the result.
53,601
def populate(projects_to_filter=None, group=None):
    """Populate the list of projects that belong to this experiment.

    Args:
        projects_to_filter: optional list of project name prefixes; when
            given, only matching registry entries are kept.
        group: optional iterable of group names used to narrow the result.

    Returns:
        dict mapping registry key to project class.  Projects whose DOMAIN
        is "debug" are excluded unless explicitly requested by name.
    """
    if projects_to_filter is None:
        projects_to_filter = []
    import benchbuild.projects as all_projects
    all_projects.discover()
    prjs = ProjectRegistry.projects
    if projects_to_filter:
        prjs = {}
        for filter_project in set(projects_to_filter):
            try:
                # NOTE(review): ProjectRegistry.projects supports
                # items(prefix=...) — a prefix-searchable mapping, not a
                # plain dict; confirm against the registry implementation.
                prjs.update({
                    x: y
                    for x, y in ProjectRegistry.projects.items(
                        prefix=filter_project)
                })
            except KeyError:
                pass
    if group:
        groupkeys = set(group)
        prjs = {
            name: cls
            for name, cls in prjs.items() if cls.GROUP in groupkeys
        }
    return {
        x: prjs[x]
        for x in prjs
        if prjs[x].DOMAIN != "debug" or x in projects_to_filter
    }
Populate the list of projects that belong to this experiment .
53,602
def cc(project, detect_project=False):
    """Return a clang that hides CFLAGS and LDFLAGS.

    Wraps the configured C compiler in a wrapper script placed in the
    current directory and returns a plumbum command for it.
    """
    from benchbuild.utils import cmd
    cc_name = str(CFG["compiler"]["c"])
    wrap_cc(cc_name, compiler(cc_name), project, detect_project=detect_project)
    return cmd["./{}".format(cc_name)]
Return a clang that hides CFLAGS and LDFLAGS .
53,603
def cxx(project, detect_project=False):
    """Return a clang++ that hides CFLAGS and LDFLAGS.

    Wraps the configured C++ compiler in a wrapper script placed in the
    current directory and returns a plumbum command for it.
    """
    from benchbuild.utils import cmd
    cxx_name = str(CFG["compiler"]["cxx"])
    wrap_cc(cxx_name, compiler(cxx_name), project,
            detect_project=detect_project)
    return cmd["./{name}".format(name=cxx_name)]
Return a clang ++ that hides CFLAGS and LDFLAGS .
53,604
def compiler(name):
    """Get a usable compiler as a plumbum command.

    The returned command runs with PATH and LD_LIBRARY_PATH taken from
    the benchbuild path info.
    """
    pinfo = __get_paths()
    _compiler = local[name]
    _compiler = _compiler.setenv(
        PATH=pinfo["path"], LD_LIBRARY_PATH=pinfo["ld_library_path"])
    return _compiler
Get a usable clang ++ plumbum command .
53,605
def strip_path_prefix(ipath, prefix):
    """Strip *prefix* from *ipath* when present; otherwise return *ipath*.

    A ``None`` prefix is a no-op.
    """
    if prefix is None:
        return ipath
    if not ipath.startswith(prefix):
        return ipath
    return ipath[len(prefix):]
Strip prefix from path .
53,606
def unpickle(pickle_file):
    """Unpickle a python object from the given path.

    Logs an error when the loaded object is falsy.
    """
    with open(pickle_file, "rb") as pickle_f:
        obj = dill.load(pickle_f)
    if not obj:
        LOG.error("Could not load python object from file")
    return obj
Unpickle a python object from the given path .
53,607
def wrap_cc(filepath,
            compiler,
            project,
            python=sys.executable,
            detect_project=False):
    """Substitute a compiler with a script that hides CFLAGS & LDFLAGS.

    Persists the compiler command and the project, renders the wrapper
    template into *filepath*, makes it executable and returns the wrapper
    as a plumbum command.
    """
    env = __create_jinja_env()
    template = env.get_template('run_compiler.py.inc')
    cc_fname = local.path(filepath).with_suffix(".benchbuild.cc", depth=0)
    cc_f = persist(compiler, filename=cc_fname)
    project_file = persist(project, suffix=".project")
    with open(filepath, 'w') as wrapper:
        wrapper.write(
            template.render(
                cc_f=cc_f,
                project_file=project_file,
                python=python,
                detect_project=detect_project))
    chmod("+x", filepath)
    LOG.debug("Placed wrapper in: %s for compiler %s", local.path(filepath),
              str(compiler))
    LOG.debug("Placed project in: %s", local.path(project_file))
    LOG.debug("Placed compiler command in: %s", local.path(cc_f))
    return local[filepath]
Substitute a compiler with a script that hides CFLAGS & LDFLAGS .
53,608
def persist(id_obj, filename=None, suffix=None):
    """Persist an object in the filesystem as a dill pickle.

    The default file name is derived from the object's ``id`` attribute
    (or Python ``id()``) plus *suffix*.  Returns the absolute path of the
    written file.
    """
    if suffix is None:
        suffix = ".pickle"
    if hasattr(id_obj, 'id'):
        ident = id_obj.id
    else:
        ident = str(id(id_obj))
    if filename is None:
        filename = "{obj_id}{suffix}".format(obj_id=ident, suffix=suffix)
    with open(filename, 'wb') as obj_file:
        dill.dump(id_obj, obj_file)
    return os.path.abspath(filename)
Persist an object in the filesystem .
53,609
def load(filename):
    """Load a pickled object from the filesystem.

    Returns None (and logs an error) when the file does not exist.
    """
    if not os.path.exists(filename):
        LOG.error("load object - File '%s' does not exist.", filename)
        return None
    obj = None
    with open(filename, 'rb') as obj_file:
        obj = dill.load(obj_file)
    return obj
Load a pickled obj from the filesystem .
53,610
def extract_adjacent_shapes(df_shapes, shape_i_column, extend=.5):
    """Generate the list of connections between adjacent polygon shapes
    based on geometrical closeness.

    Each shape is stretched by *extend* along x and along y; any shape
    whose bounding box overlaps a stretched box is considered adjacent.
    Returns a DataFrame with 'source'/'target' columns, one row per
    connection, deduplicated regardless of direction.
    """
    df_scaled_x = extend_shapes(df_shapes, 'x', extend)
    df_scaled_y = extend_shapes(df_shapes, 'y', extend)
    df_corners = df_shapes.groupby(shape_i_column).agg({
        'x': ['min', 'max'],
        'y': ['min', 'max']
    })
    row_list = []
    for shapeNumber in df_shapes[shape_i_column].drop_duplicates():
        # Bounding box of this shape stretched along x ...
        df_stretched = df_scaled_x[df_scaled_x[shape_i_column]
                                   .isin([shapeNumber])]
        xmin_x, xmax_x, ymin_x, ymax_x = (df_stretched.x.min(),
                                          df_stretched.x.max(),
                                          df_stretched.y.min(),
                                          df_stretched.y.max())
        # ... and stretched along y.
        df_stretched = df_scaled_y[df_scaled_y[shape_i_column]
                                   .isin([shapeNumber])]
        xmin_y, xmax_y, ymin_y, ymax_y = (df_stretched.x.min(),
                                          df_stretched.x.max(),
                                          df_stretched.y.min(),
                                          df_stretched.y.max())
        # Shapes whose corner boxes overlap either stretched box.
        adjacent = df_corners[
            ((df_corners.x['min'] < xmax_x) & (df_corners.x['max'] >= xmax_x) |
             (df_corners.x['min'] < xmin_x) & (df_corners.x['max'] >= xmin_x))
            & (df_corners.y['min'] < ymax_x) & (df_corners.y['max'] > ymin_x) |
            ((df_corners.y['min'] < ymax_y) & (df_corners.y['max'] >= ymax_y) |
             (df_corners.y['min'] < ymin_y) & (df_corners.y['max'] >= ymin_y))
            & ((df_corners.x['min'] < xmax_y) & (df_corners.x['max'] > xmin_y))
        ].index.values
        for shape in adjacent:
            temp_dict = {}
            reverse_dict = {}
            temp_dict['source'] = shapeNumber
            reverse_dict['source'] = shape
            temp_dict['target'] = shape
            reverse_dict['target'] = shapeNumber
            # Keep only one direction of each connection.
            if (reverse_dict not in row_list):
                row_list.append(temp_dict)
    df_connected = (pd.DataFrame(row_list)[['source', 'target']]
                    .sort_index(axis=1, ascending=True)
                    .sort_values(['source', 'target']))
    return df_connected
Generate list of connections between adjacent polygon shapes based on geometrical closeness .
53,611
def es_client(self):
    """Get an Elasticsearch client built from the campaign export settings."""
    es_conf = self.campaign.export.elasticsearch
    return Elasticsearch(self.hosts, **es_conf.connection_params)
Get Elasticsearch client
53,612
def index_name(self):
    """Get the Elasticsearch index name associated to the campaign.

    The configured format string is expanded with the report date and
    lower-cased (Elasticsearch index names must be lowercase).
    """
    pattern = self.campaign.export.elasticsearch.index_name
    expanded = pattern.format(date=self.report['date'])
    return expanded.lower()
Get Elasticsearch index name associated to the campaign
53,613
def remove_index(self):
    """Remove the Elasticsearch index associated to the campaign.

    The index is closed before being deleted.
    """
    self.index_client.close(self.index_name)
    self.index_client.delete(self.index_name)
Remove Elasticsearch index associated to the campaign
53,614
def extract_build_info(exe_path, elf_section=ELF_SECTION):
    """Extract the build information from a given executable.

    The build info is stored as JSON in an ELF section; it is dumped with
    objcopy into a temporary directory and parsed.  Returns an empty dict
    on any failure.
    """
    build_info = {}
    with mkdtemp() as tempd, pushd(tempd):
        proc = subprocess.Popen(
            [
                OBJCOPY,
                DUMP_SECTION,
                "{secn}={ofile}".format(secn=elf_section,
                                        ofile=BUILDINFO_FILE),
                exe_path,
            ],
            stderr=subprocess.PIPE,
        )
        proc.wait()
        errno = proc.returncode
        stderr = proc.stderr.read()
        if errno or len(stderr):
            LOGGER.warning('objcopy failed with errno %s.', errno)
            if len(stderr):
                LOGGER.warning('objcopy failed with following msg:\n%s',
                               stderr)
            return build_info
        with open(BUILDINFO_FILE) as build_info_f:
            try:
                build_info = json.load(build_info_f, object_hook=byteify)
            # NOTE(review): JSONDcdError looks like a module-level alias
            # for json's decode error — confirm it is defined as spelled.
            except JSONDcdError as jsde:
                LOGGER.warning('benchmark executable build is not valid json:')
                LOGGER.warning(jsde.msg)
                LOGGER.warning('build info section content:')
                LOGGER.warning(jsde.doc)
    return build_info
Extracts the build information from a given executable .
53,615
def no_args(**kwargs):
    """Return the uchroot command without any customizations.

    Only PATH and LD_LIBRARY_PATH are extended with the configured
    container prefixes.
    """
    from benchbuild.utils.cmd import uchroot as uchrt
    prefixes = CFG["container"]["prefixes"].value
    p_paths, p_libs = env(prefixes)
    uchrt = run.with_env_recursive(
        uchrt,
        LD_LIBRARY_PATH=path.list_to_path(p_libs),
        PATH=path.list_to_path(p_paths))
    return uchrt
Return the uchroot command without any customizations .
53,616
def with_mounts(*args, uchroot_cmd_fn=no_args, **kwargs):
    """Return a uchroot command with all configured mounts enabled.

    Mount options are appended to the base command produced by
    *uchroot_cmd_fn*; PATH and LD_LIBRARY_PATH are extended for both
    mounts and prefixes.
    """
    mounts = CFG["container"]["mounts"].value
    prefixes = CFG["container"]["prefixes"].value
    uchroot_opts, mounts = __mounts__("mnt", mounts)
    uchroot_cmd = uchroot_cmd_fn(**kwargs)
    uchroot_cmd = uchroot_cmd[uchroot_opts]
    uchroot_cmd = uchroot_cmd[args]
    paths, libs = env(mounts)
    prefix_paths, prefix_libs = env(prefixes)
    uchroot_cmd = run.with_env_recursive(
        uchroot_cmd,
        LD_LIBRARY_PATH=path.list_to_path(libs + prefix_libs),
        PATH=path.list_to_path(paths + prefix_paths))
    return uchroot_cmd
Return a uchroot command with all mounts enabled .
53,617
def clean_env(uchroot_cmd, varnames):
    """Return a uchroot cmd that runs inside a filtered environment.

    Uses ``env -u`` inside the chroot to unset every variable listed in
    *varnames*.
    """
    env_cmd = uchroot_cmd["/usr/bin/env"]
    return env_cmd["-u", ",".join(varnames)]
Returns a uchroot cmd that runs inside a filtered environment .
53,618
def mounts(prefix, __mounts):
    """Compute the mountpoints of the current user.

    Only plain (non-dict) mount entries get a numbered mountpoint under
    *prefix*; dict entries are skipped and do not consume an index.
    """
    mntpoints = []
    index = 0
    for mount in __mounts:
        if isinstance(mount, dict):
            continue
        mntpoints.append("{0}/{1}".format(prefix, str(index)))
        index += 1
    return mntpoints
Compute the mountpoints of the current user .
53,619
def env(mounts):
    """Compute the environment of the change root for the user.

    Returns:
        (paths, ld_libs): bin/sbin/root directories and lib/lib64
        directories below each mount, as plumbum paths.
    """
    f_mounts = [m.strip("/") for m in mounts]
    root = local.path("/")
    ld_libs = [root / m / "lib" for m in f_mounts]
    ld_libs.extend([root / m / "lib64" for m in f_mounts])
    paths = [root / m / "bin" for m in f_mounts]
    paths.extend([root / m / "sbin" for m in f_mounts])
    paths.extend([root / m for m in f_mounts])
    return paths, ld_libs
Compute the environment of the change root for the user .
53,620
def get_abbreviations(kb):
    """Build a mapping of abbreviation keys to abbreviations.

    Keys look like ``<author URN>$$n<i>``; authors without a URN are
    skipped.  (Original note: for the sake of profiling.)
    """
    abbreviations = {}
    for author in kb.get_authors():
        if author.get_urn() is None:
            continue
        for i, abbrev in enumerate(author.get_abbreviations()):
            abbreviations["%s$$n%i" % (author.get_urn(), i)] = abbrev
    return abbreviations
For the sake of profiling .
53,621
def get_authors(self):
    """Return the authors in the Knowledge Base as a list of F10_Person."""
    Person = self._session.get_class(surf.ns.EFRBROO['F10_Person'])
    return list(Person.all())
Returns the authors in the Knowledge Base .
53,622
def get_works(self):
    """Return the works in the Knowledge Base as a list of F1_Work.

    NOTE(review): the original docstring said "the author's works", but
    this queries all F1_Work instances — confirm the intended scope.
    """
    Work = self._session.get_class(surf.ns.EFRBROO['F1_Work'])
    return list(Work.all())
Return the author's works.
53,623
def get_author_label(self, urn):
    """Get the label for the author identified by the CTS URN.

    Language preference order: English, then language-less, then Latin;
    within a language the shortest name wins.  Returns None when no name
    is available.

    The original used ``assert`` plus broad ``except Exception`` for
    control flow (which also swallowed real errors and changed behavior
    under ``-O``); this is a straight preference loop instead.
    """
    author = self.get_resource_by_urn(urn)
    names = author.get_names()
    for lang in ("en", None, "la"):
        candidates = sorted(
            (name[1] for name in names if name[0] == lang), key=len)
        if candidates:
            return candidates[0]
    return None
Get the label corresponding to the author identified by the CTS URN .
53,624
def get_statistics(self):
    """Gather basic stats about the Knowledge Base and its contents.

    Returns a dict of counters for authors, names, abbreviations, works,
    titles and opus-maximum entries.
    """
    statistics = {
        "number_authors": 0,
        "number_author_names": 0,
        "number_author_abbreviations": 0,
        "number_works": 0,
        "number_work_titles": 0,
        "number_title_abbreviations": 0,
        "number_opus_maximum": 0,
    }
    for author in self.get_authors():
        # NOTE(review): authors without a URN contribute nothing —
        # confirm this is intended rather than only skipping opus-maximum.
        if author.get_urn() is not None:
            opmax = True if self.get_opus_maximum_of(
                author.get_urn()) is not None else False
            if opmax:
                statistics["number_opus_maximum"] += 1
            statistics["number_authors"] += 1
            statistics["number_author_names"] += len(author.get_names())
            statistics["number_author_abbreviations"] += len(
                author.get_abbreviations())
            for work in author.get_works():
                statistics["number_works"] += 1
                statistics["number_work_titles"] += len(work.get_titles())
                statistics["number_title_abbreviations"] += len(
                    work.get_abbreviations())
    return statistics
Gather basic stats about the Knowledge Base and its contents .
53,625
def to_json(self):
    """Serialise the content of the KnowledgeBase as JSON.

    The result contains the statistics dict plus one entry per author
    (each author serialises itself).
    """
    return json.dumps(
        {
            "statistics": self.get_statistics(),
            "authors": [
                json.loads(author.to_json())
                for author in self.get_authors()
            ]
        },
        indent=2)
Serialises the content of the KnowledgeBase as JSON .
53,626
def write_yaml_report(func):
    """Decorator used in campaign node post-processing.

    Wraps *func*, times it, normalizes its result into a report dict
    (sequences/generators become ``{'children': [...]}``), stamps elapsed
    time and date, and dumps it to YAML_REPORT_FILE unless the caller
    passed "no_exec".
    """
    @wraps(func)
    def _wrapper(*args, **kwargs):
        now = datetime.datetime.now()
        with Timer() as timer:
            data = func(*args, **kwargs)
        if isinstance(data, (SEQUENCES, types.GeneratorType)):
            report = dict(children=list(map(str, data)))
        elif isinstance(data, MAPPINGS):
            report = data
        else:
            # Interpolate the type into the message (the original passed
            # it as a second Exception argument, leaving '%s' literal).
            raise Exception('Unexpected data type: %s' % type(data))
        report['elapsed'] = timer.elapsed
        report['date'] = now.isoformat()
        if "no_exec" not in kwargs and report is not None:
            with open(YAML_REPORT_FILE, 'w') as ostr:
                yaml.dump(report, ostr, default_flow_style=False)
        return report
    return _wrapper
Decorator used in campaign node post - processing
53,627
def traverse(self):
    """Enumerate children and build associated objects.

    Yields (child, built_object) pairs; the builder runs with the child
    directory as current working directory.
    """
    builder = self.child_builder
    for child in self._children:
        with pushd(str(child)):
            yield child, builder(child)
Enumerate children and build associated objects
53,628
def pad_position_l(self, i):
    """Determine the position of the i-th pad in the length direction.

    Assumes equally spaced pads; raises ModelError for an out-of-range
    index.
    """
    if i >= self.n_pads_l:
        raise ModelError("pad index out-of-bounds")
    spacing = (self.length - self.pad_length) / (self.n_pads_l - 1)
    return spacing * i + self.pad_length / 2
Determines the position of the ith pad in the length direction . Assumes equally spaced pads .
53,629
def pad_position_w(self, i):
    """Determine the position of the i-th pad in the width direction.

    Assumes equally spaced pads; raises ModelError for an out-of-range
    index.
    """
    if i >= self.n_pads_w:
        raise ModelError("pad index out-of-bounds")
    spacing = (self.width - self.pad_width) / (self.n_pads_w - 1)
    return spacing * i + self.pad_width / 2
Determines the position of the ith pad in the width direction . Assumes equally spaced pads .
53,630
def add_to_batch(self, batch):
    """Add every path to *batch*.

    Paths are added as GL_TRIANGLES, so the batch aggregates them into a
    single OpenGL primitive.
    """
    for svg_path in self.paths.values():
        svg_path.add_to_batch(batch)
Adds paths to the given batch object . They are all added as GL_TRIANGLES so the batch will aggregate them all into a single OpenGL primitive .
53,631
def alphanum_key(s):
    """Turn a string into a list of string and number chunks.

    e.g. "z23a" -> ["z", 23, "a"], so lists of such keys sort in natural
    (human) order.  _RE_INT splits on runs of digits; digit chunks become
    ints.
    """
    return [int(c) if c.isdigit() else c for c in _RE_INT.split(s)]
Turn a string into a list of string and number chunks .
53,632
def discover():
    """Import all experiments listed in the experiments plugin config.

    Import errors are logged, not raised, so one broken plugin does not
    prevent the others from loading.
    """
    if CFG["plugins"]["autoload"]:
        experiment_plugins = CFG["plugins"]["experiments"].value
        for exp_plugin in experiment_plugins:
            try:
                importlib.import_module(exp_plugin)
            except ImportError as import_error:
                LOG.error("Could not find '%s'", exp_plugin)
                LOG.error("ImportError: %s", import_error.msg)
Import all experiments listed in PLUGINS_EXPERIMENTS .
53,633
def print_projects(projects=None):
    """Print a list of projects registered for that experiment."""
    grouped_by = {}
    if not projects:
        print("Your selection didn't include any projects for this experiment.")
        return
    for name in projects:
        prj = projects[name]
        if prj.GROUP not in grouped_by:
            grouped_by[prj.GROUP] = []
        grouped_by[prj.GROUP].append("{name}/{group}".format(
            name=prj.NAME, group=prj.GROUP))
    for name in grouped_by:
        print("group: {0}".format(name))
        group_projects = sorted(grouped_by[name])
        for prj in group_projects:
            # NOTE(review): prj here is "NAME/GROUP"; this lookup implies
            # the projects mapping is keyed by that compound id — confirm.
            prj_cls = projects[prj]
            version_str = None
            if hasattr(prj_cls, 'versions'):
                version_str = ", ".join(prj_cls.versions())
            project_id = "{0}/{1}".format(prj_cls.NAME, prj_cls.GROUP)
            project_str = " name: {id:<32} version: {version:<24} source: {src}".format(
                id=str(project_id),
                version=str(prj_cls.VERSION),
                src=str(prj_cls.SRC_FILE))
            print(project_str)
            if prj_cls.__doc__:
                docstr = prj_cls.__doc__.strip("\n ")
                print(" description: {desc}".format(desc=docstr))
            if version_str:
                print(" versions: {versions}".format(versions=version_str))
        print()
Print a list of projects registered for that experiment .
53,634
def _commandline(repositories,
                 port=8000,
                 host="127.0.0.1",
                 debug=False,
                 cache=None,
                 cache_path="./cache",
                 redis=None):
    """Run a CTS API from the command line.

    Args:
        repositories: resource repositories handed to the resolver.
        port, host, debug: Flask run parameters.
        cache: "redis", "filesystem" or None (NullCache).
        cache_path: directory for the filesystem cache.
        redis: redis connection info for RedisCache.
    """
    # NOTE(review): nautilus_cache and cache_type are computed but never
    # used below — confirm whether they should be passed to FlaskNautilus.
    if cache == "redis":
        nautilus_cache = RedisCache(redis)
        cache_type = "redis"
    elif cache == "filesystem":
        nautilus_cache = FileSystemCache(cache_path)
        cache_type = "simple"
    else:
        nautilus_cache = NullCache()
        cache_type = "simple"
    app = Flask("Nautilus")
    if debug:
        app.logger.setLevel(logging.INFO)
    resolver = NautilusCtsResolver(resource=repositories)
    nautilus = FlaskNautilus(app=app, resolver=resolver)
    nautilus.resolver.parse()
    app.run(debug=debug, port=port, host=host)
Run a CTS API from command line .
53,635
def render(template=None, ostr=None, **kwargs):
    """Generate a report from a campaign.

    Renders *template* (default DEFAULT_TEMPLATE) with the given context
    into *ostr* (default stdout); registers the 'texscape' filter first.
    """
    jinja_environment.filters['texscape'] = tex_escape
    template = template or DEFAULT_TEMPLATE
    ostr = ostr or sys.stdout
    jinja_template = jinja_environment.get_template(template)
    jinja_template.stream(**kwargs).dump(ostr)
Generate report from a campaign
53,636
def load_json(ffp, custom=None, verbose=0):
    """Given a json file, create a dictionary of sfsi objects.

    Args:
        ffp: path to the JSON file.
        custom: optional custom-object mapping forwarded to the converter.
        verbose: verbosity level.
    """
    # Use a context manager so the file handle is closed promptly — the
    # original leaked it via json.load(open(ffp)).
    with open(ffp) as json_f:
        data = json.load(json_f)
    return ecp_dict_to_objects(data, custom, verbose=verbose)
Given a json file it creates a dictionary of sfsi objects
53,637
def loads_json(p_str, custom=None, meta=False, verbose=0):
    """Given a json string, create a dictionary of sfsi objects.

    When *meta* is true, also return a dict of the top-level metadata
    entries (everything except "models").
    """
    data = json.loads(p_str)
    objects = ecp_dict_to_objects(data, custom, verbose=verbose)
    if not meta:
        return objects
    metadata = {key: data[key] for key in data if key != "models"}
    return objects, metadata
Given a json string it creates a dictionary of sfsi objects
53,638
def migrate_ecp(in_ffp, out_ffp):
    """Migrate an ECP file to the current version of sfsimodels.

    Loads objects and metadata from *in_ffp*, rebuilds an Output and
    writes it as JSON to *out_ffp*.
    """
    objs, meta_data = load_json_and_meta(in_ffp)
    ecp_output = Output()
    for m_type in objs:
        for instance in objs[m_type]:
            ecp_output.add_to_dict(objs[m_type][instance])
    ecp_output.name = meta_data["name"]
    ecp_output.units = meta_data["units"]
    ecp_output.comments = meta_data["comments"]
    ecp_output.sfsimodels_version = meta_data["sfsimodels_version"]
    # NOTE(review): json.dumps expects a bool for skipkeys; the list is
    # merely truthy here — confirm intent before changing it.
    p_str = json.dumps(ecp_output.to_dict(), skipkeys=["__repr__"], indent=4)
    # Context manager replaces the original unclosed open()/write()/close().
    with open(out_ffp, "w") as out_f:
        out_f.write(p_str)
Migrates an ECP file to the current version of sfsimodels.
53,639
def add_to_dict(self, an_object, extras=None):
    """Convert a model to JSON-serialisable output and store it.

    The object must expose an ``id`` plus either ``base_type`` or
    ``type``; it may serialise itself via ``add_to_dict`` or ``to_dict``.
    Raises ModelError otherwise.
    """
    # NOTE(review): *extras* is accepted but never used — confirm intent.
    if an_object.id is None:
        raise ModelError("id must be set on object before adding to output.")
    if hasattr(an_object, "base_type"):
        mtype = an_object.base_type
    elif hasattr(an_object, "type"):
        # Unknown concrete types are grouped under "custom_type".
        if an_object.type in standard_types:
            mtype = an_object.type
        else:
            mtype = "custom_type"
    else:
        raise ModelError("Object does not have attribute 'base_type' or 'type', cannot add to output.")
    if mtype not in self.unordered_models:
        self.unordered_models[mtype] = OrderedDict()
    if hasattr(an_object, "add_to_dict"):
        # The object knows how to insert itself.
        an_object.add_to_dict(self.unordered_models)
    elif hasattr(an_object, "to_dict"):
        self.unordered_models[mtype][an_object.unique_hash] = an_object.to_dict(compression=self.compression)
    else:
        raise ModelError("Object does not have method 'to_dict', cannot add to output.")
Convert models to json serialisable output
53,640
def add_to_output(self, mtype, m_id, serialisable_dict):
    """Add extra objects or dictionaries to the output file.

    Useful for content that does not conform to the standard objects.
    """
    self.unordered_models.setdefault(mtype, OrderedDict())[m_id] = \
        serialisable_dict
Can add additional objects or dictionaries to the output file that don't conform to standard objects.
53,641
def available_cpu_count() -> int:
    """Get the number of available CPUs.

    Tries, in order: the cpuset affinity mask from /proc/self/status,
    psutil, sysconf, and /proc/cpuinfo.  Raises when every source fails.
    """
    try:
        # cpuset may restrict the CPUs this process is allowed to use;
        # the original leaked this file handle (bare open() in-line).
        with open('/proc/self/status') as status_f:
            match = re.search(r'(?m)^Cpus_allowed:\s*(.*)$', status_f.read())
        if match:
            res = bin(int(match.group(1).replace(',', ''), 16)).count('1')
            if res > 0:
                return res
    except IOError:
        LOG.debug("Could not get the number of allowed CPUs")

    try:
        import psutil
        return psutil.cpu_count()
    except (ImportError, AttributeError):
        LOG.debug("Could not get the number of allowed CPUs")

    try:
        res = int(os.sysconf('SC_NPROCESSORS_ONLN'))
        if res > 0:
            return res
    except (AttributeError, ValueError):
        LOG.debug("Could not get the number of allowed CPUs")

    try:
        # Also leaked by the original.
        with open('/proc/cpuinfo') as cpuinfo_f:
            res = cpuinfo_f.read().count('processor\t:')
        if res > 0:
            return res
    except IOError:
        LOG.debug("Could not get the number of allowed CPUs")

    raise Exception('Can not determine number of CPUs on this system')
Get the number of available CPUs .
53,642
def escape_yaml(raw_str: str) -> str:
    """Shell-escape a YAML input string.

    Strings containing none of '!', '{', '[' are returned untouched.
    Otherwise the string is wrapped in double quotes, or in single quotes
    when its interior already contains double quotes.  A string already
    wrapped in double quotes is returned as-is.
    """
    if not any(char in raw_str for char in ('!', '{', '[')):
        return raw_str
    if '"' in raw_str and '"' not in raw_str[1:-1]:
        # Already fully wrapped in double quotes.
        return raw_str
    quote = "'" if '"' in raw_str[1:-1] else '"'
    return quote + raw_str + quote
Shell - Escape a yaml input string .
53,643
def to_yaml(value) -> str:
    """Convert a given value to a YAML string (flow style, single line).

    The dumper is disposed even when serialisation fails.
    """
    stream = yaml.io.StringIO()
    dumper = ConfigDumper(stream, default_flow_style=True, width=sys.maxsize)
    val = None
    try:
        dumper.open()
        dumper.represent(value)
        val = stream.getvalue().strip()
        dumper.close()
    finally:
        dumper.dispose()
    return val
Convert a given value to a YAML string .
53,644
def to_env_var(env_var: str, value) -> str:
    """Create an environment variable assignment string NAME=value.

    The value is YAML-serialised and shell-escaped first.
    """
    yaml_value = to_yaml(value)
    return "%s=%s" % (env_var, escape_yaml(yaml_value))
Create an environment variable from a name and a value .
53,645
def find_config(test_file=None, defaults=None, root=os.curdir):
    """Find the path to the default config file.

    Walks from *root* upwards looking for *test_file* (or each of
    *defaults* in turn) and returns the first existing path, else None.
    """
    if defaults is None:
        defaults = [".benchbuild.yml", ".benchbuild.yaml"]

    def walk_rec(name, root):
        # The original's first parameter was dead (immediately
        # overwritten) and the recursion passed a path as it; pass the
        # file name explicitly instead.  Recursion stops when the parent
        # path normalises to the same path (filesystem root).
        candidate = local.path(root) / name
        if candidate.exists():
            return candidate
        new_root = local.path(root) / os.pardir
        return walk_rec(name, new_root) if new_root != root else None

    if test_file is not None:
        return walk_rec(test_file, root)

    for test_file in defaults:
        ret = walk_rec(test_file, root)
        if ret is not None:
            return ret
Find the path to the default config file .
53,646
def setup_config(cfg, config_filenames=None, env_var_name=None):
    """Initialize the given configuration object.

    The config file is looked up via the environment variable (default
    BB_CONFIG_FILE) or by searching for the default file names; finally
    the config is overridden from the environment.
    """
    if env_var_name is None:
        env_var_name = "BB_CONFIG_FILE"
    config_path = os.getenv(env_var_name, None)
    if not config_path:
        config_path = find_config(defaults=config_filenames)
    if config_path:
        cfg.load(config_path)
        cfg["config_file"] = os.path.abspath(config_path)
    cfg.init_from_env()
This will initialize the given configuration object .
53,647
def upgrade(cfg):
    """Provide forward migration for configuration files.

    Detects the old split db settings and synthesizes the new
    connect_string from them; the result is not persisted automatically.
    """
    db_node = cfg["db"]
    old_db_elems = ["host", "name", "port", "pass", "user", "dialect"]
    has_old_db_elems = [x in db_node for x in old_db_elems]
    if any(has_old_db_elems):
        print("Old database configuration found. "
              "Converting to new connect_string. "
              "This will *not* be stored in the configuration automatically.")
        cfg["db"]["connect_string"] = \
            "{dialect}://{user}:{password}@{host}:{port}/{name}".format(
                dialect=cfg["db"]["dialect"]["value"],
                user=cfg["db"]["user"]["value"],
                password=cfg["db"]["pass"]["value"],
                host=cfg["db"]["host"]["value"],
                port=cfg["db"]["port"]["value"],
                name=cfg["db"]["name"]["value"])
Provide forward migration for configuration files .
53,648
def uuid_constructor(loader, node):
    """Construct a uuid.UUID object from a scalar YAML node."""
    scalar = loader.construct_scalar(node)
    return uuid.UUID(scalar)
Construct a uuid . UUID object form a scalar YAML node .
53,649
def uuid_add_implicit_resolver(Loader=ConfigLoader, Dumper=ConfigDumper):
    """Attach an implicit pattern resolver for UUID objects.

    Any scalar matching the canonical UUID format gets tagged '!uuid'.
    """
    uuid_regex = r'^\b[a-f0-9]{8}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{12}$'
    pattern = re.compile(uuid_regex)
    yaml.add_implicit_resolver('!uuid', pattern, Loader=Loader, Dumper=Dumper)
Attach an implicit pattern resolver for UUID objects .
53,650
def store(self, config_file):
    """Store the configuration dictionary to a file.

    Works on a deep copy with export-only entries filtered out, so the
    live configuration is not modified.
    """
    selfcopy = copy.deepcopy(self)
    selfcopy.filter_exports()
    with open(config_file, 'w') as outf:
        yaml.dump(
            selfcopy.node,
            outf,
            width=80,
            indent=4,
            default_flow_style=False,
            Dumper=ConfigDumper)
Store the configuration dictionary to a file .
53,651
def load(self, _from):
    """Load the configuration dictionary from file.

    Values from the file are merged recursively into the current node;
    keys unknown to the current structure are logged and skipped.
    """
    def load_rec(inode, config):
        # Recursive merge of the loaded mapping into the live node.
        for k in config:
            if isinstance(config[k], dict) and k not in ['value', 'default']:
                if k in inode:
                    load_rec(inode[k], config[k])
                else:
                    LOG.debug("+ config element: '%s'", k)
            else:
                inode[k] = config[k]

    with open(_from, 'r') as infile:
        obj = yaml.load(infile, Loader=ConfigLoader)
        upgrade(obj)
        load_rec(self.node, obj)
        self['config_file'] = os.path.abspath(_from)
Load the configuration dictionary from file .
53,652
def init_from_env(self):
    """Initialize this node from the environment.

    Leaf nodes (those with a 'default' entry) take their value from the
    matching environment variable when set; inner nodes recurse into
    their children.
    """
    if 'default' in self.node:
        env_var = self.__to_env_var__().upper()
        if self.has_value():
            env_val = self.node['value']
        else:
            env_val = self.node['default']
        env_val = os.getenv(env_var, to_yaml(env_val))
        try:
            self.node['value'] = yaml.load(str(env_val), Loader=ConfigLoader)
        except ValueError:
            # Not valid YAML: keep the raw string.
            self.node['value'] = env_val
    else:
        if isinstance(self.node, dict):
            for k in self.node:
                self[k].init_from_env()
Initialize this node from environment .
53,653
def value(self):
    """Return the node value if we're a leaf node, otherwise self."""
    node = self.node
    if 'value' in node:
        leaf = node['value']
        # Give the value a chance to validate itself.
        if hasattr(leaf, 'validate'):
            leaf.validate()
        return leaf
    return self
Return the node value if we're a leaf node.
53,654
def to_env_dict(self):
    """Convert the configuration object to a flat dictionary.

    Leaf nodes map their environment-variable name to value (or default);
    inner nodes merge their children's dictionaries.
    """
    entries = {}
    if self.has_value():
        return {self.__to_env_var__(): self.node['value']}
    if self.has_default():
        return {self.__to_env_var__(): self.node['default']}
    for k in self.node:
        entries.update(self[k].to_env_dict())
    return entries
Convert configuration object to a flat dictionary .
53,655
def rename_keys(record: Mapping, key_map: Mapping) -> dict:
    """New record with the same keys, renamed where found in key_map."""
    return {key_map.get(k, k): v for k, v in record.items()}
New record with same keys or renamed keys if key found in key_map .
53,656
def replace_keys(record: Mapping, key_map: Mapping) -> dict:
    """New record with renamed keys, keeping only keys found in key_map."""
    renamed = {}
    for key, val in record.items():
        if key in key_map:
            renamed[key_map[key]] = val
    return renamed
New record with renamed keys including keys only found in key_map .
53,657
def inject_nulls(data: Mapping, field_names) -> dict:
    """Insert None as value for fields missing from *data*."""
    return {field: data.get(field) for field in field_names}
Insert None as value for missing fields .
53,658
def read_struct(fstream):
    """Read a likwid STRUCT from the text stream.

    Returns a dict mapping the first column of each body line to the
    remaining columns, or None when the next line is not a STRUCT header.
    """
    header = fstream.readline().strip().split(",")
    header = [part for part in header if part is not None]
    if len(header) < 3:
        return None
    if header[0] != "STRUCT":
        return None
    # header[2] carries the number of body lines that follow.
    num_lines = int(header[2].strip())
    struct = {}
    for _ in range(num_lines):
        cols = fetch_cols(fstream)
        struct[cols[0]] = cols[1:]
    return struct
Read a likwid struct from the text stream .
53,659
def read_table(fstream):
    """Read a likwid TABLE from the text stream.

    Returns a dict of rows keyed by first column (header row included),
    or None.  When the line is long enough but not a TABLE header the
    stream position is restored for the caller.
    """
    pos = fstream.tell()
    header = fstream.readline().strip().split(",")
    header = [part for part in header if part is not None]
    if len(header) < 4:
        return None
    if header[0] != "TABLE":
        # Not a table: rewind so the caller can re-read the line.
        fstream.seek(pos)
        return None
    # header[3] carries the number of body lines that follow.
    num_lines = int(header[3].strip())
    table = {}
    cols = fetch_cols(fstream)
    table[cols[0]] = cols[1:]
    for _ in range(num_lines):
        cols = fetch_cols(fstream)
        table[cols[0]] = cols[1:]
    return table
Read a likwid table info from the text stream .
53,660
def read_structs(fstream):
    """Yield every struct from likwid's file stream until none is left."""
    yield from iter(lambda: read_struct(fstream), None)
Read all structs from likwid's file stream.
53,661
def read_tables(fstream):
    """Yield every table from likwid's file stream until none is left."""
    yield from iter(lambda: read_table(fstream), None)
Read all tables from likwid's file stream.
53,662
def get_measurements(region, core_info, data, extra_offset=0):
    """Get the complete measurement info from likwid's region info.

    Returns a list of (region, metric, core, value) tuples, skipping
    bookkeeping keys, empty cores and empty value slots.
    """
    skip_keys = {"1", "Region Info", "Event", "Metric", "CPU clock"}
    cores = len([core for core in core_info if core])
    measurements = []
    for metric, slot in data.items():
        if metric in skip_keys:
            continue
        for i in range(cores):
            core = core_info[i]
            idx = extra_offset + i
            if core and slot[idx]:
                measurements.append((region, metric, core, slot[idx]))
    return measurements
Get the complete measurement info from likwid s region info .
53,663
def perfcounters(infile):
    """Get a complete list of all measurements from a likwid output file."""
    measurements = []
    with open(infile, 'r') as in_file:
        read_struct(in_file)  # skip the leading header struct
        for region_struct in read_structs(in_file):
            region = region_struct["1"][1]
            core_info = region_struct["Region Info"]
            measurements += get_measurements(region, core_info, region_struct)
            # NOTE(review): tables are attributed to the enclosing region
            # here — confirm this nesting against the likwid file layout.
            for table_struct in read_tables(in_file):
                core_info = None
                if "Event" in table_struct:
                    offset = 1
                    core_info = table_struct["Event"][offset:]
                    measurements += get_measurements(region, core_info,
                                                     table_struct, offset)
                elif "Metric" in table_struct:
                    core_info = table_struct["Metric"]
                    measurements += get_measurements(region, core_info,
                                                     table_struct)
    return measurements
Get a complete list of all measurements .
53,664
def cli_common(doc, **kwargs):
    """Program initialization for all provided executables.

    Parses CLI arguments with docopt, configures logging, loads
    components, and forces the matplotlib 'PS' backend when available.
    """
    arguments = docopt(doc, version='hpcbench ' + __version__, **kwargs)
    setup_logger(arguments['-v'], arguments['--log'])
    load_components()
    try:
        import matplotlib
    except ImportError:
        pass
    else:
        matplotlib.use('PS')
    return arguments
Program initialization for all provided executables
53,665
def compute_shape_centers(df_shapes, shape_i_column, inplace=False):
    """Compute the center point of each polygon shape and the offset of
    each vertex to the corresponding polygon center point.

    Raises:
        KeyError: when shape_i_column is not a single column name.
    """
    # NOTE(review): the check requires a *bytes* column name — looks like
    # Python 2 heritage; on Python 3 str column names would be rejected.
    if not isinstance(shape_i_column, bytes):
        raise KeyError('Shape index must be a single column.')
    if not inplace:
        df_shapes = df_shapes.copy()
    df_bounding_boxes = get_bounding_boxes(df_shapes, shape_i_column)
    path_centers = (df_bounding_boxes[['x', 'y']] +
                    .5 * df_bounding_boxes[['width', 'height']].values)
    df_shapes['x_center'] = path_centers.x[df_shapes[shape_i_column]].values
    df_shapes['y_center'] = path_centers.y[df_shapes[shape_i_column]].values
    center_offset = (df_shapes[['x', 'y']] -
                     df_shapes[['x_center', 'y_center']].values)
    return df_shapes.join(center_offset, rsuffix='_center_offset')
Compute the center point of each polygon shape and the offset of each vertex to the corresponding polygon center point .
53,666
def refsDecl(self):
    """RefsDecl expression of the citation scheme.

    Returns the first TEI replacementPattern found in the graph as a str,
    or None when the graph has none.
    """
    for refsDecl in self.graph.objects(self.asNode(),
                                       RDF_NAMESPACES.TEI.replacementPattern):
        return str(refsDecl)
ResfDecl expression of the citation scheme
53,667
def initializeProfile(self):
    """Initializes the Component Profile.

    Parses the profile file (``self.__file``), validating the mandatory
    attributes and populating the profile fields.

    :return: True on success.
    :raise foundations.exceptions.FileStructureParsingError: when the
        file has no sections or a mandatory attribute is missing.
    """
    LOGGER.debug("> Building '{0}' profile.".format(self.__file))
    sections_file_parser = SectionsFileParser(self.__file)
    sections_file_parser.parse()
    if sections_file_parser.sections:
        # Error factory for missing mandatory attributes.
        fileStructureParsingError = lambda attribute: foundations.exceptions.FileStructureParsingError(
            "{0} | No '{1}' attribute found, '{2}' file structure seems invalid!".format(
                self.__class__.__name__, attribute, self.__file))
        self.__directory = os.path.dirname(self.__file)
        # "Name" is mandatory.
        self.__name = sections_file_parser.get_value("Name", "Component", default=None)
        if self.__name is None:
            raise fileStructureParsingError("Name")
        # "Title" falls back to the name when absent.
        self.__title = sections_file_parser.get_value("Title", "Component", default=None)
        if self.__title is None:
            self.__title = self.__name
        # "Module" is mandatory.
        self.__package = sections_file_parser.get_value("Module", "Component", default=None)
        if self.__package is None:
            raise fileStructureParsingError("Module")
        # "Object" is mandatory.
        self.__attribute = sections_file_parser.get_value("Object", "Component", default=None)
        if self.__attribute is None:
            raise fileStructureParsingError("Object")
        # "Require" is optional: a "|"-separated dependency list.
        self.__require = sections_file_parser.get_value("Require", "Component", default=None)
        self.__require = list() if self.__require is None else self.__require.split("|")
        # "Version" is mandatory.
        self.__version = sections_file_parser.get_value("Version", "Component", default=None)
        if self.__version is None:
            raise fileStructureParsingError("Version")
        # Optional informational attributes.
        self.__author = sections_file_parser.get_value("Author", "Informations", default=None)
        self.__email = sections_file_parser.get_value("Email", "Informations", default=None)
        self.__url = sections_file_parser.get_value("Url", "Informations", default=None)
        self.__description = sections_file_parser.get_value("Description", "Informations", default=None)
        return True
    else:
        raise foundations.exceptions.FileStructureParsingError(
            "{0} | No sections found, '{1}' file structure seems invalid!".format(
                self.__class__.__name__, self.__file))
Initializes the Component Profile .
53,668
def register_component(self, path):
    """Registers a Component using given path.

    :param path: path of the Component profile file.
    :return: True when the Component registered successfully.
    :raise manager.exceptions.ComponentModuleError: when the profile
        has no associated module on disk.
    :raise manager.exceptions.ComponentProfileError: when the profile
        file cannot be parsed.
    """
    component = foundations.strings.get_splitext_basename(path)
    LOGGER.debug("> Current Component: '{0}'.".format(component))
    profile = Profile(file=path)
    if profile.initializeProfile():
        # The package must exist as a module file, a package directory,
        # or be the profile directory itself.
        if os.path.isfile(os.path.join(profile.directory, profile.package) + ".py") or \
                os.path.isdir(os.path.join(profile.directory, profile.package)) or \
                os.path.basename(profile.directory) == profile.package:
            self.__components[profile.name] = profile
            return True
        else:
            raise manager.exceptions.ComponentModuleError(
                "{0} | '{1}' has no associated module and has been rejected!".format(
                    self.__class__.__name__, component))
    else:
        raise manager.exceptions.ComponentProfileError(
            "{0} | '{1}' is not a valid Component and has been rejected!".format(
                self.__class__.__name__, component))
Registers a Component using given path .
53,669
def register_components(self):
    """Registers the Components found under ``self.paths``.

    Walks every registered path for files matching the manager
    extension and registers each one.

    :return: True when every Component registered successfully.
    :raise manager.exceptions.ComponentRegistrationError: listing the
        files that failed to register.
    """
    failed = []
    extension_patterns = ("\.{0}$".format(self.__extension),)
    for path in self.paths:
        candidates = foundations.walkers.files_walker(
            path, extension_patterns, ("\._",))
        for candidate in candidates:
            if not self.register_component(candidate):
                failed.append(candidate)
    if failed:
        raise manager.exceptions.ComponentRegistrationError(
            "{0} | '{1}' Components failed to register!".format(
                self.__class__.__name__, ", ".join(failed)))
    return True
Registers the Components .
53,670
def instantiate_component(self, component, callback=None):
    """Instantiates given Component.

    Imports the Component package, looks up its declared attribute and,
    when it is a class, stores an instance on the profile together with
    the category matching the class's bases.

    :param component: name of a registered Component.
    :param callback: optional callable invoked with the profile before
        instantiation.
    :return: True on success.
    :raise manager.exceptions.ComponentInterfaceError: when the module
        exposes no usable interface class (the Component is dropped).
    """
    profile = self.__components[component]
    callback and callback(profile)
    LOGGER.debug("> Current Component: '{0}'.".format(component))
    # Resolve the directory that must be on sys.path for the import.
    if os.path.isfile(os.path.join(profile.directory, profile.package) + ".py") or \
            os.path.isdir(os.path.join(profile.directory, profile.package)):
        path = profile.directory
    elif os.path.basename(profile.directory) == profile.package:
        path = os.path.join(profile.directory, "..")
    not path in sys.path and sys.path.append(path)
    profile.module = __import__(profile.package)
    # Fetch the declared attribute from the imported module, if present.
    object = profile.attribute in profile.module.__dict__ and getattr(profile.module, profile.attribute) or None
    if object and inspect.isclass(object):
        instance = object(name=profile.name)
        # Tag the profile with the category matching the class's bases.
        # NOTE: ``iteritems`` — this module targets Python 2.
        for category, type in self.__categories.iteritems():
            if type.__name__ in (base.__name__ for base in object.__bases__):
                profile.category = category
                profile.interface = instance
        LOGGER.info("{0} | '{1}' Component has been instantiated!".format(
            self.__class__.__name__, profile.name))
        return True
    else:
        # Invalid interface: drop the Component before raising.
        del (self.__components[component])
        raise manager.exceptions.ComponentInterfaceError(
            "{0} | '{1}' Component has no Interface and has been rejected!".format(
                self.__class__.__name__, profile.name))
Instantiates given Component .
53,671
def instantiate_components(self, callback=None):
    """Instantiates every registered Component, in dependency order.

    :param callback: optional callable invoked with each Component
        profile before instantiation.
    :return: True when every Component instantiated successfully.
    :raise manager.exceptions.ComponentInstantiationError: listing the
        Components that failed to instantiate.
    """
    failures = []
    for name in self.list_components():
        if not self.instantiate_component(name, callback):
            failures.append(name)
    if failures:
        raise manager.exceptions.ComponentInstantiationError(
            "{0} | '{1}' Components failed to instantiate!".format(
                self.__class__.__name__, ", ".join(failures)))
    return True
Instantiates the Components .
53,672
def reload_component(self, component):
    """Reload given Component module.

    Reloads the Component and all its dependents (deepest first),
    re-instantiating each interface.

    :param component: name of the Component to reload.
    :return: True.
    """
    # Dependents are reloaded first (reversed resolution order), the
    # requested Component last.
    dependents = list(reversed(self.list_dependents(component)))
    dependents.append(component)
    for dependent in dependents:
        profile = self.__components[dependent]
        module = __import__(profile.package)
        # NOTE: builtin ``reload`` — this module targets Python 2.
        reload(module)
        object = profile.attribute in dir(module) and getattr(module, profile.attribute) or None
        if object and inspect.isclass(object):
            for type in self.__categories.itervalues():
                if type.__name__ in (base.__name__ for base in object.__bases__):
                    instance = object(name=profile.name)
                    profile.module = module
                    profile.interface = instance
                    LOGGER.info("{0} | '{1}' Component has been reloaded!".format(
                        self.__class__.__name__, profile.name))
    return True
Reload given Component module .
53,673
def list_components(self, dependency_order=True):
    """Lists the Components by dependency resolving.

    :param dependency_order: when True, components are returned in an
        order satisfying their ``require`` dependencies, sorted
        alphabetically within each resolution batch.
    :return: list of Component names.
    """
    if not dependency_order:
        return [name for (name, profile) in self]
    requirements = dict((name, profile.require) for (name, profile) in self)
    ordered = []
    for batch in foundations.common.dependency_resolver(requirements):
        ordered.extend(sorted(batch))
    return ordered
Lists the Components by dependency resolving .
53,674
def list_dependents(self, component, dependents=None):
    """Lists given Component dependents Components, transitively.

    :param component: Component name whose dependents are collected.
    :param dependents: accumulator set used by the recursion.
    :return: dependent names sorted in dependency-resolution order.
    """
    if dependents is None:
        dependents = set()
    for name, profile in self:
        if component in profile.require:
            dependents.add(name)
            # Recurse to pick up indirect dependents.
            self.list_dependents(name, dependents)
    resolution_order = self.list_components()
    return sorted(dependents, key=resolution_order.index)
Lists given Component dependents Components .
53,675
def filter_components(self, pattern, category=None):
    """Filters the Components using given regex pattern.

    :param pattern: regex searched against each Component name.
    :param category: when given (truthy), only Components of this
        category are considered.
    :return: list of matching Component names.
    """
    matches = []
    for name, profile in self:
        if category and profile.category != category:
            continue
        if re.search(pattern, name):
            matches.append(name)
    return matches
Filters the Components using given regex pattern .
53,676
def get_profile(self, component):
    """Gets given Component profile.

    :param component: exact Component name.
    :return: the matching profile, or None when not registered.
    """
    matches = self.filter_components(r"^{0}$".format(component))
    if not matches:
        return None
    return self.__components[foundations.common.get_first_item(matches)]
Gets given Component profile .
53,677
def get_interface(self, component):
    """Gets given Component interface.

    :param component: Component name.
    :return: the Component interface, or None when unavailable.
    """
    profile = self.get_profile(component)
    return profile.interface if profile else None
Gets given Component interface .
53,678
def get_component_attribute_name(component):
    """Gets given Component attribute name.

    A ``category.name`` Component becomes ``categoryName``
    (camel-cased); any other form is returned unchanged.
    """
    match = re.search(r"(?P<category>\w+)\.(?P<name>\w+)", component)
    if not match:
        return component
    raw_name = match.group("name")
    name = "{0}{1}{2}".format(
        match.group("category"), raw_name[0].upper(), raw_name[1:])
    LOGGER.debug("> Component name: '{0}' to attribute name Active_QLabel: '{1}'.".format(component, name))
    return name
Gets given Component attribute name .
53,679
def output_to_table(obj, olist='inputs', oformat='latex', table_ends=False, prefix=""):
    """Compile the properties of ``obj`` to a table string.

    :param obj: object whose attributes are tabulated.
    :param olist: 'inputs' to use ``obj.inputs``; 'all' for every
        public (non-underscore) attribute.
    :param oformat: 'latex' (" & " delimiter) or csv-style (",").
    :param table_ends: when True, wrap the rows with table markup.
    :param prefix: string prepended to each property name.
    :return: the formatted table body (or full table when ``table_ends``).
    """
    if olist == 'inputs':
        names = obj.inputs
    elif olist == 'all':
        names = [attr for attr in obj.__dict__ if "_" != attr[0]]
    else:
        names = []
    delimeter = " & " if oformat == "latex" else ","
    rows = []
    for attr in names:
        if not hasattr(obj, attr):
            continue
        value_str = format_value(getattr(obj, attr))
        rows.append("{0}{1}{2}\\\\\n".format(
            prefix + format_name(attr), delimeter, value_str))
    para = "".join(rows)
    if table_ends:
        para = add_table_ends(para, oformat)
    return para
Compile the properties to a table .
53,680
def format_value(value, sf=3):
    """Convert a parameter value into a formatted string with ``sf``
    significant figures.

    Strings pass through unchanged, lists/arrays are formatted
    element-wise (recursively, with the default ``sf``), ``None``
    becomes "N/A", and numbers use "g" formatting.
    """
    if isinstance(value, str):
        return value
    if isinstance(value, (list, np.ndarray)):
        parts = [format_value(item) for item in value]
        return "[" + ", ".join(parts) + "]"
    if value is None:
        return "N/A"
    return ("{0:.%ig}" % sf).format(value)
Convert a parameter value into a formatted string with a given number of significant figures.
53,681
def add_table_ends(para, oformat='latex', caption="caption-text", label="table"):
    """Adds the latex table ends around ``para``.

    :param para: pre-formatted table rows.
    :param oformat: only 'latex' is supported; any other format
        yields an empty string (matching historical behavior).
    :return: the wrapped table markup, or "" for non-latex formats.
    """
    if oformat != 'latex':
        return ""
    pieces = [
        "\\begin{table}[H]\n",
        "\\centering\n",
        "\\begin{tabular}{cc}\n",
        "\\toprule\n",
        "Parameter & Value \\\\\n",
        "\\midrule\n",
        para,
        "\\bottomrule\n",
        "\\end{tabular}\n",
        "\\caption{%s \label{tab:%s}}\n" % (caption, label),
        "\\end{table}\n\n"
    ]
    return "".join(pieces)
Adds the latex table ends
53,682
def draw_shapes_svg_layer(df_shapes, shape_i_columns, layer_name, layer_number=1, use_svg_path=True):
    """Draw shapes as an Inkscape layer in a SVG file.

    :param df_shapes: frame of shape vertices ('x'/'y' plus attribute
        columns).
    :param shape_i_columns: column(s) grouping vertices into shapes.
    :param layer_name: Inkscape label of the generated layer.
    :param layer_number: numeric id suffix of the layer group.
    :param use_svg_path: emit ``<path>`` elements instead of ``<polygon>``.
    :return: file-like object (StringIO) containing the SVG document.
    """
    minx, miny = df_shapes[['x', 'y']].min().values
    maxx, maxy = df_shapes[['x', 'y']].max().values
    width = maxx - minx
    height = maxy - miny
    # The filename is never written to disk; svgwrite just requires one.
    dwg = svgwrite.Drawing('should_not_exist.svg', size=(width, height), debug=False)
    nsmap = INKSCAPE_NSMAP
    dwg.attribs['xmlns:inkscape'] = nsmap['inkscape']
    svg_root = dwg.g(id='layer%d' % layer_number,
                     **{'inkscape:label': layer_name, 'inkscape:groupmode': 'layer'})
    # NOTE(review): minx/miny recomputed — redundant with the assignment above.
    minx, miny = df_shapes[['x', 'y']].min().values
    for shape_i, df_shape_i in df_shapes.groupby(shape_i_columns):
        # All columns except the vertex coordinates become SVG attributes.
        attr_columns = [c for c in df_shape_i.columns if c not in ('vertex_i', 'x', 'y')]
        attrs = df_shape_i.iloc[0][attr_columns].to_dict()
        vertices = df_shape_i[['x', 'y']].values.tolist()
        if not use_svg_path:
            p = Polygon(vertices, debug=False, **attrs)
        else:
            commands = ['M %s,%s' % tuple(vertices[0])]
            commands += ['L %s,%s' % tuple(v) for v in vertices[1:]]
            # NOTE(review): this dedup runs *after* ``commands`` is built,
            # so a duplicated closing vertex still appears as an 'L'
            # command — confirm whether it was meant to run first.
            while vertices[0] == vertices[-1]:
                del vertices[-1]
            commands += ['Z']
            p = Path_(d=' '.join(commands), debug=False, **attrs)
        svg_root.add(p)
    dwg.add(svg_root)
    output = StringIO.StringIO()
    dwg.write(output)
    output.seek(0)
    return output
Draw shapes as a layer in a SVG file .
53,683
def draw_lines_svg_layer(df_endpoints, layer_name, layer_number=1):
    """Draw lines defined by endpoint coordinates as an Inkscape layer
    in a SVG file.

    :param df_endpoints: frame with 'x_source', 'y_source', 'x_target',
        'y_target' columns (one row per line).
    :param layer_name: Inkscape label of the generated layer.
    :param layer_number: numeric id suffix of the layer group.
    :return: file-like object (StringIO) containing the SVG document.
    """
    # The filename is never written to disk; svgwrite just requires one.
    dwg = svgwrite.Drawing('should_not_exist.svg', profile='tiny', debug=False)
    # Canvas sized to the furthest endpoint in each direction.
    dwg.attribs['width'] = df_endpoints[['x_source', 'x_target']].values.max()
    dwg.attribs['height'] = df_endpoints[['y_source', 'y_target']].values.max()
    nsmap = INKSCAPE_NSMAP
    dwg.attribs['xmlns:inkscape'] = nsmap['inkscape']
    coord_columns = ['x_source', 'y_source', 'x_target', 'y_target']
    line_layer = dwg.g(id='layer%d' % layer_number,
                       **{'inkscape:label': layer_name, 'inkscape:groupmode': 'layer'})
    for i, (x1, y1, x2, y2) in df_endpoints[coord_columns].iterrows():
        line_i = dwg.line((x1, y1), (x2, y2), id='line%d' % i,
                          style='stroke:#000000; stroke-width:0.1;')
        line_layer.add(line_i)
    dwg.add(line_layer)
    output = StringIO.StringIO()
    dwg.write(output)
    output.seek(0)
    return output
Draw lines defined by endpoint coordinates as a layer in a SVG file .
53,684
def dts_error(self, error_name, message=None):
    """Create a DTS Error reply.

    Logs the error against the current request and returns a JSON
    response with HTTP status 404.
    """
    self.nautilus_extension.logger.info(
        "DTS error thrown {} for {} ({})".format(error_name, request.path, message))
    reply = jsonify({"error": error_name, "message": message})
    reply.status_code = 404
    return reply
Create a DTS Error reply
53,685
def r_dts_collection(self, objectId=None):
    """DTS Collection Metadata reply for given objectId.

    :return: JSON response (200), or a DTS error reply when the
        resolver raises a NautilusError.
    """
    try:
        metadata = self.resolver.getMetadata(objectId=objectId)
        response = jsonify(metadata.export(Mimetypes.JSON.DTS.Std))
        response.status_code = 200
    except NautilusError as exc:
        return self.dts_error(
            error_name=exc.__class__.__name__, message=exc.__doc__)
    return response
DTS Collection Metadata reply for given objectId
53,686
def create_run(cmd, project, exp, grp):
    """Create a new run in the database.

    :param cmd: command the run executes (stringified).
    :param project: project the run belongs to.
    :param exp: experiment name.
    :param grp: run group identifier.
    :return: (run, session) tuple; the run is already committed.
    """
    from benchbuild.utils import schema as s
    db_session = s.Session()
    new_run = s.Run(
        command=str(cmd),
        project_name=project.name,
        project_group=project.group,
        experiment_name=exp,
        run_group=str(grp),
        experiment_group=project.experiment.id)
    db_session.add(new_run)
    db_session.commit()
    return (new_run, db_session)
Create a new run in the database .
53,687
def create_run_group(prj):
    """Create a new run_group in the database.

    :param prj: project providing the run UUID and experiment.
    :return: (group, session) tuple; the group is already committed.
    """
    from benchbuild.utils import schema as s
    db_session = s.Session()
    new_group = s.RunGroup(id=prj.run_uuid, experiment=prj.experiment.id)
    db_session.add(new_group)
    db_session.commit()
    return (new_group, db_session)
Create a new run_group in the database .
53,688
def persist_project(project):
    """Persist this project in the benchbuild database.

    Inserts a new Project row, or updates the existing row matching the
    project's name and group.

    :param project: the project to persist.
    :return: (query, session) tuple for the project rows.
    """
    from benchbuild.utils.schema import Project, Session
    session = Session()
    projects = session.query(Project).filter(
        Project.name == project.name).filter(
        Project.group_name == project.group)
    name = project.name
    desc = project.__doc__
    domain = project.domain
    group_name = project.group
    # ``version`` may be a plain value or a callable returning one.
    version = project.version() if callable(project.version) else project.version
    try:
        src_url = project.src_uri
    except AttributeError:
        # Projects without a source URI are still persisted.
        src_url = 'unknown'
    if projects.count() == 0:
        newp = Project()
        newp.name = name
        newp.description = desc
        newp.src_url = src_url
        newp.domain = domain
        newp.group_name = group_name
        newp.version = version
        session.add(newp)
    else:
        newp_value = {
            "name": name,
            "description": desc,
            "src_url": src_url,
            "domain": domain,
            "group_name": group_name,
            "version": version
        }
        projects.update(newp_value)
    session.commit()
    return (projects, session)
Persist this project in the benchbuild database .
53,689
def persist_experiment(experiment):
    """Persist this experiment in the benchbuild database.

    Creates the Experiment row for ``experiment.id`` if it does not
    exist yet, otherwise updates its name/description in place.

    :param experiment: the experiment to persist.
    :return: (Experiment, session) tuple.
    """
    from benchbuild.utils.schema import Experiment, Session

    session = Session()
    cfg_exp = experiment.id
    LOG.debug("Using experiment ID stored in config: %s", cfg_exp)
    exps = session.query(Experiment).filter(Experiment.id == cfg_exp)
    desc = str(CFG["experiment_description"])
    name = experiment.name
    if exps.count() == 0:
        newe = Experiment()
        newe.id = cfg_exp
        newe.name = name
        newe.description = desc
        session.add(newe)
        ret = newe
    else:
        exps.update({'name': name, 'description': desc})
        ret = exps.first()
    try:
        session.commit()
    except IntegrityError:
        # A concurrent writer inserted the same experiment id between our
        # count() and commit(). Roll back and retry.
        # BUG FIX: return the retry's result instead of the stale,
        # rolled-back (ret, session) from the failed attempt.
        session.rollback()
        return persist_experiment(experiment)
    return (ret, session)
Persist this experiment in the benchbuild database .
53,690
def persist_perf(run, session, svg_path):
    """Persist the flamegraph in the database.

    Reads the SVG at ``svg_path`` and stores its content as run
    metadata; committing the session is left to the caller.
    """
    from benchbuild.utils import schema as s
    with open(svg_path, 'r') as graph_file:
        graph_data = graph_file.read()
    session.add(
        s.Metadata(name="perf.flamegraph", value=graph_data, run_id=run.id))
Persist the flamegraph in the database .
53,691
def persist_config(run, session, cfg):
    """Persist the configuration as key-value pairs.

    :param run: run the configuration belongs to.
    :param session: database session to add the rows to (not committed).
    :param cfg: mapping of configuration names to values.
    """
    from benchbuild.utils import schema as s
    for key in cfg:
        session.add(s.Config(name=key, value=cfg[key], run_id=run.id))
Persist the configuration as key-value pairs.
53,692
def apis(self):
    """List of APIs to test.

    The ``apis`` attribute may be given either as a list or as a
    single whitespace-separated string (tokenized with ``shlex``).
    """
    raw = self.attributes['apis']
    if isinstance(raw, six.string_types):
        return shlex.split(raw)
    return raw
List of APIs to test.
53,693
def pre_execute(self, execution, context):
    """Make sure the named directory is created if possible.

    Expands ``self._fspath`` with benchmark/api/meta values, optionally
    wipes the tree, and ensures the target directory exists.
    """
    template = self._fspath
    if not template:
        return
    target = template.format(
        benchmark=context.benchmark,
        api=execution['category'],
        **execution.get('metas', {}))
    if self.clean_path:
        # Best effort: ignore errors when the tree is absent.
        shutil.rmtree(target, ignore_errors=True)
    if execution['metas']['file_mode'] == 'onefile':
        # In 'onefile' mode the path names a file; create its parent.
        target = osp.dirname(target)
    if not osp.exists(target):
        os.makedirs(target)
Make sure the named directory is created if possible
53,694
def file_mode(self):
    """File mode(s) to use: 'onefile', 'fpp', or 'both'.

    :return: set containing 'fpp' and/or 'onefile'.
    :raise Exception: on an unrecognized mode token.
    """
    modes = self.attributes['file_mode']
    if isinstance(modes, six.string_types):
        modes = shlex.split(modes)
    selected = set()
    for mode in modes:
        if mode == 'both':
            selected.update(('fpp', 'onefile'))
        elif mode in ['fpp', 'onefile']:
            selected.add(mode)
        else:
            raise Exception('Invalid IOR file mode: ' + mode)
    return selected
File mode(s) to use: 'onefile', 'fpp', or 'both'.
53,695
def device(self):
    """The device associated with this event."""
    device_handle = self._libinput.libinput_event_get_device(self._hevent)
    return Device(device_handle, self._libinput)
The device associated with this event .
53,696
def absolute_coords(self):
    """The current absolute coordinates of the pointer event, in mm
    from the top left corner of the device.

    :raise AttributeError: for events other than
        POINTER_MOTION_ABSOLUTE.
    """
    if self.type != EventType.POINTER_MOTION_ABSOLUTE:
        raise AttributeError(_wrong_prop.format(self.type))
    return (self._libinput.libinput_event_pointer_get_absolute_x(self._handle),
            self._libinput.libinput_event_pointer_get_absolute_y(self._handle))
The current absolute coordinates of the pointer event in mm from the top left corner of the device .
53,697
def transform_absolute_coords(self, width, height):
    """Return the current absolute coordinates of the pointer event,
    transformed to screen coordinates.

    :param width: screen width in pixels.
    :param height: screen height in pixels.
    :raise AttributeError: for events other than
        POINTER_MOTION_ABSOLUTE.
    """
    if self.type != EventType.POINTER_MOTION_ABSOLUTE:
        raise AttributeError(_wrong_meth.format(self.type))
    return (self._libinput.libinput_event_pointer_get_absolute_x_transformed(
                self._handle, width),
            self._libinput.libinput_event_pointer_get_absolute_y_transformed(
                self._handle, height))
Return the current absolute coordinates of the pointer event transformed to screen coordinates .
53,698
def button_state(self):
    """The button state that triggered this event.

    :raise AttributeError: for events other than POINTER_BUTTON.
    """
    if self.type == EventType.POINTER_BUTTON:
        return self._libinput.libinput_event_pointer_get_button_state(
            self._handle)
    raise AttributeError(_wrong_prop.format(self.type))
The button state that triggered this event .
53,699
def seat_button_count(self):
    """The total number of buttons pressed on all devices on the
    associated seat after the event was triggered.

    :raise AttributeError: for events other than POINTER_BUTTON.
    """
    if self.type == EventType.POINTER_BUTTON:
        return self._libinput.libinput_event_pointer_get_seat_button_count(
            self._handle)
    raise AttributeError(_wrong_prop.format(self.type))
The total number of buttons pressed on all devices on the associated seat after the event was triggered .