idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,600
def _sequence_map ( cls , seq , path ) : if not any ( path ) : return seq result = [ ] for item in seq : try : result . append ( cls . __resolve_path ( item , path ) ) except ( KeyError , IndexError ) : pass return result
Apply a kwargsql expression to every item of a sequence and return it .
53,601
def populate ( projects_to_filter = None , group = None ) : if projects_to_filter is None : projects_to_filter = [ ] import benchbuild . projects as all_projects all_projects . discover ( ) prjs = ProjectRegistry . projects if projects_to_filter : prjs = { } for filter_project in set ( projects_to_filter ) : try : prjs...
Populate the list of projects that belong to this experiment .
53,602
def cc(project, detect_project=False):
    """Return a clang that hides CFLAGS and LDFLAGS.

    Wraps the configured C compiler for *project* behind a wrapper
    script and returns a plumbum command invoking that wrapper.
    """
    from benchbuild.utils import cmd

    compiler_name = str(CFG["compiler"]["c"])
    wrap_cc(compiler_name, compiler(compiler_name), project,
            detect_project=detect_project)
    return cmd["./{}".format(compiler_name)]
Return a clang that hides CFLAGS and LDFLAGS .
53,603
def cxx(project, detect_project=False):
    """Return a clang++ that hides CFLAGS and LDFLAGS.

    Wraps the configured C++ compiler for *project* behind a wrapper
    script and returns a plumbum command invoking that wrapper.
    """
    from benchbuild.utils import cmd

    compiler_name = str(CFG["compiler"]["cxx"])
    wrap_cc(compiler_name, compiler(compiler_name), project,
            detect_project=detect_project)
    return cmd["./{name}".format(name=compiler_name)]
Return a clang ++ that hides CFLAGS and LDFLAGS .
53,604
def compiler(name):
    """Get a usable clang++ plumbum command.

    PATH and LD_LIBRARY_PATH are set from the project's path
    information so the command runs in the right environment.
    """
    path_info = __get_paths()
    command = local[name]
    command = command.setenv(PATH=path_info["path"],
                             LD_LIBRARY_PATH=path_info["ld_library_path"])
    return command
Get a usable clang ++ plumbum command .
53,605
def strip_path_prefix(ipath, prefix):
    """Strip *prefix* from *ipath*.

    Returns *ipath* unchanged when *prefix* is None or when *ipath*
    does not start with it.
    """
    if prefix is None:
        return ipath
    if ipath.startswith(prefix):
        return ipath[len(prefix):]
    return ipath
Strip prefix from path .
53,606
def unpickle(pickle_file):
    """Unpickle a python object from the given path."""
    with open(pickle_file, "rb") as pickle_f:
        obj = dill.load(pickle_f)
    # A falsy result is treated as a failed load and only logged.
    if not obj:
        LOG.error("Could not load python object from file")
    return obj
Unpickle a python object from the given path .
53,607
def wrap_cc ( filepath , compiler , project , python = sys . executable , detect_project = False ) : env = __create_jinja_env ( ) template = env . get_template ( 'run_compiler.py.inc' ) cc_fname = local . path ( filepath ) . with_suffix ( ".benchbuild.cc" , depth = 0 ) cc_f = persist ( compiler , filename = cc_fname ) ...
Substitute a compiler with a script that hides CFLAGS & LDFLAGS .
53,608
def persist ( id_obj , filename = None , suffix = None ) : if suffix is None : suffix = ".pickle" if hasattr ( id_obj , 'id' ) : ident = id_obj . id else : ident = str ( id ( id_obj ) ) if filename is None : filename = "{obj_id}{suffix}" . format ( obj_id = ident , suffix = suffix ) with open ( filename , 'wb' ) as obj...
Persist an object in the filesystem .
53,609
def load(filename):
    """Load a pickled obj from the filesystem.

    Returns None (after logging) when the file does not exist.
    """
    if not os.path.exists(filename):
        LOG.error("load object - File '%s' does not exist.", filename)
        return None
    with open(filename, 'rb') as obj_file:
        return dill.load(obj_file)
Load a pickled obj from the filesystem .
53,610
def extract_adjacent_shapes ( df_shapes , shape_i_column , extend = .5 ) : df_scaled_x = extend_shapes ( df_shapes , 'x' , extend ) df_scaled_y = extend_shapes ( df_shapes , 'y' , extend ) df_corners = df_shapes . groupby ( shape_i_column ) . agg ( { 'x' : [ 'min' , 'max' ] , 'y' : [ 'min' , 'max' ] } ) row_list = [ ] ...
Generate list of connections between adjacent polygon shapes based on geometrical closeness .
53,611
def es_client(self):
    """Get Elasticsearch client."""
    es_conf = self.campaign.export.elasticsearch
    # Connection parameters come straight from the campaign export config.
    return Elasticsearch(self.hosts, **es_conf.connection_params)
Get Elasticsearch client
53,612
def index_name(self):
    """Get Elasticsearch index name associated to the campaign.

    The configured pattern is formatted with the report date and
    lower-cased (Elasticsearch index names must be lowercase).
    """
    pattern = self.campaign.export.elasticsearch.index_name
    return pattern.format(date=self.report['date']).lower()
Get Elasticsearch index name associated to the campaign
53,613
def remove_index(self):
    """Remove Elasticsearch index associated to the campaign.

    The index is closed before being deleted.
    """
    self.index_client.close(self.index_name)
    self.index_client.delete(self.index_name)
Remove Elasticsearch index associated to the campaign
53,614
def extract_build_info ( exe_path , elf_section = ELF_SECTION ) : build_info = { } with mkdtemp ( ) as tempd , pushd ( tempd ) : proc = subprocess . Popen ( [ OBJCOPY , DUMP_SECTION , "{secn}={ofile}" . format ( secn = elf_section , ofile = BUILDINFO_FILE ) , exe_path , ] , stderr = subprocess . PIPE , ) proc . wait ( ...
Extracts the build information from a given executable .
53,615
def no_args(**kwargs):
    """Return the uchroot command without any customizations.

    PATH and LD_LIBRARY_PATH are derived from the configured
    container prefixes.
    """
    from benchbuild.utils.cmd import uchroot as uchrt

    prefix_list = CFG["container"]["prefixes"].value
    p_paths, p_libs = env(prefix_list)
    return run.with_env_recursive(
        uchrt,
        LD_LIBRARY_PATH=path.list_to_path(p_libs),
        PATH=path.list_to_path(p_paths))
Return the uchroot command without any customizations .
53,616
def with_mounts ( * args , uchroot_cmd_fn = no_args , ** kwargs ) : mounts = CFG [ "container" ] [ "mounts" ] . value prefixes = CFG [ "container" ] [ "prefixes" ] . value uchroot_opts , mounts = __mounts__ ( "mnt" , mounts ) uchroot_cmd = uchroot_cmd_fn ( ** kwargs ) uchroot_cmd = uchroot_cmd [ uchroot_opts ] uchroot_...
Return a uchroot command with all mounts enabled .
53,617
def clean_env(uchroot_cmd, varnames):
    """Return a uchroot cmd that runs inside a filtered environment.

    The variables in *varnames* are unset via ``/usr/bin/env -u``.
    """
    env_cmd = uchroot_cmd["/usr/bin/env"]
    return env_cmd["-u", ",".join(varnames)]
Returns a uchroot cmd that runs inside a filtered environment .
53,618
def mounts(prefix, __mounts):
    """Compute the mountpoints of the current user.

    NOTE(review): dictionary entries appear to be skipped without
    consuming a mountpoint number — confirm against callers.
    """
    mntpoints = []
    counter = 0
    for mount in __mounts:
        if not isinstance(mount, dict):
            mntpoints.append("{0}/{1}".format(prefix, str(counter)))
            counter += 1
    return mntpoints
Compute the mountpoints of the current user .
53,619
def env ( mounts ) : f_mounts = [ m . strip ( "/" ) for m in mounts ] root = local . path ( "/" ) ld_libs = [ root / m / "lib" for m in f_mounts ] ld_libs . extend ( [ root / m / "lib64" for m in f_mounts ] ) paths = [ root / m / "bin" for m in f_mounts ] paths . extend ( [ root / m / "sbin" for m in f_mounts ] ) paths...
Compute the environment of the change root for the user .
53,620
def get_abbreviations(kb):
    """For the sake of profiling.

    Builds a mapping from ``<author URN>$$n<i>`` to the author's i-th
    abbreviation, skipping authors without a URN.
    """
    abbreviations = {}
    for author in kb.get_authors():
        for i, abbrev in enumerate(author.get_abbreviations()):
            if author.get_urn() is not None:
                abbreviations["%s$$n%i" % (author.get_urn(), i)] = abbrev
    return abbreviations
For the sake of profiling .
53,621
def get_authors(self):
    """Returns the authors in the Knowledge Base."""
    person_class = self._session.get_class(surf.ns.EFRBROO['F10_Person'])
    return list(person_class.all())
Returns the authors in the Knowledge Base .
53,622
def get_works(self):
    """Return the works in the Knowledge Base."""
    work_class = self._session.get_class(surf.ns.EFRBROO['F1_Work'])
    return list(work_class.all())
Return the author's works .
53,623
def get_author_label ( self , urn ) : author = self . get_resource_by_urn ( urn ) names = author . get_names ( ) en_names = sorted ( [ name [ 1 ] for name in names if name [ 0 ] == "en" ] , key = len ) try : assert len ( en_names ) > 0 return en_names [ 0 ] except Exception as e : none_names = sorted ( [ name [ 1 ] for...
Get the label corresponding to the author identified by the CTS URN .
53,624
def get_statistics ( self ) : statistics = { "number_authors" : 0 , "number_author_names" : 0 , "number_author_abbreviations" : 0 , "number_works" : 0 , "number_work_titles" : 0 , "number_title_abbreviations" : 0 , "number_opus_maximum" : 0 , } for author in self . get_authors ( ) : if author . get_urn ( ) is not None ...
Gather basic stats about the Knowledge Base and its contents .
53,625
def to_json(self):
    """Serialises the content of the KnowledgeBase as JSON."""
    payload = {
        "statistics": self.get_statistics(),
        "authors": [json.loads(author.to_json())
                    for author in self.get_authors()],
    }
    return json.dumps(payload, indent=2)
Serialises the content of the KnowledgeBase as JSON .
53,626
def write_yaml_report ( func ) : @ wraps ( func ) def _wrapper ( * args , ** kwargs ) : now = datetime . datetime . now ( ) with Timer ( ) as timer : data = func ( * args , ** kwargs ) if isinstance ( data , ( SEQUENCES , types . GeneratorType ) ) : report = dict ( children = list ( map ( str , data ) ) ) elif isinstan...
Decorator used in campaign node post - processing
53,627
def traverse(self):
    """Enumerate children and build associated objects.

    Yields ``(child, built_object)`` pairs; each build runs with the
    child's directory as the working directory.
    """
    build = self.child_builder
    for child in self._children:
        with pushd(str(child)):
            yield child, build(child)
Enumerate children and build associated objects
53,628
def pad_position_l(self, i):
    """Determine the position of the ith pad in the length direction.

    Assumes equally spaced pads.

    :raises ModelError: when *i* is not a valid pad index.
    """
    if i >= self.n_pads_l:
        raise ModelError("pad index out-of-bounds")
    spacing = (self.length - self.pad_length) / (self.n_pads_l - 1)
    return spacing * i + self.pad_length / 2
Determines the position of the ith pad in the length direction . Assumes equally spaced pads .
53,629
def pad_position_w(self, i):
    """Determine the position of the ith pad in the width direction.

    Assumes equally spaced pads.

    :raises ModelError: when *i* is not a valid pad index.
    """
    if i >= self.n_pads_w:
        raise ModelError("pad index out-of-bounds")
    spacing = (self.width - self.pad_width) / (self.n_pads_w - 1)
    return spacing * i + self.pad_width / 2
Determines the position of the ith pad in the width direction . Assumes equally spaced pads .
53,630
def add_to_batch(self, batch):
    """Adds paths to the given batch object.

    They are all added as GL_TRIANGLES so the batch will aggregate
    them all into a single OpenGL primitive.
    """
    for svg_path in self.paths.values():
        svg_path.add_to_batch(batch)
Adds paths to the given batch object . They are all added as GL_TRIANGLES so the batch will aggregate them all into a single OpenGL primitive .
53,631
def alphanum_key(s):
    """Turn a string into a list of string and number chunks.

    Digit runs become ints so the result sorts in natural order.
    """
    return [int(chunk) if chunk.isdigit() else chunk
            for chunk in _RE_INT.split(s)]
Turn a string into a list of string and number chunks .
53,632
def discover ( ) : if CFG [ "plugins" ] [ "autoload" ] : experiment_plugins = CFG [ "plugins" ] [ "experiments" ] . value for exp_plugin in experiment_plugins : try : importlib . import_module ( exp_plugin ) except ImportError as import_error : LOG . error ( "Could not find '%s'" , exp_plugin ) LOG . error ( "ImportErr...
Import all experiments listed in PLUGINS_EXPERIMENTS .
53,633
def print_projects ( projects = None ) : grouped_by = { } if not projects : print ( "Your selection didn't include any projects for this experiment." ) return for name in projects : prj = projects [ name ] if prj . GROUP not in grouped_by : grouped_by [ prj . GROUP ] = [ ] grouped_by [ prj . GROUP ] . append ( "{name}/...
Print a list of projects registered for that experiment .
53,634
def _commandline ( repositories , port = 8000 , host = "127.0.0.1" , debug = False , cache = None , cache_path = "./cache" , redis = None ) : if cache == "redis" : nautilus_cache = RedisCache ( redis ) cache_type = "redis" elif cache == "filesystem" : nautilus_cache = FileSystemCache ( cache_path ) cache_type = "simple...
Run a CTS API from command line .
53,635
def render(template=None, ostr=None, **kwargs):
    """Generate report from a campaign.

    :param template: Jinja template name (defaults to DEFAULT_TEMPLATE)
    :param ostr: output stream (defaults to sys.stdout)
    """
    jinja_environment.filters['texscape'] = tex_escape
    jinja_template = jinja_environment.get_template(template or DEFAULT_TEMPLATE)
    jinja_template.stream(**kwargs).dump(ostr or sys.stdout)
Generate report from a campaign
53,636
def load_json(ffp, custom=None, verbose=0):
    """Given a json file it creates a dictionary of sfsi objects.

    :param ffp: full file path to the json file
    :param custom: optional custom object mapping passed through
    :param verbose: verbosity level passed through
    """
    # Use a context manager so the file handle is closed promptly;
    # the original `json.load(open(ffp))` leaked the handle.
    with open(ffp) as in_file:
        data = json.load(in_file)
    return ecp_dict_to_objects(data, custom, verbose=verbose)
Given a json file it creates a dictionary of sfsi objects
53,637
def loads_json ( p_str , custom = None , meta = False , verbose = 0 ) : data = json . loads ( p_str ) if meta : md = { } for item in data : if item != "models" : md [ item ] = data [ item ] return ecp_dict_to_objects ( data , custom , verbose = verbose ) , md else : return ecp_dict_to_objects ( data , custom , verbose ...
Given a json string it creates a dictionary of sfsi objects
53,638
def migrate_ecp ( in_ffp , out_ffp ) : objs , meta_data = load_json_and_meta ( in_ffp ) ecp_output = Output ( ) for m_type in objs : for instance in objs [ m_type ] : ecp_output . add_to_dict ( objs [ m_type ] [ instance ] ) ecp_output . name = meta_data [ "name" ] ecp_output . units = meta_data [ "units" ] ecp_output ...
Migrates an ECP file to the current version of sfsimodels
53,639
def add_to_dict ( self , an_object , extras = None ) : if an_object . id is None : raise ModelError ( "id must be set on object before adding to output." ) if hasattr ( an_object , "base_type" ) : mtype = an_object . base_type elif hasattr ( an_object , "type" ) : if an_object . type in standard_types : mtype = an_obje...
Convert models to json serialisable output
53,640
def add_to_output(self, mtype, m_id, serialisable_dict):
    """Can add additional objects or dictionaries to the output file
    that don't conform to standard objects.
    """
    models_of_type = self.unordered_models.setdefault(mtype, OrderedDict())
    models_of_type[m_id] = serialisable_dict
Can add additional objects or dictionaries to output file that don t conform to standard objects .
53,641
def available_cpu_count ( ) -> int : try : match = re . search ( r'(?m)^Cpus_allowed:\s*(.*)$' , open ( '/proc/self/status' ) . read ( ) ) if match : res = bin ( int ( match . group ( 1 ) . replace ( ',' , '' ) , 16 ) ) . count ( '1' ) if res > 0 : return res except IOError : LOG . debug ( "Could not get the number of ...
Get the number of available CPUs .
53,642
def escape_yaml ( raw_str : str ) -> str : escape_list = [ char for char in raw_str if char in [ '!' , '{' , '[' ] ] if len ( escape_list ) == 0 : return raw_str str_quotes = '"' i_str_quotes = "'" if str_quotes in raw_str and str_quotes not in raw_str [ 1 : - 1 ] : return raw_str if str_quotes in raw_str [ 1 : - 1 ] :...
Shell - Escape a yaml input string .
53,643
def to_yaml(value) -> str:
    """Convert a given value to a YAML string."""
    stream = yaml.io.StringIO()
    dumper = ConfigDumper(stream, default_flow_style=True, width=sys.maxsize)
    result = None
    try:
        dumper.open()
        dumper.represent(value)
        result = stream.getvalue().strip()
        dumper.close()
    finally:
        # Always release dumper resources, even when serialisation fails.
        dumper.dispose()
    return result
Convert a given value to a YAML string .
53,644
def to_env_var(env_var: str, value) -> str:
    """Create an environment variable from a name and a value.

    The value is YAML-serialised and shell-escaped.
    """
    yaml_value = to_yaml(value)
    return "%s=%s" % (env_var, escape_yaml(yaml_value))
Create an environment variable from a name and a value .
53,645
def find_config ( test_file = None , defaults = None , root = os . curdir ) : if defaults is None : defaults = [ ".benchbuild.yml" , ".benchbuild.yaml" ] def walk_rec ( cur_path , root ) : cur_path = local . path ( root ) / test_file if cur_path . exists ( ) : return cur_path new_root = local . path ( root ) / os . par...
Find the path to the default config file .
53,646
def setup_config ( cfg , config_filenames = None , env_var_name = None ) : if env_var_name is None : env_var_name = "BB_CONFIG_FILE" config_path = os . getenv ( env_var_name , None ) if not config_path : config_path = find_config ( defaults = config_filenames ) if config_path : cfg . load ( config_path ) cfg [ "config_...
This will initialize the given configuration object .
53,647
def upgrade ( cfg ) : db_node = cfg [ "db" ] old_db_elems = [ "host" , "name" , "port" , "pass" , "user" , "dialect" ] has_old_db_elems = [ x in db_node for x in old_db_elems ] if any ( has_old_db_elems ) : print ( "Old database configuration found. " "Converting to new connect_string. " "This will *not* be stored in t...
Provide forward migration for configuration files .
53,648
def uuid_constructor(loader, node):
    """Construct a uuid.UUID object from a scalar YAML node."""
    scalar = loader.construct_scalar(node)
    return uuid.UUID(scalar)
Construct a uuid . UUID object form a scalar YAML node .
53,649
def uuid_add_implicit_resolver(Loader=ConfigLoader, Dumper=ConfigDumper):
    """Attach an implicit pattern resolver for UUID objects."""
    uuid_regex = r'^\b[a-f0-9]{8}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{12}$'
    yaml.add_implicit_resolver('!uuid', re.compile(uuid_regex),
                               Loader=Loader, Dumper=Dumper)
Attach an implicit pattern resolver for UUID objects .
53,650
def store(self, config_file):
    """Store the configuration dictionary to a file.

    A deep copy is filtered for exportable entries before dumping, so
    the live configuration is never mutated.
    """
    exportable = copy.deepcopy(self)
    exportable.filter_exports()
    with open(config_file, 'w') as outf:
        yaml.dump(exportable.node, outf, width=80, indent=4,
                  default_flow_style=False, Dumper=ConfigDumper)
Store the configuration dictionary to a file .
53,651
def load ( self , _from ) : def load_rec ( inode , config ) : for k in config : if isinstance ( config [ k ] , dict ) and k not in [ 'value' , 'default' ] : if k in inode : load_rec ( inode [ k ] , config [ k ] ) else : LOG . debug ( "+ config element: '%s'" , k ) else : inode [ k ] = config [ k ] with open ( _from , '...
Load the configuration dictionary from file .
53,652
def init_from_env ( self ) : if 'default' in self . node : env_var = self . __to_env_var__ ( ) . upper ( ) if self . has_value ( ) : env_val = self . node [ 'value' ] else : env_val = self . node [ 'default' ] env_val = os . getenv ( env_var , to_yaml ( env_val ) ) try : self . node [ 'value' ] = yaml . load ( str ( en...
Initialize this node from environment .
53,653
def value(self):
    """Return the node value if we're a leaf node, otherwise self."""
    if 'value' not in self.node:
        return self
    leaf = self.node['value']
    # Give the value a chance to validate itself before handing it out.
    if hasattr(leaf, 'validate'):
        leaf.validate()
    return leaf
Return the node value if we re a leaf node .
53,654
def to_env_dict(self):
    """Convert configuration object to a flat dictionary.

    Leaf nodes map their environment-variable name to the value (or
    default); inner nodes recurse over their children.
    """
    if self.has_value():
        return {self.__to_env_var__(): self.node['value']}
    if self.has_default():
        return {self.__to_env_var__(): self.node['default']}
    entries = {}
    for key in self.node:
        entries.update(self[key].to_env_dict())
    return entries
Convert configuration object to a flat dictionary .
53,655
def rename_keys(record: Mapping, key_map: Mapping) -> dict:
    """New record with same keys, or renamed keys if key found in key_map."""
    return {key_map.get(k, k): v for k, v in record.items()}
New record with same keys or renamed keys if key found in key_map .
53,656
def replace_keys(record: Mapping, key_map: Mapping) -> dict:
    """New record with renamed keys, including only keys found in key_map."""
    result = {}
    for old_key, val in record.items():
        if old_key in key_map:
            result[key_map[old_key]] = val
    return result
New record with renamed keys including keys only found in key_map .
53,657
def inject_nulls(data: Mapping, field_names) -> dict:
    """Insert None as value for missing fields."""
    return {field: data.get(field) for field in field_names}
Insert None as value for missing fields .
53,658
def read_struct ( fstream ) : line = fstream . readline ( ) . strip ( ) fragments = line . split ( "," ) fragments = [ x for x in fragments if x is not None ] partition = dict ( ) if not len ( fragments ) >= 3 : return None partition [ "struct" ] = fragments [ 0 ] partition [ "info" ] = fragments [ 1 ] partition [ "num...
Read a likwid struct from the text stream .
53,659
def read_table ( fstream ) : pos = fstream . tell ( ) line = fstream . readline ( ) . strip ( ) fragments = line . split ( "," ) fragments = [ x for x in fragments if x is not None ] partition = dict ( ) if not len ( fragments ) >= 4 : return None partition [ "table" ] = fragments [ 0 ] partition [ "group" ] = fragment...
Read a likwid table info from the text stream .
53,660
def read_structs(fstream):
    """Read all structs from likwid's file stream.

    Yields structs until read_struct signals exhaustion with None.
    """
    while True:
        struct = read_struct(fstream)
        if struct is None:
            return
        yield struct
Read all structs from likwid s file stream .
53,661
def read_tables(fstream):
    """Read all tables from likwid's file stream.

    Yields tables until read_table signals exhaustion with None.
    """
    while True:
        table = read_table(fstream)
        if table is None:
            return
        yield table
Read all tables from likwid s file stream .
53,662
def get_measurements ( region , core_info , data , extra_offset = 0 ) : measurements = [ ] clean_core_info = [ x for x in core_info if x ] cores = len ( clean_core_info ) for k in data : if k not in [ "1" , "Region Info" , "Event" , "Metric" , "CPU clock" ] : slot = data [ k ] for i in range ( cores ) : core = core_inf...
Get the complete measurement info from likwid s region info .
53,663
def perfcounters ( infile ) : measurements = [ ] with open ( infile , 'r' ) as in_file : read_struct ( in_file ) for region_struct in read_structs ( in_file ) : region = region_struct [ "1" ] [ 1 ] core_info = region_struct [ "Region Info" ] measurements += get_measurements ( region , core_info , region_struct ) for ta...
Get a complete list of all measurements .
53,664
def cli_common(doc, **kwargs):
    """Program initialization for all provided executables.

    Parses arguments with docopt, configures logging, loads plugin
    components, and selects a non-interactive matplotlib backend when
    matplotlib is available.
    """
    args = docopt(doc, version='hpcbench ' + __version__, **kwargs)
    setup_logger(args['-v'], args['--log'])
    load_components()
    try:
        import matplotlib
    except ImportError:
        pass
    else:
        matplotlib.use('PS')
    return args
Program initialization for all provided executables
53,665
def compute_shape_centers ( df_shapes , shape_i_column , inplace = False ) : if not isinstance ( shape_i_column , bytes ) : raise KeyError ( 'Shape index must be a single column.' ) if not inplace : df_shapes = df_shapes . copy ( ) df_bounding_boxes = get_bounding_boxes ( df_shapes , shape_i_column ) path_centers = ( d...
Compute the center point of each polygon shape and the offset of each vertex to the corresponding polygon center point .
53,666
def refsDecl(self):
    """RefsDecl expression of the citation scheme.

    Returns the first replacementPattern found in the graph, as a
    string, or None when there is none.
    """
    for pattern in self.graph.objects(
            self.asNode(), RDF_NAMESPACES.TEI.replacementPattern):
        return str(pattern)
ResfDecl expression of the citation scheme
53,667
def initializeProfile ( self ) : LOGGER . debug ( "> Building '{0}' profile." . format ( self . __file ) ) sections_file_parser = SectionsFileParser ( self . __file ) sections_file_parser . parse ( ) if sections_file_parser . sections : fileStructureParsingError = lambda attribute : foundations . exceptions . FileStruc...
Initializes the Component Profile .
53,668
def register_component ( self , path ) : component = foundations . strings . get_splitext_basename ( path ) LOGGER . debug ( "> Current Component: '{0}'." . format ( component ) ) profile = Profile ( file = path ) if profile . initializeProfile ( ) : if os . path . isfile ( os . path . join ( profile . directory , prof...
Registers a Component using given path .
53,669
def register_components ( self ) : unregistered_components = [ ] for path in self . paths : for file in foundations . walkers . files_walker ( path , ( "\.{0}$" . format ( self . __extension ) , ) , ( "\._" , ) ) : if not self . register_component ( file ) : unregistered_components . append ( file ) if not unregistered...
Registers the Components .
53,670
def instantiate_component ( self , component , callback = None ) : profile = self . __components [ component ] callback and callback ( profile ) LOGGER . debug ( "> Current Component: '{0}'." . format ( component ) ) if os . path . isfile ( os . path . join ( profile . directory , profile . package ) + ".py" ) or os . ...
Instantiates given Component .
53,671
def instantiate_components ( self , callback = None ) : uninstantiated_components = [ component for component in self . list_components ( ) if not self . instantiate_component ( component , callback ) ] if not uninstantiated_components : return True else : raise manager . exceptions . ComponentInstantiationError ( "{0}...
Instantiates the Components .
53,672
def reload_component ( self , component ) : dependents = list ( reversed ( self . list_dependents ( component ) ) ) dependents . append ( component ) for dependent in dependents : profile = self . __components [ dependent ] module = __import__ ( profile . package ) reload ( module ) object = profile . attribute in dir ...
Reload given Component module .
53,673
def list_components ( self , dependency_order = True ) : if dependency_order : return list ( itertools . chain . from_iterable ( [ sorted ( list ( batch ) ) for batch in foundations . common . dependency_resolver ( dict ( ( key , value . require ) for ( key , value ) in self ) ) ] ) ) else : return [ key for ( key , va...
Lists the Components by dependency resolving .
53,674
def list_dependents ( self , component , dependents = None ) : dependents = set ( ) if dependents is None else dependents for name , profile in self : if not component in profile . require : continue dependents . add ( name ) self . list_dependents ( name , dependents ) return sorted ( list ( dependents ) , key = ( sel...
Lists given Component dependents Components .
53,675
def filter_components(self, pattern, category=None):
    """Filters the Components using given regex pattern.

    :param pattern: regex matched against component names
    :param category: optional category restricting the search
    """
    matches = []
    for component, profile in self:
        if category and profile.category != category:
            continue
        if re.search(pattern, component):
            matches.append(component)
    return matches
Filters the Components using given regex pattern .
53,676
def get_profile(self, component):
    """Gets given Component profile.

    Returns None when no component matches the exact name.
    """
    matches = self.filter_components(r"^{0}$".format(component))
    if matches:
        return self.__components[foundations.common.get_first_item(matches)]
Gets given Component profile .
53,677
def get_interface(self, component):
    """Gets given Component interface.

    Returns None when the component has no profile.
    """
    profile = self.get_profile(component)
    return profile.interface if profile else None
Gets given Component interface .
53,678
def get_component_attribute_name ( component ) : search = re . search ( r"(?P<category>\w+)\.(?P<name>\w+)" , component ) if search : name = "{0}{1}{2}" . format ( search . group ( "category" ) , search . group ( "name" ) [ 0 ] . upper ( ) , search . group ( "name" ) [ 1 : ] ) LOGGER . debug ( "> Component name: '{0}' ...
Gets given Component attribute name .
53,679
def output_to_table ( obj , olist = 'inputs' , oformat = 'latex' , table_ends = False , prefix = "" ) : para = "" property_list = [ ] if olist == 'inputs' : property_list = obj . inputs elif olist == 'all' : for item in obj . __dict__ : if "_" != item [ 0 ] : property_list . append ( item ) for item in property_list : ...
Compile the properties to a table .
53,680
def format_value ( value , sf = 3 ) : if isinstance ( value , str ) : return value elif isinstance ( value , list ) or isinstance ( value , np . ndarray ) : value = list ( value ) for i in range ( len ( value ) ) : vv = format_value ( value [ i ] ) value [ i ] = vv return "[" + ", " . join ( value ) + "]" elif value is...
convert a parameter value into a formatted string with certain significant figures
53,681
def add_table_ends ( para , oformat = 'latex' , caption = "caption-text" , label = "table" ) : fpara = "" if oformat == 'latex' : fpara += "\\begin{table}[H]\n" fpara += "\\centering\n" fpara += "\\begin{tabular}{cc}\n" fpara += "\\toprule\n" fpara += "Parameter & Value \\\\\n" fpara += "\\midrule\n" fpara += para fpar...
Adds the latex table ends
53,682
def draw_shapes_svg_layer ( df_shapes , shape_i_columns , layer_name , layer_number = 1 , use_svg_path = True ) : minx , miny = df_shapes [ [ 'x' , 'y' ] ] . min ( ) . values maxx , maxy = df_shapes [ [ 'x' , 'y' ] ] . max ( ) . values width = maxx - minx height = maxy - miny dwg = svgwrite . Drawing ( 'should_not_exis...
Draw shapes as a layer in a SVG file .
53,683
def draw_lines_svg_layer ( df_endpoints , layer_name , layer_number = 1 ) : dwg = svgwrite . Drawing ( 'should_not_exist.svg' , profile = 'tiny' , debug = False ) dwg . attribs [ 'width' ] = df_endpoints [ [ 'x_source' , 'x_target' ] ] . values . max ( ) dwg . attribs [ 'height' ] = df_endpoints [ [ 'y_source' , 'y_tar...
Draw lines defined by endpoint coordinates as a layer in a SVG file .
53,684
def dts_error(self, error_name, message=None):
    """Create a DTS Error reply.

    Logs the error and returns a JSON reply with status 404.
    """
    self.nautilus_extension.logger.info(
        "DTS error thrown {} for {} ({})".format(
            error_name, request.path, message))
    reply = jsonify({"error": error_name, "message": message})
    reply.status_code = 404
    return reply
Create a DTS Error reply
53,685
def r_dts_collection ( self , objectId = None ) : try : j = self . resolver . getMetadata ( objectId = objectId ) . export ( Mimetypes . JSON . DTS . Std ) j = jsonify ( j ) j . status_code = 200 except NautilusError as E : return self . dts_error ( error_name = E . __class__ . __name__ , message = E . __doc__ ) return...
DTS Collection Metadata reply for given objectId
53,686
def create_run ( cmd , project , exp , grp ) : from benchbuild . utils import schema as s session = s . Session ( ) run = s . Run ( command = str ( cmd ) , project_name = project . name , project_group = project . group , experiment_name = exp , run_group = str ( grp ) , experiment_group = project . experiment . id ) s...
Create a new run in the database .
53,687
def create_run_group(prj):
    """Create a new run_group in the database.

    Returns a tuple ``(group, session)``; the caller owns the session.
    """
    from benchbuild.utils import schema as s

    session = s.Session()
    group = s.RunGroup(id=prj.run_uuid, experiment=prj.experiment.id)
    session.add(group)
    session.commit()
    return (group, session)
Create a new run_group in the database .
53,688
def persist_project ( project ) : from benchbuild . utils . schema import Project , Session session = Session ( ) projects = session . query ( Project ) . filter ( Project . name == project . name ) . filter ( Project . group_name == project . group ) name = project . name desc = project . __doc__ domain = project . do...
Persist this project in the benchbuild database .
53,689
def persist_experiment ( experiment ) : from benchbuild . utils . schema import Experiment , Session session = Session ( ) cfg_exp = experiment . id LOG . debug ( "Using experiment ID stored in config: %s" , cfg_exp ) exps = session . query ( Experiment ) . filter ( Experiment . id == cfg_exp ) desc = str ( CFG [ "expe...
Persist this experiment in the benchbuild database .
53,690
def persist_perf(run, session, svg_path):
    """Persist the flamegraph in the database.

    The SVG file is read whole and stored as run metadata.
    """
    from benchbuild.utils import schema as s

    with open(svg_path, 'r') as svg_file:
        svg_data = svg_file.read()
    session.add(
        s.Metadata(name="perf.flamegraph", value=svg_data, run_id=run.id))
Persist the flamegraph in the database .
53,691
def persist_config(run, session, cfg):
    """Persist the configuration as key-value pairs."""
    from benchbuild.utils import schema as s

    for key in cfg:
        session.add(s.Config(name=key, value=cfg[key], run_id=run.id))
Persist the configuration in as key - value pairs .
53,692
def apis(self):
    """List of API to test.

    A string attribute is split shell-style into a list.
    """
    value = self.attributes['apis']
    if isinstance(value, six.string_types):
        return shlex.split(value)
    return value
List of API to test
53,693
def pre_execute ( self , execution , context ) : path = self . _fspath if path : path = path . format ( benchmark = context . benchmark , api = execution [ 'category' ] , ** execution . get ( 'metas' , { } ) ) if self . clean_path : shutil . rmtree ( path , ignore_errors = True ) if execution [ 'metas' ] [ 'file_mode' ...
Make sure the named directory is created if possible
53,694
def file_mode ( self ) : fms = self . attributes [ 'file_mode' ] eax = set ( ) if isinstance ( fms , six . string_types ) : fms = shlex . split ( fms ) for fm in fms : if fm == 'both' : eax . add ( 'fpp' ) eax . add ( 'onefile' ) elif fm in [ 'fpp' , 'onefile' ] : eax . add ( fm ) else : raise Exception ( 'Invalid IOR ...
onefile fpp or both
53,695
def device(self):
    """The device associated with this event."""
    handle = self._libinput.libinput_event_get_device(self._hevent)
    return Device(handle, self._libinput)
The device associated with this event .
53,696
def absolute_coords ( self ) : if self . type != EventType . POINTER_MOTION_ABSOLUTE : raise AttributeError ( _wrong_prop . format ( self . type ) ) abs_x = self . _libinput . libinput_event_pointer_get_absolute_x ( self . _handle ) abs_y = self . _libinput . libinput_event_pointer_get_absolute_y ( self . _handle ) ret...
The current absolute coordinates of the pointer event in mm from the top left corner of the device .
53,697
def transform_absolute_coords ( self , width , height ) : if self . type != EventType . POINTER_MOTION_ABSOLUTE : raise AttributeError ( _wrong_meth . format ( self . type ) ) abs_x = self . _libinput . libinput_event_pointer_get_absolute_x_transformed ( self . _handle , width ) abs_y = self . _libinput . libinput_even...
Return the current absolute coordinates of the pointer event transformed to screen coordinates .
53,698
def button_state(self):
    """The button state that triggered this event.

    :raises AttributeError: for events other than POINTER_BUTTON.
    """
    if self.type != EventType.POINTER_BUTTON:
        raise AttributeError(_wrong_prop.format(self.type))
    return self._libinput.libinput_event_pointer_get_button_state(self._handle)
The button state that triggered this event .
53,699
def seat_button_count(self):
    """The total number of buttons pressed on all devices on the
    associated seat after the event was triggered.

    :raises AttributeError: for events other than POINTER_BUTTON.
    """
    if self.type != EventType.POINTER_BUTTON:
        raise AttributeError(_wrong_prop.format(self.type))
    return self._libinput.libinput_event_pointer_get_seat_button_count(
        self._handle)
The total number of buttons pressed on all devices on the associated seat after the event was triggered .