idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,400
def arguments():
    """Pull in command line arguments.

    Returns:
        argparse.Namespace: the parsed arguments. Exits the process when
        neither ``--email`` nor ``--api`` is provided.
    """
    # NOTE(review): the original assigned DESCRIPTION from itself before it was
    # defined and referenced an undefined ``Raw`` formatter class; both fixed
    # here -- confirm the intended description text.
    DESCRIPTION = "Query Entrez and store the results in a mongo database."
    parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--email", dest="email", action='store',
                        required=False, default=False,
                        help="An email address is required for querying Entrez databases.")
    parser.add_argument("--api", dest="api_key", action='store',
                        required=False, default=False,
                        help="A users ENTREZ API Key. Will speed up download.")
    parser.add_argument("--query", dest="query", action='store', required=True,
                        help="Query to submit to Entrez.")
    parser.add_argument("--host", dest="host", action='store', required=False,
                        default='localhost',
                        help="Location of an already running database.")
    parser.add_argument("--port", dest="port", action='store', type=int,
                        required=False, default=27017,
                        help="Mongo database port.")
    parser.add_argument("--db", dest="db", action='store', required=False,
                        default='sramongo', help="Name of the database.")
    parser.add_argument("--debug", dest="debug", action='store_true',
                        required=False, help="Turn on debug output.")
    parser.add_argument("--force", dest="force", action='store_true',
                        required=False, help="Forces clearing the cache.")
    args = parser.parse_args()
    if not (args.email or args.api_key):
        logger.error('You must provide either an `--email` or `--api`.')
        # exit with a non-zero status so callers can detect the failure
        # (the original sys.exit() reported success)
        sys.exit(1)
    return args
Pulls in command line arguments .
53,401
def run(self, *nodes):
    """Execute benchmarks on every node specified in arguments.

    If none are given, execute benchmarks on every node specified in the
    ``network.nodes`` campaign configuration.
    """
    nodes = nodes or self.nodes
    self._prelude(*nodes)

    @write_yaml_report
    def _run():
        self._build_installer()
        runner = functools.partial(run_on_host, self.campaign)
        # fan out over hosts when the campaign allows concurrent runs
        if self.campaign.network.max_concurrent_runs > 1:
            pool = Pool(self.campaign.network.max_concurrent_runs)
            pool.map(runner, nodes)
        else:
            for node in nodes:
                runner(node)
        return nodes

    with pushd(self.campaign_path):
        _run()
Execute benchmarks on every node specified in arguments . If none are given then execute benchmarks on every nodes specified in the network . nodes campaign configuration .
53,402
def run(self):
    """Execute the benchmark on the specified node and aggregate results."""
    with self._scp_bensh_runner():
        self._execute_bensh_runner()
        tarball = self._retrieve_tarball()
        try:
            self._aggregate_tarball(tarball)
        finally:
            # always clean up the downloaded tarball, even on failure
            os.remove(tarball)
Execute benchmark on the specified node
53,403
def node_pairing(self):
    """Return the validated ``node_pairing`` attribute.

    Raises:
        ValueError: when the value is not one of ``IMB.NODE_PAIRING``.
    """
    value = self.attributes['node_pairing']
    if value in IMB.NODE_PAIRING:
        return value
    message = 'Unexpected {0} value: got "{1}" but valid values are {2}'
    raise ValueError(message.format('node_pairing', value, IMB.NODE_PAIRING))
If 'node', test the current node and the next one; if 'tag', create tests for every pair of the current tag.
53,404
def setup_benchbuild():
    """Setup benchbuild inside a container.

    Registers the virtualenv prefix with the container configuration, then
    installs (or upgrades) benchbuild from source when a source dir is
    configured, falling back to pip otherwise.
    """
    LOG.debug("Setting up Benchbuild...")
    venv_dir = local.path("/benchbuild")
    prefixes = CFG["container"]["prefixes"].value
    prefixes.append(venv_dir)
    CFG["container"]["prefixes"] = prefixes

    src_dir = str(CFG["source_dir"])
    # NOTE(review): str(...) never returns None, so have_src is always True
    # here -- confirm whether CFG["source_dir"].value was intended.
    have_src = src_dir is not None
    if have_src:
        __mount_source(src_dir)

    benchbuild = find_benchbuild()
    if benchbuild and not requires_update(benchbuild):
        # an up-to-date install already exists; only refresh from source
        if have_src:
            __upgrade_from_source(venv_dir, with_deps=False)
        return

    setup_virtualenv(venv_dir)
    if have_src:
        __upgrade_from_source(venv_dir)
    else:
        __upgrade_from_pip(venv_dir)
Setup benchbuild inside a container .
53,405
def main(argv=None):
    """ben-nett entry point."""
    args = cli_common(__doc__, argv=argv)
    driver = BeNet(args['CAMPAIGN_FILE'])
    driver.run()
    # return the driver for testability when an explicit argv is given
    if argv is not None:
        return driver
ben - nett entry point
53,406
def main(argv=None):
    """ben-elastic entry point."""
    args = cli_common(__doc__, argv=argv)
    exporter = ESExporter(args['CAMPAIGN-DIR'], args['--es'])
    exporter.export()
    # return the exporter for testability when an explicit argv is given
    if argv is not None:
        return exporter
ben - elastic entry point
53,407
def r_cts(self):
    """Actual main route of CTS APIs.

    Transfer typical requests through the ``?request=REQUESTNAME`` route.
    Unknown or missing request names produce a MissingParameter error reply.
    """
    query = request.args.get("request", None)
    if query is not None:
        try:
            handlers = {
                "getcapabilities": self._get_capabilities,
                "getpassage": self._get_passage,
                "getpassageplus": self._get_passage_plus,
                "getlabel": self._get_label,
                "getfirsturn": self._get_first_urn,
                "getprevnexturn": self._get_prev_next,
            }
            lowered = query.lower()
            if lowered in handlers:
                return handlers[lowered](urn=request.args.get("urn", None))
            elif lowered == "getvalidreff":
                return self._get_valid_reff(
                    urn=request.args.get("urn", None),
                    level=request.args.get("level", 1, type=int))
        except NautilusError as E:
            return self.cts_error(error_name=E.__class__.__name__,
                                  message=E.__doc__)
    return self.cts_error(MissingParameter.__name__,
                          message=MissingParameter.__doc__)
Actual main route of CTS APIs . Transfer typical requests through the ?request = REQUESTNAME route
53,408
def cts_error(self, error_name, message=None):
    """Create a CTS error reply (XML body, HTTP 404)."""
    log = self.nautilus_extension.logger
    log.info("CTS error thrown {} for {} ({})".format(
        error_name, request.query_string.decode(), message))
    body = render_template("cts/Error.xml", errorType=error_name,
                           message=message)
    return body, 404, {"content-type": "application/xml"}
Create a CTS Error reply
53,409
def _get_capabilities(self, urn=None):
    """Provisional route for GetCapabilities request."""
    metadata = self.resolver.getMetadata(objectId=urn)
    # climb to the top-most ancestor so the whole inventory is exported
    if len(metadata.parents) > 0:
        metadata = metadata.parents[-1]
    body = render_template(
        "cts/GetCapabilities.xml",
        filters="urn={}".format(urn),
        inventory=Markup(metadata.export(Mimetypes.XML.CTS)))
    return body, 200, {"content-type": "application/xml"}
Provisional route for GetCapabilities request
53,410
def _get_passage(self, urn):
    """Provisional route for GetPassage request."""
    urn = URN(urn)
    # a CTS URN shorter than four components cannot identify a text
    if len(urn) < 4:
        raise InvalidURN
    subreference = str(urn.reference) if urn.reference is not None else None
    node = self.resolver.getTextualNode(
        textId=urn.upTo(URN.NO_PASSAGE), subreference=subreference)
    body = render_template(
        "cts/GetPassage.xml",
        filters="urn={}".format(urn),
        request_urn=str(urn),
        full_urn=node.urn,
        passage=Markup(node.export(Mimetypes.XML.TEI)))
    return body, 200, {"content-type": "application/xml"}
Provisional route for GetPassage request
53,411
def _get_passage_plus(self, urn):
    """Provisional route for GetPassagePlus request."""
    urn = URN(urn)
    subreference = None
    # a CTS URN shorter than four components cannot identify a text
    if len(urn) < 4:
        raise InvalidURN
    if urn.reference is not None:
        subreference = str(urn.reference)
    node = self.resolver.getTextualNode(textId=urn.upTo(URN.NO_PASSAGE),
                                        subreference=subreference)
    r = render_template(
        "cts/GetPassagePlus.xml",
        filters="urn={}".format(urn),
        request_urn=str(urn),
        full_urn=node.urn,
        prev_urn=node.prevId,
        next_urn=node.nextId,
        # each metadata entry is a list of (language, value) pairs
        metadata={
            "groupname": [(literal.language, str(literal))
                          for literal in node.metadata.get(RDF_NAMESPACES.CTS.groupname)],
            "title": [(literal.language, str(literal))
                      for literal in node.metadata.get(RDF_NAMESPACES.CTS.title)],
            "description": [(literal.language, str(literal))
                            for literal in node.metadata.get(RDF_NAMESPACES.CTS.description)],
            "label": [(literal.language, str(literal))
                      for literal in node.metadata.get(RDF_NAMESPACES.CTS.label)]
        },
        citation=Markup(node.citation.export(Mimetypes.XML.CTS)),
        passage=Markup(node.export(Mimetypes.XML.TEI)))
    return r, 200, {"content-type": "application/xml"}
Provisional route for GetPassagePlus request
53,412
def _get_valid_reff(self, urn, level):
    """Provisional route for GetValidReff request."""
    urn = URN(urn)
    textId = urn.upTo(URN.NO_PASSAGE)
    subreference = str(urn.reference) if urn.reference is not None else None
    reffs = self.resolver.getReffs(textId=textId, subreference=subreference,
                                   level=level)
    body = render_template("cts/GetValidReff.xml", reffs=reffs, urn=textId,
                           level=level, request_urn=str(urn))
    return body, 200, {"content-type": "application/xml"}
Provisional route for GetValidReff request
53,413
def _get_prev_next(self, urn):
    """Provisional route for GetPrevNext request."""
    urn = URN(urn)
    textId = urn.upTo(URN.NO_PASSAGE)
    subreference = str(urn.reference) if urn.reference is not None else None
    previous, following = self.resolver.getSiblings(textId=textId,
                                                    subreference=subreference)
    body = render_template("cts/GetPrevNext.xml", prev_urn=previous,
                           next_urn=following, urn=textId,
                           request_urn=str(urn))
    return body, 200, {"content-type": "application/xml"}
Provisional route for GetPrevNext request
53,414
def _get_first_urn(self, urn):
    """Provisional route for GetFirstUrn request."""
    urn = URN(urn)
    textId = urn.upTo(URN.NO_PASSAGE)
    subreference = str(urn.reference) if urn.reference is not None else None
    firstId = self.resolver.getTextualNode(
        textId=textId, subreference=subreference).firstId
    body = render_template("cts/GetFirstUrn.xml", firstId=firstId,
                           full_urn=textId, request_urn=str(urn))
    return body, 200, {"content-type": "application/xml"}
Provisional route for GetFirstUrn request
53,415
def _get_label(self, urn):
    """Provisional route for GetLabel request."""
    node = self.resolver.getTextualNode(textId=urn)
    r = render_template(
        "cts/GetLabel.xml",
        request_urn=str(urn),
        full_urn=node.urn,
        # each metadata entry is a list of (language, value) pairs
        metadata={
            "groupname": [(literal.language, str(literal))
                          for literal in node.metadata.get(RDF_NAMESPACES.CTS.groupname)],
            "title": [(literal.language, str(literal))
                      for literal in node.metadata.get(RDF_NAMESPACES.CTS.title)],
            "description": [(literal.language, str(literal))
                            for literal in node.metadata.get(RDF_NAMESPACES.CTS.description)],
            "label": [(literal.language, str(literal))
                      for literal in node.metadata.get(RDF_NAMESPACES.CTS.label)]
        },
        citation=Markup(node.citation.export(Mimetypes.XML.CTS)))
    return r, 200, {"content-type": "application/xml"}
Provisional route for GetLabel request
53,416
def events(self):
    """Yield events from the internal libinput's queue.

    Blocks on the selector until there is file-descriptor activity, then
    drains pending events and wraps each one in its typed wrapper class.
    """
    while True:
        events = self._selector.select()
        # NOTE(review): this dispatches len(events) + 1 times -- one more
        # than there are selector events, presumably to flush the queue;
        # confirm against libinput's dispatch contract.
        for nevent in range(len(events) + 1):
            self._libinput.libinput_dispatch(self._li)
            hevent = self._libinput.libinput_get_event(self._li)
            if hevent:
                type_ = self._libinput.libinput_event_get_type(hevent)
                self._libinput.libinput_dispatch(self._li)
                if type_.is_pointer():
                    yield PointerEvent(hevent, self._libinput)
                elif type_.is_keyboard():
                    yield KeyboardEvent(hevent, self._libinput)
                elif type_.is_touch():
                    yield TouchEvent(hevent, self._libinput)
                elif type_.is_gesture():
                    yield GestureEvent(hevent, self._libinput)
                elif type_.is_tablet_tool():
                    yield TabletToolEvent(hevent, self._libinput)
                elif type_.is_tablet_pad():
                    yield TabletPadEvent(hevent, self._libinput)
                elif type_.is_switch():
                    yield SwitchEvent(hevent, self._libinput)
                elif type_.is_device():
                    yield DeviceNotifyEvent(hevent, self._libinput)
Yield events from the internal libinput's queue.
53,417
def next_event_type(self):
    """Return the type of the next event in the internal queue.

    Returns ``None`` when the queue is empty (libinput reports type 0).
    """
    raw_type = self._libinput.libinput_next_event_type(self._li)
    if raw_type == 0:
        return None
    return EventType(raw_type)
Return the type of the next event in the internal queue .
53,418
def add_device(self, path):
    """Add a device to a libinput context.

    Returns a ``Device`` wrapper, or ``None`` when libinput rejects *path*.
    """
    handle = self._libinput.libinput_path_add_device(self._li, path.encode())
    if not handle:
        return None
    return Device(handle, self._libinput)
Add a device to a libinput context .
53,419
def assign_seat(self, seat):
    """Assign a seat to this libinput context.

    Asserts that libinput reported success (return code 0).
    """
    status = self._libinput.libinput_udev_assign_seat(self._li, seat.encode())
    assert status == 0, 'Failed to assign {}'.format(seat)
Assign a seat to this libinput context .
53,420
def context(self, outdir, log_prefix):
    """Setup instance to extract metrics from the proper run.

    Generator intended for use as a context manager: points the instance at
    *outdir*/*log_prefix* for the duration of the block, then resets both.
    """
    try:
        self._outdir = outdir
        self._log_prefix = log_prefix
        yield
    finally:
        # always reset, even when the body raises
        self._log_prefix = None
        self._outdir = None
Setup instance to extract metrics from the proper run
53,421
def auto_detect_adjacent_shapes(svg_source, shape_i_attr='id',
                                layer_name='Connections',
                                shapes_xpath='//svg:path | //svg:polygon',
                                extend=1.5):
    """Attempt to automatically find adjacent shapes in a SVG layer.

    Returns a new SVG document containing a *layer_name* layer with one line
    per detected adjacency, drawn between the shape centers.
    """
    df_shapes = svg_shapes_to_df(svg_source, xpath=shapes_xpath)
    df_shapes = compute_shape_centers(df_shapes, shape_i_attr)
    df_shape_connections = extract_adjacent_shapes(df_shapes, shape_i_attr,
                                                   extend=extend)
    xml_root = etree.parse(svg_source)
    svg_root = xml_root.xpath('/svg:svg', namespaces=INKSCAPE_NSMAP)[0]
    # one row per shape: identifier and center coordinates
    df_shape_centers = (df_shapes.drop_duplicates(subset=[shape_i_attr])
                        [[shape_i_attr] + ['x_center', 'y_center']]
                        .set_index(shape_i_attr))
    # join source and target centers side by side for each connection
    df_connection_centers = (df_shape_centers.loc[df_shape_connections.source]
                             .reset_index(drop=True)
                             .join(df_shape_centers
                                   .loc[df_shape_connections.target]
                                   .reset_index(drop=True),
                                   lsuffix='_source', rsuffix='_target'))
    # remove any existing connections layer(s) before drawing a fresh one
    connections_xpath = '//svg:g[@inkscape:label="%s"]' % layer_name
    connections_groups = svg_root.xpath(connections_xpath,
                                        namespaces=INKSCAPE_NSMAP)
    if connections_groups:
        for g in connections_groups:
            g.getparent().remove(g)
    svg_output = draw_lines_svg_layer(
        df_connection_centers.rename(columns={'x_center_source': 'x_source',
                                              'y_center_source': 'y_source',
                                              'x_center_target': 'x_target',
                                              'y_center_target': 'y_target'}),
        layer_name=layer_name)
    return svg_output
Attempt to automatically find adjacent shapes in a SVG layer .
53,422
def flatten_dict(dic, parent_key='', sep='.'):
    """Flatten sub-keys of a dictionary.

    Nested mappings are joined with *sep*; list elements (which must be
    mappings themselves) are keyed by index, e.g. ``{'a': [{'b': 1}]}``
    becomes ``{'a.0.b': 1}``.

    Args:
        dic: mapping to flatten.
        parent_key: prefix prepended to every produced key.
        sep: separator between key fragments.

    Returns:
        dict: flat mapping from joined key paths to scalar values.
    """
    # ``collections.MutableMapping`` was removed in Python 3.10; the ABC
    # lives in ``collections.abc``.
    from collections.abc import MutableMapping
    items = []
    for key, value in dic.items():
        new_key = parent_key + sep + key if parent_key else key
        if isinstance(value, MutableMapping):
            items.extend(flatten_dict(value, new_key, sep=sep).items())
        elif isinstance(value, list):
            for idx, elt in enumerate(value):
                items.extend(
                    flatten_dict(elt, new_key + sep + str(idx),
                                 sep=sep).items())
        else:
            items.append((new_key, value))
    return dict(items)
Flatten sub - keys of a dictionary
53,423
def freeze(obj):
    """Transform a tree of dict and list into read-only data structures.

    dict instances are transformed to FrozenDict, lists to FrozenList;
    any other value is returned unchanged.
    """
    # ``collections.Mapping`` was removed in Python 3.10; use the ABC from
    # ``collections.abc``. ``obj.items()`` replaces six.iteritems (Python 3).
    from collections.abc import Mapping
    if isinstance(obj, Mapping):
        return FrozenDict({freeze(k): freeze(v) for k, v in obj.items()})
    elif isinstance(obj, list):
        return FrozenList([freeze(e) for e in obj])
    else:
        return obj
Transform tree of dict and list in read - only data structure . dict instances are transformed to FrozenDict lists in FrozenList .
53,424
def from_file(cls, path):
    """Create a Configuration from a file.

    ``'-'`` reads YAML from stdin; a relative, non-existing path is
    resolved against this module's directory.
    """
    if path == '-':
        # read the configuration from standard input
        return Configuration(yaml.safe_load(sys.stdin))
    if not osp.exists(path) and not osp.isabs(path):
        here = osp.dirname(osp.abspath(__file__))
        path = osp.join(here, path)
    with open(path, 'r') as stream:
        return Configuration(yaml.safe_load(stream))
Create a Configuration from a file
53,425
def file_ref(self):
    """Return a string for the file, row and column of the term.

    Format: ``"<file> <row>:<col> "`` when a file name and row are known,
    ``" <row>:<col> "`` when only a position is known, else ``''``.
    """
    # NOTE(review): the original imported ``slugify`` here but never used
    # it; the dead import has been removed.
    assert self.file_name is None or isinstance(self.file_name, str)
    if self.file_name is not None and self.row is not None:
        parts = split(self.file_name)
        return "{} {}:{} ".format(parts[-1], self.row, self.col)
    elif self.row is not None:
        return " {}:{} ".format(self.row, self.col)
    else:
        return ''
Return a string for the file row and column of the term .
53,426
def add_child(self, child):
    """Add a term to this term's children; also sets the child's parent."""
    assert isinstance(child, Term)
    child.parent = self
    self.children.append(child)
    # sections must never be attached as children of a term
    assert not child.term_is("Datafile.Section")
Add a term to this term's children. Also sets the child term's parent.
53,427
def new_child(self, term, value, **kwargs):
    """Create a new term and add it to this term as a child.

    Creates grandchildren from the kwargs.
    """
    tc = self.doc.get_term_class(term.lower())
    c = tc(term, str(value) if value is not None else None,
           parent=self, doc=self.doc,
           section=self.section).new_children(**kwargs)
    # prefer the term-value name declared for this term, when present
    c.term_value_name = self.doc.decl_terms.get(c.join, {}).get(
        'termvaluename', c.term_value_name)
    assert not c.term_is("*.Section")
    self.children.append(c)
    return c
Create a new term and add it to this term as a child . Creates grandchildren from the kwargs .
53,428
def remove_child(self, child):
    """Remove the term from this term's children.

    Also removes the child from the containing document.
    """
    assert isinstance(child, Term)
    self.children.remove(child)
    self.doc.remove_term(child)
Remove the term from this term s children .
53,429
def new_children(self, **kwargs):
    """Create new children from kwargs; returns self for chaining."""
    for child_term, child_value in kwargs.items():
        self.new_child(child_term, child_value)
    return self
Create new children from kwargs
53,430
def set_ownership(self):
    """Recursively set the parent, section and doc for all children."""
    assert self.section is not None
    for child in self.children:
        child.parent = self
        child._section = self.section
        child.doc = self.doc
        child.set_ownership()
Recursively set the parent, section and doc for all children.
53,431
def find(self, term, value=False):
    """Yield all child terms with a matching name.

    If the name is qualified (``Parent.Child``), the parent part must match
    this term's record name. When *value* is given, only children with that
    value are yielded.
    """
    if '.' in term:
        parent, term = term.split('.')
        assert parent.lower() == self.record_term_lc, \
            (parent.lower(), self.record_term_lc)
    wanted = term.lower()
    for child in self.children:
        if child.record_term_lc == wanted:
            if value is False or child.value == value:
                yield child
Return a terms by name . If the name is not qualified use this term s record name for the parent . The method will yield all terms with a matching qualified name .
53,432
def get_or_new_child(self, term, value=False, **kwargs):
    """Find a term using find_first and set its value and properties if it
    exists. If it does not exist, create a new term and children."""
    pt, rt = self.split_term(term)
    term = self.record_term + '.' + rt
    c = self.find_first(rt)
    if c is None:
        # no such child yet: build one of the declared term class
        tc = self.doc.get_term_class(term.lower())
        c = tc(term, value, parent=self, doc=self.doc,
               section=self.section).new_children(**kwargs)
        assert not c.term_is("Datafile.Section"), (self, c)
        self.children.append(c)
    else:
        # child exists: update value (unless the default False) and props
        if value is not False:
            c.value = value
        for k, v in kwargs.items():
            c.get_or_new_child(k, v)
    assert self.find_first(rt)
    assert self.find_first(rt) == c
    return c
Find a term using find_first and set its value and properties if it exists. If it does not exist, create a new term and children.
53,433
def get_value(self, item, default=None):
    """Get the value of a child, or *default* when it is missing."""
    try:
        return self[item].value
    except (AttributeError, KeyError):
        # no such child, or the entry has no .value
        return default
Get the value of a child
53,434
def qualified_term(self):
    """Return the fully qualified term name (``parent.record``).

    The parent part is ``root`` when no parent term is defined.
    """
    assert self.parent is not None or self.parent_term_lc == 'root'
    prefix = self.parent.record_term_lc if self.parent else 'root'
    return prefix + '.' + self.record_term_lc
Return the fully qualified term name . The parent will be root if there is no parent term defined .
53,435
def term_is(self, v):
    """Return True when this term's fully qualified name matches *v*.

    A ``*`` wildcard matches either the parent or record part; a list or
    tuple matches when any of its elements match.
    """
    if not isinstance(v, str):
        return any(self.term_is(e) for e in v)
    if '.' not in v:
        v = 'root.' + v
    v_p, v_r = self.split_term_lower(v)
    if self.join_lc == v.lower():
        return True
    if v_r == '*' and v_p == self.parent_term_lc:
        return True
    if v_p == '*' and v_r == self.record_term_lc:
        return True
    return v_p == '*' and v_r == '*'
Return True if the fully qualified name of the term is the same as the argument . If the argument is a list or tuple return True if any of the term names match .
53,436
def arg_props(self):
    """Return the value and scalar (same-row) properties as a dictionary.

    Includes one entry per arg declared by the term's section, plus the
    term value keyed by the lower-cased term-value name.
    """
    names = [str(e).lower() for e in self.section.property_names]
    props = dict(zip(names, self.args))
    props[self.term_value_name.lower()] = self.value
    return props
Return the value and scalar properties as a dictionary. Returns only argument properties — properties declared on the same row as a term. It will return an entry for all of the args declared by the term's section. Use all_props to get the values of all children and arg props combined.
53,437
def all_props(self):
    """Return a dict combining arg_props and props (props win on conflict)."""
    merged = self.arg_props
    merged.update(self.props)
    return merged
Return a dictionary with the values of all children and placeholders for all of the section arguments. It combines props and arg_props.
53,438
def _convert_to_dict(cls, term, replace_value_names=True):
    """Converts a record hierarchy to nested dicts.

    Children are merged according to their ``child_property_type``:
    'scalar' stores one value, 'sequence' collects a list,
    'sconcat'/'bconcat' concatenate strings (with/without a space), and any
    other type stores a value then promotes it to a list on repeats.
    """
    from collections import OrderedDict
    if not term:
        return None
    if term.children:
        d = OrderedDict()
        for c in term.children:
            if c.child_property_type == 'scalar':
                d[c.record_term_lc] = cls._convert_to_dict(c, replace_value_names)
            elif c.child_property_type == 'sequence':
                try:
                    d[c.record_term_lc].append(
                        cls._convert_to_dict(c, replace_value_names))
                except (KeyError, AttributeError):
                    # first occurrence (KeyError) or an existing scalar
                    # (AttributeError): start the list
                    d[c.record_term_lc] = [cls._convert_to_dict(c, replace_value_names)]
            elif c.child_property_type == 'sconcat':
                # space-separated concatenation
                if c.record_term_lc in d:
                    s = d[c.record_term_lc] + ' '
                else:
                    s = ''
                d[c.record_term_lc] = s + (cls._convert_to_dict(c, replace_value_names) or '')
            elif c.child_property_type == 'bconcat':
                # bare concatenation, no separator
                d[c.record_term_lc] = d.get(c.record_term_lc, '') + (
                    cls._convert_to_dict(c, replace_value_names) or '')
            else:
                try:
                    d[c.record_term_lc].append(
                        cls._convert_to_dict(c, replace_value_names))
                except KeyError:
                    d[c.record_term_lc] = cls._convert_to_dict(c, replace_value_names)
                except AttributeError as e:
                    # existing scalar: promote it to a list before appending.
                    # NOTE(review): reads d[c.record_term] (not the _lc key)
                    # here -- possible key-case bug; confirm.
                    d[c.record_term_lc] = [d[c.record_term]] + [
                        cls._convert_to_dict(c, replace_value_names)]
        if term.value:
            if replace_value_names:
                d[term.term_value_name.lower()] = term.value
            else:
                d['@value'] = term.value
        return d
    else:
        return term.value
Converts a record hierarchy to nested dicts.
53,439
def rows(self):
    """Yield rows for the term, for writing terms to a CSV file.

    First yields this term's row (value plus terminal-child properties),
    then recurses into non-terminal children.
    """
    tvm = self.section.doc.decl_terms.get(self.qualified_term, {}).get(
        'termvaluename', '@value')
    assert tvm

    properties = {tvm: self.value}
    for child in self.children:
        if child.is_terminal and child.record_term_lc:
            properties[child.record_term_lc] = child.value
    yield (self.qualified_term, properties)

    for child in self.children:
        if not child.is_terminal:
            for row in child.rows:
                yield row
Yield rows for the term for writing terms to a CSV file .
53,440
def descendents(self):
    """Iterate over all descendant terms, depth-first."""
    for child in self.children:
        yield child
        for sub in child.descendents:
            yield sub
Iterate over all descendent terms
53,441
def subclass(cls, t):
    """Change a term into a SectionTerm, in place.

    Resets the term's doc and terms list, then rebinds its class.
    """
    t.terms = []
    t.doc = None
    t.__class__ = SectionTerm
    return t
Change a term into a Section Term
53,442
def add_arg(self, arg, prepend=False):
    """Append (or prepend) a title-cased arg to the arg list, de-duplicated."""
    # drop empty/whitespace-only entries while normalizing existing args
    self.args = [existing.strip() for existing in self.args
                 if existing.strip()]
    title = arg.title()
    if title not in self.args:
        if prepend:
            self.args.insert(0, title)
        else:
            self.args.append(title)
Append an arg to the arg list
53,443
def remove_arg(self, arg):
    """Remove an arg from the arg list (case-insensitive)."""
    # drop empty/whitespace-only entries while normalizing existing args
    self.args = [existing.strip() for existing in self.args
                 if existing.strip()]
    target = arg.lower()
    for existing in list(self.args):
        if existing.lower() == target:
            self.args.remove(existing)
Remove an arg from the arg list.
53,444
def add_term(self, t):
    """Add a term to this section and set its ownership.

    Should only be used on root-level terms; raises GenerateError for any
    other parent. Terms already in the section are left untouched.
    """
    if t not in self.terms:
        if t.parent_term_lc != 'root':
            raise GenerateError(
                "Can only add or move root-level terms. Term '{}' parent is '{}' "
                .format(t, t.parent_term_lc))
        self.terms.append(t)
        self.doc.add_term(t, add_section=False)
        t.set_ownership()
    assert t.section or t.join_lc == 'root.root', t
Add a term to this section and set its ownership. Should only be used on root-level terms.
53,445
def new_term(self, term, value, **kwargs):
    """Create a new root-level term in this section."""
    term_class = self.doc.get_term_class(term.lower())
    created = term_class(term, value, doc=self.doc, parent=None,
                         section=self).new_children(**kwargs)
    self.doc.add_term(created)
    return created
Create a new root - level term in this section
53,446
def get_term(self, term, value=False):
    """Synonym for find_first, restricted to this section."""
    return self.doc.find_first(term, value=value, section=self.name)
Synonym for find_first, restricted to this section.
53,447
def remove_term(self, term, remove_from_doc=True):
    """Remove a term from this section's terms.

    Must be the identical term (the same object); terms not present are
    ignored. Optionally also removes it from the document.
    """
    try:
        self.terms.remove(term)
    except ValueError:
        # not in this section; still optionally remove from the document
        pass
    if remove_from_doc:
        self.doc.remove_term(term)
Remove a term from the terms. Must be the identical term (the same object).
53,448
def clean(self):
    """Remove all terms from the section, and also from the document."""
    # snapshot first: removal mutates the underlying collection
    for t in list(self):
        self.doc.remove_term(t)
Remove all of the terms from the section and also remove them from the document
53,449
def _args ( self , term , d ) : tvm = self . doc . decl_terms . get ( term , { } ) . get ( 'termvaluename' , '@value' ) lower_d = { k . lower ( ) : v for k , v in d . items ( ) } args = [ ] for n in [ tvm ] + self . property_names : args . append ( lower_d . get ( n . lower ( ) , '' ) ) try : del lower_d [ n . lower ( ) ] except KeyError : pass return term , args , lower_d
Extract the children of a term that are arg-children from those that are row-children.
53,450
def rows(self):
    """Yield rows for the section.

    Dict-valued term rows are split into an args row plus one row per
    remaining (non-arg) property.
    """
    for t in self.terms:
        for term, value in t.rows:
            if isinstance(value, dict):
                term, args, remain = self._args(term, value)
                yield term, args
                # leftover properties become child rows of the record term
                for k, v in remain.items():
                    yield term.split('.')[-1] + '.' + k, v
            else:
                yield term, value
Yield rows for the section
53,451
def lines(self):
    """Iterate over all of the rows as text lines.

    Non-Root sections start with a 'Section' header line; each row yields
    its term and first value, followed by indented child property lines.
    """
    if self.name != 'Root':
        yield ('Section', '|'.join([self.value] + self.property_names))
    for row in self.rows:
        term, value = row
        if not isinstance(value, (list, tuple)):
            value = [value]
        term = term.replace('root.', '').title()
        yield (term, value[0])
        # pair the remaining columns with the section's property names
        children = list(zip(self.property_names, value[1:]))
        for prop, value in children:
            if value and value.strip():
                child_t = '.' + (prop.title())
                yield ("    " + child_t, value)
Iterate over all of the rows as text lines
53,452
def as_dict(self, replace_value_names=True):
    """Return the whole section as a dict.

    Temporarily swaps ``children`` for the section's terms so the Term
    implementation walks the section's contents.
    """
    old_children = self.children
    self.children = self.terms
    try:
        return super(SectionTerm, self).as_dict(replace_value_names)
    finally:
        # restore even when the conversion raises (the original left the
        # instance in a broken state on error)
        self.children = old_children
Return the whole section as a dict
53,453
def cmd(self):
    """Build the compiler command line.

    Covers most cases; subclass for something more complex.

    Raises:
        CompilationError: when the command references an environment
        variable (``${VAR}``) that is not set.
    """
    cmd = ([self.compiler_binary] +
           self.flags +
           ['-U' + x for x in self.undef] +
           ['-D' + x for x in self.define] +
           ['-I' + x for x in self.include_dirs] +
           self.sources)
    if self.run_linker:
        cmd += (['-L' + x for x in self.library_dirs] +
                [(x if os.path.exists(x) else '-l' + x)
                 for x in self.libraries] +
                self.linkline)
    # fail fast on ${VAR} references not set in the environment.
    # Raw string fixes the invalid-escape warning of the original; the
    # original's de-duplication list was dead code because the first
    # undefined variable already raises.
    for envvar in re.findall(r'\$\{(\w+)\}', ' '.join(cmd)):
        if os.getenv(envvar) is None:
            msg = "Environment variable '{}' undefined.".format(envvar)
            self.logger.error(msg)
            raise CompilationError(msg)
    return cmd
The command below covers most cases; if you need something more complex, subclass this.
53,454
def get_hash_of_dirs(directory):
    """Recursively hash the contents of the given directory.

    Returns the SHA-512 hex digest as a string, or -1 when the directory
    does not exist.
    """
    import hashlib
    digest = hashlib.sha512()
    if not os.path.exists(directory):
        return -1
    for root, _, files in os.walk(directory):
        for name in files:
            filepath = local.path(root) / name
            if filepath.exists():
                # stream line by line to keep memory bounded
                with open(filepath, 'rb') as next_file:
                    for line in next_file:
                        digest.update(line)
    return digest.hexdigest()
Recursively hash the contents of the given directory .
53,455
def source_required(src_file):
    """Check if a download is required.

    A source is required when it does not exist yet, or when its recorded
    hash no longer matches the current content hash. A stale source (and
    its hash file) is deleted before returning True.
    """
    if not src_file.exists():
        return True
    required = True
    hash_file = src_file.with_suffix(".hash", depth=0)
    LOG.debug("Hash file location: %s", hash_file)
    if hash_file.exists():
        new_hash = get_hash_of_dirs(src_file)
        with open(hash_file, 'r') as h_file:
            old_hash = h_file.readline()
        required = not new_hash == old_hash
        if required:
            # hashes differ: drop the stale copy so it is re-downloaded
            from benchbuild.utils.cmd import rm
            rm("-r", src_file)
            rm(hash_file)
    if required:
        LOG.info("Source required for: %s", src_file)
        LOG.debug("Reason: src-exists: %s hash-exists: %s",
                  src_file.exists(), hash_file.exists())
    return required
Check if a download is required .
53,456
def update_hash(src_file):
    """Update the stored hash for the given file; returns the new hash."""
    hash_path = local.path(src_file) + ".hash"
    new_hash = 0
    with open(hash_path, 'w') as h_file:
        new_hash = get_hash_of_dirs(src_file)
        h_file.write(str(new_hash))
    return new_hash
Update the hash for the given file .
53,457
def Copy(From, To):
    """Small copy wrapper around ``cp -ar --reflink=auto``."""
    from benchbuild.utils.cmd import cp
    cp("-ar", "--reflink=auto", From, To)
Small copy wrapper .
53,458
def CopyNoFail(src, root=None):
    """Copy *src* into the current working directory, if it exists.

    Returns True when the copy happened, False otherwise.
    """
    if root is None:
        root = str(CFG["tmp_dir"])
    src_path = local.path(root) / src
    if not src_path.exists():
        return False
    Copy(src_path, '.')
    return True
Just copy fName into the current working directory if it exists .
53,459
def Wget(src_url, tgt_name, tgt_root=None):
    """Download url if required.

    Uses the cached copy under *tgt_root* (default ``CFG["tmp_dir"]``) when
    its hash is still valid; otherwise downloads, re-hashes and copies the
    file into the current working directory.
    """
    if tgt_root is None:
        tgt_root = str(CFG["tmp_dir"])
    from benchbuild.utils.cmd import wget
    tgt_file = local.path(tgt_root) / tgt_name
    if not source_required(tgt_file):
        Copy(tgt_file, ".")
        return
    wget(src_url, "-O", tgt_file)
    update_hash(tgt_file)
    Copy(tgt_file, ".")
Download url if required .
53,460
def with_wget(url_dict=None, target_file=None):
    """Decorate a project class with wget-based version information.

    Adds ``versions`` (the keys of *url_dict*) and ``download`` (fetches
    the selected version's URL into *target_file* or ``cls.SRC_FILE``).
    """
    def wget_decorator(cls):
        def download_impl(self):
            destination = target_file if target_file else self.SRC_FILE
            source_url = url_dict[self.version]
            Wget(source_url, destination)

        @staticmethod
        def versions_impl():
            return list(url_dict.keys())

        cls.versions = versions_impl
        cls.download = download_impl
        return cls

    return wget_decorator
Decorate a project class with wget - based version information .
53,461
def Git(repository, directory, rev=None, prefix=None, shallow_clone=True):
    """Get a clone of the given repo.

    Clones into ``<prefix or CFG[tmp_dir]>/<directory>`` when the cached
    copy is stale, optionally checks out *rev*, and copies the result into
    the current working directory.
    """
    repository_loc = str(prefix)
    if prefix is None:
        repository_loc = str(CFG["tmp_dir"])
    from benchbuild.utils.cmd import git
    src_dir = local.path(repository_loc) / directory
    if not source_required(src_dir):
        # cache hit: just copy the existing clone into the CWD
        Copy(src_dir, ".")
        return
    extra_param = []
    if shallow_clone:
        extra_param.append("--depth")
        extra_param.append("1")
    git("clone", extra_param, repository, src_dir)
    if rev:
        with local.cwd(src_dir):
            git("checkout", rev)
    update_hash(src_dir)
    Copy(src_dir, ".")
    # NOTE(review): the cache-hit path above returns None while this path
    # returns the location -- confirm callers tolerate both.
    return repository_loc
Get a clone of the given repo
53,462
def with_git(repo, target_dir=None, limit=None, refspec="HEAD", clone=True,
             rev_list_args=None, version_filter=lambda version: True):
    """Decorate a project class with git-based version information.

    Adds ``versions`` (filtered, optionally limited rev-list of *refspec*),
    ``download`` (clone plus checkout of ``self.version``) and
    ``repository`` to the decorated class.
    """
    if not rev_list_args:
        rev_list_args = []

    def git_decorator(cls):
        from benchbuild.utils.cmd import git

        @staticmethod
        def versions_impl():
            """Return the available (filtered) versions of the project."""
            directory = cls.SRC_FILE if target_dir is None else target_dir
            repo_prefix = local.path(str(CFG["tmp_dir"]))
            repo_loc = local.path(repo_prefix) / directory
            if source_required(repo_loc):
                if not clone:
                    return []
                git("clone", repo, repo_loc)
                update_hash(repo_loc)
            with local.cwd(repo_loc):
                # short (10-char) hashes of all commits reachable from refspec
                rev_list = git("rev-list", "--abbrev-commit", "--abbrev=10",
                               refspec, *rev_list_args).strip().split('\n')
                latest = git("rev-parse", "--short=10",
                             refspec).strip().split('\n')
                cls.VERSION = latest[0]
            if limit:
                return list(filter(version_filter, rev_list))[:limit]
            return list(filter(version_filter, rev_list))

        def download_impl(self):
            """Clone the repository and check out ``self.version``."""
            nonlocal target_dir, git
            directory = cls.SRC_FILE if target_dir is None else target_dir
            Git(self.repository, directory)
            with local.cwd(directory):
                git("checkout", self.version)

        cls.versions = versions_impl
        cls.download = download_impl
        cls.repository = repo
        return cls

    return git_decorator
Decorate a project class with git - based version information .
53,463
def Svn(url, fname, to=None):
    """Check out an SVN repository into the build area.

    :param url: URL of the SVN repository
    :param fname: name of the checkout directory below the root
    :param to: root directory for the checkout; defaults to CFG["tmp_dir"]
    """
    root = str(CFG["tmp_dir"]) if to is None else to
    src_dir = local.path(root) / fname
    # Reuse the cached checkout when it is still valid.
    if not source_required(src_dir):
        Copy(src_dir, ".")
        return
    from benchbuild.utils.cmd import svn
    svn("co", url, src_dir)
    update_hash(src_dir)
    Copy(src_dir, ".")
Checkout the SVN repo .
53,464
def Rsync(url, tgt_name, tgt_root=None):
    """Rsync a folder into the build area.

    :param url: rsync source location
    :param tgt_name: name of the target directory below the root
    :param tgt_root: root directory; defaults to CFG["tmp_dir"]
    """
    root = str(CFG["tmp_dir"]) if tgt_root is None else tgt_root
    from benchbuild.utils.cmd import rsync
    target = local.path(root) / tgt_name
    # Reuse the cached copy when it is still valid.
    if not source_required(target):
        Copy(target, ".")
        return
    rsync("-a", url, target)
    update_hash(target)
    Copy(target, ".")
RSync a folder .
53,465
def camel_to_snake(s: str) -> str:
    """Convert *s* from camel case to snake case."""
    # CAMEL_CASE_RE (module-level) marks the boundaries to split on.
    with_separators = CAMEL_CASE_RE.sub(r'_\1', s)
    return with_separators.strip().lower()
Convert string from camel case to snake case .
53,466
def snake_to_camel(s: str) -> str:
    """Convert *s* from snake case to camel case."""
    head, *tail = s.split('_')
    # First fragment keeps its case; the rest are title-cased and joined.
    return head + ''.join(fragment.title() for fragment in tail)
Convert string from snake case to camel case .
53,467
def _create_extractors(cls, metrics):
    """Build one StdExtractor per metric category from the metrics config."""
    by_category = {}
    for metric_name, config in six.iteritems(metrics):
        category = config.get('category', StdBenchmark.DEFAULT_CATEGORY)
        by_category.setdefault(category, {})[metric_name] = config
    return {
        category: StdExtractor(configs)
        for category, configs in six.iteritems(by_category)
    }
Build metrics extractors according to the metrics config
53,468
def froms(self):
    """Group this benchmark's metrics by their ``from`` property.

    Metrics without an explicit ``from`` fall back to ``self.stdout``.
    """
    grouped = {}
    for metric_name, config in six.iteritems(self._metrics):
        source = self._get_property(config, 'from', default=self.stdout)
        grouped.setdefault(source, {})[metric_name] = config
    return grouped
Group metrics according to the from property .
53,469
def tesselate_shapes_frame(df_shapes, shape_i_columns):
    """Tessellate each shape path into one or more triangles.

    :param df_shapes: frame with at least ``x``/``y`` columns and the
        grouping column(s) in *shape_i_columns*
    :param shape_i_columns: column name (or list of names) identifying shapes
    :return: frame with one row per triangle vertex
        (shape columns + ``triangle_i``, ``vertex_i``, ``x``, ``y``)
    """
    frames = []
    if isinstance(shape_i_columns, bytes):
        shape_i_columns = [shape_i_columns]
    for shape_i, df_path in df_shapes.groupby(shape_i_columns):
        points_i = df_path[['x', 'y']].values
        # Drop a duplicated closing point so the triangulator sees an open ring.
        if (points_i[0] == points_i[-1]).all():
            points_i = points_i[:-1]
        try:
            triangulator = Triangulator(points_i)
        except Exception:
            # FIX: was a bare `except:` that dropped into pdb.set_trace()
            # (debugging leftover blocking execution).  Skip shapes that
            # cannot be triangulated, as the original `continue` intended.
            continue
        if not isinstance(shape_i, (list, tuple)):
            shape_i = [shape_i]
        for i, triangle_i in enumerate(triangulator.triangles()):
            triangle_points_i = [
                shape_i + [i] + [j, x, y]
                for j, (x, y) in enumerate(triangle_i)
            ]
            frames.extend(triangle_points_i)
    # pd.DataFrame(None, columns=...) yields an empty frame with the headers.
    frames = None if not frames else frames
    return pd.DataFrame(frames,
                        columns=shape_i_columns + ['triangle_i', 'vertex_i',
                                                   'x', 'y'])
Tesselate each shape path into one or more triangles .
53,470
def mkdtemp(*args, **kwargs):
    """Yield a fresh temporary directory, removing it on exit.

    Pass ``remove=False`` to keep the directory after the context ends;
    all other arguments are forwarded to :func:`tempfile.mkdtemp`.
    """
    cleanup = kwargs.pop('remove', True)
    tmp_path = tempfile.mkdtemp(*args, **kwargs)
    try:
        yield tmp_path
    finally:
        if cleanup:
            shutil.rmtree(tmp_path)
Create a temporary directory in a with - context
53,471
def slugify(value):
    """Normalize *value* to a lowercase ASCII slug.

    Accents are stripped, characters outside word chars/whitespace/hyphen/dot
    are removed, and runs of spaces or hyphens collapse to a single hyphen.
    """
    import re
    import unicodedata

    text = unicodedata.normalize('NFKD', str(value))
    text = text.encode('ascii', 'ignore').decode('utf8')
    text = text.strip().lower()
    text = re.sub(r'[^\w\s\-\.]', '', text)
    return re.sub(r'[-\s]+', '-', text)
Normalizes a string: converts to lowercase, removes characters other than word characters, whitespace, hyphens and dots, and converts runs of spaces or hyphens to a single hyphen.
53,472
def import_name_or_class(name):
    """Import an object given its fully qualified, dotted name.

    Non-string arguments are assumed to already be the object and are
    returned unchanged.
    """
    if not isinstance(name, str):
        return name
    module_name, _ = name.rsplit('.', 1)
    # __import__ returns the top-level package; walk the attribute chain.
    obj = __import__(module_name)
    for attr in name.split('.')[1:]:
        obj = getattr(obj, attr)
    return obj
Import an object given as a fully qualified, dotted name, or return it unchanged if it is not a string.
53,473
def pip_installer_url(version=None):
    """Return the pip requirement string that installs this HPCBench version.

    Released versions map to ``hpcbench==<version>``; development versions
    map to a ``git+`` URL pinned to the branch encoded in the version string
    (when running under Travis) or ``master``.
    """
    version = str(version or hpcbench.__version__)
    if '.dev' not in version:
        return 'hpcbench=={}'.format(version)
    git_rev = 'master'
    if 'TRAVIS_BRANCH' in os.environ:
        # local-version segment after '+' encodes the branch, e.g. "+gBRANCH.N"
        git_rev = version.split('+', 1)[-1]
        if '.' in git_rev:
            git_rev = git_rev.split('.', 1)[0]
        git_rev = git_rev[1:]
    return 'git+{project_url}@{git_rev}#egg=hpcbench'.format(
        project_url='http://github.com/BlueBrain/hpcbench',
        git_rev=git_rev or 'master',
    )
Get argument to give to pip to install HPCBench .
53,474
def from_file(campaign_file, **kwargs):
    """Load a campaign from a YAML file.

    When *campaign_file* is a directory, the standard campaign YAML file
    inside it is used instead.
    """
    # The directory test resolves symlinks, but the join keeps the
    # user-supplied path, matching the original behavior.
    if osp.isdir(osp.realpath(campaign_file)):
        campaign_file = osp.join(campaign_file, YAML_CAMPAIGN_FILE)
    loaded = Configuration.from_file(campaign_file)
    return default_campaign(loaded, **kwargs)
Load campaign from YAML file
53,475
def default_campaign(campaign=None, expandcampvars=True, exclude_nodes=None,
                     frozen=True):
    """Fill an existing campaign with default values for optional keys.

    :param campaign: campaign description to normalize (a new one if None)
    :param expandcampvars: expand environment variables and network tags
    :param exclude_nodes: kept for interface compatibility (not used here)
    :param frozen: return a frozen (read-only) campaign when True
    """
    campaign = campaign or nameddict()

    # FIX: `collections.Mapping` was removed in Python 3.10; resolve the ABC
    # through `collections.abc` when available while keeping Python 2
    # (this codebase uses six) working.
    _Mapping = getattr(collections, 'abc', collections).Mapping

    def _merger(_camp, _deft):
        # Recursively fill missing keys of _camp with defaults from _deft.
        for key in _deft.keys():
            if (key in _camp and isinstance(_camp[key], dict)
                    and isinstance(_deft[key], _Mapping)):
                _merger(_camp[key], _deft[key])
            elif key not in _camp:
                _camp[key] = _deft[key]

    _merger(campaign, DEFAULT_CAMPAIGN)
    campaign.setdefault('campaign_id', str(uuid.uuid4()))

    # Normalize every precondition entry to a list.
    for precondition in campaign.precondition.keys():
        config = campaign.precondition[precondition]
        if not isinstance(config, list):
            campaign.precondition[precondition] = [config]

    def _expandvars(value):
        if isinstance(value, six.string_types):
            return expandvars(value)
        return value

    if expandcampvars:
        campaign = nameddict(dict_map_kv(campaign, _expandvars))
    else:
        campaign = nameddict(campaign)

    if expandcampvars:
        if campaign.network.get('tags') is None:
            campaign.network['tags'] = {}
        NetworkConfig(campaign).expand()
    return freeze(campaign) if frozen else campaign
Fill an existing campaign with default values for optional keys
53,476
def get_benchmark_types(campaign):
    """Yield the type of every benchmark referenced in the campaign.

    Entries named 'sbatch' are skipped (they configure the scheduler,
    not a benchmark).
    """
    for benchmarks in campaign.benchmarks.values():
        for name, benchmark in benchmarks.items():
            if name == 'sbatch':
                continue
            yield benchmark.type
Get the types of the benchmarks referenced in the configuration.
53,477
def get_metrics(campaign, report, top=True):
    """Extract metrics from an existing campaign report tree.

    Yields ``(path_context, metrics)`` pairs.  At the top level of a SLURM
    campaign, job nodes are expanded and recursed into first.

    :param campaign: campaign configuration
    :param report: a ReportNode to traverse
    :param top: internal flag — True only on the outermost call
    """
    if top and campaign.process.type == 'slurm':
        # SLURM campaigns nest results under job nodes; recurse into the
        # children of every node that recorded a job id.
        for path, _ in report.collect('jobid', with_path=True):
            for child in ReportNode(path).children.values():
                for metrics in get_metrics(campaign, child, top=False):
                    yield metrics
    else:
        def metrics_node_extract(report):
            # Load the per-node metrics JSON file if it exists (else None).
            metrics_file = osp.join(report.path, JSON_METRICS_FILE)
            if osp.exists(metrics_file):
                with open(metrics_file) as istr:
                    return json.load(istr)

        def metrics_iterator(report):
            # Keep only nodes that actually produced metrics.
            return filter(
                lambda eax: eax[1] is not None,
                report.map(metrics_node_extract, with_path=True),
            )
        for path, metrics in metrics_iterator(report):
            yield report.path_context(path), metrics
Extract metrics from existing campaign
53,478
def write(self, file):
    """Render the YAML campaign template into the given open file."""
    hostname = socket.gethostname()
    render(
        self.template,
        file,
        benchmarks=self.benchmarks,
        hostname=hostname,
    )
Write YAML campaign template to the given open file
53,479
def expand(self):
    """Perform node expansion of the network section."""
    # Under SLURM, discover the cluster layout before expanding.
    if self.slurm:
        self._introspect_slurm_cluster()
    self.network.nodes = self._expand_nodes(self.network.nodes)
    self._expand_tags()
Perform node expansion of network section .
53,480
def ensure_has_same_campaigns(self):
    """Ensure both campaign outputs were generated from the same campaign.yaml."""
    lhs_yaml = osp.join(self.lhs, 'campaign.yaml')
    rhs_yaml = osp.join(self.rhs, 'campaign.yaml')
    for yaml_path in (lhs_yaml, rhs_yaml):
        assert osp.isfile(yaml_path)
    # Byte-level comparison of the two campaign descriptions.
    assert filecmp.cmp(lhs_yaml, rhs_yaml)
Ensure that the 2 campaigns to merge have been generated from the same campaign . yaml
53,481
def path_context(self, path):
    """Yield ``(field, value)`` pairs extracted from *path*.

    Path components below this node's root are paired with the names in
    ``CONTEXT_ATTRS``; the full path is yielded last under the key 'path'.
    """
    common = os.path.commonprefix([path, self._path])
    relative = path[len(common):].strip(os.sep)
    attrs = self.CONTEXT_ATTRS
    for depth, fragment in enumerate(relative.split(os.sep)):
        yield attrs[depth], fragment
    yield 'path', path
Build a dictionary of fields extracted from the given path.
53,482
def collect(self, *keys, **kwargs):
    """Traverse the tree and yield values of the given keys.

    Yields a single value for one key, a tuple for several.  With
    ``with_path=True``, ``(path, values)`` pairs are yielded instead.
    ``recursive=False`` restricts the traversal to this node.
    """
    if not keys:
        raise Exception('Missing key')
    if all(key in self.data for key in keys):
        values = tuple(self.data[key] for key in keys)
        if len(values) == 1:
            values = values[0]
        if kwargs.get('with_path', False):
            yield self.path, values
        else:
            yield values
    if kwargs.get('recursive', True):
        for child in self.children.values():
            for value in child.collect(*keys, **kwargs):
                yield value
Generator function traversing tree structure to collect values of a specified key .
53,483
def collect_one(self, *args, **kwargs):
    """Same as ``collect`` but expects exactly one result.

    Raises if the traversal yields zero results or more than one.
    """
    results = self.collect(*args, **kwargs)
    try:
        first = next(results)
    except StopIteration:
        raise Exception("Expected exactly one value don't have any")
    for _ in results:
        raise Exception('Expected exactly one value but have more')
    return first
Same as collect but expects to have only one result .
53,484
def set_beam_prop(self, prop, values, repeat="up"):
    """Specify a section property for the beams.

    :param prop: name of the section property to set
    :param values: with ``repeat="up"``, a 1-D sequence (one value per bay,
        reused on every storey); otherwise a 2-D array of shape
        (n_storeys, n_bays)
    """
    values = np.array(values)
    if repeat == "up":
        assert len(values.shape) == 1
        values = [values for ss in range(self.n_storeys)]
    else:
        assert len(values.shape) == 2
    if len(values[0]) != self.n_bays:
        raise ModelError("beam depths does not match number of bays (%i)." % self.n_bays)
    for ss in range(self.n_storeys):
        for i in range(self.n_bays):
            # FIX: index by storey (was values[0][i], which ignored
            # per-storey rows when a 2-D array was supplied).
            self._beams[ss][i].set_section_prop(prop, values[ss][i])
Specify the properties of the beams
53,485
def set_column_prop(self, prop, values, repeat="up"):
    """Specify a section property for the columns.

    :param prop: name of the section property to set
    :param values: with ``repeat="up"``, a 1-D sequence (one value per
        column line, reused on every storey); otherwise a 2-D array of
        shape (n_storeys, n_cols)
    """
    values = np.array(values)
    if repeat == "up":
        assert len(values.shape) == 1
        values = [values for ss in range(self.n_storeys)]
    else:
        assert len(values.shape) == 2
    if len(values[0]) != self.n_cols:
        raise ModelError("column props does not match n_cols (%i)." % self.n_cols)
    for ss in range(self.n_storeys):
        for i in range(self.n_cols):
            # FIX: index by storey (was values[0][i], which ignored
            # per-storey rows when a 2-D array was supplied).
            self._columns[ss][i].set_section_prop(prop, values[ss][i])
Specify the properties of the columns
53,486
def load(self, path):
    """Load a yaml-formatted config file into ``self.config``.

    An empty file produces an empty config dict (with a warning).
    """
    with open(path) as config_stream:
        self.config = full_load(config_stream)
    if self.config is None:
        sys.stderr.write("Warning: config file is empty!\n")
        self.config = {}
Load yaml - formatted config file .
53,487
def save(self):
    """Write the current config dictionary to ``self.config_file`` as YAML."""
    with open(self.config_file, "w") as out:
        out.write(dump(self.config, default_flow_style=False))
Save current state of config dictionary .
53,488
def register(self, extension, extension_name):
    """Register an extension into the Nautilus router.

    The extension's routes and cached functions are recorded together with
    its name; its allowed methods are merged without overriding existing
    entries.
    """
    self._extensions[extension_name] = extension
    for route in extension.ROUTES:
        self.ROUTES.append(tuple(list(route) + [extension_name]))
    for cached_name in extension.CACHED:
        self.CACHED.append((cached_name, extension_name))
    for endpoint, methods in extension.Access_Control_Allow_Methods.items():
        # Never override methods already declared on the router.
        if endpoint not in self.Access_Control_Allow_Methods:
            self.Access_Control_Allow_Methods[endpoint] = methods
Register an extension into the Nautilus Router
53,489
def setLogger(self, logger):
    """Set up the logger for the application.

    When *logger* is None a default 'capitains_nautilus' logger with a
    formatted stream handler is installed.  The resolver (if any) shares
    the same logger.
    """
    if logger is not None:
        self.logger = logger
    else:
        self.logger = logging.getLogger("capitains_nautilus")
        log_format = logging.Formatter(
            "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s"
        )
        handler = FlaskNautilus.LoggingHandler()
        handler.setLevel(logging.INFO)
        handler.setFormatter(log_format)
        self.logger.addHandler(handler)

    if self.resolver:
        self.resolver.logger = self.logger
    return self.logger
Set up the Logger for the application
53,490
def init_app(self, app):
    """Initialise the extension on the application.

    Registers the blueprint and, when a flask cache is configured, wraps
    every registered cached function with the cache's memoizer.
    """
    self.init_blueprint(app)
    if self.flaskcache is not None:
        for func_name, extension_name in self.CACHED:
            extension = self._extensions[extension_name]
            bound = getattr(extension, func_name)
            setattr(extension, bound.__name__, self.flaskcache.memoize()(bound))
    return self.blueprint
Initiate the extension on the application
53,491
def init_blueprint(self, app):
    """Generate the blueprint, register its routes and attach it to *app*.

    :param app: Flask application to register the blueprint on
    :return: the registered blueprint
    """
    self.blueprint = Blueprint(
        self.name,
        self.name,
        template_folder=resource_filename("capitains_nautilus", "data/templates"),
        url_prefix=self.prefix
    )

    for url, name, methods, extension_name in self.ROUTES:
        self.blueprint.add_url_rule(
            url,
            view_func=self.view(name, extension_name),
            # endpoint strips a 2-character prefix from the route name —
            # presumably an "r_" naming convention; TODO confirm.
            endpoint=name[2:],
            methods=methods
        )

    app.register_blueprint(self.blueprint)
    return self.blueprint
Properly generates the blueprint registering routes and filters and connecting the app and the blueprint
53,492
def view(self, function_name, extension_name):
    """Build a Flask view calling *function_name* on the named extension.

    The returned callable forwards all arguments to the extension function
    and attaches CORS headers to its response.
    """
    # Access_Control_Allow_Origin may be configured globally (string) or
    # per-route (dict keyed by function name).
    if isinstance(self.Access_Control_Allow_Origin, dict):
        d = {
            "Access-Control-Allow-Origin": self.Access_Control_Allow_Origin[function_name],
            "Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
        }
    else:
        d = {
            "Access-Control-Allow-Origin": self.Access_Control_Allow_Origin,
            "Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
        }

    def r(*x, **y):
        val = getattr(self._extensions[extension_name], function_name)(*x, **y)
        if isinstance(val, Response):
            # Full Response object: extend its headers in place.
            val.headers.extend(d)
            return val
        else:
            # Otherwise assume a (body, status, headers) style tuple —
            # index 2 holds the headers dict; TODO confirm shape.
            val = list(val)
            val[2].update(d)
            return tuple(val)
    return r
Builds response according to a function name
53,493
def script(experiment, projects):
    """Prepare a SLURM script that executes the experiment for the projects.

    :param experiment: experiment to run
    :param projects: projects the script should operate on
    :return: result of persisting the generated script
    """
    options = []
    if not CFG["slurm"]["multithread"]:
        options.append("--hint=nomultithread")
    if not CFG["slurm"]["turbo"]:
        options.append("--pstate-turbo=off")

    # Re-invoke the currently running benchbuild binary via srun.
    benchbuild_c = local[local.path(sys.argv[0])]
    srun_cmd = local["srun"][options][benchbuild_c["run"]]
    slurm_script = local.cwd / experiment.name + "-" + str(CFG['slurm']['script'])
    return __save__(slurm_script, srun_cmd, experiment, projects)
Prepare a slurm script that executes the experiment for a given project .
53,494
def fetch_text_structure(urn, endpoint="http://cts.perseids.org/api/cts"):
    """Fetch the citation structure of a work from a CTS endpoint.

    :param urn: CTS URN of the work
    :param endpoint: URL of the CTS API endpoint
    :return: dict with ``urn``, ``provenance``, ``levels`` and
        ``valid_reffs`` (per-level lists of reference records), or None
        when no suitable edition is found
    """
    structure = {"urn": urn, "provenance": endpoint, "valid_reffs": {}}
    orig_edition = None
    # Pick the original-language edition by URN suffix.
    suffix = 'grc' if 'greekLit' in urn else 'lat'
    resolver = HttpCtsResolver(HttpCtsRetriever(endpoint))
    work_metadata = resolver.getMetadata(urn)
    try:
        orig_edition = next(iter(
            work_metadata.children[edition]
            for edition in work_metadata.children
            if suffix in str(work_metadata.children[edition].urn)
        ))
    except Exception as e:
        print(e)
        return None
    assert orig_edition is not None

    structure["levels"] = [
        (n + 1, level.name.lower())
        for n, level in enumerate(orig_edition.citation)
    ]
    for level_n, level_label in structure["levels"]:
        structure["valid_reffs"][level_n] = []
        for ref in resolver.getReffs(urn, level=level_n):
            element = {"current": "{}:{}".format(urn, ref)}
            if "." in ref:
                element["parent"] = "{}:{}".format(
                    urn, ".".join(ref.split(".")[:level_n - 1]))
            textual_node = resolver.getTextualNode(
                textId=urn, subreference=ref, prevnext=True)
            # FIX: the ids were swapped — prevId populates "previous" and
            # nextId populates "following" (was the other way around).
            if textual_node.prevId is not None:
                element["previous"] = "{}:{}".format(urn, textual_node.prevId)
            if textual_node.nextId is not None:
                element["following"] = "{}:{}".format(urn, textual_node.nextId)
            structure["valid_reffs"][level_n].append(element)
    return structure
Fetches the text structure of a given work from a CTS endpoint .
53,495
def actions_for_project(self, project):
    """Attach the runtime extension to *project* and return the default actions."""
    extension = run.RuntimeExtension(project, self)
    project.runtime_extension = extension
    return self.default_runtime_actions(project)
Execute all actions but don t do anything as extension .
53,496
def read_json_sensor_file(ffp):
    """Read a JSON sensor file and return it as a dictionary.

    JSON object keys are strings, so the per-type sensor numbers are
    converted back to ints.

    :param ffp: full file path to the sensor file
    """
    # FIX: open via a context manager so the file handle is closed
    # (was json.load(open(...)), which leaked the handle).
    with open(ffp) as sensor_file:
        si = json.load(sensor_file)
    for m_type in si:
        si[m_type] = {int(k): v for k, v in si[m_type].items()}
    return si
Reads the sensor file and stores it as a dictionary .
53,497
def get_all_sensor_codes(si, wild_sensor_code):
    """Get all sensor codes matching a wildcard sensor code.

    :param si: sensor-info dict, keyed by motion type then sensor number
    :param wild_sensor_code: '<mtype>-<x>-<y>-<z>' where any field may be '*'
    :return: list of matching concrete sensor codes
    """
    mtype_and_ory, x, y, z = wild_sensor_code.split("-")
    if mtype_and_ory == "*":
        mtypes = list(si)
    elif mtype_and_ory[-1] in "XYZ" and "ACCX" not in si:
        # Trailing axis letter with no axis-suffixed keys in si — strip the
        # axis to get the motion type (presumably distinguishes two naming
        # schemes; TODO confirm against the sensor-file format).
        mtypes = [mtype_and_ory[:-1]]
    else:
        mtypes = [mtype_and_ory]
    all_sensor_codes = []
    for mtype in mtypes:
        for m_number in si[mtype]:
            # A field matches when it is '*' or equals the stored code.
            if x in ["*", si[mtype][m_number]['X-CODE']] and y in ["*", si[mtype][m_number]['Y-CODE']] and z in ["*", si[mtype][m_number]['Z-CODE']]:
                cc = get_sensor_code_by_number(si, mtype, m_number)
                all_sensor_codes.append(cc)
    return all_sensor_codes
Get all sensor codes that match a wildcard sensor code.
53,498
def get_mtype_and_number_from_code(si, sensor_code):
    """Given a concrete sensor code, return its motion type and sensor number.

    :param si: sensor-info dict, keyed by motion type then sensor number
    :param sensor_code: '<mtype>-<x>-<y>-<z>' code to look up
    :return: (mtype, m_number) tuple, or (None, None) when not found
    """
    mtype_and_ory, x, y, z = sensor_code.split("-")
    if mtype_and_ory[-1] in "XYZ" and "ACCX" not in si:
        # Strip a trailing axis letter when si keys carry no axis suffix
        # (presumably a second naming scheme; TODO confirm).
        mtype = mtype_and_ory[:-1]
    else:
        mtype = mtype_and_ory
    # Linear scan over the sensors of that motion type for an exact match.
    for m_number in si[mtype]:
        cc = get_sensor_code_by_number(si, mtype, m_number)
        if cc == sensor_code:
            return mtype, m_number
    return None, None
Given a sensor sensor_code get motion type and sensor number
53,499
def get_names(self):
    """Collect the name variants of this entity.

    Builds and caches ``self.names`` as a list of
    ``(language, name)`` tuples, one per label variant of every
    F12_Name identifier.

    NOTE(review): `id` shadows the builtin, and `variant.title()` looks
    like it title-cases the label text — presumably intended to fetch the
    label string; confirm against the surf Literal API.
    """
    # Keep only identifiers typed as EFRBROO F12_Name.
    names = [id for id in self.ecrm_P1_is_identified_by if id.uri == surf.ns.EFRBROO['F12_Name']]
    self.names = []
    for name in names:
        for variant in name.rdfs_label:
            self.names.append((variant.language, variant.title()))
    return self.names
Returns a list of (language, name) tuples, one per name variant.