idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,500
def get_abbreviations(self):
    """Get abbreviations of the names of the author.

    Returns a list of abbreviation labels; empty on any failure, since
    the ``finally: return`` swallows every exception after logging it.
    """
    abbreviations = []
    try:
        # Resource representing the "abbreviation" E55_Type instance.
        type_abbreviation = self.session.get_resource(
            BASE_URI_TYPES % "abbreviation",
            self.session.get_class(surf.ns.ECRM['E55_Type']))
        # NOTE(review): `unicode` implies a Python 2 runtime — confirm.
        abbreviations = [unicode(label)
                         for name in self.ecrm_P1_is_identified_by
                         for abbreviation in name.ecrm_P139_has_alternative_form
                         for label in abbreviation.rdfs_label
                         if name.uri == surf.ns.EFRBROO['F12_Name']
                         and abbreviation.ecrm_P2_has_type.first == type_abbreviation]
    except Exception as e:
        logger.debug("Exception raised when getting abbreviations for %a" % self)
    finally:
        # finally-return: any exception above is discarded
        return abbreviations
Get abbreviations of the names of the author .
53,501
def get_urn(self):
    """Return the first CTS URN identifying this object, or None.

    Assumes that each HucitAuthor has only one CTS URN; any failure
    (no identifier, malformed URN) yields None.
    """
    try:
        type_ctsurn = self.session.get_resource(
            BASE_URI_TYPES % "CTS_URN",
            self.session.get_class(surf.ns.ECRM['E55_Type']))
        urn = [CTS_URN(urnstring.rdfs_label.one)
               for urnstring in self.ecrm_P1_is_identified_by
               if urnstring.uri == surf.ns.ECRM['E42_Identifier']
               and urnstring.ecrm_P2_has_type.first == type_ctsurn][0]
        return urn
    except Exception as e:
        return None
Assumes that each HucitAuthor has only one CTS URN .
53,502
def to_json(self):
    """Serialises a HucitAuthor to a JSON formatted string.

    Includes URI, CTS URN, names, name abbreviations and the author's
    works (each serialised via the work's own ``to_json``).
    """
    names = self.get_names()
    return json.dumps({
        "uri": self.subject,
        "urn": str(self.get_urn()),
        "names": [{"language": lang, "label": label} for lang, label in names],
        "name_abbreviations": self.get_abbreviations(),
        "works": [json.loads(work.to_json()) for work in self.get_works()]
    }, indent=2)
Serialises a HucitAuthor to a JSON formatted string .
53,503
def add_text_structure(self, label):
    """Adds a citable text structure to the work.

    Creates (or fetches) a TextStructure resource derived from the
    work's URI, labels it, links it via hucit:has_structure and
    returns the stored structure.
    """
    ts = self.session.get_resource(
        "%s/text_structure" % self.subject,
        self.session.get_class(surf.ns.HUCIT['TextStructure']))
    ts.rdfs_label.append(Literal(label))
    ts.save()
    self.hucit_has_structure = ts
    self.update()
    return self.hucit_has_structure.one
Adds a citable text structure to the work .
53,504
def remove_text_structure(self, text_structure):
    """Remove a citable text structure from the work.

    :raises ValueError: when text_structure is not attached to the
        work (propagated from ``index``).
    """
    idx = self.hucit_has_structure.index(text_structure)
    ts = self.hucit_has_structure.pop(idx)
    ts.remove()
    self.update()
    return
Remove a citable text structure from the work.
53,505
def _get_opus_maximum(self):
    """Instantiate (or fetch) the opus-maximum E55_Type resource.

    NOTE(review): ``label`` is bound to the resource itself, so a
    newly-created type's rdfs:label is the resource object, not a
    string — this looks like a bug; confirm the intended label text.
    """
    label = opmax = self.session.get_resource(
        BASE_URI_TYPES % "opmax",
        self.session.get_class(surf.ns.ECRM['E55_Type']))
    if opmax.is_present():
        return opmax
    else:
        opmax.rdfs_label.append(Literal(label, "en"))
        logger.debug("Created a new opus maximum type instance")
        opmax.save()
        return opmax
Instantiate an opus maximum type .
53,506
def set_as_opus_maximum(self):
    """Mark explicitly the work as the author's opus maximum.

    :returns: False when the work is already the opus maximum,
        True after marking it.
    """
    if self.is_opus_maximum():
        return False
    else:
        opmax = self._get_opus_maximum()
        self.ecrm_P2_has_type = opmax
        self.update()
        return True
Mark explicitly the work as the author s opus maximum .
53,507
def is_opus_maximum(self):
    """Check whether the work is the author's opus maximum.

    True when the work carries the opus-maximum type, or when it is
    the author's only work.
    """
    opmax = self._get_opus_maximum()
    types = self.ecrm_P2_has_type
    if opmax in types:
        return True
    else:
        # a single-work author implicitly has an opus maximum
        if len(self.author.get_works()) == 1:
            return True
        else:
            return False
Check whether the work is the author s opus maximum .
53,508
def author(self):
    """Returns the author (F10_Person) to whom the work is attributed,
    located through the work's F27_Work_Conception creation event."""
    CreationEvent = self.session.get_class(surf.ns.EFRBROO['F27_Work_Conception'])
    Person = self.session.get_class(surf.ns.EFRBROO['F10_Person'])
    creation_event = CreationEvent.get_by(efrbroo_R16_initiated=self).first()
    return Person.get_by(efrbroo_P14i_performed=creation_event).first()
Returns the author to whom the work is attributed .
53,509
def to_json(self):
    """Serialises a HucitWork to a JSON formatted string, with its
    URI, CTS URN, titles and title abbreviations."""
    titles = self.get_titles()
    return json.dumps({
        "uri": self.subject,
        "urn": str(self.get_urn()),
        "titles": [{"language": lang, "label": label} for lang, label in titles],
        "title_abbreviations": self.get_abbreviations()
    }, indent=2)
Serialises a HucitWork to a JSON formatted string .
53,510
def execution_cls(self):
    """Get the execution-layer driver class matching the campaign's
    process type.

    :raises NameError: when the type matches no known driver.
    """
    name = self.campaign.process.type
    for clazz in [ExecutionDriver, SrunExecutionDriver]:
        if name == clazz.name:
            return clazz
    raise NameError("Unknown execution layer: '%s'" % name)
Get execution layer class
53,511
def children(self):
    """Retrieve the tags associated with the current node.

    The result always contains '*'. A tag is added when the node's
    name (or LOCALHOST) matches the tag config's 'match' regex or
    'nodes' list, or unconditionally for 'constraint' configs.
    """
    tags = {'*'}
    if self.tag:
        # restrict the scan to the single requested tag
        network_tags = {self.tag: self.campaign.network.tags[self.tag]}
    else:
        network_tags = self.campaign.network.tags
    for tag, configs in network_tags.items():
        for config in configs:
            for mode, kconfig in config.items():
                if mode == 'match':
                    if kconfig.match(self.name) or kconfig.match(LOCALHOST):
                        tags.add(tag)
                        break
                elif mode == 'nodes':
                    if self.name in kconfig or LOCALHOST in kconfig:
                        tags.add(tag)
                        break
                elif mode == 'constraint':
                    tags.add(tag)
                    break
            if tag in tags:
                # tag already matched: skip its remaining configs
                break
    return tags
Retrieve tags associated to the current node
53,512
def activate(self):
    """Sets Component activation state.

    Abstract hook: concrete subclasses must provide the behavior.

    :raises NotImplementedError: always, in this base implementation.
    """
    cls_name = self.__class__.__name__
    method_name = self.activate.__name__
    message = "{0} | '{1}' must be implemented by '{2}' subclasses!".format(
        cls_name, method_name, cls_name)
    raise NotImplementedError(message)
Sets Component activation state .
53,513
def reservations(self):
    """Yield (name, Reservation) pairs for every SLURM reservation,
    parsed from ``sinfo --reservation`` output."""
    command = [SINFO, '--reservation']
    output = subprocess.check_output(command, env=SINFO_ENV)
    output = output.decode()
    it = iter(output.splitlines())
    next(it)  # skip the sinfo header line
    for line in it:
        rsv = Reservation.from_sinfo(line)
        yield rsv.name, rsv
Get the nodes of every reservation.
53,514
def set(self, values):
    """Set the object parameters using a dictionary.

    Only names listed in ``self.inputs`` that already exist as
    attributes are assigned; other keys in *values* are ignored.
    """
    if not hasattr(self, "inputs"):
        return
    for name in self.inputs:
        if hasattr(self, name):
            setattr(self, name, values[name])
Set the object parameters using a dictionary
53,515
def execute_plan(plan):
    """Execute the plan.

    Runs every action in *plan* and returns the results of the steps
    that failed (as judged by ``actns.step_has_failed``).
    """
    outcomes = [step() for step in plan]
    return [outcome for outcome in outcomes if actns.step_has_failed(outcome)]
Execute the plan .
53,516
def format_repr(obj, attributes) -> str:
    """Format an object's repr from the given attribute names.

    :returns: e.g. ``ClassName(attr1=..., attr2=...)``.
    """
    pairs = ('{}={}'.format(name, repr(getattr(obj, name)))
             for name in attributes)
    return "{0}({1})".format(obj.__class__.__qualname__, ', '.join(pairs))
Format an object s repr method with specific attributes .
53,517
def build_parameter_descriptions(obj, user_p=None, output="csv", show_none=True, ignore=None, plist=None):
    """Creates a list of the descriptions of all the inputs of an object.

    :param obj: object exposing an ``inputs`` attribute (unless plist given)
    :param user_p: dict of user-supplied (description, units) overrides
    :param output: 'csv' (list of comma-joined strings), 'list' or 'dict'
    :param show_none: when False, skip parameters whose value is None
    :param ignore: parameter names to exclude
    :param plist: explicit parameter list; defaults to ``obj.inputs``
    :raises ModelError: when obj has no ``inputs`` and plist is None
    :raises ValueError: for an unrecognised output format
    """
    if user_p is None:
        user_p = {}
    if ignore is None:
        ignore = []
    para = [[obj.__class__.__name__ + " inputs:", "", ""]]
    if plist is None:
        if not hasattr(obj, 'inputs'):
            raise exceptions.ModelError("Object must contain parameter 'inputs' or set plist as an input")
        plist = obj.inputs
    p_dict = {}
    if hasattr(obj, 'ancestor_types'):
        bt = obj.ancestor_types
        # collect entries from the vp registry; user overrides win
        for otype in bt:
            if otype in vp:
                for item in vp[otype]:
                    p_dict[item] = vp[otype][item]
        for item in user_p:
            p_dict[item] = user_p[item]
    else:
        p_dict = user_p
    for item in plist:
        if show_none is False and getattr(obj, item) is None:
            continue
        if item in ignore:
            continue
        if item in p_dict:
            # p_dict values are (description, units) pairs
            para.append([item, p_dict[item][1], p_dict[item][0]])
        else:
            para.append([item, "", ""])
    if output == "csv":
        out_obj = []
        for i in range(len(para)):
            out_obj.append(",".join(para[i]))
    elif output == "list":
        out_obj = para
    elif output == "dict":
        out_obj = OrderedDict()
        for i in range(len(para)):
            out_obj[para[i][0]] = {"description": para[i][2], "units": para[i][1]}
    else:
        raise ValueError("output must be either: 'csv', 'dict' or 'list'.")
    return out_obj
Creates a list of the descriptions of all the inputs of an object.
53,518
def all_descriptions():
    """Generates a combined list of parameter descriptions of all the
    models, separated by blank CSV rows."""
    para = []
    para += build_parameter_descriptions(models.Soil()) + [",,\n"]
    para += build_parameter_descriptions(models.SoilProfile()) + [",,\n"]
    para += build_parameter_descriptions(models.Foundation()) + [",,\n"]
    para += build_parameter_descriptions(models.PadFoundation()) + [",,\n"]
    para += build_parameter_descriptions(models.SDOFBuilding()) + [",,\n"]
    para += build_parameter_descriptions(models.FrameBuilding2D(1, 1))
    return para
Generates a list of descriptions of all the models
53,519
def plot_shapes_heat_map(df_shapes, shape_i_columns, values, axis=None,
                         vmin=None, vmax=None, value_formatter=None,
                         color_map=None):
    """Plot polygon shapes colored based on values mapped onto a colormap.

    :param df_shapes: data frame with 'x'/'y' vertex columns
    :param shape_i_columns: column(s) identifying each shape
    :param values: pandas Series of one value per shape id
    :param axis: existing axis to draw on (a new figure otherwise)
    :param vmin/vmax: optional normalization bounds (tick labels are
        prefixed with <=/>= when clipped)
    :param value_formatter: matplotlib tick formatter for the colorbar
    :param color_map: matplotlib colormap for the patches
    :returns: (axis, colorbar)
    """
    df_shapes = df_shapes.copy()
    # flip y so shapes render in screen orientation
    df_shapes.loc[:, 'y'] = df_shapes.y.max() - df_shapes.y.copy().values
    aspect_ratio = ((df_shapes.x.max() - df_shapes.x.min()) /
                    (df_shapes.y.max() - df_shapes.y.min()))
    if vmin is not None or vmax is not None:
        norm = mpl.colors.Normalize(vmin=vmin or min(values),
                                    vmax=vmax or max(values))
    else:
        norm = None
    if axis is None:
        fig, axis = plt.subplots(figsize=(10, 10 * aspect_ratio))
    else:
        fig = axis.get_figure()
    patches = OrderedDict([(id, Polygon(df_shape_i[['x', 'y']].values))
                           for id, df_shape_i in
                           df_shapes.groupby(shape_i_columns)])
    patches = pd.Series(patches)
    collection = PatchCollection(patches.values, cmap=color_map, norm=norm)
    # BUG FIX: Series.ix was removed from pandas; .loc performs the
    # label-based lookup intended here.
    collection.set_array(values.loc[patches.index])
    axis.add_collection(collection)
    axis_divider = make_axes_locatable(axis)
    color_axis = axis_divider.append_axes("right", size="10%", pad=0.05)
    colorbar = fig.colorbar(collection, format=value_formatter, cax=color_axis)
    tick_labels = colorbar.ax.get_yticklabels()
    # raw strings: '\l' and '\g' are invalid escape sequences otherwise
    if vmin is not None:
        tick_labels[0] = r'$\leq$%s' % tick_labels[0].get_text()
    if vmax is not None:
        tick_labels[-1] = r'$\geq$%s' % tick_labels[-1].get_text()
    colorbar.ax.set_yticklabels(tick_labels)
    axis.set_xlim(df_shapes.x.min(), df_shapes.x.max())
    axis.set_ylim(df_shapes.y.min(), df_shapes.y.max())
    return axis, colorbar
Plot polygon shapes colored based on values mapped onto a colormap .
53,520
def plot_color_map_bars(values, vmin=None, vmax=None, color_map=None, axis=None, **kwargs):
    """Plot a bar for each entry in values, colored by its normalized value.

    :param values: pandas Series of numbers to plot
    :param vmin: lower normalization bound (default: min of values)
    :param vmax: upper normalization bound (default: max of values)
    :param color_map: colormap callable; when None it is taken from
        rcParams — NOTE(review): rcParams['image.cmap'] is the colormap
        *name* string yet it is called like a colormap below; confirm
        callers always pass a callable.
    :param axis: existing axis to draw on (a new figure otherwise)
    """
    if axis is None:
        fig, axis = plt.subplots()
    norm = mpl.colors.Normalize(vmin=vmin or min(values),
                                vmax=vmax or max(values), clip=True)
    if color_map is None:
        color_map = mpl.rcParams['image.cmap']
    # clip=True makes norm return a masked array; .filled() unmasks it
    colors = color_map(norm(values.values).filled())
    values.plot(kind='bar', ax=axis, color=colors, **kwargs)
    return axis
Plot bar for each value in values colored based on values mapped onto the specified color map .
53,521
def parse_broken_json(json_text: str) -> dict:
    """Parse broken JSON that the standard json module cannot parse,
    by delegating to the more lenient YAML parser.

    :param json_text: the (possibly malformed) JSON text.
    :returns: the decoded mapping.
    """
    # YAML requires a space after ':'; broken JSON often lacks it.
    json_text = json_text.replace(":", ": ")
    # BUG FIX: yaml.load without an explicit Loader is deprecated and
    # unsafe on untrusted input; safe_load parses plain data only.
    json_dict = yaml.safe_load(json_text)
    return json_dict
Parses broken JSON that the standard Python JSON module cannot parse .
53,522
def _executor_script(self):
    """Create the shell script in charge of executing the benchmark
    and return its path.

    The script is created in the current working directory and made
    readable and executable by its owner/group.
    """
    fd, path = tempfile.mkstemp(suffix='.sh', dir=os.getcwd())
    os.close(fd)  # only the path is needed; reopen in text mode below
    with open(path, 'w') as ostr:
        self._write_executor_script(ostr)
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IEXEC | stat.S_IRGRP | stat.S_IRUSR)
    return path
Create shell - script in charge of executing the benchmark and return its path .
53,523
def _write_executor_script(self, ostr):
    """Write the shell script in charge of executing the command.

    Renders the Jinja executor template with the command, cwd,
    modules to load and a shell-quoted copy of the environment.

    :raises Exception: when the configured environment is not a mapping.
    """
    environment = self.execution.get('environment') or {}
    if not isinstance(environment, Mapping):
        msg = 'Expected mapping for environment but got '
        msg += str(type(environment))
        raise Exception(msg)
    # quote values so they survive shell interpolation verbatim
    escaped_environment = dict(
        (var, six.moves.shlex_quote(str(value)))
        for var, value in environment.items())
    modules = self.execution.get('modules') or []
    properties = dict(
        command=self.command,
        cwd=os.getcwd(),
        modules=modules,
        environment=escaped_environment,
    )
    self._jinja_executor_template.stream(**properties).dump(ostr)
Write shell script in charge of executing the command
53,524
def command_str(self):
    """Get the command to execute as a properly escaped string."""
    cmd = self.command
    if isinstance(cmd, six.string_types):
        return cmd
    return ' '.join(six.moves.shlex_quote(part) for part in cmd)
get command to execute as string properly escaped
53,525
def popen(self, stdout, stderr):
    """Build the Popen object running the generated executor script,
    wiring its stdout/stderr to the given streams."""
    self.logger.info('Executing command: %s', self.command_str)
    return subprocess.Popen([self._executor_script], stdout=stdout, stderr=stderr)
Build popen object to run
53,526
def srun(self):
    """Get the path to the srun executable, honoring an override in
    the campaign's process commands."""
    commands = self.campaign.process.get('commands', {})
    srun = find_executable(commands.get('srun', 'srun'))
    if six.PY2:
        # Python 2 subprocess wants byte strings
        srun = srun.encode('utf-8')
    return srun
Get path to srun executable
53,527
def common_srun_options(cls, campaign):
    """Get options to be given to all srun commands.

    Merges the campaign-level ``srun`` settings with fixed per-node
    output/error file patterns (the patterns always win).
    """
    options = dict(campaign.process.get('srun') or {})
    options.update(
        output='slurm-%N-%t.stdout',
        error='slurm-%N-%t.error',
    )
    return options
Get options to be given to all srun commands
53,528
def command(self):
    """Get the full srun command to execute.

    Prepends srun and its options to the parent command; unless a
    constraint is in play, the node list and node count are pinned
    explicitly.
    """
    srun_optlist = build_slurm_arguments(self.parent.command.srun or {})
    if not isinstance(self.root.network.nodes(self.tag), ConstraintTag):
        pargs = parse_constraint_in_args(srun_optlist)
        self.command_expansion_vars['process_count'] = pargs.ntasks
        if not pargs.constraint:
            # no constraint given: target the resolved nodes explicitly
            srun_optlist += [
                '--nodelist=' + ','.join(self.srun_nodes),
                '--nodes=' + str(len(self.srun_nodes)),
            ]
    command = super(SrunExecutionDriver, self).command
    return [self.srun] + srun_optlist + command
get command to execute
53,529
def srun_nodes(self):
    """Get the list of nodes where the command will be executed.

    The 'srun_nodes' execution setting may be an explicit sequence
    (returned as-is), an int count, or a tag name (str → count 0).
    Resolved nodes are written back into the execution config.
    """
    count = self.execution.get('srun_nodes', 0)
    if isinstance(count, six.string_types):
        tag = count
        count = 0
    elif isinstance(count, SEQUENCES):
        return count
    else:
        assert isinstance(count, int)
        tag = self.tag
    nodes = self._srun_nodes(tag, count)
    if 'srun_nodes' in self.execution:
        # memoize the resolution for later reporting
        self.execution['srun_nodes'] = nodes
        self.execution['srun_nodes_count'] = len(nodes)
    return nodes
Get list of nodes where to execute the command
53,530
def clear_graph(identifier=None):
    """Clean up a graph by removing it.

    :param identifier: graph identifier; when falsy nothing is
        destroyed and the graph is only closed.
    """
    graph = get_graph()
    if identifier:
        graph.destroy(identifier)
    try:
        graph.close()
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit
    # and KeyboardInterrupt; keep the best-effort close semantics.
    except Exception:
        warn("Unable to close the Graph")
Clean up a graph by removing it
53,531
def set_label(self, label, lang):
    """Add the label of the collection in the given language.

    Writes a SKOS prefLabel into the metadata set and an RDFS label
    quad into the graph. Failures are deliberately swallowed —
    NOTE(review): consider at least logging the exception.
    """
    try:
        self.metadata.add(SKOS.prefLabel, Literal(label, lang=lang))
        self.graph.addN([
            (self.asNode(), RDFS.label, Literal(label, lang=lang), self.graph),
        ])
    except Exception as E:
        # best-effort: invalid labels/langs are silently ignored
        pass
Add the label of the collection in given lang
53,532
def members(self):
    """Children of the collection's item, wrapped in children_class."""
    here = self.asNode()
    child_nodes = self.graph.subjects(RDF_NAMESPACES.DTS.parent, here)
    return [self.children_class(node) for node in child_nodes]
Children of the collection s item
53,533
def parent(self):
    """Parent of the current object, or None when it has none."""
    candidates = list(self.graph.objects(self.asNode(), RDF_NAMESPACES.DTS.parent))
    if not candidates:
        return None
    return self.parent_class(candidates[0])
Parent of current object
53,534
def actions_for_project(self, project):
    """Compile & run the experiment with -O3 (keeping frame pointers
    for profiling) and a timing runtime extension."""
    project.cflags = ["-O3", "-fno-omit-frame-pointer"]
    project.runtime_extension = time.RunWithTime(
        run.RuntimeExtension(project, self))
    return self.default_runtime_actions(project)
Compile & Run the experiment with - O3 enabled .
53,535
def to_step_result(func):
    """Convert a function return to a list of StepResults.

    Falsy results become [StepResult.OK]; non-iterable results are
    wrapped in a single-element list.
    """
    @ft.wraps(func)
    def wrapper(*args, **kwargs):
        outcome = func(*args, **kwargs)
        if not outcome:
            outcome = [StepResult.OK]
        if not hasattr(outcome, "__iter__"):
            outcome = [outcome]
        return outcome
    return wrapper
Convert a function return to a list of StepResults .
53,536
def prepend_status(func):
    """Prepend the output of func with the step's status name,
    unless the status is still UNSET."""
    @ft.wraps(func)
    def wrapper(self, *args, **kwargs):
        text = func(self, *args, **kwargs)
        if self.status is not StepResult.UNSET:
            text = "[{status}]".format(status=self.status.name) + text
        return text
    return wrapper
Prepends the output of func with the status .
53,537
def notify_step_begin_end(func):
    """Invoke the class's ON_STEP_BEGIN/ON_STEP_END listeners around func."""
    @ft.wraps(func)
    def wrapper(self, *args, **kwargs):
        klass = self.__class__
        begin_listeners = klass.ON_STEP_BEGIN
        end_listeners = klass.ON_STEP_END
        for listener in begin_listeners:
            listener(self)
        result = func(self, *args, **kwargs)
        for listener in end_listeners:
            listener(self, func)
        return result
    return wrapper
Print the beginning and the end of a func .
53,538
def log_before_after(name: str, desc: str):
    """Log a customized string before & after running the wrapped func.

    Logs OK when StepResult.ERROR is absent from the result, ERROR
    otherwise.
    """
    def decorate(f):
        @ft.wraps(f)
        def wrapper(*args, **kwargs):
            LOG.info("\n%s - %s", name, desc)
            res = f(*args, **kwargs)
            if StepResult.ERROR not in res:
                LOG.info("%s - OK\n", name)
            else:
                LOG.error("%s - ERROR\n", name)
            return res
        return wrapper
    return decorate
Log a customized string before & after running func.
53,539
def euclid(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    x, y = abs(a), abs(b)
    if x < y:
        x, y = y, x
    while y:
        x, y = y, x % y
    return x
returns the Greatest Common Divisor of a and b
53,540
def coPrime(l):
    """Return True if the values in the list l are pairwise co-prime,
    otherwise False.

    An empty or single-element list is vacuously co-prime.
    """
    # stdlib gcd replaces the hand-rolled euclid(); abs() matches the
    # original helper's handling of negative inputs.
    from math import gcd
    for i, j in combinations(l, 2):
        if gcd(abs(i), abs(j)) != 1:
            return False
    return True
returns True if the values in the list L are all co - prime otherwise it returns False .
53,541
def modInv(a, m):
    """Return the multiplicative inverse of a modulo m, as a value in
    [0, m-1].

    Returns 0 when a and m are not co-prime (no inverse exists).
    """
    if coPrime([a, m]):
        # index 1 is the Bezout coefficient of a — presumably
        # extendedEuclid returns (gcd, x, y); confirm in its definition
        linearCombination = extendedEuclid(a, m)
        return linearCombination[1] % m
    else:
        return 0
returns the multiplicative inverse of a in modulo m as a positive value between zero and m - 1
53,542
def int2baseTwo(x):
    """Convert a non-negative integer to base two, returned as a list
    of bits in reverse order (least-significant bit first)."""
    assert x >= 0
    bits = []
    remaining = x
    while remaining:
        bits.append(remaining & 1)
        remaining >>= 1
    return bits
x is a non-negative integer. Convert it to base two, returned as a list of bits in reverse order (least-significant bit first).
53,543
def newKey(a, b, k):
    """Generate an RSA key pair from two distinct pseudo-primes
    roughly between a and b.

    :param k: accuracy parameter forwarded to findAPrime.
    :returns: (n, e, d) — modulus, public and private exponents.
    :raises ValueError: when no prime could be found.
    """
    try:
        p = findAPrime(a, b, k)
        while True:
            q = findAPrime(a, b, k)
            if q != p:
                break
    except:
        # NOTE(review): bare except hides the original failure;
        # consider catching the specific exception and chaining it.
        raise ValueError
    n = p * q
    m = (p - 1) * (q - 1)  # Euler's totient of n
    while True:
        e = random.randint(1, m)
        if coPrime([e, m]):
            break
    d = modInv(e, m)
    return (n, e, d)
Try to find two large pseudo primes roughly between a and b . Generate public and private keys for RSA encryption . Raises ValueError if it fails to find one
53,544
def blocks2numList(blocks, n):
    """Inverse function of numList2blocks: split every integer block
    back into n bytes (base-256 digits), most-significant first."""
    result = []
    for block in copy.copy(blocks):
        digits = []
        for _ in range(0, n):
            digits.append(block % 256)
            block >>= 8
        result.extend(reversed(digits))
    return result
inverse function of numList2blocks .
53,545
def encrypt(message, modN, e, blockSize):
    """Given a string message, public key (modN, e) and blockSize,
    encrypt using the RSA algorithm.

    NOTE(review): only the first block is encrypted
    (``numBlocks[0]``) — messages longer than one block are
    silently truncated; confirm this is intended.
    """
    numList = string2numList(message)
    numBlocks = numList2blocks(numList, blockSize)
    message = numBlocks[0]
    return modExp(message, e, modN)
given a string message public keys and blockSize encrypt using RSA algorithms .
53,546
def decrypt(secret, modN, d, blockSize):
    """Reverse function of encrypt: decrypt each cipher block with the
    private exponent d and rebuild the plaintext string."""
    numBlocks = [modExp(blocks, d, modN) for blocks in secret]
    numList = blocks2numList(numBlocks, blockSize)
    return numList2string(numList)
reverse function of encrypt
53,547
def match_files(files, pattern: Pattern):
    """Yield each file name that matches the regular expression pattern."""
    yield from (name for name in files if re.match(pattern, name))
Yields file name if matches a regular expression pattern .
53,548
def match_zipfile_members(zipfile_path: str, pattern: Pattern):
    """Yield zip-archive member names matching a regex pattern."""
    with ZipFile(zipfile_path, mode='r') as archive:
        yield from match_files(archive.namelist(), pattern)
Match files to a pattern within a zip file s content .
53,549
def directory_files(path):
    """Yield the names of the non-hidden regular files in *path*."""
    for entry in os.scandir(path):
        if entry.is_file() and not entry.name.startswith('.'):
            yield entry.name
Yield directory file names .
53,550
def get_file_listing_sha(listing_paths: Iterable) -> str:
    """Return the sha256 hex digest for a group of FTP listings.

    Paths are sorted first, so the digest is order-independent.
    """
    canonical = ''.join(sorted(listing_paths))
    return sha256(canonical.encode('utf-8')).hexdigest()
Return sha256 string for group of FTP listings .
53,551
def set_button_map(self, button_map):
    """Set the finger-number to button-number mapping for tap-to-click.

    :raises AssertionError: when the device does not support tapping.
    """
    assert self.finger_count > 0, 'This device does not support tapping'
    return self._libinput.libinput_device_config_tap_set_button_map(
        self._handle, button_map)
Set the finger number to button number mapping for tap - to - click .
53,552
def set_matrix(self, matrix):
    """Apply the 3x3 transformation matrix to absolute device
    coordinates (passed as the first two rows — 6 floats)."""
    matrix = (c_float * 6)(*matrix)
    return self._libinput.libinput_device_config_calibration_set_matrix(
        self._handle, matrix)
Apply the 3x3 transformation matrix to absolute device coordinates .
53,553
def matrix(self):
    """The current calibration matrix for this device.

    :returns: (rc, 6-tuple of matrix floats) — rc is the libinput
        return code.
    """
    matrix = (c_float * 6)()
    rc = self._libinput.libinput_device_config_calibration_get_matrix(
        self._handle, matrix)
    return rc, tuple(matrix)
The current calibration matrix for this device .
53,554
def default_matrix(self):
    """The default calibration matrix for this device.

    :returns: (rc, 6-tuple of matrix floats) — rc is the libinput
        return code.
    """
    matrix = (c_float * 6)()
    rc = self._libinput.libinput_device_config_calibration_get_default_matrix(
        self._handle, matrix)
    return rc, tuple(matrix)
The default calibration matrix for this device .
53,555
def sysname(self):
    """The system name of the device, decoded from the C string."""
    pchar = self._libinput.libinput_device_get_sysname(self._handle)
    return string_at(pchar).decode()
The system name of the device .
53,556
def set_seat_logical_name(self, seat):
    """Change the logical seat associated with this device by removing
    the device and adding it to the new seat.

    :raises AssertionError: when libinput rejects the assignment —
        NOTE(review): asserts vanish under ``python -O``; consider a
        real exception for this validation.
    """
    rc = self._libinput.libinput_device_set_seat_logical_name(
        self._handle, seat.encode())
    assert rc == 0, 'Cannot assign device to {}'.format(seat)
Change the logical seat associated with this device by removing the device and adding it to the new seat .
53,557
def capabilities(self):
    """A tuple of DeviceCapability values this device supports."""
    caps = []
    for cap in DeviceCapability:
        if self._libinput.libinput_device_has_capability(self._handle, cap):
            caps.append(cap)
    return tuple(caps)
A tuple of capabilities this device supports .
53,558
def size(self):
    """The physical size of a device in mm, where meaningful.

    :returns: (width, height) in millimetres.
    :raises AssertionError: when the device provides no size info.
    """
    width = c_double(0)
    height = c_double(0)
    rc = self._libinput.libinput_device_get_size(
        self._handle, byref(width), byref(height))
    assert rc == 0, 'This device does not provide size information'
    return width.value, height.value
The physical size of a device in mm where meaningful .
53,559
def has_button(self, button):
    """Check if this device has the given button.

    :raises AssertionError: when the device is not a pointer device
        (negative libinput return code).
    """
    rc = self._libinput.libinput_device_pointer_has_button(self._handle, button)
    assert rc >= 0, 'This device is not a pointer device'
    return bool(rc)
Check if this device has a given button .
53,560
def num_mode_groups(self):
    """Number of mode groups on a tablet pad (most devices have one;
    e.g. the Wacom Cintiq 22HD provides two).

    :raises AttributeError: when the device is not a tablet pad.
    """
    num = self._libinput.libinput_device_tablet_pad_get_num_mode_groups(self._handle)
    if num < 0:
        raise AttributeError('This device is not a tablet pad device')
    return num
Most devices only provide a single mode group however devices such as the Wacom Cintiq 22HD provide two mode groups .
53,561
def physical_name(self):
    """The physical name of the seat, decoded from the C string."""
    pchar = self._libinput.libinput_seat_get_physical_name(self._handle)
    return string_at(pchar).decode()
The physical name of the seat .
53,562
def logical_name(self):
    """The logical name of the seat, decoded from the C string."""
    pchar = self._libinput.libinput_seat_get_logical_name(self._handle)
    return string_at(pchar).decode()
The logical name of the seat .
53,563
def configure():
    """Load logging configuration from our own defaults.

    Maps the settings verbosity (0-5) onto CRITICAL..NOTSET; debug
    mode installs a detailed formatter instead of the brief one.
    """
    log_levels = {
        5: logging.NOTSET,
        4: logging.DEBUG,
        3: logging.INFO,
        2: logging.WARNING,
        1: logging.ERROR,
        0: logging.CRITICAL
    }
    logging.captureWarnings(True)
    root_logger = logging.getLogger()
    if settings.CFG["debug"]:
        details_format = logging.Formatter(
            '%(name)s (%(filename)s:%(lineno)s) [%(levelname)s] %(message)s')
        details_hdl = logging.StreamHandler()
        details_hdl.setFormatter(details_format)
        root_logger.addHandler(details_hdl)
    else:
        brief_format = logging.Formatter('%(message)s')
        console_hdl = logging.StreamHandler()
        console_hdl.setFormatter(brief_format)
        root_logger.addHandler(console_hdl)
    root_logger.setLevel(log_levels[int(settings.CFG["verbosity"])])
    configure_plumbum_log()
    configure_migrate_log()
    configure_parse_log()
Load logging configuration from our own defaults .
53,564
def find_shape(self, canvas_x, canvas_y):
    """Look up the body at the given canvas coordinates, or None.

    Transforms canvas coordinates into shape space, queries the pymunk
    space (supporting both the legacy and current point-query APIs)
    and maps the hit shape back to its registered body.
    """
    shape_x, shape_y, w = self.canvas_to_shapes_transform.dot(
        [canvas_x, canvas_y, 1])
    if hasattr(self.space, 'point_query_first'):
        # legacy pymunk API
        shape = self.space.point_query_first((shape_x, shape_y))
    else:
        info = self.space.point_query_nearest(
            (shape_x, shape_y), 0, [pymunk.ShapeFilter.ALL_CATEGORIES])
        shape = info.shape if info else None
    if shape:
        return self.bodies[shape.body]
    return None
Look up shape based on canvas coordinates .
53,565
def get_bounding_box(df_points):
    """Calculate the bounding box of all points in a data frame.

    :returns: a Series named 'bounding_box' with entries x, y
        (minimum corner) and width, height (extents).
    """
    lower = df_points[['x', 'y']].min()
    extent = df_points[['x', 'y']].max() - lower
    extent.index = 'width', 'height'
    box = pd.concat([lower, extent])
    box.name = 'bounding_box'
    return box
Calculate the bounding box of all points in a data frame .
53,566
def json_decoder_hook(dct, str_decoders=STRING_DECODERS,
                      converters=MappingProxyType(dict())) -> dict:
    """Decoder hook for parsing typical objects like uuids and dates.

    :param dct: decoded JSON object (mutated in place and returned)
    :param str_decoders: chain of string parsers; the first one that
        returns a non-str wins
    :param converters: per-key parse functions, taking precedence
        over the string decoders
    """
    for k, v in dct.items():
        if k in converters:
            parse_func = converters[k]
            dct[k] = parse_func(v)
        elif isinstance(v, str):
            for decode_func in str_decoders:
                v = decode_func(v)
                if not isinstance(v, str):
                    break
            dct[k] = v
        # BUG FIX: collections.Mapping was removed in Python 3.10;
        # the ABC lives in collections.abc.
        elif isinstance(v, collections.abc.Mapping):
            dct[k] = json_decoder_hook(v, str_decoders, converters)
    return dct
Decoder for parsing typical objects like uuid s and dates .
53,567
def make_json_decoder_hook(str_decoders=STRING_DECODERS,
                           extra_str_decoders=tuple(),
                           converters=MappingProxyType(dict())) -> Callable:
    """Customize JSON string decoder hooks.

    Returns a partial of json_decoder_hook with the base and extra
    string decoders chained together.
    """
    all_decoders = tuple(chain(str_decoders, extra_str_decoders))
    return partial(json_decoder_hook,
                   str_decoders=all_decoders,
                   converters=converters)
Customize JSON string decoder hooks .
53,568
def wait_for_completion(report, interval=10):
    """Wait for asynchronous SLURM jobs still running in the campaign.

    :param report: report node whose 'jobid' entries are polled
    :param interval: seconds between polls
    :yields: a dict per job (or a bare id dict when accounting lookup
        fails with ENOENT).
    """
    for jobid in report.collect('jobid'):
        try:
            if not Job.finished(jobid):
                logging.info('waiting for SLURM job %s', jobid)
                time.sleep(interval)
                while not Job.finished(jobid):
                    time.sleep(interval)
            yield Job.fromid(jobid)._asdict()
        except OSError as e:
            if e.errno == errno.ENOENT:
                # job vanished from SLURM accounting: report the id only
                yield dict(id=str(jobid))
            else:
                raise e
Wait for asynchronous jobs still running in the given campaign.
53,569
def main(argv=None):
    """ben-wait entry point.

    Waits for the campaign's SLURM jobs, optionally logs the status
    report, and (when run as a script, argv is None) exits non-zero
    on failure.
    """
    arguments = cli_common(__doc__, argv=argv)
    report = ReportNode(arguments['CAMPAIGN-DIR'])
    jobs = wait_for_completion(report, float(arguments['--interval']))
    status = ReportStatus(report, jobs)
    if not arguments['--silent']:
        fmt = arguments['--format'] or 'log'
        status.log(fmt)
    if argv is None:
        sys.exit(0 if status.succeeded else 1)
    return status.status
ben - wait entry point
53,570
def getColors_Triad(hue=None, sat=1, val=1, spread=60):
    """Create a palette with one main color and two opposite colors
    evenly spread apart from the main one.

    :param hue: main hue in [0, 1); random when None.
    :param sat: saturation applied to all three colors.
    :param val: value/brightness applied to all three colors.
    :param spread: separation in degrees between the two complements.
    :returns: list of three Color instances.
    """
    # BUG FIX (idiom): `hue == None` → `hue is None`; None checks use
    # identity, and `==` may misbehave for objects overriding it.
    leadHue = randFloat(0, 1) if hue is None else hue
    palette = [
        Color(0, 0, 0, 1).set_HSV(leadHue, sat, val),
        Color(0, 0, 0, 1).set_HSV((leadHue + 0.5 + spread / 360) % 1, sat, val),
        Color(0, 0, 0, 1).set_HSV((leadHue + 0.5 - spread / 360) % 1, sat, val),
    ]
    return palette
Create a palette with one main color and two opposite color evenly spread apart from the main one .
53,571
def default_job_name(self):
    """Slurm job name, used when the sbatch section does not set one.

    Combines the campaign file stem (for new campaigns) with the tag.
    """
    parts = []
    if not self.root.existing_campaign:
        campaign_file = osp.basename(self.root.campaign_file)
        stem = osp.splitext(campaign_file)[0]
        parts.append(stem + '/')
    parts.append(self.tag)
    return ''.join(parts)
Slurm job name if not already specified in the sbatch section
53,572
def main(argv=None):
    """ben-tpl entry point.

    With -g, generates a plugin config template; otherwise renders
    the project template using the JSON context found in <FILE>.
    """
    arguments = cli_common(__doc__, argv=argv)
    plugin = 'benchmark' if arguments['benchmark'] else None
    if arguments['-g']:
        template.generate_config(plugin, arguments['<FILE>'])
    else:
        with open(arguments['<FILE>']) as istr:
            context = json.load(istr)
        kwargs = dict(no_input=True, extra_context=context)
        if arguments['--output-dir']:
            kwargs.update(output_dir=arguments['--output-dir'])
        if arguments['--interactive']:
            kwargs.update(no_input=False)
        logging.info('generating template in directory '
                     + kwargs.get('output_dir', os.getcwd()))
        template.generate_template(plugin, **kwargs)
ben - tpl entry point
53,573
def partition_ordered(sequence, key=None):
    """Partition an ordered sequence by key, yielding
    (key, group-as-list) pairs."""
    for group_key, group in groupby(sequence, key=key):
        yield group_key, list(group)
Partition ordered sequence by key .
53,574
def partition(predicate, iterable):
    """Use a predicate to partition entries into
    (false-entries, true-entries) iterators."""
    left, right = tee(iterable)
    return filterfalse(predicate, left), filter(predicate, right)
Use a predicate to partition true and false entries .
53,575
def PortageFactory(name, NAME, DOMAIN, BaseClass=autoportage.AutoPortage):
    """Create a new dynamic portage project class.

    :returns: a subclass of BaseClass with NAME/DOMAIN filled in and
        runtime testing disabled.
    """
    def run_not_supported(self, *args, **kwargs):
        """Runtime testing is not supported on auto-generated projects."""
        del args, kwargs
        LOG.warning("Runtime testing not supported on auto-generated projects.")
        return
    newclass = type(name, (BaseClass,), {
        "NAME": NAME,
        "DOMAIN": DOMAIN,
        "SRC_FILE": "none",
        "VERSION": BaseClass.VERSION,
        "GROUP": "auto-gentoo",
        "run": run_not_supported,
        # pretend the class was defined in __main__ so it pickles/loads
        # like a user-defined project
        "__module__": "__main__"
    })
    return newclass
Create a new dynamic portage project .
53,576
def apply_unitschema(data, uschema, as_quantity=True, raise_outerr=False,
                     convert_base=False, use_wildcards=False,
                     list_of_dicts=False):
    """Apply the unit schema to the data.

    :param data: nested mapping of values
    :param uschema: nested mapping of unit strings, matched by key-path suffix
    :param as_quantity: return pint quantities rather than magnitudes
    :param raise_outerr: raise KeyError for values with no matching units
    :param convert_base: convert each quantity to its base units
    :param use_wildcards: fnmatch-style matching of schema key paths
    :param list_of_dicts: treat lists of dicts as nested mappings
    :raises ImportError: when pint is not installed.
    """
    try:
        from pint import UnitRegistry
        ureg = UnitRegistry()
        from pint.quantity import _Quantity
    except ImportError:
        raise ImportError('please install pint to use this module')

    list_of_dicts = '__list__' if list_of_dicts else None
    uschema_flat = flatten(uschema, key_as_tuple=True)
    # sort keys longest-first so the most specific path wins
    uschema_keys = sorted(uschema_flat, key=len, reverse=True)
    data_flat = flatten(data, key_as_tuple=True, list_of_dicts=list_of_dicts)

    for dkey, dvalue in data_flat.items():
        converted = False
        for ukey in uschema_keys:
            if not len(ukey) == len(dkey[-len(ukey):]):
                continue
            if use_wildcards:
                match = all(fnmatch(d, u)
                            for u, d in zip(ukey, dkey[-len(ukey):]))
            else:
                match = ukey == dkey[-len(ukey):]
            if match:
                if isinstance(dvalue, (list, tuple)):
                    dvalue = np.array(dvalue)
                    # np.object was removed in NumPy 1.24; the builtin
                    # `object` is the documented replacement
                    if dvalue.dtype == object:
                        dvalue = dvalue.astype(float)
                if isinstance(dvalue, _Quantity):
                    quantity = dvalue.to(uschema_flat[ukey])
                else:
                    quantity = ureg.Quantity(dvalue, uschema_flat[ukey])
                if convert_base:
                    quantity = quantity.to_base_units()
                if as_quantity:
                    data_flat[dkey] = quantity
                else:
                    data_flat[dkey] = quantity.magnitude
                # BUG FIX: `converted` was never set True, so
                # raise_outerr fired even for successfully matched keys
                converted = True
                break
        if not converted and raise_outerr:
            raise KeyError('could not find units for {}'.format(dkey))

    return unflatten(data_flat, list_of_dicts=list_of_dicts)
apply the unit schema to the data
53,577
def get_version_from_cache_dir(src_file):
    """Creates a version string for a project out of the hash of its
    cache directory, truncated to 7 characters.

    Returns None when src_file is None, the tmp dir is missing, or
    the hash could not be computed.
    """
    if src_file is None:
        return None
    tmp_dir = local.path(str(CFG["tmp_dir"]))
    if tmp_dir.exists():
        cache_file = tmp_dir / src_file
        dir_hash = get_hash_of_dirs(cache_file)
        if dir_hash is None:
            return None
        if len(str(dir_hash)) <= 7:
            return str(dir_hash)
        return str(dir_hash)[:7]
    return None
Creates a version for a project out of the hash .
53,578
def path_to_list(pathstr):
    """Convert a path string into a list of its non-empty elements."""
    parts = pathstr.split(os.path.pathsep)
    return [part for part in parts if part]
Convert a path string to a list of path elements.
53,579
def determine_path():
    """Borrowed from wxglade.py: return the directory containing this
    module, resolving a symlinked __file__ first."""
    location = __file__
    if os.path.islink(location):
        location = os.path.realpath(location)
    return os.path.dirname(os.path.abspath(location))
Borrowed from wxglade . py
53,580
def template_str(template):
    """Read a template file from the resources and return it as str."""
    tmpl_file = os.path.join(determine_path(), template)
    with open(tmpl_file, mode='r') as tmpl_strm:
        return tmpl_strm.read()
Read a template file from the resources and return it as str .
53,581
def mkdir_interactive(dirpath):
    """Create a directory after asking the user for confirmation.

    Does nothing if the directory already exists.

    Args:
        dirpath: path of the directory to create.
    """
    from benchbuild.utils.cmd import mkdir
    if os.path.exists(dirpath):
        return

    should_create = ui.ask(
        "The directory {dirname} does not exist yet. "
        "Should I create it?".format(dirname=dirpath),
        default_answer=True,
        default_answer_str="yes")
    if should_create:
        mkdir("-p", dirpath)
        print("Created directory {0}.".format(dirpath))
Create a directory if required .
53,582
def get_val_by_text(root, search):
    """Return the element that immediately follows, in document order, the
    element whose text equals ``search``.

    In MeasYaps XML the value node follows its label node, so this
    effectively looks up a value by its label text.

    Args:
        root: XML element to iterate under.
        search: text content identifying the label node.

    Returns:
        The element after the match, or None if nothing matches.
    """
    previous_matched = False
    for element in root.iter():
        if previous_matched:
            return element
        previous_matched = element.text == search
From MeasYaps XML root find next sibling of node matching search .
53,583
def get_yaps_by_name(root, name, afun=lambda x: x, default=None):
    """Look up a value in the YAPS parameter map of the XML header.

    Args:
        root: XML root element.
        name: ``name`` attribute of the ParamLong node to find.
        afun: callable applied to the node text before returning.
        default: value returned when the node is absent.

    Returns:
        ``afun(text)`` of the matching value node, or ``default``.
    """
    xpath = "ParamMap[@name='YAPS']/ParamLong[@name='%s']/value" % name
    node = root.find(xpath)
    if node is None:
        return default
    return afun(node.text)
From XML root return value of node matching attribute name .
53,584
def main(args):
    """Entry point when called from the command line.

    Maps the supported command-line flags onto keyword arguments of
    readMeasDataVB15 and dispatches via decode_simple_opts.
    """
    flag_specs = [
        ('-rfft', 'resetFFTscale', False),
        ('-r1', 'readOneCoil', False),
        ('-rp', 'readPhaseCorInfo', False),
        ('-rn', 'readNavigator', False),
        ('-skipts', 'readTimeStamp', True),
        ('-nnavek', 'nNavEK', False),
        ('-ros', 'removeOS', False),
        ('-rosa', 'removeOSafter', False),
        ('-I', 'transformToImageSpace', False),
        ('-w', 'writeToFile', False),
        ('-npz', 'npz', False),
    ]
    options = {flag: [kwarg, value] for flag, kwarg, value in flag_specs}
    decode_simple_opts(options, args, readMeasDataVB15)
Function run when called from command line .
53,585
def _get_keys_folder(jdir, key_path=None, in_memory=True,
                     ignore_prefix=('.', '_')):
    """Get json keys from a directory structure.

    Each .json file contributes its basename and each non-ignored
    sub-directory contributes its name.  If key_path is given, descend
    into the matching file or directory instead of listing this level.

    Args:
        jdir: path_like object (must support iterdir).
        key_path: list of keys to descend along before listing.
        in_memory: load json files fully into memory.
        ignore_prefix: sub-directories starting with these are skipped.

    Returns:
        Sorted list of key names.

    Raises:
        ValueError: jdir has no iterdir method.
        KeyError: the first key of key_path was not found.
        IOError: a sub-directory and a .json file share the same name.
    """
    if not hasattr(jdir, 'iterdir'):
        raise ValueError('jdir is not a path object; {}'.format(jdir))
    key_path = [] if key_path is None else key_path

    keys = []
    key_found = not key_path
    search_key = key_path[0] if key_path else None

    for entry in jdir.iterdir():
        if entry.is_file() and entry.name[-5:] == '.json':
            stem = os.path.splitext(entry.name)[0]
            if stem == search_key or not key_path:
                key_found = True
                if key_path:
                    return jkeys(entry, key_path[1:], in_memory,
                                 ignore_prefix)
                keys.append(stem)
        elif (entry.is_dir()
                and not entry.name.startswith(ignore_prefix)
                and (entry.name == search_key or not key_path)):
            key_found = True
            if entry.name in keys:
                raise IOError(
                    'directory has a sub-dir and file with same name: '
                    '{1} and {1}.json in {0}'.format(jdir, entry.name))
            if key_path:
                return jkeys(entry, key_path[1:], in_memory, ignore_prefix)
            keys.append(entry.name)

    if not key_found:
        raise KeyError('key not found: {0}'.format(search_key))
    return sorted(keys)
get json keys from directory structure
53,586
def jkeys(jfile, key_path=None, in_memory=True, ignore_prefix=('.', '_')):
    """Get keys for the initial json level, or the level reached after
    following key_path.

    Args:
        jfile: str path, file_like or path_like object pointing at json
            data (a single file or a directory of .json files).
        key_path: list of keys to descend along before listing.
        in_memory: parse the whole file (True) or stream it.
        ignore_prefix: directory names starting with these are skipped.

    Returns:
        List of keys at the requested level.

    Raises:
        IOError: a str path does not exist.
        ValueError: jfile is not a recognised object type.
    """
    key_path = [] if key_path is None else key_path

    def keys_of(file_obj):
        # choose between full in-memory parse and streaming lookup
        if in_memory:
            return _get_keys(file_obj, key_path)
        return _get_keys_ijson(file_obj, key_path)

    if isinstance(jfile, basestring):
        if not os.path.exists(jfile):
            raise IOError('jfile does not exist: {}'.format(jfile))
        if os.path.isdir(jfile):
            return _get_keys_folder(pathlib.Path(jfile), key_path,
                                    in_memory, ignore_prefix)
        with open(jfile, 'r') as file_obj:
            return keys_of(file_obj)
    if hasattr(jfile, 'read'):
        return keys_of(jfile)
    if hasattr(jfile, 'iterdir'):
        if jfile.is_file():
            with jfile.open('r') as file_obj:
                return keys_of(file_obj)
        return _get_keys_folder(jfile, key_path, in_memory, ignore_prefix)
    raise ValueError('jfile should be a str, '
                     'file_like or path_like object: {}'.format(jfile))
get keys for initial json level or at level after following key_path
53,587
def _file_with_keys(file_obj, key_path=None, parse_decimal=False):
    """Read json from an open file, returning only the data under key_path.

    Tries to stream the file with ijson so that only the requested
    sub-tree is parsed; falls back to a full json.load when ijson was not
    importable at module load time (detected via NameError).

    Args:
        file_obj: open file-like object containing json.
        key_path: list of keys to descend along; [] if None.
        parse_decimal: parse floats as decimal.Decimal instead of float.

    Returns:
        The decoded data found at key_path.

    Raises:
        KeyError: key_path is not present in the json.
    """
    key_path = [] if key_path is None else key_path
    try:
        # ijson addresses sub-objects by a dotted key path
        objs = ijson.items(file_obj, '.'.join(key_path))
    except NameError:
        # ijson module missing; parse the whole file in memory instead
        warnings.warn('ijson package not found in environment, \
please install for on-disk key indexing', ImportWarning)
        data = json.load(file_obj,
                         parse_float=Decimal if parse_decimal else float,
                         object_hook=decode)
        return indexes(data, key_path)
    try:
        data = next(objs)
    except StopIteration:
        raise KeyError('key path not available in json: {}'.format(key_path))
    if not parse_decimal:
        # ijson yields Decimal for floats; convert back in place
        convert_type(data, Decimal, float, in_place=True)
    # round-trip through json to apply the project's decode object_hook
    datastr = json.dumps(data)
    data = json.loads(datastr, object_hook=decode)
    return data
Read json from an open file, returning only the data under a given key path.
53,588
def _folder_to_json(jdir, key_path=None, in_memory=True,
                    ignore_prefix=('.', '_'), dic=None, parse_decimal=False):
    """Read a folder structure into a json-like dict, in place.

    .json files become keys holding their decoded content and
    sub-directories become nested dicts, optionally descending only
    along key_path.

    Args:
        jdir: path_like object (must support iterdir).
        key_path: list of keys to descend along; [] if None.
        in_memory: load files fully into memory.
        ignore_prefix: sub-directories starting with these are skipped.
        dic: dict populated in place; a fresh one is created if None.
            BUGFIX: the default used to be a mutable ``{}``, which is
            shared across calls and silently accumulates state.
        parse_decimal: parse floats as decimal.Decimal instead of float.

    Returns:
        None; results are written into ``dic``.

    Raises:
        ValueError: jdir has no iterdir method.
        KeyError: the first key of key_path was not found.
        IOError: a sub-directory and a .json file share the same name.
    """
    key_path = [] if key_path is None else key_path
    dic = {} if dic is None else dic
    if not hasattr(jdir, 'iterdir'):
        raise ValueError('jdir is not a path object; {}'.format(jdir))

    key_found = False if key_path else True
    search_key = key_path[0] if len(key_path) > 0 else None

    for jsub in jdir.iterdir():
        if jsub.is_file() and jsub.name.endswith('.json'):
            name, ext = os.path.splitext(jsub.name)
            if name == search_key or not key_path:
                key_found = True
                if key_path:
                    data = to_dict(jsub, key_path[1:], in_memory,
                                   ignore_prefix, parse_decimal)
                    if isinstance(data, dict):
                        dic.update(data)
                    else:
                        # non-dict payload: mark it with a terminus sentinel
                        dic.update({_Terminus(): data})
                else:
                    dic[name] = to_dict(jsub, key_path[1:], in_memory,
                                        ignore_prefix, parse_decimal)
        elif (jsub.is_dir()
                and not jsub.name.startswith(ignore_prefix)
                and (jsub.name == search_key or not key_path)):
            key_found = True
            if jsub.name in dic.keys():
                raise IOError(
                    'directory has a sub-dir and file with same name: '
                    '{1} and {1}.json in {0}'.format(jdir, jsub.name))
            if key_path:
                sub_d = dic
            else:
                dic[jsub.name] = {}
                sub_d = dic[jsub.name]
            _folder_to_json(jsub, key_path[1:], in_memory, ignore_prefix,
                            sub_d, parse_decimal)

    if not key_found:
        raise KeyError('key not found: {0}'.format(search_key))
read in folder structure as json
53,589
def to_dict(jfile, key_path=None, in_memory=True,
            ignore_prefix=('.', '_'), parse_decimal=False):
    """Input json to dict.

    Args:
        jfile: str path, file_like or path_like object pointing at a
            json file or at a directory of .json files.
        key_path: list of keys to descend along before returning.
        in_memory: parse the whole file (True) or stream with ijson.
        ignore_prefix: directory names starting with these are skipped.
        parse_decimal: parse floats as decimal.Decimal instead of float.

    Returns:
        The decoded data (usually a dict).

    Raises:
        IOError: a str path does not exist.
        ValueError: jfile is not a recognised object type.
    """
    key_path = [] if key_path is None else key_path

    def _load(file_obj):
        # single place for the three loading strategies that were
        # previously copy-pasted into every branch
        if key_path and not in_memory:
            return _file_with_keys(file_obj, key_path, parse_decimal)
        data = json.load(file_obj, object_hook=decode,
                         parse_float=Decimal if parse_decimal else float)
        if key_path:
            data = indexes(data, key_path)
        return data

    def _load_folder(jpath):
        data = {}
        _folder_to_json(jpath, key_path[:], in_memory, ignore_prefix,
                        data, parse_decimal)
        # a _Terminus key marks a non-dict payload stored by
        # _folder_to_json; unwrap it.
        # BUGFIX: dict.values() is a view in Python 3 and cannot be
        # indexed -- wrap in list() first; also guard against an empty
        # directory, where keys() has no first element.
        if data and isinstance(list(data.keys())[0], _Terminus):
            data = list(data.values())[0]
        return data

    if isinstance(jfile, basestring):
        if not os.path.exists(jfile):
            raise IOError('jfile does not exist: {}'.format(jfile))
        if os.path.isdir(jfile):
            return _load_folder(pathlib.Path(jfile))
        with open(jfile, 'r') as file_obj:
            return _load(file_obj)
    if hasattr(jfile, 'read'):
        return _load(jfile)
    if hasattr(jfile, 'iterdir'):
        if jfile.is_file():
            with jfile.open() as file_obj:
                return _load(file_obj)
        return _load_folder(jfile)
    raise ValueError('jfile should be a str, '
                     'file_like or path_like object: {}'.format(jfile))
input json to dict
53,590
def chunks(items, chunksize):
    """Turn a generator sequence into a sequence of chunks.

    Each yielded chunk is itself a lazy iterator over up to ``chunksize``
    consecutive items.  Any items the consumer leaves unread are drained
    before the next chunk is produced, so chunks must be consumed in order.

    Args:
        items: any iterable.
        chunksize: maximum number of items per chunk.

    Yields:
        Iterators over consecutive slices of ``items``.
    """
    source = iter(items)
    for head in source:
        chunk = chain((head,), islice(source, chunksize - 1))
        yield chunk
        # exhaust whatever the caller did not consume; a maxlen=0 deque
        # drains the iterator at C speed without storing anything
        deque(chunk, 0)
Turn generator sequence into sequence of chunks .
53,591
def unionfs(rw='rw', ro=None, union='union'):
    """Decorator for the UnionFS feature.

    Runs the wrapped project step inside a unionfs mount: the project's
    container image is the read-only layer, writes go to a separate
    read-write layer, and the union of both is presented as the project's
    build directory for the duration of the call.

    Args:
        rw: name of the read-write layer below the build directory.
        ro: unused here; the read-only layer comes from project.container.
        union: name of the union mount point below the build directory.

    Returns:
        A decorator wrapping a function(project, *args, **kwargs).

    Raises:
        UnmountError: the union mount could not be torn down afterwards.
    """
    from functools import wraps

    def wrap_in_union_fs(func):
        @wraps(func)
        def wrap_in_union_fs_func(project, *args, **kwargs):
            container = project.container
            # no container, or already running inside one: nothing to mount
            if container is None or in_container():
                return func(project, *args, **kwargs)
            build_dir = local.path(project.builddir)
            LOG.debug("UnionFS - Project builddir: %s", project.builddir)
            # nested unionfs mounts are not supported; reuse the active one
            if __unionfs_is_active(root=build_dir):
                LOG.debug(
                    "UnionFS already active in %s, nesting not supported.",
                    build_dir)
                return func(project, *args, **kwargs)
            ro_dir = local.path(container.local)
            rw_dir = build_dir / rw
            un_dir = build_dir / union
            LOG.debug("UnionFS - RW: %s", rw_dir)
            unionfs_cmd = __unionfs_set_up(ro_dir, rw_dir, un_dir)
            project_builddir_bak = project.builddir
            # the wrapped step must build inside the union while mounted
            project.builddir = un_dir
            proc = unionfs_cmd.popen()
            # busy-wait until the mount is up or the mount process died
            while (not __unionfs_is_active(root=un_dir)) and \
                    (proc.poll() is None):
                pass
            ret = None
            if proc.poll() is None:
                try:
                    with local.cwd(un_dir):
                        ret = func(project, *args, **kwargs)
                finally:
                    # always restore the original builddir and unmount
                    project.builddir = project_builddir_bak
                    from signal import SIGINT
                    is_running = proc.poll() is None
                    # politely ask unionfs to unmount; kill if it hangs
                    while __unionfs_is_active(root=un_dir) and is_running:
                        try:
                            proc.send_signal(SIGINT)
                            proc.wait(timeout=3)
                        except subprocess.TimeoutExpired:
                            proc.kill()
                            is_running = False
                    LOG.debug("Unionfs shut down.")
            if __unionfs_is_active(root=un_dir):
                raise UnmountError()
            return ret
        return wrap_in_union_fs_func
    return wrap_in_union_fs
Decorator for the UnionFS feature .
53,592
def __update_cleanup_paths(new_path):
    """Add the new path to the list of paths to clean up afterwards.

    Deduplicates against the paths already registered in the global
    configuration.
    """
    cleanup_dirs = set(settings.CFG["cleanup_paths"].value)
    cleanup_dirs.add(new_path)
    settings.CFG["cleanup_paths"] = list(cleanup_dirs)
Add the new path to the list of paths to clean up afterwards .
53,593
def __is_outside_of_builddir(project, path_to_check):
    """Check if a path lies outside of the project's build directory.

    Returns:
        True when path_to_check does not live under project.builddir.
    """
    build_dir = project.builddir
    shared_prefix = os.path.commonprefix([path_to_check, build_dir])
    return shared_prefix != build_dir
Check if a project lies outside of its expected directory .
53,594
def __unionfs_set_up(ro_dir, rw_dir, mount_dir):
    """Set up a unionfs-fuse mount command (does not start it).

    Creates the mount point and the read-write layer, validates the
    read-only base, and returns the bound unionfs command ready to run.

    Args:
        ro_dir: existing read-only base directory.
        rw_dir: read-write layer (created here).
        mount_dir: union mount point (created here).

    Returns:
        A bound unionfs command object.

    Raises:
        ValueError: the read-only base directory does not exist.
    """
    mount_dir.mkdir()
    rw_dir.mkdir()
    if not ro_dir.exists():
        LOG.error("Base dir does not exist: '%s'", ro_dir)
        raise ValueError("Base directory does not exist")

    from benchbuild.utils.cmd import unionfs as unionfs_cmd
    LOG.debug("Mounting UnionFS on %s with RO:%s RW:%s", mount_dir, ro_dir,
              rw_dir)
    layers = rw_dir + "=RW:" + ro_dir + "=RO"
    return unionfs_cmd["-f", "-o", "auto_unmount,allow_other,cow", layers,
                       mount_dir]
Set up a unionfs mount via unionfs-fuse.
53,595
def get_precision_regex():
    """Build the compiled regex used to extract the precision metric from
    command output.

    Matches the (escaped) precision formula followed by "= <value> ...
    <UNIT>", capturing the value and the upper-case unit.
    """
    pattern = re.escape(PRECISION_FORMULA) + r'=\s*(\S*)\s.*\s([A-Z]*)'
    return re.compile(pattern)
Build regular expression used to extract precision metric from command output
53,596
def _build_data(self):
    """Build HPL input data from the benchmark's basic parameters.

    Derives the problem size N from the available memory, rounds it down
    to an even multiple of the block size, picks a near-square process
    grid P x Q from the total core count, and renders the jinja template
    with these values.
    """
    def problem_size(nodes, mem_per_node):
        # size the double-precision matrix to use ~80% of aggregate memory
        return int(math.sqrt(mem_per_node * 0.80 * nodes * 1024 * 1024 / 8))

    def round_to_block(size, block):
        # largest even multiple of the block size not exceeding `size`
        multiple = int(size / block)
        if multiple % 2 != 0:
            multiple -= 1
        return block * multiple

    def process_grid(nodes, per_node):
        # choose P as the largest divisor of the core count not exceeding
        # sqrt(cores), Q as the complementary factor
        cores = nodes * per_node
        sqrt_cores = math.sqrt(cores)
        divisors = [num for num in range(2, int(math.floor(sqrt_cores) + 1))
                    if cores % num == 0]
        if len(divisors) == 0:
            divisors = [1]
        best_diff = 0
        best = 0
        for candidate in divisors:
            if best_diff == 0:
                best_diff = cores - candidate
            if best == 0:
                best = candidate
            diff = cores - candidate
            if diff < best_diff:
                best_diff = diff
                best = candidate
        return [best, int(cores / best)]

    properties = dict(
        realN=round_to_block(
            problem_size(self.nodes, self.memory_per_node),
            self.block_size),
        nb=self.block_size,
        pQ=process_grid(self.nodes, self.cores_per_node),
    )
    return self._data_from_jinja(**properties)
Build HPL data from basic parameters
53,597
def mpirun(self):
    """Return the mpirun command line built from the additional options.

    Reads the ``mpirun`` attribute, a list of additional options, and
    ensures the resulting command starts with the ``mpirun`` executable.

    Returns:
        List of strings: the mpirun command followed by its options, or
        an empty list when no options are configured.
    """
    cmd = self.attributes['mpirun']
    if cmd and cmd[0] != 'mpirun':
        # BUGFIX: prepend the executable instead of replacing the whole
        # list, which silently discarded the caller-supplied options
        cmd = ['mpirun'] + cmd
    return [str(e) for e in cmd]
Additional options passed as a list to the mpirun command
53,598
def expandvars(s, vars=None):
    """Perform variable substitution on the given string.

    Args:
        s: template string with $var / ${var} references.
        vars: mapping used for substitution; falls back to os.environ
            when empty or None.

    Returns:
        The substituted string.
    """
    mapping = vars if vars else os.environ
    return TemplateWithDefaults(s).substitute(mapping)
Perform variable substitution on the given string
53,599
def xor(cls, obj, **kwargs):
    """Query an object.

    Evaluates the keyword conditions against ``obj`` via the private
    sequence-expression evaluator, combining per-condition results with
    ``operator.xor`` (presumably a logical XOR fold -- confirm against
    ``__eval_seqexp``).

    Args:
        obj: object whose fields are tested.
        **kwargs: field=value conditions.

    Returns:
        The result of ``__eval_seqexp`` with the XOR operator.
    """
    return cls.__eval_seqexp(obj, operator.xor, **kwargs)
Query an object .