idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
53,500 | def get_abbreviations ( self ) : abbreviations = [ ] try : type_abbreviation = self . session . get_resource ( BASE_URI_TYPES % "abbreviation" , self . session . get_class ( surf . ns . ECRM [ 'E55_Type' ] ) ) abbreviations = [ unicode ( label ) for name in self . ecrm_P1_is_identified_by for abbreviation in name . ecr... | Get abbreviations of the names of the author . |
53,501 | def get_urn ( self ) : try : type_ctsurn = self . session . get_resource ( BASE_URI_TYPES % "CTS_URN" , self . session . get_class ( surf . ns . ECRM [ 'E55_Type' ] ) ) urn = [ CTS_URN ( urnstring . rdfs_label . one ) for urnstring in self . ecrm_P1_is_identified_by if urnstring . uri == surf . ns . ECRM [ 'E42_Identif... | Assumes that each HucitAuthor has only one CTS URN . |
53,502 | def to_json ( self ) : names = self . get_names ( ) return json . dumps ( { "uri" : self . subject , "urn" : str ( self . get_urn ( ) ) , "names" : [ { "language" : lang , "label" : label } for lang , label in names ] , "name_abbreviations" : self . get_abbreviations ( ) , "works" : [ json . loads ( work . to_json ( ) ... | Serialises a HucitAuthor to a JSON formatted string . |
53,503 | def add_text_structure ( self , label ) : ts = self . session . get_resource ( "%s/text_structure" % self . subject , self . session . get_class ( surf . ns . HUCIT [ 'TextStructure' ] ) ) ts . rdfs_label . append ( Literal ( label ) ) ts . save ( ) self . hucit_has_structure = ts self . update ( ) return self . hucit_... | Adds a citable text structure to the work . |
53,504 | def remove_text_structure ( self , text_structure ) : idx = self . hucit_has_structure . index ( text_structure ) ts = self . hucit_has_structure . pop ( idx ) ts . remove ( ) self . update ( ) return | Remove any citable text structure from the work . |
53,505 | def _get_opus_maximum ( self ) : label = opmax = self . session . get_resource ( BASE_URI_TYPES % "opmax" , self . session . get_class ( surf . ns . ECRM [ 'E55_Type' ] ) ) if opmax . is_present ( ) : return opmax else : opmax . rdfs_label . append ( Literal ( label , "en" ) ) logger . debug ( "Created a new opus maxim... | Instantiate an opus maximum type . |
53,506 | def set_as_opus_maximum ( self ) : if self . is_opus_maximum ( ) : return False else : opmax = self . _get_opus_maximum ( ) self . ecrm_P2_has_type = opmax self . update ( ) return True | Mark explicitly the work as the author s opus maximum . |
53,507 | def is_opus_maximum ( self ) : opmax = self . _get_opus_maximum ( ) types = self . ecrm_P2_has_type if opmax in types : return True else : if len ( self . author . get_works ( ) ) == 1 : return True else : return False | Check whether the work is the author s opus maximum . |
53,508 | def author ( self ) : CreationEvent = self . session . get_class ( surf . ns . EFRBROO [ 'F27_Work_Conception' ] ) Person = self . session . get_class ( surf . ns . EFRBROO [ 'F10_Person' ] ) creation_event = CreationEvent . get_by ( efrbroo_R16_initiated = self ) . first ( ) return Person . get_by ( efrbroo_P14i_perfo... | Returns the author to whom the work is attributed . |
53,509 | def to_json ( self ) : titles = self . get_titles ( ) return json . dumps ( { "uri" : self . subject , "urn" : str ( self . get_urn ( ) ) , "titles" : [ { "language" : lang , "label" : label } for lang , label in titles ] , "title_abbreviations" : self . get_abbreviations ( ) } , indent = 2 ) | Serialises a HucitWork to a JSON formatted string . |
53,510 | def execution_cls ( self ) : name = self . campaign . process . type for clazz in [ ExecutionDriver , SrunExecutionDriver ] : if name == clazz . name : return clazz raise NameError ( "Unknown execution layer: '%s'" % name ) | Get execution layer class |
53,511 | def children ( self ) : tags = { '*' } if self . tag : network_tags = { self . tag : self . campaign . network . tags [ self . tag ] } else : network_tags = self . campaign . network . tags for tag , configs in network_tags . items ( ) : for config in configs : for mode , kconfig in config . items ( ) : if mode == 'mat... | Retrieve tags associated to the current node |
53,512 | def activate ( self ) : raise NotImplementedError ( "{0} | '{1}' must be implemented by '{2}' subclasses!" . format ( self . __class__ . __name__ , self . activate . __name__ , self . __class__ . __name__ ) ) | Sets Component activation state . |
53,513 | def reservations ( self ) : command = [ SINFO , '--reservation' ] output = subprocess . check_output ( command , env = SINFO_ENV ) output = output . decode ( ) it = iter ( output . splitlines ( ) ) next ( it ) for line in it : rsv = Reservation . from_sinfo ( line ) yield rsv . name , rsv | get nodes of every reservations |
53,514 | def set ( self , values ) : if hasattr ( self , "inputs" ) : for item in self . inputs : if hasattr ( self , item ) : setattr ( self , item , values [ item ] ) | Set the object parameters using a dictionary |
53,515 | def execute_plan ( plan ) : results = [ action ( ) for action in plan ] return [ result for result in results if actns . step_has_failed ( result ) ] | Execute the plan . |
53,516 | def format_repr ( obj , attributes ) -> str : attribute_repr = ', ' . join ( ( '{}={}' . format ( attr , repr ( getattr ( obj , attr ) ) ) for attr in attributes ) ) return "{0}({1})" . format ( obj . __class__ . __qualname__ , attribute_repr ) | Format an object s repr method with specific attributes . |
53,517 | def build_parameter_descriptions ( obj , user_p = None , output = "csv" , show_none = True , ignore = None , plist = None ) : if user_p is None : user_p = { } if ignore is None : ignore = [ ] para = [ [ obj . __class__ . __name__ + " inputs:" , "" , "" ] ] if plist is None : if not hasattr ( obj , 'inputs' ) : raise ex... | Creates a list of the description of all the inputs of an object |
53,518 | def all_descriptions ( ) : para = [ ] para += build_parameter_descriptions ( models . Soil ( ) ) + [ ",,\n" ] para += build_parameter_descriptions ( models . SoilProfile ( ) ) + [ ",,\n" ] para += build_parameter_descriptions ( models . Foundation ( ) ) + [ ",,\n" ] para += build_parameter_descriptions ( models . PadFo... | Generates a list of descriptions of all the models |
53,519 | def plot_shapes_heat_map ( df_shapes , shape_i_columns , values , axis = None , vmin = None , vmax = None , value_formatter = None , color_map = None ) : df_shapes = df_shapes . copy ( ) df_shapes . loc [ : , 'y' ] = df_shapes . y . max ( ) - df_shapes . y . copy ( ) . values aspect_ratio = ( ( df_shapes . x . max ( ) ... | Plot polygon shapes colored based on values mapped onto a colormap . |
53,520 | def plot_color_map_bars ( values , vmin = None , vmax = None , color_map = None , axis = None , ** kwargs ) : if axis is None : fig , axis = plt . subplots ( ) norm = mpl . colors . Normalize ( vmin = vmin or min ( values ) , vmax = vmax or max ( values ) , clip = True ) if color_map is None : color_map = mpl . rcParam... | Plot bar for each value in values colored based on values mapped onto the specified color map . |
53,521 | def parse_broken_json ( json_text : str ) -> dict : json_text = json_text . replace ( ":" , ": " ) json_dict = yaml . load ( json_text ) return json_dict | Parses broken JSON that the standard Python JSON module cannot parse . |
53,522 | def _executor_script ( self ) : fd , path = tempfile . mkstemp ( suffix = '.sh' , dir = os . getcwd ( ) ) os . close ( fd ) with open ( path , 'w' ) as ostr : self . _write_executor_script ( ostr ) mode = os . stat ( path ) . st_mode os . chmod ( path , mode | stat . S_IEXEC | stat . S_IRGRP | stat . S_IRUSR ) return p... | Create shell - script in charge of executing the benchmark and return its path . |
53,523 | def _write_executor_script ( self , ostr ) : environment = self . execution . get ( 'environment' ) or { } if not isinstance ( environment , Mapping ) : msg = 'Expected mapping for environment but got ' msg += str ( type ( environment ) ) raise Exception ( msg ) escaped_environment = dict ( ( var , six . moves . shlex_... | Write shell script in charge of executing the command |
53,524 | def command_str ( self ) : if isinstance ( self . command , six . string_types ) : return self . command return ' ' . join ( map ( six . moves . shlex_quote , self . command ) ) | get command to execute as string properly escaped |
53,525 | def popen ( self , stdout , stderr ) : self . logger . info ( 'Executing command: %s' , self . command_str ) return subprocess . Popen ( [ self . _executor_script ] , stdout = stdout , stderr = stderr ) | Build popen object to run |
53,526 | def srun ( self ) : commands = self . campaign . process . get ( 'commands' , { } ) srun = find_executable ( commands . get ( 'srun' , 'srun' ) ) if six . PY2 : srun = srun . encode ( 'utf-8' ) return srun | Get path to srun executable |
53,527 | def common_srun_options ( cls , campaign ) : default = dict ( campaign . process . get ( 'srun' ) or { } ) default . update ( output = 'slurm-%N-%t.stdout' , error = 'slurm-%N-%t.error' ) return default | Get options to be given to all srun commands |
53,528 | def command ( self ) : srun_optlist = build_slurm_arguments ( self . parent . command . srun or { } ) if not isinstance ( self . root . network . nodes ( self . tag ) , ConstraintTag ) : pargs = parse_constraint_in_args ( srun_optlist ) self . command_expansion_vars [ 'process_count' ] = pargs . ntasks if not pargs . c... | get command to execute |
53,529 | def srun_nodes ( self ) : count = self . execution . get ( 'srun_nodes' , 0 ) if isinstance ( count , six . string_types ) : tag = count count = 0 elif isinstance ( count , SEQUENCES ) : return count else : assert isinstance ( count , int ) tag = self . tag nodes = self . _srun_nodes ( tag , count ) if 'srun_nodes' in ... | Get list of nodes where to execute the command |
53,530 | def clear_graph ( identifier = None ) : graph = get_graph ( ) if identifier : graph . destroy ( identifier ) try : graph . close ( ) except : warn ( "Unable to close the Graph" ) | Clean up a graph by removing it |
53,531 | def set_label ( self , label , lang ) : try : self . metadata . add ( SKOS . prefLabel , Literal ( label , lang = lang ) ) self . graph . addN ( [ ( self . asNode ( ) , RDFS . label , Literal ( label , lang = lang ) , self . graph ) , ] ) except Exception as E : pass | Add the label of the collection in given lang |
53,532 | def members ( self ) : return list ( [ self . children_class ( child ) for child in self . graph . subjects ( RDF_NAMESPACES . DTS . parent , self . asNode ( ) ) ] ) | Children of the collection s item |
53,533 | def parent ( self ) : parent = list ( self . graph . objects ( self . asNode ( ) , RDF_NAMESPACES . DTS . parent ) ) if parent : return self . parent_class ( parent [ 0 ] ) return None | Parent of current object |
53,534 | def actions_for_project ( self , project ) : project . cflags = [ "-O3" , "-fno-omit-frame-pointer" ] project . runtime_extension = time . RunWithTime ( run . RuntimeExtension ( project , self ) ) return self . default_runtime_actions ( project ) | Compile & Run the experiment with - O3 enabled . |
53,535 | def to_step_result ( func ) : @ ft . wraps ( func ) def wrapper ( * args , ** kwargs ) : res = func ( * args , ** kwargs ) if not res : res = [ StepResult . OK ] if not hasattr ( res , "__iter__" ) : res = [ res ] return res return wrapper | Convert a function return to a list of StepResults . |
53,536 | def prepend_status ( func ) : @ ft . wraps ( func ) def wrapper ( self , * args , ** kwargs ) : res = func ( self , * args , ** kwargs ) if self . status is not StepResult . UNSET : res = "[{status}]" . format ( status = self . status . name ) + res return res return wrapper | Prepends the output of func with the status . |
53,537 | def notify_step_begin_end ( func ) : @ ft . wraps ( func ) def wrapper ( self , * args , ** kwargs ) : cls = self . __class__ on_step_begin = cls . ON_STEP_BEGIN on_step_end = cls . ON_STEP_END for begin_listener in on_step_begin : begin_listener ( self ) res = func ( self , * args , ** kwargs ) for end_listener in on_... | Print the beginning and the end of a func . |
53,538 | def log_before_after ( name : str , desc : str ) : def func_decorator ( f ) : @ ft . wraps ( f ) def wrapper ( * args , ** kwargs ) : LOG . info ( "\n%s - %s" , name , desc ) res = f ( * args , ** kwargs ) if StepResult . ERROR not in res : LOG . info ( "%s - OK\n" , name ) else : LOG . error ( "%s - ERROR\n" , name ) ... | Log customized string before & after running func . |
53,539 | def euclid ( a , b ) : a = abs ( a ) b = abs ( b ) if a < b : a , b = b , a while b != 0 : a , b = b , a % b return a | returns the Greatest Common Divisor of a and b |
53,540 | def coPrime ( l ) : for i , j in combinations ( l , 2 ) : if euclid ( i , j ) != 1 : return False return True | returns True if the values in the list L are all co - prime otherwise it returns False . |
53,541 | def modInv ( a , m ) : if coPrime ( [ a , m ] ) : linearCombination = extendedEuclid ( a , m ) return linearCombination [ 1 ] % m else : return 0 | returns the multiplicative inverse of a in modulo m as a positive value between zero and m - 1 |
53,542 | def int2baseTwo ( x ) : assert x >= 0 bitInverse = [ ] while x != 0 : bitInverse . append ( x & 1 ) x >>= 1 return bitInverse | x is a positive integer . Convert it to base two as a list of integers in reverse order as a list . |
53,543 | def newKey ( a , b , k ) : try : p = findAPrime ( a , b , k ) while True : q = findAPrime ( a , b , k ) if q != p : break except : raise ValueError n = p * q m = ( p - 1 ) * ( q - 1 ) while True : e = random . randint ( 1 , m ) if coPrime ( [ e , m ] ) : break d = modInv ( e , m ) return ( n , e , d ) | Try to find two large pseudo primes roughly between a and b . Generate public and private keys for RSA encryption . Raises ValueError if it fails to find one |
53,544 | def blocks2numList ( blocks , n ) : toProcess = copy . copy ( blocks ) returnList = [ ] for numBlock in toProcess : inner = [ ] for i in range ( 0 , n ) : inner . append ( numBlock % 256 ) numBlock >>= 8 inner . reverse ( ) returnList . extend ( inner ) return returnList | inverse function of numList2blocks . |
53,545 | def encrypt ( message , modN , e , blockSize ) : numList = string2numList ( message ) numBlocks = numList2blocks ( numList , blockSize ) message = numBlocks [ 0 ] return modExp ( message , e , modN ) | given a string message public keys and blockSize encrypt using RSA algorithms . |
53,546 | def decrypt ( secret , modN , d , blockSize ) : numBlocks = [ modExp ( blocks , d , modN ) for blocks in secret ] numList = blocks2numList ( numBlocks , blockSize ) return numList2string ( numList ) | reverse function of encrypt |
53,547 | def match_files ( files , pattern : Pattern ) : for name in files : if re . match ( pattern , name ) : yield name | Yields file name if matches a regular expression pattern . |
53,548 | def match_zipfile_members ( zipfile_path : str , pattern : Pattern ) : with ZipFile ( zipfile_path , mode = 'r' ) as zfile : members = zfile . namelist ( ) yield from match_files ( members , pattern ) | Match files to a pattern within a zip file s content . |
53,549 | def directory_files ( path ) : for entry in os . scandir ( path ) : if not entry . name . startswith ( '.' ) and entry . is_file ( ) : yield entry . name | Yield directory file names . |
53,550 | def get_file_listing_sha ( listing_paths : Iterable ) -> str : return sha256 ( '' . join ( sorted ( listing_paths ) ) . encode ( 'utf-8' ) ) . hexdigest ( ) | Return sha256 string for group of FTP listings . |
53,551 | def set_button_map ( self , button_map ) : assert self . finger_count > 0 , 'This device does not support tapping' return self . _libinput . libinput_device_config_tap_set_button_map ( self . _handle , button_map ) | Set the finger number to button number mapping for tap - to - click . |
53,552 | def set_matrix ( self , matrix ) : matrix = ( c_float * 6 ) ( * matrix ) return self . _libinput . libinput_device_config_calibration_set_matrix ( self . _handle , matrix ) | Apply the 3x3 transformation matrix to absolute device coordinates . |
53,553 | def matrix ( self ) : matrix = ( c_float * 6 ) ( ) rc = self . _libinput . libinput_device_config_calibration_get_matrix ( self . _handle , matrix ) return rc , tuple ( matrix ) | The current calibration matrix for this device . |
53,554 | def default_matrix ( self ) : matrix = ( c_float * 6 ) ( ) rc = self . _libinput . libinput_device_config_calibration_get_default_matrix ( self . _handle , matrix ) return rc , tuple ( matrix ) | The default calibration matrix for this device . |
53,555 | def sysname ( self ) : pchar = self . _libinput . libinput_device_get_sysname ( self . _handle ) return string_at ( pchar ) . decode ( ) | The system name of the device . |
53,556 | def set_seat_logical_name ( self , seat ) : rc = self . _libinput . libinput_device_set_seat_logical_name ( self . _handle , seat . encode ( ) ) assert rc == 0 , 'Cannot assign device to {}' . format ( seat ) | Change the logical seat associated with this device by removing the device and adding it to the new seat . |
53,557 | def capabilities ( self ) : caps = [ ] for cap in DeviceCapability : if self . _libinput . libinput_device_has_capability ( self . _handle , cap ) : caps . append ( cap ) return tuple ( caps ) | A tuple of capabilities this device supports . |
53,558 | def size ( self ) : width = c_double ( 0 ) height = c_double ( 0 ) rc = self . _libinput . libinput_device_get_size ( self . _handle , byref ( width ) , byref ( height ) ) assert rc == 0 , 'This device does not provide size information' return width . value , height . value | The physical size of a device in mm where meaningful . |
53,559 | def has_button ( self , button ) : rc = self . _libinput . libinput_device_pointer_has_button ( self . _handle , button ) assert rc >= 0 , 'This device is not a pointer device' return bool ( rc ) | Check if this device has a given button . |
53,560 | def num_mode_groups ( self ) : num = self . _libinput . libinput_device_tablet_pad_get_num_mode_groups ( self . _handle ) if num < 0 : raise AttributeError ( 'This device is not a tablet pad device' ) return num | Most devices only provide a single mode group however devices such as the Wacom Cintiq 22HD provide two mode groups . |
53,561 | def physical_name ( self ) : pchar = self . _libinput . libinput_seat_get_physical_name ( self . _handle ) return string_at ( pchar ) . decode ( ) | The physical name of the seat . |
53,562 | def logical_name ( self ) : pchar = self . _libinput . libinput_seat_get_logical_name ( self . _handle ) return string_at ( pchar ) . decode ( ) | The logical name of the seat . |
53,563 | def configure ( ) : log_levels = { 5 : logging . NOTSET , 4 : logging . DEBUG , 3 : logging . INFO , 2 : logging . WARNING , 1 : logging . ERROR , 0 : logging . CRITICAL } logging . captureWarnings ( True ) root_logger = logging . getLogger ( ) if settings . CFG [ "debug" ] : details_format = logging . Formatter ( '%(n... | Load logging configuration from our own defaults . |
53,564 | def find_shape ( self , canvas_x , canvas_y ) : shape_x , shape_y , w = self . canvas_to_shapes_transform . dot ( [ canvas_x , canvas_y , 1 ] ) if hasattr ( self . space , 'point_query_first' ) : shape = self . space . point_query_first ( ( shape_x , shape_y ) ) else : info = self . space . point_query_nearest ( ( shap... | Look up shape based on canvas coordinates . |
53,565 | def get_bounding_box ( df_points ) : xy_min = df_points [ [ 'x' , 'y' ] ] . min ( ) xy_max = df_points [ [ 'x' , 'y' ] ] . max ( ) wh = xy_max - xy_min wh . index = 'width' , 'height' bbox = pd . concat ( [ xy_min , wh ] ) bbox . name = 'bounding_box' return bbox | Calculate the bounding box of all points in a data frame . |
53,566 | def json_decoder_hook ( dct , str_decoders = STRING_DECODERS , converters = MappingProxyType ( dict ( ) ) ) -> dict : for k , v in dct . items ( ) : if k in converters : parse_func = converters [ k ] dct [ k ] = parse_func ( v ) elif isinstance ( v , str ) : for decode_func in str_decoders : v = decode_func ( v ) if no... | Decoder for parsing typical objects like uuid s and dates . |
53,567 | def make_json_decoder_hook ( str_decoders = STRING_DECODERS , extra_str_decoders = tuple ( ) , converters = MappingProxyType ( dict ( ) ) ) -> Callable : str_decoders = tuple ( chain ( str_decoders , extra_str_decoders ) ) object_hook = partial ( json_decoder_hook , str_decoders = str_decoders , converters = converters... | Customize JSON string decoder hooks . |
53,568 | def wait_for_completion ( report , interval = 10 ) : for jobid in report . collect ( 'jobid' ) : try : if not Job . finished ( jobid ) : logging . info ( 'waiting for SLURM job %s' , jobid ) time . sleep ( interval ) while not Job . finished ( jobid ) : time . sleep ( interval ) yield Job . fromid ( jobid ) . _asdict (... | Wait for asynchronous jobs still running in the given campaign . |
53,569 | def main ( argv = None ) : arguments = cli_common ( __doc__ , argv = argv ) report = ReportNode ( arguments [ 'CAMPAIGN-DIR' ] ) jobs = wait_for_completion ( report , float ( arguments [ '--interval' ] ) ) status = ReportStatus ( report , jobs ) if not arguments [ '--silent' ] : fmt = arguments [ '--format' ] or 'log' ... | ben - wait entry point |
53,570 | def getColors_Triad ( hue = None , sat = 1 , val = 1 , spread = 60 ) : palette = list ( ) if hue == None : leadHue = randFloat ( 0 , 1 ) else : leadHue = hue palette . append ( Color ( 0 , 0 , 0 , 1 ) . set_HSV ( leadHue , sat , val ) ) palette . append ( Color ( 0 , 0 , 0 , 1 ) . set_HSV ( ( leadHue + 0.5 + spread / 3... | Create a palette with one main color and two opposite color evenly spread apart from the main one . |
53,571 | def default_job_name ( self ) : name = '' if not self . root . existing_campaign : campaign_file = osp . basename ( self . root . campaign_file ) campaign = osp . splitext ( campaign_file ) [ 0 ] name += campaign + '/' name += self . tag return name | Slurm job name if not already specified in the sbatch section |
53,572 | def main ( argv = None ) : arguments = cli_common ( __doc__ , argv = argv ) plugin = 'benchmark' if arguments [ 'benchmark' ] else None if arguments [ '-g' ] : template . generate_config ( plugin , arguments [ '<FILE>' ] ) else : with open ( arguments [ '<FILE>' ] ) as istr : context = json . load ( istr ) kwargs = dic... | ben - tpl entry point |
53,573 | def partition_ordered ( sequence , key = None ) : yield from ( ( k , list ( g ) ) for k , g in groupby ( sequence , key = key ) ) | Partition ordered sequence by key . |
53,574 | def partition ( predicate , iterable ) : t1 , t2 = tee ( iterable ) return filterfalse ( predicate , t1 ) , filter ( predicate , t2 ) | Use a predicate to partition true and false entries . |
53,575 | def PortageFactory ( name , NAME , DOMAIN , BaseClass = autoportage . AutoPortage ) : def run_not_supported ( self , * args , ** kwargs ) : del args , kwargs LOG . warning ( "Runtime testing not supported on auto-generated projects." ) return newclass = type ( name , ( BaseClass , ) , { "NAME" : NAME , "DOMAIN" : DOMAI... | Create a new dynamic portage project . |
53,576 | def apply_unitschema ( data , uschema , as_quantity = True , raise_outerr = False , convert_base = False , use_wildcards = False , list_of_dicts = False ) : try : from pint import UnitRegistry ureg = UnitRegistry ( ) from pint . quantity import _Quantity except ImportError : raise ImportError ( 'please install pint to ... | apply the unit schema to the data |
53,577 | def get_version_from_cache_dir ( src_file ) : if src_file is None : return None tmp_dir = local . path ( str ( CFG [ "tmp_dir" ] ) ) if tmp_dir . exists ( ) : cache_file = tmp_dir / src_file dir_hash = get_hash_of_dirs ( cache_file ) if dir_hash is None : return None if len ( str ( dir_hash ) ) <= 7 : return str ( dir_... | Creates a version for a project out of the hash . |
53,578 | def path_to_list ( pathstr ) : return [ elem for elem in pathstr . split ( os . path . pathsep ) if elem ] | Convert a path string to a list of path elements . |
53,579 | def determine_path ( ) : root = __file__ if os . path . islink ( root ) : root = os . path . realpath ( root ) return os . path . dirname ( os . path . abspath ( root ) ) | Borrowed from wxglade . py |
53,580 | def template_str ( template ) : tmpl_file = os . path . join ( determine_path ( ) , template ) with open ( tmpl_file , mode = 'r' ) as tmpl_strm : return "" . join ( tmpl_strm . readlines ( ) ) | Read a template file from the resources and return it as str . |
53,581 | def mkdir_interactive ( dirpath ) : from benchbuild . utils . cmd import mkdir if os . path . exists ( dirpath ) : return response = ui . ask ( "The directory {dirname} does not exist yet. " "Should I create it?" . format ( dirname = dirpath ) , default_answer = True , default_answer_str = "yes" ) if response : mkdir (... | Create a directory if required . |
53,582 | def get_val_by_text ( root , search ) : found_flag = False for el in root . iter ( ) : if found_flag : return ( el ) if el . text == search : found_flag = True | From MeasYaps XML root find next sibling of node matching search . |
53,583 | def get_yaps_by_name ( root , name , afun = lambda x : x , default = None ) : node = root . find ( "ParamMap[@name='YAPS']/ParamLong[@name='%s']/value" % name ) if node is not None : return ( afun ( node . text ) ) else : return ( default ) | From XML root return value of node matching attribute name . |
53,584 | def main ( args ) : options = { '-rfft' : [ 'resetFFTscale' , False ] , '-r1' : [ 'readOneCoil' , False ] , '-rp' : [ 'readPhaseCorInfo' , False ] , '-rn' : [ 'readNavigator' , False ] , '-skipts' : [ 'readTimeStamp' , True ] , '-nnavek' : [ 'nNavEK' , False ] , '-ros' : [ 'removeOS' , False ] , '-rosa' : [ 'removeOSaf... | Function run when called from command line . |
53,585 | def _get_keys_folder ( jdir , key_path = None , in_memory = True , ignore_prefix = ( '.' , '_' ) ) : if not hasattr ( jdir , 'iterdir' ) : raise ValueError ( 'jdir is not a path object; {}' . format ( jdir ) ) key_path = [ ] if key_path is None else key_path keys = [ ] key_found = False if key_path else True search_key... | get json keys from directory structure |
53,586 | def jkeys ( jfile , key_path = None , in_memory = True , ignore_prefix = ( '.' , '_' ) ) : key_path = [ ] if key_path is None else key_path def eval_file ( file_obj ) : if not in_memory : return _get_keys_ijson ( file_obj , key_path ) else : return _get_keys ( file_obj , key_path ) if isinstance ( jfile , basestring ) ... | get keys for initial json level or at level after following key_path |
53,587 | def _file_with_keys ( file_obj , key_path = None , parse_decimal = False ) : key_path = [ ] if key_path is None else key_path try : objs = ijson . items ( file_obj , '.' . join ( key_path ) ) except NameError : warnings . warn ( 'ijson package not found in environment, \ please install for on-disk key indexing' ... | read json with keys |
53,588 | def _folder_to_json ( jdir , key_path = None , in_memory = True , ignore_prefix = ( '.' , '_' ) , dic = { } , parse_decimal = False ) : key_path = [ ] if key_path is None else key_path if not hasattr ( jdir , 'iterdir' ) : raise ValueError ( 'jdir is not a path object; {}' . format ( jdir ) ) key_found = False if key_p... | read in folder structure as json |
53,589 | def to_dict ( jfile , key_path = None , in_memory = True , ignore_prefix = ( '.' , '_' ) , parse_decimal = False ) : key_path = [ ] if key_path is None else key_path if isinstance ( jfile , basestring ) : if not os . path . exists ( jfile ) : raise IOError ( 'jfile does not exist: {}' . format ( jfile ) ) if os . path ... | input json to dict |
53,590 | def chunks ( items , chunksize ) : items = iter ( items ) for first in items : chunk = chain ( ( first , ) , islice ( items , chunksize - 1 ) ) yield chunk deque ( chunk , 0 ) | Turn generator sequence into sequence of chunks . |
53,591 | def unionfs ( rw = 'rw' , ro = None , union = 'union' ) : from functools import wraps def wrap_in_union_fs ( func ) : @ wraps ( func ) def wrap_in_union_fs_func ( project , * args , ** kwargs ) : container = project . container if container is None or in_container ( ) : return func ( project , * args , ** kwargs ) buil... | Decorator for the UnionFS feature . |
53,592 | def __update_cleanup_paths ( new_path ) : cleanup_dirs = settings . CFG [ "cleanup_paths" ] . value cleanup_dirs = set ( cleanup_dirs ) cleanup_dirs . add ( new_path ) cleanup_dirs = list ( cleanup_dirs ) settings . CFG [ "cleanup_paths" ] = cleanup_dirs | Add the new path to the list of paths to clean up afterwards . |
53,593 | def __is_outside_of_builddir ( project , path_to_check ) : bdir = project . builddir cprefix = os . path . commonprefix ( [ path_to_check , bdir ] ) return cprefix != bdir | Check if a project lies outside of its expected directory . |
53,594 | def __unionfs_set_up ( ro_dir , rw_dir , mount_dir ) : mount_dir . mkdir ( ) rw_dir . mkdir ( ) if not ro_dir . exists ( ) : LOG . error ( "Base dir does not exist: '%s'" , ro_dir ) raise ValueError ( "Base directory does not exist" ) from benchbuild . utils . cmd import unionfs as unionfs_cmd LOG . debug ( "Mounting U... | Setup a unionfs via unionfs - fuse . |
53,595 | def get_precision_regex ( ) : expr = re . escape ( PRECISION_FORMULA ) expr += r'=\s*(\S*)\s.*\s([A-Z]*)' return re . compile ( expr ) | Build regular expression used to extract precision metric from command output |
53,596 | def _build_data ( self ) : def baseN ( nodes , mpn ) : return int ( math . sqrt ( mpn * 0.80 * nodes * 1024 * 1024 / 8 ) ) def nFromNb ( baseN , nb ) : factor = int ( baseN / nb ) if factor % 2 != 0 : factor -= 1 return nb * factor def get_grid ( nodes , ppn ) : cores = nodes * ppn sqrt = math . sqrt ( cores ) factors ... | Build HPL data from basic parameters |
53,597 | def mpirun ( self ) : cmd = self . attributes [ 'mpirun' ] if cmd and cmd [ 0 ] != 'mpirun' : cmd = [ 'mpirun' ] return [ str ( e ) for e in cmd ] | Additional options passed as a list to the mpirun command |
53,598 | def expandvars ( s , vars = None ) : tpl = TemplateWithDefaults ( s ) return tpl . substitute ( vars or os . environ ) | Perform variable substitution on the given string |
53,599 | def xor ( cls , obj , ** kwargs ) : return cls . __eval_seqexp ( obj , operator . xor , ** kwargs ) | Query an object . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.