idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
9,000
def tryload_cache_list_with_compute(use_cache, dpath, fname, cfgstr_list, compute_fn, *args):
    """Tries to load cached data, computing it on cache misses.

    Args:
        use_cache (bool): if False, skip the cache entirely and compute everything
        dpath (str): cache directory path
        fname (str): cache file base name
        cfgstr_list (list): one configuration string per item
        compute_fn (callable): called as ``compute_fn(ismiss_list, *args)``;
            must return new data for the missed entries (in miss order)
        *args: extra positional args forwarded to ``compute_fn``

    Returns:
        list: data for each cfgstr in ``cfgstr_list``
    """
    if use_cache is False:
        # Bypass cache: everything is a "miss"; compute directly.
        # (Removed a dead `data_list = [None] * len(cfgstr_list)` assignment
        # that was immediately overwritten by the compute_fn result.)
        ismiss_list = [True] * len(cfgstr_list)
        data_list = compute_fn(ismiss_list, *args)
        return data_list
    else:
        data_list, ismiss_list = tryload_cache_list(dpath, fname, cfgstr_list, verbose=False)
        num_total = len(cfgstr_list)
        if any(ismiss_list):
            # Compute only the missing entries, then persist and splice them in.
            newdata_list = compute_fn(ismiss_list, *args)
            newcfgstr_list = util_list.compress(cfgstr_list, ismiss_list)
            index_list = util_list.list_where(ismiss_list)
            print('[cache] %d/%d cache hits for %s in %s' % (
                num_total - len(index_list), num_total, fname, util_path.tail(dpath)))
            for newcfgstr, newdata in zip(newcfgstr_list, newdata_list):
                save_cache(dpath, fname, newcfgstr, newdata, verbose=False)
            for index, newdata in zip(index_list, newdata_list):
                data_list[index] = newdata
        else:
            print('[cache] %d/%d cache hits for %s in %s' % (
                num_total, num_total, fname, util_path.tail(dpath)))
        return data_list
Tries to load cached data, but computes it via the given compute function on a cache miss.
9,001
def to_json(val, allow_pickle=False, pretty=False):
    r"""Converts a python object to a JSON string using the utool convention.

    Args:
        val: python object to serialize
        allow_pickle (bool): forwarded to the utool JSON encoder factory
        pretty (bool): if True, indent the output for readability

    Returns:
        str: the JSON serialization of ``val``
    """
    encoder_cls = make_utool_json_encoder(allow_pickle)
    json_kw = {'cls': encoder_cls}
    if pretty:
        json_kw.update(indent=4, separators=(',', ': '))
    return json.dumps(val, **json_kw)
r Converts a python object to a JSON string using the utool convention
9,002
def from_json(json_str, allow_pickle=False):
    """Decodes a JSON object specified in the utool convention.

    Args:
        json_str (str or bytes): JSON text (bytes are decoded as UTF-8 on Py3)
        allow_pickle (bool): forwarded to the utool JSON encoder factory

    Returns:
        object: the decoded python value
    """
    if six.PY3 and isinstance(json_str, bytes):
        json_str = json_str.decode('utf-8')
    encoder_cls = make_utool_json_encoder(allow_pickle)
    return json.loads(json_str, object_hook=encoder_cls._json_object_hook)
Decodes a JSON object specified in the utool convention
9,003
def cachestr_repr(val):
    """Representation of an object as a cache string.

    Tries, in order: raw bytes via memoryview, JSON serialization, and
    finally a special-case for IBEISController instances (its dbname).
    Returns None if no strategy applies.
    """
    try:
        # Fast path: buffer-like objects become raw bytes.
        return memoryview(val).tobytes()
    except Exception:
        pass
    try:
        return to_json(val)
    except Exception:
        # NOTE(review): compares repr of the class to avoid importing ibeis;
        # falls through (returning None) for anything else.
        if repr(val.__class__) == "<class 'ibeis.control.IBEISControl.IBEISController'>":
            return val.get_dbname()
Representation of an object as a cache string .
9,004
def cached_func(fname=None, cache_dir='default', appname='utool',
                key_argx=None, key_kwds=None, use_cache=None, verbose=None):
    r"""Wraps a function with a Cacher object.

    The decorated function first tries to load its result from the cache
    (keyed by a cfgstr derived from selected args/kwargs) and only calls
    the underlying function on a miss.

    Args:
        fname (str): cache file name (defaults to the function's name)
        cache_dir (str): cache directory specifier
        appname (str): application name for the cache location
        key_argx / key_kwds: which args/kwargs participate in the cache key
        use_cache (bool): force-enable/disable caching; None reads the
            ``--nocache-<fname>`` command line flag
        verbose (int): verbosity level (defaults to VERBOSE_CACHE)

    Returns:
        callable: a decorator
    """
    if verbose is None:
        verbose = VERBOSE_CACHE

    def cached_closure(func):
        from utool import util_decor
        import utool as ut
        fname_ = util_inspect.get_funcname(func) if fname is None else fname
        kwdefaults = util_inspect.get_kwdefaults(func)
        argnames = util_inspect.get_argnames(func)
        if ut.is_method(func):
            # Drop `self` so it does not participate in the cache key.
            argnames = argnames[1:]
        cacher = Cacher(fname_, cache_dir=cache_dir, appname=appname, verbose=verbose)
        if use_cache is None:
            use_cache_ = not util_arg.get_argflag('--nocache-' + fname_)
        else:
            use_cache_ = use_cache

        def cached_wrapper(*args, **kwargs):
            try:
                if verbose > 2:
                    print('[util_cache] computing cached function fname_=%s' % (fname_,))
                cfgstr = get_cfgstr_from_args(func, args, kwargs, key_argx,
                                              key_kwds, kwdefaults, argnames)
                if util_cplat.WIN32:
                    # Windows paths have a tighter length limit; hash the cfgstr.
                    cfgstr = '_' + util_hash.hashstr27(cfgstr)
                assert cfgstr is not None, 'cfgstr=%r cannot be None' % (cfgstr,)
                # Callers may override caching per-call via use_cache kwarg.
                use_cache__ = kwargs.pop('use_cache', use_cache_)
                if use_cache__:
                    data = cacher.tryload(cfgstr)
                    if data is not None:
                        return data
                data = func(*args, **kwargs)
                cacher.save(data, cfgstr)
                return data
            except Exception as ex:
                from utool import util_dbg
                _dbgdict2 = dict(key_argx=key_argx, lenargs=len(args), lenkw=len(kwargs),)
                msg = '\n'.join([
                    '+--- UTOOL --- ERROR IN CACHED FUNCTION',
                    'dbgdict2 = ' + util_str.repr4(_dbgdict2),
                ])
                util_dbg.printex(ex, msg)
                raise

        cached_wrapper = util_decor.preserve_sig(cached_wrapper, func)
        cached_wrapper.cacher = cacher
        return cached_wrapper
    return cached_closure
r Wraps a function with a Cacher object
9,005
def get_global_shelf_fpath(appname='default', ensure=False):
    """Returns the filepath to the global shelf for ``appname``."""
    global_cache_dir = get_global_cache_dir(appname, ensure=ensure)
    return join(global_cache_dir, meta_util_constants.global_cache_fname)
Returns the filepath to the global shelf
9,006
def global_cache_write(key, val, appname='default'):
    """Writes a value into the OS-appropriate global cache shelf."""
    with GlobalShelfContext(appname) as shelf:
        shelf[key] = val
Writes cache files to a safe place in each operating system
9,007
def delete_global_cache(appname='default'):
    """Deletes the global cache shelf file for ``appname``."""
    shelf_fpath = get_global_shelf_fpath(appname)
    util_path.remove_file(shelf_fpath, verbose=True, dryrun=False)
Deletes the global cache shelf file for the given application.
9,008
def existing_versions(self):
    """Yields filepaths of data previously computed with this cacher
    under different cfgstr values.

    Yields:
        str: full path to each existing cache file matching
        ``<fname>_*<ext>`` in ``self.dpath``.
    """
    # Fix: the original used glob.glob1, an undocumented helper that was
    # deprecated in 3.10 and removed in Python 3.13. fnmatch.filter over
    # os.listdir is the documented equivalent.
    import fnmatch
    import os
    pattern = self.fname + '_*' + self.ext
    for fname in fnmatch.filter(os.listdir(self.dpath), pattern):
        yield join(self.dpath, fname)
Returns data with different cfgstr values that were previously computed with this cacher .
9,009
def tryload(self, cfgstr=None):
    """Like load, but returns None on a cache miss instead of raising.

    Args:
        cfgstr (str): configuration string; falls back to ``self.cfgstr``,
            then to the empty string (with a warning).

    Returns:
        the cached data, or None if the cacher is disabled or misses.
    """
    if cfgstr is None:
        cfgstr = self.cfgstr
    if cfgstr is None:
        import warnings
        warnings.warn('No cfgstr given in Cacher constructor or call')
        cfgstr = ''
    if not self.enabled:
        if self.verbose > 0:
            print('[cache] ... %s Cacher disabled' % (self.fname))
        return None
    try:
        if self.verbose > 1:
            print('[cache] tryload fname=%s' % (self.fname,))
        return self.load(cfgstr)
    except IOError:
        # A miss is expected; report it only when verbose.
        if self.verbose > 0:
            print('[cache] ... %s Cacher miss' % (self.fname))
Like load but returns None if the load fails
9,010
def fuzzyload(self, cachedir=None, partial_cfgstr='', **kwargs):
    """Try to load from a partially specified configuration string.

    Globs for cache targets matching ``partial_cfgstr``; raises ValueError
    unless exactly one target matches, then loads it (in place, via
    ``self.load`` — no value is returned).
    """
    valid_targets = self.glob_valid_targets(cachedir, partial_cfgstr)
    if len(valid_targets) != 1:
        import utool as ut
        msg = 'need to further specify target. valid_targets=%s' % (
            ut.repr3(valid_targets,))
        raise ValueError(msg)
    self.load(fpath=valid_targets[0], **kwargs)
Try and load from a partially specified configuration string
9,011
def load(self, cachedir=None, cfgstr=None, fpath=None, verbose=None,
         quiet=QUIET, ignore_keys=None):
    """Loads this Cachable's state from its cache file.

    Args:
        cachedir (str): directory to resolve the cache file in (unless
            ``fpath`` is given explicitly)
        cfgstr (str): configuration string for the file name
        fpath (str): explicit path; overrides cachedir/cfgstr resolution
        verbose: defaults to ``self.verbose`` / module VERBOSE
        ignore_keys: keys to skip when restoring state

    Raises:
        IOError: on a genuine cache miss (file does not exist)
        ValueError / zipfile.error / IOError: on corrupt cache data
    """
    if verbose is None:
        verbose = getattr(self, 'verbose', VERBOSE)
    if fpath is None:
        fpath = self.get_fpath(cachedir, cfgstr=cfgstr)
    if verbose:
        print('[Cachable] cache tryload: %r' % (basename(fpath),))
    try:
        self._unsafe_load(fpath, ignore_keys)
        if verbose:
            print('... self cache hit: %r' % (basename(fpath),))
    except ValueError as ex:
        import utool as ut
        msg = '[!Cachable] Cachable(%s) is likely corrupt' % (self.get_cfgstr())
        print('CORRUPT fpath = %s' % (fpath,))
        ut.printex(ex, msg, iswarning=True)
        raise
    except zipfile.error as ex:
        import utool as ut
        msg = '[!Cachable] Cachable(%s) has bad zipfile' % (self.get_cfgstr())
        print('CORRUPT fpath = %s' % (fpath,))
        ut.printex(ex, msg, iswarning=True)
        raise
    except IOError as ex:
        import utool as ut
        if not exists(fpath):
            # Plain miss: the file simply is not there.
            msg = '... self cache miss: %r' % (basename(fpath),)
            if verbose:
                print(msg)
            raise
        # File exists but could not be read -> corrupt.
        print('CORRUPT fpath = %s' % (fpath,))
        msg = '[!Cachable] Cachable(%s) is corrupt' % (self.get_cfgstr())
        ut.printex(ex, msg, iswarning=True)
        raise
    except Exception as ex:
        import utool as ut
        ut.printex(ex, 'unknown exception while loading query result')
        raise
Loads the result from the given database
9,012
def truepath_relative(path, otherpath=None):
    """Normalizes ``path`` and returns it relative to ``otherpath``
    (defaults to the current working directory)."""
    if otherpath is None:
        otherpath = os.getcwd()
    base = truepath(otherpath)
    return normpath(relpath(path, base))
Normalizes a path and returns it relative to another path (defaults to the current working directory).
9,013
def tail(fpath, n=2, trailing=True):
    """Alias for path_ndir_split: keep only the last ``n`` path components."""
    return path_ndir_split(fpath, n=n, trailing=trailing)
Alias for path_ndir_split
9,014
def unexpanduser(path):
    r"""Replaces the home directory prefix with ``~`` (inverse of expanduser)."""
    home = expanduser('~')
    if path.startswith(home):
        return '~' + path[len(home):]
    return path
r Replaces home directory with ~
9,015
def path_ndir_split(path_, n, force_unix=True, winroot='C:', trailing=True):
    r"""Shows only a little bit of the path: up to the ``n`` bottom-level
    directories.

    Args:
        path_ (str): path to abbreviate (non-strings pass through unchanged)
        n (int): number of trailing components to keep; None keeps the whole
            path; 0 returns the empty string
        force_unix (bool): use '/' as separator regardless of OS
        winroot (str): unused here; kept for interface compatibility
        trailing (bool): prefix '.../' when components were dropped

    Returns:
        str: cross-platform abbreviated path
    """
    if not isinstance(path_, six.string_types):
        return path_
    if n is None:
        cplat_path = ensure_crossplat_path(path_)
    elif n == 0:
        cplat_path = ''
    else:
        sep = '/' if force_unix else os.sep
        ndirs_list = []
        head = path_
        reached_end = False
        for nx in range(n):
            head, tail = split(head)
            if tail == '':
                if head == '':
                    reached_end = True
                else:
                    # Hit the filesystem root; keep it (stripped unless it is
                    # the only component).
                    root = head if len(ndirs_list) == 0 else head.strip('\\/')
                    ndirs_list.append(root)
                    reached_end = True
                break
            else:
                ndirs_list.append(tail)
        if trailing and not reached_end:
            # Peek one more component to see if we consumed the whole path.
            head, tail = split(head)
            if len(tail) == 0:
                if len(head) == 0:
                    reached_end = True
        ndirs = sep.join(ndirs_list[::-1])
        cplat_path = ensure_crossplat_path(ndirs)
        if trailing and not reached_end:
            cplat_path = '.../' + cplat_path
    return cplat_path
r Shows only a little bit of the path . Up to the n bottom - level directories
9,016
def augpath(path, augsuf='', augext='', augpref='', augdir=None, newext=None,
            newfname=None, ensure=False, prefix=None, suffix=None):
    """Augments the end of a path before the extension.

    Args:
        path (str): original path
        augsuf (str): appended to the stem (before the extension)
        augext (str): appended after the extension
        augpref (str): prepended to the stem
        augdir (str): extra subdirectory inserted before the file name
        newext (str): replaces the extension (default: keep original)
        newfname (str): replaces the stem entirely
        ensure (bool): create ``augdir`` on disk if given
        prefix / suffix: aliases overriding augpref / augsuf

    Returns:
        str: the augmented path
    """
    if prefix is not None:
        augpref = prefix
    if suffix is not None:
        augsuf = suffix
    dpath, fname = split(path)
    stem, ext = splitext(fname)
    if newfname is not None:
        stem = newfname
    if newext is None:
        newext = ext
    new_fname = ''.join((augpref, stem, augsuf, newext, augext))
    if augdir is not None:
        new_dpath = join(dpath, augdir)
        if ensure:
            ensuredir(new_dpath)
    else:
        new_dpath = dpath
    return join(new_dpath, new_fname)
augments end of path before the extension .
9,017
def remove_files_in_dir(dpath, fname_pattern_list='*', recursive=False,
                        verbose=VERBOSE, dryrun=False, ignore_errors=False):
    """Removes files matching one or more fnmatch patterns from a directory.

    Args:
        dpath (str): directory to clean
        fname_pattern_list (str or list): fnmatch pattern(s)
        recursive (bool): descend into subdirectories
        verbose (int): verbosity level
        dryrun (bool): report but do not actually delete
        ignore_errors (bool): forwarded to remove_file

    Returns:
        bool: always True (warns if ``dpath`` does not exist)
    """
    if isinstance(fname_pattern_list, six.string_types):
        fname_pattern_list = [fname_pattern_list]
    if verbose > 2:
        print('[util_path] Removing files:')
        print(' * from dpath = %r ' % dpath)
        print(' * with patterns = %r' % fname_pattern_list)
        print(' * recursive = %r' % recursive)
    num_removed, num_matched = (0, 0)
    if not exists(dpath):
        msg = ('!!! dir = %r does not exist!' % dpath)
        if verbose:
            print(msg)
        warnings.warn(msg, category=UserWarning)
    for root, dname_list, fname_list in os.walk(dpath):
        for fname_pattern in fname_pattern_list:
            for fname in fnmatch.filter(fname_list, fname_pattern):
                num_matched += 1
                num_removed += remove_file(join(root, fname),
                                           ignore_errors=ignore_errors,
                                           dryrun=dryrun,
                                           verbose=verbose > 5)
        if not recursive:
            break
    if verbose > 0:
        print('[util_path] ... Removed %d/%d files' % (num_removed, num_matched))
    return True
Removes files matching a pattern from a directory
9,018
def delete(path, dryrun=False, recursive=True, verbose=None,
           print_exists=True, ignore_errors=True):
    """Removes a file, directory, or symlink.

    Args:
        path (str): target to delete
        dryrun (bool): report but do not actually delete
        recursive (bool): for directories, delete contents recursively
        verbose: defaults from VERBOSE / QUIET module flags
        print_exists (bool): report when the target is already absent
        ignore_errors (bool): forwarded to the removal helpers

    Returns:
        bool: True if something was deleted
    """
    if verbose is None:
        verbose = VERBOSE
        if not QUIET:
            verbose = 1
    if verbose > 0:
        print('[util_path] Deleting path=%r' % path)
    exists_flag = exists(path)
    link_flag = islink(path)
    if not exists_flag and not link_flag:
        if print_exists and verbose:
            print('..does not exist!')
        flag = False
    else:
        rmargs = dict(verbose=verbose > 1, ignore_errors=ignore_errors, dryrun=dryrun)
        if islink(path):
            # Unlink the symlink itself, never its target.
            os.unlink(path)
            flag = True
        elif isdir(path):
            flag = remove_files_in_dir(path, recursive=recursive, **rmargs)
            flag = flag and remove_dirs(path, **rmargs)
        elif isfile(path):
            flag = remove_file(path, **rmargs)
        else:
            raise ValueError('Unknown type of path=%r' % (path,))
    if verbose > 0:
        print('[util_path] Finished deleting path=%r' % path)
    return flag
Removes a file directory or symlink
9,019
def remove_existing_fpaths(fpath_list, verbose=VERBOSE, quiet=QUIET,
                           strict=False, print_caller=PRINT_CALLER, lbl='files'):
    """Filters out Nones and non-existent paths, then removes what remains.

    Returns:
        int: number of files removed (from remove_fpaths)
    """
    import utool as ut
    if print_caller:
        print(util_dbg.get_caller_name(range(1, 4)) + ' called remove_existing_fpaths')
    fpath_list_ = ut.filter_Nones(fpath_list)
    exists_list = list(map(exists, fpath_list_))
    if verbose:
        n_total = len(fpath_list)
        n_valid = len(fpath_list_)
        n_exist = sum(exists_list)
        print('[util_path.remove_existing_fpaths] request delete of %d %s' % (n_total, lbl))
        if n_valid != n_total:
            print(('[util_path.remove_existing_fpaths] '
                   'trying to delete %d/%d non None %s ') % (n_valid, n_total, lbl))
        print(('[util_path.remove_existing_fpaths] '
               ' %d/%d exist and need to be deleted') % (n_exist, n_valid))
    existing_fpath_list = ut.compress(fpath_list_, exists_list)
    return remove_fpaths(existing_fpath_list, verbose=verbose, quiet=quiet,
                         strict=strict, print_caller=False, lbl=lbl)
Checks existence before removing, then tries to remove the existing paths.
9,020
def remove_fpaths(fpaths, verbose=VERBOSE, quiet=QUIET, strict=False,
                  print_caller=PRINT_CALLER, lbl='files'):
    """Removes multiple file paths.

    Uses a fast loop without per-item exception handling; on the first
    OSError it optionally raises (strict) or falls back to a slower loop
    that tolerates failures for the remaining paths.

    Returns:
        int: number of files actually removed
    """
    import utool as ut
    if print_caller:
        print(util_dbg.get_caller_name(range(1, 4)) + ' called remove_fpaths')
    n_total = len(fpaths)
    _verbose = (not quiet and n_total > 0) or VERYVERBOSE
    if _verbose:
        print('[util_path.remove_fpaths] try removing %d %s' % (n_total, lbl))
    n_removed = 0
    prog = ut.ProgIter(fpaths, label='removing files', enabled=verbose)
    _iter = iter(prog)
    try:
        # Fast path: no per-item try/except until something goes wrong.
        for fpath in _iter:
            os.remove(fpath)
            n_removed += 1
    except OSError as ex:
        if VERYVERBOSE:
            print('WARNING: Could not remove fpath = %r' % (fpath,))
        if strict:
            util_dbg.printex(ex, 'Could not remove fpath = %r' % (fpath,),
                             iswarning=False)
            raise
    # Slow path: continue the same iterator, tolerating further failures.
    for fpath in _iter:
        try:
            os.remove(fpath)
            n_removed += 1
        except OSError as ex:
            if VERYVERBOSE:
                print('WARNING: Could not remove fpath = %r' % (fpath,))
    if _verbose:
        print('[util_path.remove_fpaths] ... removed %d / %d %s' % (
            n_removed, n_total, lbl))
    return n_removed
Removes multiple file paths
9,021
def longest_existing_path(_path):
    r"""Returns the longest ancestor of ``_path`` that exists on disk.

    Walks up via dirname until an existing directory is found; returns ''
    (with a warning print) if the walk stops making progress.
    """
    existing_path = _path
    while True:
        candidate = os.path.dirname(existing_path)
        if exists(candidate):
            existing_path = candidate
            break
        if candidate == existing_path:
            # dirname reached a fixed point without ever existing.
            print('!!! [utool] This is a very illformated path indeed.')
            existing_path = ''
            break
        existing_path = candidate
    return existing_path
r Returns the longest root of _path that exists
9,022
def get_path_type(path_):
    r"""Returns a string describing whether a path is a file, directory,
    link, and/or mount (concatenated in that order; '' if none apply)."""
    checks = ((isfile, 'file'), (isdir, 'directory'),
              (islink, 'link'), (ismount, 'mount'))
    return ''.join(label for pred, label in checks if pred(path_))
r Returns whether a path is a file, directory, link, and/or mount.
9,023
def checkpath(path_, verbose=VERYVERBOSE, n=None, info=VERYVERBOSE):
    r"""Verbose wrapper around os.path.exists.

    Args:
        path_ (str): path to check (must be a string)
        verbose (bool): print the check and its outcome
        n (int): abbreviation length for printing (see path_ndir_split)
        info (bool): on failure, also report the longest existing prefix

    Returns:
        bool: whether ``path_`` exists
    """
    assert isinstance(path_, six.string_types), (
        'path_=%r is not a string. type(path_) = %r' % (path_, type(path_)))
    path_ = normpath(path_)
    if sys.platform.startswith('win32'):
        # Repair paths like '\C\...' into 'C:\...' on windows.
        if path_.startswith('\\'):
            dirs = path_.split('\\')
            if len(dirs) > 1 and len(dirs[0]) == 0 and len(dirs[1]) == 1:
                dirs[1] = dirs[1].upper() + ':'
                path_ = '\\'.join(dirs[1:])
    does_exist = exists(path_)
    if verbose:
        pretty_path = path_ndir_split(path_, n)
        caller_name = util_dbg.get_caller_name(allow_genexpr=False)
        print('[%s] checkpath(%r)' % (caller_name, pretty_path))
        if does_exist:
            path_type = get_path_type(path_)
            print('[%s] ...(%s) exists' % (caller_name, path_type,))
        else:
            print('[%s] ... does not exist' % (caller_name))
    if not does_exist and info:
        _longest_path = longest_existing_path(path_)
        _longest_path_type = get_path_type(_longest_path)
        print('[util_path] ... The longest existing path is: %r' % _longest_path)
        print('[util_path] ... and has type %r' % (_longest_path_type,))
    return does_exist
r verbose wrapper around os . path . exists
9,024
def ensurepath(path_, verbose=None):
    """DEPRECATED alias — use ensuredir instead."""
    if verbose is None:
        verbose = VERYVERBOSE
    return ensuredir(path_, verbose=verbose)
DEPRICATE - alias - use ensuredir instead
9,025
def ensuredir(path_, verbose=None, info=False, mode=0o1777):
    r"""Ensures that a directory exists, creating it (with sticky bits by
    default) if necessary.

    Args:
        path_ (str, list, tuple, or Path): directory path; sequences are
            joined into a single path
        verbose: defaults to VERYVERBOSE
        info (bool): forwarded to checkpath
        mode (int): permission bits for created directories

    Returns:
        str: the (possibly joined/stringified) path
    """
    if verbose is None:
        verbose = VERYVERBOSE
    if isinstance(path_, (list, tuple)):
        path_ = join(*path_)
    if HAVE_PATHLIB and isinstance(path_, pathlib.Path):
        path_ = str(path_)
    if not checkpath(path_, verbose=verbose, info=info):
        if verbose:
            print('[util_path] mkdir(%r)' % path_)
        try:
            os.makedirs(normpath(path_), mode=mode)
        except OSError as ex:
            util_dbg.printex(ex, 'check that the longest existing path '
                             'is not a bad windows symlink.', keys=['path_'])
            raise
    return path_
r Ensures that directory will exist . creates new dir with sticky bits by default
9,026
def touch(fpath, times=None, verbose=True):
    r"""Creates the file if it doesn't exist and updates its timestamps
    (unix `touch` semantics).

    Args:
        fpath (str): file path
        times: forwarded to os.utime (None means "now")
        verbose (bool): print the touched path

    Returns:
        str: ``fpath``
    """
    try:
        if verbose:
            print('[util_path] touching %r' % fpath)
        # Opening in append mode creates the file without truncating it.
        with open(fpath, 'a'):
            os.utime(fpath, times)
    except Exception as ex:
        import utool
        utool.printex(ex, 'touch %s' % fpath)
        raise
    return fpath
r Creates file if it doesnt exist
9,027
def copy_list(src_list, dst_list, lbl='Copying', ioerr_ok=False,
              sherro_ok=False, oserror_ok=False):
    """Copies each src to the corresponding dst (data and stat info).

    Args:
        src_list / dst_list: parallel lists of source and destination paths
        lbl (str): progress label
        ioerr_ok (bool): tolerate IOError (record False instead of raising)
        sherro_ok (bool): tolerate shutil.Error
        oserror_ok (bool): tolerate OSError

    Returns:
        list[bool]: per-item success flags

    Fix: the original accepted ``oserror_ok`` but never consulted it — the
    OSError handler only checked ``ioerr_ok``. It is now honored (ioerr_ok
    is still accepted there for backward compatibility, since on Python 3
    IOError is an alias of OSError).
    """
    task_iter = zip(src_list, dst_list)

    def docopy(src, dst):
        try:
            shutil.copy2(src, dst)
        except shutil.Error:
            # Must precede OSError: shutil.Error is not an OSError subclass,
            # but check it first so multi-file errors are classified correctly.
            if sherro_ok:
                return False
            raise
        except OSError:
            # On Py3, IOError is OSError, so ioerr_ok must also suppress here.
            if oserror_ok or ioerr_ok:
                return False
            raise
        return True

    progiter = util_progress.ProgIter(task_iter, adjust=True, lbl=lbl)
    return [docopy(src, dst) for (src, dst) in progiter]
Copies all data and stat info
9,028
def glob(dpath, pattern=None, recursive=False, with_files=True,
         with_dirs=True, maxdepth=None, exclude_dirs=[], fullpath=True,
         **kwargs):
    r"""Globs a directory for a pattern; list-materializing wrapper
    around iglob."""
    gen = iglob(dpath, pattern, recursive=recursive, with_files=with_files,
                with_dirs=with_dirs, maxdepth=maxdepth, fullpath=fullpath,
                exclude_dirs=exclude_dirs, **kwargs)
    return list(gen)
r Globs directory for pattern
9,029
def num_images_in_dir(path):
    """Returns the number of image files in a directory tree (recursive)."""
    num_imgs = 0
    for root, dirs, files in os.walk(path):
        num_imgs += sum(1 for fname in files if fpath_has_imgext(fname))
    return num_imgs
returns the number of images in a directory
9,030
def fpath_has_ext(fname, exts, case_sensitive=False):
    """Returns True if the filename ends with any of the given extensions.

    Args:
        fname (str): file name or path
        exts (iterable of str): extensions including the dot (e.g. '.jpg')
        case_sensitive (bool): match extension case exactly

    Returns:
        bool

    Fix: uses fnmatch.fnmatchcase instead of fnmatch.fnmatch. fnmatch.fnmatch
    normalizes case on case-insensitive platforms (Windows), which silently
    broke case_sensitive=True there; fnmatchcase matches exactly everywhere,
    and the insensitive branch already lowercases both sides manually.
    """
    fname_ = fname.lower() if not case_sensitive else fname
    if case_sensitive:
        ext_pats = ['*' + ext for ext in exts]
    else:
        ext_pats = ['*' + ext.lower() for ext in exts]
    return any(fnmatch.fnmatchcase(fname_, pat) for pat in ext_pats)
returns true if the filename has any of the given extensions
9,031
def get_modpath(modname, prefer_pkg=False, prefer_main=False):
    r"""Returns the filesystem path to a module.

    Args:
        modname (str or module): module name to import, or a module object
        prefer_pkg (bool): for packages, return the package directory
            instead of __init__.py / __main__.py
        prefer_main (bool): for packages, prefer __main__.py if it exists

    Returns:
        str: path to the module source file (``.pyc`` mapped to ``.py``)
    """
    import importlib
    if isinstance(modname, six.string_types):
        module = importlib.import_module(modname)
    else:
        module = modname
    modpath = module.__file__.replace('.pyc', '.py')
    initname = '__init__.py'
    mainname = '__main__.py'
    if prefer_pkg and (modpath.endswith(initname) or modpath.endswith(mainname)):
        modpath = dirname(modpath)
    if prefer_main and modpath.endswith(initname):
        main_modpath = modpath[:-len(initname)] + mainname
        if exists(main_modpath):
            modpath = main_modpath
    return modpath
r Returns path to module
9,032
def get_relative_modpath(module_fpath):
    """Returns the path to a module relative to its package root."""
    modsubdir_list = get_module_subdir_list(module_fpath)
    _, ext = splitext(module_fpath)
    rel_modpath = join(*modsubdir_list) + ext
    return ensure_crossplat_path(rel_modpath)
Returns path to module relative to the package root
9,033
def get_modname_from_modpath(module_fpath):
    """Returns an importable dotted module name from a file path."""
    modsubdir_list = get_module_subdir_list(module_fpath)
    modname = '.'.join(modsubdir_list)
    # A package's __init__/__main__ file is addressed by the package name.
    modname = modname.replace('.__init__', '').strip()
    modname = modname.replace('.__main__', '').strip()
    return modname
returns importable name from file path
9,034
def ls(path, pattern='*'):
    """Like unix ls: lists all files and dirs in ``path``, sorted."""
    return sorted(glob(path, pattern, recursive=False))
like unix ls - lists all files and dirs in path
9,035
def ls_moduledirs(path, private=True, full=True):
    """Lists all directories in ``path`` that are python modules.

    Args:
        private (bool): include private modules (filtered out when False)
        full (bool): return full paths; basenames when False
    """
    candidates = filter(is_module_dir, ls_dirs(path))
    if not private:
        candidates = filterfalse(is_private_module, candidates)
    if not full:
        candidates = map(basename, candidates)
    return list(candidates)
lists all dirs which are python modules in path
9,036
def list_images(img_dpath_, ignore_list=[], recursive=False, fullpath=False,
                full=None, sort=True):
    r"""Returns a list of images in a directory. By default returns paths
    relative to ``img_dpath_`` with '/' separators.

    Args:
        img_dpath_ (str): directory to scan
        ignore_list (list): directory names or relative image names to skip
            (read-only; the mutable default is never mutated)
        recursive (bool): descend into subdirectories
        fullpath (bool): return absolute paths
        full (bool): legacy alias OR'd into ``fullpath``
        sort (bool): sort the result

    Returns:
        list[str]: image paths

    Fix: when ``sort=False`` the original raised NameError because
    ``gname_list`` was only assigned inside ``if sort:``; the unsorted
    list is now returned in that case.
    """
    if full is not None:
        fullpath = fullpath or full
    img_dpath_ = util_str.ensure_unicode(img_dpath_)
    img_dpath = realpath(img_dpath_)
    ignore_set = set(ignore_list)
    gname_list_ = []
    assertpath(img_dpath)
    true_imgpath = truepath(img_dpath)
    for root, dlist, flist in os.walk(true_imgpath):
        root = util_str.ensure_unicode(root)
        rel_dpath = relpath(root, img_dpath)
        # Skip any directory whose relative path contains an ignored name.
        if any([dname in ignore_set for dname in dirsplit(rel_dpath)]):
            continue
        for fname in iter(flist):
            fname = util_str.ensure_unicode(fname)
            gname = join(rel_dpath, fname).replace('\\', '/')
            if gname.startswith('./'):
                gname = gname[2:]
            if fpath_has_imgext(gname):
                if gname in ignore_set:
                    continue
                if fullpath:
                    gpath = join(img_dpath, gname)
                    gname_list_.append(gpath)
                else:
                    gname_list_.append(gname)
        if not recursive:
            break
    gname_list = sorted(gname_list_) if sort else gname_list_
    return gname_list
r Returns a list of images in a directory . By default returns relative paths .
9,037
def assertpath(path_, msg='', **kwargs):
    """Asserts that a path exists (no-op when NO_ASSERTS is set).

    Args:
        path_ (str): path to verify
        msg (str): extra context appended to the error message
        **kwargs: forwarded to checkpath

    Raises:
        AssertionError: if ``path_`` is None, empty, or does not exist.

    Fix: the None branch formatted two arguments into a single %s
    (``'path is None! %s' % (path_, msg)``), which itself raised TypeError
    instead of the intended AssertionError.
    """
    if NO_ASSERTS:
        return
    if path_ is None:
        raise AssertionError('path is None! %s' % (msg,))
    if path_ == '':
        raise AssertionError('path=%r is the empty string! %s' % (path_, msg))
    if not checkpath(path_, **kwargs):
        raise AssertionError('path=%r does not exist! %s' % (path_, msg))
Asserts that a path exists.
9,038
def matching_fpaths(dpath_list, include_patterns, exclude_dirs=[],
                    greater_exclude_dirs=[], exclude_patterns=[],
                    recursive=True):
    r"""Walks the directory list, yielding file paths that match the
    requested fnmatch patterns.

    Args:
        dpath_list (str or list): root directories to walk
        include_patterns (list): fnmatch patterns a file must match
        exclude_dirs (list): directory basenames to skip
        greater_exclude_dirs (list): directory names that exclude the whole
            subtree when they appear anywhere in the relative path
        exclude_patterns (list): fnmatch patterns that disqualify a file
        recursive (bool): descend past the top level

    Yields:
        str: matching file paths
    """
    if isinstance(dpath_list, six.string_types):
        dpath_list = [dpath_list]
    _match = fnmatch.fnmatch
    for dpath in dpath_list:
        for root, dname_list, fname_list in os.walk(dpath):
            subdirs = pathsplit_full(relpath(root, dpath))
            if any([dir_ in greater_exclude_dirs for dir_ in subdirs]):
                continue
            if basename(root) in exclude_dirs:
                continue
            for name in fname_list:
                if any(_match(name, pat) for pat in include_patterns):
                    if not any(_match(name, pat) for pat in exclude_patterns):
                        yield join(root, name)
            if not recursive:
                break
r walks dpath lists returning all directories that match the requested pattern .
9,039
def sed(regexpr, repl, force=False, recursive=False, dpath_list=None,
        fpath_list=None, verbose=None, include_patterns=None,
        exclude_patterns=[]):
    """Python implementation of sed over a set of source files. NOT FINISHED.

    Args:
        regexpr (str): pattern to search for (extended via extend_regex)
        repl (str): replacement text
        force (bool): actually write the changes (forwarded to sedfile)
        recursive (bool): walk directories recursively
        dpath_list (list): directories to search (default: cwd)
        fpath_list (list): explicit files; bypasses directory matching
        verbose: defaults to ut.NOT_QUIET
        include_patterns / exclude_patterns: fnmatch filters for files
    """
    if include_patterns is None:
        include_patterns = ['*.py', '*.pyx', '*.pxi', '*.cxx', '*.cpp',
                            '*.hxx', '*.hpp', '*.c', '*.h', '*.html', '*.tex']
    if dpath_list is None:
        dpath_list = [os.getcwd()]
    if verbose is None:
        verbose = ut.NOT_QUIET
    if fpath_list is None:
        greater_exclude_dirs = get_standard_exclude_dnames()
        exclude_dirs = []
        fpath_generator = matching_fpaths(
            dpath_list, include_patterns, exclude_dirs,
            greater_exclude_dirs=greater_exclude_dirs,
            recursive=recursive, exclude_patterns=exclude_patterns)
    else:
        fpath_generator = fpath_list
    if verbose:
        print('sed-ing %r' % (dpath_list,))
        print(' * regular expression : %r' % (regexpr,))
        print(' * replacement : %r' % (repl,))
        print(' * include_patterns : %r' % (include_patterns,))
        print(' * recursive: %r' % (recursive,))
        print(' * force: %r' % (force,))
        from utool import util_str
        print(' * fpath_list: %s' % (util_str.repr3(fpath_list),))
    regexpr = extend_regex(regexpr)
    num_changed = 0
    num_files_checked = 0
    fpaths_changed = []
    for fpath in fpath_generator:
        num_files_checked += 1
        changed_lines = sedfile(fpath, regexpr, repl, force, verbose=verbose)
        if changed_lines is not None:
            fpaths_changed.append(fpath)
            num_changed += len(changed_lines)
    import utool as ut
    print('num_files_checked = %r' % (num_files_checked,))
    print('fpaths_changed = %s' % (ut.repr3(sorted(fpaths_changed)),))
    print('total lines changed = %r' % (num_changed,))
Python implementation of sed . NOT FINISHED
9,040
def grep(regex_list, recursive=True, dpath_list=None, include_patterns=None,
         exclude_dirs=[], greater_exclude_dirs=None, inverse=False,
         exclude_patterns=[], verbose=VERBOSE, fpath_list=None, reflags=0,
         cache=None):
    r"""Greps files for patterns. Python implementation of grep. NOT FINISHED.

    Args:
        regex_list (str or list): pattern(s); all are searched per file
        recursive (bool): walk directories recursively
        dpath_list (list): directories to search (default: cwd)
        include_patterns / exclude_patterns: fnmatch filters for files
        exclude_dirs / greater_exclude_dirs: directory filters
        inverse (bool): report files with NO matches instead
        fpath_list (list): explicit files; bypasses directory matching
        reflags (int): base regex flags
        cache: forwarded to grepfile

    Returns:
        tuple: (found_fpath_list, found_lines_list, found_lxs_list)
    """
    from utool import util_regex
    from utool import util_list
    if include_patterns is None:
        include_patterns = ['*']
    if greater_exclude_dirs is None:
        greater_exclude_dirs = []
    if isinstance(include_patterns, six.string_types):
        include_patterns = [include_patterns]
    if dpath_list is None:
        dpath_list = [os.getcwd()]
    if verbose:
        recursive_stat_str = ['flat', 'recursive'][recursive]
        print('[util_path] Greping (%s) %r for %r' % (
            recursive_stat_str, dpath_list, regex_list))
        print('[util_path] regex_list = %s' % (regex_list))
    if isinstance(regex_list, six.string_types):
        regex_list = [regex_list]
    found_fpath_list = []
    found_lines_list = []
    found_lxs_list = []
    if fpath_list is None:
        fpath_generator = matching_fpaths(
            dpath_list=dpath_list, include_patterns=include_patterns,
            exclude_dirs=exclude_dirs,
            greater_exclude_dirs=greater_exclude_dirs,
            exclude_patterns=exclude_patterns, recursive=recursive)
    else:
        fpath_generator = fpath_list
    # Extend each regex and collect per-expression flags.
    _exprs_flags = [util_regex.extend_regex2(expr, reflags)
                    for expr in regex_list]
    extended_regex_list = util_list.take_column(_exprs_flags, 0)
    reflags_list = util_list.take_column(_exprs_flags, 1)
    reflags = reflags_list[0]
    for fpath in fpath_generator:
        found_lines, found_lxs = grepfile(fpath, extended_regex_list,
                                          reflags_list, cache=cache)
        if inverse:
            # Inverse grep: record files with no matches at all.
            if len(found_lines) == 0:
                found_fpath_list.append(fpath)
                found_lines_list.append([])
                found_lxs_list.append([])
        elif len(found_lines) > 0:
            found_fpath_list.append(fpath)
            found_lines_list.append(found_lines)
            found_lxs_list.append(found_lxs)
    grep_result = (found_fpath_list, found_lines_list, found_lxs_list)
    if verbose:
        print('==========')
        print('==========')
        print('[util_path] found matches in %d files' % len(found_fpath_list))
        print(make_grep_resultstr(grep_result, extended_regex_list, reflags))
    return grep_result
r greps for patterns Python implementation of grep . NOT FINISHED
9,041
def get_win32_short_path_name(long_name):
    """Gets the windows 8.3 short path name for a given long path.

    Windows-only: uses kernel32.GetShortPathNameW via ctypes.
    """
    import ctypes
    from ctypes import wintypes
    _GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW
    _GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR,
                                   wintypes.DWORD]
    _GetShortPathNameW.restype = wintypes.DWORD
    # First call with size 0 reports the required buffer length; loop
    # until the buffer is large enough (the path can change between calls).
    output_buf_size = 0
    while True:
        output_buf = ctypes.create_unicode_buffer(output_buf_size)
        needed = _GetShortPathNameW(long_name, output_buf, output_buf_size)
        if output_buf_size >= needed:
            return output_buf.value
        output_buf_size = needed
Gets the short path name of a given long path .
9,042
def platform_path(path):
    r"""Returns a platform-specific path (for pyinstaller usage).

    On win32 the relative true-path is run through the 8.3 short-name
    expansion; elsewhere it is returned as-is.
    """
    try:
        if path == '':
            raise ValueError('path cannot be the empty string')
        path1 = truepath_relative(path)
        if sys.platform.startswith('win32'):
            path2 = expand_win32_shortname(path1)
        else:
            path2 = path1
    except Exception as ex:
        util_dbg.printex(ex, keys=['path', 'path1', 'path2'])
        raise
    return path2
r Returns platform specific path for pyinstaller usage
9,043
def find_lib_fpath(libname, root_dir, recurse_down=True, verbose=False,
                   debug=False):
    """Searches for a dynamic library file starting at ``root_dir``.

    Checks platform-specific library names in a few conventional
    subdirectories, optionally walking up the directory tree.

    Args:
        libname (str): bare library name (no 'lib' prefix or extension)
        root_dir (str): directory to start searching from
        recurse_down (bool): also search every ancestor directory
        verbose (bool): print the paths that were checked
        debug (bool): print the path that was chosen

    Returns:
        str: path to the found library

    Raises:
        ImportError: if the library cannot be found
    """
    def get_lib_fname_list(libname):
        # Platform-specific candidate file names.
        if sys.platform.startswith('win32'):
            return ['lib' + libname + '.dll', libname + '.dll']
        elif sys.platform.startswith('darwin'):
            return ['lib' + libname + '.dylib']
        elif sys.platform.startswith('linux'):
            return ['lib' + libname + '.so']
        raise Exception('Unknown operating system: %s' % sys.platform)

    def get_lib_dpath_list(root_dir):
        """Returns possible lib locations under ``root_dir``."""
        # (Renamed the local that previously shadowed this function's name.)
        dpath_candidates = [root_dir,
                            join(root_dir, 'lib'),
                            join(root_dir, 'build'),
                            join(root_dir, 'build', 'lib')]
        return dpath_candidates

    lib_fname_list = get_lib_fname_list(libname)
    tried_fpaths = []
    while root_dir is not None:
        for lib_fname in lib_fname_list:
            for lib_dpath in get_lib_dpath_list(root_dir):
                lib_fpath = normpath(join(lib_dpath, lib_fname))
                if exists(lib_fpath):
                    if verbose:
                        print('\n[c] Checked: '.join(tried_fpaths))
                    if debug:
                        print('using: %r' % lib_fpath)
                    return lib_fpath
                else:
                    tried_fpaths.append(lib_fpath)
        # Move up one directory; stop at the filesystem root.
        _new_root = dirname(root_dir)
        if _new_root == root_dir:
            root_dir = None
            break
        else:
            root_dir = _new_root
        if not recurse_down:
            break

    msg = ('\n[C!] load_clib(libname=%r root_dir=%r, recurse_down=%r, verbose=%r)' %
           (libname, root_dir, recurse_down, verbose) +
           '\n[c!] Cannot FIND dynamic library')
    print(msg)
    print('\n[c!] Checked: '.join(tried_fpaths))
    raise ImportError(msg)
Search for the library
9,044
def ensure_mingw_drive(win32_path):
    r"""Replaces a windows drive prefix (e.g. ``C:``) with a mingw-style
    drive (``/c``)."""
    win32_drive, _path = splitdrive(win32_path)
    # Drop the trailing ':' from the drive and lowercase it.
    mingw_drive = '/' + win32_drive[:-1].lower()
    return mingw_drive + _path
r replaces windows drives with mingw style drives
9,045
def ancestor_paths(start=None, limit={}):
    """Yields ``start`` and all paths above it, stopping at the filesystem
    root or at any path in ``limit`` (which is expanduser-normalized).

    Args:
        start (str): starting path (default: cwd); the limit set is only
            read, never mutated.
    """
    import utool as ut
    limit = ut.ensure_iterable(limit)
    limit = {expanduser(p) for p in limit}.union(set(limit))
    if start is None:
        start = os.getcwd()
    path = start
    prev = None
    while path != prev and prev not in limit:
        yield path
        prev = path
        path = dirname(path)
All paths above you
9,046
def search_candidate_paths(candidate_path_list, candidate_name_list=None,
                           priority_paths=None, required_subpaths=[],
                           verbose=None):
    """Searches for an existing path that meets the requirements.

    Args:
        candidate_path_list (list): candidate directories
        candidate_name_list (list): optional file names; when given, every
            (dpath, fname) product is considered instead
        priority_paths (list): checked before the candidates
        required_subpaths (list): sub-paths that must exist under a match
        verbose: defaults from QUIET

    Returns:
        str or None: the first acceptable path, or None

    Fix: removed an unreachable ``break`` that followed
    ``return return_path`` inside the loop.
    """
    import utool as ut
    if verbose is None:
        verbose = 0 if QUIET else 1
    if verbose >= 1:
        print('[search_candidate_paths] Searching for candidate paths')
    if candidate_name_list is not None:
        candidate_path_list_ = [
            join(dpath, fname) for dpath, fname in
            itertools.product(candidate_path_list, candidate_name_list)]
    else:
        candidate_path_list_ = candidate_path_list
    if priority_paths is not None:
        candidate_path_list_ = priority_paths + candidate_path_list_
    return_path = None
    for path in candidate_path_list_:
        if path is not None and exists(path):
            if verbose >= 2:
                print('[search_candidate_paths] Found candidate directory %r' % (path,))
                print('[search_candidate_paths] ... checking for approprate structure')
            subpath_list = [join(path, subpath) for subpath in required_subpaths]
            if all(ut.checkpath(path_, verbose=verbose) for path_ in subpath_list):
                return_path = path
                if verbose >= 2:
                    print('[search_candidate_paths] Found acceptable path')
                return return_path
    if verbose >= 1:
        print('[search_candidate_paths] Failed to find acceptable path')
    return return_path
searches for existing paths that meed a requirement
9,047
def symlink(real_path, link_path, overwrite=False, on_error='raise', verbose=2):
    """
    Attempt to create a symbolic link at ``link_path`` pointing to
    ``real_path``.

    Args:
        real_path (str): target the link should point to
        link_path (str): location of the new link
        overwrite (bool): delete an existing link that points elsewhere
        on_error (str): 'raise' (default) or 'ignore'
        verbose (int): verbosity level

    Returns:
        str: the normalized link path on success, or False when an existing
        link points elsewhere and on_error == 'ignore'.
    """
    path = normpath(real_path)
    link = normpath(link_path)
    if verbose:
        print('[util_path] Creating symlink: path={} link={}'.format(path, link))
    if os.path.islink(link):
        if verbose:
            print('[util_path] symlink already exists')
        # os.readlink may be missing on platforms without symlink support
        os_readlink = getattr(os, "readlink", None)
        if callable(os_readlink):
            if os_readlink(link) == path:
                if verbose > 1:
                    print('[path] ... and points to the right place')
                return link
        else:
            print('[util_path] Warning, symlinks are not implemented on windows')
        if verbose > 1:
            print('[util_path] ... but it points somewhere else')
        if overwrite:
            delete(link, verbose > 1)
        elif on_error == 'ignore':
            return False
    try:
        os_symlink = getattr(os, "symlink", None)
        if callable(os_symlink):
            os_symlink(path, link)
        else:
            # fall back to a windows shortcut when real symlinks are missing
            win_shortcut(path, link)
    except Exception as ex:
        import utool as ut
        # checkpath prints diagnostics about both endpoints before reporting
        checkpath(link, verbose=True)
        checkpath(path, verbose=True)
        do_raise = (on_error == 'raise')
        ut.printex(ex, '[util_path] error making symlink', iswarning=not do_raise)
        if do_raise:
            raise
    return link
Attempt to create a symbolic link .
9,048
def remove_broken_links(dpath, verbose=True):
    """Delete every symlink in ``dpath`` whose target no longer exists.

    Returns:
        int: the number of broken links removed.
    """
    entries = (join(dpath, name) for name in os.listdir(dpath))
    broken = [p for p in entries if islink(p) and not exists(p)]
    num_broken = len(broken)
    if verbose and (verbose > 1 or num_broken > 0):
        print('[util_path] Removing %d broken links in %r' % (num_broken, dpath,))
    for dead_link in broken:
        os.unlink(dead_link)
    return num_broken
Removes all broken links in a directory
9,049
def non_existing_path(path_, dpath=None, offset=0, suffix=None, force_fmt=False):
    r"""
    Searches for and finds a path guaranteed to not exist.

    Args:
        path_ (str): candidate path; its basename may contain a '%d'
            format placeholder for conflict numbering
        dpath (str): directory to search in; defaults to dirname(path_)
        offset (int): starting counter for the numbered suffix
        suffix (str): optional suffix appended to the basename
        force_fmt (bool): always use the numbered form, even if the plain
            candidate does not exist yet

    Returns:
        str: a path in dpath that currently does not exist
    """
    import utool as ut
    from os.path import basename, dirname
    if dpath is None:
        dpath = dirname(path_)
    base_fmtstr = basename(path_)
    if suffix is not None:
        base_fmtstr = ut.augpath(base_fmtstr, suffix)
    if '%' not in base_fmtstr:
        if not force_fmt:
            # the plain name is fine when nothing occupies it yet
            first_choice = join(dpath, base_fmtstr)
            if not exists(first_choice):
                return first_choice
        # otherwise add a numeric placeholder: name -> name%d
        base_fmtstr = ut.augpath(base_fmtstr, '%d')
    # collect every existing name in dpath to avoid collisions
    dname_list = ut.glob(dpath, pattern='*', recursive=False,
                         with_files=True, with_dirs=True)
    conflict_set = set(basename(dname) for dname in dname_list)
    newname = ut.get_nonconflicting_string(base_fmtstr, conflict_set,
                                           offset=offset)
    newpath = join(dpath, newname)
    return newpath
Searches for and finds a path guaranteed not to exist.
9,050
def create_isobaric_quant_lookup(quantdb, specfn_consensus_els, channelmap):
    """Create an sqlite lookup table mapping spectra to isobaric quant data.

    Args:
        quantdb: quant lookup database interface
        specfn_consensus_els: iterable of (spectra_filename, consensusXML
            element) pairs
        channelmap (dict): channel name -> channel number
    """
    channels_store = ((name,) for name, c_id in
                      sorted(channelmap.items(), key=lambda x: x[1]))
    quantdb.store_channelmap(channels_store)
    # map channel number -> db channel id
    channelmap_dbid = {channelmap[ch_name]: ch_id
                       for ch_id, ch_name in quantdb.get_channelmap()}
    quants = []
    mzmlmap = quantdb.get_mzmlfile_map()
    for specfn, consensus_el in specfn_consensus_els:
        rt = openmsreader.get_consxml_rt(consensus_el)
        # consensusXML retention time is in seconds; lookup keys on minutes
        rt = round(float(Decimal(rt) / 60), 12)
        qdata = get_quant_data(consensus_el)
        spectra_id = quantdb.get_spectra_id(mzmlmap[specfn], retention_time=rt)
        for channel_no in sorted(qdata.keys()):
            quants.append((spectra_id, channelmap_dbid[channel_no],
                           qdata[channel_no]))
        if len(quants) >= DB_STORE_CHUNK:
            quantdb.store_isobaric_quants(quants)
            # BUGFIX: reset the buffer after flushing a chunk; previously the
            # buffer kept growing and already-stored rows were stored again
            quants = []
    quantdb.store_isobaric_quants(quants)
    quantdb.index_isobaric_quants()
Creates an sqlite lookup table of scannrs with quant data .
9,051
def get_precursors_from_window(quantdb, minmz):
    """
    Fetch a window of precursor features (FEATURE_ALIGN_WINDOW_AMOUNT of
    them, above ``minmz``) from the ms1 quant database.

    Returns:
        tuple: (featmap, mz) where featmap maps
            fn_id -> charge -> [(mz, rt, feat_id), ...]
        and mz is the m/z of the last row returned, or False when the query
        yields nothing.
        NOTE(review): treating this as the *highest* m/z of the window
        assumes the query returns rows ordered by m/z — confirm in
        get_precursor_quant_window.
    """
    featmap = {}
    mz = False
    features = quantdb.get_precursor_quant_window(FEATURE_ALIGN_WINDOW_AMOUNT,
                                                  minmz)
    for feat_id, fn_id, charge, mz, rt in features:
        # EAFP nested-dict insert: create fn_id / charge levels on demand
        try:
            featmap[fn_id][charge].append((mz, rt, feat_id))
        except KeyError:
            try:
                featmap[fn_id][charge] = [(mz, rt, feat_id)]
            except KeyError:
                featmap[fn_id] = {charge: [(mz, rt, feat_id)]}
    return featmap, mz
Returns a dict of a specified amount of features from the ms1 quant database and the highest mz of those features
9,052
def get_quant_data(cons_el):
    """Return {map_id: intensity} for every reporter ``<element>`` nested
    under a consensusXML element."""
    return {reporter.attrib['map']: reporter.attrib['it']
            for reporter in cons_el.findall('.//element')}
Gets quant data from consensusXML element
9,053
def get_plat_specifier():
    """Return the standard platform specifier used to name build
    directories, e.g. ``.linux-x86_64-3.10``.

    Returns:
        str: '.<platform>-<major>.<minor>' plus '-pydebug' on debug builds.
    """
    # sysconfig.get_platform() is the stdlib replacement for the deprecated
    # distutils.util.get_platform() (distutils was removed in Python 3.12)
    import sysconfig
    plat_name = sysconfig.get_platform()
    # BUGFIX: the old code used sys.version[0:3], which truncates two-digit
    # minor versions ('3.10' -> '3.1'); derive it from version_info instead
    plat_specifier = '.%s-%d.%d' % (plat_name, sys.version_info[0],
                                    sys.version_info[1])
    if hasattr(sys, 'gettotalrefcount'):
        # only --with-pydebug interpreter builds define gettotalrefcount
        plat_specifier += '-pydebug'
    return plat_specifier
Standard platform specifier used by distutils
9,054
def get_system_python_library():
    """FIXME: hacky way of finding the system libpython. Not cross-platform.

    Searches the directories in LD_LIBRARY_PATH plus /usr/lib for a shared
    library whose name matches the running interpreter.

    Returns:
        str: the real path of the unique matching library.

    Raises:
        AssertionError: when zero or multiple candidates are found.
    """
    import os
    import utool as ut
    from os.path import basename, realpath
    pyname = basename(realpath(sys.executable))
    # BUGFIX: LD_LIBRARY_PATH may be unset; fall back to an empty search path
    # instead of raising KeyError
    ld_library_path = os.environ.get('LD_LIBRARY_PATH', '')
    libdirs = [x for x in ld_library_path.split(os.pathsep) if x] + ['/usr/lib']
    libfiles = ut.flatten([ut.glob(d, '*' + ut.get_lib_ext(), recursive=True)
                           for d in libdirs])
    python_libs = [realpath(f) for f in libfiles
                   if 'lib' + pyname in basename(f)]
    python_libs = ut.unique_ordered(python_libs)
    assert len(python_libs) == 1, str(python_libs)
    return python_libs[0]
FIXME ; hacky way of finding python library . Not cross platform yet .
9,055
def get_dynlib_dependencies(lib_path):
    """Inspect the dynamic-library dependencies of ``lib_path`` using the
    platform's native tool (ldd / otool / objdump).

    Returns:
        str: the tool's stdout (filtered to 'DLL Name:' lines on win32).

    Raises:
        AssertionError: if the tool exits with a nonzero status.
        NotImplementedError: on an unsupported platform.
    """
    if LINUX:
        ldd_fpath = '/usr/bin/ldd'
        depend_out, depend_err, ret = cmd(ldd_fpath, lib_path, verbose=False)
    elif DARWIN:
        otool_fpath = '/opt/local/bin/otool'
        depend_out, depend_err, ret = cmd(otool_fpath, '-L', lib_path,
                                          verbose=False)
    elif WIN32:
        depend_out, depend_err, ret = cmd('objdump', '-p', lib_path,
                                          verbose=False)
        relevant_lines = [line for line in depend_out.splitlines()
                          if 'DLL Name:' in line]
        depend_out = '\n'.join(relevant_lines)
    else:
        # BUGFIX: previously fell through and raised NameError on depend_out
        raise NotImplementedError('unsupported platform: %r' % (sys.platform,))
    assert ret == 0, 'bad dependency check'
    return depend_out
Executes tools for inspecting dynamic library dependencies depending on the current platform .
9,056
def startfile(fpath, detatch=True, quote=False, verbose=False, quiet=True):
    """
    Open a file with the operating system's default application
    (xdg-open / open / os.startfile).

    Args:
        fpath (str): file to open; must exist
        detatch (bool): run the launcher detached (passed to cmd)
        quote (bool): unused  # NOTE(review): accepted but never read
        verbose, quiet (bool): passed through to cmd

    Raises:
        Exception: if the file does not exist, or based on the launcher's
            return code (see NOTE below).
        RuntimeError: on an unknown platform.
    """
    print('[cplat] startfile(%r)' % fpath)
    fpath = normpath(fpath)
    if not exists(fpath):
        raise Exception('Cannot start nonexistant file: %r' % fpath)
    if not WIN32:
        # shell-quote to survive spaces and metacharacters
        fpath = pipes.quote(fpath)
    if LINUX:
        outtup = cmd(('xdg-open', fpath), detatch=detatch, verbose=verbose,
                     quiet=quiet)
    elif DARWIN:
        outtup = cmd(('open', fpath), detatch=detatch, verbose=verbose,
                     quiet=quiet)
    elif WIN32:
        # NOTE(review): outtup is never assigned on this branch, so the
        # check below raises NameError on windows — confirm and fix upstream
        os.startfile(fpath)
    else:
        raise RuntimeError('Unknown Platform')
    if outtup is not None:
        out, err, ret = outtup
        # NOTE(review): this raises when ret is falsy (i.e. exit code 0),
        # which looks inverted — confirm the return convention of cmd()
        if not ret:
            raise Exception(out + ' -- ' + err)
    pass
Uses default program defined by the system to open a file .
9,057
def view_directory(dname=None, fname=None, verbose=True):
    """
    View a directory in the operating system file browser. Currently
    supports windows explorer, mac open, and linux nautilus.

    Args:
        dname (str or pathlib.Path): directory to open; defaults to the cwd
        fname (str): optional filename to select (used on linux only)
        verbose (bool): print what is being opened
    """
    from utool.util_arg import STRICT
    from utool.util_path import checkpath
    if HAVE_PATHLIB and isinstance(dname, pathlib.Path):
        dname = str(dname)
    if verbose:
        print('[cplat] view_directory(%r) ' % dname)
    dname = os.getcwd() if dname is None else dname
    # pick the file-browser executable for the current platform
    open_prog = {'win32': 'explorer.exe',
                 'linux': 'nautilus',
                 'darwin': 'open'}[OS_TYPE]
    dname = normpath(dname)
    if STRICT:
        assert checkpath(dname, verbose=verbose), 'directory doesnt exit'
    if fname is not None and OS_TYPE == 'linux':
        arg = join(dname, fname)
    else:
        arg = dname
    args = (open_prog, arg)
    print(subprocess.list2cmdline(args))
    subprocess.Popen(args)
View a directory in the operating system file browser. Currently supports Windows Explorer, macOS open, and Linux Nautilus.
9,058
def platform_cache_dir():
    """Return the per-user cache directory appropriate for this platform.

    Intended for temporary, deletable application data.

    Raises:
        NotImplementedError: on an unrecognized platform.
    """
    if WIN32:
        base = '~/AppData/Local'
    elif LINUX:
        base = '~/.cache'
    elif DARWIN:
        base = '~/Library/Caches'
    else:
        raise NotImplementedError('Unknown Platform %r' % (sys.platform,))
    return normpath(expanduser(base))
Returns a directory which should be writable for any application This should be used for temporary deletable data .
9,059
def __parse_cmd_args(args, sudo, shell):
    """
    Normalize command arguments for Popen.

    When shell is True, Popen will only accept strings (no tuples), so
    sequences are joined into one command string; when shell is False,
    strings are tokenized with shlex into an argument tuple. sudo is
    prepended on non-windows platforms. (Shell really should not be True.)
    """
    # unwrap a 1-tuple that contains the real argument tuple
    if isinstance(args, tuple) and len(args) == 1 and isinstance(args[0], tuple):
        args = args[0]
    if shell:
        # shell mode: coerce everything into a single command string
        if isinstance(args, six.string_types):
            pass
        elif isinstance(args, (list, tuple)) and len(args) > 1:
            args = ' '.join(args)
        elif isinstance(args, (list, tuple)) and len(args) == 1:
            if isinstance(args[0], (tuple, list)):
                args = ' '.join(args)
            elif isinstance(args[0], six.string_types):
                args = args[0]
    else:
        # no-shell mode: coerce everything into an argument tuple
        if isinstance(args, six.string_types):
            args = shlex.split(args, posix=not WIN32)
        elif isinstance(args, (list, tuple)):
            if len(args) > 1:
                args = tuple(args)
            elif len(args) == 1:
                if isinstance(args[0], (tuple, list)):
                    args = tuple(args[0])
                elif isinstance(args[0], six.string_types):
                    args = shlex.split(args[0], posix=not WIN32)
    if sudo is True:
        if not WIN32:
            if shell:
                args = 'sudo ' + args
            else:
                args = tuple(['sudo']) + tuple(args)
        else:
            # no sudo equivalent on windows; leave args untouched
            pass
    if WIN32:
        # windows: a lone string inside a sequence still needs tokenizing
        if len(args) == 1 and isinstance(args[0], six.string_types):
            args = shlex.split(args[0], posix=not WIN32)
    return args
When shell is True Popen will only accept strings . No tuples Shell really should not be true .
9,060
def cmd2(command, shell=False, detatch=False, verbose=False, verbout=None):
    """
    Run a command string and capture its output ("trying to clean up cmd").

    Args:
        command (str): the command line (list/tuple not supported yet)
        shell (bool): passed through to subprocess.Popen
        detatch (bool): return immediately with the live process
        verbose (bool or int): 2+ prints the command and banner lines
        verbout (bool): echo the child's output while it streams;
            defaults to (verbose >= 1)

    Returns:
        dict: {'proc': Popen} when detatch, else {'out', 'err', 'ret'}
    """
    import shlex
    if isinstance(command, (list, tuple)):
        raise ValueError('command tuple not supported yet')
    args = shlex.split(command, posix=not WIN32)
    if verbose is True:
        verbose = 2
    if verbout is None:
        verbout = verbose >= 1
    if verbose >= 2:
        print('+=== START CMD2 ===')
        print('Command:')
        print(command)
        if verbout:
            print('----')
            print('Stdout:')
    # stderr is merged into stdout so one stream carries everything
    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, shell=shell,
                            universal_newlines=True)
    if detatch:
        info = {'proc': proc}
    else:
        write_fn = sys.stdout.write
        flush_fn = sys.stdout.flush
        logged_out = []
        # stream the child's output line by line while recording it
        for line in _run_process(proc):
            line_ = line if six.PY2 else line
            if len(line_) > 0:
                if verbout:
                    write_fn(line_)
                    flush_fn()
                logged_out.append(line)
        try:
            from utool import util_str
            out = ''.join(logged_out)
        except UnicodeDecodeError:
            # fall back to forcing unicode on mixed-encoding output
            from utool import util_str
            logged_out = util_str.ensure_unicode_strlist(logged_out)
            out = ''.join(logged_out)
        (out_, err) = proc.communicate()
        ret = proc.wait()
        info = {'out': out, 'err': err, 'ret': ret, }
    if verbose >= 2:
        print('L END CMD2 ')
    return info
Trying to clean up cmd
9,061
def search_env_paths(fname, key_list=None, verbose=None):
    r"""
    Search the directories listed in PATH-like environment variables to see
    if ``fname`` exists.

    Args:
        fname (str): glob pattern to look for
        key_list (list): environ keys to search; defaults to every key whose
            name contains 'PATH'
        verbose (bool): unused  # NOTE(review): accepted but never read

    Returns:
        dict: env key -> list of matching paths
    """
    import utool as ut
    if key_list is None:
        key_list = [key for key in os.environ if key.find('PATH') > -1]
    print('key_list = %r' % (key_list,))
    found = ut.ddict(list)
    for key in key_list:
        dpath_list = os.environ[key].split(os.pathsep)
        for dpath in dpath_list:
            matches = ut.glob(dpath, fname)
            found[key].extend(matches)
    return dict(found)
r Searches your PATH to see if fname exists
9,062
def change_term_title(title):
    """Set the terminal window title (unix only; tested on Ubuntu GNOME).

    Useful for identifying long-running or debugging tasks by their
    terminal window title.

    NOTE: currently a no-op — disabled by the unconditional early return.
    """
    if True:
        # functionality intentionally disabled; remove this to re-enable
        return
    if not WIN32:
        if title:
            # xterm OSC 0 escape sequence: set icon name and window title.
            # BUGFIX: restored the raw-string escape sequence that had been
            # mangled into invalid syntax (`cmd_str = r + title +`).
            cmd_str = r'echo -en "\033]0;' + title + r'\a"'
            os.system(cmd_str)
Changes the terminal title text to help identify debugging tasks. Only works on Unix systems; tested only on Ubuntu GNOME.
9,063
def unload_module(modname):
    """
    Remove a module from sys.modules and null out references to it.

    WARNING: POTENTIALLY DANGEROUS AND MAY NOT WORK — this mutates any
    container that holds a reference to the module object.
    """
    import sys
    import gc
    if modname in sys.modules:
        referrer_list = gc.get_referrers(sys.modules[modname])
        for referer in referrer_list:
            if referer is not sys.modules:
                # NOTE(review): assumes every referrer supports item
                # assignment (e.g. a namespace dict) — other referrer types
                # would raise here. Entries are nulled rather than deleted.
                referer[modname] = None
        refcount = sys.getrefcount(sys.modules[modname])
        print('%s refcount=%r' % (modname, refcount))
        del sys.modules[modname]
WARNING POTENTIALLY DANGEROUS AND MAY NOT WORK
9,064
def base_add_isoquant_data(features, quantfeatures, acc_col, quantacc_col,
                           quantfields):
    """Generator that merges quant data from one peptide/protein table into
    another.

    Each feature dict is copied and updated with the quant values whose
    accession matches; features without a match get 'NA' for every
    quantfield.
    """
    quant_map = get_quantmap(quantfeatures, quantacc_col, quantfields)
    na_fill = {field: 'NA' for field in quantfields}
    for feature in features:
        merged = {key: val for key, val in feature.items()}
        merged.update(quant_map.get(feature[acc_col], na_fill))
        yield merged
Generic function that takes a peptide or protein table and adds quant data from ANOTHER such table .
9,065
def get_quantmap(features, acc_col, quantfields):
    """Map each quanted feature's accession to its quant values.

    NOTE: pops ``acc_col`` out of every feature dict, mutating the input.

    Returns:
        dict: accession -> {quantfield: value, ...}
    """
    quantmap = {}
    for feat in features:
        accession = feat.pop(acc_col)
        quantmap[accession] = {field: feat[field] for field in quantfields}
    return quantmap
Runs through proteins that are in a quanted protein table extracts and maps their information based on the quantfields list input . Map is a dict with protein_accessions as keys .
9,066
def partition_varied_cfg_list(cfg_list, default_cfg=None, recursive=False):
    r"""
    Separates varied from non-varied parameters in a list of config dicts.

    Args:
        cfg_list (list of dict): configurations to compare
        default_cfg (dict): optional baseline included in the intersection
        recursive (bool): also partition dict values present in every config

    Returns:
        tuple: (nonvaried_cfg, varied_cfg_list) — the shared key/value
        pairs, and each input config reduced to only its varying keys.
    """
    import utool as ut
    # the non-varied part is the key/value intersection across all configs
    if default_cfg is None:
        nonvaried_cfg = reduce(ut.dict_intersection, cfg_list)
    else:
        nonvaried_cfg = reduce(ut.dict_intersection, [default_cfg] + cfg_list)
    nonvaried_keys = list(nonvaried_cfg.keys())
    varied_cfg_list = [ut.delete_dict_keys(cfg.copy(), nonvaried_keys)
                       for cfg in cfg_list]
    if recursive:
        # recurse into dict-valued keys that appear in every varied config
        varied_keys = list(set([key for cfg in varied_cfg_list for key in cfg]))
        varied_vals_list = [[cfg[key] for cfg in varied_cfg_list if key in cfg]
                            for key in varied_keys]
        for key, varied_vals in zip(varied_keys, varied_vals_list):
            if len(varied_vals) == len(cfg_list):
                if all([isinstance(val, dict) for val in varied_vals]):
                    nonvaried_subdict, varied_subdicts = partition_varied_cfg_list(
                        varied_vals, recursive=recursive)
                    nonvaried_cfg[key] = nonvaried_subdict
                    for cfg, subdict in zip(varied_cfg_list, varied_subdicts):
                        cfg[key] = subdict
    return nonvaried_cfg, varied_cfg_list
r Separates varied from non - varied parameters in a list of configs
9,067
def get_cfg_lbl(cfg, name=None, nonlbl_keys=INTERNAL_CFGKEYS, key_order=None,
                with_name=True, default_cfg=None, sep=''):
    r"""
    Formats a flat configuration dict into a short string label. This is
    useful for re-creating command line strings.

    Args:
        cfg (dict): flat configuration
        name (str): config name; defaults to cfg['_cfgname']
        nonlbl_keys (list): keys excluded from the label
        key_order (list): explicit ordering of label keys
        with_name (bool): prefix the label with the name
        default_cfg (dict): values shared with this baseline are dropped
        sep (str): item separator passed to repr4

    Returns:
        str: label like 'name:key1=val1,key2=val2'
    """
    import utool as ut
    if name is None:
        name = cfg.get('_cfgname', '')
    if default_cfg is not None:
        # keep only the parameters that differ from the default
        cfg = ut.partition_varied_cfg_list([cfg], default_cfg)[1][0]
    _clean_cfg = ut.delete_keys(cfg.copy(), nonlbl_keys)
    _lbl = ut.repr4(_clean_cfg, explicit=True, nl=False, strvals=True,
                    key_order=key_order, itemsep=sep)
    # strip the 'dict(...)' wrapper that explicit repr4 produces
    _search = ['dict(', ')']
    _repl = [''] * len(_search)
    _lbl = ut.multi_replace(_lbl, _search, _repl).rstrip(',')
    if not with_name:
        return _lbl
    if NAMEVARSEP in name:
        # the name itself carries options; fold them into the label prefix
        hacked_name, _cfgstr, _ = parse_cfgstr_name_options(name)
        _cfgstr_options_list = re.split(
            r',\s*' + ut.negative_lookahead(r'[^\[\]]*\]'), _cfgstr)
        _cfg_options = ut.parse_cfgstr_list(
            _cfgstr_options_list, smartcast=False, oldmode=False)
        # drop options that the cfg dict already covers
        ut.delete_keys(_cfg_options, cfg.keys())
        _preflbl = ut.repr4(_cfg_options, explicit=True, nl=False, strvals=True)
        _preflbl = ut.multi_replace(_preflbl, _search, _repl).rstrip(',')
        hacked_name += NAMEVARSEP + _preflbl
        cfg_lbl = hacked_name + _lbl
    else:
        cfg_lbl = name + NAMEVARSEP + _lbl
    return cfg_lbl
r Formats a flat configuration dict into a short string label . This is useful for re - creating command line strings .
9,068
def parse_cfgstr_list2(cfgstr_list, named_defaults_dict=None, cfgtype=None,
                       alias_keys=None, valid_keys=None, expand_nested=True,
                       strict=True, special_join_dict=None,
                       is_nestedcfgtype=False, metadata=None):
    r"""
    Parses config strings by looking up each name in a dict of named default
    configs and expanding the requested option combinations.

    Args:
        cfgstr_list (list of str): strings like 'name:key=val1,val2'
        named_defaults_dict (dict): name -> base config(s); may also be
            defined inline via 'name=:opts' entries
        cfgtype, alias_keys, valid_keys, strict: passed to
            customize_base_cfg
        expand_nested (bool): flatten each string's combos into one list
        special_join_dict (dict): extra keys merged into '::' sub-configs
        is_nestedcfgtype (bool): treat each combo as a nested group
        metadata: passed to lookup_base_cfg_list

    Returns:
        list: one list of expanded config dicts per input cfgstr
    """
    import utool as ut
    cfg_combos_list = []
    cfgstr_list_ = []
    # first pass: 'name=:opts' strings dynamically define named defaults
    dyndef_named_defaults = {}
    for cfgstr in cfgstr_list:
        if cfgstr.find('=:') > -1:
            cfgname, cfgopt_strs, subx = parse_cfgstr_name_options(cfgstr)
            assert cfgname.endswith('=')
            cfgname = cfgname[:-1]
            base_cfg_list = lookup_base_cfg_list(cfgname, named_defaults_dict,
                                                 metadata=metadata)
            cfg_options = noexpand_parse_cfgstrs(cfgopt_strs)
            dyndef_named_defaults[cfgname] = cfg_options
        else:
            cfgstr_list_.append(cfgstr)
    if len(dyndef_named_defaults) > 0 and named_defaults_dict is None:
        named_defaults_dict = dyndef_named_defaults
    # second pass: expand each remaining config string
    for cfgstr in cfgstr_list_:
        cfg_combos = []
        if cfgstr.find('::') > -1:
            # '::'-joined strings are parsed recursively and then merged
            special_cfgstr_list = cfgstr.split('::')
            special_combo_list = parse_cfgstr_list2(
                special_cfgstr_list,
                named_defaults_dict=named_defaults_dict, cfgtype=cfgtype,
                alias_keys=alias_keys, valid_keys=valid_keys, strict=strict,
                expand_nested=expand_nested, is_nestedcfgtype=False,
                metadata=metadata)
            if special_join_dict is not None:
                for special_combo in special_combo_list:
                    for cfg in special_combo:
                        cfg.update(special_join_dict)
            if is_nestedcfgtype:
                cfg_combo = tuple([combo for combo in special_combo_list])
            else:
                cfg_combo = special_combo_list
            if expand_nested:
                cfg_combos.extend(cfg_combo)
            else:
                cfg_combos_list.append(cfg_combo)
        else:
            cfgname, cfgopt_strs, subx = parse_cfgstr_name_options(cfgstr)
            try:
                base_cfg_list = lookup_base_cfg_list(cfgname,
                                                     named_defaults_dict,
                                                     metadata=metadata)
            except Exception as ex:
                ut.printex(ex, keys=['cfgstr_list', 'cfgstr_list_'])
                raise
            for base_cfg in base_cfg_list:
                # NOTE(review): these unconditional prints look like leftover
                # debugging output — consider removing or gating on verbosity
                print('cfgname = %r' % (cfgname,))
                print('cfgopt_strs = %r' % (cfgopt_strs,))
                print('base_cfg = %r' % (base_cfg,))
                print('alias_keys = %r' % (alias_keys,))
                print('cfgtype = %r' % (cfgtype,))
                print('offset = %r' % (len(cfg_combos),))
                print('valid_keys = %r' % (valid_keys,))
                print('strict = %r' % (strict,))
                cfg_combo = customize_base_cfg(cfgname, cfgopt_strs, base_cfg,
                                               cfgtype, alias_keys, valid_keys,
                                               strict=strict,
                                               offset=len(cfg_combos))
                if is_nestedcfgtype:
                    cfg_combo = [cfg_combo]
                if expand_nested:
                    cfg_combos.extend(cfg_combo)
                else:
                    cfg_combos_list.append(cfg_combo)
        if expand_nested:
            cfg_combos_list.append(cfg_combos)
    return cfg_combos_list
r Parses config strings . By looking up name in a dict of configs
9,069
def grid_search_generator(grid_basis=[], *args, **kwargs):
    r"""Iteratively yield individual configuration points inside a defined
    basis.

    The basis may be given as a list of (name, values) pairs, as extra
    positional pairs, and/or as keyword arguments; each yielded point is one
    dict drawn from the cartesian product of all value lists.
    """
    combined_basis = list(grid_basis) + list(args) + list(kwargs.items())
    basis_dict = OrderedDict(combined_basis)
    for grid_point in util_dict.iter_all_dict_combinations_ordered(basis_dict):
        yield grid_point
Iteratively yields individual configuration points inside a defined basis.
9,070
def get_cfgdict_list_subset(cfgdict_list, keys):
    r"""Return the unique dictionaries obtained by restricting each config
    dict to only the specified keys (first-seen order preserved)."""
    import utool as ut
    subsets = [ut.dict_subset(cfgdict, keys) for cfgdict in cfgdict_list]
    as_tuples = [tuple(ut.dict_to_keyvals(subset)) for subset in subsets]
    unique_tuples = ut.unique_ordered(as_tuples)
    return [dict(tup) for tup in unique_tuples]
r returns list of unique dictionaries only with keys specified in keys
9,071
def constrain_cfgdict_list(cfgdict_list_, constraint_func):
    """Filter configurations through ``constraint_func`` and remove
    duplicates.

    The constraint function receives a copy of each config (which it may
    mutate); configs are dropped when it returns False or when the copy ends
    up empty.
    """
    accepted = []
    for original in cfgdict_list_:
        candidate = original.copy()
        if constraint_func(candidate) is False or len(candidate) == 0:
            continue
        if candidate not in accepted:
            accepted.append(candidate)
    return accepted
constrains configurations and removes duplicates
9,072
def make_cfglbls(cfgdict_list, varied_dict):
    """
    Build display labels showing only the parameters that matter from each
    cfgdict: keys whose value never varies (or is None) are dropped, and the
    remainder is rendered as wrapped 'key=value' text.

    Args:
        cfgdict_list (list of dict): configurations to label
        varied_dict (dict): key -> list of values that key takes

    Returns:
        list of str: one wrapped label per input config
    """
    import textwrap
    wrapper = textwrap.TextWrapper(width=50)
    cfglbl_list = []
    for cfgdict_ in cfgdict_list:
        cfgdict = cfgdict_.copy()
        # iterate the original so deletions from the copy are safe
        for key in six.iterkeys(cfgdict_):
            try:
                vals = varied_dict[key]
                # a key with a single possible value carries no information
                if len(vals) == 1:
                    del cfgdict[key]
                else:
                    # explicit None values are also uninformative
                    if cfgdict[key] is None:
                        del cfgdict[key]
            except KeyError:
                # key not tracked in varied_dict; drop it from the label
                del cfgdict[key]
        cfglbl = six.text_type(cfgdict)
        # turn the dict repr into compact key=value text
        search_repl_list = [('\'', ''), ('}', ''), ('{', ''), (': ', '=')]
        for search, repl in search_repl_list:
            cfglbl = cfglbl.replace(search, repl)
        cfglbl = ('\n'.join(wrapper.wrap(cfglbl)))
        cfglbl_list.append(cfglbl)
    return cfglbl_list
Show only the text in labels that matters from the cfgdict.
9,073
def gridsearch_timer(func_list, args_list, niters=None, **searchkw):
    """
    Times a series of functions on a series of inputs.

    Args:
        func_list (list): functions to time against each other
        args_list (list or callable): per-iteration inputs, or a callable
            mapping count -> args
        niters (int): number of input sizes; defaults to len(args_list)
        **searchkw: func_labels, use_cache, assert_eq, count_to_xtick,
            plus plotting kwargs forwarded to multi_plot

    Returns:
        dict: {'plot_timings': callable, 'timings': {func: [seconds, ...]}}
    """
    import utool as ut
    timings = ut.ddict(list)
    if niters is None:
        niters = len(args_list)
    if ut.is_funclike(args_list):
        get_args = args_list
    else:
        get_args = args_list.__getitem__
    func_labels = searchkw.get('func_labels',
                               [ut.get_funcname(func) for func in func_list])
    use_cache = searchkw.get(
        'use_cache', not ut.get_argflag(('--nocache', '--nocache-time')))
    assert_eq = searchkw.get('assert_eq', True)
    count_list = list(range(niters))
    xlabel_list = []
    # timings are cached on disk so re-runs can skip already-measured cases
    cache = ut.ShelfCacher('timeings.shelf', enabled=use_cache)
    for count in ut.ProgressIter(count_list, lbl='Testing Timeings'):
        args_ = get_args(count)
        xlabel_list.append(args_)
        if True:
            # normalize to a tuple so it can be star-unpacked below
            if not isinstance(args_, tuple):
                args_ = (args_,)
            assert isinstance(args_, tuple), 'args_ should be a tuple so it can be unpacked'
        ret_list = []
        for func_ in func_list:
            try:
                kwargs_ = {}
                func_cachekey = ut.get_func_result_cachekey(func_, args_, kwargs_)
                ellapsed = cache.load(func_cachekey)
            except ut.CacheMissException:
                with ut.Timer(verbose=False) as t:
                    ret = func_(*args_)
                ret_list.append(ret)
                ellapsed = t.ellapsed
                cache.save(func_cachekey, ellapsed)
            timings[func_].append(ellapsed)
        if assert_eq:
            # all timed functions must agree on their results
            ut.assert_all_eq(list(map(ut.cachestr_repr, ret_list)))
    cache.close()
    count_to_xtick = searchkw.get('count_to_xtick', lambda x, y: x)
    xtick_list = [count_to_xtick(count, get_args(count)) for count in count_list]

    def plot_timings():
        # deferred plotting so callers without plottool can still time
        import plottool as pt
        ydata_list = ut.dict_take(timings, func_list)
        xdata = xtick_list
        ylabel = 'seconds'
        xlabel = 'input size'
        pt.multi_plot(xdata, ydata_list, label_list=func_labels,
                      ylabel=ylabel, xlabel=xlabel, **searchkw)
    time_result = {'plot_timings': plot_timings, 'timings': timings, }
    return time_result
Times a series of functions on a series of inputs
9,074
def get_mapping(version=1, exported_at=None, app_name=None):
    """Return the Heroku Connect mapping for the entire project.

    Args:
        version (int): mapping schema version.
        exported_at (datetime.datetime): export timestamp; defaults to now.
        app_name (str): Heroku app name; falls back to the configured one.
    """
    if exported_at is None:
        exported_at = timezone.now()
    app_name = app_name or settings.HEROKU_CONNECT_APP_NAME
    connection = {
        'organization_id': settings.HEROKU_CONNECT_ORGANIZATION_ID,
        'app_name': app_name,
        'exported_at': exported_at.isoformat(),
    }
    mappings = [model.get_heroku_connect_mapping()
                for model in get_heroku_connect_models()]
    return {
        'version': version,
        'connection': connection,
        'mappings': mappings,
    }
Return Heroku Connect mapping for the entire project .
9,075
def get_heroku_connect_models():
    """Return an iterator over all registered Heroku Connect models.

    Only unmanaged subclasses of HerokuConnectModel are included. The model
    registry is validated eagerly; iteration itself is lazy.
    """
    from django.apps import apps
    apps.check_models_ready()
    from heroku_connect.db.models import HerokuConnectModel
    registered = (model
                  for models in apps.all_models.values()
                  for model in models.values())
    return (model for model in registered
            if issubclass(model, HerokuConnectModel)
            and not model._meta.managed)
Return all registered Heroku Connect Models .
9,076
def create_heroku_connect_schema(using=DEFAULT_DB_ALIAS):
    """
    Create the Heroku Connect schema and all Connect model tables.

    Args:
        using (str): Django database alias to operate on.

    Returns:
        bool: True if the schema was created, False if it already existed.
    """
    connection = connections[using]
    with connection.cursor() as cursor:
        cursor.execute(_SCHEMA_EXISTS_QUERY, [settings.HEROKU_CONNECT_SCHEMA])
        schema_exists = cursor.fetchone()[0]
        if schema_exists:
            return False
        # AsIs: the schema name is a SQL identifier, not a quotable value
        cursor.execute("CREATE SCHEMA %s;", [AsIs(settings.HEROKU_CONNECT_SCHEMA)])
    with connection.schema_editor() as editor:
        for model in get_heroku_connect_models():
            editor.create_model(model)
        # hstore extension — presumably required by the trigger log models
        # created below; confirm against their field definitions
        editor.execute('CREATE EXTENSION IF NOT EXISTS "hstore";')
        # imported here, deferred to avoid an import cycle at module load
        from heroku_connect.models import (TriggerLog, TriggerLogArchive)
        for cls in [TriggerLog, TriggerLogArchive]:
            editor.create_model(cls)
    return True
Create Heroku Connect schema .
9,077
def get_connections(app):
    """Return all Heroku Connect connections set up with the given app.

    Raises:
        requests.HTTPError: on a non-2xx API response.
    """
    url = os.path.join(settings.HEROKU_CONNECT_API_ENDPOINT, 'connections')
    response = requests.get(url, params={'app': app},
                            headers=_get_authorization_headers())
    response.raise_for_status()
    return response.json()['results']
Return all Heroku Connect connections setup with the given application .
9,078
def get_connection(connection_id, deep=False):
    """Get Heroku Connect connection information for one connection.

    Args:
        connection_id (str): the connection to fetch.
        deep (bool): request the detailed representation.

    Raises:
        requests.HTTPError: on a non-2xx API response.
    """
    url = os.path.join(settings.HEROKU_CONNECT_API_ENDPOINT,
                       'connections', connection_id)
    response = requests.get(url, params={'deep': deep},
                            headers=_get_authorization_headers())
    response.raise_for_status()
    return response.json()
Get Heroku Connection connection information .
9,079
def import_mapping(connection_id, mapping):
    """POST a Heroku Connect mapping to the connection's import action.

    Raises:
        requests.HTTPError: on a non-2xx API response.
    """
    url = os.path.join(settings.HEROKU_CONNECT_API_ENDPOINT,
                       'connections', connection_id, 'actions', 'import')
    response = requests.post(url=url, json=mapping,
                             headers=_get_authorization_headers())
    response.raise_for_status()
Import Heroku Connection mapping for given connection .
9,080
def link_connection_to_account(app):
    """Link the Heroku Connect connection to your Heroku user account.

    Raises:
        requests.HTTPError: on a non-2xx API response.
    """
    url = os.path.join(settings.HEROKU_CONNECT_API_ENDPOINT,
                       'users', 'me', 'apps', app, 'auth')
    response = requests.post(url=url, headers=_get_authorization_headers())
    response.raise_for_status()
Link the connection to your Heroku user account .
9,081
def fetch_cvparams_values_from_subel(base, subelname, paramnames, ns):
    """Locate ``subelname`` under ``base`` and return the cvParam values
    matching each of ``paramnames``.

    Value order in the returned list equals the order of paramnames.
    """
    sub_el = basereader.find_element_xpath(base, subelname, ns)
    cvparams = get_all_cvparams(sub_el, ns)
    return [fetch_cvparam_value_by_name(cvparams, name)
            for name in paramnames]
Searches a base element for subelement by name then takes the cvParams of that subelement and returns the values as a list for the paramnames that match . Value order in list equals input paramnames order .
9,082
def create_tables(self, tables):
    """Create the named tables in the sqlite lookup db.

    Column definitions come from the module-level ``mslookup_tables`` map.
    A table that already exists produces a warning instead of an error, and
    only successful creations are committed.
    """
    cursor = self.get_cursor()
    for table in tables:
        coldef = ', '.join(mslookup_tables[table])
        try:
            cursor.execute('CREATE TABLE {0}({1})'.format(table, coldef))
        except sqlite3.OperationalError as error:
            print(error)
            print('Warning: Table {} already exists in database, will '
                  'add to existing tables instead of creating '
                  'new.'.format(table))
        else:
            self.conn.commit()
Creates database tables in sqlite lookup db
9,083
def connect(self, fn):
    """Open the sqlite database at ``fn`` and apply the standard PRAGMA
    tuning (page size, foreign keys, cache size, in-memory journal)."""
    self.conn = sqlite3.connect(fn)
    cursor = self.get_cursor()
    for pragma in ('PRAGMA page_size=4096',
                   'PRAGMA FOREIGN_KEYS=ON',
                   'PRAGMA cache_size=10000',
                   'PRAGMA journal_mode=MEMORY'):
        cursor.execute(pragma)
SQLite connect method initialize db
9,084
def index_column(self, index_name, table, column):
    """Create a named index over one column of a table.

    If the index already exists (sqlite3.OperationalError), a warning is
    printed and nothing is committed; otherwise the change is committed.
    """
    cur = self.get_cursor()
    sql = 'CREATE INDEX {0} on {1}({2})'.format(index_name, table, column)
    try:
        cur.execute(sql)
    except sqlite3.OperationalError as error:
        print(error)
        print('Skipping index creation and assuming it exists already')
    else:
        self.conn.commit()
Called by interfaces to index specific column in table
9,085
def get_sql_select(self, columns, table, distinct=False):
    """Create and return an SQL SELECT statement over the given columns."""
    distinct_kw = 'DISTINCT' if distinct else ''
    return 'SELECT {0} {1} FROM {2}'.format(
        distinct_kw, ', '.join(columns), table)
Creates and returns an SQL SELECT statement
9,086
def store_many(self, sql, values):
    """Execute ``sql`` once per value tuple (executemany) and commit."""
    cur = self.get_cursor()
    cur.executemany(sql, values)
    self.conn.commit()
Abstraction over executemany method
9,087
def execute_sql(self, sql):
    """Execute ``sql`` and return the live cursor so the caller can fetch
    or iterate the results."""
    cur = self.get_cursor()
    cur.execute(sql)
    return cur
Executes SQL and returns cursor for it
9,088
def get_mzmlfile_map(self):
    """Return {mzmlfilename: mzmlfile_id} for every file in the lookup."""
    cur = self.get_cursor()
    cur.execute('SELECT mzmlfile_id, mzmlfilename FROM mzmlfiles')
    return {name: file_id for file_id, name in cur.fetchall()}
Returns dict of mzmlfilenames and their db ids
9,089
def get_spectra_id(self, fn_id, retention_time=None, scan_nr=None):
    """Return the spectra id for an mzml file id, optionally narrowed by
    retention time and/or scan number.

    Raises:
        TypeError: (via subscripting None) when no row matches.
    """
    sql = 'SELECT spectra_id FROM mzml WHERE mzmlfile_id=? '
    values = [fn_id]
    if retention_time is not None:
        sql = '{0} AND retention_time=?'.format(sql)
        values.append(retention_time)
    if scan_nr is not None:
        sql = '{0} AND scan_nr=?'.format(sql)
        values.append(scan_nr)
    cur = self.get_cursor()
    cur.execute(sql, tuple(values))
    return cur.fetchone()[0]
Returns spectra id for spectra filename and retention time
9,090
def to_string_monkey(df, highlight_cols=None, latex=False):
    """
    Monkey-patch pandas' DataFrameFormatter to highlight the maximum value
    in the specified columns of each row when rendering a DataFrame.

    Args:
        df (pd.DataFrame): frame to render
        highlight_cols: column indices to highlight, or 'all'
        latex (bool): emit latex-style highlighting instead of ANSI

    Returns:
        str: the rendered table; falls back to str(df) if the patching
        breaks against the installed pandas version.
    """
    try:
        import pandas as pd
        import utool as ut
        import numpy as np
        import six
        if isinstance(highlight_cols, six.string_types) and highlight_cols == 'all':
            highlight_cols = np.arange(len(df.columns))
        # the formatter moved between pandas versions
        try:
            self = pd.formats.format.DataFrameFormatter(df)
        except AttributeError:
            self = pd.io.formats.format.DataFrameFormatter(df)
        self.highlight_cols = highlight_cols

        def monkey(self):
            # replacement for _to_str_columns that injects highlighting
            return monkey_to_str_columns(self, latex=latex)
        ut.inject_func_as_method(self, monkey, '_to_str_columns',
                                 override=True, force=True)

        def strip_ansi(text):
            # remove ANSI color escape codes so widths are computed on the
            # visible text only
            import re
            ansi_escape = re.compile(r'\x1b[^m]*m')
            return ansi_escape.sub('', text)

        def justify_ansi(self, texts, max_len, mode='right'):
            # pad by the visible length, compensating for invisible escapes
            if mode == 'left':
                return [x.ljust(max_len + (len(x) - len(strip_ansi(x))))
                        for x in texts]
            elif mode == 'center':
                return [x.center(max_len + (len(x) - len(strip_ansi(x))))
                        for x in texts]
            else:
                return [x.rjust(max_len + (len(x) - len(strip_ansi(x))))
                        for x in texts]
        ut.inject_func_as_method(self.adj, justify_ansi, 'justify',
                                 override=True, force=True)

        def strlen_ansii(self, text):
            # length measured on the ANSI-stripped text
            return pd.compat.strlen(strip_ansi(text), encoding=self.encoding)
        ut.inject_func_as_method(self.adj, strlen_ansii, 'len',
                                 override=True, force=True)

        if False:
            # dead debugging path kept for reference
            strlen = ut.partial(strlen_ansii, self.adj)
            justfunc = ut.partial(justify_ansi, self.adj)
            strcols = monkey_to_str_columns(self)
            space = 1
            lists = strcols
            str_ = self.adj.adjoin(space, *lists)
            print(str_)
            print(strip_ansi(str_))
        self.to_string()
        result = self.buf.getvalue()
        # strip trailing whitespace the padding compensation leaves behind
        result = '\n'.join([x.rstrip() for x in result.split('\n')])
        return result
    except Exception as ex:
        ut.printex('pandas monkey-patch is broken: {}'.format(str(ex)),
                   tb=True, iswarning=True)
        return str(df)
monkey patch to pandas to highlight the maximum value in specified cols of a row
9,091
def translate(value):
    """
    Translates a given schema from pythonic shorthand syntax to a validator.

    - a BaseValidator instance passes through unchanged
    - None -> Anything()
    - a type -> IsA(type)
    - a callable -> IsA(type(result), default=result) where result = value()
    - a 1-item list -> ListOf(translated item); [] -> IsA(list)
    - a dict -> DictOf with translated key/value validator pairs
    - any other value -> IsA(type(value), default=value)

    Raises:
        StructureSpecificationError: for a list spec with more than 1 item.
    """
    if isinstance(value, BaseValidator):
        return value
    if value is None:
        return Anything()
    if isinstance(value, type):
        return IsA(value)
    if type(value) in compat.func_types:
        # a callable spec acts as a lazy default: call once to obtain both
        # the expected type and the default value
        real_value = value()
        return IsA(type(real_value), default=real_value)
    if isinstance(value, list):
        if value == []:
            return IsA(list)
        elif len(value) == 1:
            return ListOf(translate(value[0]))
        else:
            raise StructureSpecificationError(
                'Expected a list containing exactly 1 item; '
                'got {cnt}: {spec}'.format(cnt=len(value), spec=value))
    if isinstance(value, dict):
        if not value:
            return IsA(dict)
        items = []
        for k, v in value.items():
            if isinstance(k, BaseValidator):
                k_validator = k
            else:
                k_validator = translate(k)
                # a key validator with a concrete default becomes an
                # exact-match key
                default = k_validator.get_default_for(None)
                if default is not None:
                    k_validator = Equals(default)
            v_validator = translate(v)
            items.append((k_validator, v_validator))
        return DictOf(items)
    return IsA(type(value), default=value)
Translates given schema from pythonic syntax to a validator .
9,092
def _merge ( self , value ) : if value is not None and not isinstance ( value , dict ) : return value if not self . _pairs : return { } collected = { } for k_validator , v_validator in self . _pairs : k_default = k_validator . get_default_for ( None ) if k_default is None : continue if value : v_for_this_k = value . get ( k_default ) else : v_for_this_k = None v_default = v_validator . get_default_for ( v_for_this_k ) collected . update ( { k_default : v_default } ) if value : for k , v in value . items ( ) : if k not in collected : collected [ k ] = v return collected
Returns a dictionary based on *value*, with each value recursively merged with the spec's defaults.
9,093
def handle_code(code):
    """Handle a key or a sequence of keys written inside braces."""
    keys = []
    if code in CODES:
        # A known named key, e.g. {ENTER}.
        keys.append(VirtualKeyAction(CODES[code]))
    elif len(code) == 1:
        # A single literal character escaped with braces, e.g. {{}.
        keys.append(KeyAction(code))
    elif ' ' in code:
        # "{something count}" — either a timed pause or a repeated key/sequence.
        to_repeat, count = code.rsplit(None, 1)
        if to_repeat == "PAUSE":
            try:
                pause_time = float(count)
            except ValueError:
                raise KeySequenceError('invalid pause time %s' % count)
            keys.append(PauseAction(pause_time))
        else:
            try:
                count = int(count)
            except ValueError:
                raise KeySequenceError('invalid repetition count %s' % count)
            if to_repeat in CODES:
                keys.extend([VirtualKeyAction(CODES[to_repeat])] * count)
            else:
                # Not a named key: parse it as a nested key sequence.
                parsed = parse_keys(to_repeat)
                repeated = parsed * count if isinstance(parsed, list) else [parsed] * count
                keys.extend(repeated)
    else:
        # NOTE(review): every other failure in this function raises
        # KeySequenceError; RuntimeError is preserved here to keep the
        # original behavior for callers that may catch it.
        raise RuntimeError("Unknown code: %s" % code)
    return keys
Handle a key or sequence of keys in braces
9,094
def parse_keys(string, with_spaces=False, with_tabs=False,
               with_newlines=False, modifiers=None):
    """
    Return the parsed keys: a list of key-action objects built from a
    SendKeys-style string.

    Args:
        string: the key-sequence text, e.g. "^a{ENTER}".
        with_spaces: if False, literal spaces are skipped.
        with_tabs: if False, literal tabs are skipped.
        with_newlines: if False, literal newlines are skipped.
        modifiers: list of currently-held modifier vk codes; mutated in
            place and shared with recursive calls for "(...)" groups.

    Raises:
        KeySequenceError: on unbalanced parentheses or braces.
    """
    keys = []
    if not modifiers:
        modifiers = []

    index = 0
    while index < len(string):
        c = string[index]
        index += 1

        if c in MODIFIERS.keys():
            # Press (but do not release) a modifier such as +, ^ or %.
            modifier = MODIFIERS[c]
            modifiers.append(modifier)
            keys.append(VirtualKeyAction(modifier, up=False))
            if DEBUG:
                print("MODS+", modifiers)
            continue
        elif c == "(":
            # Recurse on the parenthesized group, keeping modifiers held.
            end_pos = string.find(")", index)
            if end_pos == -1:
                raise KeySequenceError('`)` not found')
            keys.extend(parse_keys(string[index:end_pos], modifiers=modifiers))
            index = end_pos + 1
        elif c == "{":
            # Search from index + 1 so that "{}}" can escape a brace.
            end_pos = string.find("}", index + 1)
            if end_pos == -1:
                raise KeySequenceError('`}` not found')
            code = string[index:end_pos]
            index = end_pos + 1
            keys.extend(handle_code(code))
        elif c == ')':
            raise KeySequenceError('`)` should be preceeded by `(`')
        elif c == '}':
            raise KeySequenceError('`}` should be preceeded by `{`')
        else:
            # Optionally skip whitespace characters.
            if (c == ' ' and not with_spaces or
                    c == '\t' and not with_tabs or
                    c == '\n' and not with_newlines):
                continue
            if c in ('~', '\n'):
                # '~' and newline both mean ENTER.
                keys.append(VirtualKeyAction(CODES["ENTER"]))
            elif modifiers:
                # While modifiers are held, send the scan-code form.
                keys.append(EscapedKeyAction(c))
            else:
                keys.append(KeyAction(c))
            # Release the held modifiers after each ordinary key.
            # NOTE(review): the flattened source is ambiguous about the
            # nesting of the two release loops; this placement follows the
            # conventional SendKeys structure — confirm against upstream.
            while modifiers:
                if DEBUG:
                    print("MODS-", modifiers)
                keys.append(VirtualKeyAction(modifiers.pop(), down=False))

    # Just in case any modifiers were left pressed — release them.
    while modifiers:
        keys.append(VirtualKeyAction(modifiers.pop(), down=False))

    return keys
Return the parsed keys
9,095
def SendKeys(keys,
             pause=0.05,
             with_spaces=False,
             with_tabs=False,
             with_newlines=False,
             turn_off_numlock=True):
    """Parse the key-sequence string and type each resulting action."""
    # NOTE(review): turn_off_numlock is accepted for API compatibility but
    # is not used anywhere in this function — TODO confirm whether numlock
    # handling was intended.
    for action in parse_keys(keys, with_spaces, with_tabs, with_newlines):
        action.Run()
        time.sleep(pause)
Parse the keys and type them
9,096
def main():
    """Send some test strings to exercise the key parser and sender."""
    # BUGFIX: the original body began with `actions = SendKeys(actions, ...)`,
    # which referenced `actions` before any assignment and therefore raised
    # NameError on every call. That dead, broken prelude has been removed;
    # the demo loop over test_strings below is the intended behavior.
    test_strings = [
        # Implicit literal concatenation: leading newline + first string.
        "\n"
        "(aa)some text\n",
        "(a)some{ }text\n",
        "(b)some{{}text\n",
        "(c)some{+}text\n",
        "(d)so%me{ab 4}text",
        "(e)so%me{LEFT 4}text",
        "(f)so%me{ENTER 4}text",
        "(g)so%me{^aa 4}text",
        "(h)some +(asdf)text",
        "(i)some %^+(asdf)text",
        "(j)some %^+a text+",
        "(k)some %^+a tex+{&}",
        "(l)some %^+a tex+(dsf)",
        "",
    ]
    for s in test_strings:
        print(repr(s))
        keys = parse_keys(s, with_newlines=True)
        print(keys)
        # Actually type each parsed action, pausing briefly between keys.
        for k in keys:
            k.Run()
            time.sleep(.1)
        print()
Send some test strings
9,097
def GetInput(self):
    """
    Build the ctypes INPUT array for this key action.

    Returns:
        A ctypes array of one INPUT record (press-only or release-only),
        or two records when the action is both press and release.
    """
    actions = 1
    # Press-and-release needs two INPUT records: one down, one up.
    if self.up and self.down:
        actions = 2

    inputs = (INPUT * actions)()

    # vk / scan / flags come from the concrete action subclass.
    vk, scan, flags = self._get_key_info()

    for inp in inputs:
        inp.type = INPUT_KEYBOARD
        # `_` is the anonymous union inside INPUT; `ki` is its KEYBDINPUT arm.
        inp._.ki.wVk = vk
        inp._.ki.wScan = scan
        inp._.ki.dwFlags |= flags

    # The last record carries the key-up flag when a release is requested.
    if self.up:
        inputs[-1]._.ki.dwFlags |= KEYEVENTF_KEYUP

    return inputs
Build the INPUT structure for the action
9,098
def Run(self):
    """Execute the action by injecting its INPUT records via SendInput."""
    key_inputs = self.GetInput()
    n_inputs = len(key_inputs)
    # SendInput returns the number of events successfully inserted.
    return SendInput(n_inputs, ctypes.byref(key_inputs), ctypes.sizeof(INPUT))
Execute the action
9,099
def _get_down_up_string ( self ) : down_up = "" if not ( self . down and self . up ) : if self . down : down_up = "down" elif self . up : down_up = "up" return down_up
Return a string that shows whether the key action is a key-down or a key-up.