idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
9,600 | def ensure_newline ( self ) : DECTCEM_SHOW = '\033[?25h' AT_END = DECTCEM_SHOW + '\n' if not self . _cursor_at_newline : self . write ( AT_END ) self . _cursor_at_newline = True | use before any custom printing when using the progress iter to ensure your print statement starts on a new line instead of at the end of a progress line |
9,601 | def _get_timethresh_heuristics ( self ) : if self . length > 1E5 : time_thresh = 2.5 elif self . length > 1E4 : time_thresh = 2.0 elif self . length > 1E3 : time_thresh = 1.0 else : time_thresh = 0.5 return time_thresh | reasonably decent heuristics for how much time to wait before updating progress . |
9,602 | def load_code ( name , base_path = None , recurse = False ) : if '/' in name : return load_location ( name , base_path , module = False ) return importer . import_code ( name , base_path , recurse = recurse ) | Load executable code from a URL or a path |
9,603 | def load ( name , base_path = None ) : if '/' in name : return load_location ( name , base_path , module = True ) return importer . import_symbol ( name , base_path ) | Load a module from a URL or a path |
9,604 | def extend ( path = None , cache = None ) : if path is None : path = config . PATH try : path = path . split ( ':' ) except : pass sys . path . extend ( [ library . to_path ( p , cache ) for p in path ] ) | Extend sys . path by a list of git paths . |
9,605 | def extender ( path = None , cache = None ) : old_path = sys . path [ : ] extend ( path , cache = None ) try : yield finally : sys . path = old_path | A context that temporarily extends sys . path and reverts it after the context is complete . |
9,606 | def add ( self , child ) : if isinstance ( child , Case ) : self . add_case ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the conditional derived variable . |
9,607 | def add ( self , child ) : if isinstance ( child , Action ) : self . add_action ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the event handler . |
9,608 | def add ( self , child ) : if isinstance ( child , StateVariable ) : self . add_state_variable ( child ) elif isinstance ( child , DerivedVariable ) : self . add_derived_variable ( child ) elif isinstance ( child , ConditionalDerivedVariable ) : self . add_conditional_derived_variable ( child ) elif isinstance ( child , TimeDerivative ) : self . add_time_derivative ( child ) elif isinstance ( child , EventHandler ) : self . add_event_handler ( child ) elif isinstance ( child , KineticScheme ) : self . add_kinetic_scheme ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the behavioral object . |
9,609 | def add ( self , child ) : if isinstance ( child , Regime ) : self . add_regime ( child ) else : Behavioral . add ( self , child ) | Adds a typed child object to the dynamics object . |
9,610 | def create_bioset_lookup ( lookupdb , spectrafns , set_names ) : unique_setnames = set ( set_names ) lookupdb . store_biosets ( ( ( x , ) for x in unique_setnames ) ) set_id_map = lookupdb . get_setnames ( ) mzmlfiles = ( ( os . path . basename ( fn ) , set_id_map [ setname ] ) for fn , setname in zip ( spectrafns , set_names ) ) lookupdb . store_mzmlfiles ( mzmlfiles ) lookupdb . index_biosets ( ) | Fills lookup database with biological set names |
9,611 | def get_modpath_from_modname ( modname , prefer_pkg = False , prefer_main = False ) : from os . path import dirname , basename , join , exists initname = '__init__.py' mainname = '__main__.py' if modname in sys . modules : modpath = sys . modules [ modname ] . __file__ . replace ( '.pyc' , '.py' ) else : import pkgutil loader = pkgutil . find_loader ( modname ) modpath = loader . filename . replace ( '.pyc' , '.py' ) if '.' not in basename ( modpath ) : modpath = join ( modpath , initname ) if prefer_pkg : if modpath . endswith ( initname ) or modpath . endswith ( mainname ) : modpath = dirname ( modpath ) if prefer_main : if modpath . endswith ( initname ) : main_modpath = modpath [ : - len ( initname ) ] + mainname if exists ( main_modpath ) : modpath = main_modpath return modpath | Same as get_modpath but doesn't import directly |
9,612 | def check_module_installed ( modname ) : import pkgutil if '.' in modname : parts = modname . split ( '.' ) base = parts [ 0 ] submods = parts [ 1 : ] loader = pkgutil . find_loader ( base ) if loader is not None : submods return True loader = pkgutil . find_loader ( modname ) is_installed = loader is not None return is_installed | Check if a python module is installed without attempting to import it . Note that if modname indicates a child module the parent module is always loaded . |
9,613 | def import_module_from_fpath ( module_fpath ) : r from os . path import basename , splitext , isdir , join , exists , dirname , split import platform if isdir ( module_fpath ) : module_fpath = join ( module_fpath , '__init__.py' ) print ( 'module_fpath = {!r}' . format ( module_fpath ) ) if not exists ( module_fpath ) : raise ImportError ( 'module_fpath={!r} does not exist' . format ( module_fpath ) ) python_version = platform . python_version ( ) modname = splitext ( basename ( module_fpath ) ) [ 0 ] if modname == '__init__' : modname = split ( dirname ( module_fpath ) ) [ 1 ] if util_inject . PRINT_INJECT_ORDER : if modname not in sys . argv : util_inject . noinject ( modname , N = 2 , via = 'ut.import_module_from_fpath' ) if python_version . startswith ( '2.7' ) : import imp module = imp . load_source ( modname , module_fpath ) elif python_version . startswith ( '3' ) : import importlib . machinery loader = importlib . machinery . SourceFileLoader ( modname , module_fpath ) module = loader . load_module ( ) else : raise AssertionError ( 'invalid python version={!r}' . format ( python_version ) ) return module | r imports module from a file path |
9,614 | def print_locals ( * args , ** kwargs ) : from utool import util_str from utool import util_dbg from utool import util_dict locals_ = util_dbg . get_parent_frame ( ) . f_locals keys = kwargs . get ( 'keys' , None if len ( args ) == 0 else [ ] ) to_print = { } for arg in args : varname = util_dbg . get_varname_from_locals ( arg , locals_ ) to_print [ varname ] = arg if keys is not None : to_print . update ( util_dict . dict_take ( locals_ , keys ) ) if not to_print : to_print = locals_ locals_str = util_str . repr4 ( to_print ) print ( locals_str ) | Prints local variables in function . |
9,615 | def _extract_archive ( archive_fpath , archive_file , archive_namelist , output_dir , force_commonprefix = True , prefix = None , dryrun = False , verbose = not QUIET , overwrite = None ) : if prefix is not None : output_dir = join ( output_dir , prefix ) util_path . ensurepath ( output_dir ) archive_basename , ext = split_archive_ext ( basename ( archive_fpath ) ) if force_commonprefix and commonprefix ( archive_namelist ) == '' : output_dir = join ( output_dir , archive_basename ) util_path . ensurepath ( output_dir ) for member in archive_namelist : ( dname , fname ) = split ( member ) dpath = join ( output_dir , dname ) util_path . ensurepath ( dpath ) if verbose : print ( '[utool] Unarchive ' + fname + ' in ' + dpath ) if not dryrun : if overwrite is False : if exists ( join ( output_dir , member ) ) : continue archive_file . extract ( member , path = output_dir ) return output_dir | archive_fpath = zip_fpath archive_file = zip_file |
9,616 | def open_url_in_browser ( url , browsername = None , fallback = False ) : r import webbrowser print ( '[utool] Opening url=%r in browser' % ( url , ) ) if browsername is None : browser = webbrowser . open ( url ) else : browser = get_prefered_browser ( pref_list = [ browsername ] , fallback = fallback ) return browser . open ( url ) | r Opens a url in the specified or default browser |
9,617 | def url_read ( url , verbose = True ) : r if url . find ( '://' ) == - 1 : url = 'http://' + url if verbose : print ( 'Reading data from url=%r' % ( url , ) ) try : file_ = _urllib . request . urlopen ( url ) except IOError : raise data = file_ . read ( ) file_ . close ( ) return data | r Directly reads data from url |
9,618 | def url_read_text ( url , verbose = True ) : r data = url_read ( url , verbose ) text = data . decode ( 'utf8' ) return text | r Directly reads text data from url |
9,619 | def clean_dropbox_link ( dropbox_url ) : cleaned_url = dropbox_url . replace ( 'www.dropbox' , 'dl.dropbox' ) postfix_list = [ '?dl=0' ] for postfix in postfix_list : if cleaned_url . endswith ( postfix ) : cleaned_url = cleaned_url [ : - 1 * len ( postfix ) ] return cleaned_url | Dropbox links should be en masse downloaded from dl . dropbox |
9,620 | def grab_selenium_chromedriver ( redownload = False ) : r import utool as ut import os import stat chromedriver_dpath = ut . ensuredir ( ut . truepath ( '~/bin' ) ) chromedriver_fpath = join ( chromedriver_dpath , 'chromedriver' ) if not ut . checkpath ( chromedriver_fpath ) or redownload : assert chromedriver_dpath in os . environ [ 'PATH' ] . split ( os . pathsep ) if ut . LINUX and ut . util_cplat . is64bit_python ( ) : import requests rsp = requests . get ( 'http://chromedriver.storage.googleapis.com/LATEST_RELEASE' , timeout = TIMEOUT ) assert rsp . status_code == 200 url = 'http://chromedriver.storage.googleapis.com/' + rsp . text . strip ( ) + '/chromedriver_linux64.zip' ut . grab_zipped_url ( url , download_dir = chromedriver_dpath , redownload = True ) else : raise AssertionError ( 'unsupported chrome driver getter script' ) if not ut . WIN32 : st = os . stat ( chromedriver_fpath ) os . chmod ( chromedriver_fpath , st . st_mode | stat . S_IEXEC ) ut . assert_exists ( chromedriver_fpath ) os . environ [ 'webdriver.chrome.driver' ] = chromedriver_fpath return chromedriver_fpath | r Automatically download selenium chrome driver if needed |
9,621 | def grab_selenium_driver ( driver_name = None ) : from selenium import webdriver if driver_name is None : driver_name = 'firefox' if driver_name . lower ( ) == 'chrome' : grab_selenium_chromedriver ( ) return webdriver . Chrome ( ) elif driver_name . lower ( ) == 'firefox' : return webdriver . Firefox ( ) else : raise AssertionError ( 'unknown name = %r' % ( driver_name , ) ) | pip install selenium - U |
9,622 | def grab_file_url ( file_url , appname = 'utool' , download_dir = None , delay = None , spoof = False , fname = None , verbose = True , redownload = False , check_hash = False ) : r file_url = clean_dropbox_link ( file_url ) if fname is None : fname = basename ( file_url ) if download_dir is None : download_dir = util_cplat . get_app_cache_dir ( appname ) fpath = join ( download_dir , fname ) if check_hash : if isinstance ( check_hash , ( list , tuple ) ) : hash_list = check_hash else : hash_list = [ 'md5' ] hash_remote , hash_tag_remote = grab_file_remote_hash ( file_url , hash_list , verbose = verbose ) hash_list = [ hash_tag_remote ] hash_local , hash_tag_local = get_file_local_hash ( fpath , hash_list , verbose = verbose ) if verbose : print ( '[utool] Pre Local Hash: %r' % ( hash_local , ) ) print ( '[utool] Pre Remote Hash: %r' % ( hash_remote , ) ) if hash_remote is None : check_hash = False elif hash_local is None : if verbose : print ( '[utool] Remote hash provided but local hash missing, redownloading.' ) redownload = True elif hash_local == hash_remote : assert hash_tag_local == hash_tag_remote , ( 'hash tag disagreement' ) else : if verbose : print ( '[utool] Both hashes provided, but they disagree, redownloading.' ) redownload = True util_path . ensurepath ( download_dir ) if redownload or not exists ( fpath ) : if verbose : print ( '[utool] Downloading file %s' % fpath ) if delay is not None : print ( '[utool] delay download by %r seconds' % ( delay , ) ) time . sleep ( delay ) download_url ( file_url , fpath , spoof = spoof ) else : if verbose : print ( '[utool] Already have file %s' % fpath ) util_path . assert_exists ( fpath ) if check_hash : hash_fpath = '%s.%s' % ( fpath , hash_tag_remote , ) with open ( hash_fpath , 'w' ) as hash_file : hash_file . 
write ( hash_remote ) hash_local , hash_tag_local = get_file_local_hash ( fpath , hash_list , verbose = verbose ) if verbose : print ( '[utool] Post Local Hash: %r' % ( hash_local , ) ) assert hash_local == hash_remote , 'Post-download hash disagreement' assert hash_tag_local == hash_tag_remote , 'Post-download hash tag disagreement' return fpath | r Downloads a file and returns the local path of the file . |
9,623 | def grab_zipped_url ( zipped_url , ensure = True , appname = 'utool' , download_dir = None , force_commonprefix = True , cleanup = False , redownload = False , spoof = False ) : r zipped_url = clean_dropbox_link ( zipped_url ) zip_fname = split ( zipped_url ) [ 1 ] data_name = split_archive_ext ( zip_fname ) [ 0 ] if download_dir is None : download_dir = util_cplat . get_app_cache_dir ( appname ) data_dir = join ( download_dir , data_name ) if ensure or redownload : if redownload : util_path . remove_dirs ( data_dir ) util_path . ensurepath ( download_dir ) if not exists ( data_dir ) or redownload : zip_fpath = realpath ( join ( download_dir , zip_fname ) ) if not exists ( zip_fpath ) or redownload : download_url ( zipped_url , zip_fpath , spoof = spoof ) unarchive_file ( zip_fpath , force_commonprefix ) if cleanup : util_path . delete ( zip_fpath ) if cleanup : util_path . assert_exists ( data_dir ) return util_path . unixpath ( data_dir ) | r downloads and unzips the url |
9,624 | def scp_pull ( remote_path , local_path = '.' , remote = 'localhost' , user = None ) : r import utool as ut if user is not None : remote_uri = user + '@' + remote + ':' + remote_path else : remote_uri = remote + ':' + remote_path scp_exe = 'scp' scp_args = ( scp_exe , '-r' , remote_uri , local_path ) ut . cmd ( scp_args ) | r wrapper for scp |
9,625 | def list_remote ( remote_uri , verbose = False ) : remote_uri1 , remote_dpath = remote_uri . split ( ':' ) if not remote_dpath : remote_dpath = '.' import utool as ut out = ut . cmd ( 'ssh' , remote_uri1 , 'ls -l %s' % ( remote_dpath , ) , verbose = verbose ) import re split_lines = [ re . split ( r'\s+' , t ) for t in out [ 0 ] . split ( '\n' ) ] paths = [ ' ' . join ( t2 [ 8 : ] ) for t2 in split_lines if len ( t2 ) > 8 ] return paths | remote_uri = user |
9,626 | def rsync ( src_uri , dst_uri , exclude_dirs = [ ] , port = 22 , dryrun = False ) : r from utool import util_cplat rsync_exe = 'rsync' rsync_options = '-avhzP' rsync_options += ' -e "ssh -p %d"' % ( port , ) if len ( exclude_dirs ) > 0 : exclude_tup = [ '--exclude ' + dir_ for dir_ in exclude_dirs ] exclude_opts = ' ' . join ( exclude_tup ) rsync_options += ' ' + exclude_opts cmdtuple = ( rsync_exe , rsync_options , src_uri , dst_uri ) cmdstr = ' ' . join ( cmdtuple ) print ( '[rsync] src_uri = %r ' % ( src_uri , ) ) print ( '[rsync] dst_uri = %r ' % ( dst_uri , ) ) print ( '[rsync] cmdstr = %r' % cmdstr ) print ( cmdstr ) util_cplat . cmd ( cmdstr , dryrun = dryrun ) | r Wrapper for rsync |
9,627 | def get_cache ( self , namespace , query_hash , length , start , end ) : query = 'SELECT start, value FROM gauged_cache WHERE namespace = ? ' 'AND hash = ? AND length = ? AND start BETWEEN ? AND ?' cursor = self . cursor cursor . execute ( query , ( namespace , query_hash , length , start , end ) ) return tuple ( cursor . fetchall ( ) ) | Get a cached value for the specified date range and query |
9,628 | def review ( cls , content , log , parent , window_icon ) : dlg = DlgReview ( content , log , parent , window_icon ) if dlg . exec_ ( ) : return dlg . ui . edit_main . toPlainText ( ) , dlg . ui . edit_log . toPlainText ( ) return None , None | Reviews the final bug report . |
9,629 | def get_version ( ) : version_desc = open ( os . path . join ( os . path . abspath ( APISettings . VERSION_FILE ) ) ) version_file = version_desc . read ( ) try : version = re . search ( r"version=['\"]([^'\"]+)['\"]" , version_file ) . group ( 1 ) return version except FileNotFoundError : Shell . fail ( 'File not found!' ) raise FileNotFoundError except ValueError : Shell . fail ( 'Version not found in file ' + version_file + '!' ) raise ValueError finally : version_desc . close ( ) | Return version from setup . py |
9,630 | def set_version ( old_version , new_version ) : try : if APISettings . DEBUG : Shell . debug ( '* ' + old_version + ' + new_version ) return True for line in fileinput . input ( os . path . abspath ( APISettings . VERSION_FILE ) , inplace = True ) : print ( line . replace ( old_version , new_version ) , end = '' ) Shell . success ( '* ' + old_version + ' + new_version ) except FileNotFoundError : Shell . warn ( 'File not found!' ) | Write new version into VERSION_FILE |
9,631 | def set_major ( self ) : old_version = self . get_version ( ) new_version = str ( int ( old_version . split ( '.' , 5 ) [ 0 ] ) + 1 ) + '.0.0' self . set_version ( old_version , new_version ) | Increment the major number of project |
9,632 | def set_minor ( self ) : old_version = self . get_version ( ) new_version = str ( int ( old_version . split ( '.' , 5 ) [ 0 ] ) ) + '.' + str ( int ( old_version . split ( '.' , 5 ) [ 1 ] ) + 1 ) + '.0' self . set_version ( old_version , new_version ) | Increment the minor number of project |
9,633 | def set_patch ( self , pre_release_tag = '' ) : current_version = self . get_version ( ) current_patch = self . get_patch_version ( current_version ) current_pre_release_tag = self . get_current_pre_release_tag ( current_patch ) current_RELEASE_SEPARATOR = self . get_current_RELEASE_SEPARATOR ( current_patch ) new_patch = '' if pre_release_tag : if current_pre_release_tag : new_patch = str ( current_patch . split ( current_pre_release_tag , 2 ) [ 0 ] ) + pre_release_tag if pre_release_tag == current_pre_release_tag : new_patch += str ( int ( current_patch . split ( current_pre_release_tag , 2 ) [ 1 ] ) + 1 ) else : new_patch += '0' else : new_patch = str ( int ( current_patch ) + 1 ) + APISettings . RELEASE_SEPARATOR + pre_release_tag + '0' else : if current_RELEASE_SEPARATOR : new_patch = str ( int ( current_patch . split ( current_RELEASE_SEPARATOR , 2 ) [ 0 ] ) + 1 ) elif current_pre_release_tag : new_patch = str ( int ( current_patch . split ( current_pre_release_tag , 2 ) [ 0 ] ) + 1 ) else : new_patch = str ( int ( current_patch ) + 1 ) new_version = str ( int ( current_version . split ( '.' , 5 ) [ 0 ] ) ) + '.' + str ( int ( current_version . split ( '.' , 5 ) [ 1 ] ) ) + '.' + str ( new_patch ) self . set_version ( current_version , new_version ) | Increment the patch number of project |
9,634 | def flush ( self ) : ( slice_ , self . __buffer ) = ( self . __buffer , '' ) self . __size = 0 return slice_ | Return all buffered data and clear the stack . |
9,635 | def __send_hello ( self ) : _logger . debug ( "Saying hello: [%s]" , self ) self . __c . send ( nsq . config . protocol . MAGIC_IDENTIFIER ) | Initiate the handshake . |
9,636 | def __sender ( self ) : while ( self . __ignore_quit is True or self . __nice_quit_ev . is_set ( ) is False ) and self . __force_quit_ev . is_set ( ) is False : try : ( command , parts ) = self . __outgoing_q . get ( block = False ) except gevent . queue . Empty : gevent . sleep ( nsq . config . client . WRITE_THROTTLE_S ) else : _logger . debug ( "Dequeued outgoing command ((%d) remaining): " "[%s]" , self . __outgoing_q . qsize ( ) , self . __distill_command_name ( command ) ) self . __send_command_primitive ( command , parts ) self . __send_thread_ev . set ( ) | Send - loop . |
9,637 | def __receiver ( self ) : while ( self . __ignore_quit is True or self . __nice_quit_ev . is_set ( ) is False ) and self . __force_quit_ev . is_set ( ) is False : try : self . __read_frame ( ) except errno . EAGAIN : gevent . sleep ( nsq . config . client . READ_THROTTLE_S ) self . __receive_thread_ev . set ( ) | Receive - loop . |
9,638 | def run ( self ) : while self . __nice_quit_ev . is_set ( ) is False : self . __connect ( ) _logger . info ( "Connection re-connect loop has terminated: %s" , self . __mc ) | Connect the server and maintain the connection . This shall not return until a connection has been determined to absolutely not be available . |
9,639 | def save ( obj , filename , protocol = 4 ) : with open ( filename , 'wb' ) as f : pickle . dump ( obj , f , protocol = protocol ) | Serialize an object to disk using pickle protocol . |
9,640 | def load_json ( filename , ** kwargs ) : with open ( filename , 'r' , encoding = 'utf-8' ) as f : return json . load ( f , ** kwargs ) | Load a JSON object from the specified file . |
9,641 | def save_json ( obj , filename , ** kwargs ) : with open ( filename , 'w' , encoding = 'utf-8' ) as f : json . dump ( obj , f , ** kwargs ) | Save an object as a JSON file . |
9,642 | def load_lines ( filename ) : with open ( filename , 'r' , encoding = 'utf-8' ) as f : return [ line . rstrip ( '\n' ) for line in f . readlines ( ) ] | Load a text file as an array of lines . |
9,643 | def save_lines ( lines , filename ) : with open ( filename , 'w' , encoding = 'utf-8' ) as f : f . write ( '\n' . join ( lines ) ) | Save an array of lines to a file . |
9,644 | def add ( self , child ) : if isinstance ( child , Component ) : self . add_child ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the component . |
9,645 | def write_peps ( self , peps , reverse_seqs ) : if reverse_seqs : peps = [ ( x [ 0 ] [ : : - 1 ] , ) for x in peps ] cursor = self . get_cursor ( ) cursor . executemany ( 'INSERT INTO known_searchspace(seqs) VALUES (?)' , peps ) self . conn . commit ( ) | Writes peps to db . We can reverse to be able to look up peptides that have some amino acids missing at the N - terminal . This way we can still use the index . |
9,646 | def send_http_request ( self , app : str , service : str , version : str , method : str , entity : str , params : dict ) : host , port , node_id , service_type = self . _registry_client . resolve ( service , version , entity , HTTP ) url = 'http://{}:{}{}' . format ( host , port , params . pop ( 'path' ) ) http_keys = [ 'data' , 'headers' , 'cookies' , 'auth' , 'allow_redirects' , 'compress' , 'chunked' ] kwargs = { k : params [ k ] for k in http_keys if k in params } query_params = params . pop ( 'params' , { } ) if app is not None : query_params [ 'app' ] = app query_params [ 'version' ] = version query_params [ 'service' ] = service response = yield from aiohttp . request ( method , url , params = query_params , ** kwargs ) return response | A convenience method that allows you to send a well formatted http request to another service |
9,647 | def install_except_hook ( except_hook = _hooks . except_hook ) : if not _backends : raise ValueError ( 'no backends found, you must at least install one ' 'backend before calling this function' ) global _except_hook _except_hook = _hooks . QtExceptHook ( except_hook ) | Install an except hook that will show the crash report dialog when an unhandled exception has occured . |
9,648 | def show_report_dialog ( window_title = 'Report an issue...' , window_icon = None , traceback = None , issue_title = '' , issue_description = '' , parent = None , modal = None , include_log = True , include_sys_info = True ) : if not _backends : raise ValueError ( 'no backends found, you must at least install one ' 'backend before calling this function' ) from . _dialogs . report import DlgReport dlg = DlgReport ( _backends , window_title = window_title , window_icon = window_icon , traceback = traceback , issue_title = issue_title , issue_description = issue_description , parent = parent , include_log = include_log , include_sys_info = include_sys_info ) if modal : dlg . show ( ) return dlg else : dlg . exec_ ( ) | Show the issue report dialog manually . |
9,649 | def middleware ( self , args ) : if self . url [ ( len ( self . url ) - 1 ) ] == ( self . url_ , self . controller , dict ( method = self . method , request_type = self . request_type , middleware = None ) ) : self . url . pop ( ) self . url . append ( ( self . url_ , self . controller , dict ( method = self . method , request_type = self . request_type , middleware = args ) ) ) return self | Appends a Middleware to the route which is to be executed before the route runs |
9,650 | def get ( self , url , controller ) : self . request_type = 'GET' controller_class , controller_method = self . __return_controller__ ( controller ) self . controller = controller_class self . method = controller_method self . url_ = url self . url . append ( ( url , controller_class , dict ( method = controller_method , request_type = self . request_type , middleware = None ) ) ) return self | Gets the Controller and adds the route controller and method to the url list for GET request |
9,651 | def to_bytes ( value ) : if isinstance ( value , unicode ) : return value . encode ( 'utf8' ) elif not isinstance ( value , str ) : return str ( value ) return value | Get a byte array representing the value |
9,652 | def table_repr ( columns , rows , data , padding = 2 ) : padding = ' ' * padding column_lengths = [ len ( column ) for column in columns ] for row in rows : for i , column in enumerate ( columns ) : item = str ( data [ row ] [ column ] ) column_lengths [ i ] = max ( len ( item ) , column_lengths [ i ] ) max_row_length = max ( len ( row ) for row in rows ) if len ( rows ) else 0 table_row = ' ' * max_row_length for i , column in enumerate ( columns ) : table_row += padding + column . rjust ( column_lengths [ i ] ) table_rows = [ table_row ] for row in rows : table_row = row . rjust ( max_row_length ) for i , column in enumerate ( columns ) : item = str ( data [ row ] [ column ] ) table_row += padding + item . rjust ( column_lengths [ i ] ) table_rows . append ( table_row ) return '\n' . join ( table_rows ) | Generate a table for cli output |
9,653 | def get_proteins_for_db ( fastafn ) : objects = { } for record in parse_fasta ( fastafn ) : objects [ parse_protein_identifier ( record ) ] = record return ( ( ( acc , ) for acc in list ( objects ) ) , ( ( acc , str ( record . seq ) ) for acc , record in objects . items ( ) ) , ( ( acc , get_uniprot_evidence_level ( record . description ) ) for acc , record in objects . items ( ) ) ) | Runs through fasta file and returns proteins accession nrs sequences and evidence levels for storage in lookup DB . Duplicate accessions in fasta are accepted and removed by keeping only the last one . |
9,654 | def get_uniprot_evidence_level ( header ) : header = header . split ( ) for item in header : item = item . split ( '=' ) try : if item [ 0 ] == 'PE' : return 5 - int ( item [ 1 ] ) except IndexError : continue return - 1 | Returns uniprot protein existence evidence level for a fasta header . Evidence levels are 1 - 5 but we return 5 - x since sorting still demands that higher is better . |
9,655 | def run ( self ) : self . pre_run ( ) first = True while self . runnable : self . pre_call_message ( ) if first : self . pre_first_call_message ( ) message , payload = self . listener . get ( ) getattr ( self , message ) ( payload ) if first : first = False self . post_first_call_message ( ) self . post_call_message ( ) self . post_run ( ) | Run our loop and any defined hooks ... |
9,656 | def count_multiplicities ( times , tmax = 20 ) : n = times . shape [ 0 ] mtp = np . ones ( n , dtype = '<i4' ) cid = np . zeros ( n , '<i4' ) idx0 = 0 _mtp = 1 _cid = 0 t0 = times [ idx0 ] for i in range ( 1 , n ) : dt = times [ i ] - t0 if dt > tmax : mtp [ idx0 : i ] = _mtp cid [ idx0 : i ] = _cid _mtp = 0 _cid += 1 idx0 = i t0 = times [ i ] _mtp += 1 if i == n - 1 : mtp [ idx0 : ] = _mtp cid [ idx0 : ] = _cid break return mtp , cid | Calculate an array of multiplicities and corresponding coincidence IDs |
9,657 | def build_machine ( lines ) : if lines == [ ] : raise SyntaxError ( 'Empty file' ) else : machine = Machine ( lines [ 0 ] . split ( ) ) for line in lines [ 1 : ] : if line . strip ( ) != '' : machine . add_state ( line ) machine . check ( ) return machine | Build machine from list of lines . |
9,658 | def add_state ( self , string ) : parsed_string = string . split ( ) if len ( parsed_string ) > 0 : state , rules = parsed_string [ 0 ] , parsed_string [ 1 : ] if len ( rules ) != len ( self . alphabet ) : raise SyntaxError ( 'Wrong count of rules ({cur}/{exp}): {string}' . format ( cur = len ( rules ) , exp = len ( self . alphabet ) , string = string ) ) if state in self . states or state == self . TERM_STATE : raise SyntaxError ( 'Double definition of state: ' + state ) else : self . states [ state ] = [ ] for rule in rules : try : self . _add_rule ( state , rule ) except SyntaxError as err : self . states . pop ( state ) raise err | Add state and rules to machine . |
9,659 | def check ( self ) : has_term = False if self . START_STATE not in self . states : raise SyntaxError ( 'Undefined start rule' ) for state in self . states : for rule in self . states [ state ] : if rule is not None : if rule [ 2 ] == self . TERM_STATE : has_term = True elif rule [ 2 ] not in self . states : raise SyntaxError ( 'Unexpected state: ' + rule [ 2 ] ) if not has_term : raise SyntaxError ( 'Missed terminate state' ) | Check semantic rules . |
9,660 | def init_tape ( self , string ) : for char in string : if char not in self . alphabet and not char . isspace ( ) and char != self . EMPTY_SYMBOL : raise RuntimeError ( 'Invalid symbol: "' + char + '"' ) self . check ( ) self . state = self . START_STATE self . head = 0 self . tape = { } for i in range ( len ( string ) ) : symbol = string [ i ] if not string [ i ] . isspace ( ) else self . EMPTY_SYMBOL self . tape [ i ] = symbol | Init system values . |
9,661 | def get_tape ( self ) : result = '' for i in range ( min ( self . tape ) , max ( self . tape ) + 1 ) : symbol = self . tape [ i ] if self . tape [ i ] != self . EMPTY_SYMBOL else ' ' result += symbol return result . strip ( ) | Get content of tape . |
9,662 | def execute_once ( self ) : symbol = self . tape . get ( self . head , self . EMPTY_SYMBOL ) index = self . alphabet . index ( symbol ) rule = self . states [ self . state ] [ index ] if rule is None : raise RuntimeError ( 'Unexpected symbol: ' + symbol ) self . tape [ self . head ] = rule [ 0 ] if rule [ 1 ] == 'L' : self . head -= 1 elif rule [ 1 ] == 'R' : self . head += 1 self . state = rule [ 2 ] | One step of execution . |
9,663 | def compile ( self ) : result = TEMPLATE result += 'machine = Machine(' + repr ( self . alphabet ) + ')\n' for state in self . states : repr_state = state [ 0 ] for rule in self . states [ state ] : repr_state += ' ' + ( ',' . join ( rule ) if rule is not None else '-' ) result += ( "machine.add_state({repr_state})\n" . format ( repr_state = repr ( repr_state ) ) ) result += "for line in stdin:\n" result += " print(machine.execute(line))" return result | Return python code for create and execute machine . |
9,664 | def get_missing_services ( self , services ) : required_services = set ( services ) provided_services = set ( self . _services . keys ( ) ) missing_services = required_services . difference ( provided_services ) return sorted ( missing_services ) | Check if all required services are provided |
9,665 | def _drain ( self , cycles = None ) : log . info ( "Now draining..." ) if not cycles : log . info ( "No cycle count, the pipeline may be drained forever." ) if self . calibration : log . info ( "Setting up the detector calibration." ) for module in self . modules : module . detector = self . calibration . get_detector ( ) try : while not self . _stop : cycle_start = timer ( ) cycle_start_cpu = process_time ( ) log . debug ( "Pumping blob #{0}" . format ( self . _cycle_count ) ) self . blob = Blob ( ) for module in self . modules : if self . blob is None : log . debug ( "Skipping {0}, due to empty blob." . format ( module . name ) ) continue if module . only_if and not module . only_if . issubset ( set ( self . blob . keys ( ) ) ) : log . debug ( "Skipping {0}, due to missing required key" "'{1}'." . format ( module . name , module . only_if ) ) continue if ( self . _cycle_count + 1 ) % module . every != 0 : log . debug ( "Skipping {0} (every {1} iterations)." . format ( module . name , module . every ) ) continue if module . blob_keys is not None : blob_to_send = Blob ( { k : self . blob [ k ] for k in module . blob_keys if k in self . blob } ) else : blob_to_send = self . blob log . debug ( "Processing {0} " . format ( module . name ) ) start = timer ( ) start_cpu = process_time ( ) new_blob = module ( blob_to_send ) if self . timeit or module . timeit : self . _timeit [ module ] [ 'process' ] . append ( timer ( ) - start ) self . _timeit [ module ] [ 'process_cpu' ] . append ( process_time ( ) - start_cpu ) if module . blob_keys is not None : if new_blob is not None : for key in new_blob . keys ( ) : self . blob [ key ] = new_blob [ key ] else : self . blob = new_blob self . _timeit [ 'cycles' ] . append ( timer ( ) - cycle_start ) self . _timeit [ 'cycles_cpu' ] . append ( process_time ( ) - cycle_start_cpu ) self . _cycle_count += 1 if cycles and self . _cycle_count >= cycles : raise StopIteration except StopIteration : log . info ( "Nothing left to pump through." ) return self . finish ( ) | Activate the pump and let the flow go .
9,666 | def _check_service_requirements ( self ) : missing = self . services . get_missing_services ( self . required_services . keys ( ) ) if missing : self . log . critical ( "Following services are required and missing: {}" . format ( ', ' . join ( missing ) ) ) return False return True | Final comparison of provided and required modules |
9,667 | def drain ( self , cycles = None ) : if not self . _check_service_requirements ( ) : self . init_timer . stop ( ) return self . finish ( ) if self . anybar : self . anybar . change ( "orange" ) self . init_timer . stop ( ) log . info ( "Trapping CTRL+C and starting to drain." ) signal . signal ( signal . SIGINT , self . _handle_ctrl_c ) with ignored ( KeyboardInterrupt ) : return self . _drain ( cycles ) | Execute _drain while trapping KeyboardInterrupt |
9,668 | def _handle_ctrl_c ( self , * args ) : if self . anybar : self . anybar . change ( "exclamation" ) if self . _stop : print ( "\nForced shutdown..." ) raise SystemExit if not self . _stop : hline = 42 * '=' print ( '\n' + hline + "\nGot CTRL+C, waiting for current cycle...\n" "Press CTRL+C again if you're in hurry!\n" + hline ) self . _stop = True | Handle the keyboard interrupts . |
9,669 | def get ( self , name , default = None ) : value = self . parameters . get ( name ) self . _processed_parameters . append ( name ) if value is None : return default return value | Return the value of the requested parameter or default if None . |
9,670 | def require ( self , name ) : value = self . get ( name ) if value is None : raise TypeError ( "{0} requires the parameter '{1}'." . format ( self . __class__ , name ) ) return value | Return the value of the requested parameter or raise an error . |
9,671 | def _check_unused_parameters ( self ) : all_params = set ( self . parameters . keys ( ) ) processed_params = set ( self . _processed_parameters ) unused_params = all_params - processed_params - RESERVED_ARGS if unused_params : self . log . warning ( "The following parameters were ignored: {}" . format ( ', ' . join ( sorted ( unused_params ) ) ) ) | Check if any of the parameters passed in are ignored |
9,672 | def open_file ( self , filename ) : try : if filename . endswith ( '.gz' ) : self . blob_file = gzip . open ( filename , 'rb' ) else : self . blob_file = open ( filename , 'rb' ) except TypeError : log . error ( "Please specify a valid filename." ) raise SystemExit except IOError as error_message : log . error ( error_message ) raise SystemExit | Open the file with filename |
9,673 | def parse ( cls , date_string ) : try : date = dateparser . parse ( date_string ) if date . tzinfo is None : date = dateparser . parse ( date_string , tzinfos = cls . tzd ) return date except Exception : raise ValueError ( "Could not parse date string!" ) | Parse any time string . Use a custom timezone matching if the original matching does not pull one out . |
9,674 | def epsg_code ( geojson ) : if isinstance ( geojson , dict ) : if 'crs' in geojson : urn = geojson [ 'crs' ] [ 'properties' ] [ 'name' ] . split ( ':' ) if 'EPSG' in urn : try : return int ( urn [ - 1 ] ) except ( TypeError , ValueError ) : return None return None | get the espg code from the crs system |
9,675 | def convert_coordinates ( coords , origin , wgs84 , wrapped ) : if isinstance ( coords , list ) or isinstance ( coords , tuple ) : try : if isinstance ( coords [ 0 ] , list ) or isinstance ( coords [ 0 ] , tuple ) : return [ convert_coordinates ( list ( c ) , origin , wgs84 , wrapped ) for c in coords ] elif isinstance ( coords [ 0 ] , float ) : c = list ( transform ( origin , wgs84 , * coords ) ) if wrapped and c [ 0 ] < - 170 : c [ 0 ] = c [ 0 ] + 360 return c except IndexError : pass return None | Convert coordinates from one crs to another |
9,676 | def to_latlon ( geojson , origin_espg = None ) : if isinstance ( geojson , dict ) : if origin_espg : code = origin_espg else : code = epsg_code ( geojson ) if code : origin = Proj ( init = 'epsg:%s' % code ) wgs84 = Proj ( init = 'epsg:4326' ) wrapped = test_wrap_coordinates ( geojson [ 'coordinates' ] , origin , wgs84 ) new_coords = convert_coordinates ( geojson [ 'coordinates' ] , origin , wgs84 , wrapped ) if new_coords : geojson [ 'coordinates' ] = new_coords try : del geojson [ 'crs' ] except KeyError : pass return geojson | Convert a given geojson to wgs84 . The original epsg must be included insde the crs tag of geojson |
9,677 | def camelcase_underscore ( name ) : s1 = re . sub ( '(.)([A-Z][a-z]+)' , r'\1_\2' , name ) return re . sub ( '([a-z0-9])([A-Z])' , r'\1_\2' , s1 ) . lower ( ) | Convert camelcase names to underscore |
9,678 | def get_tiles_list ( element ) : tiles = { } for el in element : g = ( el . findall ( './/Granules' ) or el . findall ( './/Granule' ) ) [ 0 ] name = g . attrib [ 'granuleIdentifier' ] name_parts = name . split ( '_' ) mgs = name_parts [ - 2 ] tiles [ mgs ] = name return tiles | Returns the list of all tile names from Product_Organisation element in metadata . xml |
9,679 | def metadata_to_dict ( metadata ) : tree = etree . parse ( metadata ) root = tree . getroot ( ) meta = OrderedDict ( ) keys = [ 'SPACECRAFT_NAME' , 'PRODUCT_STOP_TIME' , 'Cloud_Coverage_Assessment' , 'PROCESSING_LEVEL' , 'PRODUCT_TYPE' , 'PROCESSING_BASELINE' , 'SENSING_ORBIT_NUMBER' , 'SENSING_ORBIT_DIRECTION' , 'PRODUCT_FORMAT' , ] for key in keys : try : meta [ key . lower ( ) ] = root . findall ( './/' + key ) [ 0 ] . text except IndexError : meta [ key . lower ( ) ] = None meta [ 'product_cloud_coverage_assessment' ] = float ( meta . pop ( 'cloud_coverage_assessment' ) ) meta [ 'sensing_orbit_number' ] = int ( meta [ 'sensing_orbit_number' ] ) meta [ 'tiles' ] = get_tiles_list ( root . findall ( './/Product_Organisation' ) [ 0 ] ) if root . findall ( './/Band_List' ) : bands = root . findall ( './/Band_List' ) [ 0 ] meta [ 'band_list' ] = [ ] for b in bands : band = b . text . replace ( 'B' , '' ) if len ( band ) == 1 : band = 'B' + pad ( band , 2 ) else : band = b . text meta [ 'band_list' ] . append ( band ) else : bands = root . findall ( './/Spectral_Information_List' ) [ 0 ] meta [ 'band_list' ] = [ ] for b in bands : band = b . attrib [ 'physicalBand' ] . replace ( 'B' , '' ) if len ( band ) == 1 : band = 'B' + pad ( band , 2 ) else : band = b . attrib [ 'physicalBand' ] meta [ 'band_list' ] . append ( band ) return meta | Looks at metadata . xml file of sentinel product and extract useful keys Returns a python dict |
9,680 | def get_tile_geometry ( path , origin_espg , tolerance = 500 ) : with rasterio . open ( path ) as src : b = src . bounds tile_shape = Polygon ( [ ( b [ 0 ] , b [ 1 ] ) , ( b [ 2 ] , b [ 1 ] ) , ( b [ 2 ] , b [ 3 ] ) , ( b [ 0 ] , b [ 3 ] ) , ( b [ 0 ] , b [ 1 ] ) ] ) tile_geojson = mapping ( tile_shape ) image = src . read ( 1 ) mask = image == 0. novalue_shape = shapes ( image , mask = mask , transform = src . affine ) novalue_shape = [ Polygon ( s [ 'coordinates' ] [ 0 ] ) for ( s , v ) in novalue_shape ] if novalue_shape : union = cascaded_union ( novalue_shape ) data_shape = tile_shape . difference ( union ) if data_shape . geom_type == 'MultiPolygon' : areas = { p . area : i for i , p in enumerate ( data_shape ) } largest = max ( areas . keys ( ) ) data_shape = data_shape [ areas [ largest ] ] if list ( data_shape . interiors ) : data_shape = Polygon ( data_shape . exterior . coords ) data_shape = data_shape . simplify ( tolerance , preserve_topology = False ) data_geojson = mapping ( data_shape ) else : data_geojson = tile_geojson return ( to_latlon ( tile_geojson , origin_espg ) , to_latlon ( data_geojson , origin_espg ) ) | Calculate the data and tile geometry for sentinel - 2 tiles |
9,681 | def tile_metadata ( tile , product , geometry_check = None ) : grid = 'T{0}{1}{2}' . format ( pad ( tile [ 'utmZone' ] , 2 ) , tile [ 'latitudeBand' ] , tile [ 'gridSquare' ] ) meta = OrderedDict ( { 'tile_name' : product [ 'tiles' ] [ grid ] } ) logger . info ( '%s Processing tile %s' % ( threading . current_thread ( ) . name , tile [ 'path' ] ) ) meta [ 'date' ] = tile [ 'timestamp' ] . split ( 'T' ) [ 0 ] meta [ 'thumbnail' ] = '{1}/{0}/preview.jp2' . format ( tile [ 'path' ] , s3_url ) product . pop ( 'tiles' ) tile . pop ( 'datastrip' ) bands = product . pop ( 'band_list' ) for k , v in iteritems ( tile ) : meta [ camelcase_underscore ( k ) ] = v meta . update ( product ) links = [ '{2}/{0}/{1}.jp2' . format ( meta [ 'path' ] , b , s3_url ) for b in bands ] meta [ 'download_links' ] = { 'aws_s3' : links } meta [ 'original_tile_meta' ] = '{0}/{1}/tileInfo.json' . format ( s3_url , meta [ 'path' ] ) def internal_latlon ( meta ) : keys = [ 'tile_origin' , 'tile_geometry' , 'tile_data_geometry' ] for key in keys : if key in meta : meta [ key ] = to_latlon ( meta [ key ] ) return meta if geometry_check : if geometry_check ( meta ) : meta = get_tile_geometry_from_s3 ( meta ) else : meta = internal_latlon ( meta ) else : meta = internal_latlon ( meta ) meta [ 'aws_path' ] = meta . pop ( 'path' ) return meta | Generate metadata for a given tile |
9,682 | def load_markov ( argv , stdin ) : if len ( argv ) > 3 : with open ( argv [ 3 ] ) as input_file : return Algorithm ( input_file . readlines ( ) ) else : return Algorithm ( stdin . readlines ( ) ) | Load and return markov algorithm . |
9,683 | def load_turing ( argv , stdin ) : if len ( argv ) > 3 : with open ( argv [ 3 ] ) as input_file : return build_machine ( input_file . readlines ( ) ) else : return build_machine ( stdin . readlines ( ) ) | Load and return turing machine . |
9,684 | def main ( argv , stdin , stdout ) : if len ( argv ) > 1 and argv [ 1 : 3 ] == [ "compile" , "markov" ] : algo = load_markov ( argv , stdin ) print ( algo . compile ( ) , file = stdout ) elif len ( argv ) == 4 and argv [ 1 : 3 ] == [ "run" , "markov" ] : algo = load_markov ( argv , stdin ) for line in stdin : print ( algo . execute ( '' . join ( line . split ( ) ) ) , file = stdout ) elif len ( argv ) > 1 and argv [ 1 : 3 ] == [ "compile" , "turing" ] : machine = load_turing ( argv , stdin ) print ( machine . compile ( ) , file = stdout ) elif len ( argv ) == 4 and argv [ 1 : 3 ] == [ "run" , "turing" ] : machine = load_turing ( argv , stdin ) for line in stdin : print ( machine . execute ( line ) , file = stdout ) elif len ( argv ) == 2 and argv [ 1 ] == "test" : path = os . path . abspath ( os . path . dirname ( __file__ ) ) argv [ 1 ] = path pytest . main ( ) elif len ( argv ) == 2 and argv [ 1 ] == "version" : print ( "TuringMarkov" , VERSION , file = stdout ) else : print ( USAGE , file = stdout ) if not ( len ( argv ) == 2 and argv [ 1 ] == "help" ) : exit ( 1 ) | Execute when user call turingmarkov . |
9,685 | def detectors ( regex = None , sep = '\t' , temporary = False ) : db = DBManager ( temporary = temporary ) dt = db . detectors if regex is not None : try : re . compile ( regex ) except re . error : log . error ( "Invalid regex!" ) return dt = dt [ dt [ 'OID' ] . str . contains ( regex ) | dt [ 'CITY' ] . str . contains ( regex ) ] dt . to_csv ( sys . stdout , sep = sep ) | Print the detectors table |
9,686 | def get_product_metadata_path ( product_name ) : string_date = product_name . split ( '_' ) [ - 1 ] date = datetime . datetime . strptime ( string_date , '%Y%m%dT%H%M%S' ) path = 'products/{0}/{1}/{2}/{3}' . format ( date . year , date . month , date . day , product_name ) return { product_name : { 'metadata' : '{0}/{1}' . format ( path , 'metadata.xml' ) , 'tiles' : get_tile_metadata_path ( '{0}/{1}' . format ( path , 'productInfo.json' ) ) } } | gets a single products metadata |
9,687 | def get_products_metadata_path ( year , month , day ) : products = { } path = 'products/{0}/{1}/{2}/' . format ( year , month , day ) for key in bucket . objects . filter ( Prefix = path ) : product_path = key . key . replace ( path , '' ) . split ( '/' ) name = product_path [ 0 ] if name not in products : products [ name ] = { } if product_path [ 1 ] == 'metadata.xml' : products [ name ] [ 'metadata' ] = key . key if product_path [ 1 ] == 'productInfo.json' : products [ name ] [ 'tiles' ] = get_tile_metadata_path ( key . key ) return products | Get paths to multiple products metadata |
9,688 | def start ( backdate = None ) : if f . s . cum : raise StartError ( "Already have stamps, can't start again (must reset)." ) if f . t . subdvsn_awaiting or f . t . par_subdvsn_awaiting : raise StartError ( "Already have subdivisions, can't start again (must reset)." ) if f . t . stopped : raise StoppedError ( "Timer already stopped (must open new or reset)." ) t = timer ( ) if backdate is None : t_start = t else : if f . t is f . root : raise BackdateError ( "Cannot backdate start of root timer." ) if not isinstance ( backdate , float ) : raise TypeError ( "Backdate must be type float." ) if backdate > t : raise BackdateError ( "Cannot backdate to future time." ) if backdate < f . tm1 . last_t : raise BackdateError ( "Cannot backdate start to time previous to latest stamp in parent timer." ) t_start = backdate f . t . paused = False f . t . tmp_total = 0. f . t . start_t = t_start f . t . last_t = t_start return t | Mark the start of timing overwriting the automatic start data written on import or the automatic start at the beginning of a subdivision . |
9,689 | def stamp ( name , backdate = None , unique = None , keep_subdivisions = None , quick_print = None , un = None , ks = None , qp = None ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot stamp stopped timer." ) if f . t . paused : raise PausedError ( "Cannot stamp paused timer." ) if backdate is None : t_stamp = t else : if not isinstance ( backdate , float ) : raise TypeError ( "Backdate must be type float." ) if backdate > t : raise BackdateError ( "Cannot backdate to future time." ) if backdate < f . t . last_t : raise BackdateError ( "Cannot backdate to time earlier than last stamp." ) t_stamp = backdate elapsed = t_stamp - f . t . last_t unique = SET [ 'UN' ] if ( unique is None and un is None ) else bool ( unique or un ) keep_subdivisions = SET [ 'KS' ] if ( keep_subdivisions is None and ks is None ) else bool ( keep_subdivisions or ks ) quick_print = SET [ 'QP' ] if ( quick_print is None and qp is None ) else bool ( quick_print or qp ) _stamp ( name , elapsed , unique , keep_subdivisions , quick_print ) tmp_self = timer ( ) - t f . t . self_cut += tmp_self f . t . last_t = t_stamp + tmp_self return t | Mark the end of a timing interval . |
9,690 | def stop ( name = None , backdate = None , unique = None , keep_subdivisions = None , quick_print = None , un = None , ks = None , qp = None ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Timer already stopped." ) if backdate is None : t_stop = t else : if f . t is f . root : raise BackdateError ( "Cannot backdate stop of root timer." ) if not isinstance ( backdate , float ) : raise TypeError ( "Backdate must be type float." ) if backdate > t : raise BackdateError ( "Cannot backdate to future time." ) if backdate < f . t . last_t : raise BackdateError ( "Cannot backdate to time earlier than last stamp." ) t_stop = backdate unique = SET [ 'UN' ] if ( unique is None and un is None ) else bool ( unique or un ) keep_subdivisions = SET [ 'KS' ] if ( keep_subdivisions is None and ks is None ) else bool ( keep_subdivisions or ks ) quick_print = SET [ 'QP' ] if ( quick_print is None and qp is None ) else bool ( quick_print or qp ) if name is not None : if f . t . paused : raise PausedError ( "Cannot stamp paused timer." ) elapsed = t_stop - f . t . last_t _stamp ( name , elapsed , unique , keep_subdivisions , quick_print ) else : times_priv . assign_subdivisions ( UNASGN , keep_subdivisions ) for s in f . t . rgstr_stamps : if s not in f . s . cum : f . s . cum [ s ] = 0. f . s . order . append ( s ) if not f . t . paused : f . t . tmp_total += t_stop - f . t . start_t f . t . tmp_total -= f . t . self_cut f . t . self_cut += timer ( ) - t times_priv . dump_times ( ) f . t . stopped = True if quick_print : print ( "({}) Total: {:.4f}" . format ( f . t . name , f . r . total ) ) return t | Mark the end of timing . Optionally performs a stamp hence accepts the same arguments . |
9,691 | def pause ( ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot pause stopped timer." ) if f . t . paused : raise PausedError ( "Timer already paused." ) f . t . paused = True f . t . tmp_total += t - f . t . start_t f . t . start_t = None f . t . last_t = None return t | Pause the timer preventing subsequent time from accumulating in the total . Renders the timer inactive disabling other timing commands . |
9,692 | def resume ( ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot resume stopped timer." ) if not f . t . paused : raise PausedError ( "Cannot resume timer that is not paused." ) f . t . paused = False f . t . start_t = t f . t . last_t = t return t | Resume a paused timer re - activating it . Subsequent time accumulates in the total . |
9,693 | def collapse_times ( ) : orig_ts = f . timer_stack orig_ls = f . loop_stack copy_ts = _copy_timer_stack ( ) copy_ls = copy . deepcopy ( f . loop_stack ) f . timer_stack = copy_ts f . loop_stack = copy_ls f . refresh_shortcuts ( ) while ( len ( f . timer_stack ) > 1 ) or f . t . in_loop : _collapse_subdivision ( ) timer_pub . stop ( ) collapsed_times = f . r f . timer_stack = orig_ts f . loop_stack = orig_ls f . refresh_shortcuts ( ) return collapsed_times | Make copies of everything assign to global shortcuts so functions work on them extract the times then restore the running stacks . |
9,694 | def create_plate ( self , plate_id , description , meta_data_id , values , complement , parent_plate ) : with switch_db ( PlateDefinitionModel , db_alias = 'hyperstream' ) : try : p = PlateDefinitionModel . objects . get ( plate_id = plate_id ) if p : logging . info ( "Plate with id {} already exists" . format ( plate_id ) ) return self . plates [ plate_id ] except DoesNotExist : pass except MultipleObjectsReturned : raise plate_definition = PlateDefinitionModel ( plate_id = plate_id , description = description , meta_data_id = meta_data_id , values = values , complement = complement , parent_plate = parent_plate ) self . add_plate ( plate_definition ) plate_definition . save ( ) return self . plates [ plate_id ] | Create a new plate and commit it to the database |
9,695 | def timed_loop ( name = None , rgstr_stamps = None , save_itrs = SET [ 'SI' ] , loop_end_stamp = None , end_stamp_unique = SET [ 'UN' ] , keep_prev_subdivisions = SET [ 'KS' ] , keep_end_subdivisions = SET [ 'KS' ] , quick_print = SET [ 'QP' ] ) : return TimedLoop ( name = name , rgstr_stamps = rgstr_stamps , save_itrs = save_itrs , loop_end_stamp = loop_end_stamp , end_stamp_unique = end_stamp_unique , keep_prev_subdivisions = keep_prev_subdivisions , keep_end_subdivisions = keep_end_subdivisions ) | Instantiate a TimedLoop object for measuring loop iteration timing data . Can be used with either for or while loops . |
9,696 | def timed_for ( iterable , name = None , rgstr_stamps = None , save_itrs = SET [ 'SI' ] , loop_end_stamp = None , end_stamp_unique = SET [ 'UN' ] , keep_prev_subdivisions = SET [ 'KS' ] , keep_end_subdivisions = SET [ 'KS' ] , quick_print = SET [ 'QP' ] ) : return TimedFor ( iterable , name = name , rgstr_stamps = rgstr_stamps , save_itrs = save_itrs , loop_end_stamp = loop_end_stamp , end_stamp_unique = end_stamp_unique , keep_prev_subdivisions = keep_prev_subdivisions , keep_end_subdivisions = keep_end_subdivisions ) | Instantiate a TimedLoop object for measuring for loop iteration timing data . Can be used only on for loops . |
9,697 | def write_calibration ( calib , f , loc ) : for i , node in enumerate ( [ p + '_' + s for p in [ 'pos' , 'dir' ] for s in 'xyz' ] ) : h5loc = loc + '/' + node ca = f . get_node ( h5loc ) ca . append ( calib [ : , i ] ) du = f . get_node ( loc + '/du' ) du . append ( calib [ : , 7 ] . astype ( 'u1' ) ) floor = f . get_node ( loc + '/floor' ) floor . append ( calib [ : , 8 ] . astype ( 'u1' ) ) t0 = f . get_node ( loc + '/t0' ) t0 . append ( calib [ : , 6 ] ) if loc == "/hits" : time = f . get_node ( loc + "/time" ) offset = len ( time ) chunk_size = len ( calib ) time [ offset - chunk_size : offset ] += calib [ : , 6 ] | Write calibration set to file |
9,698 | def initialise_arrays ( group , f ) : for node in [ 'pos_x' , 'pos_y' , 'pos_z' , 'dir_x' , 'dir_y' , 'dir_z' , 'du' , 'floor' , 't0' ] : if node in [ 'floor' , 'du' ] : atom = U1_ATOM else : atom = F4_ATOM f . create_earray ( group , node , atom , ( 0 , ) , filters = FILTERS ) | Create EArrays for calibrated hits |
9,699 | def blob_counter ( self ) : import aa from ROOT import EventFile try : event_file = EventFile ( self . filename ) except Exception : raise SystemExit ( "Could not open file" ) num_blobs = 0 for event in event_file : num_blobs += 1 return num_blobs | Create a blob counter . |