idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,200
def to_json(data):
    """Serialize *data* to a pretty-printed JSON string.

    Objects that are not natively JSON-serializable fall back to their
    ``__dict__``; keys are emitted in sorted order with 4-space indent.
    """
    def fallback(obj):
        return obj.__dict__

    return json.dumps(data, default=fallback, sort_keys=True, indent=4)
Return data as a JSON string .
56,201
def convert_string(string, chars=None):
    """Replace each occurrence of the given characters with a space.

    When *chars* is omitted, a default set of common separator
    characters is used.
    """
    if chars is None:
        chars = [',', '.', '-', '/', ':', ' ']
    for separator in chars:
        string = string.replace(separator, ' ')
    return string
Remove certain characters from a string .
56,202
def convert_time(time):
    """Convert a 12-hour time string (e.g. ``'1:30 p.m.'``) to 24-hour ``'HH:MM'``.

    Returns the input unchanged when it has no AM/PM component.
    """
    split_time = time.split()
    try:
        am_pm = split_time[1].replace('.', '')  # 'p.m.' -> 'pm'
    except IndexError:
        # No AM/PM marker: nothing to convert.
        return time
    time_str = '{0} {1}'.format(split_time[0], am_pm)
    try:
        time_obj = datetime.strptime(time_str, '%I:%M %p')
    except ValueError:
        # Hour-only form, e.g. '1 pm'.
        time_obj = datetime.strptime(time_str, '%I %p')
    # BUG FIX: the original formatted with '%H:%M %p' (e.g. '13:30 PM'),
    # which is not 24-hour time; drop the AM/PM marker.
    return time_obj.strftime('%H:%M')
Convert a time string into 24 - hour time .
56,203
def convert_month(date, shorten=True, cable=True):
    """Replace the month in *date* by shortening or lengthening it."""
    month = date.split()[0].lower()
    # NOTE(review): 'sept' forces the long form when cable is True — confirm
    # this special-casing matches the upstream listing format.
    if 'sept' in month:
        shorten = False if cable else True
    try:
        if shorten:
            month = SHORT_MONTHS[MONTHS.index(month)]
        else:
            month = MONTHS[SHORT_MONTHS.index(month)]
    except ValueError:
        # Month not found in either lookup table; just title-case it.
        month = month.title()
    return '{0} {1}'.format(month, ' '.join(date.split()[1:]))
Replace month by shortening or lengthening it .
56,204
def convert_date(date):
    """Convert a date string to a ``datetime`` object.

    The month is expanded to its long form and separator characters are
    normalized to spaces before parsing with the module's DATE_FMT.
    """
    date = convert_month(date, shorten=False)
    clean_string = convert_string(date)
    return datetime.strptime(clean_string, DATE_FMT.replace('-', ''))
Convert string to datetime object .
56,205
def date_in_range(date1, date2, range):
    """Return True if the two date strings are at most *range* days apart.

    Note: the difference is signed (date2 - date1), so a date2 earlier
    than date1 always satisfies the ``<=`` test.  The parameter name
    shadows the builtin ``range`` but is kept for caller compatibility.
    """
    date_obj1 = convert_date(date1)
    date_obj2 = convert_date(date2)
    return (date_obj2 - date_obj1).days <= range
Check if two date objects are within a specific range
56,206
def inc_date(date_obj, num, date_fmt):
    """Return *date_obj* advanced by *num* days, formatted as *date_fmt*."""
    shifted = date_obj + timedelta(days=num)
    return shifted.strftime(date_fmt)
Increment the date by a certain number of days and return the date formatted as the specified string format.
56,207
def get_soup(url):
    """Request *url* and return a BeautifulSoup of the page, or None on 404.

    NOTE(review): only 404 is treated as failure; other error statuses
    (403, 500, ...) still produce a soup — confirm that is intended.
    """
    html = requests.get(url, stream=True, headers=HEADERS)
    if html.status_code != 404:
        return BeautifulSoup(html.content, 'html.parser')
    else:
        return None
Request the page and return the soup .
56,208
def match_list(query_list, string):
    """Return True if every word of any query in *query_list* occurs in *string*.

    Stop words are removed from both the queries and the target string.
    Note that the membership test is a substring check, not a
    word-boundary match.
    """
    match = False
    index = 0
    string = ' '.join(filter_stopwords(string))
    if not isinstance(query_list, list):
        query_list = [query_list]  # accept a single query string
    while index < len(query_list):
        query = query_list[index]
        words_query = filter_stopwords(query)
        match = all(word in string for word in words_query)
        if match:
            break  # first fully-matching query wins
        index += 1
    return match
Return True if all words in a word list are in the string .
56,209
def filter_stopwords(phrase):
    """Lower-case *phrase* and drop stop words; return the remaining words.

    *phrase* may be a string (split on whitespace) or a list of words.
    """
    words = phrase if isinstance(phrase, list) else phrase.split()
    ignored = {'the', 'a', 'in', 'to'}
    return [word.lower() for word in words if word.lower() not in ignored]
Filter out stop words and return as a list of words
56,210
def safe_unicode(string):
    """On Python 2, normalize the right single quote and return UTF-8 bytes.

    On Python 3 (PY3 truthy) the string is returned unchanged.
    """
    if not PY3:
        uni = string.replace(u'\u2019', "'")
        return uni.encode('utf-8')
    return string
If Python 2 replace non - ascii characters and return encoded string .
56,211
def get_strings(soup, tag):
    """Collect the non-empty ``.string`` of every *tag* element in *soup*."""
    return [element.string for element in soup.find_all(tag) if element.string]
Get all the string children from an html tag .
56,212
def cli(ctx, given_name, demo):
    """Initializes a bubble: writes the .bubble file, the directory layout
    and example config/rules/client files under ctx.home.

    NOTE(review): *demo* is accepted but never used here — confirm.
    """
    path = None
    # NOTE(review): path is always None at this point, so this always
    # resolves to ctx.home.
    if path is None:
        path = ctx.home
    bubble_file_name = path + '/.bubble'
    config_file = path + '/config/config.yaml'
    # Refuse to re-initialize over an existing bubble.
    if os.path.exists(bubble_file_name) and os.path.isfile(bubble_file_name):
        ctx.say_yellow('There is already a bubble present, will not initialize bubble in:' + path)
        return
    else:
        given_name = '(((' + given_name + ')))'
        with open(bubble_file_name, 'w') as dot_bubble:
            dot_bubble.write('bubble=' + metadata.version + '\n')
            dot_bubble.write('name=' + given_name + '\n')
            dot_bubble.write('home=' + ctx.home + '\n')
            dot_bubble.write('local_init_timestamp=' + str(arrow.utcnow()) + '\n')
            dot_bubble.write('local_creator_user=' + str(os.getenv('USER')) + '\n')
            dot_bubble.write('local_created_in_env=' + str(os.environ) + '\n')
        ctx.say_green('Initialised a new bubble in [%s]' % click.format_filename(bubble_file_name))
        create_dir(ctx, path + '/config/')
        create_dir(ctx, path + '/logs/')
        create_dir(ctx, path + '/export/')
        create_dir(ctx, path + '/import/')
        create_dir(ctx, path + '/remember/')
        create_dir(ctx, path + '/remember/archive')
        with open(config_file, 'w') as cfg_file:
            cfg_file.write(get_example_configuration())
        ctx.say_green('Created an example configuration in %s' % click.format_filename(config_file))
        rules_file = path + '/config/rules.bubble'
        with open(rules_file, 'w') as rules:
            rules.write(get_example_rules_bubble())
        ctx.say_green('Created an example rules in [%s]' % click.format_filename(rules_file))
        rule_functions_file = path + '/custom_rule_functions.py'
        with open(rule_functions_file, 'w') as rule_functions:
            rule_functions.write(get_example_rule_functions())
        ctx.say_green('Created an example rule_functions in [%s]' % click.format_filename(rule_functions_file))
        src_client_file = path + '/mysrcclient.py'
        with open(src_client_file, 'w') as src_client:
            src_client.write(get_example_client_pull())
        ctx.say_green('Created source example client with pull method [%s]' % click.format_filename(src_client_file))
        tgt_client_file = path + '/mytgtclient.py'
        with open(tgt_client_file, 'w') as tgt_client:
            tgt_client.write(get_example_client_push())
        # NOTE(review): this message reports src_client_file but the file
        # just written is tgt_client_file — looks like a copy/paste slip.
        ctx.say_green('Created an target example client with push method [%s]' % click.format_filename(src_client_file))
        ctx.say_green('Bubble initialized, please adjust your configuration file')
Initializes a bubble .
56,213
def _bld_op(self, op, num, **kwargs):
    """Record a pandas operator call named *op* as an attribute on self.

    The operand *num* is stored under the 'other' kwarg; the stored dict
    carries the ``pab`` marker type plus the call kwargs.
    """
    kwargs['other'] = num
    setattr(self, op, {'mtype': pab, 'kwargs': kwargs})
Implements a pandas operator.
56,214
def _bld_pab_generic(self, funcname, **kwargs):
    """Record a generic attribute-based pandas function call on self.

    Stores a {'mtype': pab, 'kwargs': ...} descriptor under *funcname*.
    """
    margs = {'mtype': pab, 'kwargs': kwargs}
    setattr(self, funcname, margs)
implements a generic version of an attribute based pandas function
56,215
def _bld_pnab_generic(self, funcname, **kwargs):
    """Record a generic non-attribute-based pandas function call on self.

    Stores a {'mtype': pnab, 'kwargs': ...} descriptor under *funcname*.
    """
    margs = {'mtype': pnab, 'kwargs': kwargs}
    setattr(self, funcname, margs)
Implements a generic version of a non-attribute-based pandas function.
56,216
def get(self, request, *args, **kwargs):
    """List all products in the current shopping cart as a JSON response.

    NOTE(review): request.GET values are strings, so 'onlypublic' is only
    the boolean True when the query parameter is absent — confirm intent.
    """
    cart = ShoppingCartProxy(request)
    return JsonResponse(cart.get_products(onlypublic=request.GET.get('onlypublic', True)))
List all products in the shopping cart
56,217
def post(self, request, *args, **kwargs):
    """Add a new product to the current shopping cart.

    Expects a JSON body with integer 'product_pk' and 'quantity' fields.
    Returns the updated cart products as JSON, or 400 on a malformed
    request (bad JSON, missing fields, or non-integer values).
    """
    try:
        POST = json.loads(request.body.decode('utf-8'))
        product_pk = int(POST['product_pk'])
        quantity = int(POST['quantity'])
    except (ValueError, KeyError, TypeError):
        # Previously a malformed body or non-integer value raised and
        # surfaced as a 500; treat it as a client error instead.
        return HttpResponseBadRequest()
    cart = ShoppingCartProxy(request)
    cart.add(product_pk=product_pk, quantity=quantity)
    return JsonResponse(cart.products)
Adds new product to the current shopping cart
56,218
def register_signal(alias: str, signal: pyqtSignal):
    """Register *signal* under *alias* at the dispatcher; aliases are unique.

    Raises SignalDispatcherError when the alias is already taken.
    """
    if SignalDispatcher.signal_alias_exists(alias):
        raise SignalDispatcherError('Alias "' + alias + '" for signal already exists!')
    SignalDispatcher.signals[alias] = signal
Used to register signal at the dispatcher . Note that you can not use alias that already exists .
56,219
def register_handler(alias: str, handler: callable):
    """Register *handler* for *alias*, creating the handler list on first use.

    Multiple handlers may be registered under the same alias.
    """
    # setdefault replaces the original get/None-check, avoiding the
    # double dict lookup while keeping the same append semantics.
    SignalDispatcher.handlers.setdefault(alias, []).append(handler)
Used to register handler at the dispatcher .
56,220
def dispatch():
    """Connect every registered signal to the handlers sharing its alias."""
    aliases = SignalDispatcher.signals.keys()
    for alias in aliases:
        handlers = SignalDispatcher.handlers.get(alias)
        signal = SignalDispatcher.signals.get(alias)
        # NOTE(review): if a signal alias has no handlers entry at all,
        # handlers is None and .__len__() raises AttributeError — confirm
        # registration always pairs signals with a handler list.
        if signal is None or handlers.__len__() == 0:
            continue
        for handler in handlers:
            signal.connect(handler)
This methods runs the wheel . It is used to connect signal with their handlers based on the aliases .
56,221
def _get_rev(self, fpath):
    """Get an SCM revision id for *fpath*: try git first, then svn.

    Returns the revision as produced by the SCM command, or None when
    neither SCM yields one.
    """
    rev = None
    try:
        cmd = ["git", "log", "-n1", "--pretty=format:\"%h\"", fpath]
        rev = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()[0]
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.  git missing or not a checkout:
        # fall through to svn.
        pass
    if not rev:
        try:
            cmd = ["svn", "info", fpath]
            svninfo = Popen(cmd, stdout=PIPE, stderr=PIPE).stdout.readlines()
            for info in svninfo:
                # NOTE(review): on Python 3 these lines are bytes, so
                # split(":") raises TypeError — this path looks Python-2
                # only; confirm the target interpreter.
                tokens = info.split(":")
                if tokens[0].strip() == "Last Changed Rev":
                    rev = tokens[1].strip()
        except Exception:
            pass
    return rev
Get an SCM version number . Try svn and git .
56,222
def execute_migrations(self, show_traceback=True):
    """Execute all pending migrations across all capable databases.

    NOTE(review): uses dict.iteritems(), i.e. Python-2-only code.
    """
    all_migrations = get_pending_migrations(self.path, self.databases)
    if not len(all_migrations):
        sys.stdout.write("There are no migrations to apply.\n")
    for db, migrations in all_migrations.iteritems():
        connection = connections[db]
        # Open (and immediately close) a cursor to force a connection.
        cursor = connection.cursor()
        cursor.close()
        for migration in migrations:
            migration_path = self._get_migration_path(db, migration)
            with Transactional():
                sys.stdout.write("Executing migration %r on %r...." % (migration, db))
                created_models = self._execute_migration(db, migration_path, show_traceback=show_traceback)
                emit_post_sync_signal(
                    created_models=created_models,
                    verbosity=self.verbosity,
                    interactive=self.interactive,
                    db=db,
                )
        if self.load_initial_data:
            sys.stdout.write("Running loaddata for initial_data fixtures on %r.\n" % db)
            call_command(
                "loaddata",
                "initial_data",
                verbosity=self.verbosity,
                database=db,
            )
Executes all pending migrations across all capable databases
56,223
def handle(self, *args, **options):
    """Upgrade the database: dispatch to list/create/execute/seed actions
    based on the parsed command options.
    """
    self.do_list = options.get("do_list")
    self.do_execute = options.get("do_execute")
    self.do_create = options.get("do_create")
    self.do_create_all = options.get("do_create_all")
    self.do_seed = options.get("do_seed")
    self.load_initial_data = options.get("load_initial_data", True)
    self.args = args
    if options.get("path"):
        self.path = options.get("path")
    else:
        # Fall back to the configured (or default) migrations directory.
        default_path = self._get_default_migration_path()
        self.path = getattr(settings, "NASHVEGAS_MIGRATIONS_DIRECTORY", default_path)
    self.verbosity = int(options.get("verbosity", 1))
    self.interactive = options.get("interactive")
    self.databases = options.get("databases")
    if self.do_create and not self.databases:
        self.databases = [DEFAULT_DB_ALIAS]
    if self.do_create and self.do_create_all:
        raise CommandError("You cannot combine --create and --create-all")
    self.init_nashvegas()
    if self.do_create_all:
        self.create_all_migrations()
    elif self.do_create:
        assert len(self.databases) == 1
        self.create_migrations(self.databases[0])
    if self.do_execute:
        self.execute_migrations()
    if self.do_list:
        self.list_migrations()
    if self.do_seed:
        self.seed_migrations()
Upgrades the database. Executes SQL scripts that haven't already been applied to the database.
56,224
def plantuml(desc):
    """Generate a PlantUML class-diagram document.

    *desc* is a (classes, relations, inherits) triple; returns the
    diagram source as a single string.
    """
    classes, relations, inherits = desc
    result = [
        '@startuml',
        'skinparam defaultFontName Courier',
    ]
    for cls in classes:
        # Columns, properties and methods become aligned member rows.
        class_desc = []
        class_desc += [(i[1], i[0]) for i in cls['cols']]
        class_desc += [('+', i) for i in cls['props']]
        class_desc += [('%s()' % i, '') for i in cls['methods']]
        result.append('Class %(name)s {\n%(desc)s\n}' % {
            'name': cls['name'],
            'desc': '\n'.join(tabular_output(class_desc)),
        })
    for item in inherits:
        result.append("%(parent)s <|-- %(child)s" % item)
    for item in relations:
        result.append("%(from)s <--o %(to)s: %(by)s" % item)
    result += [
        'right footer generated by sadisplay v%s' % __version__,
        '@enduml',
    ]
    return '\n\n'.join(result)
Generate plantuml class diagram
56,225
def is_reference_target(resource, rtype, label):
    """Return True if *resource* has reference type *rtype* containing *label*.

    BUG FIX: the original fell off the end and returned None when the
    rtype was absent; callers relying on truthiness are unaffected, but
    the function now always returns a bool as documented.
    """
    prop = resource.props.references.get(rtype, False)
    return bool(prop) and label in prop
Return true if the resource has this rtype with this label
56,226
def get_sources(self, resources):
    """Filter *resources* down to those referencing this resource's label."""
    rtype = self.rtype
    label = self.props.label
    result = [
        resource for resource in resources.values()
        if is_reference_target(resource, rtype, label)
    ]
    return result
Filter resources based on which have this reference
56,227
def setup(app: Sphinx):
    """Initialize Kaybee as a Sphinx extension.

    Scans the plugin package, commits the dectate app, installs the
    template bridge, and wires every relevant Sphinx event to the kb
    dispatcher via EventAction.
    """
    importscan.scan(plugins)
    dectate.commit(kb)
    app.add_config_value('kaybee_settings', KaybeeSettings(), 'html')
    bridge = 'kaybee.plugins.postrenderer.config.KaybeeBridge'
    app.config.template_bridge = bridge
    app.connect('env-updated', flush_everything)
    # Each lambda adapts a Sphinx event signature to the dispatcher call.
    app.connect(SphinxEvent.BI.value,
                lambda sphinx_app: EventAction.call_builder_init(kb, sphinx_app))
    app.connect(SphinxEvent.EPD.value,
                lambda sphinx_app, sphinx_env, docname: EventAction.call_purge_doc(kb, sphinx_app, sphinx_env, docname))
    app.connect(SphinxEvent.EBRD.value,
                lambda sphinx_app, sphinx_env, docnames: EventAction.call_env_before_read_docs(kb, sphinx_app, sphinx_env, docnames))
    app.connect(SphinxEvent.DREAD.value,
                lambda sphinx_app, doctree: EventAction.call_env_doctree_read(kb, sphinx_app, doctree))
    app.connect(SphinxEvent.DRES.value,
                lambda sphinx_app, doctree, fromdocname: EventAction.call_doctree_resolved(kb, sphinx_app, doctree, fromdocname))
    app.connect(SphinxEvent.EU.value,
                lambda sphinx_app, sphinx_env: EventAction.call_env_updated(kb, sphinx_app, sphinx_env))
    app.connect(SphinxEvent.HCP.value,
                lambda sphinx_app: EventAction.call_html_collect_pages(kb, sphinx_app))
    app.connect(SphinxEvent.ECC.value,
                lambda sphinx_builder, sphinx_env: EventAction.call_env_check_consistency(kb, sphinx_builder, sphinx_env))
    app.connect(SphinxEvent.MR.value,
                lambda sphinx_app, sphinx_env, node, contnode: EventAction.call_missing_reference(kb, sphinx_app, sphinx_env, node, contnode))
    app.connect(SphinxEvent.HPC.value,
                lambda sphinx_app, pagename, templatename, context, doctree: EventAction.call_html_page_context(kb, sphinx_app, pagename, templatename, context, doctree))
    return dict(version=__version__, parallel_read_safe=False)
Initialize Kaybee as a Sphinx extension
56,228
def loadInstance(self):
    """Load the plugin instance from the registry's proxy information.

    On import failure a placeholder Plugin carrying the error replaces
    the instance.  NOTE(review): `except Exception, e` is Python-2-only
    syntax; this module will not compile on Python 3.
    """
    if self._loaded:
        return
    self._loaded = True
    module_path = self.modulePath()
    package = projex.packageFromPath(module_path)
    path = os.path.normpath(projex.packageRootPath(module_path))
    if path in sys.path:
        sys.path.remove(path)
    # Ensure the plugin's root wins import resolution.
    sys.path.insert(0, path)
    try:
        __import__(package)
    except Exception, e:
        err = Plugin(self.name(), self.version())
        err.setError(e)
        err.setFilepath(module_path)
        self._instance = err
        self.setError(e)
        msg = "%s.plugin('%s') errored loading instance from %s"
        opts = (self.proxyClass().__name__, self.name(), module_path)
        logger.warning(msg % opts)
        logger.error(e)
Loads the plugin from the proxy information that was created from the registry file .
56,229
def clean_resource_json(resource_json):
    """Shrink a resource's JSON dict by dropping catalog-irrelevant fields.

    Mutates *resource_json* in place and returns it.
    """
    for key in ('parent_docname', 'parent', 'template', 'repr', 'series'):
        resource_json.pop(key, None)
    props = resource_json['props']
    for key in ('acquireds', 'style', 'in_nav', 'nav_title', 'weight',
                'auto_excerpt'):
        props.pop(key, None)
    return resource_json
The catalog wants to be smaller, so drop some unneeded fields.
56,230
def get(self, url, params=None, cache_cb=None, **kwargs):
    """Make an HTTP GET request for *url*, serving from cache when possible.

    A cache hit yields a synthesized Response carrying the cached body;
    otherwise the live response may be written back to the cache,
    depending on should_we_update_cache.
    """
    if self.use_random_user_agent:
        headers = kwargs.get("headers", dict())
        headers.update({Headers.UserAgent.KEY: Headers.UserAgent.random()})
        kwargs["headers"] = headers
    url = add_params(url, params)
    cache_consumed, value = self.try_read_cache(url)
    if cache_consumed:
        # Build a faux Response object around the cached content.
        response = requests.Response()
        response.url = url
        response._content = value
    else:
        # NOTE(review): a caller-supplied 'cache_expire' kwarg is also
        # forwarded to ses.get() here — confirm requests tolerates it.
        response = self.ses.get(url, **kwargs)
    if self.should_we_update_cache(response, cache_cb, cache_consumed):
        self.cache.set(
            url, response.content,
            expire=kwargs.get("cache_expire", self.cache_expire),
        )
    return response
Make http get request .
56,231
def download(self, url, dst, params=None, cache_cb=None, overwrite=False,
             stream=False, minimal_size=-1, maximum_size=1024 ** 6, **kwargs):
    """Download binary content from *url* to the file *dst*.

    Raises OSError when *dst* exists and *overwrite* is False; raises a
    download-size error when the payload falls outside
    [minimal_size, maximum_size].  With stream=True the body is written
    in 1 MiB chunks via an atomic write.
    """
    response = self.get(url, params=params, cache_cb=cache_cb,
                        stream=stream, **kwargs)
    if not overwrite:
        if os.path.exists(dst):
            raise OSError("'%s' exists!" % dst)
    if stream:
        chunk_size = 1024 * 1024
        downloaded_size = 0
        with atomic_write(dst, mode="wb") as f:
            for chunk in response.iter_content(chunk_size):
                if not chunk:
                    break
                f.write(chunk)
                # NOTE(review): the counter adds the full chunk_size even
                # for a short final chunk, and the minimal_size check runs
                # mid-download (before the body is complete) — confirm
                # both are intended approximations.
                downloaded_size += chunk_size
                if (downloaded_size < minimal_size) or (downloaded_size > maximum_size):
                    self.raise_download_oversize_error(
                        url, downloaded_size, minimal_size, maximum_size)
    else:
        content = response.content
        downloaded_size = sys.getsizeof(content)
        if (downloaded_size < minimal_size) or (downloaded_size > maximum_size):
            self.raise_download_oversize_error(
                url, downloaded_size, minimal_size, maximum_size)
        else:
            with atomic_write(dst, mode="wb") as f:
                f.write(content)
Download binary content to destination .
56,232
def option(*args, **kwargs):
    """Decorator adding an optparse option to a Cmdln subcommand method,
    or a top-level option when applied to a Cmdln subclass.

    The (*args, **kwargs) are forwarded to optparser.add_option.
    """
    def decorate_sub_command(method):
        # Attach a per-method option parser lazily on first use.
        if not hasattr(method, "optparser"):
            method.optparser = SubCmdOptionParser()
        method.optparser.add_option(*args, **kwargs)
        return method

    def decorate_class(klass):
        assert _forgiving_issubclass(klass, Cmdln)
        # Copy the inherited option list so subclasses don't share state.
        _inherit_attr(klass, "toplevel_optparser_options", [], cp=lambda l: l[:])
        klass.toplevel_optparser_options.append((args, kwargs))
        return klass

    def decorate(obj):
        if _forgiving_issubclass(obj, Cmdln):
            return decorate_class(obj)
        else:
            return decorate_sub_command(obj)

    return decorate
Decorator to add an option to the optparser argument of a Cmdln subcommand
56,233
def _inherit_attr ( klass , attr , default , cp ) : if attr not in klass . __dict__ : if hasattr ( klass , attr ) : value = cp ( getattr ( klass , attr ) ) else : value = default setattr ( klass , attr , value )
Inherit the attribute from the base class
56,234
def _forgiving_issubclass(derived_class, base_class):
    """issubclass() that returns False instead of raising on non-classes.

    NOTE(review): ClassType is the Python-2 old-style class type
    (types.ClassType); this check is Python-2 specific.
    """
    return (type(derived_class) is ClassType
            and type(base_class) is ClassType
            and issubclass(derived_class, base_class))
Forgiving version of issubclass
56,235
def timecalMs1DataMedian(msrunContainer, specfile, calibrationData,
                         minDataPoints=50, deviationKey='relDev'):
    """Generate a calibration value per MS1 scan from the median deviation.

    Scans with fewer than *minDataPoints* deviations borrow data from
    neighbouring scans, expanding symmetrically in retention-time order;
    borrowed medians contribute with half weight.
    """
    corrData = dict()
    _posDict = dict()  # rt-sorted position -> scan id
    pos = 0
    for si in msrunContainer.getItems(specfiles=specfile, sort='rt',
                                      selector=lambda si: si.msLevel == 1):
        corrData[si.id] = {'calibValue': float(), 'n': int(), 'data': list()}
        _posDict[pos] = si.id
        pos += 1
    for siId, deviation in zip(calibrationData['siId'],
                               calibrationData[deviationKey]):
        corrData[siId]['data'].append(deviation)
        corrData[siId]['n'] += 1
    for pos in range(len(corrData)):
        entry = corrData[_posDict[pos]]
        _data = [entry['data']]
        _n = entry['n']
        expansion = 0
        # NOTE(review): if the whole run holds fewer than minDataPoints
        # deviations, both lookups eventually KeyError forever and this
        # loop never terminates — confirm upstream guarantees enough data.
        while _n < minDataPoints:
            expansion += 1
            try:
                expData = corrData[_posDict[pos + expansion]]['data']
                _data.append(expData)
                _n += corrData[_posDict[pos + expansion]]['n']
            except KeyError:
                pass
            try:
                expData = corrData[_posDict[pos - expansion]]['data']
                _data.append(expData)
                _n += corrData[_posDict[pos - expansion]]['n']
            except KeyError:
                pass
        if len(entry['data']) > 0:
            median = numpy.median(entry['data'])
            factor = 1
        else:
            median = float()
            factor = 0
        # Neighbouring scans contribute with half weight.
        for expData in _data[1:]:
            if len(expData) > 0:
                median += numpy.median(expData) * 0.5
                factor += 0.5
        median = median / factor
        entry['calibValue'] = median
    return corrData
Generates a calibration value for each MS1 scan by calculating the median deviation
56,236
def get_genericpage(cls, kb_app):
    """Return the registered genericpage class with the lowest order,
    or the default Genericpage when none is registered."""
    q = dectate.Query('genericpage')
    klasses = sorted(q(kb_app), key=lambda args: args[0].order)
    if not klasses:
        return Genericpage
    else:
        # Query results are (action, target) pairs; return the target.
        return klasses[0][1]
Return the one class if configured otherwise default
56,237
def cli(ctx):
    """Show the man page packed inside the bubble tool."""
    manfile = bubble_lib_dir + os.sep + 'extras' + os.sep + 'Bubble.1.gz'
    mancmd = ["/usr/bin/man", manfile]
    try:
        return subprocess.call(mancmd)
    except Exception as e:
        # man unavailable (e.g. non-Unix): point the user at the raw file.
        print('cannot run man with bubble man page')
        print('you can always have a look at: ' + manfile)
Shows the man page packed inside the bubble tool
56,238
def _fetch_dimensions(self, dataset):
    """Yield the parent's dimensions, overriding datatype/dialect for regions."""
    for dimension in super(SCB, self)._fetch_dimensions(dataset):
        if dimension.id == "Region":
            # Rebuild the Region dimension with the skatteverket dialect.
            yield Dimension(dimension.id,
                            datatype="region",
                            dialect="skatteverket",
                            label=dimension.label)
        else:
            yield dimension
We override this method just to set the correct datatype and dialect for regions .
56,239
def call(self, func, key, timeout=None):
    """Cache the result of calling *func* under *key*.

    None results are stored as the NONE_RESULT sentinel so a cached
    None is distinguishable from a cache miss.
    """
    result = self.get(key)
    if result == NONE_RESULT:
        return None
    if result is None:
        # Cache miss: compute and store (None mapped to the sentinel).
        result = func()
        self.set(key, result if result is not None else NONE_RESULT, timeout)
    return result
Wraps a function call with cache .
56,240
def map(self, key_pattern, func, all_args, timeout=None):
    """Cache func(*args) for every args tuple in *all_args*.

    Returns the results in input order.  Missing entries are computed
    and written back in one set_many call; None results use the
    NONE_RESULT sentinel (same convention as call()).
    """
    results = []
    keys = [make_key(key_pattern, func, args, {}) for args in all_args]
    cached = dict(zip(keys, self.get_many(keys)))
    cache_to_add = {}
    for key, args in zip(keys, all_args):
        val = cached[key]
        if val is None:
            # Cache miss: compute now, queue for a bulk write.
            val = func(*args)
            cache_to_add[key] = val if val is not None else NONE_RESULT
        if val == NONE_RESULT:
            val = None
        results.append(val)
    if cache_to_add:
        self.set_many(cache_to_add, timeout)
    return results
Cache return value of multiple calls .
56,241
async def _window_open(self, stream_id: int):
    """Wait until the identified stream's flow-control window is open."""
    stream = self._get_stream(stream_id)
    return await stream.window_open.wait()
Wait until the identified stream's flow control window is open.
56,242
async def send_data(self, stream_id: int, data: bytes,
                    end_stream: bool = False,):
    """Send *data* on a stream, honouring the receiver's flow control.

    Data larger than the current window or the maximum outbound frame
    size is split across multiple DATA frames.  Raises
    ConnectionClosedError / StreamClosedError when the connection or
    stream is already closed.

    NOTE(review): with empty *data* the loop never runs, so
    end_stream=True sends nothing — confirm callers never rely on that.
    """
    if self.closed:
        raise ConnectionClosedError
    stream = self._get_stream(stream_id)
    if stream.closed:
        raise StreamClosedError(stream_id)
    remaining = data
    while len(remaining) > 0:
        # Wait until the transport is writable AND the window is open.
        await asyncio.gather(
            self._writable.wait(),
            self._window_open(stream.id),
        )
        remaining_size = len(remaining)
        window_size = self._h2.local_flow_control_window(stream.id)
        max_frame_size = self._h2.max_outbound_frame_size
        send_size = min(remaining_size, window_size, max_frame_size)
        if send_size == 0:
            continue
        logger.debug(
            f'[{stream.id}] Sending {send_size} of {remaining_size} '
            f'bytes (window {window_size}, frame max {max_frame_size})'
        )
        to_send = remaining[:send_size]
        remaining = remaining[send_size:]
        end = (end_stream is True and len(remaining) == 0)
        self._h2.send_data(stream.id, to_send, end_stream=end)
        self._flush()
        if self._h2.local_flow_control_window(stream.id) == 0:
            # Window exhausted: block further sends until WINDOW_UPDATE.
            stream.window_open.clear()
Send data respecting the receiver's flow control instructions. If the provided data is larger than the connection's maximum outbound frame size, it will be broken into several frames as appropriate.
56,243
async def read_data(self, stream_id: int) -> bytes:
    """Read from the stream until the remote peer closes it.

    Never returns if the stream is never ended.
    """
    frames = [f async for f in self.stream_frames(stream_id)]
    return b''.join(frames)
Read data from the specified stream until it is closed by the remote peer . If the stream is never ended this never returns .
56,244
async def read_frame(self, stream_id: int) -> bytes:
    """Read one DATA frame from the stream, waiting if none is buffered.

    Flow-controlled bytes are acknowledged so the peer may keep sending.
    Raises StreamConsumedError once the stream is closed and drained.
    """
    stream = self._get_stream(stream_id)
    frame = await stream.read_frame()
    if frame.flow_controlled_length > 0:
        self._acknowledge_data(frame.flow_controlled_length, stream_id)
    return frame.data
Read a single frame of data from the specified stream waiting until frames are available if none are present in the local buffer . If the stream is closed and all buffered frames have been consumed raises a StreamConsumedError .
56,245
async def get_pushed_stream_ids(self, parent_stream_id: int) -> List[int]:
    """Return (and consume) the ids of streams pushed on *parent_stream_id*.

    Blocks until at least one pushed stream is available.  Raises
    NoSuchStreamError when the parent stream is unknown.
    """
    if parent_stream_id not in self._streams:
        logger.error(
            f'Parent stream {parent_stream_id} unknown to this connection')
        raise NoSuchStreamError(parent_stream_id)
    parent = self._get_stream(parent_stream_id)
    await parent.pushed_streams_available.wait()
    pushed_streams_ids = self._pushed_stream_ids[parent.id]
    stream_ids: List[int] = []
    if len(pushed_streams_ids) > 0:
        # Drain the pending list so each push is reported once.
        stream_ids.extend(pushed_streams_ids)
        pushed_streams_ids.clear()
    parent.pushed_streams_available.clear()
    return stream_ids
Return a list of all streams pushed by the remote peer that are children of the specified stream . If no streams have been pushed when this method is called waits until at least one stream has been pushed .
56,246
def convertMzml(mzmlPath, outputDirectory=None):
    """Import an mzML file and save it as a MsrunContainer file.

    Output is written next to the input file unless *outputDirectory*
    is given.
    """
    outputDirectory = (outputDirectory if outputDirectory is not None
                       else os.path.dirname(mzmlPath))
    msrunContainer = importMzml(mzmlPath)
    msrunContainer.setPath(outputDirectory)
    msrunContainer.save()
Imports an mzml file and converts it to a MsrunContainer file
56,247
def prepareSiiImport(siiContainer, specfile, path, qcAttr, qcLargerBetter,
                     qcCutoff, rankAttr, rankLargerBetter):
    """Prepare *siiContainer* for importing PSM results for *specfile*.

    Adds the specfile entry and stores the qc/rank parameters in the
    container's info.  Raises an Exception when the specfile is already
    present.
    """
    if specfile not in siiContainer.info:
        siiContainer.addSpecfile(specfile, path)
    else:
        # NOTE(review): placeholder message — a descriptive error about
        # the duplicate specfile would help debugging.
        raise Exception('...')
    siiContainer.info[specfile]['qcAttr'] = qcAttr
    siiContainer.info[specfile]['qcLargerBetter'] = qcLargerBetter
    siiContainer.info[specfile]['qcCutoff'] = qcCutoff
    siiContainer.info[specfile]['rankAttr'] = rankAttr
    siiContainer.info[specfile]['rankLargerBetter'] = rankLargerBetter
Prepares the siiContainer for the import of peptide spectrum matching results . Adds entries to siiContainer . container and to siiContainer . info .
56,248
def importPeptideFeatures(fiContainer, filelocation, specfile):
    """Import peptide features into *fiContainer* under *specfile*.

    Supports '.featureXML' files (OpenMS featureFinderCentroided) and
    '.features.tsv' files (Dinosaur).
    """
    if not os.path.isfile(filelocation):
        warnings.warn('The specified file does not exist %s' % (filelocation, ))
        return None
    elif (not filelocation.lower().endswith('.featurexml')
          and not filelocation.lower().endswith('.features.tsv')):
        # NOTE(review): this branch does not return, so an unsupported
        # extension still falls through to addSpecfile below and adds an
        # empty specfile entry — confirm whether a return is missing.
        print('Wrong file extension, %s' % (filelocation, ))
    elif specfile in fiContainer.info:
        print('%s is already present in the SiContainer, import interrupted.' % (specfile, ))
        return None
    fiContainer.addSpecfile(specfile, os.path.dirname(filelocation))
    if filelocation.lower().endswith('.featurexml'):
        featureDict = _importFeatureXml(filelocation)
        for featureId, featureEntryDict in viewitems(featureDict):
            # Collect the rt coordinates of the first convex hull.
            rtArea = set()
            for convexHullEntry in featureEntryDict['convexHullDict']['0']:
                rtArea.update([convexHullEntry[0]])
            fi = maspy.core.Fi(featureId, specfile)
            fi.rt = featureEntryDict['rt']
            fi.rtArea = max(rtArea) - min(rtArea)
            fi.rtLow = min(rtArea)
            fi.rtHigh = max(rtArea)
            fi.charge = featureEntryDict['charge']
            fi.mz = featureEntryDict['mz']
            fi.mh = maspy.peptidemethods.calcMhFromMz(
                featureEntryDict['mz'], featureEntryDict['charge'])
            fi.intensity = featureEntryDict['intensity']
            fi.quality = featureEntryDict['overallquality']
            fi.isMatched = False
            fi.isAnnotated = False
            fi.isValid = True
            fiContainer.container[specfile][featureId] = fi
    elif filelocation.lower().endswith('.features.tsv'):
        featureDict = _importDinosaurTsv(filelocation)
        for featureId, featureEntryDict in viewitems(featureDict):
            fi = maspy.core.Fi(featureId, specfile)
            fi.rt = featureEntryDict['rtApex']
            fi.rtArea = featureEntryDict['rtEnd'] - featureEntryDict['rtStart']
            fi.rtFwhm = featureEntryDict['fwhm']
            fi.rtLow = featureEntryDict['rtStart']
            fi.rtHigh = featureEntryDict['rtEnd']
            fi.charge = featureEntryDict['charge']
            fi.numScans = featureEntryDict['nScans']
            fi.mz = featureEntryDict['mz']
            fi.mh = maspy.peptidemethods.calcMhFromMz(
                featureEntryDict['mz'], featureEntryDict['charge'])
            fi.intensity = featureEntryDict['intensitySum']
            fi.intensityApex = featureEntryDict['intensityApex']
            fi.isMatched = False
            fi.isAnnotated = False
            fi.isValid = True
            fiContainer.container[specfile][featureId] = fi
Import peptide features from a featureXml file as generated for example by the OpenMS node featureFinderCentroided or a features . tsv file by the Dinosaur command line tool .
56,249
def _importDinosaurTsv(filelocation):
    """Read a Dinosaur .features.tsv file into {featureId: {column: value}}.

    Retention-time columns are converted from minutes to seconds and the
    known integer-valued columns are cast to int.
    """
    with io.open(filelocation, 'r', encoding='utf-8') as openFile:
        lines = openFile.readlines()
    # Map column name -> column index from the header row.
    headerDict = dict([[y, x] for (x, y) in enumerate(lines[0].strip().split('\t'))])
    featureDict = dict()
    for linePos, line in enumerate(lines[1:]):
        featureId = str(linePos)
        fields = line.strip().split('\t')
        entryDict = dict()
        for headerName, headerPos in viewitems(headerDict):
            entryDict[headerName] = float(fields[headerPos])
            if headerName in ['rtApex', 'rtEnd', 'rtStart', 'fwhm']:
                entryDict[headerName] *= 60  # minutes -> seconds
            elif headerName in ['charge', 'intensitySum', 'nIsotopes', 'nScans', 'intensityApex']:
                entryDict[headerName] = int(entryDict[headerName])
        featureDict[featureId] = entryDict
    return featureDict
Reads a Dinosaur tsv file .
56,250
def rst_to_html(input_string: str) -> str:
    """Render a reStructuredText string to an HTML body via docutils."""
    overrides = dict(input_encoding='unicode', doctitle_xform=True,
                     initial_header_level=1)
    parts = publish_parts(writer_name='html', source=input_string,
                          settings_overrides=overrides)
    # Only the body fragment is wanted, not the full HTML document.
    return parts['html_body']
Given a string of RST use docutils to generate html
56,251
def get_rst_title(rst_doc: Node) -> Optional[Any]:
    """Return the first title docutils found in the document, or None."""
    for title in rst_doc.traverse(nodes.title):
        return title.astext()
    return None
Given some RST extract what docutils thinks is the title
56,252
def get_rst_excerpt(rst_doc: document, paragraphs: int = 1) -> str:
    """Return the first *paragraphs* paragraphs of a parsed rst doc, joined
    with single spaces."""
    texts = []
    for count, p in enumerate(rst_doc.traverse(paragraph)):
        texts.append(p.astext())
        if count + 1 == paragraphs:
            break
    return ' '.join(texts)
Given rst parse and return a portion
56,253
def requires_password_auth(fn):
    """Decorator for HAPI methods that require password authentication.

    Switches the instance's auth context to password mode before
    delegating to the wrapped method.
    """
    import functools

    # functools.wraps preserves the wrapped method's name/docstring,
    # which the original decorator lost.
    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        self.auth_context = HAPI.auth_context_password
        return fn(self, *args, **kwargs)
    return wrapper
Decorator for HAPI methods that requires the instance to be authenticated with a password
56,254
def requires_api_auth(fn):
    """Decorator for HAPI methods that require HAPI-token authentication.

    Switches the instance's auth context to HAPI-token mode before
    delegating to the wrapped method.
    """
    import functools

    # functools.wraps preserves the wrapped method's name/docstring,
    # which the original decorator lost.
    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        self.auth_context = HAPI.auth_context_hapi
        return fn(self, *args, **kwargs)
    return wrapper
Decorator for HAPI methods that requires the instance to be authenticated with a HAPI token
56,255
def parse(response):
    """Parse a postdata-style ("a=1&b=2") API response into usable data.

    Any 'dummy' key is dropped. When the first key looks like a short
    name/number pair (one non-digit followed by digits), entries are
    regrouped into nested dicts keyed by their name prefix.

    Fixes over the previous Python-2-only version:
    * dict views are not indexable -> use next(iter(...)) for the first key
    * iterating a dict yields keys only -> use .items() to unpack pairs
    * grouped results were accumulated in a list but indexed by string ->
      use a dict
    * regex patterns are now raw strings
    """
    tokens = {r[0]: r[1] for r in [r.split('=') for r in response.split("&")]}
    if 'dummy' in tokens:
        del tokens['dummy']
    first_key = next(iter(tokens))
    if re.match(r'\D\d+$', first_key):
        set_tokens = {}
        for key, value in tokens.items():
            match = re.match(r'^(.+\D)(\d+)$', key)
            if match is not None:
                prefix = match.group(1)
                if prefix not in set_tokens:
                    set_tokens[prefix] = {}
                set_tokens[prefix][match.group(0).rstrip('_')] = value
        tokens = set_tokens
    return tokens
Parse a postdata - style response format from the API into usable data
56,256
def init_chain(self):
    """Autodetect the devices attached to the Controller and initialize a
    JTAGDevice for each.

    Scans the JTAG chain by repeatedly reading 32-bit IDCODEs from the
    data register until a null/terminator IDCODE is seen. Detected devices
    are collected in self._devices. Safe to call more than once; only the
    first call performs the scan.
    """
    if not self._hasinit:
        self._hasinit = True
        self._devices = []
        self.jtag_enable()
        while True:
            # Shift a 32 bit IDCODE out of the data register; rw_dr returns
            # a promise-like callable that yields the value when invoked.
            idcode = self.rw_dr(bitcount=32, read=True, lastbit=False)()
            if idcode in NULL_ID_CODES:
                break
            dev = self.initialize_device_from_id(self, idcode)
            if self._debug:
                print(dev)
            self._devices.append(dev)
            if len(self._devices) >= 128:
                raise JTAGTooManyDevicesError("This is an arbitrary "
                    "limit to deal with breaking infinite loops. If "
                    "you have more devices, please open a bug")
        self.jtag_disable()
        # NOTE(review): the device list is reversed here - presumably to
        # match physical chain order after TDO-first discovery; confirm.
        self._devices.reverse()
Autodetect the devices attached to the Controller and initialize a JTAGDevice for each .
56,257
def _UserUpdateConfigValue(self, configKey, strDescriptor, isDir=True, dbConfigValue=None):
    """Prompt the user to set or update a config value in the database table.

    Always called when no valid entry exists in the table already.

    :param configKey: database key under which the value is stored
    :param strDescriptor: human readable name used in the prompts
    :param isDir: when True the response must be an existing directory and
        is stored as an absolute path
    :param dbConfigValue: existing database value, offered for reuse when
        not None
    :returns: the new (or reused) config value; exits the program when the
        user enters 'x'
    """
    newConfigValue = None
    if dbConfigValue is None:
        prompt = "Enter new {0} or 'x' to exit: ".format(strDescriptor)
    else:
        prompt = "Enter 'y' to use existing {0}, enter a new {0} or 'x' to exit: ".format(strDescriptor)
    # Loop until the user supplies an acceptable value or quits.
    while newConfigValue is None:
        response = goodlogging.Log.Input("CLEAR", prompt)
        if response.lower() == 'x':
            sys.exit(0)
        elif dbConfigValue is not None and response.lower() == 'y':
            # Reusing the existing value: nothing is written back to the db.
            newConfigValue = dbConfigValue
        elif not isDir:
            newConfigValue = response
            self._db.SetConfigValue(configKey, newConfigValue)
        else:
            if os.path.isdir(response):
                # Store directories as absolute paths.
                newConfigValue = os.path.abspath(response)
                self._db.SetConfigValue(configKey, newConfigValue)
            else:
                goodlogging.Log.Info("CLEAR", "{0} is not recognised as a directory".format(response))
    return newConfigValue
Allow user to set or update config values in the database table . This is always called if no valid entry exists in the table already .
56,258
def _GetConfigValue(self, configKey, strDescriptor, isDir=True):
    """Get a configuration value from the database table.

    If no value is found the user is prompted to enter one.

    :param configKey: database key for the value
    :param strDescriptor: human readable name used in log messages
    :param isDir: when True the value must be an existing directory,
        otherwise the program exits
    :returns: the configuration value
    """
    goodlogging.Log.Info("CLEAR", "Loading {0} from database:".format(strDescriptor))
    goodlogging.Log.IncreaseIndent()
    configValue = self._db.GetConfigValue(configKey)
    if configValue is None:
        goodlogging.Log.Info("CLEAR", "No {0} exists in database".format(strDescriptor))
        # Fall back to interactively asking the user for a value.
        configValue = self._UserUpdateConfigValue(configKey, strDescriptor, isDir)
    else:
        goodlogging.Log.Info("CLEAR", "Got {0} {1} from database".format(strDescriptor, configValue))
    if not isDir or os.path.isdir(configValue):
        goodlogging.Log.Info("CLEAR", "Using {0} {1}".format(strDescriptor, configValue))
        goodlogging.Log.DecreaseIndent()
        return configValue
    else:
        # A directory stored in the database may have been removed from
        # disk since it was recorded.
        goodlogging.Log.Info("CLEAR", "Exiting... {0} is not recognised as a directory".format(configValue))
        sys.exit(0)
Get configuration value from database table . If no value found user will be prompted to enter one .
56,259
def _UserUpdateSupportedFormats(self, origFormatList=[]):
    """Interactively edit the list of supported file formats.

    Always called when the supported-formats database table is empty. The
    user can add formats (a leading dot is added when missing), reset the
    list with 'r', finish with 'f' or exit the program with 'x'. When the
    resulting set differs from *origFormatList* the database table is
    purged and re-populated.

    :param origFormatList: formats currently stored in the database
        (never mutated; a copy is edited)
    :returns: the updated formats as a list (consistent with
        _UserUpdateIgnoredDirs, which also returns a list)

    Fixes: an empty response previously crashed on ``response[0]``
    (IndexError) - empty input is now ignored; the return value is a list
    instead of a set for consistency with the sibling method.
    """
    formatList = list(origFormatList)
    inputDone = None
    while inputDone is None:
        prompt = ("Enter new format (e.g. .mp4, .avi), "
                  "'r' to reset format list, "
                  "'f' to finish or "
                  "'x' to exit: ")
        response = goodlogging.Log.Input("CLEAR", prompt)
        if response.lower() == 'x':
            sys.exit(0)
        elif response.lower() == 'f':
            inputDone = 1
        elif response.lower() == 'r':
            formatList = []
        elif response:  # guard: empty input previously raised IndexError
            if response[0] != '.':
                response = '.' + response
            formatList.append(response)
    # De-duplicate and compare against the original database contents.
    formatList = set(formatList)
    origFormatList = set(origFormatList)
    if formatList != origFormatList:
        self._db.PurgeSupportedFormats()
        for fileFormat in formatList:
            self._db.AddSupportedFormat(fileFormat)
    return list(formatList)
Add supported formats to database table . Always called if the database table is empty .
56,260
def _GetSupportedFormats(self):
    """Get supported format values from the database table.

    If no values are found the user is prompted to enter values via
    _UserUpdateSupportedFormats.

    :returns: the collection of supported file format extensions
    """
    goodlogging.Log.Info("CLEAR", "Loading supported formats from database:")
    goodlogging.Log.IncreaseIndent()
    formatList = self._db.GetSupportedFormats()
    if formatList is None:
        goodlogging.Log.Info("CLEAR", "No supported formats exist in database")
        # Empty table: ask the user to populate it interactively.
        formatList = self._UserUpdateSupportedFormats()
    else:
        goodlogging.Log.Info("CLEAR", "Got supported formats from database: {0}".format(formatList))
    goodlogging.Log.Info("CLEAR", "Using supported formats: {0}".format(formatList))
    goodlogging.Log.DecreaseIndent()
    return formatList
Get supported format values from database table . If no values found user will be prompted to enter values for this table .
56,261
def _UserUpdateIgnoredDirs(self, origIgnoredDirs=[]):
    """Prompt the user to edit the list of ignored directory names.

    Always called when the ignored-dirs database table is empty. The user
    can add names, reset the list with 'r', finish with 'f' or exit the
    program with 'x'. When the resulting set differs from
    *origIgnoredDirs* the database table is purged and re-populated.

    :param origIgnoredDirs: directory names currently stored in the
        database (never mutated; a copy is edited)
    :returns: list of ignored directory names (de-duplicated)
    """
    ignoredDirs = list(origIgnoredDirs)
    inputDone = None
    while inputDone is None:
        prompt = "Enter new directory to ignore (e.g. DONE), " "'r' to reset directory list, " "'f' to finish or " "'x' to exit: "
        response = goodlogging.Log.Input("CLEAR", prompt)
        if response.lower() == 'x':
            sys.exit(0)
        elif response.lower() == 'f':
            inputDone = 1
        elif response.lower() == 'r':
            ignoredDirs = []
        else:
            if response is not None:
                ignoredDirs.append(response)
    # De-duplicate and compare against what the database already holds.
    ignoredDirs = set(ignoredDirs)
    origIgnoredDirs = set(origIgnoredDirs)
    if ignoredDirs != origIgnoredDirs:
        self._db.PurgeIgnoredDirs()
        for ignoredDir in ignoredDirs:
            self._db.AddIgnoredDir(ignoredDir)
    return list(ignoredDirs)
Add ignored directories to database table . Always called if the database table is empty .
56,262
def _GetIgnoredDirs(self):
    """Get ignored directory values from the database table.

    If no values are found the user is prompted to enter them. The
    archive directory is always appended to the returned list so archived
    content is never re-processed.

    :returns: list of directory names to skip while scanning
    """
    goodlogging.Log.Info("CLEAR", "Loading ignored directories from database:")
    goodlogging.Log.IncreaseIndent()
    ignoredDirs = self._db.GetIgnoredDirs()
    if ignoredDirs is None:
        goodlogging.Log.Info("CLEAR", "No ignored directories exist in database")
        # Empty table: ask the user to populate it interactively.
        ignoredDirs = self._UserUpdateIgnoredDirs()
    else:
        goodlogging.Log.Info("CLEAR", "Got ignored directories from database: {0}".format(ignoredDirs))
    # The archive directory must always be ignored.
    if self._archiveDir not in ignoredDirs:
        ignoredDirs.append(self._archiveDir)
    goodlogging.Log.Info("CLEAR", "Using ignored directories: {0}".format(ignoredDirs))
    goodlogging.Log.DecreaseIndent()
    return ignoredDirs
Get ignored directories values from database table . If no values found user will be prompted to enter values for this table .
56,263
def _GetDatabaseConfig(self):
    """Get all configuration from the database, prompting where missing.

    Populates self._sourceDir, self._tvDir (unless renaming in place),
    self._archiveDir, self._supportedFormatsList and
    self._ignoredDirsList, then logs a summary of the configuration.

    Fix: corrected the "Configuation" typo in the summary log message.
    """
    goodlogging.Log.Seperator()
    goodlogging.Log.Info("CLEAR", "Getting configuration variables...")
    goodlogging.Log.IncreaseIndent()
    if self._sourceDir is None:
        self._sourceDir = self._GetConfigValue('SourceDir', 'source directory')
    if self._inPlaceRename is False and self._tvDir is None:
        self._tvDir = self._GetConfigValue('TVDir', 'tv directory')
    # NOTE(review): the archive dir is looked up with isDir=False, so it is
    # not validated as an existing directory - presumably it is a name
    # relative to each show folder; confirm.
    self._archiveDir = self._GetConfigValue('ArchiveDir', 'archive directory', isDir=False)
    self._supportedFormatsList = self._GetSupportedFormats()
    self._ignoredDirsList = self._GetIgnoredDirs()
    goodlogging.Log.NewLine()
    goodlogging.Log.Info("CLEAR", "Configuration is:")
    goodlogging.Log.IncreaseIndent()
    goodlogging.Log.Info("CLEAR", "Source directory = {0}".format(self._sourceDir))
    goodlogging.Log.Info("CLEAR", "TV directory = {0}".format(self._tvDir))
    goodlogging.Log.Info("CLEAR", "Supported formats = {0}".format(self._supportedFormatsList))
    goodlogging.Log.Info("CLEAR", "Ignored directory list = {0}".format(self._ignoredDirsList))
    goodlogging.Log.ResetIndent()
Get all configuration from database .
56,264
def _GetSupportedFilesInDir(self, fileDir, fileList, supportedFormatList, ignoreDirList):
    """Recursively collect supported TV files under *fileDir* into *fileList*.

    Files with a supported extension whose show details can be parsed are
    appended to *fileList*; directories in *ignoreDirList* are skipped.
    """
    goodlogging.Log.Info("CLEAR", "Parsing file directory: {0}".format(fileDir))
    if os.path.isdir(fileDir) is not True:
        goodlogging.Log.Info("CLEAR", "Invalid non-directory path given to parse")
        return
    for globPath in glob.glob(os.path.join(fileDir, '*')):
        if util.FileExtensionMatch(globPath, supportedFormatList):
            # Supported extension: keep only files whose details parse.
            newFile = tvfile.TVFile(globPath)
            if newFile.GetShowDetails():
                fileList.append(newFile)
        elif os.path.isdir(globPath):
            if os.path.basename(globPath) in ignoreDirList:
                goodlogging.Log.Info("CLEAR", "Skipping ignored directory: {0}".format(globPath))
            else:
                self._GetSupportedFilesInDir(globPath, fileList, supportedFormatList, ignoreDirList)
        else:
            goodlogging.Log.Info("CLEAR", "Ignoring unsupported file or folder: {0}".format(globPath))
Recursively get all supported files given a root search directory .
56,265
def Run(self):
    """Main entry point for the ClearManager class.

    Parses arguments, optionally prints and/or manually updates database
    tables, loads configuration, optionally extracts compressed files,
    scans the source directory for compatible TV files and hands them to
    the renamer.
    """
    self._GetArgs()
    goodlogging.Log.Info("CLEAR", "Using database: {0}".format(self._databasePath))
    self._db = database.RenamerDB(self._databasePath)
    if self._dbPrint or self._dbUpdate:
        goodlogging.Log.Seperator()
        self._db.PrintAllTables()
        if self._dbUpdate:
            # Manual table editing is only offered after printing them.
            goodlogging.Log.Seperator()
            self._db.ManualUpdateTables()
    self._GetDatabaseConfig()
    if self._enableExtract:
        # Optional pre-pass: extract compressed files before scanning.
        goodlogging.Log.Seperator()
        extractFileList = []
        goodlogging.Log.Info("CLEAR", "Parsing source directory for compressed files")
        goodlogging.Log.IncreaseIndent()
        extract.GetCompressedFilesInDir(self._sourceDir, extractFileList, self._ignoredDirsList)
        goodlogging.Log.DecreaseIndent()
        goodlogging.Log.Seperator()
        extract.Extract(extractFileList, self._supportedFormatsList, self._archiveDir, self._skipUserInputExtract)
    goodlogging.Log.Seperator()
    tvFileList = []
    goodlogging.Log.Info("CLEAR", "Parsing source directory for compatible files")
    goodlogging.Log.IncreaseIndent()
    self._GetSupportedFilesInDir(self._sourceDir, tvFileList, self._supportedFormatsList, self._ignoredDirsList)
    goodlogging.Log.DecreaseIndent()
    tvRenamer = renamer.TVRenamer(
        self._db,
        tvFileList,
        self._archiveDir,
        guideName='EPGUIDES',
        tvDir=self._tvDir,
        inPlaceRename=self._inPlaceRename,
        forceCopy=self._crossSystemCopyEnabled,
        skipUserInput=self._skipUserInputRename)
    tvRenamer.Run()
Main entry point for ClearManager class .
56,266
def flush(self):
    """Force the queue of Primitives to compile, execute on the Controller,
    and fulfill promises with the data returned.

    Clears staged compiler state, compiles the queue (optionally retaining
    per-stage artifacts for debugging), executes the primitives and
    finally synchronises the chain's state machine with the compiler FSM.
    """
    self.stages = []
    self.stagenames = []
    if not self.queue:
        return  # nothing queued: nothing to compile or execute
    if self.print_statistics:
        print("LEN OF QUENE", len(self))
    t = time()
    if self._chain._collect_compiler_artifacts:
        # Keep intermediate compile stages around for inspection.
        self._compile(debug=True, stages=self.stages, stagenames=self.stagenames)
    else:
        self._compile()
    if self.debug:
        print("ABOUT TO EXEC", self.queue)
    if self.print_statistics:
        print("COMPILE TIME", time() - t)
        print("TOTAL BITS OF ALL PRIMS", sum((p.count for p in self.queue if hasattr(p, 'count'))))
    t = time()
    self._chain._controller._execute_primitives(self.queue)
    if self.print_statistics:
        print("EXECUTE TIME", time() - t)
    self.queue = []
    # Mirror the compiler FSM's final state onto the chain's state machine.
    self._chain._sm.state = self._fsm.state
Force the queue of Primitives to compile execute on the Controller and fulfill promises with the data returned .
56,267
def step_impl(context):
    """Compare the step's doc string, line by line, against the log output.

    Raises AssertionError on a length mismatch or any differing line; each
    comparison is echoed for easier debugging of failing scenarios.
    """
    wanted = context.text.split('\n')
    got = context.output
    assert len(wanted) == len(got)
    for want, have in zip(wanted, got):
        print('--\n\texpected: {}\n\tactual: {}'.format(want, have))
        assert want == have
Compares text as written to the log output
56,268
def _ParseShowList ( self , checkOnly = False ) : showTitleList = [ ] showIDList = [ ] csvReader = csv . reader ( self . _allShowList . splitlines ( ) ) for rowCnt , row in enumerate ( csvReader ) : if rowCnt == 0 : for colCnt , column in enumerate ( row ) : if column == 'title' : titleIndex = colCnt if column == self . ID_LOOKUP_TAG : lookupIndex = colCnt else : try : showTitleList . append ( row [ titleIndex ] ) showIDList . append ( row [ lookupIndex ] ) except UnboundLocalError : goodlogging . Log . Fatal ( "EPGUIDE" , "Error detected in EPGUIDES allshows csv content" ) else : if checkOnly and rowCnt > 1 : return True self . _showTitleList = showTitleList self . _showIDList = showIDList return True
Read self . _allShowList as csv file and make list of titles and IDs .
56,269
def _GetAllShowList(self):
    """Populate self._allShowList with the epguides all-shows csv.

    Uses a dated cache file (_epguides_YYYYMMDD.csv) in self._saveDir:
    today's file is read when present, otherwise the list is downloaded,
    validated, written as today's cache and older cache files are removed.
    """
    today = datetime.date.today().strftime("%Y%m%d")
    saveFile = '_epguides_' + today + '.csv'
    saveFilePath = os.path.join(self._saveDir, saveFile)
    if os.path.exists(saveFilePath):
        # Use today's cached copy instead of hitting the network.
        with open(saveFilePath, 'r') as allShowsFile:
            self._allShowList = allShowsFile.read()
    else:
        self._allShowList = util.WebLookup(self.ALLSHOW_IDLIST_URL).strip()
        # Only cache the download when it parses as a valid show list.
        if self._ParseShowList(checkOnly=True):
            with open(saveFilePath, 'w') as allShowsFile:
                goodlogging.Log.Info("EPGUIDE", "Adding new EPGUIDES file: {0}".format(saveFilePath), verbosity=self.logVerbosity)
                allShowsFile.write(self._allShowList)
            # Drop stale cache files from previous days.
            globPattern = '_epguides_????????.csv'
            globFilePath = os.path.join(self._saveDir, globPattern)
            for filePath in glob.glob(globFilePath):
                if filePath != saveFilePath:
                    goodlogging.Log.Info("EPGUIDE", "Removing old EPGUIDES file: {0}".format(filePath), verbosity=self.logVerbosity)
                    os.remove(filePath)
Populates self . _allShowList with the epguides all show info .
56,270
def _GetShowID ( self , showName ) : self . _GetTitleList ( ) self . _GetIDList ( ) for index , showTitle in enumerate ( self . _showTitleList ) : if showName == showTitle : return self . _showIDList [ index ] return None
Get epguides show id for a given show name .
56,271
def _ExtractDataFromShowHtml ( self , html ) : htmlLines = html . splitlines ( ) for count , line in enumerate ( htmlLines ) : if line . strip ( ) == r'<pre>' : startLine = count + 1 if line . strip ( ) == r'</pre>' : endLine = count try : dataList = htmlLines [ startLine : endLine ] dataString = '\n' . join ( dataList ) return dataString . strip ( ) except : raise Exception ( "Show content not found - check EPGuides html formatting" )
Extracts csv show data from epguides html source .
56,272
def _GetEpisodeName ( self , showID , season , episode ) : showInfo = csv . reader ( self . _showInfoDict [ showID ] . splitlines ( ) ) for rowCnt , row in enumerate ( showInfo ) : if rowCnt == 0 : for colCnt , column in enumerate ( row ) : if column == 'season' : seasonIndex = colCnt if column == 'episode' : episodeIndex = colCnt if column == 'title' : titleIndex = colCnt else : try : int ( row [ seasonIndex ] ) int ( row [ episodeIndex ] ) except ValueError : pass else : if int ( row [ seasonIndex ] ) == int ( season ) and int ( row [ episodeIndex ] ) == int ( episode ) : goodlogging . Log . Info ( "EPGUIDE" , "Episode name is {0}" . format ( row [ titleIndex ] ) , verbosity = self . logVerbosity ) return row [ titleIndex ] return None
Get episode name from epguides show info .
56,273
def ShowNameLookUp(self, string):
    """Return the best-matching epguides show title for *string*.

    Generates the show title list first (via _GetTitleList) if it has not
    previously been generated.
    """
    goodlogging.Log.Info("EPGUIDES", "Looking up show name match for string '{0}' in guide".format(string), verbosity=self.logVerbosity)
    self._GetTitleList()
    bestMatch = util.GetBestMatch(string, self._showTitleList)
    return bestMatch
Attempts to find the best match for the given string in the list of epguides show titles . If this list has not previous been generated it will be generated first .
56,274
def EpisodeNameLookUp(self, showName, season, episode):
    """Get the episode name for the given show name, season and episode.

    Show info is downloaded from epguides on first use and cached in
    self._showInfoDict for subsequent lookups.

    :returns: the episode name, or None when the show cannot be resolved

    Fix: the lookup and ``return`` previously lived inside a ``finally``
    block, which silently swallowed any exception raised while
    downloading or parsing the show page; happy-path behaviour is
    unchanged, but errors now propagate.
    """
    goodlogging.Log.Info("EPGUIDE", "Looking up episode name for {0} S{1}E{2}".format(showName, season, episode), verbosity=self.logVerbosity)
    goodlogging.Log.IncreaseIndent()
    episodeName = None
    showID = self._GetShowID(showName)
    if showID is not None:
        if showID not in self._showInfoDict:
            goodlogging.Log.Info("EPGUIDE", "Looking up info for new show: {0}(ID:{1})".format(showName, showID), verbosity=self.logVerbosity)
            urlData = util.WebLookup(self.EPISODE_LOOKUP_URL, {self.EP_LOOKUP_TAG: showID})
            self._showInfoDict[showID] = self._ExtractDataFromShowHtml(urlData)
        else:
            goodlogging.Log.Info("EPGUIDE", "Reusing show info previous obtained for: {0}({1})".format(showName, showID), verbosity=self.logVerbosity)
        episodeName = self._GetEpisodeName(showID, season, episode)
    goodlogging.Log.DecreaseIndent()
    return episodeName
Get the episode name corresponding to the given show name, season number and episode number.
56,275
def private_path(self):
    """Return (creating it if necessary) a directory for arbitrary data.

    The directory lives at <repo>/.hg/.private; it already existing is
    not an error.
    """
    path = os.path.join(self.path, '.hg', '.private')
    try:
        os.mkdir(path)
    except FileExistsError:
        # Equivalent to the errno.EEXIST check: only "already exists"
        # is tolerated; any other OSError still propagates.
        pass
    return path
Get the path to a directory which can be used to store arbitrary data
56,276
def bookmarks(self):
    """Return the list of bookmark names in this repository."""
    output = self._command([HG, 'bookmarks']).decode(self.encoding, 'replace')
    if output.startswith('no bookmarks set'):
        return []
    names = []
    for line in output.splitlines():
        match = bookmarks_rx.match(line)
        assert match, 'unexpected output: ' + line
        names.append(match.group('name'))
    return names
Get list of bookmarks
56,277
def content(self):
    """Return the file contents, reading and caching them on first access.

    Fix: the cache check used truthiness (``if not self._content``), which
    re-read the file on every access whenever the contents were empty;
    testing against None caches an empty read too.
    """
    if self._content is None:
        self._content = self._read()
    return self._content
Get the file contents .
56,278
def config(self):
    """Build a config.Configuration object from the parsed file contents.

    Iterates the namespaces found in the file and copies each option onto
    the corresponding Configuration namespace. Unknown namespaces or
    options are skipped silently unless strict mode is enabled.

    :raises exc.NamespaceNotRegistered: in strict mode, for a namespace
        the Configuration does not know about
    :raises exc.OptionNotRegistered: in strict mode, for an option the
        namespace does not know about
    """
    conf = config.Configuration()
    for namespace in self.namespaces:
        if not hasattr(conf, namespace):
            if not self._strict:
                continue  # non-strict: ignore unregistered namespaces
            raise exc.NamespaceNotRegistered(
                "The namespace {0} is not registered.".format(namespace)
            )
        name = getattr(conf, namespace)
        for item, value in compat.iteritems(self.items(namespace)):
            if not hasattr(name, item):
                if not self._strict:
                    continue  # non-strict: ignore unregistered options
                raise exc.OptionNotRegistered(
                    "The option {0} is not registered.".format(item)
                )
            setattr(name, item, value)
    return conf
Get a Configuration object from the file contents .
56,279
def _read(self):
    """Open the backing file and return its contents as unicode text."""
    with open(self.path, 'r') as file_handle:
        return compat.unicode(file_handle.read())
Open the file and return its contents .
56,280
async def ask(self, body, quick_replies=None, options=None, user=None):
    """Ask the user a question, optionally with predefined quick replies."""
    await self.send_text_message_to_all_interfaces(
        recipient=user,
        text=body,
        quick_replies=quick_replies,
        options=options,
    )
    # NOTE(review): `any` here is presumably a module (e.g. protobuf's
    # any), not the builtin - confirm against the file's imports.
    return any.Any()
simple ask with predefined quick replies
56,281
async def say(self, body, user, options):
    """Send *body* to *user* across all interfaces and return the result."""
    result = await self.send_text_message_to_all_interfaces(
        recipient=user,
        text=body,
        options=options,
    )
    return result
say something to user
56,282
def connect(self, protocolFactory):
    """Start the process and connect *protocolFactory* to it.

    :returns: a Deferred that eventually fires with the connected protocol
    """
    d = self._startProcess()
    d.addCallback(self._connectRelay, protocolFactory)
    d.addCallback(self._startRelay)
    return d
Starts a process and connect a protocol to it .
56,283
def _startProcess(self):
    """Use the inductor to start the process we want to relay data from.

    :returns: a Deferred fired once the process protocol is connected
    """
    connected = defer.Deferred()
    protocol = RelayProcessProtocol(connected)
    self.inductor.execute(protocol, *self.inductorArgs)
    return connected
Use the inductor to start the process we want to relay data from .
56,284
def _connectRelay(self, process, protocolFactory):
    """Set up and connect the protocol we want to relay to the process.

    Called automatically once the process has started.

    :returns: a Deferred firing with the connected protocol, or a failed
        Deferred when connector setup raises

    Fix: the previous bare ``except:`` also trapped SystemExit and
    KeyboardInterrupt; narrowed to Exception (defer.fail() still captures
    the active exception).
    """
    try:
        wf = _WrappingFactory(protocolFactory)
        connector = RelayConnector(process, wf, self.timeout, self.inductor.reactor)
        connector.connect()
    except Exception:
        return defer.fail()
    return wf._onConnection
Set up and connect the protocol we want to relay to the process . This method is automatically called when the process is started and we are ready to relay through it .
56,285
def _startRelay(self, client):
    """Start relaying data between the process and the connected protocol.

    Called once the protocol is connected. Any data the process emitted
    before the protocol attached is replayed first, then the protocol is
    installed as the live sink. A callback registered on the process's
    ended-Deferred tears down the relay transport and its connector when
    the process terminates for any reason.

    :param client: the freshly connected protocol instance
    :returns: *client*, for callback chaining
    """
    process = client.transport.connector.process
    # Replay output buffered while no protocol was attached.
    for _, data in process.data:
        client.dataReceived(data)
    process.protocol = client

    @process._endedDeferred.addBoth
    def stopRelay(reason):
        # Tear down the relay transport and connector on process exit.
        relay = client.transport
        relay.loseConnection(reason)
        connector = relay.connector
        connector.connectionLost(reason)

    return client
Start relaying data between the process and the protocol . This method is called when the protocol is connected .
56,286
def connectRelay(self):
    """Build the target protocol and attach it to this relay transport."""
    built = self.connector.buildProtocol(None)
    self.protocol = built
    self.connected = True
    built.makeConnection(self)
Builds the target protocol and connects it to the relay transport .
56,287
def childDataReceived(self, childFD, data):
    """Relay process output to the attached protocol, or buffer it.

    Before a protocol is attached, (fd, data) pairs are queued in
    self.data so they can be replayed later.
    """
    protocol = getattr(self, 'protocol', None)
    if not protocol:
        self.data.append((childFD, data))
    else:
        protocol.dataReceived(data)
Relay data received on any file descriptor to the process
56,288
def publish(self, user, provider, obj, comment, **kwargs):
    """Publish *obj* with *comment* through the given provider's backend.

    user - django User or UserSocialAuth instance
    provider - name of publisher provider
    obj - sharing object
    comment - string
    kwargs - extra context passed through to the backend
    """
    social_user = self._get_social_user(user, provider)
    backend = self.get_backend(social_user, provider, context=kwargs)
    return backend.publish(obj, comment)
user - django User or UserSocialAuth instance provider - name of publisher provider obj - sharing object comment - string
56,289
def check(self, user, provider, permission, **kwargs):
    """Return True when *user* holds *permission* on *provider*.

    user - django User or UserSocialAuth instance
    provider - name of publisher provider
    permission - backend-specific permission descriptor (e.g. a binary
        mask for vk, a scope string for facebook)

    A missing social user (None, or SocialUserDoesNotExist) yields False.
    """
    try:
        social_user = self._get_social_user(user, provider)
    except SocialUserDoesNotExist:
        return False
    if not social_user:
        return False
    backend = self.get_backend(social_user, provider, context=kwargs)
    return backend.check(permission)
user - django User or UserSocialAuth instance provider - name of publisher provider permission - if backend maintains check permissions vk - binary mask in int format facebook - scope string
56,290
def recognize_byte(self, image, timeout=10):
    """Process a byte image buffer through the alpr subprocess.

    :param image: raw image bytes piped to the alpr tool's stdin
    :param timeout: seconds to wait for the subprocess before killing it
    :returns: list of {candidate: confidence} dicts, one entry per plate,
        or None when the subprocess times out

    Fix: the *timeout* parameter was accepted but ignored - a hard-coded
    10 was passed to communicate(); it is now honoured.
    """
    result = []
    alpr = subprocess.Popen(
        self._cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    try:
        stdout, stderr = alpr.communicate(input=image, timeout=timeout)
        stdout = io.StringIO(str(stdout, 'utf-8'))
    except subprocess.TimeoutExpired:
        _LOGGER.error("Alpr process timeout!")
        alpr.kill()
        return None

    tmp_res = {}
    while True:
        line = stdout.readline()
        if not line:
            # End of output: flush the plate currently being collected.
            if len(tmp_res) > 0:
                result.append(tmp_res)
            break
        new_plate = self.__re_plate.search(line)
        new_result = self.__re_result.search(line)
        if new_plate and len(tmp_res) > 0:
            # A new "plate" header closes the previous candidate block.
            result.append(tmp_res)
            tmp_res = {}
            continue
        if new_result:
            try:
                tmp_res[new_result.group(1)] = float(new_result.group(2))
            except ValueError:
                continue
    _LOGGER.debug("Process alpr with result: %s", result)
    return result
Process a byte image buffer .
56,291
def finished(finished_status, update_interval, table, status_column, edit_at_column):
    """Build a sqlalchemy select statement for all finished tasks.

    A row qualifies when its status is at least *finished_status* AND it
    was edited within the last *update_interval* seconds.
    """
    criteria = [
        status_column >= finished_status,
        edit_at_column >= x_seconds_before_now(update_interval),
    ]
    return select([table]).where(and_(*criteria))
Create text sql statement query for sqlalchemy that getting all finished task .
56,292
def unfinished(finished_status, update_interval, table, status_column, edit_at_column):
    """Build a sqlalchemy select statement for all unfinished tasks.

    A row qualifies when its status is below *finished_status* OR it was
    last edited more than *update_interval* seconds ago.
    """
    criteria = [
        status_column < finished_status,
        edit_at_column < x_seconds_before_now(update_interval),
    ]
    return select([table]).where(or_(*criteria))
Create text sql statement query for sqlalchemy that getting all unfinished task .
56,293
def find_nearest(x, x0) -> Tuple[int, Any]:
    """Return index/indices and value(s) in *x* closest to *x0*.

    Does NOT assume sorted input; works for numbers and datetimes.

    :raises ValueError: for empty inputs, 2-D x0, or None/NaN entries in x0
    """
    x = np.asanyarray(x)
    x0 = np.atleast_1d(x0)

    if x.size == 0 or x0.size == 0:
        raise ValueError('empty input(s)')
    if x0.ndim not in (0, 1):
        raise ValueError('2-D x0 not handled yet')

    ind = np.empty_like(x0, dtype=int)
    for i, xi in enumerate(x0):
        usable = xi is not None and (
            isinstance(xi, (datetime.datetime, datetime.date, np.datetime64))
            or np.isfinite(xi)
        )
        if not usable:
            raise ValueError('x0 must NOT be None or NaN to avoid surprising None return value')
        ind[i] = np.nanargmin(abs(x - xi))

    # Squeeze back to scalars when a scalar x0 was given.
    return ind.squeeze()[()], x[ind].squeeze()[()]
This find_nearest function does NOT assume sorted input
56,294
def ensure_context_attribute_exists(context, name, default_value=None):
    """Ensure the behave context has attribute *name*.

    When missing, the attribute is created with *default_value*; an
    existing value is left untouched.
    """
    if hasattr(context, name):
        return
    setattr(context, name, default_value)
Ensure a behave resource exists as attribute in the behave context . If this is not the case the attribute is created by using the default_value .
56,295
def ensure_workdir_exists(context):
    """Ensure the work directory exists and record it on the context.

    The absolute workdir location is stored as ``context.workdir``.
    """
    ensure_context_attribute_exists(context, "workdir", None)
    workdir = context.workdir
    if not workdir:
        workdir = os.path.abspath(WORKDIR)
        context.workdir = workdir
    pathutil.ensure_directory_exists(workdir)
Ensures that the work directory exists . In addition the location of the workdir is stored as attribute in the context object .
56,296
def del_by_idx(tree, idxs):
    """Delete a key entry addressed by numeric indexes into subtree lists.

    An empty index path clears this node's item; otherwise recurse into
    the addressed subtree and prune it once it has no subtrees left.
    """
    if not idxs:
        tree['item'] = None
        tree['subtrees'] = []
        return
    head, rest = idxs[0], idxs[1:]
    child = tree['subtrees'][head][1]
    del_by_idx(child, rest)
    if not child['subtrees']:
        del tree['subtrees'][head]
Delete a key entry based on numerical indexes into subtree lists .
56,297
def find_in_tree(tree, key, perfect=False):
    """Locate *key* (a sequence of path components) in a dictionary tree.

    Unless *perfect* is set, '*' entries act as wildcards that match any
    component (including the remainder when the key is exhausted).

    :returns: (item, trace) where trace is the tuple of subtree indexes
        followed to reach the item
    :raises KeyError: when the key path does not exist
    """
    subtrees = tree['subtrees']
    if len(key) == 0:
        if tree['item'] is not None:
            return tree['item'], ()
        # No item here: a wildcard child may still provide one.
        for pos in range(len(subtrees)):
            if not perfect and subtrees[pos][0] == '*':
                item, trace = find_in_tree(subtrees[pos][1], (), perfect)
                return item, (pos,) + trace
        raise KeyError(key)
    head, tail = key[0], key[1:]
    for pos in range(len(subtrees)):
        label = subtrees[pos][0]
        if label == head or (not perfect and label == '*'):
            try:
                item, trace = find_in_tree(subtrees[pos][1], tail, perfect)
                return item, (pos,) + trace
            except KeyError:
                # This branch dead-ends; try the next candidate subtree.
                pass
    raise KeyError(key)
Helper to perform find in dictionary tree .
56,298
def find(self, key, perfect=False):
    """Find a key path in the tree, matching wildcards unless *perfect*.

    :returns: (value, index-trace) as produced by find_in_tree
    :raises KeyError: if the key path doesn't exist in the tree
    """
    return find_in_tree(self.root, key, perfect)
Find a key path in the tree matching wildcards . Return value for key along with index path through subtree lists to the result . Throw KeyError if the key path doesn t exist in the tree .
56,299
def _purge_unreachable(self, key):
    """Purge every stored key path dominated by *key*.

    Called before inserting a new key path so that paths made unreachable
    by the insertion do not linger in the tree.
    """
    doomed = [path for path in self if dominates(key, path)]
    for path in doomed:
        _, idxs = find_in_tree(self.root, path, perfect=True)
        del_by_idx(self.root, idxs)
Purge unreachable dominated key paths before inserting a new key path .