| idx (int64, 0–63k) | question (string, lengths 53–5.28k) | target (string, lengths 5–805) |
|---|---|---|
| 3,300 | `def orders(self): return [order_cmd for order_cmd in dir(self.handler) if getattr(getattr(self.handler, order_cmd), "bot_order", False)]` | Return the methods tagged as orders in the handler. |
| 3,301 | `def set(self, name, value): flag = self.flags[name] self._value = (self.value \| flag) if value else (self.value & ~flag)` | Sets the value of the field name to value, which is True or False. |
| 3,302 | `def to_dict(self): return dict((k, self.get(k)) for k in self.flags.keys())` | Returns this Flags object's fields as a dictionary. |
| 3,303 | `def add(self, host_value): host_obj = self._host_factory(host_value) if self._get_match(host_obj) is not None: return self._add_new(host_obj)` | Add the given value to the collection. |
| 3,304 | `def _get_match(self, host_object): i = self._get_insertion_point(host_object) potential_match = None try: potential_match = self[i - 1] except IndexError: pass if host_object.is_match(potential_match): return potential_match return None` | Get an item matching the given host object. |
| 3,305 | `def _add_new(self, host_object): i = self._get_insertion_point(host_object) for listed in self[i:]: if not listed.is_subdomain(host_object): break self.hosts.pop(i) self.hosts.insert(i, host_object.to_unicode())` | Add a new host to the collection. |
| 3,306 | `def method_descriptor(descriptor: str) -> MethodDescriptor: end_para = descriptor.find(')') returns = descriptor[end_para + 1:] args = descriptor[1:end_para] return MethodDescriptor(parse_descriptor(returns)[0], parse_descriptor(args), returns, args, descriptor)` | Parses a method descriptor as described in section 4.3.3 of the JVM specification. |
| 3,307 | `def make_field(self, **kwargs): kwargs['required'] = False kwargs['allow_null'] = True return self.field_class(**kwargs)` | Create a serializer field. |
| 3,308 | `def bind(self, name, filterset): if self.name is not None: name = self.name self.field.bind(name, self)` | Attach the filter to a filterset. |
| 3,309 | `def get_base_dir(): return os.path.split(os.path.abspath(os.path.dirname(__file__)))[0]` | Return the base directory. |
| 3,310 | `def is_valid_url(url): regex = re.compile(r'^(?:http\|ftp)s?://' r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?\|[A-Z0-9-]{2,}\.?)\|' r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' r'(?::\d+)?' r'(?:/?\|[/?]\S+)$', re.IGNORECASE) return bool(regex.match(url))` | Check if a URL is valid. |
| 3,311 | `def generate_random_string(length=8): char_set = string.ascii_uppercase + string.digits return ''.join(random.sample(char_set * (length - 1), length))` | Generate a random string. |
| 3,312 | `def filter_stopwords(str): STOPWORDS = ['a', 'able', 'about', 'across', 'after', 'all', 'almost', 'also', 'am', 'among', 'an', 'and', 'any', 'are', 'as', 'at', 'be', 'because', 'been', 'but', 'by', 'can', 'cannot', 'could', 'dear', 'did', 'do', 'does', 'either', 'else', 'ever', 'every', 'for', 'from', 'get', 'got', 'had', 'has', 'have', 'he', 'her', 'hers', 'him', 'his', 'how', 'however', 'i', 'if', 'in', 'into', 'is', 'it', 'its', 'just', 'least', 'let', 'like', 'likely', 'may', 'me', 'might', 'most', 'must', 'my', 'neither', 'no', 'nor', 'not', 'of', 'off', 'often', 'on', 'only', 'or', 'other', 'our', 'own', 'rather', 'said', 'say', 'says', 'she', 'should', 'since', 'so', 'some', 'than', 'that', 'the', 'their', 'them', 'then', 'there', 'these', 'they', 'this', 'tis', 'to', 'too', 'twas', 'us', 'wants', 'was', 'we', 'were', 'what', 'when', 'where', 'which', 'while', 'who', 'whom', 'why', 'will', 'with', 'would', 'yet', 'you', 'your'] return [t for t in str.split() if t.lower() not in STOPWORDS]` | Stop-word filter; returns a list of tokens. |
| 3,313 | `def convert_bytes(bytes): bytes = float(bytes) if bytes >= 1099511627776: terabytes = bytes / 1099511627776 size = '%.2fT' % terabytes elif bytes >= 1073741824: gigabytes = bytes / 1073741824 size = '%.2fG' % gigabytes elif bytes >= 1048576: megabytes = bytes / 1048576 size = '%.2fM' % megabytes elif bytes >= 1024: kilobytes = bytes / 1024 size = '%.2fK' % kilobytes else: size = '%.2fb' % bytes return size` | Convert bytes into a human-readable string. |
| 3,314 | `def find(self, node, path): return node.find(path, namespaces=self.namespaces)` | Wrapper for lxml's find. |
| 3,315 | `def xpath(self, node, path): return node.xpath(path, namespaces=self.namespaces)` | Wrapper for lxml's xpath. |
| 3,316 | `def mkdir(self, path): resp = self._sendRequest("MKCOL", path) if resp.status_code != 201: if resp.status_code == 409: raise YaDiskException(409, "Part of path {} does not exists".format(path)) elif resp.status_code == 405: raise YaDiskException(405, "Path {} already exists".format(path)) else: raise YaDiskException(resp.status_code, resp.content)` | Create a directory. All parent parts of the path must exist. Raises an exception when the path already exists. |
| 3,317 | `def rm(self, path): resp = self._sendRequest("DELETE", path) if not (resp.status_code in (200, 204)): raise YaDiskException(resp.status_code, resp.content)` | Delete a file or directory. |
| 3,318 | `def upload(self, file, path): with open(file, "rb") as f: resp = self._sendRequest("PUT", path, data=f) if resp.status_code != 201: raise YaDiskException(resp.status_code, resp.content)` | Upload a file. |
| 3,319 | `def download(self, path, file): resp = self._sendRequest("GET", path) if resp.status_code == 200: with open(file, "wb") as f: f.write(resp.content) else: raise YaDiskException(resp.status_code, resp.content)` | Download a remote file to disk. |
| 3,320 | `def publish(self, path): def parseContent(content): root = ET.fromstring(content) prop = root.find(".//d:prop", namespaces=self.namespaces) return prop.find("{urn:yandex:disk:meta}public_url").text.strip() data = _check_dst_absolute(path) resp = self._sendRequest("PROPPATCH", addUrl=path, data=data) if resp.status_code == 207: return parseContent(resp.content) else: raise YaDiskException(resp.status_code, resp.content)` | Publish a file or folder and return its public URL. |
| 3,321 | `def write_instruction(fout, start_pos, ins): opcode, operands = ins.opcode, ins.operands fmt_operands = opcode_table[opcode]['operands'] if ins.wide: fout.write(pack('>B', 0xC4)) fout.write(pack('>B', opcode)) fout.write(pack('>H', operands[0].value)) if opcode == 0x84: fout.write(pack('>h', operands[1].value)) elif fmt_operands: fout.write(pack('>B', opcode)) for i, (fmt, _) in enumerate(fmt_operands): fout.write(fmt.value.pack(operands[i].value)) elif opcode == 0xAB: fout.write(pack('>B', opcode)) padding = 4 - (start_pos + 1) % 4 padding = padding if padding != 4 else 0 fout.write(pack(f'{padding}x')) fout.write(pack('>ii', operands[1].value, len(operands[0]))) for key in sorted(operands[0].keys()): fout.write(pack('>ii', key, operands[0][key])) elif opcode == 0xAA: fout.write(pack('>B', opcode)) padding = 4 - (start_pos + 1) % 4 padding = padding if padding != 4 else 0 fout.write(pack(f'{padding}x')) fout.write(pack(f'>iii{len(operands) - 3}i', operands[0].value, operands[1].value, operands[2].value, *(o.value for o in operands[3:]))) else: fout.write(pack('>B', opcode))` | Writes a single instruction of opcode with operands to fout. |
| 3,322 | `def read_instruction(fio, start_pos): op = fio.read(1) if not op: return None op = ord(op) ins = opcode_table[op] operands = ins['operands'] name = ins['mnemonic'] final_operands = [] if operands: for fmt, type_ in operands: final_operands.append(Operand(type_, fmt.value.unpack(fio.read(fmt.value.size))[0])) elif op == 0xAB: padding = 4 - (start_pos + 1) % 4 padding = padding if padding != 4 else 0 fio.read(padding) default, npairs = unpack('>ii', fio.read(8)) pairs = {} for _ in repeat(None, npairs): match, offset = unpack('>ii', fio.read(8)) pairs[match] = offset final_operands.append(pairs) final_operands.append(Operand(OperandTypes.BRANCH, default)) elif op == 0xAA: padding = 4 - (start_pos + 1) % 4 padding = padding if padding != 4 else 0 fio.read(padding) default, low, high = unpack('>iii', fio.read(12)) final_operands.append(Operand(OperandTypes.BRANCH, default)) final_operands.append(Operand(OperandTypes.LITERAL, low)) final_operands.append(Operand(OperandTypes.LITERAL, high)) for _ in repeat(None, high - low + 1): offset = unpack('>i', fio.read(4))[0] final_operands.append(Operand(OperandTypes.BRANCH, offset)) elif op == 0xC4: real_op = unpack('>B', fio.read(1))[0] ins = opcode_table[real_op] name = ins['mnemonic'] final_operands.append(Operand(OperandTypes.LOCAL_INDEX, unpack('>H', fio.read(2))[0])) if real_op == 0x84: final_operands.append(Operand(OperandTypes.LITERAL, unpack('>H', fio.read(2))[0])) return Instruction(name, op, final_operands, start_pos)` | Reads a single instruction from fio and returns it, or None if the stream is empty. |
| 3,323 | `def load_bytecode_definitions(*, path=None) -> dict: if path is not None: with open(path, 'rb') as file_in: j = json.load(file_in) else: try: j = json.loads(pkgutil.get_data('jawa.util', 'bytecode.json')) except json.JSONDecodeError: return {} for definition in j.values(): operands = definition['operands'] if operands: definition['operands'] = [[getattr(OperandFmts, oo[0]), OperandTypes[oo[1]]] for oo in operands] return {**j, **{v['op']: v for v in j.values()}}` | Load bytecode definitions from a JSON file. |
| 3,324 | `def size_on_disk(self, start_pos=0): size = 1 fmts = opcode_table[self.opcode]['operands'] if self.wide: size += 2 if self.opcode == 0x84: size += 2 elif fmts: for fmt, _ in fmts: size += fmt.value.size elif self.opcode == 0xAB: padding = 4 - (start_pos + 1) % 4 padding = padding if padding != 4 else 0 size += padding size += 8 size += len(self.operands[0]) * 8 elif self.opcode == 0xAA: raise NotImplementedError() return size` | Returns the size of this instruction and its operands when packed. start_pos is required for the tableswitch and lookupswitch instructions, as their padding depends on alignment. |
| 3,325 | `def wide(self): if not opcode_table[self.opcode].get('can_be_wide'): return False if self.operands[0].value >= 255: return True if self.opcode == 0x84: if self.operands[1].value >= 255: return True return False` | True if this instruction needs to be prefixed by the WIDE opcode. |
| 3,326 | `def get_attachment(self, file_path): try: file_ = open(file_path, 'rb') attachment = MIMEBase('application', 'octet-stream') attachment.set_payload(file_.read()) file_.close() encoders.encode_base64(attachment) attachment.add_header('Content-Disposition', 'attachment', filename=os.path.basename(file_path)) return attachment except IOError: traceback.print_exc() message = ('The requested file could not be read. Maybe wrong ' 'permissions?') print >> sys.stderr, message sys.exit(6)` | Get a file as a MIMEBase message. |
| 3,327 | `def lookup(self, host_value): try: host_object = self._host_factory(host_value) except InvalidHostError: return None result = self._get_match_and_classification(host_object) host_item, classification = result if host_item is not None: return AddressListItem(host_item.to_unicode(), self, classification) return None` | Get a host value matching the given value. |
| 3,328 | `def any_match(self, urls): return any(urlparse(u).hostname in self for u in urls)` | Check if any of the given URLs has a matching host. |
| 3,329 | `def lookup_matching(self, urls): hosts = (urlparse(u).hostname for u in urls) for val in hosts: item = self.lookup(val) if item is not None: yield item` | Get matching hosts for the given URLs. |
| 3,330 | `def filter_matching(self, urls): for url in urls: if urlparse(url).hostname in self: yield url` | Get URLs with hosts matching any listed ones. |
| 3,331 | `def decode_modified_utf8(s: bytes) -> str: s = bytearray(s) buff = [] buffer_append = buff.append ix = 0 while ix < len(s): x = s[ix] ix += 1 if x >> 7 == 0: pass elif x >> 6 == 6: y = s[ix] ix += 1 x = ((x & 0x1F) << 6) + (y & 0x3F) elif x >> 4 == 14: y, z = s[ix:ix + 2] ix += 2 x = ((x & 0xF) << 12) + ((y & 0x3F) << 6) + (z & 0x3F) elif x == 0xED: v, w, x, y, z = s[ix:ix + 6] ix += 5 x = 0x10000 + (((v & 0x0F) << 16) + ((w & 0x3F) << 10) + ((y & 0x0F) << 6) + (z & 0x3F)) elif x == 0xC0 and s[ix] == 0x80: ix += 1 x = 0 buffer_append(x) return u''.join(chr(b) for b in buff)` | Decodes a bytestring containing modified UTF-8 as defined in section 4.4.7 of the JVM specification. |
| 3,332 | `def encode_modified_utf8(u: str) -> bytearray: final_string = bytearray() for c in [ord(char) for char in u]: if c == 0x00 or (0x80 < c < 0x7FF): final_string.extend([(0xC0 \| (0x1F & (c >> 6))), (0x80 \| (0x3F & c))]) elif c < 0x7F: final_string.append(c) elif 0x800 < c < 0xFFFF: final_string.extend([(0xE0 \| (0x0F & (c >> 12))), (0x80 \| (0x3F & (c >> 6))), (0x80 \| (0x3F & c))]) return final_string` | Encodes a unicode string as modified UTF-8 as defined in section 4.4.7 of the JVM specification. |
| 3,333 | `def unregister(self, name, func): try: templatehook = self._registry[name] except KeyError: return templatehook.unregister(func)` | Remove a previously registered callback. |
| 3,334 | `def unregister_all(self, name): try: templatehook = self._registry[name] except KeyError: return templatehook.unregister_all()` | Remove all callbacks. |
| 3,335 | `def deprecated_name(name): def decorator(func): def func_wrapper(self): if hasattr(self, name): return getattr(self, name) else: return func(self) return func_wrapper return decorator` | Allow old method names for backwards compatibility. |
| 3,336 | `def get(code): instance = _cache.get(code) if instance is None: url = '{prefix}{code}.gml?download'.format(prefix=EPSG_IO_URL, code=code) xml = requests.get(url).content root = ET.fromstring(xml) class_for_tag = {GML_NS + 'CartesianCS': CartesianCS, GML_NS + 'GeodeticCRS': GeodeticCRS, GML_NS + 'ProjectedCRS': ProjectedCRS, GML_NS + 'CompoundCRS': CompoundCRS, GML_NS + 'BaseUnit': UOM} if root.tag in class_for_tag: instance = class_for_tag[root.tag](root) else: raise ValueError('Unsupported code type: {}'.format(root.tag)) _cache[code] = instance return instance` | Return an object that corresponds to the given EPSG code. |
| 3,337 | `def id(self): id = self.element.attrib[GML_NS + 'id'] code = id.split('-')[-1] return code` | The EPSG code for this CRS. |
| 3,338 | `def as_html(self): url = '{prefix}{code}.html?download'.format(prefix=EPSG_IO_URL, code=self.id) return requests.get(url).text` | Return the OGC WKT which corresponds to the CRS, as HTML. |
| 3,339 | `def as_proj4(self): url = '{prefix}{code}.proj4?download'.format(prefix=EPSG_IO_URL, code=self.id) return requests.get(url).text.strip()` | Return the PROJ.4 string which corresponds to the CRS. |
| 3,340 | `def remove(self, method: Method): self._table = [fld for fld in self._table if fld is not method]` | Removes a method from the table by identity. |
| 3,341 | `def create(self, name: str, descriptor: str, code: CodeAttribute = None) -> Method: method = Method(self._cf) name = self._cf.constants.create_utf8(name) descriptor = self._cf.constants.create_utf8(descriptor) method._name_index = name.index method._descriptor_index = descriptor.index method.access_flags.acc_public = True if code is not None: method.attributes.create(CodeAttribute) self.append(method) return method` | Creates a new method from name and descriptor. If code is not None, adds a Code attribute to this method. |
| 3,342 | `def unpack(self, source: IO): method_count = unpack('>H', source.read(2))[0] for _ in repeat(None, method_count): method = Method(self._cf) method.unpack(source) self.append(method)` | Read the MethodTable from the file-like object source. |
| 3,343 | `def pack(self, out: IO): out.write(pack('>H', len(self))) for method in self._table: method.pack(out)` | Write the MethodTable to the file-like object out. |
| 3,344 | `def unpack(self, info): self.max_stack, self.max_locals, c_len = info.unpack('>HHI') self._code = info.read(c_len) ex_table_len = info.u2() for _ in repeat(None, ex_table_len): self.exception_table.append(CodeException(*info.unpack('>HHHH'))) self.attributes = AttributeTable(self.cf, parent=self) self.attributes.unpack(info)` | Read the CodeAttribute from the byte string info. |
| 3,345 | `def pack(self): with io.BytesIO() as file_out: file_out.write(pack('>HHI', self.max_stack, self.max_locals, len(self._code))) file_out.write(self._code) file_out.write(pack('>H', len(self.exception_table))) for exception in self.exception_table: file_out.write(pack('>HHHH', *exception)) self.attributes.pack(file_out) return file_out.getvalue()` | The CodeAttribute in packed byte string form. |
| 3,346 | `def _volume_command(ramp, volume): if volume is not None: ramp.set_volume(float(volume)) else: print ramp.volume` | Set the volume if a level is provided; otherwise print the current volume level. |
| 3,347 | `def _status_command(cast, ramp): if ramp.is_playing: play_symbol = u'\u25B6' else: play_symbol = u'\u2759\u2759' print u' %s %s by %s from %s via %s, %s of %s' % (play_symbol, ramp.title, ramp.artist, ramp.album, cast.app.app_id, _to_minutes(ramp.current_time), _to_minutes(ramp.duration))` | Build a nice status message and print it to stdout. |
| 3,348 | `def main(): opts = docopt(__doc__, version="cast 0.1") cast = pychromecast.PyChromecast(CHROMECAST_HOST) ramp = cast.get_protocol(pychromecast.PROTOCOL_RAMP) time.sleep(SLEEP_TIME) if ramp is None: print 'Chromecast is not up or current app does not handle RAMP.' return 1 if opts['next']: ramp.next() elif opts['pause']: ramp.pause() elif opts['play']: ramp.play() elif opts['toggle']: ramp.playpause() elif opts['seek']: ramp.seek(opts['<second>']) elif opts['rewind']: ramp.rewind() elif opts['status']: _status_command(cast, ramp) elif opts['volume']: _volume_command(ramp, opts['<value>']) time.sleep(SLEEP_TIME)` | Read the options given on the command line and perform the required actions. |
| 3,349 | `def untokenize(iterable): ut = Untokenizer() out = ut.untokenize(iterable) if ut.encoding is not None: out = out.encode(ut.encoding) return out` | Transform tokens back into Python source code. Returns a bytes object, encoded using the ENCODING token, which is the first token sequence output by tokenize. |
| 3,350 | `def get_powers_of_2(_sum): return [2 ** y for y, x in enumerate(bin(_sum)[:1:-1]) if int(x)]` | Get the powers of 2 that sum to the given number. |
| 3,351 | `def _query(self, host_object): host_to_query = host_object.relative_domain query_name = host_to_query.derelativize(self._query_suffix) try: return query(query_name) except NXDOMAIN: return None` | Query the DNSBL service for the given value. |
| 3,352 | `def _query(self, host_object, classification=False): template = 'http://verify.hosts-file.net/?v={}&s={}' url = template.format(self.app_id, host_object.to_unicode()) url = url + '&class=true' if classification else url return get(url).text` | Query the client for data on the given host. |
| 3,353 | `def _request_address(self): if not self._request_address_val: template = ('https://sb-ssl.google.com/safebrowsing/api/lookup' '?client={0}&key={1}&appver={2}&pver={3}') self._request_address_val = template.format(self.client_name, self.api_key, self.app_version, self.protocol_version) return self._request_address_val` | Get the address of a POST request to the service. |
| 3,354 | `def _query_once(self, urls): request_body = '{}\n{}'.format(len(urls), '\n'.join(urls)) response = post(self._request_address, request_body) try: response.raise_for_status() except HTTPError as error: if response.status_code == 401: msg = 'The API key is not authorized' raise_from(UnathorizedAPIKeyError(msg), error) else: raise return response` | Perform a single POST request using the lookup API. |
| 3,355 | `def _query(self, urls): urls = list(set(urls)) for i in range(0, len(urls), self.max_urls_per_request): chunk = urls[i:i + self.max_urls_per_request] response = self._query_once(chunk) if response.status_code == 200: yield chunk, response` | Test URLs for being listed by the service. |
| 3,356 | `def _get_match_and_classification(self, urls): for url_list, response in self._query(urls): classification_set = response.text.splitlines() for url, _class in zip(url_list, classification_set): if _class != 'ok': yield url, _class` | Get classifications for all matching URLs. |
| 3,357 | `def lookup_matching(self, urls): for url, _class in self._get_match_and_classification(urls): classification = set(_class.split(',')) yield AddressListItem(url, self, classification)` | Get items for all listed URLs. |
| 3,358 | `def extends_(cls, kls): if inspect.isclass(kls): for _name, _val in kls.__dict__.items(): if not _name.startswith("__"): setattr(cls, _name, _val) elif inspect.isfunction(kls): setattr(cls, kls.__name__, kls) return cls` | A view decorator to extend another view class or function to itself. It will inherit all its methods and properties and use them on itself. |
| 3,359 | `def send(self, to, subject, body, reply_to=None, **kwargs): if self.provider == "SES": self.mail.send(to=to, subject=subject, body=body, reply_to=reply_to, **kwargs) elif self.provider == "FLASK-MAIL": msg = flask_mail.Message(recipients=to, subject=subject, body=body, reply_to=reply_to, sender=self.app.config.get("MAIL_DEFAULT_SENDER")) self.mail.send(msg)` | Send a simple message. |
| 3,360 | `def send_template(self, template, to, reply_to=None, **context): if self.provider == "SES": self.mail.send_template(template=template, to=to, reply_to=reply_to, **context) elif self.provider == "FLASK-MAIL": ses_mail = ses_mailer.Mail(app=self.app) data = ses_mail.parse_template(template=template, **context) msg = flask_mail.Message(recipients=to, subject=data["subject"], body=data["body"], reply_to=reply_to, sender=self.app.config.get("MAIL_DEFAULT_SENDER")) self.mail.send(msg)` | Send a template message. |
| 3,361 | `def dump_to_console(pylint_data): for key, value in list(pylint_data.items()): if key not in ('errors', 'total', 'scores', 'average') and len(value) > 0: print("\n*********** {}".format(key)) for line in value: print(line.strip('\n')) f_score = [score[1] for score in pylint_data['scores'] if score[0] == key][0] print("Score: {}".format(f_score))` | Displays pylint data to the console. |
| 3,362 | `def post_to_gerrit(commit, score=0, message='', user='lunatest', gerrit=None): if score > 0: score = "+{}".format(score) else: url = "{}job/{}/{}/consoleText".format(os.environ.get('JENKINS_URL'), os.environ.get('JOB_NAME'), os.environ.get('BUILD_NUMBER')) message = ("{}\r\n\r\n" "Check output here: {}").format(message, url) score = str(score) message = "'\"{}\"'".format(message) subprocess.check_output(["ssh", "-p", str(os.environ.get("GERRIT_PORT", "29418")), "{}@{}".format(user, gerrit), "gerrit", "review", "--code-review " + score, "-m", message, commit])` | Post the data to gerrit. This is a stub right now, as the code to post up to gerrit still needs to be written. |
| 3,363 | `def sort_by_type(file_list): ret_dict = defaultdict(list) for filepath in file_list: _, ext = os.path.splitext(filepath) ret_dict[ext.replace('.', '')].append(filepath) return ret_dict` | Sorts a list of files into types. |
| 3,364 | `def checkout(repository, target): repository.git.fetch([next(iter(repository.remotes)), target]) repository.git.checkout("FETCH_HEAD") return repository.git.rev_parse(["--short", "HEAD"]).encode('ascii', 'ignore')` | Check out target into the current directory. Target can be a branch, review ID, or commit. |
| 3,365 | `def get_files_changed(repository, review_id): repository.git.fetch([next(iter(repository.remotes)), review_id]) files_changed = repository.git.diff_tree(["--no-commit-id", "--name-only", "-r", "FETCH_HEAD"]).splitlines() print("Found {} files changed".format(len(files_changed))) return files_changed` | Get a list of files changed compared to the given review. Compares against the current directory. |
| 3,366 | `def start(self): for name, child in self._compound_children.items(): self.logger.debug('start %s (%s)', name, child.__class__.__name__) child.start()` | Start the component running. |
| 3,367 | `def join(self, end_comps=False): for name, child in self._compound_children.items(): if end_comps and not child.is_pipe_end(): continue self.logger.debug('join %s (%s)', name, child.__class__.__name__) child.join()` | Wait for the compound component's children to stop running. |
| 3,368 | `def main(review_id, repository, branch="development", user='admin', gerrit=None): checkout(repository, branch) raw_file_list = get_files_changed(repository=repository, review_id=review_id) checkout(repository=repository, target=branch) files = sort_by_type(raw_file_list) old_data = run_linters(files) commit_id = checkout(repository=repository, target=review_id) new_data = run_linters(files) dump_to_console(new_data['py']) validations = run_validators(new_data, old_data) final_score = min(list(validations.values()), key=lambda x: x[0])[0] comment = "" for name, validation in list(validations.items()): score, message = validation if message[-1:] != "\n": message += "\n" comment += message exit_code = 1 if final_score < 0 else 0 post_to_gerrit(commit_id, score=final_score, message=comment, user=user, gerrit=gerrit) exit(exit_code)` | Do the bulk of the work. |
| 3,369 | `def set_sqlite_pragma(dbapi_connection, connection_record): import sqlite3 if dbapi_connection.__class__ is sqlite3.Connection: cursor = dbapi_connection.cursor() cursor.execute("PRAGMA foreign_keys=ON") cursor.close()` | Allows foreign keys to work in SQLite. |
| 3,370 | `def validate(self): log.info("{}\nValidating against {}".format("-" * 100, self.schema_def.__class__.__name__)) if not self.schema_def: raise ValueError("No schema definition supplied.") self.checked_attributes = [] result = ValidationResult(self.allowed_namespaces[0], self.schema_def.__class__.__name__) for subject, predicate, object_ in self.graph: log.info("\nsubj: {subj}\npred: {pred}\n obj: {obj}".format(subj=subject, pred=predicate, obj=object_.encode('utf-8'))) result.add_error(self._check_triple((subject, predicate, object_))) return result` | Iterate over all triples in the graph and validate each one appropriately. |
| 3,371 | `def _check_triple(self, triple): subj, pred, obj = triple if self._should_ignore_predicate(pred): log.info("Ignoring triple with predicate '{}'".format(self._field_name_from_uri(pred))) return classes = [] log.warning("Possible member %s found" % pred) pred = self._expand_qname(pred) if self._namespace_from_uri(pred) not in self.allowed_namespaces: log.info("Member %s does not use an allowed namespace", pred) return instanceof = self._is_instance((subj, pred, obj)) if type(instanceof) == rt.URIRef: instanceof = self._expand_qname(instanceof) if hasattr(self.schema_def, "attributes_by_class") and not self.schema_def.attributes_by_class: log.info("Parsed ontology not found. Parsing...") self.schema_def.parse_ontology() class_invalid = self._validate_class(instanceof) if class_invalid: log.warning("Invalid class %s" % instanceof) return class_invalid classes = self._superclasses_for_subject(self.graph, instanceof) classes.append(instanceof) member_invalid = self._validate_member(pred, classes, instanceof) if member_invalid: log.warning("Invalid member of class") return member_invalid dupe_invalid = self._validate_duplication((subj, pred), instanceof) if dupe_invalid: log.warning("Duplication found") return dupe_invalid self.checked_attributes.append((subj, pred)) log.warning("successfully validated triple, no errors") return` | Compare a triple to the ontology; return an error or None. |
| 3,372 | `def _validate_class(self, cl): if cl not in self.schema_def.attributes_by_class: search_string = self._build_search_string(cl) err = self.err("{0} - invalid class", self._field_name_from_uri(cl), search_string=search_string) return ValidationWarning(ValidationResult.ERROR, err['err'], err['line'], err['num'])` | Return an error if class cl is not found in the ontology. |
| 3,373 | `def _validate_member(self, member, classes, instanceof): log.info("Validating member %s" % member) stripped = self._get_stripped_attributes(member, classes) if self._field_name_from_uri(member) in stripped: all_class_members = sum([self.schema_def.attributes_by_class[cl] for cl in classes], []) if member in all_class_members: return if self._namespace_from_uri(member) in self.allowed_namespaces: err = self.err("Unoficially allowed namespace {0}", self._namespace_from_uri(member)) return ValidationWarning(ValidationResult.WARNING, err['err'], err['line'], err['num']) else: err = self.err("{0} - invalid member of {1}", self._field_name_from_uri(member), self._field_name_from_uri(instanceof)) return ValidationWarning(ValidationResult.ERROR, err['err'], err['line'], err['num'])` | Return an error if member is not a member of any class in classes. |
| 3,374 | `def _validate_duplication(self, subj_and_pred, cl): subj, pred = subj_and_pred log.info("Validating duplication of member %s" % pred) if (subj, pred) in self.checked_attributes: err = self.err("{0} - duplicated member of {1}", self._field_name_from_uri(pred), self._field_name_from_uri(cl)) return ValidationWarning(ValidationResult.WARNING, err['err'], err['line'], err['num'])` | Returns an error if we've already seen the member pred on subj. |
| 3,375 | `def _superclasses_for_subject(self, graph, typeof): classes = [] superclass = typeof while True: found = False for p, o in self.schema_def.ontology[superclass]: if self.schema_def.lexicon['subclass'] == str(p): found = True classes.append(o) superclass = o if not found: break return classes` | Helper that returns a list of all superclasses of a given class. |
| 3,376 | `def _is_instance(self, triple): subj, pred, obj = triple input_pred_ns = self._namespace_from_uri(self._expand_qname(pred)) triples = self.graph.triples((subj, rt.URIRef(self.schema_def.lexicon['type']), None)) if triples: for tr in triples: triple_obj_ns = self._namespace_from_uri(self._expand_qname(tr[2])) if input_pred_ns == triple_obj_ns: return tr[2]` | Helper that returns the class type of subj. |
| 3,377 | `def _namespace_from_uri(self, uri): uri = str(uri) parts = uri.split('#') if len(parts) == 1: return "%s/" % '/'.join(uri.split('/')[:-1]) return "%s#" % '#'.join(parts[:-1])` | Returns the expanded namespace prefix of a URI. |
| 3,378 | `def _expand_qname(self, qname): if type(qname) is not rt.URIRef: raise TypeError("Cannot expand qname of type {}, must be URIRef".format(type(qname))) for ns in self.graph.namespaces(): if ns[0] == qname.split(':')[0]: return rt.URIRef("%s%s" % (ns[1], qname.split(':')[-1])) return qname` | Expand a qualified name's namespace prefix to include the resolved namespace root URL. |
| 3,379 | `def _write_unassigned_ranges(out_filename): with open(out_filename, 'wt') as f: f.write('# auto-generated by port_for._download_ranges (%s)\n' % datetime.date.today()) f.write('UNASSIGNED_RANGES = [\n') for range in to_ranges(sorted(list(_unassigned_ports()))): f.write(" (%d, %d),\n" % range) f.write(']\n')` | Downloads ports data from IANA and Wikipedia and converts it to a Python module. This function is used to generate _ranges.py. |
| 3,380 | `def _wikipedia_known_port_ranges(): req = urllib2.Request(WIKIPEDIA_PAGE, headers={'User-Agent': "Magic Browser"}) page = urllib2.urlopen(req).read().decode('utf8') ports = re.findall('<td>((\d+)(\W(\d+))?)</td>', page, re.U) return ((int(p[1]), int(p[3] if p[3] else p[1])) for p in ports)` | Returns used port ranges according to the Wikipedia page. The page contains unofficial well-known ports. |
| 3,381 | `def _iana_unassigned_port_ranges(): page = urllib2.urlopen(IANA_DOWNLOAD_URL).read() xml = ElementTree.fromstring(page) records = xml.findall('{%s}record' % IANA_NS) for record in records: description = record.find('{%s}description' % IANA_NS).text if description == 'Unassigned': numbers = record.find('{%s}number' % IANA_NS).text yield numbers` | Returns unassigned port ranges according to IANA. |
| 3,382 | `def run_linters(files): data = {} for file_type, file_list in list(files.items()): linter = LintFactory.get_linter(file_type) if linter is not None: data[file_type] = linter.run(file_list) return data` | Run through the file list and try to find a linter that matches the given file type. |
| 3,383 | `def run_validators(new_data, old_data): validation_data = {} for file_type, lint_data in list(new_data.items()): old_lint_data = old_data.get(file_type, {}) validator = ValidatorFactory.get_validator(file_type) if validator is not None: validation_data[validator.name] = validator.run(lint_data, old_lint_data) return validation_data` | Run through all matching validators. |
| 3,384 | `def original_unescape(self, s): if isinstance(s, basestring): return unicode(HTMLParser.unescape(self, s)) elif isinstance(s, list): return [unicode(HTMLParser.unescape(self, item)) for item in s] else: return s` | Since we need to use this sometimes. |
| 3,385 | `def get_standard(self): try: res = urlopen(PARSELY_PAGE_SCHEMA) except: return [] text = res.read() if isinstance(text, bytes): text = text.decode('utf-8') tree = etree.parse(StringIO(text)) stdref = tree.xpath("//div/@about") return [a.split(':')[1] for a in stdref]` | Get the list of allowed parameters. |
| 3,386 | `def _get_parselypage(self, body): parser = ParselyPageParser() ret = None try: parser.feed(body) except HTMLParseError: pass if parser.ppage is None: return ret = parser.ppage if ret: ret = {parser.original_unescape(k): parser.original_unescape(v) for k, v in iteritems(ret)} return ret` | Extract the parsely-page meta content from a page. |
| 3,387 | `def _read_schema(self): cache_filename = os.path.join(CACHE_ROOT, "%s.smt" % self._representation) log.info("Attempting to read local schema at %s" % cache_filename) try: if time.time() - os.stat(cache_filename).st_mtime > CACHE_EXPIRY: log.warning("Cache expired, re-pulling") self._pull_schema_definition(cache_filename) except OSError: log.warning("Local schema not found. Pulling from web.") self._pull_schema_definition(cache_filename) else: log.info("Success") return cache_filename` | Return the local filename of the definition file for this schema; if it is not present or is older than the expiry, pull the latest version from the web at self._ontology_file. |
| 3,388 | `def _pull_schema_definition(self, fname): std_url = urlopen(self._ontology_file) cached_std = open(fname, "w+") cached_std.write(std_url.read()) cached_std.close()` | Download an ontology definition from the web. |
| 3,389 | `def parse_ontology(self): start = time.clock() log.info("Parsing ontology file for %s" % self.__class__.__name__) for subj, pred, obj in self._schema_nodes(): if subj not in self.attributes_by_class: if obj == rt.URIRef(self.lexicon['class']) and pred == rt.URIRef(self.lexicon['type']): self.attributes_by_class[subj] = [] leaves = [(subj, pred, obj)] if type(obj) == rt.BNode: leaves = deepest_node((subj, pred, obj), self.graph) for s, p, o in leaves: if o not in self.attributes_by_class: self.attributes_by_class[o] = [] if pred == rt.URIRef(self.lexicon['domain']): self.attributes_by_class[o].append(subj) if not self.attributes_by_class: log.info("No nodes found in ontology") log.info("Ontology parsing complete in {}".format((time.clock() - start) * 1000))` | Place the ontology graph into a set of custom data structures for use by the validator. |
| 3,390 | `def _schema_nodes(self): name, ext = os.path.splitext(self._ontology_file) if ext in ['.ttl']: self._ontology_parser_function = lambda s: rdflib.Graph().parse(s, format='n3') else: self._ontology_parser_function = lambda s: pyRdfa().graph_from_source(s) if not self._ontology_parser_function: raise ValueError("No function found to parse ontology. %s" % self.errorstring_base) if not self._ontology_file: raise ValueError("No ontology file specified. %s" % self.errorstring_base) if not self.lexicon: raise ValueError("No lexicon object assigned. %s" % self.errorstring_base) latest_file = self._read_schema() try: self.graph = self._ontology_parser_function(latest_file) except: raise IOError("Error parsing ontology at %s" % latest_file) for subj, pred, obj in self.graph: self.ontology[subj].append((pred, obj)) yield (subj, pred, obj)` | Parse self._ontology_file into a graph. |
| 3,391 | `def baseDomain(domain, includeScheme=True): result = '' url = urlparse(domain) if includeScheme: result = '%s://' % url.scheme if len(url.netloc) == 0: result += url.path else: result += url.netloc return result` | Return only the network location portion of the given domain, unless includeScheme is True. |
| 3,392 | `def search(session, query): flat_query = "".join(query.split()) artists = session.query(Artist).filter(or_(Artist.name.ilike(f"%%{query}%%"), Artist.name.ilike(f"%%{flat_query}%%"))).all() albums = session.query(Album).filter(Album.title.ilike(f"%%{query}%%")).all() tracks = session.query(Track).filter(Track.title.ilike(f"%%{query}%%")).all() return dict(artists=artists, albums=albums, tracks=tracks)` | Naive search of the database for query. |
| 3,393 | `def prompt(message, default=None, strip=True, suffix=' '): if default is not None: prompt_text = "{0} [{1}]{2}".format(message, default, suffix) else: prompt_text = "{0}{1}".format(message, suffix) input_value = get_input(prompt_text) if input_value and strip: input_value = input_value.strip() if not input_value: input_value = default return input_value` | Print a message and prompt the user for input. Return the user input. |
| 3,394 | `def yesno(message, default='yes', suffix=' '): if default == 'yes': yesno_prompt = '[Y/n]' elif default == 'no': yesno_prompt = '[y/N]' else: raise ValueError("default must be 'yes' or 'no'.") if message != '': prompt_text = "{0} {1}{2}".format(message, yesno_prompt, suffix) else: prompt_text = "{0}{1}".format(yesno_prompt, suffix) while True: response = get_input(prompt_text).strip() if response == '': return True else: if re.match('^(y)(es)?$', response, re.IGNORECASE): if default == 'yes': return True else: return False elif re.match('^(n)(o)?$', response, re.IGNORECASE): if default == 'no': return True else: return False` | Prompt the user to answer yes or no. Return True if the default is chosen, otherwise False. |
| 3,395 | `def register_validator(validator): if hasattr(validator, "EXTS") and hasattr(validator, "run"): ValidatorFactory.PLUGINS.append(validator) else: raise ValidatorException("Validator does not have 'run' method or EXTS variable!")` | Register a Validator class for file verification. |
| 3,396 | `def file_writer(self, in_frame): self.update_config() path = self.config['path'] encoder = self.config['encoder'] fps = self.config['fps'] bit16 = self.config['16bit'] numpy_image = in_frame.as_numpy() ylen, xlen, bpc = numpy_image.shape if bpc == 3: if in_frame.type != 'RGB': self.logger.warning('Expected RGB input, got %s', in_frame.type) pix_fmt = ('rgb24', 'rgb48le')[bit16] elif bpc == 1: if in_frame.type != 'Y': self.logger.warning('Expected Y input, got %s', in_frame.type) pix_fmt = ('gray', 'gray16le')[bit16] else: self.logger.critical('Cannot write %s frame with %d components', in_frame.type, bpc) return md = Metadata().copy(in_frame.metadata) audit = md.get('audit') audit += '%s = data\n' % path audit += ' encoder: "%s"\n' % (encoder) audit += ' 16bit: %s\n' % (self.config['16bit']) md.set('audit', audit) md.to_file(path) with self.subprocess(['ffmpeg', '-v', 'warning', '-y', '-an', '-s', '%dx%d' % (xlen, ylen), '-f', 'rawvideo', '-c:v', 'rawvideo', '-r', '%d' % fps, '-pix_fmt', pix_fmt, '-i', '-', '-r', '%d' % fps] + encoder.split() + [path], stdin=subprocess.PIPE) as sp: while True: in_frame = yield True if not in_frame: break if bit16: numpy_image = in_frame.as_numpy(dtype=pt_float) numpy_image = numpy_image * pt_float(256.0) numpy_image = numpy_image.clip(pt_float(0), pt_float(2 ** 16 - 1)).astype(numpy.uint16) else: numpy_image = in_frame.as_numpy(dtype=numpy.uint8) sp.stdin.write(numpy_image.tostring()) del in_frame` | Generator process to write a file. |
| 3,397 | `def HHIPreFilter(config={}): fil = numpy.array([-4, 8, 25, -123, 230, 728, 230, -123, 25, 8, -4], dtype=numpy.float32).reshape((-1, 1, 1)) / numpy.float32(1000) resize = Resize(config=config) out_frame = Frame() out_frame.data = fil out_frame.type = 'fil' audit = out_frame.metadata.get('audit') audit += 'data = HHI pre-interlace filter\n' out_frame.metadata.set('audit', audit) resize.filter(out_frame) return resize` | HHI pre-interlace filter. |
| 3,398 | `def port_ranges(): try: return _linux_ranges() except (OSError, IOError): try: ranges = _bsd_ranges() if ranges: return ranges except (OSError, IOError): pass return [DEFAULT_EPHEMERAL_PORT_RANGE]` | Returns a list of ephemeral port ranges for the current machine. |
| 3,399 | `def run(self): try: self.owner.start_event() while True: while not self.incoming: time.sleep(0.01) while self.incoming: command = self.incoming.popleft() if command is None: raise StopIteration() command() except StopIteration: pass self.owner.stop_event()` | The actual event loop. |
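
Each row above pairs whitespace-flattened source tokens (question) with a reference summary (target), the usual layout for a code-to-docstring corpus. Below is a minimal sketch of how such a split might be consumed with the Hugging Face datasets library; the dataset path user/code-to-docstring is a hypothetical placeholder, not the actual identifier for this page.

```python
from datasets import load_dataset

# Hypothetical dataset path -- substitute the real identifier for this page.
ds = load_dataset("user/code-to-docstring", split="train")

# Columns as described in the table header: idx (int64), question (source
# code flattened to space-separated tokens), target (reference docstring).
sample = ds[3300]
print(sample["idx"])            # row index, e.g. 3300
print(sample["target"])         # the reference summary
print(sample["question"][:80])  # first characters of the flattened code
```

Note that the question field is not directly runnable: statement boundaries were lost when the code was flattened to a token stream, so a consumer that needs executable source has to recover it from the original repositories.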