idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
12,300 | def send ( self , destination , body , content_type = None , headers = None , ** keyword_headers ) : assert destination is not None , "'destination' is required" assert body is not None , "'body' is required" headers = utils . merge_headers ( [ headers , keyword_headers ] ) headers [ HDR_DESTINATION ] = destination if content_type : headers [ HDR_CONTENT_TYPE ] = content_type if self . auto_content_length and body and HDR_CONTENT_LENGTH not in headers : headers [ HDR_CONTENT_LENGTH ] = len ( body ) self . send_frame ( CMD_SEND , headers , body ) | Send a message to a destination . |
12,301 | def subscribe ( self , destination , id = None , ack = 'auto' , headers = None , ** keyword_headers ) : assert destination is not None , "'destination' is required" headers = utils . merge_headers ( [ headers , keyword_headers ] ) headers [ HDR_DESTINATION ] = destination if id : headers [ HDR_ID ] = id headers [ HDR_ACK ] = ack self . send_frame ( CMD_SUBSCRIBE , headers ) | Subscribe to a destination . |
12,302 | def unsubscribe ( self , destination = None , id = None , headers = None , ** keyword_headers ) : assert id is not None or destination is not None , "'id' or 'destination' is required" headers = utils . merge_headers ( [ headers , keyword_headers ] ) if id : headers [ HDR_ID ] = id if destination : headers [ HDR_DESTINATION ] = destination self . send_frame ( CMD_UNSUBSCRIBE , headers ) | Unsubscribe from a destination by either id or the destination name . |
12,303 | def connect ( self , username = None , passcode = None , wait = False , headers = None , ** keyword_headers ) : cmd = CMD_STOMP headers = utils . merge_headers ( [ headers , keyword_headers ] ) headers [ HDR_ACCEPT_VERSION ] = self . version if self . transport . vhost : headers [ HDR_HOST ] = self . transport . vhost if username is not None : headers [ HDR_LOGIN ] = username if passcode is not None : headers [ HDR_PASSCODE ] = passcode self . send_frame ( cmd , headers ) if wait : self . transport . wait_for_connection ( ) if self . transport . connection_error : raise ConnectFailedException ( ) | Start a connection . |
12,304 | def get_socket ( host , port , timeout = None ) : for res in getaddrinfo ( host , port , 0 , SOCK_STREAM ) : af , socktype , proto , canonname , sa = res sock = None try : sock = socket ( af , socktype , proto ) if timeout is not None : sock . settimeout ( timeout ) sock . connect ( sa ) return sock except error : if sock is not None : sock . close ( ) raise error | Return a socket . |
12,305 | def start ( self ) : self . running = True self . attempt_connection ( ) receiver_thread = self . create_thread_fc ( self . __receiver_loop ) receiver_thread . name = "StompReceiver%s" % getattr ( receiver_thread , "name" , "Thread" ) self . notify ( 'connecting' ) | Start the connection . This should be called after all listeners have been registered . If this method is not called no frames will be received by the connection . |
12,306 | def stop ( self ) : with self . __receiver_thread_exit_condition : while not self . __receiver_thread_exited and self . is_connected ( ) : self . __receiver_thread_exit_condition . wait ( ) | Stop the connection . Performs a clean shutdown by waiting for the receiver thread to exit . |
12,307 | def notify ( self , frame_type , headers = None , body = None ) : if frame_type == 'receipt' : receipt = headers [ 'receipt-id' ] receipt_value = self . __receipts . get ( receipt ) with self . __send_wait_condition : self . set_receipt ( receipt , None ) self . __send_wait_condition . notify ( ) if receipt_value == CMD_DISCONNECT : self . set_connected ( False ) if receipt == self . __disconnect_receipt : self . disconnect_socket ( ) self . __disconnect_receipt = None elif frame_type == 'connected' : self . set_connected ( True ) elif frame_type == 'disconnected' : self . set_connected ( False ) with self . __listeners_change_condition : listeners = sorted ( self . listeners . items ( ) ) for ( _ , listener ) in listeners : if not listener : continue notify_func = getattr ( listener , 'on_%s' % frame_type , None ) if not notify_func : log . debug ( "listener %s has no method on_%s" , listener , frame_type ) continue if frame_type in ( 'heartbeat' , 'disconnected' ) : notify_func ( ) continue if frame_type == 'connecting' : notify_func ( self . current_host_and_port ) continue if frame_type == 'error' and not self . connected : with self . __connect_wait_condition : self . connection_error = True self . __connect_wait_condition . notify ( ) rtn = notify_func ( headers , body ) if rtn : ( headers , body ) = rtn return ( headers , body ) | Utility function for notifying listeners of incoming and outgoing messages |
12,308 | def transmit ( self , frame ) : with self . __listeners_change_condition : listeners = sorted ( self . listeners . items ( ) ) for ( _ , listener ) in listeners : if not listener : continue try : listener . on_send ( frame ) except AttributeError : continue if frame . cmd == CMD_DISCONNECT and HDR_RECEIPT in frame . headers : self . __disconnect_receipt = frame . headers [ HDR_RECEIPT ] lines = utils . convert_frame ( frame ) packed_frame = pack ( lines ) if log . isEnabledFor ( logging . DEBUG ) : log . debug ( "Sending frame: %s" , lines ) else : log . info ( "Sending frame: %r" , frame . cmd or "heartbeat" ) self . send ( packed_frame ) | Convert a frame object to a frame string and transmit to the server . |
12,309 | def wait_for_connection ( self , timeout = None ) : if timeout is not None : wait_time = timeout / 10.0 else : wait_time = None with self . __connect_wait_condition : while self . running and not self . is_connected ( ) and not self . connection_error : self . __connect_wait_condition . wait ( wait_time ) if not self . running or not self . is_connected ( ) : raise exception . ConnectFailedException ( ) | Wait until we ve established a connection with the server . |
12,310 | def __receiver_loop ( self ) : log . info ( "Starting receiver loop" ) notify_disconnected = True try : while self . running : try : while self . running : frames = self . __read ( ) for frame in frames : f = utils . parse_frame ( frame ) if f is None : continue if self . __auto_decode : f . body = decode ( f . body ) self . process_frame ( f , frame ) except exception . ConnectionClosedException : if self . running : self . __recvbuf = b'' self . running = False notify_disconnected = True break finally : self . cleanup ( ) finally : with self . __receiver_thread_exit_condition : self . __receiver_thread_exited = True self . __receiver_thread_exit_condition . notifyAll ( ) log . info ( "Receiver loop ended" ) self . notify ( 'receiver_loop_completed' ) if notify_disconnected : self . notify ( 'disconnected' ) with self . __connect_wait_condition : self . __connect_wait_condition . notifyAll ( ) | Main loop listening for incoming data . |
12,311 | def is_connected ( self ) : try : return self . socket is not None and self . socket . getsockname ( ) [ 1 ] != 0 and BaseTransport . is_connected ( self ) except socket . error : return False | Return true if the socket managed by this connection is connected |
12,312 | def disconnect_socket ( self ) : self . running = False if self . socket is not None : if self . __need_ssl ( ) : try : self . socket = self . socket . unwrap ( ) except Exception : _ , e , _ = sys . exc_info ( ) log . warning ( e ) elif hasattr ( socket , 'SHUT_RDWR' ) : try : self . socket . shutdown ( socket . SHUT_RDWR ) except socket . error : _ , e , _ = sys . exc_info ( ) if get_errno ( e ) != errno . ENOTCONN : log . warning ( "Unable to issue SHUT_RDWR on socket because of error '%s'" , e ) if self . socket is not None : try : self . socket . close ( ) except socket . error : _ , e , _ = sys . exc_info ( ) log . warning ( "Unable to close socket because of error '%s'" , e ) self . current_host_and_port = None self . socket = None self . notify ( 'disconnected' ) | Disconnect the underlying socket connection |
12,313 | def set_ssl ( self , for_hosts = [ ] , key_file = None , cert_file = None , ca_certs = None , cert_validator = None , ssl_version = DEFAULT_SSL_VERSION , password = None ) : if not ssl : raise Exception ( "SSL connection requested, but SSL library not found" ) for host_port in for_hosts : self . __ssl_params [ host_port ] = dict ( key_file = key_file , cert_file = cert_file , ca_certs = ca_certs , cert_validator = cert_validator , ssl_version = ssl_version , password = password ) | Sets up SSL configuration for the given hosts . This ensures socket is wrapped in a SSL connection raising an exception if the SSL module can t be found . |
12,314 | def __need_ssl ( self , host_and_port = None ) : if not host_and_port : host_and_port = self . current_host_and_port return host_and_port in self . __ssl_params | Whether current host needs SSL or not . |
12,315 | def get_ssl ( self , host_and_port = None ) : if not host_and_port : host_and_port = self . current_host_and_port return self . __ssl_params . get ( host_and_port ) | Get SSL params for the given host . |
12,316 | def __print_async ( self , frame_type , headers , body ) : if self . __quit : return if self . verbose : self . __sysout ( frame_type ) for k , v in headers . items ( ) : self . __sysout ( '%s: %s' % ( k , v ) ) else : if 'message-id' in headers : self . __sysout ( 'message-id: %s' % headers [ 'message-id' ] ) if 'subscription' in headers : self . __sysout ( 'subscription: %s' % headers [ 'subscription' ] ) if self . prompt != '' : self . __sysout ( '' ) self . __sysout ( body ) if not self . __start : self . __sysout ( self . prompt , end = '' ) else : self . __start = False self . stdout . flush ( ) | Utility function to print a message and setup the command prompt for the next input |
12,317 | def simple_tokenize ( text , include_punctuation = False ) : text = unicodedata . normalize ( 'NFC' , text ) if include_punctuation : return [ token . casefold ( ) for token in TOKEN_RE_WITH_PUNCTUATION . findall ( text ) ] else : return [ token . strip ( "'" ) . casefold ( ) for token in TOKEN_RE . findall ( text ) ] | Tokenize the given text using a straightforward Unicode - aware token expression . |
12,318 | def tokenize ( text , lang , include_punctuation = False , external_wordlist = False ) : global _mecab_tokenize , _jieba_tokenize language = langcodes . get ( lang ) info = get_language_info ( language ) text = preprocess_text ( text , language ) if info [ 'tokenizer' ] == 'mecab' : from wordfreq . mecab import mecab_tokenize as _mecab_tokenize tokens = _mecab_tokenize ( text , language . language ) if not include_punctuation : tokens = [ token for token in tokens if not PUNCT_RE . match ( token ) ] elif info [ 'tokenizer' ] == 'jieba' : from wordfreq . chinese import jieba_tokenize as _jieba_tokenize tokens = _jieba_tokenize ( text , external_wordlist = external_wordlist ) if not include_punctuation : tokens = [ token for token in tokens if not PUNCT_RE . match ( token ) ] else : if info [ 'tokenizer' ] != 'regex' and lang not in _WARNED_LANGUAGES : logger . warning ( "The language '{}' is in the '{}' script, which we don't " "have a tokenizer for. The results will be bad." . format ( lang , info [ 'script' ] ) ) _WARNED_LANGUAGES . add ( lang ) tokens = simple_tokenize ( text , include_punctuation = include_punctuation ) return tokens | Tokenize this text in a way that s relatively simple but appropriate for the language . Strings that are looked up in wordfreq will be run through this function first so that they can be expected to match the data . |
12,319 | def lossy_tokenize ( text , lang , include_punctuation = False , external_wordlist = False ) : global _simplify_chinese info = get_language_info ( lang ) tokens = tokenize ( text , lang , include_punctuation , external_wordlist ) if info [ 'lookup_transliteration' ] == 'zh-Hans' : from wordfreq . chinese import simplify_chinese as _simplify_chinese tokens = [ _simplify_chinese ( token ) for token in tokens ] return [ smash_numbers ( token ) for token in tokens ] | Get a list of tokens for this text with largely the same results and options as tokenize but aggressively normalize some text in a lossy way that s good for counting word frequencies . |
12,320 | def read_cBpack ( filename ) : with gzip . open ( filename , 'rb' ) as infile : data = msgpack . load ( infile , raw = False ) header = data [ 0 ] if ( not isinstance ( header , dict ) or header . get ( 'format' ) != 'cB' or header . get ( 'version' ) != 1 ) : raise ValueError ( "Unexpected header: %r" % header ) return data [ 1 : ] | Read a file from an idiosyncratic format that we use for storing approximate word frequencies called cBpack . |
12,321 | def available_languages ( wordlist = 'best' ) : if wordlist == 'best' : available = available_languages ( 'small' ) available . update ( available_languages ( 'large' ) ) return available elif wordlist == 'combined' : logger . warning ( "The 'combined' wordlists have been renamed to 'small'." ) wordlist = 'small' available = { } for path in DATA_PATH . glob ( '*.msgpack.gz' ) : if not path . name . startswith ( '_' ) : list_name = path . name . split ( '.' ) [ 0 ] name , lang = list_name . split ( '_' ) if name == wordlist : available [ lang ] = str ( path ) return available | Given a wordlist name return a dictionary of language codes to filenames representing all the languages in which that wordlist is available . |
12,322 | def get_frequency_dict ( lang , wordlist = 'best' , match_cutoff = 30 ) : freqs = { } pack = get_frequency_list ( lang , wordlist , match_cutoff ) for index , bucket in enumerate ( pack ) : freq = cB_to_freq ( - index ) for word in bucket : freqs [ word ] = freq return freqs | Get a word frequency list as a dictionary mapping tokens to frequencies as floating - point probabilities . |
12,323 | def word_frequency ( word , lang , wordlist = 'best' , minimum = 0. ) : args = ( word , lang , wordlist , minimum ) try : return _wf_cache [ args ] except KeyError : if len ( _wf_cache ) >= CACHE_SIZE : _wf_cache . clear ( ) _wf_cache [ args ] = _word_frequency ( * args ) return _wf_cache [ args ] | Get the frequency of word in the language with code lang from the specified wordlist . |
12,324 | def zipf_frequency ( word , lang , wordlist = 'best' , minimum = 0. ) : freq_min = zipf_to_freq ( minimum ) freq = word_frequency ( word , lang , wordlist , freq_min ) return round ( freq_to_zipf ( freq ) , 2 ) | Get the frequency of word in the language with code lang on the Zipf scale . |
12,325 | def top_n_list ( lang , n , wordlist = 'best' , ascii_only = False ) : results = [ ] for word in iter_wordlist ( lang , wordlist ) : if ( not ascii_only ) or max ( word ) <= '~' : results . append ( word ) if len ( results ) >= n : break return results | Return a frequency list of length n in descending order of frequency . This list contains words from wordlist of the given language . If ascii_only then only ascii words are considered . |
12,326 | def random_words ( lang = 'en' , wordlist = 'best' , nwords = 5 , bits_per_word = 12 , ascii_only = False ) : n_choices = 2 ** bits_per_word choices = top_n_list ( lang , n_choices , wordlist , ascii_only = ascii_only ) if len ( choices ) < n_choices : raise ValueError ( "There aren't enough words in the wordlist to provide %d bits of " "entropy per word." % bits_per_word ) return ' ' . join ( [ random . choice ( choices ) for i in range ( nwords ) ] ) | Returns a string of random space separated words . |
12,327 | def random_ascii_words ( lang = 'en' , wordlist = 'best' , nwords = 5 , bits_per_word = 12 ) : return random_words ( lang , wordlist , nwords , bits_per_word , ascii_only = True ) | Returns a string of random space separated ASCII words . |
12,328 | def jieba_tokenize ( text , external_wordlist = False ) : global jieba_tokenizer , jieba_orig_tokenizer if external_wordlist : if jieba_orig_tokenizer is None : jieba_orig_tokenizer = jieba . Tokenizer ( dictionary = ORIG_DICT_FILENAME ) return jieba_orig_tokenizer . lcut ( text ) else : if jieba_tokenizer is None : jieba_tokenizer = jieba . Tokenizer ( dictionary = DICT_FILENAME ) tokens = [ ] for _token , start , end in jieba_tokenizer . tokenize ( simplify_chinese ( text ) , HMM = False ) : tokens . append ( text [ start : end ] ) return tokens | Tokenize the given text into tokens whose word frequencies can probably be looked up . This uses Jieba a word - frequency - based tokenizer . |
12,329 | def preprocess_text ( text , language ) : info = get_language_info ( language ) text = unicodedata . normalize ( info [ 'normal_form' ] , text ) if info [ 'transliteration' ] is not None : text = transliterate ( info [ 'transliteration' ] , text ) if info [ 'remove_marks' ] : text = remove_marks ( text ) if info [ 'dotless_i' ] : text = casefold_with_i_dots ( text ) else : text = text . casefold ( ) if info [ 'diacritics_under' ] == 'commas' : text = cedillas_to_commas ( text ) elif info [ 'diacritics_under' ] == 'cedillas' : text = commas_to_cedillas ( text ) return text | This function applies pre - processing steps that convert forms of words considered equivalent into one standardized form . |
12,330 | def _language_in_list ( language , targets , min_score = 80 ) : matched = best_match ( language , targets , min_score = min_score ) return matched [ 1 ] > 0 | A helper function to determine whether this language matches one of the target languages with a match score above a certain threshold . |
12,331 | def mecab_tokenize ( text , lang ) : if lang not in MECAB_DICTIONARY_NAMES : raise ValueError ( "Can't run MeCab on language %r" % lang ) if lang not in MECAB_ANALYZERS : MECAB_ANALYZERS [ lang ] = make_mecab_analyzer ( MECAB_DICTIONARY_NAMES [ lang ] ) analyzer = MECAB_ANALYZERS [ lang ] text = unicodedata . normalize ( 'NFKC' , text . strip ( ) ) analyzed = analyzer . parse ( text ) if not analyzed : return [ ] return [ line . split ( '\t' ) [ 0 ] for line in analyzed . split ( '\n' ) if line != '' and line != 'EOS' ] | Use the mecab - python3 package to tokenize the given text . The lang must be ja for Japanese or ko for Korean . |
12,332 | def transliterate ( table , text ) : if table == 'sr-Latn' : return text . translate ( SR_LATN_TABLE ) elif table == 'az-Latn' : return text . translate ( AZ_LATN_TABLE ) else : raise ValueError ( "Unknown transliteration table: {!r}" . format ( table ) ) | Transliterate text according to one of the tables above . |
12,333 | def _update_exit_code_from_stats ( cls , statistics : Statistics , app : Application ) : for error_type in statistics . errors : exit_code = app . ERROR_CODE_MAP . get ( error_type ) if exit_code : app . update_exit_code ( exit_code ) | Set the current exit code based on the Statistics . |
12,334 | def is_response ( cls , response ) : if response . body : if cls . is_file ( response . body ) : return True | Return whether the document is likely to be a Sitemap . |
12,335 | def is_file ( cls , file ) : peeked_data = wpull . util . peek_file ( file ) if is_gzip ( peeked_data ) : try : peeked_data = wpull . decompression . gzip_uncompress ( peeked_data , truncated = True ) except zlib . error : pass peeked_data = wpull . string . printable_bytes ( peeked_data ) if b'<?xml' in peeked_data and ( b'<sitemapindex' in peeked_data or b'<urlset' in peeked_data ) : return True | Return whether the file is likely a Sitemap . |
12,336 | def normalize_hostname ( hostname ) : try : new_hostname = hostname . encode ( 'idna' ) . decode ( 'ascii' ) . lower ( ) except UnicodeError as error : raise UnicodeError ( 'Hostname {} rejected: {}' . format ( hostname , error ) ) from error if hostname != new_hostname : new_hostname . encode ( 'idna' ) return new_hostname | Normalizes a hostname so that it is ASCII and valid domain name . |
12,337 | def normalize_path ( path , encoding = 'utf-8' ) : if not path . startswith ( '/' ) : path = '/' + path path = percent_encode ( flatten_path ( path , flatten_slashes = True ) , encoding = encoding ) return uppercase_percent_encoding ( path ) | Normalize a path string . |
12,338 | def normalize_query ( text , encoding = 'utf-8' ) : path = percent_encode_plus ( text , encoding = encoding ) return uppercase_percent_encoding ( path ) | Normalize a query string . |
12,339 | def normalize_fragment ( text , encoding = 'utf-8' ) : path = percent_encode ( text , encoding = encoding , encode_set = FRAGMENT_ENCODE_SET ) return uppercase_percent_encoding ( path ) | Normalize a fragment . |
12,340 | def normalize_username ( text , encoding = 'utf-8' ) : path = percent_encode ( text , encoding = encoding , encode_set = USERNAME_ENCODE_SET ) return uppercase_percent_encoding ( path ) | Normalize a username |
12,341 | def normalize_password ( text , encoding = 'utf-8' ) : path = percent_encode ( text , encoding = encoding , encode_set = PASSWORD_ENCODE_SET ) return uppercase_percent_encoding ( path ) | Normalize a password |
12,342 | def percent_encode ( text , encode_set = DEFAULT_ENCODE_SET , encoding = 'utf-8' ) : byte_string = text . encode ( encoding ) try : mapping = _percent_encoder_map_cache [ encode_set ] except KeyError : mapping = _percent_encoder_map_cache [ encode_set ] = PercentEncoderMap ( encode_set ) . __getitem__ return '' . join ( [ mapping ( char ) for char in byte_string ] ) | Percent encode text . |
12,343 | def percent_encode_plus ( text , encode_set = QUERY_ENCODE_SET , encoding = 'utf-8' ) : if ' ' not in text : return percent_encode ( text , encode_set , encoding ) else : result = percent_encode ( text , encode_set , encoding ) return result . replace ( ' ' , '+' ) | Percent encode text for query strings . |
12,344 | def schemes_similar ( scheme1 , scheme2 ) : if scheme1 == scheme2 : return True if scheme1 in ( 'http' , 'https' ) and scheme2 in ( 'http' , 'https' ) : return True return False | Return whether URL schemes are similar . |
12,345 | def is_subdir ( base_path , test_path , trailing_slash = False , wildcards = False ) : if trailing_slash : base_path = base_path . rsplit ( '/' , 1 ) [ 0 ] + '/' test_path = test_path . rsplit ( '/' , 1 ) [ 0 ] + '/' else : if not base_path . endswith ( '/' ) : base_path += '/' if not test_path . endswith ( '/' ) : test_path += '/' if wildcards : return fnmatch . fnmatchcase ( test_path , base_path ) else : return test_path . startswith ( base_path ) | Return whether the a path is a subpath of another . |
12,346 | def uppercase_percent_encoding ( text ) : if '%' not in text : return text return re . sub ( r'%[a-f0-9][a-f0-9]' , lambda match : match . group ( 0 ) . upper ( ) , text ) | Uppercases percent - encoded sequences . |
12,347 | def split_query ( qs , keep_blank_values = False ) : items = [ ] for pair in qs . split ( '&' ) : name , delim , value = pair . partition ( '=' ) if not delim and keep_blank_values : value = None if keep_blank_values or value : items . append ( ( name , value ) ) return items | Split the query string . |
12,348 | def query_to_map ( text ) : dict_obj = { } for key , value in split_query ( text , True ) : if key not in dict_obj : dict_obj [ key ] = [ ] if value : dict_obj [ key ] . append ( value . replace ( '+' , ' ' ) ) else : dict_obj [ key ] . append ( '' ) return query_to_map ( text ) | Return a key - values mapping from a query string . |
12,349 | def urljoin ( base_url , url , allow_fragments = True ) : if url . startswith ( '//' ) and len ( url ) > 2 : scheme = base_url . partition ( ':' ) [ 0 ] if scheme : return urllib . parse . urljoin ( base_url , '{0}:{1}' . format ( scheme , url ) , allow_fragments = allow_fragments ) return urllib . parse . urljoin ( base_url , url , allow_fragments = allow_fragments ) | Join URLs like urllib . parse . urljoin but allow scheme - relative URL . |
12,350 | def flatten_path ( path , flatten_slashes = False ) : if not path or path == '/' : return '/' if path [ 0 ] == '/' : path = path [ 1 : ] parts = path . split ( '/' ) new_parts = collections . deque ( ) for part in parts : if part == '.' or ( flatten_slashes and not part ) : continue elif part != '..' : new_parts . append ( part ) elif new_parts : new_parts . pop ( ) if flatten_slashes and path . endswith ( '/' ) or not len ( new_parts ) : new_parts . append ( '' ) new_parts . appendleft ( '' ) return '/' . join ( new_parts ) | Flatten an absolute URL path by removing the dot segments . |
12,351 | def parse ( cls , url , default_scheme = 'http' , encoding = 'utf-8' ) : if url is None : return None url = url . strip ( ) if frozenset ( url ) & C0_CONTROL_SET : raise ValueError ( 'URL contains control codes: {}' . format ( ascii ( url ) ) ) scheme , sep , remaining = url . partition ( ':' ) if not scheme : raise ValueError ( 'URL missing scheme: {}' . format ( ascii ( url ) ) ) scheme = scheme . lower ( ) if not sep and default_scheme : remaining = url scheme = default_scheme elif not sep : raise ValueError ( 'URI missing colon: {}' . format ( ascii ( url ) ) ) if default_scheme and '.' in scheme or scheme == 'localhost' : remaining = '{}:{}' . format ( scheme , remaining ) scheme = default_scheme info = URLInfo ( ) info . encoding = encoding if scheme not in RELATIVE_SCHEME_DEFAULT_PORTS : info . raw = url info . scheme = scheme info . path = remaining return info if remaining . startswith ( '//' ) : remaining = remaining [ 2 : ] path_index = remaining . find ( '/' ) query_index = remaining . find ( '?' ) fragment_index = remaining . find ( '#' ) try : index_tuple = ( path_index , query_index , fragment_index ) authority_index = min ( num for num in index_tuple if num >= 0 ) except ValueError : authority_index = len ( remaining ) authority = remaining [ : authority_index ] resource = remaining [ authority_index : ] try : index_tuple = ( query_index , fragment_index ) path_index = min ( num for num in index_tuple if num >= 0 ) except ValueError : path_index = len ( remaining ) path = remaining [ authority_index + 1 : path_index ] or '/' if fragment_index >= 0 : query_index = fragment_index else : query_index = len ( remaining ) query = remaining [ path_index + 1 : query_index ] fragment = remaining [ query_index + 1 : ] userinfo , host = cls . parse_authority ( authority ) hostname , port = cls . parse_host ( host ) username , password = cls . parse_userinfo ( userinfo ) if not hostname : raise ValueError ( 'Hostname is empty: {}' . 
format ( ascii ( url ) ) ) info . raw = url info . scheme = scheme info . authority = authority info . path = normalize_path ( path , encoding = encoding ) info . query = normalize_query ( query , encoding = encoding ) info . fragment = normalize_fragment ( fragment , encoding = encoding ) info . userinfo = userinfo info . username = percent_decode ( username , encoding = encoding ) info . password = percent_decode ( password , encoding = encoding ) info . host = host info . hostname = hostname info . port = port or RELATIVE_SCHEME_DEFAULT_PORTS [ scheme ] info . resource = resource return info | Parse a URL and return a URLInfo . |
12,352 | def parse_authority ( cls , authority ) : userinfo , sep , host = authority . partition ( '@' ) if not sep : return '' , userinfo else : return userinfo , host | Parse the authority part and return userinfo and host . |
12,353 | def parse_userinfo ( cls , userinfo ) : username , sep , password = userinfo . partition ( ':' ) return username , password | Parse the userinfo and return username and password . |
12,354 | def parse_host ( cls , host ) : if host . endswith ( ']' ) : return cls . parse_hostname ( host ) , None else : hostname , sep , port = host . rpartition ( ':' ) if sep : port = int ( port ) if port < 0 or port > 65535 : raise ValueError ( 'Port number invalid' ) else : hostname = port port = None return cls . parse_hostname ( hostname ) , port | Parse the host and return hostname and port . |
12,355 | def parse_hostname ( cls , hostname ) : if hostname . startswith ( '[' ) : return cls . parse_ipv6_hostname ( hostname ) else : try : new_hostname = normalize_ipv4_address ( hostname ) except ValueError : new_hostname = hostname new_hostname = normalize_hostname ( new_hostname ) if any ( char in new_hostname for char in FORBIDDEN_HOSTNAME_CHARS ) : raise ValueError ( 'Invalid hostname: {}' . format ( ascii ( hostname ) ) ) return new_hostname | Parse the hostname and normalize . |
12,356 | def parse_ipv6_hostname ( cls , hostname ) : if not hostname . startswith ( '[' ) or not hostname . endswith ( ']' ) : raise ValueError ( 'Invalid IPv6 address: {}' . format ( ascii ( hostname ) ) ) hostname = ipaddress . IPv6Address ( hostname [ 1 : - 1 ] ) . compressed return hostname | Parse and normalize a IPv6 address . |
12,357 | def to_dict ( self ) : return dict ( raw = self . raw , scheme = self . scheme , authority = self . authority , netloc = self . authority , path = self . path , query = self . query , fragment = self . fragment , userinfo = self . userinfo , username = self . username , password = self . password , host = self . host , hostname = self . hostname , port = self . port , resource = self . resource , url = self . url , encoding = self . encoding , ) | Return a dict of the attributes . |
12,358 | def is_port_default ( self ) : if self . scheme in RELATIVE_SCHEME_DEFAULT_PORTS : return RELATIVE_SCHEME_DEFAULT_PORTS [ self . scheme ] == self . port | Return whether the URL is using the default port . |
12,359 | def hostname_with_port ( self ) : default_port = RELATIVE_SCHEME_DEFAULT_PORTS . get ( self . scheme ) if not default_port : return '' assert '[' not in self . hostname assert ']' not in self . hostname if self . is_ipv6 ( ) : hostname = '[{}]' . format ( self . hostname ) else : hostname = self . hostname if default_port != self . port : return '{}:{}' . format ( hostname , self . port ) else : return hostname | Return the host portion but omit default port if needed . |
12,360 | def _new_url_record ( cls , request : Request ) -> URLRecord : url_record = URLRecord ( ) url_record . url = request . url_info . url url_record . status = Status . in_progress url_record . try_count = 0 url_record . level = 0 return url_record | Return new empty URLRecord . |
12,361 | def _server_begin_response_callback ( self , response : Response ) : self . _item_session . response = response if self . _cookie_jar : self . _cookie_jar . extract_cookies ( response , self . _item_session . request ) action = self . _result_rule . handle_pre_response ( self . _item_session ) self . _file_writer_session . process_response ( response ) return action == Actions . NORMAL | Pre - response callback handler . |
12,362 | def _server_end_response_callback ( self , respoonse : Response ) : request = self . _item_session . request response = self . _item_session . response _logger . info ( __ ( _ ( 'Fetched ‘{url}’: {status_code} {reason}. ' 'Length: {content_length} [{content_type}].' ) , url = request . url , status_code = response . status_code , reason = wpull . string . printable_str ( response . reason ) , content_length = wpull . string . printable_str ( response . fields . get ( 'Content-Length' , _ ( 'none' ) ) ) , content_type = wpull . string . printable_str ( response . fields . get ( 'Content-Type' , _ ( 'none' ) ) ) , ) ) self . _result_rule . handle_response ( self . _item_session ) if response . status_code in WebProcessor . DOCUMENT_STATUS_CODES : filename = self . _file_writer_session . save_document ( response ) self . _processing_rule . scrape_document ( self . _item_session ) self . _result_rule . handle_document ( self . _item_session , filename ) elif response . status_code in WebProcessor . NO_DOCUMENT_STATUS_CODES : self . _file_writer_session . discard_document ( response ) self . _result_rule . handle_no_document ( self . _item_session ) else : self . _file_writer_session . discard_document ( response ) self . _result_rule . handle_document_error ( self . _item_session ) | Response callback handler . |
def _init_stream(self):
    '''Create the control stream and commander. Coroutine.'''
    assert not self._control_connection

    self._control_connection = yield from self._acquire_request_connection(
        self._request)
    self._control_stream = ControlStream(self._control_connection)
    self._commander = Commander(self._control_stream)

    # Mirror raw control-channel reads/writes onto the event dispatcher.
    notify = self.event_dispatcher.notify
    dispatcher = self._control_stream.data_event_dispatcher
    dispatcher.add_read_listener(
        functools.partial(notify, self.Event.control_receive_data))
    dispatcher.add_write_listener(
        functools.partial(notify, self.Event.control_send_data))
def _log_in(self):
    '''Log in to the FTP server, reusing a cached login when possible.

    Coroutine. Raises AuthenticationError on a server-side login failure.
    '''
    username = (self._request.url_info.username or
                self._request.username or 'anonymous')
    password = (self._request.url_info.password or
                self._request.password or '-wpull@')

    # Skip LOGIN if this connection already authenticated with the same
    # credentials.
    if self._login_table.get(self._control_connection) == (username, password):
        _logger.debug('Reusing existing login.')
        return

    try:
        yield from self._commander.login(username, password)
    except FTPServerError as error:
        raise AuthenticationError('Login error: {}'.format(error)) from error

    self._login_table[self._control_connection] = (username, password)
def start(self, request: Request) -> Response:
    '''Start a file download.

    Coroutine. Sends SIZE, optional REST (restart), and RETR, leaving the
    session ready for the data transfer.

    Raises:
        RuntimeError: If the session is not in the ready state.
    '''
    if self._session_state != SessionState.ready:
        raise RuntimeError('Session not ready')

    response = Response()

    yield from self._prepare_fetch(request, response)

    response.file_transfer_size = yield from self._fetch_size(request)

    if request.restart_value:
        try:
            yield from self._commander.restart(request.restart_value)
            response.restart_value = request.restart_value
        except FTPServerError:
            # Server doesn't support REST; fall back to a full download.
            _logger.debug('Could not restart file.', exc_info=1)

    yield from self._open_data_stream()

    command = Command('RETR', request.file_path)

    yield from self._begin_stream(command)

    self._session_state = SessionState.file_request_sent

    return response
def start_listing(self, request: Request) -> ListingResponse:
    '''Fetch a directory listing.

    Coroutine. Tries MLSD first and falls back to LIST when the server
    reports the command as unrecognized or not implemented.

    Raises:
        RuntimeError: If the session is not in the ready state.
    '''
    if self._session_state != SessionState.ready:
        raise RuntimeError('Session not ready')

    response = ListingResponse()

    yield from self._prepare_fetch(request, response)
    yield from self._open_data_stream()

    mlsd_command = Command('MLSD', self._request.file_path)
    list_command = Command('LIST', self._request.file_path)

    try:
        yield from self._begin_stream(mlsd_command)
        self._listing_type = 'mlsd'
    except FTPServerError as error:
        # Only fall back for "command not supported" style replies.
        if error.reply_code in (ReplyCodes.syntax_error_command_unrecognized,
                                ReplyCodes.command_not_implemented):
            self._listing_type = None
        else:
            raise

    if not self._listing_type:
        # MLSD not supported: use the legacy LIST command.
        yield from self._begin_stream(list_command)
        self._listing_type = 'list'

    _logger.debug('Listing type is %s', self._listing_type)

    self._session_state = SessionState.directory_request_sent

    return response
def _prepare_fetch(self, request: Request, response: Response):
    '''Prepare for a fetch: connect (or reconnect) and log in.

    Coroutine.
    '''
    self._request = request
    self._response = response

    yield from self._init_stream()

    connection_closed = self._control_connection.closed()

    if connection_closed:
        # Stale pooled connection: forget its cached login and reconnect.
        self._login_table.pop(self._control_connection, None)
        yield from self._control_stream.reconnect()

    request.address = self._control_connection.address

    connection_reused = not connection_closed
    self.event_dispatcher.notify(self.Event.begin_control, request,
                                 connection_reused=connection_reused)

    if connection_closed:
        # Only fresh connections send the welcome banner.
        yield from self._commander.read_welcome_message()

    yield from self._log_in()

    self._response.request = request
def _begin_stream(self, command: Command):
    '''Start the data stream transfer and notify listeners. Coroutine.'''
    reply = yield from self._commander.begin_stream(command)
    self._response.reply = reply

    self.event_dispatcher.notify(self.Event.begin_transfer, self._response)
def download_listing(self, file: Optional[IO],
                     duration_timeout: Optional[float]=None) -> ListingResponse:
    '''Read and parse the file listings.

    Args:
        file: Destination file object for the raw listing data.
        duration_timeout: Maximum transfer duration in seconds.

    Coroutine. Returns the ListingResponse with parsed ``files`` set.

    Raises:
        RuntimeError: If the listing request was not sent first.
        ProtocolError: If the listing data cannot be parsed.
    '''
    if self._session_state != SessionState.directory_request_sent:
        raise RuntimeError('File request not sent')

    self._session_state = SessionState.file_request_sent

    yield from self.download(file=file, rewind=False,
                             duration_timeout=duration_timeout)

    try:
        if self._response.body.tell() == 0:
            # Empty body: nothing to parse.
            listings = ()
        elif self._listing_type == 'mlsd':
            self._response.body.seek(0)

            machine_listings = wpull.protocol.ftp.util.parse_machine_listing(
                self._response.body.read().decode('utf-8',
                                                  errors='surrogateescape'),
                convert=True, strict=False
            )
            listings = list(
                wpull.protocol.ftp.util.machine_listings_to_file_entries(
                    machine_listings
                ))
        else:
            self._response.body.seek(0)

            file = io.TextIOWrapper(self._response.body, encoding='utf-8',
                                    errors='surrogateescape')

            listing_parser = ListingParser(file=file)

            listings = list(listing_parser.parse_input())

            _logger.debug('Listing detected as %s', listing_parser.type)

            # Detach so the wrapper doesn't close the response body.
            file.detach()
    except (ListingError, ValueError) as error:
        raise ProtocolError(*error.args) from error

    self._response.files = listings

    self._response.body.seek(0)

    self._session_state = SessionState.response_received

    return self._response
def _open_data_stream(self):
    '''Open the data connection and stream. Coroutine.'''
    @asyncio.coroutine
    def connection_factory(address: Tuple[int, int]):
        self._data_connection = yield from self._acquire_connection(
            address[0], address[1])
        return self._data_connection

    self._data_stream = yield from self._commander.setup_data_stream(
        connection_factory)

    self._response.data_address = self._data_connection.address

    # Mirror raw data-channel reads/writes onto the event dispatcher.
    notify = self.event_dispatcher.notify
    dispatcher = self._data_stream.data_event_dispatcher
    dispatcher.add_read_listener(
        functools.partial(notify, self.Event.transfer_receive_data))
    dispatcher.add_write_listener(
        functools.partial(notify, self.Event.transfer_send_data))
def _fetch_size(self, request: Request) -> Optional[int]:
    '''Return the size of the file, or None if it cannot be determined.

    Coroutine.

    Fix: the annotation claimed ``int`` but the error path returned a bare
    ``None``; annotate Optional[int] and make the return explicit.
    '''
    try:
        size = yield from self._commander.size(request.file_path)
        return size
    except FTPServerError:
        # Many servers don't implement SIZE; treat that as "unknown".
        return None
def parse_refresh(text):
    '''Parse an HTTP Refresh value and return the cleaned URL, or None.'''
    match = re.search(r'url\s*=(.+)', text, re.IGNORECASE)

    if not match:
        return

    url = match.group(1)

    # Unquote a single- or double-quoted URL value.
    for quote in ('"', "'"):
        if url.startswith(quote):
            url = url.strip(quote)
            break

    return clean_link_soup(url)
def is_likely_inline(link):
    '''Return whether the link is likely an inline resource.

    Inline resources are images, video, audio, and JavaScript.

    Fix: previously fell through returning ``None`` when the MIME type was
    unknown; now always returns a bool.
    '''
    file_type = mimetypes.guess_type(link, strict=False)[0]

    if not file_type:
        return False

    top_level_type, subtype = file_type.split('/', 1)

    return (top_level_type in ('image', 'video', 'audio') or
            subtype == 'javascript')
def is_likely_link(text):
    '''Return whether the text is likely to be a link.'''
    text = text.lower()

    # Obvious URL or path shapes.
    if (text.startswith(('http://', 'https://', 'ftp://', '/', '//', '../'))
            or text.endswith('/')):
        return True

    # Check for a short alphanumeric (not purely numeric) file extension.
    dummy, dot, file_extension = text.rpartition('.')

    if dot and file_extension and len(file_extension) <= 4:
        extension_chars = frozenset(file_extension)

        if (extension_chars and
                extension_chars <= ALPHANUMERIC_CHARS and
                not extension_chars <= NUMERIC_CHARS):
            # Bare domains like "example.com" are not treated as links.
            if file_extension in COMMON_TLD:
                return False

            if mimetypes.guess_type(text, strict=False)[0]:
                return True
            else:
                return False
def is_unlikely_link(text):
    '''Return whether the text is likely to cause false positives.

    Fix: the regex literal was lost in the source (``re.search(r, text)``
    is a NameError); reconstructed as a character class of script-like
    punctuation that never appears in plausible URLs.
    '''
    # Leading/trailing punctuation suggests string concatenation fragments.
    if text[:1] in ',;+:' or text[-1:] in '.,;+:':
        return True

    # Characters that do not belong in a bare link (quotes, braces, etc.).
    if re.search(r'''[\\$()'"[\]{}|<>`]''', text):
        return True

    # Hidden-file style names that are not relative paths.
    if text[:1] == '.' and \
            not text.startswith('./') and \
            not text.startswith('../'):
        return True

    if text in ('/', '//'):
        return True

    # A double slash without a scheme is suspicious.
    if '//' in text and '://' not in text and not text.startswith('//'):
        return True

    # Forbid strings that are MIME types.
    if text in MIMETYPES:
        return True

    tag_1, dummy, tag_2 = text.partition('.')

    # HTML tag names with a non-html "extension" (e.g. "div.class").
    if tag_1 in HTML_TAGS and tag_2 != 'html':
        return True

    # Strings whose first dotted part is a TLD (e.g. "com.example").
    if FIRST_PART_TLD_PATTERN.match(text):
        return True
def identify_link_type(filename):
    '''Return the LinkType guessed by the filename extension, or None.

    Fix: newer Python versions map ``.js`` to ``text/javascript`` instead
    of ``application/javascript``; accept both.
    '''
    mime_type = mimetypes.guess_type(filename)[0]

    if not mime_type:
        return

    if mime_type == 'text/css':
        return LinkType.css
    elif mime_type in ('application/javascript', 'text/javascript'):
        return LinkType.javascript
    elif mime_type == 'text/html' or mime_type.endswith('xml'):
        return LinkType.html
    elif (mime_type.startswith('video') or
            mime_type.startswith('image') or
            mime_type.startswith('audio') or
            mime_type.endswith('shockwave-flash')):
        return LinkType.media
def new_encoded_stream(args, stream):
    '''Return the stream, wrapped for ASCII-only output if requested.'''
    if args.ascii_print:
        return wpull.util.ASCIIStreamWriter(stream)

    return stream
def _schedule(self):
    '''Schedule the next timeout check if the timer is still running.'''
    if not self._running:
        return

    _logger.debug('Schedule check function.')
    self._call_later_handle = self._event_loop.call_later(
        self._timeout, self._check)
def _check(self):
    '''Close the connection if it has been idle past the timeout.'''
    _logger.debug('Check if timeout.')
    self._call_later_handle = None

    if self._touch_time is not None:
        idle_time = self._event_loop.time() - self._touch_time
        _logger.debug('Time difference %s', idle_time)

        if idle_time > self._timeout:
            self._connection.close()
            self._timed_out = True

    # Keep polling until the connection actually goes away.
    if not self._connection.closed():
        self._schedule()
def close(self):
    '''Cancel any pending timer and stop running.'''
    handle = self._call_later_handle

    if handle:
        handle.cancel()

    self._running = False
def closed(self) -> bool:
    '''Return whether the connection is closed.'''
    if not self.writer or not self.reader:
        return True

    return self.reader.at_eof()
def connect(self):
    '''Establish a connection.

    Coroutine. Raises if a closed connection is reused without a reset.
    '''
    _logger.debug(__('Connecting to {0}.', self._address))

    if self._state != ConnectionState.ready:
        raise Exception('Closed connection must be reset before reusing.')

    if self._sock:
        # Wrap an already-open socket (e.g. passed in for a TLS upgrade).
        connection_future = asyncio.open_connection(
            sock=self._sock, **self._connection_kwargs()
        )
    else:
        # self._address is a (host, port) pair.
        host = self._address[0]
        port = self._address[1]
        connection_future = asyncio.open_connection(
            host, port, **self._connection_kwargs()
        )

    self.reader, self.writer = yield from self.run_network_operation(
        connection_future,
        wait_timeout=self._connect_timeout,
        name='Connect')

    # Install an idle-close timer only when a timeout is configured.
    if self._timeout is not None:
        self._close_timer = CloseTimer(self._timeout, self)
    else:
        self._close_timer = DummyCloseTimer()

    self._state = ConnectionState.created

    _logger.debug('Connected.')
def readline(self) -> bytes:
    '''Read a line of data from the connection. Coroutine.'''
    assert self._state == ConnectionState.created, \
        'Expect conn created. Got {}.'.format(self._state)

    with self._close_timer.with_timeout():
        line = yield from self.run_network_operation(
            self.reader.readline(),
            close_timeout=self._timeout,
            name='Readline')

    return line
def run_network_operation(self, task, wait_timeout=None, close_timeout=None,
                          name='Network operation'):
    '''Run the task and translate failures into wpull network exceptions.

    Coroutine.

    Args:
        task: The coroutine or future to run.
        wait_timeout: Seconds to wait for the task before timing out.
        close_timeout: Idle seconds before the close timer closes the
            connection. Mutually exclusive with ``wait_timeout``.
        name: Label used in exception messages.

    Raises:
        NetworkTimedOut, SSLVerificationError, ConnectionRefused,
        NetworkError: Translated from the underlying socket/SSL errors.
    '''
    if wait_timeout is not None and close_timeout is not None:
        raise Exception(
            'Cannot use wait_timeout and close_timeout at the same time')

    try:
        if close_timeout is not None:
            with self._close_timer.with_timeout():
                data = yield from task

            if self._close_timer.is_timeout():
                raise NetworkTimedOut('{name} timed out.'.format(name=name))
            else:
                return data
        elif wait_timeout is not None:
            data = yield from asyncio.wait_for(task, wait_timeout)
            return data
        else:
            return (yield from task)
    except asyncio.TimeoutError as error:
        self.close()
        raise NetworkTimedOut(
            '{name} timed out.'.format(name=name)) from error
    except (tornado.netutil.SSLCertificateError,
            SSLVerificationError) as error:
        self.close()
        raise SSLVerificationError(
            '{name} certificate error: {error}'
            .format(name=name, error=error)) from error
    except AttributeError as error:
        # An AttributeError here means the transport vanished under us.
        self.close()
        raise NetworkError(
            '{name} network error: connection closed unexpectedly: {error}'
            .format(name=name, error=error)) from error
    except (socket.error, ssl.SSLError, OSError, IOError) as error:
        self.close()

        # Don't double-wrap our own exception types.
        if isinstance(error, NetworkError):
            raise

        if error.errno == errno.ECONNREFUSED:
            raise ConnectionRefused(
                error.errno, os.strerror(error.errno)) from error

        # Some SSL failures surface as generic OS errors; sniff the text.
        error_string = str(error).lower()

        if 'certificate' in error_string or 'unknown ca' in error_string:
            raise SSLVerificationError(
                '{name} certificate error: {error}'
                .format(name=name, error=error)) from error
        else:
            if error.errno:
                raise NetworkError(
                    error.errno, os.strerror(error.errno)) from error
            else:
                raise NetworkError(
                    '{name} network error: {error}'
                    .format(name=name, error=error)) from error
def start_tls(self, ssl_context: Union[bool, dict, ssl.SSLContext]=True) \
        -> 'SSLConnection':
    '''Start client TLS on this connection and return an SSLConnection.

    Coroutine.
    '''
    raw_socket = self.writer.get_extra_info('socket')

    # Hand the live socket to a new SSLConnection with the same settings.
    ssl_connection = SSLConnection(
        self._address,
        ssl_context=ssl_context,
        hostname=self._hostname,
        timeout=self._timeout,
        connect_timeout=self._connect_timeout,
        bind_host=self._bind_host,
        bandwidth_limiter=self._bandwidth_limiter,
        sock=raw_socket)

    yield from ssl_connection.connect()

    return ssl_connection
def _verify_cert(self, sock: ssl.SSLSocket):
    '''Raise SSLVerificationError unless the peer cert matches the hostname.'''
    verify_mode = self._ssl_context.verify_mode

    assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED,
                           ssl.CERT_OPTIONAL), \
        'Unknown verify mode {}'.format(verify_mode)

    if verify_mode == ssl.CERT_NONE:
        return

    cert = sock.getpeercert()

    if not cert:
        # A missing certificate is acceptable only in optional mode.
        if verify_mode == ssl.CERT_OPTIONAL:
            return
        raise SSLVerificationError('No SSL certificate given')

    try:
        # NOTE(review): ssl.match_hostname is deprecated/removed in newer
        # Python; kept to preserve behavior on this codebase's target.
        ssl.match_hostname(cert, self._hostname)
    except ssl.CertificateError as error:
        raise SSLVerificationError('Invalid SSL certificate') from error
def trim(self):
    '''Evict expired items and enforce the maximum item count.'''
    now_time = time.time()

    # Drop items whose expiry has passed, oldest first.
    while self._seq and self._seq[0].expire_time < now_time:
        expired = self._seq.popleft()
        del self._map[expired.key]

    # Enforce the size cap, again evicting oldest first.
    if self._max_items:
        while self._seq and len(self._seq) > self._max_items:
            oldest = self._seq.popleft()
            del self._map[oldest.key]
def strip_path_session_id(path):
    '''Strip any recognized session-ID segment from a URL path.'''
    # Apply every pattern in turn; each keeps the text around the ID.
    for pattern in SESSION_ID_PATH_PATTERNS:
        found = pattern.match(path)

        if found is not None:
            path = found.group(1) + found.group(3)

    return path
def rewrite(self, url_info: URLInfo) -> URLInfo:
    '''Rewrite the given URL.

    Strips session IDs from path/query and unwraps ``#!`` AJAX fragments
    into ``_escaped_fragment_`` query parameters. Returns the original
    URLInfo when the scheme is not HTTP(S) or when rewriting fails.
    '''
    if url_info.scheme not in ('http', 'https'):
        return url_info

    if self._session_id_enabled:
        url = '{scheme}://{authority}{path}?{query}#{fragment}'.format(
            scheme=url_info.scheme,
            authority=url_info.authority,
            path=strip_path_session_id(url_info.path),
            query=strip_query_session_id(url_info.query),
            fragment=url_info.fragment,
        )
        # Fall back to the original if the rewritten URL does not parse.
        url_info = parse_url_or_log(url) or url_info

    if self._hash_fragment_enabled and url_info.fragment.startswith('!'):
        # '#!' fragment rewritten to an '_escaped_fragment_' query param.
        if url_info.query:
            url = '{}&_escaped_fragment_={}'.format(url_info.url,
                                                    url_info.fragment[1:])
        else:
            url = '{}?_escaped_fragment_={}'.format(url_info.url,
                                                    url_info.fragment[1:])

        url_info = parse_url_or_log(url) or url_info

    return url_info
def parse_address(text: str) -> Tuple[str, int]:
    '''Parse a PASV reply and return the (host, port) pair.

    Raises:
        ValueError: If no address tuple is found in the text.
    '''
    match = re.search(
        r'\('
        r'(\d{1,3})\s*,'
        r'\s*(\d{1,3})\s*,'
        r'\s*(\d{1,3})\s*,'
        r'\s*(\d{1,3})\s*,'
        r'\s*(\d{1,3})\s*,'
        r'\s*(\d{1,3})\s*'
        r'\)',
        text)

    if not match:
        raise ValueError('No address found')

    numbers = [int(match.group(index)) for index in range(1, 7)]
    host = '{0}.{1}.{2}.{3}'.format(*numbers[:4])
    # Port is transmitted as two bytes: high, low.
    port = numbers[4] << 8 | numbers[5]

    return host, port
def reply_code_tuple(code: int) -> Tuple[int, int, int]:
    '''Return the 3-digit reply code as a digit tuple, e.g. 200 -> (2, 0, 0).'''
    hundreds, remainder = divmod(code, 100)
    tens, ones = divmod(remainder, 10)

    return hundreds, tens, ones
def parse_machine_listing(text: str, convert: bool=True,
                          strict: bool=True) -> List[dict]:
    '''Parse machine listing (MLSD) text into a list of fact dicts.

    Args:
        text: The listing text, one entry per line.
        convert: Whether to convert size/time facts to Python values.
        strict: Whether to raise ValueError on malformed input.
    '''
    entries = []

    for line in text.splitlines(False):
        entry = {}
        filename = None

        for fact in line.split(';'):
            name, sep, value = fact.partition('=')

            if sep:
                name = name.strip().lower()
                value = value.strip().lower()

                if convert:
                    try:
                        value = convert_machine_list_value(name, value)
                    except ValueError:
                        if strict:
                            raise

                entry[name] = value
            elif name[0:1] == ' ':
                # The space-prefixed fact without '=' is the filename.
                filename = name[1:]
            else:
                entry[name.strip().lower()] = ''

        if filename:
            entry['name'] = filename
            entries.append(entry)
        elif strict:
            raise ValueError('Missing filename.')

    return entries
def convert_machine_list_value(name: str, value: str) \
        -> Union[datetime.datetime, str, int]:
    '''Convert a machine-listing fact: 'modify' -> datetime, 'size' -> int.

    Other facts are returned unchanged.
    '''
    if name == 'size':
        return int(value)

    if name == 'modify':
        return convert_machine_list_time_val(value)

    return value
def convert_machine_list_time_val(text: str) -> datetime.datetime:
    '''Convert an RFC 3659 time-val (YYYYMMDDHHMMSS) to an aware datetime.

    Fractional seconds, if present, are truncated. The result is UTC.

    Raises:
        ValueError: If fewer than 14 digits are given.
    '''
    text = text[:14]

    if len(text) != 14:
        raise ValueError('Time value not 14 chars')

    # Fixed-width fields: YYYY MM DD HH MM SS.
    fields = [int(text[start:start + width])
              for start, width in
              ((0, 4), (4, 2), (6, 2), (8, 2), (10, 2), (12, 2))]

    return datetime.datetime(*fields, tzinfo=datetime.timezone.utc)
def machine_listings_to_file_entries(listings: Iterable[dict]) \
        -> Iterable[FileEntry]:
    '''Yield a FileEntry for each parsed machine-listing dict.'''
    for listing in listings:
        # 'name' is required; the other facts are optional.
        yield FileEntry(
            listing['name'],
            type=listing.get('type'),
            size=listing.get('size'),
            date=listing.get('modify'))
def reply_code(self):
    '''Return the numeric reply code stored in args, or None.'''
    args = self.args

    if len(args) >= 2 and isinstance(args[1], int):
        return args[1]
def _run_producer_wrapper(self):
    '''Run the producer; if it fails unexpectedly, stop the engine.

    Coroutine. On normal completion the engine is stopped as well.
    '''
    try:
        yield from self._producer.process()
    except Exception as error:
        # NOTE(review): a leaked StopIteration is swallowed here without
        # stopping the engine — confirm this is the intended behavior.
        if not isinstance(error, StopIteration):
            _logger.debug('Producer died.', exc_info=True)
            self.stop()
            raise
    else:
        self.stop()
def read_cdx(file, encoding='utf8'):
    '''Iterate records of a CDX file, yielding one dict per data line.

    Args:
        file: A binary file object.
        encoding: Text encoding of the file contents.

    Raises:
        ValueError: If the CDX header line is missing.
    '''
    with codecs.getreader(encoding)(file) as stream:
        header_line = stream.readline()
        # The first character of the header defines the field separator.
        separator = header_line[0]
        field_keys = header_line.strip().split(separator)

        if field_keys.pop(0) != 'CDX':
            raise ValueError('CDX header not found.')

        for line in stream:
            values = line.strip().split(separator)
            yield dict(zip(field_keys, values))
def set_common_fields(self, warc_type: str, content_type: str):
    '''Set the fields required for every WARC record.'''
    fields = self.fields
    fields[self.WARC_TYPE] = warc_type
    fields[self.CONTENT_TYPE] = content_type
    fields[self.WARC_DATE] = wpull.util.datetime_str()
    # Each record gets a unique URN-style record ID.
    fields[self.WARC_RECORD_ID] = '<{0}>'.format(uuid.uuid4().urn)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.