idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
12,500 | def parse_cldr_json ( directory , language_codes = DEFAULT_LANGUAGE_CODES , massage = True ) : am_strings = set ( ) pm_strings = set ( ) month_to_int = { } for lang in language_codes : path = os . path . join ( directory , 'main' , lang , 'ca-gregorian.json' ) with open ( path ) as in_file : doc = json . load ( in_file... | Parse CLDR JSON datasets for date and time strings . |
12,501 | def acquire_proxy ( self , host , port , use_ssl = False , host_key = None , tunnel = True ) : if self . _host_filter and not self . _host_filter . test ( host ) : connection = yield from super ( ) . acquire ( host , port , use_ssl , host_key ) return connection host_key = host_key or ( host , port , use_ssl ) proxy_ho... | Check out a connection . |
12,502 | def _establish_tunnel ( self , connection , address ) : host = '[{}]' . format ( address [ 0 ] ) if ':' in address [ 0 ] else address [ 0 ] port = address [ 1 ] request = RawRequest ( 'CONNECT' , '{0}:{1}' . format ( host , port ) ) self . add_auth_header ( request ) stream = Stream ( connection , keep_alive = True ) _... | Establish a TCP tunnel . |
12,503 | def is_file ( cls , file ) : peeked_data = wpull . string . printable_bytes ( wpull . util . peek_file ( file ) ) . lower ( ) if b'<!doctype html' in peeked_data or b'<head' in peeked_data or b'<title' in peeked_data or b'<html' in peeked_data or b'<script' in peeked_data or b'<table' in peeked_data or b'<a href' in pe... | Return whether the file is likely to be HTML . |
12,504 | def convert_http_request ( request , referrer_host = None ) : new_request = urllib . request . Request ( request . url_info . url , origin_req_host = referrer_host , ) for name , value in request . fields . get_all ( ) : new_request . add_header ( name , value ) return new_request | Convert an HTTP request . |
12,505 | def add_cookie_header ( self , request , referrer_host = None ) : new_request = convert_http_request ( request , referrer_host ) self . _cookie_jar . add_cookie_header ( new_request ) request . fields . clear ( ) for name , value in new_request . header_items ( ) : request . fields . add ( name , value ) | Wrapped add_cookie_header . |
12,506 | def extract_cookies ( self , response , request , referrer_host = None ) : new_response = HTTPResponseInfoWrapper ( response ) new_request = convert_http_request ( request , referrer_host ) self . _cookie_jar . extract_cookies ( new_response , new_request ) | Wrapped extract_cookies . |
12,507 | def close ( self ) : if self . _save_filename : self . _cookie_jar . save ( self . _save_filename , ignore_discard = self . _keep_session_cookies ) | Save the cookie jar if needed . |
12,508 | def start ( self , use_atexit = True ) : assert not self . _process _logger . debug ( 'Starting process %s' , self . _proc_args ) process_future = asyncio . create_subprocess_exec ( stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . PIPE , * self . _proc_args ) self . _process = yield from p... | Start the executable . |
12,509 | def close ( self ) : if not self . _process : return if self . _process . returncode is not None : return _logger . debug ( 'Terminate process.' ) try : self . _process . terminate ( ) except OSError as error : if error . errno != errno . ESRCH : raise for dummy in range ( 10 ) : if self . _process . returncode is not ... | Terminate or kill the subprocess . |
12,510 | def _read_stdout ( self ) : try : while self . _process . returncode is None : line = yield from self . _process . stdout . readline ( ) _logger . debug ( 'Read stdout line %s' , repr ( line ) ) if not line : break if self . _stdout_callback : yield from self . _stdout_callback ( line ) except Exception : _logger . exc... | Continuously read the stdout for messages . |
12,511 | def _read_stderr ( self ) : try : while self . _process . returncode is None : line = yield from self . _process . stderr . readline ( ) if not line : break if self . _stderr_callback : yield from self . _stderr_callback ( line ) except Exception : _logger . exception ( 'Unhandled read stderr exception.' ) raise | Continuously read stderr for error messages . |
12,512 | def read_file ( self , file : Union [ IO , asyncio . StreamWriter ] = None ) : if file : file_is_async = hasattr ( file , 'drain' ) while True : data = yield from self . _connection . read ( 4096 ) if not data : break if file : file . write ( data ) if file_is_async : yield from file . drain ( ) self . _data_event_disp... | Read from connection to file . |
12,513 | def reconnect ( self ) : if self . _connection . closed ( ) : self . _connection . reset ( ) yield from self . _connection . connect ( ) | Connect the stream if needed . |
12,514 | def write_command ( self , command : Command ) : _logger . debug ( 'Write command.' ) data = command . to_bytes ( ) yield from self . _connection . write ( data ) self . _data_event_dispatcher . notify_write ( data ) | Write a command to the stream . |
12,515 | def read_reply ( self ) -> Reply : _logger . debug ( 'Read reply' ) reply = Reply ( ) while True : line = yield from self . _connection . readline ( ) if line [ - 1 : ] != b'\n' : raise NetworkError ( 'Connection closed.' ) self . _data_event_dispatcher . notify_read ( line ) reply . parse ( line ) if reply . code is n... | Read a reply from the stream . |
12,516 | def can_fetch_pool ( self , request : Request ) : url_info = request . url_info user_agent = request . fields . get ( 'User-agent' , '' ) if self . _robots_txt_pool . has_parser ( url_info ) : return self . _robots_txt_pool . can_fetch ( url_info , user_agent ) else : raise NotInPoolError ( ) | Return whether the request can be fetched based on the pool . |
12,517 | def fetch_robots_txt ( self , request : Request , file = None ) : url_info = request . url_info url = URLInfo . parse ( '{0}://{1}/robots.txt' . format ( url_info . scheme , url_info . hostname_with_port ) ) . url if not file : file = wpull . body . new_temp_file ( os . getcwd ( ) , hint = 'robots' ) with contextlib . ... | Fetch the robots . txt file for the request . |
12,518 | def can_fetch ( self , request : Request , file = None ) -> bool : try : return self . can_fetch_pool ( request ) except NotInPoolError : pass yield from self . fetch_robots_txt ( request , file = file ) return self . can_fetch_pool ( request ) | Return whether the request can be fetched . |
12,519 | def _read_content ( self , response : Response , original_url_info : URLInfo ) : data = response . body . read ( 4096 ) url_info = original_url_info try : self . _robots_txt_pool . load_robots_txt ( url_info , data ) except ValueError : _logger . warning ( __ ( _ ( 'Failed to parse {url} for robots exclusion rules. ' '... | Read response and parse the contents into the pool . |
12,520 | def _accept_as_blank ( self , url_info : URLInfo ) : _logger . debug ( __ ( 'Got empty robots.txt for {0}.' , url_info . url ) ) self . _robots_txt_pool . load_robots_txt ( url_info , '' ) | Mark the URL as OK in the pool . |
12,521 | def to_text_format ( self ) : return '\n' . join ( itertools . chain ( ( self . fetch_date . strftime ( '%Y%m%d%H%M%S' ) , ) , ( rr . to_text ( ) for rr in self . resource_records ) , ( ) , ) ) | Format the detached DNS information as text . |
12,522 | def first_ipv4 ( self ) -> Optional [ AddressInfo ] : for info in self . _address_infos : if info . family == socket . AF_INET : return info | The first IPv4 address . |
12,523 | def first_ipv6 ( self ) -> Optional [ AddressInfo ] : for info in self . _address_infos : if info . family == socket . AF_INET6 : return info | The first IPv6 address . |
12,524 | def rotate ( self ) : item = self . _address_infos . pop ( 0 ) self . _address_infos . append ( item ) | Move the first address to the last position . |
12,525 | def _query_dns ( self , host : str , family : int = socket . AF_INET ) -> dns . resolver . Answer : record_type = { socket . AF_INET : 'A' , socket . AF_INET6 : 'AAAA' } [ family ] event_loop = asyncio . get_event_loop ( ) query = functools . partial ( self . _dns_resolver . query , host , record_type , source = self .... | Query DNS using Python . |
12,526 | def _getaddrinfo ( self , host : str , family : int = socket . AF_UNSPEC ) -> List [ tuple ] : event_loop = asyncio . get_event_loop ( ) query = event_loop . getaddrinfo ( host , 0 , family = family , proto = socket . IPPROTO_TCP ) if self . _timeout : query = asyncio . wait_for ( query , self . _timeout ) try : result... | Query DNS using system resolver . |
12,527 | def _convert_dns_answer ( cls , answer : dns . resolver . Answer ) -> Iterable [ AddressInfo ] : assert answer . rdtype in ( dns . rdatatype . A , dns . rdatatype . AAAA ) if answer . rdtype == dns . rdatatype . A : family = socket . AF_INET else : family = socket . AF_INET6 for record in answer : ip_address = record .... | Convert the DNS answer to address info . |
12,528 | def _convert_addrinfo ( cls , results : List [ tuple ] ) -> Iterable [ AddressInfo ] : for result in results : family = result [ 0 ] address = result [ 4 ] ip_address = address [ 0 ] if family == socket . AF_INET6 : flow_info = address [ 2 ] control_id = address [ 3 ] else : flow_info = None control_id = None yield Add... | Convert the result list to address info . |
12,529 | def _get_ipv6_info ( cls , ip_address : str ) -> tuple : results = socket . getaddrinfo ( ip_address , 0 , proto = socket . IPPROTO_TCP , flags = socket . AI_NUMERICHOST ) flow_info = results [ 0 ] [ 4 ] [ 2 ] control_id = results [ 0 ] [ 4 ] [ 3 ] return flow_info , control_id | Extract the flow info and control id . |
12,530 | def raise_if_not_match ( cls , action : str , expected_code : Union [ int , Sequence [ int ] ] , reply : Reply ) : if isinstance ( expected_code , int ) : expected_codes = ( expected_code , ) else : expected_codes = expected_code if reply . code not in expected_codes : raise FTPServerError ( 'Failed action {action}: {r... | Raise FTPServerError if not expected reply code . |
12,531 | def read_welcome_message ( self ) : reply = yield from self . _control_stream . read_reply ( ) self . raise_if_not_match ( 'Server ready' , ReplyCodes . service_ready_for_new_user , reply ) | Read the welcome message . |
12,532 | def passive_mode ( self ) -> Tuple [ str , int ] : yield from self . _control_stream . write_command ( Command ( 'PASV' ) ) reply = yield from self . _control_stream . read_reply ( ) self . raise_if_not_match ( 'Passive mode' , ReplyCodes . entering_passive_mode , reply ) try : return wpull . protocol . ftp . util . pa... | Enable passive mode . |
12,533 | def setup_data_stream ( self , connection_factory : Callable [ [ tuple ] , Connection ] , data_stream_factory : Callable [ [ Connection ] , DataStream ] = DataStream ) -> DataStream : yield from self . _control_stream . write_command ( Command ( 'TYPE' , 'I' ) ) reply = yield from self . _control_stream . read_reply ( ... | Create and setup a data stream . |
12,534 | def begin_stream ( self , command : Command ) -> Reply : yield from self . _control_stream . write_command ( command ) reply = yield from self . _control_stream . read_reply ( ) self . raise_if_not_match ( 'Begin stream' , ( ReplyCodes . file_status_okay_about_to_open_data_connection , ReplyCodes . data_connection_alre... | Start sending content on the data stream . |
12,535 | def read_stream ( self , file : IO , data_stream : DataStream ) -> Reply : yield from data_stream . read_file ( file = file ) reply = yield from self . _control_stream . read_reply ( ) self . raise_if_not_match ( 'End stream' , ReplyCodes . closing_data_connection , reply ) data_stream . close ( ) return reply | Read from the data stream . |
12,536 | def restart ( self , offset : int ) : yield from self . _control_stream . write_command ( Command ( 'REST' , str ( offset ) ) ) reply = yield from self . _control_stream . read_reply ( ) self . raise_if_not_match ( 'Restart' , ReplyCodes . requested_file_action_pending_further_information , reply ) | Send restart command . |
12,537 | def get_version ( exe_path = 'youtube-dl' ) : process = subprocess . Popen ( [ exe_path , '--version' ] , stdout = subprocess . PIPE ) version_string = process . communicate ( ) [ 0 ] version_string = version_string . decode ( ) . strip ( ) assert ' ' not in version_string , version_string return version_string | Get the version string of youtube - dl . |
12,538 | def _get_output_template ( self ) : path = self . _file_writer_session . extra_resource_path ( '.youtube-dl' ) if not path : self . _temp_dir = tempfile . TemporaryDirectory ( dir = self . _root_path , prefix = 'tmp-wpull-youtubedl' ) path = '{}/tmp' . format ( self . _temp_dir . name ) return path , '{}.%(id)s.%(forma... | Return the path prefix and output template . |
12,539 | def _write_warc_metadata ( self ) : uri = 'metadata://{}{}' . format ( self . _item_session . url_record . url_info . authority , self . _item_session . url_record . url_info . resource ) glob_pattern = self . _path_prefix + '*.info.json' filenames = list ( glob . glob ( glob_pattern ) ) if not filenames : _logger . wa... | Write the JSON metadata to WARC . |
12,540 | def process ( self , session : AppSession ) : if not session . args . warc_dedup : return iterable = wpull . warc . format . read_cdx ( session . args . warc_dedup , encoding = session . args . local_encoding or 'utf-8' ) missing_url_msg = _ ( 'The URL ("a") is missing from the CDX file.' ) missing_id_msg = _ ( 'The re... | Populate the visits from the CDX into the URL table . |
12,541 | def to_lxml_encoding ( encoding ) : try : lxml . html . HTMLParser ( encoding = encoding ) except LookupError : encoding = encoding . replace ( '-' , '' ) else : return encoding try : lxml . html . HTMLParser ( encoding = encoding ) except LookupError : encoding = encoding . replace ( '_' , '' ) else : return encoding ... | Check if lxml supports the specified encoding . |
12,542 | def parse_lxml ( self , file , encoding = None , target_class = HTMLParserTarget , parser_type = 'html' ) : if encoding : lxml_encoding = to_lxml_encoding ( encoding ) or 'latin1' else : lxml_encoding = encoding elements = [ ] callback_func = elements . append target = target_class ( callback_func ) if parser_type == '... | Return an iterator of elements found in the document . |
12,543 | def parse_doctype ( cls , file , encoding = None ) : if encoding : lxml_encoding = to_lxml_encoding ( encoding ) or 'latin1' else : lxml_encoding = encoding try : parser = lxml . etree . XMLParser ( encoding = lxml_encoding , recover = True ) tree = lxml . etree . parse ( io . BytesIO ( wpull . util . peek_file ( file ... | Get the doctype from the document . |
12,544 | def detect_parser_type ( cls , file , encoding = None ) : is_xml = XMLDetector . is_file ( file ) doctype = cls . parse_doctype ( file , encoding = encoding ) or '' if not doctype and is_xml : return 'xml' if 'XHTML' in doctype : return 'xhtml' return 'html' | Get the suitable parser type for the document . |
12,545 | def new_temp_file ( directory = None , hint = '' ) : return tempfile . NamedTemporaryFile ( prefix = 'tmp-wpull-{0}-' . format ( hint ) , suffix = '.tmp' , dir = directory ) | Return a new temporary file . |
12,546 | def content ( self ) : if not self . _content_data : if is_seekable ( self . file ) : with wpull . util . reset_file_offset ( self . file ) : self . _content_data = self . file . read ( ) else : self . _content_data = self . file . read ( ) return self . _content_data | Return the content of the file . |
12,547 | def size ( self ) : try : return os . fstat ( self . file . fileno ( ) ) . st_size except io . UnsupportedOperation : pass if is_seekable ( self . file ) : with wpull . util . reset_file_offset ( self . file ) : self . file . seek ( 0 , os . SEEK_END ) return self . file . tell ( ) raise OSError ( 'Unsupported operatio... | Return the size of the file . |
12,548 | def _apply_pragmas_callback ( cls , connection , record ) : _logger . debug ( 'Setting pragmas.' ) connection . execute ( 'PRAGMA journal_mode=WAL' ) connection . execute ( 'PRAGMA synchronous=NORMAL' ) | Set SQLite pragmas . |
12,549 | def has_parser ( self , url_info : URLInfo ) : key = self . url_info_key ( url_info ) return key in self . _parsers | Return whether a parser has been created for the URL . |
12,550 | def can_fetch ( self , url_info : URLInfo , user_agent : str ) : key = self . url_info_key ( url_info ) parser = self . _parsers [ key ] return parser . is_allowed ( user_agent , url_info . url ) | Return whether the URL can be fetched . |
12,551 | def load_robots_txt ( self , url_info : URLInfo , text : str ) : key = self . url_info_key ( url_info ) parser = robotexclusionrulesparser . RobotExclusionRulesParser ( ) parser . parse ( text ) self . _parsers [ key ] = parser | Load the robots . txt file . |
12,552 | def _build_demux_document_scraper ( cls , session : AppSession ) : session . factory . new ( 'DemuxDocumentScraper' , cls . _build_document_scrapers ( session ) ) | Create demux document scraper . |
12,553 | def _build_document_scrapers ( cls , session : AppSession ) : html_parser = session . factory [ 'HTMLParser' ] element_walker = session . factory . new ( 'ElementWalker' ) scrapers = [ session . factory . new ( 'HTMLScraper' , html_parser , element_walker , followed_tags = session . args . follow_tags , ignored_tags = ... | Create the document scrapers . |
12,554 | def _build_request_factory ( cls , session : AppSession ) : def request_factory ( * args , ** kwargs ) : request = session . factory . class_map [ 'Request' ] ( * args , ** kwargs ) user_agent = session . args . user_agent or session . default_user_agent request . fields [ 'User-Agent' ] = user_agent if session . args ... | Create the request factory . |
12,555 | def _build_http_client ( cls , session : AppSession ) : stream_factory = functools . partial ( HTTPStream , ignore_length = session . args . ignore_length , keep_alive = session . args . http_keep_alive ) return session . factory . new ( 'HTTPClient' , connection_pool = session . factory [ 'ConnectionPool' ] , stream_f... | Create the HTTP client . |
12,556 | def _build_web_client ( cls , session : AppSession ) : cookie_jar = cls . _build_cookie_jar ( session ) http_client = cls . _build_http_client ( session ) redirect_factory = functools . partial ( session . factory . class_map [ 'RedirectTracker' ] , max_redirects = session . args . max_redirect ) return session . facto... | Build Web Client . |
12,557 | def _build_cookie_jar ( cls , session : AppSession ) : if not session . args . cookies : return if session . args . load_cookies or session . args . save_cookies : session . factory . set ( 'CookieJar' , BetterMozillaCookieJar ) cookie_jar = session . factory . new ( 'CookieJar' ) if session . args . load_cookies : coo... | Build the cookie jar . |
12,558 | def _build_ftp_client ( cls , session : AppSession ) : return session . factory . new ( 'FTPClient' , connection_pool = session . factory [ 'ConnectionPool' ] , ) | Build FTP client . |
12,559 | def process ( self , session : AppSession ) : args = session . args if not ( args . phantomjs or args . youtube_dl or args . proxy_server ) : return proxy_server = session . factory . new ( 'HTTPProxyServer' , session . factory [ 'HTTPClient' ] , ) cookie_jar = session . factory . get ( 'CookieJarWrapper' ) proxy_copro... | Build MITM proxy server . |
12,560 | def _build_processor ( cls , session : AppSession ) : web_processor = cls . _build_web_processor ( session ) ftp_processor = cls . _build_ftp_processor ( session ) delegate_processor = session . factory . new ( 'Processor' ) delegate_processor . register ( 'http' , web_processor ) delegate_processor . register ( 'https... | Create the Processor . |
12,561 | def _build_web_processor ( cls , session : AppSession ) : args = session . args url_filter = session . factory [ 'DemuxURLFilter' ] document_scraper = session . factory [ 'DemuxDocumentScraper' ] file_writer = session . factory [ 'FileWriter' ] post_data = cls . _get_post_data ( session . args ) web_client = session . ... | Build WebProcessor . |
12,562 | def _build_ftp_processor ( cls , session : AppSession ) : ftp_client = session . factory [ 'FTPClient' ] fetch_params = session . factory . new ( 'FTPProcessorFetchParams' , remove_listing = session . args . remove_listing , retr_symlinks = session . args . retr_symlinks , preserve_permissions = session . args . preser... | Build FTPProcessor . |
12,563 | def _get_post_data ( cls , args ) : if args . post_data : return args . post_data elif args . post_file : return args . post_file . read ( ) | Return the post data . |
12,564 | def _build_robots_txt_checker ( cls , session : AppSession ) : if session . args . robots : robots_txt_pool = session . factory . new ( 'RobotsTxtPool' ) robots_txt_checker = session . factory . new ( 'RobotsTxtChecker' , web_client = session . factory [ 'WebClient' ] , robots_txt_pool = robots_txt_pool ) return robots... | Build robots . txt checker . |
12,565 | def _build_phantomjs_coprocessor ( cls , session : AppSession , proxy_port : int ) : page_settings = { } default_headers = NameValueRecord ( ) for header_string in session . args . header : default_headers . parse ( header_string ) default_headers . add ( 'Accept-Language' , '*' ) if not session . args . http_compressi... | Build the proxy server and PhantomJS client , controller , and coprocessor . |
12,566 | def _build_youtube_dl_coprocessor ( cls , session : AppSession , proxy_port : int ) : wpull . processor . coprocessor . youtubedl . get_version ( session . args . youtube_dl_exe ) coprocessor = session . factory . new ( 'YoutubeDlCoprocessor' , session . args . youtube_dl_exe , ( session . args . proxy_server_address ,... | Build youtube - dl coprocessor . |
12,567 | def build ( self ) -> Application : pipelines = self . _build_pipelines ( ) self . _factory . new ( 'Application' , pipelines ) return self . _factory [ 'Application' ] | Put the application together . |
12,568 | def is_supported ( cls , file = None , request = None , response = None , url_info = None ) : tests = ( ( response , cls . is_response ) , ( file , cls . is_file ) , ( request , cls . is_request ) , ( url_info , cls . is_url ) ) for instance , method in tests : if instance : try : result = method ( instance ) except No... | Given the hints return whether the document is supported . |
12,569 | def _print_stats ( cls , stats : Statistics , human_format_speed : bool = True ) : time_length = datetime . timedelta ( seconds = int ( stats . stop_time - stats . start_time ) ) file_size = wpull . string . format_size ( stats . size ) if stats . bandwidth_meter . num_samples : speed = stats . bandwidth_meter . speed ... | Log the final statistics to the user . |
12,570 | def is_no_body ( request , response , no_content_codes = DEFAULT_NO_CONTENT_CODES ) : if 'Content-Length' not in response . fields and 'Transfer-Encoding' not in response . fields and ( response . status_code in no_content_codes or request . method . upper ( ) == 'HEAD' ) : return True else : return False | Return whether a content body is not expected . |
12,571 | def write_request ( self , request , full_url = False ) : _logger . debug ( 'Sending headers.' ) if hasattr ( request , 'prepare_for_send' ) : request . prepare_for_send ( full_url = full_url ) if self . _ignore_length : request . fields [ 'Connection' ] = 'close' data = request . to_bytes ( ) self . _data_event_dispat... | Send the request's HTTP status line and header fields . |
12,572 | def write_body ( self , file , length = None ) : _logger . debug ( 'Sending body.' ) file_is_async = ( asyncio . iscoroutine ( file . read ) or asyncio . iscoroutinefunction ( file . read ) ) _logger . debug ( __ ( 'Body is async: {0}' , file_is_async ) ) if length is not None : bytes_left = length while True : if leng... | Send the request's content body . |
12,573 | def read_response ( self , response = None ) : _logger . debug ( 'Reading header.' ) if response is None : response = Response ( ) header_lines = [ ] bytes_read = 0 while True : try : data = yield from self . _connection . readline ( ) except ValueError as error : raise ProtocolError ( 'Invalid header: {0}' . format ( ... | Read the response's HTTP status line and header fields . |
12,574 | def read_body ( self , request , response , file = None , raw = False ) : if is_no_body ( request , response ) : return if not raw : self . _setup_decompressor ( response ) read_strategy = self . get_read_strategy ( response ) if self . _ignore_length and read_strategy == 'length' : read_strategy = 'close' if read_stra... | Read the response's content body . |
12,575 | def _read_body_until_close ( self , response , file ) : _logger . debug ( 'Reading body until close.' ) file_is_async = hasattr ( file , 'drain' ) while True : data = yield from self . _connection . read ( self . _read_size ) if not data : break self . _data_event_dispatcher . notify_read ( data ) content_data = self .... | Read the response until the connection closes . |
12,576 | def _read_body_by_length ( self , response , file ) : _logger . debug ( 'Reading body by length.' ) file_is_async = hasattr ( file , 'drain' ) try : body_size = int ( response . fields [ 'Content-Length' ] ) if body_size < 0 : raise ValueError ( 'Content length cannot be negative.' ) except ValueError as error : _logge... | Read the connection specified by a length . |
12,577 | def _read_body_by_chunk ( self , response , file , raw = False ) : reader = ChunkedTransferReader ( self . _connection ) file_is_async = hasattr ( file , 'drain' ) while True : chunk_size , data = yield from reader . read_chunk_header ( ) self . _data_event_dispatcher . notify_read ( data ) if raw : file . write ( data... | Read the connection using chunked transfer encoding . |
12,578 | def get_read_strategy ( cls , response ) : chunked_match = re . match ( r'chunked($|;)' , response . fields . get ( 'Transfer-Encoding' , '' ) ) if chunked_match : return 'chunked' elif 'Content-Length' in response . fields : return 'length' else : return 'close' | Return the appropriate algorithm of reading response . |
12,579 | def _setup_decompressor ( self , response ) : encoding = response . fields . get ( 'Content-Encoding' , '' ) . lower ( ) if encoding == 'gzip' : self . _decompressor = wpull . decompression . GzipDecompressor ( ) elif encoding == 'deflate' : self . _decompressor = wpull . decompression . DeflateDecompressor ( ) else : ... | Set up the content encoding decompressor . |
12,580 | def _decompress_data ( self , data ) : if self . _decompressor : try : return self . _decompressor . decompress ( data ) except zlib . error as error : raise ProtocolError ( 'zlib error: {0}.' . format ( error ) ) from error else : return data | Decompress the given data and return the uncompressed data . |
12,581 | def _flush_decompressor ( self ) : if self . _decompressor : try : return self . _decompressor . flush ( ) except zlib . error as error : raise ProtocolError ( 'zlib flush error: {0}.' . format ( error ) ) from error else : return b'' | Return any data left in the decompressor . |
12,582 | def gzip_uncompress ( data , truncated = False ) : decompressor = SimpleGzipDecompressor ( ) inflated_data = decompressor . decompress ( data ) if not truncated : inflated_data += decompressor . flush ( ) return inflated_data | Uncompress gzip data . |
12,583 | def set_status ( self , status : Status , increment_try_count : bool = True , filename : str = None ) : url = self . url_record . url assert not self . _try_count_incremented , ( url , status ) if increment_try_count : self . _try_count_incremented = True _logger . debug ( __ ( 'Marking URL {0} status {1}.' , url , sta... | Mark the item with the given status . |
12,584 | def add_child_url ( self , url : str , inline : bool = False , link_type : Optional [ LinkType ] = None , post_data : Optional [ str ] = None , level : Optional [ int ] = None , replace : bool = False ) : url_properties = URLProperties ( ) url_properties . level = self . url_record . level + 1 if level is None else lev... | Add links scraped from the document with automatic values . |
12,585 | def child_url_record ( self , url : str , inline : bool = False , link_type : Optional [ LinkType ] = None , post_data : Optional [ str ] = None , level : Optional [ int ] = None ) : url_record = URLRecord ( ) url_record . url = url url_record . status = Status . todo url_record . try_count = 0 url_record . level = sel... | Return a child URLRecord . |
12,586 | def get_heading_encoding ( response ) : encoding = wpull . protocol . http . util . parse_charset ( response . fields . get ( 'content-type' , '' ) ) if encoding : return wpull . string . normalize_codec_name ( encoding ) else : return None | Return the document encoding from an HTTP header . |
12,587 | def detect_response_encoding ( response , is_html = False , peek = 131072 ) : encoding = get_heading_encoding ( response ) encoding = wpull . string . detect_encoding ( wpull . util . peek_file ( response . body , peek ) , encoding = encoding , is_html = is_html ) _logger . debug ( __ ( 'Got encoding: {0}' , encoding )... | Return the likely encoding of the response document . |
12,588 | def contains ( self , url : str ) : try : self . get_one ( url ) except NotFound : return False else : return True | Return whether the URL is in the table . |
12,589 | def add_one ( self , url : str , url_properties : Optional [ URLProperties ] = None , url_data : Optional [ URLData ] = None ) : self . add_many ( [ AddURLInfo ( url , url_properties , url_data ) ] ) | Add a single URL to the table . |
12,590 | def stream ( self ) : chunk_a = None chunk_b = None chunk_a_index = 0 chunk_b_index = 0 search_start_index = 0 while True : chunk_a = chunk_b chunk_a_index = chunk_b_index chunk_b = self . _file . read ( self . _read_size ) if chunk_a is None : continue chunk_b_index = chunk_a_index + len ( chunk_a ) if not chunk_a : b... | Iterate the file stream . |
12,591 | def notify ( self , * args , ** kwargs ) : for handler in tuple ( self . handlers ) : handler ( * args , ** kwargs ) | Call all the callback handlers with given arguments . |
12,592 | def new ( self , name , * args , ** kwargs ) : if name in self . _instance_map : raise ValueError ( 'Instance {0} is already initialized' . format ( name ) ) instance = self . _class_map [ name ] ( * args , ** kwargs ) self . _instance_map [ name ] = instance return instance | Create an instance . |
12,593 | def is_all_initialized ( self ) : return frozenset ( self . _class_map . keys ( ) ) == frozenset ( self . _instance_map . keys ( ) ) | Return whether all the instances have been initialized . |
12,594 | def normalize_name ( name , overrides = None ) : normalized_name = name . title ( ) if overrides : override_map = dict ( [ ( name . title ( ) , name ) for name in overrides ] ) return override_map . get ( normalized_name , normalized_name ) else : return normalized_name | Normalize the key name to title case . |
12,595 | def guess_line_ending ( string ) : assert isinstance ( string , str ) , 'Expect str. Got {}' . format ( type ( string ) ) crlf_count = string . count ( '\r\n' ) lf_count = string . count ( '\n' ) if crlf_count >= lf_count : return '\r\n' else : return '\n' | Return the most likely line delimiter from the string . |
12,596 | def unfold_lines ( string ) : assert isinstance ( string , str ) , 'Expect str. Got {}' . format ( type ( string ) ) lines = string . splitlines ( ) line_buffer = io . StringIO ( ) for line_number in range ( len ( lines ) ) : line = lines [ line_number ] if line and line [ 0 : 1 ] in ( ' ' , '\t' ) : line_buffer . writ... | Join lines that are wrapped . |
12,597 | def parse ( self , string , strict = True ) : if isinstance ( string , bytes ) : errors = 'strict' if strict else 'replace' string = string . decode ( self . encoding , errors = errors ) if not self . raw : self . raw = string else : self . raw += string lines = unfold_lines ( string ) . splitlines ( ) for line in line... | Parse the string or bytes . |
12,598 | def add ( self , name , value ) : normalized_name = normalize_name ( name , self . _normalize_overrides ) self . _map [ normalized_name ] . append ( value ) | Append the name - value pair to the record . |
12,599 | def get_list ( self , name ) : normalized_name = normalize_name ( name , self . _normalize_overrides ) return self . _map [ normalized_name ] | Return all the values for given name . |
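
Several of the non-truncated rows above are complete enough to run on their own. The sketch below restates the body-read logic from rows 12,570 (`is_no_body`) and 12,578 (`get_read_strategy`), with the request/response objects flattened into plain arguments and the no-content code set guessed; the real wpull signatures and constants differ.

```python
import re

# Assumption: the set of status codes that never carry a body; the actual
# wpull constant (DEFAULT_NO_CONTENT_CODES) may differ.
DEFAULT_NO_CONTENT_CODES = frozenset(range(100, 200)) | {204, 304}


def is_no_body(method, status_code, fields,
               no_content_codes=DEFAULT_NO_CONTENT_CODES):
    # Row 12,570: no body is expected when no length/encoding header is
    # present and either the status code forbids one or the request was HEAD.
    has_body_headers = ('Content-Length' in fields
                        or 'Transfer-Encoding' in fields)
    return (not has_body_headers
            and (status_code in no_content_codes or method.upper() == 'HEAD'))


def get_read_strategy(fields):
    # Row 12,578: chunked transfer wins, then an explicit length,
    # otherwise read until the connection closes.
    if re.match(r'chunked($|;)', fields.get('Transfer-Encoding', '')):
        return 'chunked'
    elif 'Content-Length' in fields:
        return 'length'
    else:
        return 'close'


assert get_read_strategy({'Transfer-Encoding': 'chunked'}) == 'chunked'
assert get_read_strategy({'Content-Length': '42'}) == 'length'
assert get_read_strategy({}) == 'close'
assert is_no_body('HEAD', 200, {})
assert not is_no_body('GET', 200, {'Content-Length': '42'})
```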
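Rows 12,594 (`normalize_name`) and 12,595 (`guess_line_ending`) are small pure functions shown in full above; here they are restated as runnable Python with a couple of usage checks (the `overrides` example value is hypothetical).

```python
def normalize_name(name, overrides=None):
    # Normalize a header key to title case, honoring explicit overrides.
    normalized_name = name.title()
    if overrides:
        override_map = {o.title(): o for o in overrides}
        return override_map.get(normalized_name, normalized_name)
    return normalized_name


def guess_line_ending(string):
    # Prefer CRLF when it is at least as common as bare LF.
    return '\r\n' if string.count('\r\n') >= string.count('\n') else '\n'


assert normalize_name('content-length') == 'Content-Length'
assert normalize_name('etag', overrides=['ETag']) == 'ETag'
assert guess_line_ending('a\r\nb\r\n') == '\r\n'
assert guess_line_ending('a\nb\nc\r\n') == '\n'
```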
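Rows 12,522–12,524 implement a round-robin over resolved addresses. A minimal sketch, assuming an `AddressInfo` with just `ip_address` and `family` fields and a `ResolveResult` holder class (the real wpull types carry flow info and more):

```python
import socket
from typing import List, NamedTuple, Optional


class AddressInfo(NamedTuple):
    ip_address: str  # assumed minimal shape; wpull's type has more fields
    family: int


class ResolveResult:
    """Round-robin list of resolved addresses (rows 12,522-12,524)."""

    def __init__(self, address_infos: List[AddressInfo]):
        self._address_infos = list(address_infos)

    def first_ipv4(self) -> Optional[AddressInfo]:
        for info in self._address_infos:
            if info.family == socket.AF_INET:
                return info
        return None

    def first_ipv6(self) -> Optional[AddressInfo]:
        for info in self._address_infos:
            if info.family == socket.AF_INET6:
                return info
        return None

    def rotate(self):
        # Move the first address to the last position.
        self._address_infos.append(self._address_infos.pop(0))


result = ResolveResult([
    AddressInfo('::1', socket.AF_INET6),
    AddressInfo('127.0.0.1', socket.AF_INET),
])
result.rotate()
assert result.first_ipv4().ip_address == '127.0.0.1'
```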
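Finally, rows 12,592 (`new`) and 12,593 (`is_all_initialized`) belong to the dependency factory that most `_build_*` rows above call through `session.factory.new(...)` and `session.factory[...]`. A minimal sketch of that pattern; the `__getitem__` accessor is an assumption inferred from the `session.factory['HTMLParser']` usage in the rows.

```python
class Factory:
    def __init__(self, class_map):
        self._class_map = dict(class_map)
        self._instance_map = {}

    def new(self, name, *args, **kwargs):
        # Each named instance may only be constructed once (row 12,592).
        if name in self._instance_map:
            raise ValueError('Instance {0} is already initialized'.format(name))
        instance = self._class_map[name](*args, **kwargs)
        self._instance_map[name] = instance
        return instance

    def __getitem__(self, name):
        # Assumed accessor matching the session.factory['Name'] usage above.
        return self._instance_map[name]

    def is_all_initialized(self):
        # True once every registered class has an instance (row 12,593).
        return (frozenset(self._class_map.keys())
                == frozenset(self._instance_map.keys()))


factory = Factory({'CookieJar': dict})
factory.new('CookieJar')
assert factory.is_all_initialized()
```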