idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
12,500
def parse_cldr_json(directory, language_codes=DEFAULT_LANGUAGE_CODES,
                    massage=True):
    '''Parse CLDR Gregorian-calendar JSON datasets.

    Collects abbreviated month names and AM/PM strings for each language
    and prints them.

    Args:
        directory: root of the CLDR JSON distribution (contains ``main/``).
        language_codes: iterable of language directory names.
        massage (bool): if True, NFKD-normalize, lowercase, and strip
            trailing dots so variants compare consistently.

    Returns:
        tuple: ``(am_strings, pm_strings, month_to_int)``. The original
        version only printed the data; returning it as well is
        backward compatible (the return value was previously ``None``
        and unused).
    '''
    am_strings = set()
    pm_strings = set()
    month_to_int = {}

    def _clean(text):
        # Normalize so accented/abbreviated forms map to one key.
        return unicodedata.normalize('NFKD', text).lower().strip('.')

    for lang in language_codes:
        path = os.path.join(directory, 'main', lang, 'ca-gregorian.json')
        with open(path) as in_file:
            doc = json.load(in_file)

        calendar = doc['main'][lang]['dates']['calendars']['gregorian']
        months_dict = calendar['months']['format']['abbreviated']
        day_periods_dict = calendar['dayPeriods']['format']['abbreviated']

        for month, month_str in months_dict.items():
            if massage:
                month_str = _clean(month_str)
            month_to_int[month_str] = int(month)

        am_str = day_periods_dict['am']
        pm_str = day_periods_dict['pm']

        if massage:
            am_str = _clean(am_str)
            pm_str = _clean(pm_str)

        am_strings.add(am_str)
        pm_strings.add(pm_str)

    print(pprint.pformat(am_strings))
    print(pprint.pformat(pm_strings))
    print(pprint.pformat(month_to_int))

    return am_strings, pm_strings, month_to_int
Parse CLDR JSON datasets for date and time strings.
12,501
def acquire_proxy(self, host, port, use_ssl=False, host_key=None,
                  tunnel=True):
    '''Check out a connection, routed through the proxy.

    Hosts rejected by the host filter bypass the proxy entirely.

    Coroutine.
    '''
    # Direct connection for hosts outside the proxy filter.
    if self._host_filter and not self._host_filter.test(host):
        direct = yield from super().acquire(host, port, use_ssl, host_key)
        return direct

    host_key = host_key or (host, port, use_ssl)
    proxy_host, proxy_port = self._proxy_address

    connection = yield from super().acquire(
        proxy_host, proxy_port, self._proxy_ssl, host_key=host_key)
    connection.proxied = True

    _logger.debug('Request for proxy connection.')

    if connection.closed():
        _logger.debug('Connecting to proxy.')
        yield from connection.connect()

        if tunnel:
            yield from self._establish_tunnel(connection, (host, port))

        if use_ssl:
            # Wrap the tunnel with TLS and remember the pairing so the
            # wrapped connection can be released later.
            ssl_connection = yield from connection.start_tls(
                self._ssl_context)
            ssl_connection.proxied = True
            ssl_connection.tunneled = True
            self._connection_map[ssl_connection] = connection
            connection.wrapped_connection = ssl_connection
            return ssl_connection

    if connection.wrapped_connection:
        # Reuse the TLS wrapper created on a previous checkout.
        ssl_connection = connection.wrapped_connection
        self._connection_map[ssl_connection] = connection
        return ssl_connection

    return connection
Check out a connection .
12,502
def _establish_tunnel(self, connection, address):
    '''Establish a TCP tunnel through the proxy with CONNECT.

    Coroutine.

    Raises:
        NetworkError: if the proxy refuses the CONNECT request.
    '''
    # Bracket IPv6 literals for the request target.
    host = '[{}]'.format(address[0]) if ':' in address[0] else address[0]
    port = address[1]

    request = RawRequest('CONNECT', '{0}:{1}'.format(host, port))
    self.add_auth_header(request)

    stream = Stream(connection, keep_alive=True)

    _logger.debug('Sending Connect.')
    yield from stream.write_request(request)

    _logger.debug('Read proxy response.')
    response = yield from stream.read_response()

    if response.status_code != 200:
        # Drain the error body so it can be logged for debugging.
        debug_file = io.BytesIO()
        _logger.debug('Read proxy response body.')
        yield from stream.read_body(request, response, file=debug_file)
        debug_file.seek(0)
        _logger.debug(ascii(debug_file.read()))

    if response.status_code == 200:
        connection.tunneled = True
    else:
        raise NetworkError(
            'Proxy does not support CONNECT: {} {}'.format(
                response.status_code,
                wpull.string.printable_str(response.reason)))
Establish a TCP tunnel .
12,503
def is_file(cls, file):
    '''Return whether the file is likely to be HTML.

    Peeks at the start of the file and looks for common HTML markers.

    Returns:
        bool: True if an HTML marker is found, otherwise False. (The
        original returned ``None`` on the negative path; an explicit
        bool is backward compatible since ``None`` is also falsy.)
    '''
    peeked_data = wpull.string.printable_bytes(
        wpull.util.peek_file(file)).lower()

    markers = (b'<!doctype html', b'<head', b'<title', b'<html',
               b'<script', b'<table', b'<a href')
    return any(marker in peeked_data for marker in markers)
Return whether the file is likely to be HTML .
12,504
def convert_http_request(request, referrer_host=None):
    '''Convert a wpull HTTP request to a :class:`urllib.request.Request`.

    Args:
        request: the wpull request; its URL and header fields are copied.
        referrer_host: value for ``origin_req_host`` on the new request.
    '''
    converted = urllib.request.Request(
        request.url_info.url,
        origin_req_host=referrer_host,
    )

    for name, value in request.fields.get_all():
        converted.add_header(name, value)

    return converted
Convert a HTTP request .
12,505
def add_cookie_header(self, request, referrer_host=None):
    '''Wrapped ``add_cookie_header``.

    Converts the wpull request to a urllib request, lets the cookie jar
    attach its headers, then copies all headers back onto the request.
    '''
    converted = convert_http_request(request, referrer_host)
    self._cookie_jar.add_cookie_header(converted)

    request.fields.clear()
    for name, value in converted.header_items():
        request.fields.add(name, value)
Wrapped add_cookie_header .
12,506
def extract_cookies(self, response, request, referrer_host=None):
    '''Wrapped ``extract_cookies``.

    Wraps the response and converts the request so the stdlib cookie
    jar can pull cookies out of the exchange.
    '''
    wrapped_response = HTTPResponseInfoWrapper(response)
    converted_request = convert_http_request(request, referrer_host)
    self._cookie_jar.extract_cookies(wrapped_response, converted_request)
Wrapped extract_cookies .
12,507
def close(self):
    '''Save the cookie jar to disk if a save filename was configured.'''
    if not self._save_filename:
        return
    self._cookie_jar.save(
        self._save_filename,
        ignore_discard=self._keep_session_cookies)
Save the cookie jar if needed .
12,508
def start(self, use_atexit=True):
    '''Start the executable and begin reading its output streams.

    Args:
        use_atexit (bool): if True, register :meth:`close` with
            :mod:`atexit` so the child is terminated at interpreter exit.

    Coroutine.
    '''
    assert not self._process

    _logger.debug('Starting process %s', self._proc_args)

    process_future = asyncio.create_subprocess_exec(
        stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, *self._proc_args)
    self._process = yield from process_future

    # BUG FIX: ``asyncio.async(...)`` is a SyntaxError on Python 3.7+
    # because ``async`` became a keyword; ``ensure_future`` is the
    # supported equivalent (available since Python 3.4.4).
    self._stderr_reader = asyncio.ensure_future(self._read_stderr())
    self._stdout_reader = asyncio.ensure_future(self._read_stdout())

    if use_atexit:
        atexit.register(self.close)
Start the executable .
12,509
def close(self):
    '''Terminate or kill the subprocess.

    Attempts a graceful terminate, polls briefly for exit, then kills.
    ESRCH errors (process already gone) are ignored.
    '''
    if not self._process:
        return

    if self._process.returncode is not None:
        return

    _logger.debug('Terminate process.')

    try:
        self._process.terminate()
    except OSError as error:
        if error.errno != errno.ESRCH:
            raise

    # Give the process up to ~0.5 s to exit on its own.
    for dummy in range(10):
        if self._process.returncode is not None:
            return
        time.sleep(0.05)

    _logger.debug('Failed to terminate. Killing.')

    try:
        self._process.kill()
    except OSError as error:
        if error.errno != errno.ESRCH:
            raise
Terminate or kill the subprocess .
12,510
def _read_stdout(self):
    '''Continuously read stdout lines and feed them to the callback.

    Coroutine. Exceptions are logged before being re-raised so reader
    failures are never silent.
    '''
    try:
        while self._process.returncode is None:
            line = yield from self._process.stdout.readline()
            _logger.debug('Read stdout line %s', repr(line))
            if not line:
                break
            if self._stdout_callback:
                yield from self._stdout_callback(line)
    except Exception:
        _logger.exception('Unhandled read stdout exception.')
        raise
Continuously read the stdout for messages .
12,511
def _read_stderr(self):
    '''Continuously read stderr lines and feed them to the callback.

    Coroutine. Mirrors ``_read_stdout`` but without per-line debug logs.
    '''
    try:
        while self._process.returncode is None:
            line = yield from self._process.stderr.readline()
            if not line:
                break
            if self._stderr_callback:
                yield from self._stderr_callback(line)
    except Exception:
        _logger.exception('Unhandled read stderr exception.')
        raise
Continuously read stderr for error messages .
12,512
def read_file(self, file: Union[IO, asyncio.StreamWriter]=None):
    '''Read from the connection into the given file.

    Args:
        file: destination file object or stream writer; may be ``None``
            to discard the data after notifying observers.

    Coroutine.
    '''
    if file:
        # Async writers expose ``drain``; plain files do not.
        file_is_async = hasattr(file, 'drain')

    while True:
        chunk = yield from self._connection.read(4096)

        if not chunk:
            break

        if file:
            file.write(chunk)
            if file_is_async:
                yield from file.drain()

        self._data_event_dispatcher.notify_read(chunk)
Read from connection to file .
12,513
def reconnect(self):
    '''Connect the underlying connection if it is closed.

    Coroutine.
    '''
    if not self._connection.closed():
        return
    self._connection.reset()
    yield from self._connection.connect()
Connect the stream if needed.
12,514
def write_command(self, command: Command):
    '''Serialize and write a command to the control stream.

    Coroutine.
    '''
    _logger.debug('Write command.')
    payload = command.to_bytes()
    yield from self._connection.write(payload)
    self._data_event_dispatcher.notify_write(payload)
Write a command to the stream .
12,515
def read_reply(self) -> Reply:
    '''Read a (possibly multi-line) reply from the control stream.

    Coroutine.

    Raises:
        NetworkError: if the connection closes mid-reply.
    '''
    _logger.debug('Read reply')
    reply = Reply()

    while True:
        line = yield from self._connection.readline()

        # A line without a trailing newline means the peer hung up.
        if line[-1:] != b'\n':
            raise NetworkError('Connection closed.')

        self._data_event_dispatcher.notify_read(line)
        reply.parse(line)

        # ``code`` stays None until the final line of the reply.
        if reply.code is not None:
            break

    return reply
Read a reply from the stream .
12,516
def can_fetch_pool(self, request: Request):
    '''Return whether the request can be fetched based on the pool.

    Raises:
        NotInPoolError: if no robots.txt parser exists for the URL yet.
    '''
    url_info = request.url_info
    user_agent = request.fields.get('User-agent', '')

    if not self._robots_txt_pool.has_parser(url_info):
        raise NotInPoolError()

    return self._robots_txt_pool.can_fetch(url_info, user_agent)
Return whether the request can be fetched based on the pool .
12,517
def fetch_robots_txt(self, request: Request, file=None):
    '''Fetch the robots.txt file for the request's host.

    Args:
        request: the request whose host determines the robots.txt URL.
        file: optional destination file; a temp file is created if None.

    Coroutine.

    Raises:
        ServerError: if the server answers with a 5xx status.
    '''
    url_info = request.url_info
    url = URLInfo.parse('{0}://{1}/robots.txt'.format(
        url_info.scheme, url_info.hostname_with_port)).url

    if not file:
        file = wpull.body.new_temp_file(os.getcwd(), hint='robots')

    with contextlib.closing(file):
        request = Request(url)
        session = self._web_client.session(request)

        while not session.done():
            # Redirects restart the download; wipe any partial body.
            wpull.util.truncate_file(file.name)

            try:
                response = yield from session.start()
                yield from session.download(file=file)
            except ProtocolError:
                # Unparseable response: treat the site as unrestricted.
                self._accept_as_blank(url_info)
                return

        status_code = response.status_code

        if 500 <= status_code <= 599:
            raise ServerError('Server returned error for robots.txt.')

        if status_code == 200:
            self._read_content(response, url_info)
        else:
            self._accept_as_blank(url_info)
Fetch the robots . txt file for the request .
12,518
def can_fetch(self, request: Request, file=None) -> bool:
    '''Return whether the request can be fetched.

    Answers from the pool when possible; otherwise fetches robots.txt
    first and asks again.

    Coroutine.
    '''
    try:
        return self.can_fetch_pool(request)
    except NotInPoolError:
        pass

    yield from self.fetch_robots_txt(request, file=file)

    return self.can_fetch_pool(request)
Return whether the request can be fetched.
12,519
def _read_content(self, response: Response, original_url_info: URLInfo):
    '''Read the response body and load it into the robots.txt pool.

    Falls back to a blank (allow-all) ruleset if parsing fails.
    '''
    # Only the first 4 KiB is needed; robots.txt files are small.
    data = response.body.read(4096)
    url_info = original_url_info

    try:
        self._robots_txt_pool.load_robots_txt(url_info, data)
    except ValueError:
        _logger.warning(__(
            _('Failed to parse {url} for robots exclusion rules. '
              'Ignoring.'), url_info.url))
        self._accept_as_blank(url_info)
    else:
        _logger.debug(__(
            'Got a good robots.txt for {0}.', url_info.url))
Read response and parse the contents into the pool .
12,520
def _accept_as_blank(self, url_info: URLInfo):
    '''Mark the URL as OK by loading an empty (allow-all) robots.txt.'''
    _logger.debug(__('Got empty robots.txt for {0}.', url_info.url))
    self._robots_txt_pool.load_robots_txt(url_info, '')
Mark the URL as OK in the pool .
12,521
def to_text_format(self):
    '''Format the detached DNS information as text.

    The first line is the fetch timestamp (``YYYYMMDDHHMMSS``) followed
    by one line per resource record.
    '''
    timestamp = self.fetch_date.strftime('%Y%m%d%H%M%S')
    record_lines = (rr.to_text() for rr in self.resource_records)
    return '\n'.join(itertools.chain((timestamp,), record_lines, ()))
Format the detached DNS information as text.
12,522
def first_ipv4(self) -> Optional[AddressInfo]:
    '''Return the first IPv4 address info, or ``None`` if there is none.'''
    matches = (info for info in self._address_infos
               if info.family == socket.AF_INET)
    return next(matches, None)
The first IPv4 address .
12,523
def first_ipv6(self) -> Optional[AddressInfo]:
    '''Return the first IPv6 address info, or ``None`` if there is none.'''
    matches = (info for info in self._address_infos
               if info.family == socket.AF_INET6)
    return next(matches, None)
The first IPV6 address .
12,524
def rotate(self):
    '''Move the first address to the last position.

    Used for round-robin cycling through resolved addresses.
    '''
    first = self._address_infos.pop(0)
    self._address_infos.append(first)
Move the first address to the last position .
12,525
def _query_dns(self, host: str, family: int=socket.AF_INET) \
        -> dns.resolver.Answer:
    '''Query DNS using the pure-Python resolver.

    Coroutine. Runs the blocking dnspython query in an executor.

    Raises:
        DNSNotFound: for NXDOMAIN / no-answer results.
        NetworkError: for any other DNS failure.
    '''
    record_type = {socket.AF_INET: 'A', socket.AF_INET6: 'AAAA'}[family]

    event_loop = asyncio.get_event_loop()
    query = functools.partial(
        self._dns_resolver.query, host, record_type,
        source=self._bind_address)

    try:
        answer = yield from event_loop.run_in_executor(None, query)
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as error:
        raise DNSNotFound(
            'DNS resolution failed: {error}'.format(
                error=wpull.util.get_exception_message(error))
        ) from error
    except dns.exception.DNSException as error:
        raise NetworkError(
            'DNS resolution error: {error}'.format(
                error=wpull.util.get_exception_message(error))
        ) from error
    else:
        return answer
Query DNS using Python .
12,526
def _getaddrinfo(self, host: str, family: int=socket.AF_UNSPEC) \
        -> List[tuple]:
    '''Query DNS using the system resolver.

    Coroutine. Applies the configured timeout if any.

    Raises:
        DNSNotFound: for not-found style getaddrinfo errors.
        NetworkError: for other socket errors or on timeout.
    '''
    event_loop = asyncio.get_event_loop()
    query = event_loop.getaddrinfo(
        host, 0, family=family, proto=socket.IPPROTO_TCP)

    if self._timeout:
        query = asyncio.wait_for(query, self._timeout)

    # NOTE(review): socket.EAI_NODATA is not defined on every platform
    # (e.g. some Windows builds) — presumably fine on the targets this
    # project supports; verify if porting.
    not_found_errnos = (
        socket.EAI_FAIL, socket.EAI_NODATA, socket.EAI_NONAME)

    try:
        results = yield from query
    except socket.error as error:
        if error.errno in not_found_errnos:
            raise DNSNotFound(
                'DNS resolution failed: {error}'.format(error=error)
            ) from error
        else:
            raise NetworkError(
                'DNS resolution error: {error}'.format(error=error)
            ) from error
    except asyncio.TimeoutError as error:
        raise NetworkError('DNS resolve timed out.') from error
    else:
        return results
Query DNS using system resolver .
12,527
def _convert_dns_answer(cls, answer: dns.resolver.Answer) \
        -> Iterable[AddressInfo]:
    '''Convert a dnspython answer to AddressInfo objects.

    Only A and AAAA answers are supported.
    '''
    assert answer.rdtype in (dns.rdatatype.A, dns.rdatatype.AAAA)

    if answer.rdtype == dns.rdatatype.A:
        family = socket.AF_INET
    else:
        family = socket.AF_INET6

    for record in answer:
        ip_address = record.to_text()

        if family == socket.AF_INET6:
            # IPv6 sockaddrs carry extra scope information.
            flow_info, control_id = cls._get_ipv6_info(ip_address)
        else:
            flow_info = control_id = None

        yield AddressInfo(ip_address, family, flow_info, control_id)
Convert the DNS answer to address info .
12,528
def _convert_addrinfo(cls, results: List[tuple]) -> Iterable[AddressInfo]:
    '''Convert ``getaddrinfo`` result tuples to AddressInfo objects.'''
    for family, _socktype, _proto, _canonname, address in results:
        ip_address = address[0]

        if family == socket.AF_INET6:
            # sockaddr_in6 is (host, port, flowinfo, scope_id).
            flow_info = address[2]
            control_id = address[3]
        else:
            flow_info = None
            control_id = None

        yield AddressInfo(ip_address, family, flow_info, control_id)
Convert the result list to address info .
12,529
def _get_ipv6_info ( cls , ip_address : str ) -> tuple : results = socket . getaddrinfo ( ip_address , 0 , proto = socket . IPPROTO_TCP , flags = socket . AI_NUMERICHOST ) flow_info = results [ 0 ] [ 4 ] [ 2 ] control_id = results [ 0 ] [ 4 ] [ 3 ] return flow_info , control_id
Extract the flow info and control id .
12,530
def raise_if_not_match(cls, action: str,
                       expected_code: Union[int, Sequence[int]],
                       reply: Reply):
    '''Raise :class:`FTPServerError` if the reply code is unexpected.

    Args:
        action: human-readable description used in the error message.
        expected_code: a single code or a sequence of acceptable codes.
        reply: the reply to check.
    '''
    # Normalize to a sequence so membership testing works either way.
    if isinstance(expected_code, int):
        expected_codes = (expected_code,)
    else:
        expected_codes = expected_code

    if reply.code not in expected_codes:
        raise FTPServerError(
            'Failed action {action}: {reply_code} {reply_text}'.format(
                action=action, reply_code=reply.code,
                reply_text=ascii(reply.text)),
            reply.code)
Raise FTPServerError if not expected reply code .
12,531
def read_welcome_message(self):
    '''Read and validate the server's welcome message.

    Coroutine.
    '''
    reply = yield from self._control_stream.read_reply()
    self.raise_if_not_match(
        'Server ready', ReplyCodes.service_ready_for_new_user, reply)
Read the welcome message .
12,532
def passive_mode(self) -> Tuple[str, int]:
    '''Enable passive mode and return the data connection address.

    Coroutine.

    Raises:
        ProtocolError: if the PASV reply address cannot be parsed.
    '''
    yield from self._control_stream.write_command(Command('PASV'))
    reply = yield from self._control_stream.read_reply()

    self.raise_if_not_match(
        'Passive mode', ReplyCodes.entering_passive_mode, reply)

    try:
        return wpull.protocol.ftp.util.parse_address(reply.text)
    except ValueError as error:
        raise ProtocolError(str(error)) from error
Enable passive mode .
12,533
def setup_data_stream(
        self,
        connection_factory: Callable[[tuple], Connection],
        data_stream_factory: Callable[[Connection], DataStream]=DataStream
) -> DataStream:
    '''Create and set up a binary-mode data stream.

    Switches to binary transfer mode, enters passive mode, connects to
    the returned address, and wraps the connection in a data stream.

    Coroutine.
    '''
    yield from self._control_stream.write_command(Command('TYPE', 'I'))
    reply = yield from self._control_stream.read_reply()

    self.raise_if_not_match('Binary mode', ReplyCodes.command_okay, reply)

    address = yield from self.passive_mode()

    connection = yield from connection_factory(address)
    # Ensure a fresh connection state before connecting.
    connection.reset()
    yield from connection.connect()

    return data_stream_factory(connection)
Create and setup a data stream .
12,534
def begin_stream(self, command: Command) -> Reply:
    '''Send a command that starts content transfer on the data stream.

    Coroutine.

    Returns:
        Reply: the server's acknowledgement of the transfer start.
    '''
    yield from self._control_stream.write_command(command)
    reply = yield from self._control_stream.read_reply()

    acceptable_codes = (
        ReplyCodes.file_status_okay_about_to_open_data_connection,
        ReplyCodes.data_connection_already_open_transfer_starting,
    )
    self.raise_if_not_match('Begin stream', acceptable_codes, reply)

    return reply
Start sending content on the data stream .
12,535
def read_stream(self, file: IO, data_stream: DataStream) -> Reply:
    '''Read the transfer from the data stream into a file.

    Coroutine.

    Returns:
        Reply: the end-of-transfer reply from the control stream.
    '''
    yield from data_stream.read_file(file=file)

    reply = yield from self._control_stream.read_reply()
    self.raise_if_not_match(
        'End stream', ReplyCodes.closing_data_connection, reply)

    data_stream.close()

    return reply
Read from the data stream .
12,536
def restart(self, offset: int):
    '''Send a REST command to resume a transfer at ``offset`` bytes.

    Coroutine.
    '''
    yield from self._control_stream.write_command(
        Command('REST', str(offset)))
    reply = yield from self._control_stream.read_reply()

    self.raise_if_not_match(
        'Restart',
        ReplyCodes.requested_file_action_pending_further_information,
        reply)
Send restart command .
12,537
def get_version(exe_path='youtube-dl'):
    '''Return the version string of youtube-dl.

    Args:
        exe_path (str): path to the youtube-dl executable.

    Raises:
        ValueError: if the output does not look like a version string.
            (The original used ``assert``, which is silently stripped
            under ``python -O``; an explicit raise is always enforced.)
    '''
    process = subprocess.Popen(
        [exe_path, '--version'], stdout=subprocess.PIPE)
    version_string = process.communicate()[0]
    version_string = version_string.decode().strip()

    if ' ' in version_string:
        raise ValueError(
            'Unexpected version output: {}'.format(version_string))

    return version_string
Get the version string of youtube - dl .
12,538
def _get_output_template ( self ) : path = self . _file_writer_session . extra_resource_path ( '.youtube-dl' ) if not path : self . _temp_dir = tempfile . TemporaryDirectory ( dir = self . _root_path , prefix = 'tmp-wpull-youtubedl' ) path = '{}/tmp' . format ( self . _temp_dir . name ) return path , '{}.%(id)s.%(format_id)s.%(ext)s' . format ( path )
Return the path prefix and output template .
12,539
def _write_warc_metadata(self):
    '''Write the youtube-dl JSON metadata files to the WARC.'''
    uri = 'metadata://{}{}'.format(
        self._item_session.url_record.url_info.authority,
        self._item_session.url_record.url_info.resource)

    glob_pattern = self._path_prefix + '*.info.json'
    filenames = list(glob.glob(glob_pattern))

    if not filenames:
        # BUG FIX: the message previously contained the literal
        # '(unknown)' while the ``filename`` keyword argument was
        # silently discarded; include the placeholder so the pattern
        # actually appears in the log.
        _logger.warning(__(
            _('Could not find external process metadata file: '
              '{filename}'),
            filename=glob_pattern))
        return

    for filename in filenames:
        record = WARCRecord()
        record.set_common_fields(
            'metadata', 'application/vnd.youtube-dl_formats+json')
        record.fields['WARC-Target-URI'] = uri
        record.block_file = open(filename, 'rb')

        self._warc_recorder.set_length_and_maybe_checksums(record)
        self._warc_recorder.write_record(record)

        record.block_file.close()
Write the JSON metadata to WARC .
12,540
def process(self, session: AppSession):
    '''Populate visits from the CDX file into the URL table.

    Validates that the CDX records carry the URL ('a'), record ID ('u'),
    and SHA1 checksum ('k') fields before loading them.
    '''
    if not session.args.warc_dedup:
        return

    iterable = wpull.warc.format.read_cdx(
        session.args.warc_dedup,
        encoding=session.args.local_encoding or 'utf-8')

    missing_url_msg = _('The URL ("a") is missing from the CDX file.')
    missing_id_msg = _('The record ID ("u") is missing from the CDX file.')
    missing_checksum_msg = \
        _('The SHA1 checksum ("k") is missing from the CDX file.')

    counter = 0

    def visits():
        nonlocal counter
        checked_fields = False

        for record in iterable:
            # Only validate the first record; the rest share its shape.
            if not checked_fields:
                if 'a' not in record:
                    raise ValueError(missing_url_msg)
                if 'u' not in record:
                    raise ValueError(missing_id_msg)
                if 'k' not in record:
                    raise ValueError(missing_checksum_msg)
                checked_fields = True

            yield record['a'], record['u'], record['k']
            counter += 1

    url_table = session.factory['URLTable']
    url_table.add_visits(visits())

    _logger.info(__(
        gettext.ngettext(
            'Loaded {num} record from CDX file.',
            'Loaded {num} records from CDX file.',
            counter
        ),
        num=counter
    ))
Populate the visits from the CDX into the URL table .
12,541
def to_lxml_encoding(encoding):
    '''Check if lxml supports the specified encoding.

    Tries the name as given, then with dashes removed, then with
    underscores removed as well.

    Returns:
        str: a supported encoding name, or ``None`` if lxml rejects
        every variant.
    '''
    candidate = encoding

    for separator in ('-', '_', None):
        try:
            lxml.html.HTMLParser(encoding=candidate)
        except LookupError:
            if separator:
                candidate = candidate.replace(separator, '')
        else:
            return candidate
Check if lxml supports the specified encoding .
12,542
def parse_lxml(self, file, encoding=None, target_class=HTMLParserTarget,
               parser_type='html'):
    '''Return an iterator of elements found in the document.

    Args:
        file: file object containing the document bytes.
        encoding (str): optional document encoding.
        target_class: lxml parser target class that collects elements.
        parser_type (str): ``'html'``, ``'xhtml'``, or ``'xml'``.
    '''
    if encoding:
        lxml_encoding = to_lxml_encoding(encoding) or 'latin1'
    else:
        lxml_encoding = encoding

    elements = []
    callback_func = elements.append

    target = target_class(callback_func)

    if parser_type == 'html':
        parser = lxml.html.HTMLParser(
            encoding=lxml_encoding, target=target)
    elif parser_type == 'xhtml':
        parser = lxml.html.XHTMLParser(
            encoding=lxml_encoding, target=target, recover=True)
    else:
        parser = lxml.etree.XMLParser(
            encoding=lxml_encoding, target=target, recover=True)

    if parser_type == 'html':
        # Prime the parser so it starts emitting events immediately.
        # BUG FIX: the original called ``'<html>'.encode(encoding)``
        # which raises TypeError when ``encoding`` is None; the marker
        # is plain ASCII, so UTF-8 is a safe fallback.
        for dummy in range(3):
            parser.feed('<html>'.encode(encoding or 'utf-8'))

    while True:
        data = file.read(self.BUFFER_SIZE)

        if not data:
            break

        parser.feed(data)

        # Drain elements produced so far to keep memory bounded.
        for element in elements:
            yield element
        del elements[:]

    parser.close()

    # ``close()`` may flush out a final batch of elements.
    for element in elements:
        yield element
Return an iterator of elements found in the document .
12,543
def parse_doctype(cls, file, encoding=None):
    '''Get the doctype from the document, or ``None`` if unavailable.

    Only peeks at the start of the file; the offset is not consumed.
    '''
    if encoding:
        lxml_encoding = to_lxml_encoding(encoding) or 'latin1'
    else:
        lxml_encoding = encoding

    try:
        parser = lxml.etree.XMLParser(
            encoding=lxml_encoding, recover=True)
        tree = lxml.etree.parse(
            io.BytesIO(wpull.util.peek_file(file)), parser=parser)
        if tree.getroot() is not None:
            return tree.docinfo.doctype
    except lxml.etree.LxmlError:
        pass
Get the doctype from the document .
12,544
def detect_parser_type(cls, file, encoding=None):
    '''Get the suitable parser type ('xml', 'xhtml', or 'html').'''
    is_xml = XMLDetector.is_file(file)
    doctype = cls.parse_doctype(file, encoding=encoding) or ''

    # XML content with no doctype gets the plain XML parser.
    if not doctype and is_xml:
        return 'xml'

    if 'XHTML' in doctype:
        return 'xhtml'

    return 'html'
Get the suitable parser type for the document .
12,545
def new_temp_file(directory=None, hint=''):
    '''Return a new named temporary file.

    Args:
        directory: directory in which to create the file.
        hint (str): short label embedded in the filename for debugging.
    '''
    return tempfile.NamedTemporaryFile(
        prefix='tmp-wpull-{0}-'.format(hint),
        suffix='.tmp',
        dir=directory,
    )
Return a new temporary file .
12,546
def content(self):
    '''Return the content of the file, caching it after the first read.'''
    if self._content_data:
        return self._content_data

    if is_seekable(self.file):
        # Restore the caller's file position after reading.
        with wpull.util.reset_file_offset(self.file):
            self._content_data = self.file.read()
    else:
        self._content_data = self.file.read()

    return self._content_data
Return the content of the file .
12,547
def size(self):
    '''Return the size of the file in bytes.

    Tries ``fstat`` first, then seeks to the end for seekable files.

    Raises:
        OSError: if the size cannot be determined.
    '''
    try:
        return os.fstat(self.file.fileno()).st_size
    except io.UnsupportedOperation:
        pass

    if is_seekable(self.file):
        with wpull.util.reset_file_offset(self.file):
            self.file.seek(0, os.SEEK_END)
            return self.file.tell()

    raise OSError('Unsupported operation.')
Return the size of the file .
12,548
def _apply_pragmas_callback(cls, connection, record):
    '''Set SQLite pragmas on each new connection.

    WAL journaling with NORMAL synchronous trades a little durability
    for much better write concurrency.
    '''
    _logger.debug('Setting pragmas.')
    for pragma in ('PRAGMA journal_mode=WAL',
                   'PRAGMA synchronous=NORMAL'):
        connection.execute(pragma)
Set SQLite pragmas .
12,549
def has_parser(self, url_info: URLInfo):
    '''Return whether a parser has been created for the URL's site.'''
    return self.url_info_key(url_info) in self._parsers
Return whether a parser has been created for the URL .
12,550
def can_fetch(self, url_info: URLInfo, user_agent: str):
    '''Return whether the URL can be fetched per the cached rules.

    Raises:
        KeyError: if no parser exists for the URL's site.
    '''
    parser = self._parsers[self.url_info_key(url_info)]
    return parser.is_allowed(user_agent, url_info.url)
Return whether the URL can be fetched .
12,551
def load_robots_txt(self, url_info: URLInfo, text: str):
    '''Parse robots.txt text and cache the parser for the URL's site.'''
    parser = robotexclusionrulesparser.RobotExclusionRulesParser()
    parser.parse(text)
    self._parsers[self.url_info_key(url_info)] = parser
Load the robots.txt file.
12,552
def _build_demux_document_scraper(cls, session: AppSession):
    '''Create the demux document scraper from the individual scrapers.'''
    session.factory.new(
        'DemuxDocumentScraper',
        cls._build_document_scrapers(session))
Create demux document scraper .
12,553
def _build_document_scrapers(cls, session: AppSession):
    '''Create the list of document scrapers.

    Always builds the HTML scraper; CSS, JavaScript, and sitemap
    scrapers are added according to the command-line arguments.
    '''
    html_parser = session.factory['HTMLParser']
    element_walker = session.factory.new('ElementWalker')

    scrapers = [
        session.factory.new(
            'HTMLScraper',
            html_parser,
            element_walker,
            followed_tags=session.args.follow_tags,
            ignored_tags=session.args.ignore_tags,
            only_relative=session.args.relative,
            robots=session.args.robots,
            encoding_override=session.args.remote_encoding,
        ),
    ]

    if 'css' in session.args.link_extractors:
        css_scraper = session.factory.new(
            'CSSScraper',
            encoding_override=session.args.remote_encoding,
        )
        scrapers.append(css_scraper)
        element_walker.css_scraper = css_scraper

    if 'javascript' in session.args.link_extractors:
        javascript_scraper = session.factory.new(
            'JavaScriptScraper',
            encoding_override=session.args.remote_encoding,
        )
        scrapers.append(javascript_scraper)
        element_walker.javascript_scraper = javascript_scraper

    if session.args.sitemaps:
        scrapers.append(session.factory.new(
            'SitemapScraper', html_parser,
            encoding_override=session.args.remote_encoding,
        ))

    return scrapers
Create the document scrapers .
12,554
def _build_request_factory(cls, session: AppSession):
    '''Create the request factory.

    The returned callable builds Request objects pre-populated with
    headers derived from the command-line arguments.
    '''
    def request_factory(*args, **kwargs):
        request = session.factory.class_map['Request'](*args, **kwargs)

        user_agent = session.args.user_agent or \
            session.default_user_agent
        request.fields['User-Agent'] = user_agent

        if session.args.referer:
            request.fields['Referer'] = session.args.referer

        for header_string in session.args.header:
            request.fields.parse(header_string)

        if session.args.http_compression:
            request.fields['Accept-Encoding'] = 'gzip, deflate'

        if session.args.no_cache:
            request.fields['Cache-Control'] = 'no-cache, must-revalidate'
            request.fields['Pragma'] = 'no-cache'

        return request

    return request_factory
Create the request factory .
12,555
def _build_http_client(cls, session: AppSession):
    '''Create the HTTP client with a configured stream factory.'''
    stream_factory = functools.partial(
        HTTPStream,
        ignore_length=session.args.ignore_length,
        keep_alive=session.args.http_keep_alive)

    return session.factory.new(
        'HTTPClient',
        connection_pool=session.factory['ConnectionPool'],
        stream_factory=stream_factory,
    )
Create the HTTP client .
12,556
def _build_web_client(cls, session: AppSession):
    '''Build the web client on top of the HTTP client and cookie jar.'''
    cookie_jar = cls._build_cookie_jar(session)
    http_client = cls._build_http_client(session)

    redirect_factory = functools.partial(
        session.factory.class_map['RedirectTracker'],
        max_redirects=session.args.max_redirect,
    )

    return session.factory.new(
        'WebClient',
        http_client,
        redirect_tracker_factory=redirect_factory,
        cookie_jar=cookie_jar,
        request_factory=cls._build_request_factory(session),
    )
Build Web Client .
12,557
def _build_cookie_jar(cls, session: AppSession):
    '''Build the cookie jar wrapper, or ``None`` if cookies are off.'''
    if not session.args.cookies:
        return

    if session.args.load_cookies or session.args.save_cookies:
        # The Mozilla-format jar is needed for file load/save support.
        session.factory.set('CookieJar', BetterMozillaCookieJar)

        cookie_jar = session.factory.new('CookieJar')

        if session.args.load_cookies:
            cookie_jar.load(
                session.args.load_cookies, ignore_discard=True)
    else:
        cookie_jar = session.factory.new('CookieJar')

    policy = session.factory.new('CookiePolicy', cookie_jar=cookie_jar)
    cookie_jar.set_policy(policy)

    _logger.debug(__('Loaded cookies: {0}', list(cookie_jar)))

    cookie_jar_wrapper = session.factory.new(
        'CookieJarWrapper',
        cookie_jar,
        save_filename=session.args.save_cookies,
        keep_session_cookies=session.args.keep_session_cookies,
    )

    return cookie_jar_wrapper
Build the cookie jar.
12,558
def _build_ftp_client(cls, session: AppSession):
    '''Build the FTP client using the shared connection pool.'''
    return session.factory.new(
        'FTPClient',
        connection_pool=session.factory['ConnectionPool'],
    )
Build FTP client .
12,559
def process(self, session: AppSession):
    '''Build and start the MITM proxy server if any feature needs it.

    Coroutine. The proxy is only started when PhantomJS, youtube-dl,
    or an explicit proxy server was requested.
    '''
    args = session.args

    if not (args.phantomjs or args.youtube_dl or args.proxy_server):
        return

    proxy_server = session.factory.new(
        'HTTPProxyServer',
        session.factory['HTTPClient'],
    )
    cookie_jar = session.factory.get('CookieJarWrapper')
    proxy_coprocessor = session.factory.new('ProxyCoprocessor', session)

    # Bind first so the ephemeral port can be recorded on the session.
    proxy_socket = tornado.netutil.bind_sockets(
        session.args.proxy_server_port,
        address=session.args.proxy_server_address)[0]
    proxy_port = proxy_socket.getsockname()[1]

    proxy_async_server = yield from asyncio.start_server(
        proxy_server, sock=proxy_socket)

    session.async_servers.append(proxy_async_server)
    session.proxy_server_port = proxy_port
Build MITM proxy server .
12,560
def _build_processor(cls, session: AppSession):
    '''Create the delegate processor and register per-scheme handlers.'''
    web_processor = cls._build_web_processor(session)
    ftp_processor = cls._build_ftp_processor(session)

    delegate_processor = session.factory.new('Processor')

    for scheme, processor in (('http', web_processor),
                              ('https', web_processor),
                              ('ftp', ftp_processor)):
        delegate_processor.register(scheme, processor)
Create the Processor
12,561
def _build_web_processor(cls, session: AppSession):
    '''Build the web (HTTP/HTTPS) processor and its rule objects.'''
    args = session.args
    url_filter = session.factory['DemuxURLFilter']
    document_scraper = session.factory['DemuxDocumentScraper']
    file_writer = session.factory['FileWriter']
    post_data = cls._get_post_data(session.args)
    web_client = session.factory['WebClient']

    robots_txt_checker = cls._build_robots_txt_checker(session)

    # Generic --user/--password act as fallbacks for protocol-specific
    # credentials.
    http_username = args.user or args.http_user
    http_password = args.password or args.http_password
    ftp_username = args.user or args.ftp_user
    ftp_password = args.password or args.ftp_password

    fetch_rule = session.factory.new(
        'FetchRule',
        url_filter=url_filter,
        robots_txt_checker=robots_txt_checker,
        http_login=(http_username, http_password),
        ftp_login=(ftp_username, ftp_password),
        duration_timeout=args.session_timeout,
    )

    waiter = session.factory.new(
        'Waiter',
        wait=args.wait,
        random_wait=args.random_wait,
        max_wait=args.waitretry)

    result_rule = session.factory.new(
        'ResultRule',
        ssl_verification=args.check_certificate,
        retry_connrefused=args.retry_connrefused,
        retry_dns_error=args.retry_dns_error,
        waiter=waiter,
        statistics=session.factory['Statistics'],
    )

    processing_rule = session.factory.new(
        'ProcessingRule',
        fetch_rule,
        document_scraper=document_scraper,
        sitemaps=session.args.sitemaps,
        url_rewriter=session.factory.get('URLRewriter'),
    )

    web_processor_fetch_params = session.factory.new(
        'WebProcessorFetchParams',
        post_data=post_data,
        strong_redirects=args.strong_redirects,
        content_on_error=args.content_on_error,
    )

    processor = session.factory.new(
        'WebProcessor',
        web_client,
        web_processor_fetch_params,
    )

    return processor
Build WebProcessor .
12,562
def _build_ftp_processor(cls, session: AppSession):
    '''Build the FTP processor from the FTP client and fetch params.'''
    ftp_client = session.factory['FTPClient']

    fetch_params = session.factory.new(
        'FTPProcessorFetchParams',
        remove_listing=session.args.remove_listing,
        retr_symlinks=session.args.retr_symlinks,
        preserve_permissions=session.args.preserve_permissions,
        glob=session.args.glob,
    )

    return session.factory.new(
        'FTPProcessor',
        ftp_client,
        fetch_params,
    )
Build FTPProcessor .
12,563
def _get_post_data ( cls , args ) : if args . post_data : return args . post_data elif args . post_file : return args . post_file . read ( )
Return the post data .
12,564
def _build_robots_txt_checker(cls, session: AppSession):
    """Build robots.txt checker.

    Returns:
        The RobotsTxtChecker instance, or implicitly ``None`` when
        robots.txt handling is disabled via the arguments.
    """
    if session.args.robots:
        robots_txt_pool = session.factory.new('RobotsTxtPool')
        robots_txt_checker = session.factory.new(
            'RobotsTxtChecker',
            web_client=session.factory['WebClient'],
            robots_txt_pool=robots_txt_pool
        )

        return robots_txt_checker
Build robots . txt checker .
12,565
def _build_phantomjs_coprocessor(cls, session: AppSession, proxy_port: int):
    """Build proxy server and PhantomJS client, controller, coprocessor.

    Args:
        proxy_port: port of the local proxy that PhantomJS is pointed
            at via the ``--proxy`` command-line switch.
    """
    page_settings = {}
    default_headers = NameValueRecord()

    # Replay any user-supplied header strings.
    for header_string in session.args.header:
        default_headers.parse(header_string)

    # Accept any language for PhantomJS fetches.
    default_headers.add('Accept-Language', '*')

    if not session.args.http_compression:
        default_headers.add('Accept-Encoding', 'identity')

    default_headers = dict(default_headers.items())

    if session.args.read_timeout:
        # PhantomJS resourceTimeout is in milliseconds.
        page_settings['resourceTimeout'] = session.args.read_timeout * 1000

    page_settings['userAgent'] = session.args.user_agent or session.default_user_agent

    # Called early, presumably to fail fast if the PhantomJS executable
    # is missing or broken -- confirm against get_version().
    wpull.driver.phantomjs.get_version(session.args.phantomjs_exe)

    phantomjs_params = PhantomJSParams(
        wait_time=session.args.phantomjs_wait,
        num_scrolls=session.args.phantomjs_scroll,
        smart_scroll=session.args.phantomjs_smart_scroll,
        snapshot=session.args.phantomjs_snapshot,
        custom_headers=default_headers,
        page_settings=page_settings,
        load_time=session.args.phantomjs_max_time,
    )

    extra_args = [
        '--proxy',
        '{}:{}'.format(session.args.proxy_server_address, proxy_port),
        '--ignore-ssl-errors=true'
    ]

    phantomjs_driver_factory = functools.partial(
        session.factory.class_map['PhantomJSDriver'],
        exe_path=session.args.phantomjs_exe,
        extra_args=extra_args,
    )

    phantomjs_coprocessor = session.factory.new(
        'PhantomJSCoprocessor',
        phantomjs_driver_factory,
        session.factory['ProcessingRule'],
        phantomjs_params,
        root_path=session.args.directory_prefix,
        warc_recorder=session.factory.get('WARCRecorder'),
    )

    return phantomjs_coprocessor
Build proxy server and PhantomJS client . controller coprocessor .
12,566
def _build_youtube_dl_coprocessor(cls, session: AppSession, proxy_port: int):
    """Build youtube-dl coprocessor.

    Args:
        proxy_port: port of the local proxy youtube-dl is told to use.
    """
    # Called early, presumably to fail fast if the youtube-dl
    # executable is missing -- confirm against get_version().
    wpull.processor.coprocessor.youtubedl.get_version(session.args.youtube_dl_exe)

    coprocessor = session.factory.new(
        'YoutubeDlCoprocessor',
        session.args.youtube_dl_exe,
        (session.args.proxy_server_address, proxy_port),
        root_path=session.args.directory_prefix,
        user_agent=session.args.user_agent or session.default_user_agent,
        warc_recorder=session.factory.get('WARCRecorder'),
        inet_family=session.args.inet_family,
        # NOTE(review): certificate checking disabled here; presumably
        # handled at the proxy layer -- confirm.
        check_certificate=False
    )

    return coprocessor
Build youtube - dl coprocessor .
12,567
def build(self) -> Application:
    """Put the application together.

    Builds the pipelines, registers the Application on the factory,
    and returns the registered instance.
    """
    pipelines = self._build_pipelines()
    self._factory.new('Application', pipelines)
    return self._factory['Application']
Put the application together .
12,568
def is_supported(cls, file=None, request=None, response=None, url_info=None):
    """Given the hints, return whether the document is supported.

    The hints are tested in order: response, file, request, URL info.
    Returns ``True`` on the first positive test, ``VeryFalse`` when a
    test definitively rules the document out, and ``None`` (implicit)
    when nothing can be decided. Tests raising ``NotImplementedError``
    are skipped.
    """
    checks = (
        (response, cls.is_response),
        (file, cls.is_file),
        (request, cls.is_request),
        (url_info, cls.is_url),
    )

    for candidate, test in checks:
        if not candidate:
            continue

        try:
            verdict = test(candidate)
        except NotImplementedError:
            continue

        if verdict:
            return True

        if verdict is VeryFalse:
            return VeryFalse
Given the hints return whether the document is supported .
12,569
def _print_stats(cls, stats: Statistics, human_format_speed: bool = True):
    """Log the final statistics to the user.

    Args:
        stats: the collected download statistics.
        human_format_speed: if true, report speed as a human-readable
            byte size; otherwise as bits per second.
    """
    time_length = datetime.timedelta(
        seconds=int(stats.stop_time - stats.start_time)
    )
    file_size = wpull.string.format_size(stats.size)

    if stats.bandwidth_meter.num_samples:
        speed = stats.bandwidth_meter.speed()

        if human_format_speed:
            speed_size_str = wpull.string.format_size(speed)
        else:
            # Convert bytes/s to bits/s.
            speed_size_str = '{:.1f} b'.format(speed * 8)
    else:
        # No bandwidth samples recorded; speed is unknown.
        speed_size_str = _('-- B')

    _logger.info(_('FINISHED.'))
    _logger.info(__(
        _(
            'Duration: {preformatted_timedelta}. '
            'Speed: {preformatted_speed_size}/s.'
        ),
        preformatted_timedelta=time_length,
        preformatted_speed_size=speed_size_str,
    ))
    _logger.info(__(
        gettext.ngettext(
            'Downloaded: {num_files} file, {preformatted_file_size}.',
            'Downloaded: {num_files} files, {preformatted_file_size}.',
            stats.files
        ),
        num_files=stats.files,
        preformatted_file_size=file_size
    ))

    if stats.is_quota_exceeded:
        _logger.info(_('Download quota exceeded.'))
Log the final statistics to the user .
12,570
def is_no_body(request, response, no_content_codes=DEFAULT_NO_CONTENT_CODES):
    """Return whether a content body is not expected.

    A body is absent when the response declares neither
    ``Content-Length`` nor ``Transfer-Encoding`` and either the status
    code is one of *no_content_codes* or the request method was
    ``HEAD``.
    """
    # Idiom fix: return the boolean expression directly instead of an
    # if/else returning True/False.
    return (
        'Content-Length' not in response.fields
        and 'Transfer-Encoding' not in response.fields
        and (
            response.status_code in no_content_codes
            or request.method.upper() == 'HEAD'
        )
    )
Return whether a content body is not expected .
12,571
def write_request(self, request, full_url=False):
    """Send the request's HTTP status line and header fields.

    Coroutine. Serializes *request* to bytes and writes it to the
    connection, notifying the data event dispatcher of the raw bytes.

    Args:
        full_url: forwarded to ``prepare_for_send``; presumably makes
            the request line carry the absolute URL (proxy style) --
            confirm against the Request implementation.
    """
    _logger.debug('Sending headers.')

    if hasattr(request, 'prepare_for_send'):
        request.prepare_for_send(full_url=full_url)

    if self._ignore_length:
        # Without a usable length, the connection close delimits the
        # message.
        request.fields['Connection'] = 'close'

    data = request.to_bytes()

    self._data_event_dispatcher.notify_write(data)

    # No drain here; the body write follows immediately.
    yield from self._connection.write(data, drain=False)
Send the request's HTTP status line and header fields.
12,572
def write_body(self, file, length=None):
    """Send the request's content body.

    Coroutine. Reads from *file* in chunks of ``self._read_size`` and
    writes each chunk to the connection, notifying the data event
    dispatcher of the bytes sent.

    Args:
        file: file-like object; its ``read`` may be a coroutine
            function, in which case it is awaited.
        length: total number of bytes to send. When ``None``, the file
            is read until exhausted.
    """
    _logger.debug('Sending body.')

    file_is_async = (asyncio.iscoroutine(file.read) or
                     asyncio.iscoroutinefunction(file.read))

    _logger.debug(__('Body is async: {0}', file_is_async))

    # BUG FIX: ``bytes_left`` was previously assigned only when
    # *length* was given, but read unconditionally below in the drain
    # decision, raising NameError for length=None.
    bytes_left = length

    while True:
        if bytes_left is not None:
            if bytes_left <= 0:
                break
            read_size = min(bytes_left, self._read_size)
        else:
            read_size = self._read_size

        if file_is_async:
            data = yield from file.read(read_size)
        else:
            data = file.read(read_size)

        if not data:
            break

        self._data_event_dispatcher.notify_write(data)

        # Skip the drain only when this is known to be the final chunk.
        if bytes_left is not None and bytes_left <= self._read_size:
            drain = False
        else:
            drain = True

        yield from self._connection.write(data, drain=drain)

        if bytes_left is not None:
            bytes_left -= len(data)
Send the request's content body.
12,573
def read_response(self, response=None):
    """Read the response's HTTP status line and header fields.

    Coroutine. Reads header lines from the connection until the blank
    line, notifying the data event dispatcher of the raw bytes.

    Args:
        response: an existing Response to populate; a new one is
            created when ``None``.

    Raises:
        ProtocolError: a header line is malformed, the header block
            exceeds 32768 bytes, or no header was received.
        NetworkError: the connection closed mid-header.

    Returns:
        The populated response.
    """
    _logger.debug('Reading header.')

    if response is None:
        response = Response()

    header_lines = []
    bytes_read = 0

    while True:
        try:
            data = yield from self._connection.readline()
        except ValueError as error:
            raise ProtocolError(
                'Invalid header: {0}'.format(error)) from error

        self._data_event_dispatcher.notify_read(data)

        if not data.endswith(b'\n'):
            # readline returned without a terminator: peer closed.
            raise NetworkError('Connection closed.')
        elif data in (b'\r\n', b'\n'):
            # Blank line terminates the header block.
            break

        header_lines.append(data)
        assert data.endswith(b'\n')

        bytes_read += len(data)

        if bytes_read > 32768:
            raise ProtocolError('Header too big.')

    if not header_lines:
        raise ProtocolError('No header received.')

    response.parse(b''.join(header_lines))

    return response
Read the response's HTTP status line and header fields.
12,574
def read_body(self, request, response, file=None, raw=False):
    """Read the response's content body.

    Coroutine. Picks the read strategy (chunked, length-delimited or
    read-until-close) and streams the body into *file*, then closes
    the connection when keep-alive is off or the peer requested close.

    Args:
        file: optional destination file object.
        raw: if true, skip decompression (and, in the chunked path,
            keep the wire framing).
    """
    if is_no_body(request, response):
        return

    if not raw:
        self._setup_decompressor(response)

    read_strategy = self.get_read_strategy(response)

    if self._ignore_length and read_strategy == 'length':
        read_strategy = 'close'

    if read_strategy == 'chunked':
        yield from self._read_body_by_chunk(response, file, raw=raw)
    elif read_strategy == 'length':
        yield from self._read_body_by_length(response, file)
    else:
        yield from self._read_body_until_close(response, file)

    should_close = wpull.protocol.http.util.should_close(
        request.version, response.fields.get('Connection'))

    if not self._keep_alive or should_close:
        _logger.debug('Not keep-alive. Closing connection.')
        self.close()
Read the response's content body.
12,575
def _read_body_until_close(self, response, file):
    """Read the response until the connection closes.

    Coroutine. Streams decompressed data into *file*, awaiting the
    file's ``drain`` between writes when it has one.
    """
    _logger.debug('Reading body until close.')

    file_is_async = hasattr(file, 'drain')

    while True:
        data = yield from self._connection.read(self._read_size)

        if not data:
            # EOF: peer closed the connection.
            break

        self._data_event_dispatcher.notify_read(data)

        content_data = self._decompress_data(data)

        if file:
            file.write(content_data)

            if file_is_async:
                yield from file.drain()

    # Emit whatever the decompressor still buffers.
    content_data = self._flush_decompressor()

    if file:
        file.write(content_data)

        if file_is_async:
            yield from file.drain()
Read the response until the connection closes .
12,576
def _read_body_by_length(self, response, file):
    """Read the connection specified by a length.

    Coroutine. Reads exactly ``Content-Length`` bytes into *file*,
    falling back to read-until-close when the header value is invalid.

    Raises:
        NetworkError: the connection closed before the declared length
            was received.
    """
    _logger.debug('Reading body by length.')

    file_is_async = hasattr(file, 'drain')

    try:
        body_size = int(response.fields['Content-Length'])

        if body_size < 0:
            raise ValueError('Content length cannot be negative.')

    except ValueError as error:
        _logger.warning(__(
            _('Invalid content length: {error}'), error=error
        ))

        # Header unusable; treat as read-until-close.
        yield from self._read_body_until_close(response, file)
        return

    bytes_left = body_size

    while bytes_left > 0:
        data = yield from self._connection.read(self._read_size)

        if not data:
            break

        bytes_left -= len(data)

        if bytes_left < 0:
            # Server sent more than declared: trim the overrun with a
            # negative slice and drop the connection.
            data = data[:bytes_left]

            _logger.warning(_('Content overrun.'))
            self.close()

        self._data_event_dispatcher.notify_read(data)

        content_data = self._decompress_data(data)

        if file:
            file.write(content_data)

            if file_is_async:
                yield from file.drain()

    if bytes_left > 0:
        raise NetworkError('Connection closed.')

    content_data = self._flush_decompressor()

    if file and content_data:
        file.write(content_data)

        if file_is_async:
            yield from file.drain()
Read the response body for the length specified by the Content-Length header.
12,577
def _read_body_by_chunk(self, response, file, raw=False):
    """Read the connection using chunked transfer encoding.

    Coroutine. Parses each chunk header/body pair, then the trailer,
    which is parsed into the response's header fields.

    Args:
        raw: if true, also write chunk framing and trailer bytes to
            *file* verbatim.
    """
    reader = ChunkedTransferReader(self._connection)

    file_is_async = hasattr(file, 'drain')

    while True:
        chunk_size, data = yield from reader.read_chunk_header()

        self._data_event_dispatcher.notify_read(data)

        if raw:
            file.write(data)

        if not chunk_size:
            # Zero-size chunk terminates the body.
            break

        while True:
            content, data = yield from reader.read_chunk_body()

            self._data_event_dispatcher.notify_read(data)

            if not content:
                # End of this chunk; in raw mode keep the framing.
                if raw:
                    file.write(data)

                break

            content = self._decompress_data(content)

            if file:
                file.write(content)

                if file_is_async:
                    yield from file.drain()

    # Emit whatever the decompressor still buffers.
    content = self._flush_decompressor()

    if file:
        file.write(content)

        if file_is_async:
            yield from file.drain()

    trailer_data = yield from reader.read_trailer()

    self._data_event_dispatcher.notify_read(trailer_data)

    if file and raw:
        file.write(trailer_data)

        if file_is_async:
            yield from file.drain()

    response.fields.parse(trailer_data)
Read the connection using chunked transfer encoding .
12,578
def get_read_strategy(cls, response):
    """Return the appropriate algorithm of reading response.

    Returns:
        str: ``'chunked'``, ``'length'`` or ``'close'``.
    """
    transfer_encoding = response.fields.get('Transfer-Encoding', '')

    if re.match(r'chunked($|;)', transfer_encoding):
        return 'chunked'

    if 'Content-Length' in response.fields:
        return 'length'

    return 'close'
Return the appropriate strategy for reading the response body.
12,579
def _setup_decompressor ( self , response ) : encoding = response . fields . get ( 'Content-Encoding' , '' ) . lower ( ) if encoding == 'gzip' : self . _decompressor = wpull . decompression . GzipDecompressor ( ) elif encoding == 'deflate' : self . _decompressor = wpull . decompression . DeflateDecompressor ( ) else : self . _decompressor = None
Set up the content encoding decompressor .
12,580
def _decompress_data ( self , data ) : if self . _decompressor : try : return self . _decompressor . decompress ( data ) except zlib . error as error : raise ProtocolError ( 'zlib error: {0}.' . format ( error ) ) from error else : return data
Decompress the given data and return the uncompressed data .
12,581
def _flush_decompressor ( self ) : if self . _decompressor : try : return self . _decompressor . flush ( ) except zlib . error as error : raise ProtocolError ( 'zlib flush error: {0}.' . format ( error ) ) from error else : return b''
Return any data left in the decompressor .
12,582
def gzip_uncompress(data, truncated=False):
    """Uncompress gzip data.

    Args:
        data: the gzip-compressed bytes.
        truncated: when true, skip the final flush so an incomplete
            stream does not raise.
    """
    decompressor = SimpleGzipDecompressor()
    result = decompressor.decompress(data)

    if truncated:
        return result

    return result + decompressor.flush()
Uncompress gzip data .
12,583
def set_status(self, status: Status, increment_try_count: bool = True, filename: str = None):
    """Mark the item with the given status.

    Checks the URL into the URL table with the new status and marks
    this item as processed. Asserts the try count has not already been
    incremented for this item.

    Args:
        status: the status to record for the URL.
        increment_try_count: whether to increment the try counter on
            check-in.
        filename: the saved file's path, stored on the URL result.
    """
    url = self.url_record.url
    assert not self._try_count_incremented, (url, status)

    if increment_try_count:
        self._try_count_incremented = True

    _logger.debug(__('Marking URL {0} status {1}.', url, status))

    url_result = URLResult()
    url_result.filename = filename

    self.app_session.factory['URLTable'].check_in(
        url,
        status,
        increment_try_count=increment_try_count,
        url_result=url_result,
    )

    self._processed = True
Mark the item with the given status .
12,584
def add_child_url(self, url: str, inline: bool = False,
                  link_type: Optional[LinkType] = None,
                  post_data: Optional[str] = None,
                  level: Optional[int] = None,
                  replace: bool = False):
    """Add links scraped from the document with automatic values.

    Derives level, inline level, parent and root URL from the current
    URL record, then adds the child URL to the table.

    Args:
        url: the URL to add.
        inline: whether the URL is an embedded resource; increments
            the inline level when set.
        link_type: the type of link, if known.
        post_data: optional POST payload for the child request.
        level: explicit depth; defaults to the parent's level plus 1.
        replace: if True, remove any existing record for *url* first.
    """
    url_properties = URLProperties()
    url_properties.level = self.url_record.level + 1 if level is None else level
    url_properties.inline_level = (self.url_record.inline_level or 0) + 1 if inline else None
    url_properties.parent_url = self.url_record.url
    url_properties.root_url = self.url_record.root_url or self.url_record.url
    url_properties.link_type = link_type
    url_data = URLData()
    url_data.post_data = post_data

    if replace:
        self.app_session.factory['URLTable'].remove_many([url])

    self.add_url(url, url_properties, url_data)
Add links scraped from the document with automatic values .
12,585
def child_url_record(self, url: str, inline: bool = False,
                     link_type: Optional[LinkType] = None,
                     post_data: Optional[str] = None,
                     level: Optional[int] = None):
    """Return a child URLRecord.

    Builds a fresh record derived from this item's record: status
    ``todo``, zero tries, level one deeper than the parent (unless
    *level* is given), and parent/root URLs taken from this item.
    """
    url_record = URLRecord()
    url_record.url = url
    url_record.status = Status.todo
    url_record.try_count = 0
    url_record.level = self.url_record.level + 1 if level is None else level
    url_record.root_url = self.url_record.root_url or self.url_record.url
    url_record.parent_url = self.url_record.url
    url_record.inline_level = (self.url_record.inline_level or 0) + 1 if inline else 0
    url_record.link_type = link_type
    url_record.post_data = post_data

    return url_record
Return a child URLRecord .
12,586
def get_heading_encoding(response):
    """Return the document encoding from a HTTP header.

    Returns:
        str or None: the normalized codec name from the
        ``content-type`` charset parameter, or ``None`` when no
        charset is declared.
    """
    charset = wpull.protocol.http.util.parse_charset(
        response.fields.get('content-type', ''))

    if not charset:
        return None

    return wpull.string.normalize_codec_name(charset)
Return the document encoding from an HTTP header.
12,587
def detect_response_encoding(response, is_html=False, peek=131072):
    """Return the likely encoding of the response document.

    Args:
        is_html: hint the detector that the body is HTML.
        peek: number of bytes to sample from the body.
    """
    header_encoding = get_heading_encoding(response)
    sample = wpull.util.peek_file(response.body, peek)

    encoding = wpull.string.detect_encoding(
        sample, encoding=header_encoding, is_html=is_html)

    _logger.debug(__('Got encoding: {0}', encoding))

    return encoding
Return the likely encoding of the response document .
12,588
def contains(self, url: str):
    """Return whether the URL is in the table."""
    try:
        self.get_one(url)
    except NotFound:
        return False

    return True
Return whether the URL is in the table .
12,589
def add_one(self, url: str, url_properties: Optional[URLProperties] = None,
            url_data: Optional[URLData] = None):
    """Add a single URL to the table.

    Convenience wrapper delegating to ``add_many`` with one item.
    """
    self.add_many([AddURLInfo(url, url_properties, url_data)])
Add a single URL to the table .
12,590
def stream(self):
    """Iterate the file stream.

    Reads ``self._file`` in chunks of ``self._read_size`` and scans it
    with ``self._pattern``, yielding 2-tuples:

    * ``(None, text)`` for unmatched spans, and
    * ``(match, captured_text)`` for each regex match's last group.

    Consecutive chunks overlap by ``self._overlap_size`` so matches
    straddling a chunk boundary are still found.
    """
    chunk_a = None
    chunk_b = None
    # Absolute offsets (from file start) of each chunk's first byte.
    chunk_a_index = 0
    chunk_b_index = 0
    # Absolute offset of the first byte not yet yielded.
    search_start_index = 0

    while True:
        # Slide the window: the lookahead chunk becomes current.
        chunk_a = chunk_b
        chunk_a_index = chunk_b_index
        chunk_b = self._file.read(self._read_size)

        if chunk_a is None:
            # First iteration only primes the lookahead chunk.
            continue

        chunk_b_index = chunk_a_index + len(chunk_a)

        if not chunk_a:
            break

        # Search chunk_a plus a small overlap into chunk_b.
        current_chunk = chunk_a + chunk_b[:self._overlap_size]
        offset_end = len(chunk_a) + self._overlap_size

        while True:
            # Translate the absolute resume point into this chunk.
            offset_start = search_start_index - chunk_a_index
            match = self._pattern.search(current_chunk, offset_start, offset_end)

            if not match:
                # Emit the rest of chunk_a as unmatched text.
                unmatched_part = chunk_a[offset_start:]

                if unmatched_part:
                    yield (None, unmatched_part)

                search_start_index += len(unmatched_part)
                break

            start_index, end_index = match.span(match.lastindex)

            unmatched_part = current_chunk[offset_start:start_index]

            if unmatched_part:
                yield (None, unmatched_part)

            yield (match, match.group(match.lastindex))

            search_start_index += len(unmatched_part) + len(match.group(match.lastindex))
Iterate the file stream .
12,591
def notify(self, *args, **kwargs):
    """Call all the callback handlers with given arguments.

    Iterates over a snapshot of the handler collection so handlers may
    register or remove handlers during dispatch.
    """
    for callback in tuple(self.handlers):
        callback(*args, **kwargs)
Call all the callback handlers with given arguments .
12,592
def new(self, name, *args, **kwargs):
    """Create an instance.

    Instantiates the class registered under *name*, stores it in the
    instance map, and returns it.

    Raises:
        ValueError: the named instance was already created.
    """
    if name in self._instance_map:
        raise ValueError('Instance {0} is already initialized'.format(name))

    constructor = self._class_map[name]
    instance = constructor(*args, **kwargs)

    self._instance_map[name] = instance

    return instance
Create an instance .
12,593
def is_all_initialized(self):
    """Return whether all the instances have been initialized.

    Compares the set of registered class names against the set of
    created instance names.
    """
    defined = frozenset(self._class_map.keys())
    created = frozenset(self._instance_map.keys())

    return defined == created
Return whether all the instances have been initialized .
12,594
def normalize_name(name, overrides=None):
    """Normalize the key name to title case.

    For example, ``content-type`` becomes ``Content-Type``. When an
    override whose title-cased form matches is present in *overrides*,
    that exact spelling is returned instead (e.g. ``WWW-Authenticate``).
    """
    normalized_name = name.title()

    if overrides:
        # Idiom fix: build the map with a dict comprehension instead
        # of dict() over a list of pairs.
        override_map = {
            override.title(): override for override in overrides
        }
        return override_map.get(normalized_name, normalized_name)

    return normalized_name
Normalize the key name to title case .
12,595
def guess_line_ending(string):
    """Return the most likely line delimiter from the string.

    Counts CRLF versus LF occurrences; ties (including the empty
    string) favor CRLF. Note the LF count includes the LF inside each
    CRLF pair.
    """
    assert isinstance(string, str), 'Expect str. Got {}'.format(type(string))

    crlf_count = string.count('\r\n')
    lf_count = string.count('\n')

    return '\r\n' if crlf_count >= lf_count else '\n'
Return the most likely line delimiter from the string .
12,596
def unfold_lines(string):
    """Join lines that are wrapped.

    Any line beginning with a space or tab is a continuation of the
    previous line and is joined to it with a single space. Output
    lines are delimited by CRLF, including a trailing CRLF.
    """
    assert isinstance(string, str), 'Expect str. Got {}'.format(type(string))

    buffer = io.StringIO()

    for index, line in enumerate(string.splitlines()):
        if line and line[:1] in (' ', '\t'):
            # Continuation line: join with a single space.
            buffer.write(' ')
        elif index != 0:
            buffer.write('\r\n')

        buffer.write(line.strip())

    buffer.write('\r\n')

    return buffer.getvalue()
Join lines that are wrapped .
12,597
def parse(self, string, strict=True):
    """Parse the string or bytes.

    Appends the text to ``self.raw``, unfolds wrapped lines, and adds
    each ``name: value`` pair via ``self.add``.

    Args:
        string: str or bytes; bytes are decoded with ``self.encoding``.
        strict: if true, raise ``ValueError`` on undecodable bytes or
            on a non-empty line missing a colon; otherwise replace or
            skip them.
    """
    if isinstance(string, bytes):
        errors = 'strict' if strict else 'replace'
        string = string.decode(self.encoding, errors=errors)

    if not self.raw:
        self.raw = string
    else:
        self.raw += string

    lines = unfold_lines(string).splitlines()
    for line in lines:
        if line:
            if ':' not in line:
                if strict:
                    raise ValueError('Field missing colon.')
                else:
                    # Lenient mode: skip malformed lines.
                    continue

            name, value = line.split(':', 1)
            name = name.strip()
            value = value.strip()

            self.add(name, value)
Parse the string or bytes .
12,598
def add(self, name, value):
    """Append the name-value pair to the record.

    The name is normalized (title case, honoring the declared
    overrides) before insertion so lookups are case-insensitive.
    """
    normalized_name = normalize_name(name, self._normalize_overrides)
    self._map[normalized_name].append(value)
Append the name - value pair to the record .
12,599
def get_list(self, name):
    """Return all the values for given name.

    The name is normalized the same way ``add`` normalizes it.
    """
    normalized_name = normalize_name(name, self._normalize_overrides)
    return self._map[normalized_name]
Return all the values for given name .