idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
12,400
def set_content_length(self):
    """Find and set the Content-Length field from the block file size."""
    if not self.block_file:
        self.fields['Content-Length'] = '0'
        return

    with wpull.util.reset_file_offset(self.block_file):
        wpull.util.seek_file_end(self.block_file)
        length = self.block_file.tell()

    self.fields['Content-Length'] = str(length)
Find and set the content length .
12,401
def compute_checksum ( self , payload_offset : Optional [ int ] = None ) : if not self . block_file : self . fields [ 'Content-Length' ] = '0' return block_hasher = hashlib . sha1 ( ) payload_hasher = hashlib . sha1 ( ) with wpull . util . reset_file_offset ( self . block_file ) : if payload_offset is not None : data =...
Compute and add the checksum data to the record fields .
12,402
def get_http_header ( self ) -> Response : with wpull . util . reset_file_offset ( self . block_file ) : data = self . block_file . read ( 4096 ) match = re . match ( br'(.*?\r?\n\r?\n)' , data ) if not match : return status_line , dummy , field_str = match . group ( 1 ) . partition ( b'\n' ) try : version , code , rea...
Return the HTTP header .
12,403
def _write_config ( self ) : param_dict = { 'url' : self . _params . url , 'snapshot_paths' : self . _params . snapshot_paths , 'wait_time' : self . _params . wait_time , 'num_scrolls' : self . _params . num_scrolls , 'smart_scroll' : self . _params . smart_scroll , 'snapshot' : self . _params . snapshot , 'viewport_wi...
Write the parameters to a file for PhantomJS to read .
12,404
def clean(self, force: bool = False):
    """Clean closed connections.

    Args:
        force: If True, close connections even if they are not
            already closed.

    Coroutine.
    """
    with (yield from self._lock):
        # Iterate over a snapshot: we remove from ``self.ready`` inside
        # the loop.
        for connection in tuple(self.ready):
            if force or connection.closed():
                connection.close()
                self.ready.remove(connection)
Clean closed connections .
12,405
def close(self):
    """Forcibly close every connection, idle and busy alike."""
    for group in (self.ready, self.busy):
        for connection in group:
            connection.close()
    self._closed = True
Forcibly close all connections .
12,406
def acquire ( self ) -> Connection : assert not self . _closed yield from self . _condition . acquire ( ) while True : if self . ready : connection = self . ready . pop ( ) break elif len ( self . busy ) < self . max_connections : connection = self . _connection_factory ( ) break else : yield from self . _condition . w...
Register and return a connection .
12,407
def release(self, connection: Connection, reuse: bool = True):
    """Unregister a connection.

    Args:
        connection: A connection earlier returned by ``acquire``.
        reuse: If True, put the connection back in the ready set.

    Coroutine.
    """
    yield from self._condition.acquire()
    self.busy.remove(connection)
    if reuse:
        self.ready.add(connection)
    # Wake one waiter in acquire() now that a slot is free.
    self._condition.notify()
    self._condition.release()
Unregister a connection .
12,408
def acquire ( self , host : str , port : int , use_ssl : bool = False , host_key : Optional [ Any ] = None ) -> Union [ Connection , SSLConnection ] : assert isinstance ( port , int ) , 'Expect int. Got {}' . format ( type ( port ) ) assert not self . _closed yield from self . _process_no_wait_releases ( ) if use_ssl :...
Return an available connection .
12,409
def release(self, connection: Connection):
    """Put a connection back in the pool.

    Coroutine.
    """
    assert not self._closed
    key = connection.key
    host_pool = self._host_pools[key]
    _logger.debug('Check in %s', key)
    yield from host_pool.release(connection)
    # Trim closed (or, when over budget, all idle) connections.
    force = self.count() > self._max_count
    yield from self.clean(force=force)
Put a connection back in the pool .
12,410
def session(self, host: str, port: int, use_ssl: bool = False):
    """Return a context manager that returns a connection.

    The connection is released back to the pool (without waiting)
    when the context exits.

    Coroutine.
    """
    connection = yield from self.acquire(host, port, use_ssl)

    @contextlib.contextmanager
    def context_wrapper():
        try:
            yield connection
        finally:
            # Non-blocking release so the caller is never suspended here.
            self.no_wait_release(connection)

    return context_wrapper()
Return a context manager that returns a connection .
12,411
def clean ( self , force : bool = False ) : assert not self . _closed with ( yield from self . _host_pools_lock ) : for key , pool in tuple ( self . _host_pools . items ( ) ) : yield from pool . clean ( force = force ) if not self . _host_pool_waiters [ key ] and pool . empty ( ) : del self . _host_pools [ key ] del se...
Clean all closed connections .
12,412
def close(self):
    """Close every host pool and discard all bookkeeping for it."""
    for key, pool in tuple(self._host_pools.items()):
        pool.close()
        self._host_pools.pop(key)
        self._host_pool_waiters.pop(key)
    self._closed = True
Close all the connections and clean up .
12,413
def count(self) -> int:
    """Return the total number of connections across all host pools."""
    return sum(pool.count() for pool in self._host_pools.values())
Return number of connections .
12,414
def set_preferred(self, preferred_addr, addr_1, addr_2):
    """Remember the preferred address for an unordered address pair."""
    # Normalize the pair so (a, b) and (b, a) share one cache entry.
    key = (addr_1, addr_2) if addr_1 <= addr_2 else (addr_2, addr_1)
    self._cache[key] = preferred_addr
Set the preferred address .
12,415
def get_preferred(self, addr_1, addr_2):
    """Return the preferred address for an unordered address pair."""
    # Same key normalization as set_preferred.
    key = (addr_1, addr_2) if addr_1 <= addr_2 else (addr_2, addr_1)
    return self._cache.get(key)
Return the preferred address .
12,416
def _connect_dual_stack ( self , primary_address , secondary_address ) : self . _primary_connection = self . _connection_factory ( primary_address ) self . _secondary_connection = self . _connection_factory ( secondary_address ) @ asyncio . coroutine def connect_primary ( ) : yield from self . _primary_connection . con...
Connect using happy eyeballs .
12,417
def _get_preferred_host ( self , result : ResolveResult ) -> Tuple [ str , str ] : host_1 = result . first_ipv4 . ip_address if result . first_ipv4 else None host_2 = result . first_ipv6 . ip_address if result . first_ipv6 else None if not host_2 : return host_1 , None elif not host_1 : return host_2 , None preferred_h...
Get preferred host from DNS results .
12,418
def _check_journals_and_maybe_raise(self):
    """Raise OSError when a leftover journal marks an incomplete WARC."""
    leftovers = glob.glob(self._prefix_filename + '*-wpullinc')
    if leftovers:
        raise OSError('WARC file {} is incomplete.'.format(leftovers[0]))
Check if any journal files exist and raise an error .
12,419
def _start_new_warc_file ( self , meta = False ) : if self . _params . max_size and not meta and self . _params . appending : while True : self . _warc_filename = self . _generate_warc_filename ( ) if os . path . exists ( self . _warc_filename ) : _logger . debug ( 'Skip {0}' , self . _warc_filename ) self . _sequence_...
Create and set as current WARC file .
12,420
def _generate_warc_filename ( self , meta = False ) : if self . _params . max_size is None : sequence_name = '' elif meta : sequence_name = '-meta' else : sequence_name = '-{0:05d}' . format ( self . _sequence_num ) if self . _params . compress : extension = 'warc.gz' else : extension = 'warc' return '{0}{1}.{2}' . for...
Return a suitable WARC filename .
12,421
def _start_new_cdx_file(self):
    """Create and set the current CDX file."""
    self._cdx_filename = '{0}.cdx'.format(self._prefix_filename)
    if not self._params.appending:
        # Fresh run: start the CDX file over from scratch.
        wpull.util.truncate_file(self._cdx_filename)
        self._write_cdx_header()
    elif not os.path.exists(self._cdx_filename):
        # Appending, but the file does not exist yet: still write header.
        self._write_cdx_header()
Create and set current CDX file .
12,422
def _populate_warcinfo ( self , extra_fields = None ) : self . _warcinfo_record . set_common_fields ( WARCRecord . WARCINFO , WARCRecord . WARC_FIELDS ) info_fields = NameValueRecord ( wrap_width = 1024 ) info_fields [ 'Software' ] = self . _params . software_string or self . DEFAULT_SOFTWARE_STRING info_fields [ 'form...
Add the metadata to the Warcinfo record .
12,423
def _setup_log ( self ) : logger = logging . getLogger ( ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) self . _log_temp_file = NamedTemporaryFile ( prefix = 'tmp-wpull-warc-' , dir = self . _params . temp_dir , suffix = '.log.gz' , delete = False , ) self . _log_temp_file ...
Set up the logging file .
12,424
def _move_file_to_dest_dir ( self , filename ) : assert self . _params . move_to if os . path . isdir ( self . _params . move_to ) : _logger . debug ( 'Moved {} to {}.' , self . _warc_filename , self . _params . move_to ) shutil . move ( filename , self . _params . move_to ) else : _logger . error ( '{} is not a direct...
Move the file to the move_to directory .
12,425
def set_length_and_maybe_checksums(self, record, payload_offset=None):
    """Set the record's content length and, when enabled, its digests."""
    if not self._params.digests:
        record.set_content_length()
    else:
        record.compute_checksum(payload_offset)
Set the content length and possibly the checksums .
12,426
def write_record ( self , record ) : record . fields [ 'WARC-Warcinfo-ID' ] = self . _warcinfo_record . fields [ WARCRecord . WARC_RECORD_ID ] _logger . debug ( 'Writing WARC record {0}.' , record . fields [ 'WARC-Type' ] ) if self . _params . compress : open_func = gzip . GzipFile else : open_func = open if os . path ...
Append the record to the WARC file .
12,427
def close ( self ) : if self . _log_temp_file : self . _log_handler . flush ( ) logger = logging . getLogger ( ) logger . removeHandler ( self . _log_handler ) self . _log_handler . stream . close ( ) log_record = WARCRecord ( ) log_record . block_file = gzip . GzipFile ( filename = self . _log_temp_file . name ) log_r...
Close the WARC file and clean up any logging handlers .
12,428
def _write_cdx_header(self):
    """Write the CDX header.

    The single-letter codes are CDX field identifiers — presumably per
    the CDX file format spec (e.g. 'a' = original URL); verify against
    the CDX writer before relying on their meaning.
    """
    with open(self._cdx_filename, mode='a', encoding='utf-8') as out_file:
        out_file.write(self.CDX_DELIMINATOR)
        out_file.write(self.CDX_DELIMINATOR.join((
            'CDX', 'a', 'b', 'm', 's', 'k', 'S', 'V', 'g', 'u')))
        out_file.write('\n')
Write the CDX header .
12,429
def _write_cdx_field ( self , record , raw_file_record_size , raw_file_offset ) : if record . fields [ WARCRecord . WARC_TYPE ] != WARCRecord . RESPONSE or not re . match ( r'application/http; *msgtype *= *response' , record . fields [ WARCRecord . CONTENT_TYPE ] ) : return url = record . fields [ 'WARC-Target-URI' ] _...
Write the CDX field if needed .
12,430
def parse_mimetype(cls, value):
    """Return the MIME type from a Content-Type string, or None."""
    match = re.match(r'([a-zA-Z0-9-]+/[a-zA-Z0-9-]+)', value)
    return match.group(1) if match else None
Return the MIME type from a Content - Type string .
12,431
def _new_temp_file(self, hint='warcrecsess'):
    """Return a new temp file located in the configured temp directory."""
    return wpull.body.new_temp_file(directory=self._temp_dir, hint=hint)
Return new temp file .
12,432
def _record_revisit ( self , payload_offset : int ) : fields = self . _response_record . fields ref_record_id = self . _url_table . get_revisit_id ( fields [ 'WARC-Target-URI' ] , fields . get ( 'WARC-Payload-Digest' , '' ) . upper ( ) . replace ( 'SHA1:' , '' ) ) if ref_record_id : try : self . _response_record . bloc...
Record the revisit if possible .
12,433
def increment(self, size: int):
    """Record one more downloaded file of `size` bytes."""
    assert size >= 0, size
    self.size += size
    self.files += 1
    self.bandwidth_meter.feed(size)
Increment the number of files downloaded .
12,434
def is_quota_exceeded(self) -> bool:
    """Return whether the download quota is exceeded.

    The quota only counts once every root URL has been attempted
    (``get_root_url_todo_count() == 0``).
    """
    if self.quota and self._url_table is not None:
        return (self.size >= self.quota and
                self._url_table.get_root_url_todo_count() == 0)
    # Bug fix: previously fell through and implicitly returned None
    # despite the declared ``bool`` return type. None is falsy, so an
    # explicit False stays backward compatible for boolean callers.
    return False
Return whether the quota is exceeded .
12,435
def increment_error(self, error: Exception):
    """Tally an error, bucketing under the highest-priority base class.

    Falls back to the error's concrete type when no priority class
    matches.
    """
    _logger.debug('Increment error %s', error)
    bucket = type(error)
    for error_class in ERROR_PRIORITIES:
        if isinstance(error, error_class):
            bucket = error_class
            break
    self.errors[bucket] += 1
Increment the error counter preferring base exceptions .
12,436
def link_head(self, node):
    """Insert `node` before this one, making it the new head."""
    assert not node.tail
    previous_head = self.head
    if previous_head:
        # Splice the node between the old head and this node.
        assert previous_head.tail == self
        previous_head.tail = node
        node.head = previous_head
    node.tail = self
    self.head = node
Add a node to the head .
12,437
def link_tail(self, node):
    """Insert `node` after this one, making it the new tail."""
    assert not node.head
    previous_tail = self.tail
    if previous_tail:
        # Splice the node between this node and the old tail.
        assert previous_tail.head == self
        previous_tail.head = node
        node.tail = previous_tail
    node.head = self
    self.tail = node
Add a node to the tail .
12,438
def unlink(self):
    """Detach this node, splicing its head and tail together."""
    neighbor_head, neighbor_tail = self.head, self.tail
    self.head = self.tail = None
    if neighbor_head:
        neighbor_head.tail = neighbor_tail
    if neighbor_tail:
        neighbor_tail.head = neighbor_head
Remove this node and link any head or tail .
12,439
def prepare_for_send ( self , full_url = False ) : assert self . url assert self . method assert self . version url_info = self . url_info if 'Host' not in self . fields : self . fields [ 'Host' ] = url_info . hostname_with_port if not full_url : if url_info . query : self . resource_path = '{0}?{1}' . format ( url_inf...
Modify the request to be suitable for HTTP server .
12,440
def is_response(cls, response):
    """Return whether the response document is likely to be CSS."""
    content_type = response.fields.get('content-type', '').lower()
    if 'css' in content_type:
        return True
    # Some servers mislabel CSS as HTML; sniff the body to check.
    if response.body and 'html' in content_type:
        return cls.is_file(response.body)
Return whether the document is likely to be CSS .
12,441
def is_file(cls, file):
    """Return whether the file is likely CSS."""
    sample = wpull.string.printable_bytes(
        wpull.util.peek_file(file)).lower()

    if b'<html' in sample:
        return VeryFalse

    if re.search(br'@import |color:|background[a-z-]*:|font[a-z-]*:',
                 sample):
        return True
Return whether the file is likely CSS .
12,442
def start ( self ) : self . _current_session = session = self . _http_client . session ( ) request = self . next_request ( ) assert request if request . url_info . password or request . url_info . hostname_with_port in self . _hostnames_with_auth : self . _add_basic_auth_header ( request ) response = yield from session...
Begin fetching the next request .
12,443
def download(self, file: Optional[IO[bytes]] = None,
             duration_timeout: Optional[float] = None):
    """Download content.

    Args:
        file: Optional destination file object; passed straight to the
            current session's ``download``.
        duration_timeout: Maximum allowed duration of the download.

    Coroutine.
    """
    yield from self._current_session.download(
        file, duration_timeout=duration_timeout)
Download content .
12,444
def _process_response ( self , response : Response ) : _logger . debug ( 'Handling response' ) self . _redirect_tracker . load ( response ) if self . _redirect_tracker . is_redirect ( ) : self . _process_redirect ( ) self . _loop_type = LoopType . redirect elif response . status_code == http . client . UNAUTHORIZED and...
Handle the response and update the internal state .
12,445
def _process_redirect ( self ) : _logger . debug ( 'Handling redirect.' ) if self . _redirect_tracker . exceeded ( ) : raise ProtocolError ( 'Too many redirects.' ) try : url = self . _redirect_tracker . next_location ( ) if not url : raise ProtocolError ( 'Redirect location missing.' ) if self . _redirect_tracker . is...
Update the Redirect Tracker .
12,446
def _get_cookie_referrer_host(self):
    """Return the referrer hostname, or None when no Referer is set."""
    referer = self._original_request.fields.get('Referer')
    return URLInfo.parse(referer).hostname if referer else None
Return the referrer hostname .
12,447
def _add_cookies(self, request: Request):
    """Add the cookie headers to the Request."""
    self._cookie_jar.add_cookie_header(
        request, self._get_cookie_referrer_host())
Add the cookie headers to the Request .
12,448
def _extract_cookies(self, response: Response):
    """Load the cookie headers from the Response into the jar."""
    self._cookie_jar.extract_cookies(
        response, response.request, self._get_cookie_referrer_host())
Load the cookie headers from the Response .
12,449
def session(self, request: Request) -> WebSession:
    """Return a fetch session for the given request."""
    return WebSession(
        request,
        http_client=self._http_client,
        # A fresh tracker per session: redirect counts must not leak
        # between fetches.
        redirect_tracker=self._redirect_tracker_factory(),
        request_factory=self._request_factory,
        cookie_jar=self._cookie_jar,
    )
Return a fetch session .
12,450
def to_dir_path_url(url_info: URLInfo) -> str:
    """Return URL string with the path replaced with directory only."""
    dir_name = posixpath.dirname(url_info.path)
    # Ensure exactly one trailing slash on the directory URL.
    template = 'ftp://{}{}' if dir_name.endswith('/') else 'ftp://{}{}/'
    return template.format(url_info.hostname_with_port, dir_name)
Return URL string with the path replaced with directory only .
12,451
def _prepare_request_file_vs_dir ( self , request : Request ) -> bool : if self . _item_session . url_record . link_type : is_file = self . _item_session . url_record . link_type == LinkType . file elif request . url_info . path . endswith ( '/' ) : is_file = False else : is_file = 'unknown' if is_file == 'unknown' : f...
Check if file modify request and return whether is a file .
12,452
def _fetch_parent_path ( self , request : Request , use_cache : bool = True ) : directory_url = to_dir_path_url ( request . url_info ) if use_cache : if directory_url in self . _processor . listing_cache : return self . _processor . listing_cache [ directory_url ] directory_request = copy . deepcopy ( request ) directo...
Fetch parent directory and return list FileEntry .
12,453
def _add_listing_links ( self , response : ListingResponse ) : base_url = response . request . url_info . url if self . _glob_pattern : level = self . _item_session . url_record . level else : level = None for file_entry in response . files : if self . _glob_pattern and not fnmatch . fnmatchcase ( file_entry . name , s...
Add links from file listing response .
12,454
def _make_symlink ( self , link_name : str , link_target : str ) : path = self . _file_writer_session . extra_resource_path ( 'dummy' ) if path : dir_path = os . path . dirname ( path ) symlink_path = os . path . join ( dir_path , link_name ) _logger . debug ( 'symlink {} -> {}' , symlink_path , link_target ) os . syml...
Make a symlink on the system .
12,455
def _apply_unix_permissions ( self , request : Request , response : Response ) : files = yield from self . _fetch_parent_path ( request ) if not files : return filename = posixpath . basename ( request . file_path ) for file_entry in files : if file_entry . name == filename and file_entry . perm : _logger . debug ( 'Se...
Fetch and apply Unix permissions .
12,456
def _build_url_rewriter(cls, session: AppSession):
    """Build URL rewriter if needed; returns None otherwise."""
    args = session.args
    if args.escaped_fragment or args.strip_session_id:
        return session.factory.new(
            'URLRewriter',
            hash_fragment=args.escaped_fragment,
            session_id=args.strip_session_id)
Build URL rewriter if needed .
12,457
def _build_url_filters ( cls , session : AppSession ) : args = session . args filters = [ HTTPSOnlyFilter ( ) if args . https_only else SchemeFilter ( ) , RecursiveFilter ( enabled = args . recursive , page_requisites = args . page_requisites ) , FollowFTPFilter ( follow = args . follow_ftp ) , ] if args . no_parent : ...
Create the URL filter instances .
12,458
def _build_document_converter ( cls , session : AppSession ) : if not session . args . convert_links : return converter = session . factory . new ( 'BatchDocumentConverter' , session . factory [ 'HTMLParser' ] , session . factory [ 'ElementWalker' ] , session . factory [ 'URLTable' ] , backup = session . args . backup_...
Build the Document Converter .
12,459
def _setup_logging ( cls , args ) : assert ( logging . CRITICAL > logging . ERROR > logging . WARNING > logging . INFO > logging . DEBUG > logging . NOTSET ) assert ( LOG_VERY_QUIET > LOG_QUIET > LOG_NO_VERBOSE > LOG_VERBOSE > LOG_DEBUG ) assert args . verbosity root_logger = logging . getLogger ( ) current_level = roo...
Set up the root logger if needed .
12,460
def _setup_console_logger ( cls , session : AppSession , args , stderr ) : stream = new_encoded_stream ( args , stderr ) logger = logging . getLogger ( ) session . console_log_handler = handler = logging . StreamHandler ( stream ) formatter = logging . Formatter ( '%(levelname)s %(message)s' ) log_filter = logging . Fi...
Set up the console logger .
12,461
def _setup_file_logger ( cls , session : AppSession , args ) : if not ( args . output_file or args . append_output ) : return logger = logging . getLogger ( ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) if args . output_file : filename = args . output_file mode = 'w' else ...
Set up the file message logger .
12,462
def _run_driver ( self , item_session : ItemSession , request , response ) : _logger . debug ( 'Started PhantomJS processing.' ) session = PhantomJSCoprocessorSession ( self . _phantomjs_driver_factory , self . _root_path , self . _processing_rule , self . _file_writer_session , request , response , item_session , self...
Start PhantomJS processing .
12,463
def _add_warc_action_log ( self , path , url ) : _logger . debug ( 'Adding action log record.' ) actions = [ ] with open ( path , 'r' , encoding = 'utf-8' , errors = 'replace' ) as file : for line in file : actions . append ( json . loads ( line ) ) log_data = json . dumps ( { 'actions' : actions } , indent = 4 , ) . e...
Add the action log to the WARC file .
12,464
def _add_warc_snapshot ( self , filename , url ) : _logger . debug ( 'Adding snapshot record.' ) extension = os . path . splitext ( filename ) [ 1 ] content_type = { '.pdf' : 'application/pdf' , '.html' : 'text/html' , '.png' : 'image/png' , '.gif' : 'image/gif' } [ extension ] record = WARCRecord ( ) record . set_comm...
Add the snapshot to the WARC file .
12,465
def _scrape_document ( self ) : mock_response = self . _new_mock_response ( self . _response , self . _get_temp_path ( 'phantom' , '.html' ) ) self . _item_session . request = self . _request self . _item_session . response = mock_response self . _processing_rule . scrape_document ( item_session ) if mock_response . bo...
Extract links from the DOM .
12,466
def _new_mock_response ( self , response , file_path ) : mock_response = copy . copy ( response ) mock_response . body = Body ( open ( file_path , 'rb' ) ) mock_response . fields = NameValueRecord ( ) for name , value in response . fields . get_all ( ) : mock_response . fields . add ( name , value ) mock_response . fie...
Return a new mock Response with the content .
12,467
def _build_ssl_context ( cls , session : AppSession ) -> ssl . SSLContext : args = session . args ssl_context = ssl . SSLContext ( args . secure_protocol ) if args . check_certificate : ssl_context . verify_mode = ssl . CERT_REQUIRED cls . _load_ca_certs ( session ) ssl_context . load_verify_locations ( session . ca_ce...
Create the SSL options .
12,468
def _load_ca_certs ( cls , session : AppSession , clean : bool = True ) : args = session . args if session . ca_certs_filename : return session . ca_certs_filename certs = set ( ) if args . use_internal_ca_certs : pem_filename = os . path . join ( os . path . dirname ( __file__ ) , '..' , '..' , 'cert' , 'ca-bundle.pem...
Load the Certificate Authority certificates .
12,469
def _read_pem_file(cls, filename, from_package=False):
    """Read the PEM file and return its filtered certificate data.

    Args:
        filename: Filesystem path, or package-data name when
            `from_package` is True.
    """
    _logger.debug('Reading PEM {0}.'.format(filename))
    if from_package:
        return wpull.util.filter_pem(wpull.util.get_package_data(filename))
    with open(filename, 'rb') as in_file:
        return wpull.util.filter_pem(in_file.read())
Read the PEM file .
12,470
def start ( self , request : Request ) -> Response : if self . _session_state != SessionState . ready : raise RuntimeError ( 'Session already started' ) assert not self . _request self . _request = request _logger . debug ( __ ( 'Client fetch request {0}.' , request ) ) connection = yield from self . _acquire_request_c...
Begin an HTTP request .
12,471
def to_bytes ( instance , encoding = 'utf-8' , error = 'strict' ) : if isinstance ( instance , bytes ) : return instance elif hasattr ( instance , 'encode' ) : return instance . encode ( encoding , error ) elif isinstance ( instance , list ) : return list ( [ to_bytes ( item , encoding , error ) for item in instance ] ...
Convert an instance recursively to bytes .
12,472
def to_str ( instance , encoding = 'utf-8' ) : if isinstance ( instance , str ) : return instance elif hasattr ( instance , 'decode' ) : return instance . decode ( encoding ) elif isinstance ( instance , list ) : return list ( [ to_str ( item , encoding ) for item in instance ] ) elif isinstance ( instance , tuple ) : ...
Convert an instance recursively to string .
12,473
def detect_encoding ( data , encoding = None , fallback = 'latin1' , is_html = False ) : if encoding : encoding = normalize_codec_name ( encoding ) bs4_detector = EncodingDetector ( data , override_encodings = ( encoding , ) if encoding else ( ) , is_html = is_html ) candidates = itertools . chain ( bs4_detector . enco...
Detect the character encoding of the data .
12,474
def try_decoding ( data , encoding ) : try : data . decode ( encoding , 'strict' ) except UnicodeError : if len ( data ) > 16 : for trim in ( 1 , 2 , 3 ) : trimmed_data = data [ : - trim ] if trimmed_data : try : trimmed_data . decode ( encoding , 'strict' ) except UnicodeError : continue else : return True return Fals...
Return whether the Python codec could decode the data .
12,475
def format_size(num, format_str='{num:.1f} {unit}'):
    """Format the file size into a human readable text."""
    for unit in ('B', 'KiB', 'MiB', 'GiB'):
        if -1024 < num < 1024:
            break
        num /= 1024.0
    else:
        # Larger than GiB range: report in TiB.
        unit = 'TiB'
    return format_str.format(num=num, unit=unit)
Format the file size into a human readable text .
12,476
def printable_str(text, keep_newlines=False):
    """Escape any control or non-ASCII characters from string."""
    escaped = ascii(text)
    if isinstance(text, str):
        # Drop the surrounding quotes that ascii() adds for str.
        escaped = escaped[1:-1]
    if keep_newlines:
        escaped = escaped.replace('\\r', '\r').replace('\\n', '\n')
    return escaped
Escape any control or non - ASCII characters from string .
12,477
def _print_status ( self ) : self . _clear_line ( ) self . _print ( ' ' ) if self . max_value : self . _print_percent ( ) self . _print ( ' ' ) self . _print_bar ( ) else : self . _print_throbber ( ) self . _print ( ' ' ) if self . measurement == Measurement . bytes : self . _print_size_downloaded ( ) else : self . _p...
Print an entire status line including bar and stats .
12,478
def _print_throbber(self):
    """Print an indefinite progress bar (a moving 'O' marker)."""
    self._print('[')
    for position in range(self._bar_width):
        self._print('O' if position == self._throbber_index else ' ')
    self._print(']')
    # Advance the marker position for the next redraw.
    self._throbber_index = next(self._throbber_iter)
Print an indefinite progress bar .
12,479
def _print_bar ( self ) : self . _print ( '[' ) for position in range ( self . _bar_width ) : position_fraction = position / ( self . _bar_width - 1 ) position_bytes = position_fraction * self . max_value if position_bytes < ( self . continue_value or 0 ) : self . _print ( '+' ) elif position_bytes <= ( self . continue...
Print a progress bar .
12,480
def _print_duration(self):
    """Print the elapsed download time (rendered as a timedelta)."""
    duration = int(time.time() - self._start_time)
    self._print(datetime.timedelta(seconds=duration))
Print the elapsed download time .
12,481
def _print_speed ( self ) : if self . _bandwidth_meter . num_samples : speed = self . _bandwidth_meter . speed ( ) if self . _human_format : file_size_str = wpull . string . format_size ( speed ) else : file_size_str = '{:.1f} b' . format ( speed * 8 ) speed_str = _ ( '{preformatted_file_size}/s' ) . format ( preformat...
Print the current speed .
12,482
def _print_percent(self):
    """Print how much is done in percentage."""
    # Bug fix: `or` binds more loosely than `+`, so the previous
    # expression `(self.continue_value or 0 + self.current_value)`
    # evaluated as `continue_value or (0 + current_value)` and ignored
    # current_value whenever continue_value was truthy (resumed
    # downloads). Parenthesize the default correctly.
    fraction_done = (((self.continue_value or 0) + self.current_value) /
                     self.max_value)
    self._print('{fraction_done:.1%}'.format(fraction_done=fraction_done))
Print how much is done in percentage .
12,483
def register(self, name: str):
    """Register a named hook so a callback may later be connected."""
    if name in self._callbacks:
        raise ValueError('Hook already registered')
    self._callbacks[name] = None
    dispatcher = self._event_dispatcher
    if dispatcher is not None:
        dispatcher.register(name)
Register hooks that can be connected .
12,484
def unregister(self, name: str):
    """Unregister hook."""
    self._callbacks.pop(name)
    dispatcher = self._event_dispatcher
    if dispatcher is not None:
        dispatcher.unregister(name)
Unregister hook .
12,485
def connect(self, name, callback):
    """Add callback to hook; only one callback may be connected."""
    if self._callbacks[name]:
        raise HookAlreadyConnectedError('Callback hook already connected.')
    self._callbacks[name] = callback
Add callback to hook .
12,486
def call(self, name: str, *args, **kwargs):
    """Invoke the connected callback, notifying any event dispatcher."""
    if self._event_dispatcher is not None:
        self._event_dispatcher.notify(name, *args, **kwargs)

    callback = self._callbacks[name]
    if not callback:
        raise HookDisconnected('No callback is connected.')
    return callback(*args, **kwargs)
Invoke the callback .
12,487
def get_crawl_delay(self, user_agent):
    """Return the crawl delay for the given user agent.

    Returns a float, or None if the crawl delay was unspecified or
    not a float.
    """
    # Python 2 compatibility: normalize byte strings to unicode before
    # matching (``unicode`` does not exist on Python 3; the version
    # guard keeps this branch dead there).
    if (PY_MAJOR_VERSION < 3) and (not isinstance(user_agent, unicode)):
        user_agent = user_agent.decode()
    for ruleset in self.__rulesets:
        if ruleset.does_user_agent_match(user_agent):
            return ruleset.crawl_delay
    return None
Returns a float representing the crawl delay specified for this user agent , or None if the crawl delay was unspecified or not a float .
12,488
def parse ( self , s ) : self . _sitemaps = [ ] self . __rulesets = [ ] if ( PY_MAJOR_VERSION > 2 ) and ( isinstance ( s , bytes ) or isinstance ( s , bytearray ) ) or ( PY_MAJOR_VERSION == 2 ) and ( not isinstance ( s , unicode ) ) : s = s . decode ( "iso-8859-1" ) s = _end_of_line_regex . sub ( "\n" , s ) lines = s ....
Parses the passed string as a set of robots . txt rules .
12,489
def close_stream_on_error(func):
    """Decorator to close the stream on error.

    Wraps a coroutine method so that ``self.close`` is invoked when the
    wrapped call raises; the result is still a coroutine.
    """
    @asyncio.coroutine
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        # close_on_error calls self.close only if the body raises.
        with wpull.util.close_on_error(self.close):
            return (yield from func(self, *args, **kwargs))
    return wrapper
Decorator to close stream on error .
12,490
def url_to_filename ( url , index = 'index.html' , alt_char = False ) : assert isinstance ( url , str ) , 'Expect str. Got {}.' . format ( type ( url ) ) url_split_result = urllib . parse . urlsplit ( url ) filename = url_split_result . path . split ( '/' ) [ - 1 ] if not filename : filename = index if url_split_result...
Return a filename from a URL .
12,491
def url_to_dir_parts ( url , include_protocol = False , include_hostname = False , alt_char = False ) : assert isinstance ( url , str ) , 'Expect str. Got {}.' . format ( type ( url ) ) url_split_result = urllib . parse . urlsplit ( url ) parts = [ ] if include_protocol : parts . append ( url_split_result . scheme ) if...
Return a list of directory parts from a URL .
12,492
def safe_filename ( filename , os_type = 'unix' , no_control = True , ascii_only = True , case = None , encoding = 'utf8' , max_length = None ) : assert isinstance ( filename , str ) , 'Expect str. Got {}.' . format ( type ( filename ) ) if filename in ( '.' , os . curdir ) : new_filename = '%2E' elif filename in ( '.'...
Return a safe filename or path part .
12,493
def anti_clobber_dir_path(dir_path, suffix='.d'):
    """Return a directory path free of filenames.

    Any path prefix that already exists as a regular file gets
    `suffix` appended so directories can be created without clobbering.
    """
    parts = os.path.normpath(dir_path).split(os.sep)
    for index, _ in enumerate(parts):
        # Later prefixes include any suffix added to earlier parts.
        if os.path.isfile(os.sep.join(parts[:index + 1])):
            parts[index] += suffix
    return os.sep.join(parts)
Return a directory path free of filenames .
12,494
def parse_content_disposition ( text ) : match = re . search ( r'filename\s*=\s*(.+)' , text , re . IGNORECASE ) if not match : return filename = match . group ( 1 ) if filename [ 0 ] in '"\'' : match = re . match ( r'(.)(.+)(?!\\)\1' , filename ) if match : filename = match . group ( 2 ) . replace ( '\\"' , '"' ) retu...
Parse a Content - Disposition header value .
12,495
def safe_filename(self, part):
    """Return a safe filename or file part.

    Delegates to the module-level ``safe_filename`` using this
    instance's configured sanitization policy.
    """
    return safe_filename(
        part,
        os_type=self._os_type,
        no_control=self._no_control,
        ascii_only=self._ascii_only,
        case=self._case,
        max_length=self._max_filename_length,
    )
Return a safe filename or file part .
12,496
def _warn_unsafe_options ( cls , args ) : enabled_options = [ ] for option_name in cls . UNSAFE_OPTIONS : if getattr ( args , option_name ) : enabled_options . append ( option_name ) if enabled_options : _logger . warning ( __ ( _ ( 'The following unsafe options are enabled: {list}.' ) , list = enabled_options ) ) _log...
Print warnings about any enabled hazardous options .
12,497
def _warn_silly_options ( cls , args ) : if 'page-requisites' in args . span_hosts_allow and not args . page_requisites : _logger . warning ( _ ( 'Spanning hosts is allowed for page requisites, ' 'but the page requisites option is not on.' ) ) if 'linked-pages' in args . span_hosts_allow and not args . recursive : _log...
Print warnings about any options that may be silly .
12,498
def parse_month(text: str) -> int:
    """Parse month string into integer.

    Accepts a full month name or its three-letter abbreviation
    (case-insensitive), per MONTH_MAP.

    Raises:
        ValueError: if the month is not recognized.
    """
    lowered = text.lower()
    # Try the full name first, then the three-letter abbreviation.
    for candidate in (lowered, lowered[:3]):
        try:
            return MONTH_MAP[candidate]
        except KeyError:
            pass
    raise ValueError('Month {} not found.'.format(repr(text)))
Parse month string into integer .
12,499
def y2k(year: int) -> int:
    """Convert a two-digit year to a four-digit year.

    Uses the POSIX/RFC 2822 convention: 69-99 map to 1969-1999 and
    0-68 map to 2000-2068.

    Raises:
        AssertionError: if ``year`` is not within [0, 99].
    """
    assert 0 <= year <= 99, 'Not a two digit year {}'.format(year)
    # Bug fix: the addend was 1000, yielding nonsense years like 1069
    # for input 69; two-digit years >= 69 belong to the 1900s.
    return year + 1900 if year >= 69 else year + 2000
Convert two digit year to four digit year .