idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
def set_content_length(self):
    """Find and set the content length."""
    if not self.block_file:
        # No block body: record an explicit zero length.
        self.fields['Content-Length'] = '0'
        return

    # Measure the block by seeking to its end; the context manager
    # restores the original file offset afterwards.
    with wpull.util.reset_file_offset(self.block_file):
        wpull.util.seek_file_end(self.block_file)
        self.fields['Content-Length'] = str(self.block_file.tell())
def compute_checksum(self, payload_offset: Optional[int] = None):
    """Compute and add the checksum data to the record fields.

    The block digest covers the entire block; the payload digest covers
    only the bytes after ``payload_offset`` when it is given. Also sets
    Content-Length as a side effect.
    """
    if not self.block_file:
        self.fields['Content-Length'] = '0'
        return

    block_hasher = hashlib.sha1()
    payload_hasher = hashlib.sha1()

    with wpull.util.reset_file_offset(self.block_file):
        if payload_offset is not None:
            # Bytes before the payload count only toward the block hash.
            data = self.block_file.read(payload_offset)
            block_hasher.update(data)

        while True:
            data = self.block_file.read(4096)

            if data == b'':
                break

            block_hasher.update(data)
            payload_hasher.update(data)

        # Offset after reading everything equals the block length.
        content_length = self.block_file.tell()

    content_hash = block_hasher.digest()

    self.fields['WARC-Block-Digest'] = 'sha1:{0}'.format(
        base64.b32encode(content_hash).decode())

    if payload_offset is not None:
        payload_hash = payload_hasher.digest()
        self.fields['WARC-Payload-Digest'] = 'sha1:{0}'.format(
            base64.b32encode(payload_hash).decode())

    self.fields['Content-Length'] = str(content_length)
def get_http_header(self) -> Response:
    """Return the HTTP header.

    Parses the first 4096 bytes of the block. Returns None (implicit)
    when no header boundary is found or parsing fails.
    """
    with wpull.util.reset_file_offset(self.block_file):
        data = self.block_file.read(4096)

    # NOTE(review): without re.DOTALL, '.' does not cross newlines, so
    # this only matches up to the first blank line following a single
    # line — verify multi-line headers are matched as intended.
    match = re.match(br'(.*?\r?\n\r?\n)', data)

    if not match:
        return

    status_line, dummy, field_str = match.group(1).partition(b'\n')

    try:
        version, code, reason = Response.parse_status_line(status_line)
    except ValueError:
        return

    response = Response(status_code=code, reason=reason, version=version)

    try:
        response.fields.parse(field_str, strict=False)
    except ValueError:
        return

    return response
def _write_config(self):
    """Write the parameters to a file for PhantomJS to read."""
    param_dict = {
        'url': self._params.url,
        'snapshot_paths': self._params.snapshot_paths,
        'wait_time': self._params.wait_time,
        'num_scrolls': self._params.num_scrolls,
        'smart_scroll': self._params.smart_scroll,
        'snapshot': self._params.snapshot,
        'viewport_width': self._params.viewport_size[0],
        'viewport_height': self._params.viewport_size[1],
        'paper_width': self._params.paper_size[0],
        'paper_height': self._params.paper_size[1],
        'custom_headers': self._params.custom_headers,
        'page_settings': self._params.page_settings,
    }

    if self._params.event_log_filename:
        # The subprocess may run with a different working directory,
        # so pass absolute paths.
        param_dict['event_log_filename'] = os.path.abspath(
            self._params.event_log_filename)

    if self._params.action_log_filename:
        param_dict['action_log_filename'] = os.path.abspath(
            self._params.action_log_filename)

    config_text = json.dumps(param_dict)
    self._config_file.write(config_text.encode('utf-8'))

    # Close the file so the external process can read it.
    self._config_file.close()
def clean(self, force: bool = False):
    """Clean closed connections.

    Args:
        force: Close and remove idle connections even if they are not
            already closed.

    Coroutine.
    """
    with (yield from self._lock):
        # Iterate over a snapshot since we mutate self.ready.
        for connection in tuple(self.ready):
            if force or connection.closed():
                connection.close()
                self.ready.remove(connection)
def close(self):
    """Forcibly close all connections.

    Closes idle connections first, then busy ones, and marks the pool
    as closed.
    """
    for group in (self.ready, self.busy):
        for conn in group:
            conn.close()

    self._closed = True
def acquire(self) -> Connection:
    """Register and return a connection.

    Reuses an idle connection when available; otherwise creates a new
    one if the busy count is under ``max_connections``; otherwise waits
    on the condition until a connection is released.

    Coroutine.
    """
    assert not self._closed

    yield from self._condition.acquire()

    while True:
        if self.ready:
            # Reuse an idle connection.
            connection = self.ready.pop()
            break
        elif len(self.busy) < self.max_connections:
            # Room to create a fresh connection.
            connection = self._connection_factory()
            break
        else:
            # Pool exhausted: wait for release() to notify.
            yield from self._condition.wait()

    self.busy.add(connection)
    self._condition.release()

    return connection
def release(self, connection: Connection, reuse: bool = True):
    """Unregister a connection.

    Args:
        connection: The connection previously returned by acquire().
        reuse: If True, return the connection to the ready set for
            reuse; otherwise drop it from the pool.

    Coroutine.
    """
    yield from self._condition.acquire()
    self.busy.remove(connection)

    if reuse:
        self.ready.add(connection)

    # Wake one waiter blocked in acquire().
    self._condition.notify()
    self._condition.release()
def acquire(self, host: str, port: int, use_ssl: bool = False,
            host_key: Optional[Any] = None) \
        -> Union[Connection, SSLConnection]:
    """Return an available connection.

    Args:
        host: Hostname or IP address.
        port: Port number.
        use_ssl: Whether to return an SSL connection.
        host_key: Overrides the host pool key
            (default ``(host, port, use_ssl)``).

    Coroutine.
    """
    assert isinstance(port, int), 'Expect int. Got {}'.format(type(port))
    assert not self._closed

    yield from self._process_no_wait_releases()

    if use_ssl:
        connection_factory = functools.partial(
            self._ssl_connection_factory, hostname=host)
    else:
        connection_factory = functools.partial(
            self._connection_factory, hostname=host)

    # Wrap the factory so each connection performs dual-stack (happy
    # eyeballs) connection attempts.
    connection_factory = functools.partial(
        HappyEyeballsConnection, (host, port), connection_factory,
        self._resolver, self._happy_eyeballs_table, is_ssl=use_ssl)

    key = host_key or (host, port, use_ssl)

    with (yield from self._host_pools_lock):
        if key not in self._host_pools:
            host_pool = self._host_pools[key] = HostPool(
                connection_factory,
                max_connections=self._max_host_count)
            self._host_pool_waiters[key] = 1
        else:
            host_pool = self._host_pools[key]
            # Track waiters so clean() does not delete a pool in use.
            self._host_pool_waiters[key] += 1

    _logger.debug('Check out %s', key)

    connection = yield from host_pool.acquire()
    # Remember the pool key for release().
    connection.key = key

    with (yield from self._host_pools_lock):
        self._host_pool_waiters[key] -= 1

    return connection
def release(self, connection: Connection):
    """Put a connection back in the pool.

    Coroutine.
    """
    assert not self._closed

    # The pool key was stamped onto the connection by acquire().
    key = connection.key
    host_pool = self._host_pools[key]

    _logger.debug('Check in %s', key)

    yield from host_pool.release(connection)

    # Force-close idle connections when over the global limit.
    force = self.count() > self._max_count
    yield from self.clean(force=force)
def session(self, host: str, port: int, use_ssl: bool = False):
    """Return a context manager that returns a connection.

    The connection is released (without blocking) when the context
    manager exits.

    Coroutine.
    """
    connection = yield from self.acquire(host, port, use_ssl)

    @contextlib.contextmanager
    def context_wrapper():
        try:
            yield connection
        finally:
            # Queue the connection for release without waiting.
            self.no_wait_release(connection)

    return context_wrapper()
def clean(self, force: bool = False):
    """Clean all closed connections.

    Args:
        force: Clean connected and idle connections too.

    Coroutine.
    """
    assert not self._closed

    with (yield from self._host_pools_lock):
        # Snapshot the items because pools may be deleted while iterating.
        for key, pool in tuple(self._host_pools.items()):
            yield from pool.clean(force=force)

            # Only drop pools that are empty and have no waiters.
            if not self._host_pool_waiters[key] and pool.empty():
                del self._host_pools[key]
                del self._host_pool_waiters[key]
def close(self):
    """Close all the connections and clean up.

    Every host pool is closed and removed along with its waiter
    counter, then the pool is marked closed.
    """
    for key in list(self._host_pools.keys()):
        self._host_pools.pop(key).close()
        self._host_pool_waiters.pop(key)

    self._closed = True
def count(self) -> int:
    """Return number of connections."""
    return sum(pool.count() for pool in self._host_pools.values())
def set_preferred(self, preferred_addr, addr_1, addr_2):
    """Set the preferred address.

    The pair of addresses is normalized into sorted order so lookups
    are symmetric.
    """
    key = (addr_1, addr_2) if addr_1 <= addr_2 else (addr_2, addr_1)
    self._cache[key] = preferred_addr
def get_preferred(self, addr_1, addr_2):
    """Return the preferred address.

    Returns None when the pair has no recorded preference. Lookup is
    symmetric in the two addresses.
    """
    key = (addr_1, addr_2) if addr_1 <= addr_2 else (addr_2, addr_1)
    return self._cache.get(key)
def _connect_dual_stack(self, primary_address, secondary_address):
    """Connect using happy eyeballs.

    Starts connections to both addresses concurrently, keeps whichever
    completes first, closes the loser in the background, and records
    the winner's host as the preferred address.

    Coroutine.
    """
    self._primary_connection = self._connection_factory(primary_address)
    self._secondary_connection = self._connection_factory(secondary_address)

    @asyncio.coroutine
    def connect_primary():
        yield from self._primary_connection.connect()
        return self._primary_connection

    @asyncio.coroutine
    def connect_secondary():
        yield from self._secondary_connection.connect()
        return self._secondary_connection

    primary_fut = connect_primary()
    secondary_fut = connect_secondary()

    failed = False

    for fut in asyncio.as_completed((primary_fut, secondary_fut)):
        if not self._active_connection:
            try:
                self._active_connection = yield from fut
            except NetworkError:
                # Tolerate one failure; re-raise only if both fail.
                if not failed:
                    _logger.debug('Original dual stack exception',
                                  exc_info=True)
                    failed = True
                else:
                    raise
            else:
                _logger.debug('Got first of dual stack.')
        else:
            # A winner already exists: close the other connection in
            # the background once it resolves.
            # NOTE(review): ``fut`` is captured by closure; this is
            # safe only because the loop ends after this iteration.
            @asyncio.coroutine
            def cleanup():
                try:
                    conn = yield from fut
                except NetworkError:
                    pass
                else:
                    conn.close()
                    _logger.debug('Closed abandoned connection.')

            asyncio.get_event_loop().create_task(cleanup())

    preferred_host = self._active_connection.host

    self._happy_eyeballs_table.set_preferred(
        preferred_host, primary_address[0], secondary_address[0])
12,417 | def _get_preferred_host ( self , result : ResolveResult ) -> Tuple [ str , str ] : host_1 = result . first_ipv4 . ip_address if result . first_ipv4 else None host_2 = result . first_ipv6 . ip_address if result . first_ipv6 else None if not host_2 : return host_1 , None elif not host_1 : return host_2 , None preferred_host = self . _happy_eyeballs_table . get_preferred ( host_1 , host_2 ) if preferred_host : return preferred_host , None else : return host_1 , host_2 | Get preferred host from DNS results . |
12,418 | def _check_journals_and_maybe_raise ( self ) : files = list ( glob . glob ( self . _prefix_filename + '*-wpullinc' ) ) if files : raise OSError ( 'WARC file {} is incomplete.' . format ( files [ 0 ] ) ) | Check if any journal files exist and raise an error . |
def _start_new_warc_file(self, meta=False):
    """Create and set as current WARC file."""
    if self._params.max_size and not meta and self._params.appending:
        # Appending with sequenced files: advance the sequence number
        # past filenames that already exist on disk.
        while True:
            self._warc_filename = self._generate_warc_filename()

            if os.path.exists(self._warc_filename):
                _logger.debug('Skip {0}', self._warc_filename)
                self._sequence_num += 1
            else:
                break
    else:
        self._warc_filename = self._generate_warc_filename(meta=meta)

    _logger.debug('WARC file at {0}', self._warc_filename)

    if not self._params.appending:
        wpull.util.truncate_file(self._warc_filename)

    # Every new file starts with a fresh warcinfo record.
    self._warcinfo_record = WARCRecord()
    self._populate_warcinfo(self._params.extra_fields)
    self.write_record(self._warcinfo_record)
12,420 | def _generate_warc_filename ( self , meta = False ) : if self . _params . max_size is None : sequence_name = '' elif meta : sequence_name = '-meta' else : sequence_name = '-{0:05d}' . format ( self . _sequence_num ) if self . _params . compress : extension = 'warc.gz' else : extension = 'warc' return '{0}{1}.{2}' . format ( self . _prefix_filename , sequence_name , extension ) | Return a suitable WARC filename . |
def _start_new_cdx_file(self):
    """Create and set current CDX file.

    When not appending, the file is truncated and a header written;
    when appending, a header is written only if the file is new.
    """
    self._cdx_filename = self._prefix_filename + '.cdx'

    if self._params.appending:
        if not os.path.exists(self._cdx_filename):
            self._write_cdx_header()
    else:
        wpull.util.truncate_file(self._cdx_filename)
        self._write_cdx_header()
def _populate_warcinfo(self, extra_fields=None):
    """Add the metadata to the Warcinfo record.

    Args:
        extra_fields: Optional iterable of (name, value) pairs appended
            to the warcinfo fields.
    """
    self._warcinfo_record.set_common_fields(
        WARCRecord.WARCINFO, WARCRecord.WARC_FIELDS)

    info_fields = NameValueRecord(wrap_width=1024)
    info_fields['Software'] = self._params.software_string \
        or self.DEFAULT_SOFTWARE_STRING
    info_fields['format'] = 'WARC File Format 1.0'
    info_fields['conformsTo'] = \
        'http://bibnum.bnf.fr/WARC/WARC_ISO_28500_version1_latestdraft.pdf'

    if extra_fields:
        for name, value in extra_fields:
            info_fields.add(name, value)

    # The serialized fields become the record block.
    self._warcinfo_record.block_file = io.BytesIO(
        bytes(info_fields) + b'\r\n')
    self._warcinfo_record.compute_checksum()
def _setup_log(self):
    """Set up the logging file.

    Installs a gzip-compressed temporary log file as a handler on the
    root logger so the session log can later be archived into the WARC.
    """
    logger = logging.getLogger()
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    self._log_temp_file = NamedTemporaryFile(
        prefix='tmp-wpull-warc-',
        dir=self._params.temp_dir,
        suffix='.log.gz',
        delete=False,
    )
    # Close the handle; the path is re-opened below through GzipFile.
    self._log_temp_file.close()

    self._log_handler = handler = logging.StreamHandler(
        io.TextIOWrapper(
            gzip.GzipFile(filename=self._log_temp_file.name, mode='wb'),
            encoding='utf-8'))

    # Root logger must pass everything; the handler filters to INFO.
    logger.setLevel(logging.DEBUG)
    logger.debug('Wpull needs the root logger level set to DEBUG.')

    handler.setFormatter(formatter)
    logger.addHandler(handler)
    handler.setLevel(logging.INFO)
def _move_file_to_dest_dir(self, filename):
    """Move the file to the move_to directory.

    Args:
        filename: Path of the file to move.

    Logs an error and does nothing when the destination is not a
    directory.
    """
    assert self._params.move_to

    if os.path.isdir(self._params.move_to):
        shutil.move(filename, self._params.move_to)
        # BUG FIX: previously logged before the move and logged
        # self._warc_filename instead of the file actually moved
        # (wrong for CDX/log files). Log after success with `filename`.
        _logger.debug('Moved {} to {}.', filename, self._params.move_to)
    else:
        _logger.error('{} is not a directory; not moving {}.',
                      self._params.move_to, filename)
def set_length_and_maybe_checksums(self, record, payload_offset=None):
    """Set the content length and possibly the checksums.

    Digest computation is skipped entirely when digests are disabled in
    the parameters; only the content length is set in that case.
    """
    if not self._params.digests:
        record.set_content_length()
    else:
        record.compute_checksum(payload_offset)
def write_record(self, record):
    """Append the record to the WARC file.

    A ``*-wpullinc`` journal file holds the pre-write offset; on a
    failed write the WARC file is rolled back to that offset and the
    error re-raised. The journal is removed in all cases.
    """
    # FIXME: probably not a good idea to modifiy arguments passed to us
    # but any old physical file offset is not valid anymore
    record.fields['WARC-Warcinfo-ID'] = \
        self._warcinfo_record.fields[WARCRecord.WARC_RECORD_ID]

    _logger.debug('Writing WARC record {0}.', record.fields['WARC-Type'])

    if self._params.compress:
        open_func = gzip.GzipFile
    else:
        open_func = open

    if os.path.exists(self._warc_filename):
        before_offset = os.path.getsize(self._warc_filename)
    else:
        before_offset = 0

    journal_filename = self._warc_filename + '-wpullinc'

    with open(journal_filename, 'w') as file:
        file.write('wpull-journal-version:1\n')
        file.write('offset:{}\n'.format(before_offset))

    try:
        with open_func(self._warc_filename, mode='ab') as out_file:
            for data in record:
                out_file.write(data)
    except (OSError, IOError) as error:
        _logger.info(
            _('Rolling back file (unknown) to length {length}.'),
            filename=self._warc_filename, length=before_offset)

        # BUG FIX: mode='wb' truncated the file to zero BEFORE the
        # rollback truncate, destroying every earlier record ('wb'
        # always clears the file on open). 'ab' preserves existing
        # bytes and still permits truncate() to the saved offset.
        with open(self._warc_filename, mode='ab') as out_file:
            out_file.truncate(before_offset)

        raise error
    finally:
        # The write completed or was rolled back; drop the journal.
        os.remove(journal_filename)

    after_offset = os.path.getsize(self._warc_filename)

    if self._cdx_filename:
        raw_file_offset = before_offset
        raw_file_record_size = after_offset - before_offset

        self._write_cdx_field(
            record, raw_file_record_size, raw_file_offset)
def close(self):
    """Close the WARC file and clean up any logging handlers.

    Archives the session log into the WARC (a meta file when size
    splitting is enabled) and moves the WARC/CDX files to the
    destination directory if configured.
    """
    if self._log_temp_file:
        self._log_handler.flush()

        # Detach the handler before archiving the log contents.
        logger = logging.getLogger()
        logger.removeHandler(self._log_handler)
        self._log_handler.stream.close()

        log_record = WARCRecord()
        log_record.block_file = gzip.GzipFile(
            filename=self._log_temp_file.name)
        log_record.set_common_fields('resource', 'text/plain')
        log_record.fields['WARC-Target-URI'] = 'urn:X-wpull:log'

        if self._params.max_size is not None:
            if self._params.move_to is not None:
                self._move_file_to_dest_dir(self._warc_filename)

            # The log goes into its own meta WARC file.
            self._start_new_warc_file(meta=True)

        self.set_length_and_maybe_checksums(log_record)
        self.write_record(log_record)

        log_record.block_file.close()

        try:
            os.remove(self._log_temp_file.name)
        except OSError:
            _logger.exception('Could not close log temp file.')

        self._log_temp_file = None

        self._log_handler.close()
        self._log_handler = None

    if self._params.move_to is not None:
        self._move_file_to_dest_dir(self._warc_filename)

    if self._cdx_filename and self._params.move_to is not None:
        self._move_file_to_dest_dir(self._cdx_filename)
12,428 | def _write_cdx_header ( self ) : with open ( self . _cdx_filename , mode = 'a' , encoding = 'utf-8' ) as out_file : out_file . write ( self . CDX_DELIMINATOR ) out_file . write ( self . CDX_DELIMINATOR . join ( ( 'CDX' , 'a' , 'b' , 'm' , 's' , 'k' , 'S' , 'V' , 'g' , 'u' ) ) ) out_file . write ( '\n' ) | Write the CDX header . |
def _write_cdx_field(self, record, raw_file_record_size, raw_file_offset):
    """Write the CDX field if needed.

    Only HTTP response records are indexed; anything else is ignored.
    """
    if record.fields[WARCRecord.WARC_TYPE] != WARCRecord.RESPONSE \
            or not re.match(
                r'application/http; *msgtype *= *response',
                record.fields[WARCRecord.CONTENT_TYPE]):
        return

    url = record.fields['WARC-Target-URI']

    _logger.debug('Writing CDX record {0}.', url)

    http_header = record.get_http_header()

    if http_header:
        mime_type = self.parse_mimetype(
            http_header.fields.get('Content-Type', '')) or '-'
        response_code = str(http_header.status_code)
    else:
        # Header could not be parsed; use CDX placeholder values.
        mime_type = '-'
        response_code = '-'

    timestamp = str(int(wpull.util.parse_iso8601_str(
        record.fields[WARCRecord.WARC_DATE])))

    checksum = record.fields.get('WARC-Payload-Digest', '')

    if checksum.startswith('sha1:'):
        checksum = checksum.replace('sha1:', '', 1)
    else:
        checksum = '-'

    raw_file_record_size_str = str(raw_file_record_size)
    raw_file_offset_str = str(raw_file_offset)
    filename = os.path.basename(self._warc_filename)
    record_id = record.fields[WARCRecord.WARC_RECORD_ID]
    fields_strs = (
        url,
        timestamp,
        mime_type,
        response_code,
        checksum,
        raw_file_record_size_str,
        raw_file_offset_str,
        filename,
        record_id
    )

    with open(self._cdx_filename, mode='a', encoding='utf-8') as out_file:
        out_file.write(self.CDX_DELIMINATOR.join(fields_strs))
        out_file.write('\n')
def parse_mimetype(cls, value):
    """Return the MIME type from a Content-Type string.

    Returns None when no ``type/subtype`` prefix is present.
    """
    match = re.match(r'([a-zA-Z0-9-]+/[a-zA-Z0-9-]+)', value)
    return match.group(1) if match else None
def _new_temp_file(self, hint='warcrecsess'):
    """Return new temp file.

    The file is created in the recorder's temp directory with the given
    filename hint.
    """
    return wpull.body.new_temp_file(directory=self._temp_dir, hint=hint)
def _record_revisit(self, payload_offset: int):
    """Record the revisit if possible.

    If the URL table knows a prior record with the same payload digest,
    the response record is converted into a WARC revisit record with
    the payload stripped from the block.
    """
    fields = self._response_record.fields

    ref_record_id = self._url_table.get_revisit_id(
        fields['WARC-Target-URI'],
        fields.get('WARC-Payload-Digest', '').upper().replace('SHA1:', ''))

    if ref_record_id:
        try:
            # Keep only the header portion of the block.
            self._response_record.block_file.truncate(payload_offset)
        except TypeError:
            # File object does not accept a sized truncate: copy the
            # header bytes and rewrite the file instead.
            self._response_record.block_file.seek(0)

            data = self._response_record.block_file.read(payload_offset)

            self._response_record.block_file.truncate()
            self._response_record.block_file.seek(0)
            self._response_record.block_file.write(data)

        self._recorder.set_length_and_maybe_checksums(
            self._response_record)

        fields[WARCRecord.WARC_TYPE] = WARCRecord.REVISIT
        fields['WARC-Refers-To'] = ref_record_id
        fields['WARC-Profile'] = WARCRecord.SAME_PAYLOAD_DIGEST_URI
        fields['WARC-Truncated'] = 'length'
def increment(self, size: int):
    """Increment the number of files downloaded.

    Args:
        size: The size of the file; must be non-negative.
    """
    assert size >= 0, size

    self.bandwidth_meter.feed(size)
    self.files = self.files + 1
    self.size = self.size + size
def is_quota_exceeded(self) -> bool:
    """Return whether the quota is exceeded.

    Returns False when no quota is configured or no URL table is
    attached. (Previously returned None implicitly in that case,
    violating the ``-> bool`` annotation; False is equally falsy and
    backward-compatible.)
    """
    if not self.quota or self._url_table is None:
        return False

    # The quota only takes effect once every top-level URL has been
    # attempted.
    return (self.size >= self.quota and
            self._url_table.get_root_url_todo_count() == 0)
def increment_error(self, error: Exception):
    """Increment the error counter preferring base exceptions.

    The first matching class in ERROR_PRIORITIES is credited; otherwise
    the error's own type is used as the counter key.
    """
    _logger.debug('Increment error %s', error)

    matched_class = next(
        (cls for cls in ERROR_PRIORITIES if isinstance(error, cls)),
        None)

    key = matched_class if matched_class is not None else type(error)
    self.errors[key] += 1
def link_head(self, node):
    """Add a node to the head.

    The node must not already have a tail link; it is spliced in
    between any existing head and this node.
    """
    assert not node.tail

    previous_head = self.head

    if previous_head:
        assert previous_head.tail == self
        previous_head.tail = node

    node.head = previous_head
    node.tail = self
    self.head = node
def link_tail(self, node):
    """Add a node to the tail.

    The node must not already have a head link; it is spliced in
    between this node and any existing tail.
    """
    assert not node.head

    previous_tail = self.tail

    if previous_tail:
        assert previous_tail.head == self
        previous_tail.head = node

    node.tail = previous_tail
    node.head = self
    self.tail = node
def unlink(self):
    """Remove this node and link any head or tail.

    Neighbouring nodes are joined to each other; this node's own links
    are cleared.
    """
    neighbour_head, neighbour_tail = self.head, self.tail
    self.head = self.tail = None

    if neighbour_head:
        neighbour_head.tail = neighbour_tail

    if neighbour_tail:
        neighbour_tail.head = neighbour_head
def prepare_for_send(self, full_url=False):
    """Modify the request to be suitable for HTTP server.

    Args:
        full_url: Use the full URL as the resource path (for proxy
            style requests) instead of path and query only.
    """
    assert self.url
    assert self.method
    assert self.version

    info = self.url_info

    if 'Host' not in self.fields:
        self.fields['Host'] = info.hostname_with_port

    if full_url:
        self.resource_path = info.url
    elif info.query:
        self.resource_path = '{0}?{1}'.format(info.path, info.query)
    else:
        self.resource_path = info.path
def is_response(cls, response):
    """Return whether the document is likely to be CSS.

    A CSS content type is decisive; an HTML content type defers to
    sniffing the body. Returns None (implicit) otherwise.
    """
    content_type = response.fields.get('content-type', '').lower()

    if 'css' in content_type:
        return True

    if response.body and 'html' in content_type:
        # HTML pages sometimes embed CSS; sniff the body.
        return cls.is_file(response.body)
def is_file(cls, file):
    """Return whether the file is likely CSS.

    Peeks at the file without consuming it. Returns VeryFalse for
    HTML-looking content, True when CSS-like tokens are found, and
    None (implicit) otherwise.
    """
    sample = wpull.string.printable_bytes(
        wpull.util.peek_file(file)).lower()

    if b'<html' in sample:
        return VeryFalse

    if re.search(br'@import |color:|background[a-z-]*:|font[a-z-]*:',
                 sample):
        return True
def start(self):
    """Begin fetching the next request.

    Coroutine. Returns the Response.
    """
    self._current_session = session = self._http_client.session()

    request = self.next_request()
    assert request

    # Pre-authenticate when credentials are in the URL or the host is
    # known to require auth.
    if request.url_info.password or \
            request.url_info.hostname_with_port in \
            self._hostnames_with_auth:
        self._add_basic_auth_header(request)

    response = yield from session.start(request)

    self._process_response(response)

    return response
def download(self, file: Optional[IO[bytes]] = None,
             duration_timeout: Optional[float] = None):
    """Download content.

    Args:
        file: Optional file object receiving the document contents.
        duration_timeout: Maximum time for the whole download.

    Coroutine. Delegates to the current HTTP session.
    """
    yield from self._current_session.download(
        file, duration_timeout=duration_timeout)
def _process_response(self, response: Response):
    """Handle the response and update the internal state."""
    _logger.debug('Handling response')

    self._redirect_tracker.load(response)

    if self._redirect_tracker.is_redirect():
        self._process_redirect()
        self._loop_type = LoopType.redirect
    elif response.status_code == http.client.UNAUTHORIZED and \
            self._next_request.password:
        # Retry with authentication when credentials are available.
        self._process_authentication(response)
    else:
        # Terminal response: no follow-up request.
        self._next_request = None
        self._loop_type = LoopType.normal

    if self._cookie_jar:
        self._extract_cookies(response)

        if self._next_request:
            self._add_cookies(self._next_request)
def _process_redirect(self):
    """Update the Redirect Tracker.

    Raises:
        ProtocolError: Too many redirects or a missing/invalid
            redirect location.
    """
    _logger.debug('Handling redirect.')

    if self._redirect_tracker.exceeded():
        raise ProtocolError('Too many redirects.')

    try:
        url = self._redirect_tracker.next_location()

        if not url:
            raise ProtocolError('Redirect location missing.')

        if self._redirect_tracker.is_repeat():
            # Repeat redirect: reuse the original request unmodified
            # except for the URL.
            _logger.debug('Got redirect is repeat.')

            request = self._original_request.copy()
            request.url = url
        else:
            request = self._request_factory(url)

        request.prepare_for_send()
    except ValueError as error:
        raise ProtocolError('Invalid redirect location.') from error

    self._next_request = request

    _logger.debug('Updated next redirect request to {0}.'.format(request))
12,446 | def _get_cookie_referrer_host ( self ) : referer = self . _original_request . fields . get ( 'Referer' ) if referer : return URLInfo . parse ( referer ) . hostname else : return None | Return the referrer hostname . |
def _add_cookies(self, request: Request):
    """Add the cookie headers to the Request."""
    self._cookie_jar.add_cookie_header(
        request, self._get_cookie_referrer_host())
def _extract_cookies(self, response: Response):
    """Load the cookie headers from the Response."""
    self._cookie_jar.extract_cookies(
        response, response.request, self._get_cookie_referrer_host())
def session(self, request: Request) -> WebSession:
    """Return a fetch session.

    Args:
        request: The request to fetch.

    Each session gets its own redirect tracker; the HTTP client,
    request factory and cookie jar are shared.
    """
    return WebSession(
        request,
        http_client=self._http_client,
        redirect_tracker=self._redirect_tracker_factory(),
        request_factory=self._request_factory,
        cookie_jar=self._cookie_jar,
    )
def to_dir_path_url(url_info: URLInfo) -> str:
    """Return URL string with the path replaced with directory only.

    The result always ends with a slash.
    """
    directory = posixpath.dirname(url_info.path)

    if directory.endswith('/'):
        return 'ftp://{}{}'.format(url_info.hostname_with_port, directory)

    return 'ftp://{}{}/'.format(url_info.hostname_with_port, directory)
def _prepare_request_file_vs_dir(self, request: Request) -> bool:
    """Check if file, modify request, and return whether is a file.

    Directories get a trailing slash appended to the request URL.

    Coroutine.
    """
    if self._item_session.url_record.link_type:
        is_file = self._item_session.url_record.link_type == LinkType.file
    elif request.url_info.path.endswith('/'):
        is_file = False
    else:
        is_file = 'unknown'

    if is_file == 'unknown':
        # Consult the parent directory listing to classify the path.
        files = yield from self._fetch_parent_path(request)

        if not files:
            # No listing available: assume file.
            return True

        filename = posixpath.basename(request.file_path)

        for file_entry in files:
            if file_entry.name == filename:
                _logger.debug('Found entry in parent. Type {}',
                              file_entry.type)
                is_file = file_entry.type != 'dir'
                break
        else:
            _logger.debug('Did not find entry. Assume file.')
            return True

    if not is_file:
        request.url = append_slash_to_path_url(request.url_info)
        _logger.debug('Request URL changed to {}. Path={}.',
                      request.url, request.file_path)

    return is_file
def _fetch_parent_path(self, request: Request, use_cache: bool = True):
    """Fetch parent directory and return list FileEntry.

    Args:
        request: The request whose parent directory is listed.
        use_cache: Consult and populate the processor's listing cache.

    Returns None when the listing cannot be fetched.

    Coroutine.
    """
    directory_url = to_dir_path_url(request.url_info)

    if use_cache:
        if directory_url in self._processor.listing_cache:
            return self._processor.listing_cache[directory_url]

    directory_request = copy.deepcopy(request)
    directory_request.url = directory_url

    _logger.debug('Check if URL {} is file with {}.',
                  request.url, directory_url)

    with self._processor.ftp_client.session() as session:
        try:
            yield from session.start_listing(directory_request)
        except FTPServerError:
            _logger.debug('Got an error. Assume is file.')

            if use_cache:
                # Cache the failure so we do not retry the listing.
                self._processor.listing_cache[directory_url] = None

            return

        temp_file = tempfile.NamedTemporaryFile(
            dir=self._item_session.app_session.root_path,
            prefix='tmp-wpull-list')

        with temp_file as file:
            directory_response = yield from session.download_listing(
                file, duration_timeout=self._fetch_rule.duration_timeout)

    if use_cache:
        self._processor.listing_cache[directory_url] = \
            directory_response.files

    return directory_response.files
def _add_listing_links(self, response: ListingResponse):
    """Add links from file listing response.

    Directory entries are enqueued with a trailing slash; files and
    symlinks are enqueued as files (or materialized as local symlinks
    when symlink retrieval is disabled).
    """
    base_url = response.request.url_info.url

    if self._glob_pattern:
        level = self._item_session.url_record.level
    else:
        level = None

    for file_entry in response.files:
        if self._glob_pattern and \
                not fnmatch.fnmatchcase(file_entry.name,
                                        self._glob_pattern):
            continue

        if file_entry.type == 'dir':
            linked_url = urljoin_safe(base_url, file_entry.name + '/')
        elif file_entry.type in ('file', 'symlink', None):
            if not self._processor.fetch_params.retr_symlinks and \
                    file_entry.type == 'symlink':
                # Do not fetch the target; create a local symlink.
                self._make_symlink(file_entry.name, file_entry.dest)
                linked_url = None
            else:
                linked_url = urljoin_safe(base_url, file_entry.name)
        else:
            linked_url = None

        if linked_url:
            linked_url_info = parse_url_or_log(linked_url)

            if linked_url_info:
                verdict = self._fetch_rule.check_ftp_request(
                    self._item_session)[0]

                if verdict:
                    if linked_url_info.path.endswith('/'):
                        self._item_session.add_child_url(
                            linked_url_info.url,
                            link_type=LinkType.directory)
                    else:
                        self._item_session.add_child_url(
                            linked_url_info.url,
                            link_type=LinkType.file, level=level)
def _make_symlink(self, link_name: str, link_target: str):
    """Make a symlink on the system.

    The symlink is created next to the file writer's resource path;
    nothing happens when no resource path is available.
    """
    path = self._file_writer_session.extra_resource_path('dummy')

    if not path:
        return

    symlink_path = os.path.join(os.path.dirname(path), link_name)

    _logger.debug('symlink {} -> {}', symlink_path, link_target)

    os.symlink(link_target, symlink_path)

    _logger.info(
        _('Created symbolic link {symlink_path} to target {symlink_target}.'),
        symlink_path=symlink_path, symlink_target=link_target)
def _apply_unix_permissions(self, request: Request, response: Response):
    """Fetch and apply Unix permissions.

    Looks up the file's entry in the parent directory listing and
    chmods the downloaded body file to match.

    Coroutine.
    """
    files = yield from self._fetch_parent_path(request)

    if not files:
        return

    filename = posixpath.basename(request.file_path)

    for file_entry in files:
        if file_entry.name == filename and file_entry.perm:
            _logger.debug('Set chmod {} o{:o}.',
                          response.body.name, file_entry.perm)
            os.chmod(response.body.name, file_entry.perm)
12,456 | def _build_url_rewriter ( cls , session : AppSession ) : if session . args . escaped_fragment or session . args . strip_session_id : return session . factory . new ( 'URLRewriter' , hash_fragment = session . args . escaped_fragment , session_id = session . args . strip_session_id ) | Build URL rewriter if needed . |
def _build_url_filters(cls, session: AppSession):
    """Create the URL filter instances.

    Returns:
        A list of URL filter instances assembled from the command-line
        arguments.
    """
    args = session.args

    filters = [
        HTTPSOnlyFilter() if args.https_only else SchemeFilter(),
        RecursiveFilter(
            enabled=args.recursive, page_requisites=args.page_requisites
        ),
        FollowFTPFilter(follow=args.follow_ftp),
    ]

    if args.no_parent:
        filters.append(ParentFilter())

    if args.domains or args.exclude_domains:
        filters.append(
            BackwardDomainFilter(args.domains, args.exclude_domains)
        )

    if args.hostnames or args.exclude_hostnames:
        filters.append(
            HostnameFilter(args.hostnames, args.exclude_hostnames)
        )

    if args.tries:
        filters.append(TriesFilter(args.tries))

    # NOTE(review): parses as (level and recursive) or
    # page_requisites_level due to operator precedence — confirm that
    # grouping is intended.
    if args.level and args.recursive or args.page_requisites_level:
        filters.append(
            LevelFilter(args.level,
                        inline_max_depth=args.page_requisites_level)
        )

    if args.accept_regex or args.reject_regex:
        filters.append(RegexFilter(args.accept_regex, args.reject_regex))

    if args.include_directories or args.exclude_directories:
        filters.append(
            DirectoryFilter(
                args.include_directories, args.exclude_directories
            )
        )

    if args.accept or args.reject:
        filters.append(BackwardFilenameFilter(args.accept, args.reject))

    return filters
12,458 | def _build_document_converter ( cls , session : AppSession ) : if not session . args . convert_links : return converter = session . factory . new ( 'BatchDocumentConverter' , session . factory [ 'HTMLParser' ] , session . factory [ 'ElementWalker' ] , session . factory [ 'URLTable' ] , backup = session . args . backup_converted ) return converter | Build the Document Converter . |
12,459 | def _setup_logging ( cls , args ) : assert ( logging . CRITICAL > logging . ERROR > logging . WARNING > logging . INFO > logging . DEBUG > logging . NOTSET ) assert ( LOG_VERY_QUIET > LOG_QUIET > LOG_NO_VERBOSE > LOG_VERBOSE > LOG_DEBUG ) assert args . verbosity root_logger = logging . getLogger ( ) current_level = root_logger . getEffectiveLevel ( ) min_level = LOG_VERY_QUIET if args . verbosity == LOG_QUIET : min_level = logging . ERROR if args . verbosity in ( LOG_NO_VERBOSE , LOG_VERBOSE ) or args . warc_file or args . output_file or args . append_output : min_level = logging . INFO if args . verbosity == LOG_DEBUG : min_level = logging . DEBUG if current_level > min_level : root_logger . setLevel ( min_level ) root_logger . debug ( 'Wpull needs the root logger level set to {0}.' . format ( min_level ) ) if current_level <= logging . INFO : logging . captureWarnings ( True ) | Set up the root logger if needed . |
12,460 | def _setup_console_logger ( cls , session : AppSession , args , stderr ) : stream = new_encoded_stream ( args , stderr ) logger = logging . getLogger ( ) session . console_log_handler = handler = logging . StreamHandler ( stream ) formatter = logging . Formatter ( '%(levelname)s %(message)s' ) log_filter = logging . Filter ( 'wpull' ) handler . setFormatter ( formatter ) handler . setLevel ( args . verbosity or logging . INFO ) handler . addFilter ( log_filter ) logger . addHandler ( handler ) | Set up the console logger . |
12,461 | def _setup_file_logger ( cls , session : AppSession , args ) : if not ( args . output_file or args . append_output ) : return logger = logging . getLogger ( ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) if args . output_file : filename = args . output_file mode = 'w' else : filename = args . append_output mode = 'a' session . file_log_handler = handler = logging . FileHandler ( filename , mode , encoding = 'utf-8' ) handler . setFormatter ( formatter ) logger . addHandler ( handler ) if args . verbosity == logging . DEBUG : handler . setLevel ( logging . DEBUG ) else : handler . setLevel ( logging . INFO ) | Set up the file message logger . |
12,462 | def _run_driver ( self , item_session : ItemSession , request , response ) : _logger . debug ( 'Started PhantomJS processing.' ) session = PhantomJSCoprocessorSession ( self . _phantomjs_driver_factory , self . _root_path , self . _processing_rule , self . _file_writer_session , request , response , item_session , self . _phantomjs_params , self . _warc_recorder ) with contextlib . closing ( session ) : yield from session . run ( ) _logger . debug ( 'Ended PhantomJS processing.' ) | Start PhantomJS processing . |
12,463 | def _add_warc_action_log ( self , path , url ) : _logger . debug ( 'Adding action log record.' ) actions = [ ] with open ( path , 'r' , encoding = 'utf-8' , errors = 'replace' ) as file : for line in file : actions . append ( json . loads ( line ) ) log_data = json . dumps ( { 'actions' : actions } , indent = 4 , ) . encode ( 'utf-8' ) self . _action_warc_record = record = WARCRecord ( ) record . set_common_fields ( 'metadata' , 'application/json' ) record . fields [ 'WARC-Target-URI' ] = 'urn:X-wpull:snapshot?url={0}' . format ( wpull . url . percent_encode_query_value ( url ) ) record . block_file = io . BytesIO ( log_data ) self . _warc_recorder . set_length_and_maybe_checksums ( record ) self . _warc_recorder . write_record ( record ) | Add the action log to the WARC file . |
12,464 | def _add_warc_snapshot ( self , filename , url ) : _logger . debug ( 'Adding snapshot record.' ) extension = os . path . splitext ( filename ) [ 1 ] content_type = { '.pdf' : 'application/pdf' , '.html' : 'text/html' , '.png' : 'image/png' , '.gif' : 'image/gif' } [ extension ] record = WARCRecord ( ) record . set_common_fields ( 'resource' , content_type ) record . fields [ 'WARC-Target-URI' ] = 'urn:X-wpull:snapshot?url={0}' . format ( wpull . url . percent_encode_query_value ( url ) ) if self . _action_warc_record : record . fields [ 'WARC-Concurrent-To' ] = self . _action_warc_record . fields [ WARCRecord . WARC_RECORD_ID ] with open ( filename , 'rb' ) as in_file : record . block_file = in_file self . _warc_recorder . set_length_and_maybe_checksums ( record ) self . _warc_recorder . write_record ( record ) | Add the snapshot to the WARC file .
12,465 | def _scrape_document ( self ) : mock_response = self . _new_mock_response ( self . _response , self . _get_temp_path ( 'phantom' , '.html' ) ) self . _item_session . request = self . _request self . _item_session . response = mock_response self . _processing_rule . scrape_document ( item_session ) if mock_response . body : mock_response . body . close ( ) | Extract links from the DOM . |
12,466 | def _new_mock_response ( self , response , file_path ) : mock_response = copy . copy ( response ) mock_response . body = Body ( open ( file_path , 'rb' ) ) mock_response . fields = NameValueRecord ( ) for name , value in response . fields . get_all ( ) : mock_response . fields . add ( name , value ) mock_response . fields [ 'Content-Type' ] = 'text/html; charset="utf-8"' return mock_response | Return a new mock Response with the content . |
12,467 | def _build_ssl_context ( cls , session : AppSession ) -> ssl . SSLContext : args = session . args ssl_context = ssl . SSLContext ( args . secure_protocol ) if args . check_certificate : ssl_context . verify_mode = ssl . CERT_REQUIRED cls . _load_ca_certs ( session ) ssl_context . load_verify_locations ( session . ca_certs_filename ) else : ssl_context . verify_mode = ssl . CERT_NONE if args . strong_crypto : ssl_context . options |= ssl . OP_NO_SSLv2 ssl_context . options |= ssl . OP_NO_SSLv3 if hasattr ( ssl , 'OP_NO_COMPRESSION' ) : ssl_context . options |= ssl . OP_NO_COMPRESSION else : _logger . warning ( _ ( 'Unable to disable TLS compression.' ) ) if args . certificate : ssl_context . load_cert_chain ( args . certificate , args . private_key ) if args . edg_file : ssl . RAND_egd ( args . edg_file ) if args . random_file : with open ( args . random_file , 'rb' ) as in_file : ssl . RAND_add ( in_file . read ( 15360 ) , 0.0 ) return ssl_context | Create the SSL options . |
12,468 | def _load_ca_certs ( cls , session : AppSession , clean : bool = True ) : args = session . args if session . ca_certs_filename : return session . ca_certs_filename certs = set ( ) if args . use_internal_ca_certs : pem_filename = os . path . join ( os . path . dirname ( __file__ ) , '..' , '..' , 'cert' , 'ca-bundle.pem' ) certs . update ( cls . _read_pem_file ( pem_filename , from_package = True ) ) if args . ca_directory : if os . path . isdir ( args . ca_directory ) : for filename in os . listdir ( args . ca_directory ) : if os . path . isfile ( filename ) : certs . update ( cls . _read_pem_file ( filename ) ) else : _logger . warning ( __ ( _ ( 'Certificate directory {path} does not exist.' ) , path = args . ca_directory ) ) if args . ca_certificate : if os . path . isfile ( args . ca_certificate ) : certs . update ( cls . _read_pem_file ( args . ca_certificate ) ) else : _logger . warning ( __ ( _ ( 'Certificate file {path} does not exist.' ) , path = args . ca_certificate ) ) session . ca_certs_filename = certs_filename = tempfile . mkstemp ( suffix = '.pem' , prefix = 'tmp-wpull-' ) [ 1 ] def clean_certs_file ( ) : os . remove ( certs_filename ) if clean : atexit . register ( clean_certs_file ) with open ( certs_filename , 'w+b' ) as certs_file : for cert in certs : certs_file . write ( cert ) _logger . debug ( 'CA certs loaded.' ) | Load the Certificate Authority certificates . |
12,469 | def _read_pem_file ( cls , filename , from_package = False ) : _logger . debug ( 'Reading PEM {0}.' . format ( filename ) ) if from_package : return wpull . util . filter_pem ( wpull . util . get_package_data ( filename ) ) with open ( filename , 'rb' ) as in_file : return wpull . util . filter_pem ( in_file . read ( ) ) | Read the PEM file . |
12,470 | def start ( self , request : Request ) -> Response : if self . _session_state != SessionState . ready : raise RuntimeError ( 'Session already started' ) assert not self . _request self . _request = request _logger . debug ( __ ( 'Client fetch request {0}.' , request ) ) connection = yield from self . _acquire_request_connection ( request ) full_url = connection . proxied and not connection . tunneled self . _stream = stream = self . _stream_factory ( connection ) yield from self . _stream . reconnect ( ) request . address = connection . address self . event_dispatcher . notify ( self . Event . begin_request , request ) write_callback = functools . partial ( self . event_dispatcher . notify , self . Event . request_data ) stream . data_event_dispatcher . add_write_listener ( write_callback ) yield from stream . write_request ( request , full_url = full_url ) if request . body : assert 'Content-Length' in request . fields length = int ( request . fields [ 'Content-Length' ] ) yield from stream . write_body ( request . body , length = length ) stream . data_event_dispatcher . remove_write_listener ( write_callback ) self . event_dispatcher . notify ( self . Event . end_request , request ) read_callback = functools . partial ( self . event_dispatcher . notify , self . Event . response_data ) stream . data_event_dispatcher . add_read_listener ( read_callback ) self . _response = response = yield from stream . read_response ( ) response . request = request self . event_dispatcher . notify ( self . Event . begin_response , response ) self . _session_state = SessionState . request_sent return response | Begin a HTTP request |
12,471 | def to_bytes ( instance , encoding = 'utf-8' , error = 'strict' ) : if isinstance ( instance , bytes ) : return instance elif hasattr ( instance , 'encode' ) : return instance . encode ( encoding , error ) elif isinstance ( instance , list ) : return list ( [ to_bytes ( item , encoding , error ) for item in instance ] ) elif isinstance ( instance , tuple ) : return tuple ( [ to_bytes ( item , encoding , error ) for item in instance ] ) elif isinstance ( instance , dict ) : return dict ( [ ( to_bytes ( key , encoding , error ) , to_bytes ( value , encoding , error ) ) for key , value in instance . items ( ) ] ) else : return instance | Convert an instance recursively to bytes . |
12,472 | def to_str ( instance , encoding = 'utf-8' ) : if isinstance ( instance , str ) : return instance elif hasattr ( instance , 'decode' ) : return instance . decode ( encoding ) elif isinstance ( instance , list ) : return list ( [ to_str ( item , encoding ) for item in instance ] ) elif isinstance ( instance , tuple ) : return tuple ( [ to_str ( item , encoding ) for item in instance ] ) elif isinstance ( instance , dict ) : return dict ( [ ( to_str ( key , encoding ) , to_str ( value , encoding ) ) for key , value in instance . items ( ) ] ) else : return instance | Convert an instance recursively to string . |
12,473 | def detect_encoding ( data , encoding = None , fallback = 'latin1' , is_html = False ) : if encoding : encoding = normalize_codec_name ( encoding ) bs4_detector = EncodingDetector ( data , override_encodings = ( encoding , ) if encoding else ( ) , is_html = is_html ) candidates = itertools . chain ( bs4_detector . encodings , ( fallback , ) ) for candidate in candidates : if not candidate : continue candidate = normalize_codec_name ( candidate ) if not candidate : continue if candidate == 'ascii' and fallback != 'ascii' : continue if try_decoding ( data , candidate ) : return candidate raise ValueError ( 'Unable to detect encoding.' ) | Detect the character encoding of the data . |
12,474 | def try_decoding ( data , encoding ) : try : data . decode ( encoding , 'strict' ) except UnicodeError : if len ( data ) > 16 : for trim in ( 1 , 2 , 3 ) : trimmed_data = data [ : - trim ] if trimmed_data : try : trimmed_data . decode ( encoding , 'strict' ) except UnicodeError : continue else : return True return False else : return True | Return whether the Python codec could decode the data . |
12,475 | def format_size ( num , format_str = '{num:.1f} {unit}' ) : for unit in ( 'B' , 'KiB' , 'MiB' , 'GiB' ) : if - 1024 < num < 1024 : return format_str . format ( num = num , unit = unit ) num /= 1024.0 return format_str . format ( num = num , unit = 'TiB' ) | Format the file size into a human readable text . |
12,476 | def printable_str ( text , keep_newlines = False ) : if isinstance ( text , str ) : new_text = ascii ( text ) [ 1 : - 1 ] else : new_text = ascii ( text ) if keep_newlines : new_text = new_text . replace ( '\\r' , '\r' ) . replace ( '\\n' , '\n' ) return new_text | Escape any control or non - ASCII characters from string . |
12,477 | def _print_status ( self ) : self . _clear_line ( ) self . _print ( ' ' ) if self . max_value : self . _print_percent ( ) self . _print ( ' ' ) self . _print_bar ( ) else : self . _print_throbber ( ) self . _print ( ' ' ) if self . measurement == Measurement . bytes : self . _print_size_downloaded ( ) else : self . _print ( self . current_value ) self . _print ( ' ' ) self . _print_duration ( ) self . _print ( ' ' ) if self . measurement == Measurement . bytes : self . _print_speed ( ) self . _flush ( ) | Print an entire status line including bar and stats . |
12,478 | def _print_throbber ( self ) : self . _print ( '[' ) for position in range ( self . _bar_width ) : self . _print ( 'O' if position == self . _throbber_index else ' ' ) self . _print ( ']' ) self . _throbber_index = next ( self . _throbber_iter ) | Print an indefinite progress bar . |
12,479 | def _print_bar ( self ) : self . _print ( '[' ) for position in range ( self . _bar_width ) : position_fraction = position / ( self . _bar_width - 1 ) position_bytes = position_fraction * self . max_value if position_bytes < ( self . continue_value or 0 ) : self . _print ( '+' ) elif position_bytes <= ( self . continue_value or 0 ) + self . current_value : self . _print ( '=' ) else : self . _print ( ' ' ) self . _print ( ']' ) | Print a progress bar . |
12,480 | def _print_duration ( self ) : duration = int ( time . time ( ) - self . _start_time ) self . _print ( datetime . timedelta ( seconds = duration ) ) | Print the elapsed download time . |
12,481 | def _print_speed ( self ) : if self . _bandwidth_meter . num_samples : speed = self . _bandwidth_meter . speed ( ) if self . _human_format : file_size_str = wpull . string . format_size ( speed ) else : file_size_str = '{:.1f} b' . format ( speed * 8 ) speed_str = _ ( '{preformatted_file_size}/s' ) . format ( preformatted_file_size = file_size_str ) else : speed_str = _ ( '-- B/s' ) self . _print ( speed_str ) | Print the current speed . |
12,482 | def _print_percent ( self ) : fraction_done = ( ( self . continue_value or 0 + self . current_value ) / self . max_value ) self . _print ( '{fraction_done:.1%}' . format ( fraction_done = fraction_done ) ) | Print how much is done in percentage . |
12,483 | def register ( self , name : str ) : if name in self . _callbacks : raise ValueError ( 'Hook already registered' ) self . _callbacks [ name ] = None if self . _event_dispatcher is not None : self . _event_dispatcher . register ( name ) | Register hooks that can be connected . |
12,484 | def unregister ( self , name : str ) : del self . _callbacks [ name ] if self . _event_dispatcher is not None : self . _event_dispatcher . unregister ( name ) | Unregister hook . |
12,485 | def connect ( self , name , callback ) : if not self . _callbacks [ name ] : self . _callbacks [ name ] = callback else : raise HookAlreadyConnectedError ( 'Callback hook already connected.' ) | Add callback to hook . |
12,486 | def call ( self , name : str , * args , ** kwargs ) : if self . _event_dispatcher is not None : self . _event_dispatcher . notify ( name , * args , ** kwargs ) if self . _callbacks [ name ] : return self . _callbacks [ name ] ( * args , ** kwargs ) else : raise HookDisconnected ( 'No callback is connected.' ) | Invoke the callback . |
12,487 | def get_crawl_delay ( self , user_agent ) : if ( PY_MAJOR_VERSION < 3 ) and ( not isinstance ( user_agent , unicode ) ) : user_agent = user_agent . decode ( ) for ruleset in self . __rulesets : if ruleset . does_user_agent_match ( user_agent ) : return ruleset . crawl_delay return None | Returns a float representing the crawl delay specified for this user agent or None if the crawl delay was unspecified or not a float . |
12,488 | def parse ( self , s ) : self . _sitemaps = [ ] self . __rulesets = [ ] if ( PY_MAJOR_VERSION > 2 ) and ( isinstance ( s , bytes ) or isinstance ( s , bytearray ) ) or ( PY_MAJOR_VERSION == 2 ) and ( not isinstance ( s , unicode ) ) : s = s . decode ( "iso-8859-1" ) s = _end_of_line_regex . sub ( "\n" , s ) lines = s . split ( "\n" ) previous_line_was_a_user_agent = False current_ruleset = None for line in lines : line = line . strip ( ) if line and line [ 0 ] == '#' : pass else : i = line . find ( "#" ) if i != - 1 : line = line [ : i ] line = line . strip ( ) if not line : if current_ruleset and current_ruleset . is_not_empty ( ) : self . __rulesets . append ( current_ruleset ) current_ruleset = None previous_line_was_a_user_agent = False else : matches = _directive_regex . findall ( line ) if matches : field , data = matches [ 0 ] field = field . lower ( ) data = _scrub_data ( data ) if field in ( "useragent" , "user-agent" ) : if previous_line_was_a_user_agent : if current_ruleset and data : current_ruleset . add_robot_name ( data ) else : if current_ruleset and current_ruleset . is_not_empty ( ) : self . __rulesets . append ( current_ruleset ) current_ruleset = _Ruleset ( ) if data : current_ruleset . add_robot_name ( data ) previous_line_was_a_user_agent = True elif field == "allow" : previous_line_was_a_user_agent = False if current_ruleset : current_ruleset . add_allow_rule ( data ) elif field == "sitemap" : previous_line_was_a_user_agent = False self . _sitemaps . append ( data ) elif field == "crawl-delay" : previous_line_was_a_user_agent = False if current_ruleset : try : current_ruleset . crawl_delay = float ( data ) except ValueError : pass else : previous_line_was_a_user_agent = False if current_ruleset : current_ruleset . add_disallow_rule ( data ) if current_ruleset and current_ruleset . is_not_empty ( ) : self . __rulesets . append ( current_ruleset ) not_defaults = [ r for r in self . __rulesets if not r . is_default ( ) ] defaults = [ r for r in self . __rulesets if r . is_default ( ) ] self . __rulesets = not_defaults + defaults | Parses the passed string as a set of robots . txt rules .
12,489 | def close_stream_on_error ( func ) : @ asyncio . coroutine @ functools . wraps ( func ) def wrapper ( self , * args , ** kwargs ) : with wpull . util . close_on_error ( self . close ) : return ( yield from func ( self , * args , ** kwargs ) ) return wrapper | Decorator to close stream on error . |
12,490 | def url_to_filename ( url , index = 'index.html' , alt_char = False ) : assert isinstance ( url , str ) , 'Expect str. Got {}.' . format ( type ( url ) ) url_split_result = urllib . parse . urlsplit ( url ) filename = url_split_result . path . split ( '/' ) [ - 1 ] if not filename : filename = index if url_split_result . query : if alt_char : query_delim = '@' else : query_delim = '?' filename = '{0}{1}{2}' . format ( filename , query_delim , url_split_result . query ) return filename | Return a filename from a URL . |
12,491 | def url_to_dir_parts ( url , include_protocol = False , include_hostname = False , alt_char = False ) : assert isinstance ( url , str ) , 'Expect str. Got {}.' . format ( type ( url ) ) url_split_result = urllib . parse . urlsplit ( url ) parts = [ ] if include_protocol : parts . append ( url_split_result . scheme ) if include_hostname : hostname = url_split_result . hostname if url_split_result . port : if alt_char : port_delim = '+' else : port_delim = ':' hostname = '{0}{1}{2}' . format ( hostname , port_delim , url_split_result . port ) parts . append ( hostname ) for path_part in url_split_result . path . split ( '/' ) : if path_part : parts . append ( path_part ) if not url . endswith ( '/' ) and parts : parts . pop ( ) return parts | Return a list of directory parts from a URL . |
12,492 | def safe_filename ( filename , os_type = 'unix' , no_control = True , ascii_only = True , case = None , encoding = 'utf8' , max_length = None ) : assert isinstance ( filename , str ) , 'Expect str. Got {}.' . format ( type ( filename ) ) if filename in ( '.' , os . curdir ) : new_filename = '%2E' elif filename in ( '.' , os . pardir ) : new_filename = '%2E%2E' else : unix = os_type == 'unix' windows = os_type == 'windows' encoder_args = ( unix , no_control , windows , ascii_only ) if encoder_args not in _encoder_cache : _encoder_cache [ encoder_args ] = PercentEncoder ( unix = unix , control = no_control , windows = windows , ascii_ = ascii_only ) encoder = _encoder_cache [ encoder_args ] encoded_filename = filename . encode ( encoding ) new_filename = encoder . quote ( encoded_filename ) . decode ( encoding ) if os_type == 'windows' : if new_filename [ - 1 ] in ' .' : new_filename = '{0}{1:02X}' . format ( new_filename [ : - 1 ] , new_filename [ - 1 ] ) if max_length and len ( new_filename ) > max_length : hash_obj = hashlib . sha1 ( new_filename . encode ( encoding ) ) new_length = max ( 0 , max_length - 8 ) new_filename = '{0}{1}' . format ( new_filename [ : new_length ] , hash_obj . hexdigest ( ) [ : 8 ] ) if case == 'lower' : new_filename = new_filename . lower ( ) elif case == 'upper' : new_filename = new_filename . upper ( ) return new_filename | Return a safe filename or path part . |
12,493 | def anti_clobber_dir_path ( dir_path , suffix = '.d' ) : dir_path = os . path . normpath ( dir_path ) parts = dir_path . split ( os . sep ) for index in range ( len ( parts ) ) : test_path = os . sep . join ( parts [ : index + 1 ] ) if os . path . isfile ( test_path ) : parts [ index ] += suffix return os . sep . join ( parts ) return dir_path | Return a directory path free of filenames . |
12,494 | def parse_content_disposition ( text ) : match = re . search ( r'filename\s*=\s*(.+)' , text , re . IGNORECASE ) if not match : return filename = match . group ( 1 ) if filename [ 0 ] in '"\'' : match = re . match ( r'(.)(.+)(?!\\)\1' , filename ) if match : filename = match . group ( 2 ) . replace ( '\\"' , '"' ) return filename else : filename = filename . partition ( ';' ) [ 0 ] . strip ( ) return filename | Parse a Content - Disposition header value . |
12,495 | def safe_filename ( self , part ) : return safe_filename ( part , os_type = self . _os_type , no_control = self . _no_control , ascii_only = self . _ascii_only , case = self . _case , max_length = self . _max_filename_length , ) | Return a safe filename or file part . |
12,496 | def _warn_unsafe_options ( cls , args ) : enabled_options = [ ] for option_name in cls . UNSAFE_OPTIONS : if getattr ( args , option_name ) : enabled_options . append ( option_name ) if enabled_options : _logger . warning ( __ ( _ ( 'The following unsafe options are enabled: {list}.' ) , list = enabled_options ) ) _logger . warning ( _ ( 'The use of unsafe options may lead to unexpected behavior ' 'or file corruption.' ) ) if not args . retr_symlinks : _logger . warning ( _ ( 'The --retr-symlinks=off option is a security risk.' ) ) | Print warnings about any enabled hazardous options . |
12,497 | def _warn_silly_options ( cls , args ) : if 'page-requisites' in args . span_hosts_allow and not args . page_requisites : _logger . warning ( _ ( 'Spanning hosts is allowed for page requisites, ' 'but the page requisites option is not on.' ) ) if 'linked-pages' in args . span_hosts_allow and not args . recursive : _logger . warning ( _ ( 'Spanning hosts is allowed for linked pages, ' 'but the recursive option is not on.' ) ) if args . warc_file and ( args . http_proxy or args . https_proxy ) : _logger . warning ( _ ( 'WARC specifications do not handle proxies.' ) ) if ( args . password or args . ftp_password or args . http_password or args . proxy_password ) and args . warc_file : _logger . warning ( _ ( 'Your password is recorded in the WARC file.' ) ) | Print warnings about any options that may be silly . |
12,498 | def parse_month ( text : str ) -> int : text = text . lower ( ) try : return MONTH_MAP [ text ] except KeyError : pass try : return MONTH_MAP [ text [ : 3 ] ] except KeyError : pass raise ValueError ( 'Month {} not found.' . format ( repr ( text ) ) ) | Parse month string into integer . |
12,499 | def y2k ( year : int ) -> int : assert 0 <= year <= 99 , 'Not a two digit year {}' . format ( year ) return year + 1000 if year >= 69 else year + 2000 | Convert two digit year to four digit year . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.