idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
7,400
def af_for_address(text):
    """Determine the address family of a textual-form network address.

    @param text: textual IP address
    @return: AF_INET or AF_INET6
    @raise ValueError: if the address is neither IPv4 nor IPv6
    """
    # Fix: drop the unused `junk` locals; only the parse success matters.
    try:
        dns.ipv4.inet_aton(text)
        return AF_INET
    except Exception:
        try:
            dns.ipv6.inet_aton(text)
            return AF_INET6
        except Exception:
            raise ValueError
Determine the address family of a textual - form network address .
7,401
def is_multicast(text):
    """Is the textual-form network address a multicast address?"""
    try:
        # IPv4 multicast: first octet in the 224..239 range.
        packed = dns.ipv4.inet_aton(text)
        return 224 <= ord(packed[0]) <= 239
    except Exception:
        try:
            # IPv6 multicast: first octet is 0xff.
            packed = dns.ipv6.inet_aton(text)
            return ord(packed[0]) == 255
        except Exception:
            raise ValueError
Is the textual - form network address a multicast address?
7,402
def _stack_format(stack):
    """Format a stack trace to a message."""
    out = StringIO()
    out.write('Traceback:')
    out.write(os.linesep)
    # Most recent frame first.
    for frame, fname, lineno, method, lines, dummy in reversed(stack):
        out.write(' File %r, line %d, in %s' % (fname, lineno, method))
        out.write(os.linesep)
        out.write(' %s' % lines[0].lstrip())
        if PRINT_LOCALVARS:
            # Dump local variables of the frame; repr() may itself fail.
            for key, value in frame.f_locals.items():
                out.write(" %s = " % key)
                try:
                    out.write(repr(value))
                    out.write(os.linesep)
                except Exception:
                    out.write("error in repr() call%s" % os.linesep)
    return out.getvalue()
Format a stack trace to a message .
7,403
def debug(logname, msg, *args, **kwargs):
    """Log a debug message."""
    logger = logging.getLogger(logname)
    # Avoid formatting cost when the level is disabled.
    if logger.isEnabledFor(logging.DEBUG):
        _log(logger.debug, msg, args, **kwargs)
Log a debug message .
7,404
def info(logname, msg, *args, **kwargs):
    """Log an informational message."""
    logger = logging.getLogger(logname)
    # Avoid formatting cost when the level is disabled.
    if logger.isEnabledFor(logging.INFO):
        _log(logger.info, msg, args, **kwargs)
Log an informational message .
7,405
def warn(logname, msg, *args, **kwargs):
    """Log a warning."""
    log = logging.getLogger(logname)
    # Fix: Logger.warn is a deprecated alias of Logger.warning.
    if log.isEnabledFor(logging.WARN):
        _log(log.warning, msg, args, **kwargs)
Log a warning .
7,406
def error(logname, msg, *args, **kwargs):
    """Log an error."""
    logger = logging.getLogger(logname)
    # Avoid formatting cost when the level is disabled.
    if logger.isEnabledFor(logging.ERROR):
        _log(logger.error, msg, args, **kwargs)
Log an error .
7,407
def critical(logname, msg, *args, **kwargs):
    """Log a critical error."""
    logger = logging.getLogger(logname)
    # Avoid formatting cost when the level is disabled.
    if logger.isEnabledFor(logging.CRITICAL):
        _log(logger.critical, msg, args, **kwargs)
Log a critical error .
7,408
def get_files(dirname):
    """Yield directory entries; regular files as-is, directories with a
    trailing slash. Symbolic links are skipped."""
    for entry in os.listdir(dirname):
        path = os.path.join(dirname, entry)
        if os.path.islink(path):
            continue
        if os.path.isfile(path):
            yield entry
        elif os.path.isdir(path):
            yield entry + "/"
Get iterator of entries in directory. Only regular files and directories are allowed; symbolic links are skipped.
7,409
def get_nt_filename(path):
    """Return case sensitive filename for NT path."""
    # NOTE(review): os.path.splitunc is Windows/Python2-only; splits off a UNC share prefix.
    unc, rest = os.path.splitunc(path)
    head, tail = os.path.split(rest)
    if not tail:
        # Nothing left to case-correct (drive or share root).
        return path
    for fname in os.listdir(unc + head):
        if fname.lower() == tail.lower():
            # Recurse on the parent so every path component gets its on-disk case.
            return os.path.join(get_nt_filename(unc + head), fname)
    log.error(LOG_CHECK, "could not find %r in %r", tail, head)
    return path
Return case sensitive filename for NT path .
7,410
def get_os_filename(path):
    """Return filesystem path for given URL path."""
    if os.name == 'nt':
        path = prepare_urlpath_for_nt(path)
    fspath = urllib.url2pathname(fileutil.pathencode(path))
    # A bare drive such as "C:" needs a trailing separator to be usable.
    if os.name == 'nt' and len(fspath) == 2 and fspath.endswith(':'):
        fspath += os.sep
    return fspath
Return filesystem path for given URL path .
7,411
def is_absolute_path(path):
    """Check if the given path is absolute.

    On Windows absolute paths start with a drive letter; on all other
    systems they start with a slash.
    """
    if os.name != 'nt':
        return path.startswith("/")
    if re.search(r"^[a-zA-Z]:", path):
        return True
    # Normalize backslashes before checking for a rooted path.
    return path.replace("\\", "/").startswith("/")
Check if given path is absolute . On Windows absolute paths start with a drive letter . On all other systems absolute paths start with a slash .
7,412
def init(self, base_ref, base_url, parent_url, recursion_level, aggregate, line, column, page, name, url_encoding, extern):
    """Initialize the scheme."""
    # Delegate all bookkeeping to the base class, then pin the scheme to 'file'.
    super(FileUrl, self).init(base_ref, base_url, parent_url, recursion_level, aggregate, line, column, page, name, url_encoding, extern)
    self.scheme = u'file'
Initialize the scheme .
7,413
def add_size_info(self):
    """Get size of file content and modification time from filename path."""
    # Directories have no meaningful size/mtime for this check.
    if self.is_directory():
        return
    fname = self.get_os_filename()
    self.size = fileutil.get_size(fname)
    self.modified = datetime.utcfromtimestamp(fileutil.get_mtime(fname))
Get size of file content and modification time from filename path .
7,414
def check_connection(self):
    """Try to open the local file. Under NT systems the case
    sensitivity is checked."""
    parent = self.parent_url
    if parent is not None and not parent.startswith(u"file:"):
        raise LinkCheckerError(_("local files are only checked without parent URL or when the parent URL is also a file"))
    if self.is_directory():
        self.set_result(_("directory"))
        return
    self.url_connection = urlopen(fileutil.pathencode(self.url))
    self.check_case_sensitivity()
Try to open the local file . Under NT systems the case sensitivity is checked .
7,415
def check_case_sensitivity(self):
    """Check if url and windows path name match cases, else warn since
    copying to a case-sensitive web server would break links."""
    # Only relevant on Windows filesystems.
    if os.name != 'nt':
        return
    path = self.get_os_filename()
    realpath = get_nt_filename(path)
    if path == realpath:
        return
    self.add_warning(_("The URL path %(path)r is not the same as the " "system path %(realpath)r. You should always use " "the system path in URLs.") % {"path": path, "realpath": realpath}, tag=WARN_FILE_SYSTEM_PATH)
Check if url and windows path name match cases else there might be problems when copying such files on web servers that are case sensitive .
7,416
def read_content(self):
    """Return file content, or in case of directories a dummy HTML file
    with links to the files."""
    if not self.is_directory():
        return super(FileUrl, self).read_content()
    data = get_index_html(get_files(self.get_os_filename()))
    if isinstance(data, unicode):
        # Generated index must be bytes for the downstream parser.
        data = data.encode("iso8859-1", "ignore")
    return data
Return file content or in case of directories a dummy HTML file with links to the files .
7,417
def is_directory(self):
    """Check if file is a directory (symlinked directories excluded)."""
    fname = self.get_os_filename()
    return os.path.isdir(fname) and not os.path.islink(fname)
Check if file is a directory .
7,418
def set_content_type(self):
    """Set URL content type, or an empty string if the content type
    could not be found."""
    if not self.url:
        self.content_type = u""
        return
    self.content_type = mimeutil.guess_mimetype(self.url, read=self.get_content)
Return URL content type or an empty string if content type could not be found .
7,419
def add_url(self, url, line=0, column=0, page=0, name=u"", base=None):
    """If a local webroot directory is configured, replace absolute URLs
    with it. After that queue the URL data for checking."""
    root = self.aggregate.config["localwebroot"]
    if root and url and url.startswith(u"/"):
        # Map the site-absolute URL onto the configured webroot.
        url = root + url[1:]
        log.debug(LOG_CHECK, "Applied local webroot `%s' to `%s'.", root, url)
    super(FileUrl, self).add_url(url, line=line, column=column, page=page, name=name, base=base)
If a local webroot directory is configured replace absolute URLs with it . After that queue the URL data for checking .
7,420
def new_request_session(config, cookies):
    """Create a new request session."""
    session = requests.Session()
    if cookies:
        session.cookies = cookies
    session.max_redirects = config["maxhttpredirects"]
    session.headers.update({"User-Agent": config["useragent"]})
    cookiefile = config["cookiefile"]
    if cookiefile:
        # Merge file-based cookies into whatever the session already holds.
        for cookie in cookies.from_file(cookiefile):
            session.cookies = requests.cookies.merge_cookies(session.cookies, cookie)
    return session
Create a new request session .
7,421
def visit_loginurl(self):
    """Check for a login URL and visit it.

    Fetches the configured login page, fills the login form with the
    configured user/password and extra fields, posts it, and stores the
    resulting session cookies. Raises LinkCheckerError if no cookies
    were set by the login.
    """
    url = self.config["loginurl"]
    if not url:
        # No login configured; nothing to do.
        return
    user, password = self.config.get_user_password(url)
    session = requests.Session()
    response = session.get(url)
    cgiuser = self.config["loginuserfield"]
    cgipassword = self.config["loginpasswordfield"]
    # Locate the login form by its user/password field names.
    form = formsearch.search_form(response.content, cgiuser, cgipassword, encoding=response.encoding)
    form.data[cgiuser] = user
    form.data[cgipassword] = password
    for key, value in self.config["loginextrafields"].items():
        form.data[key] = value
    # The form action may be relative to the login page.
    formurl = urlparse.urljoin(url, form.url)
    response = session.post(formurl, data=form.data)
    self.cookies = session.cookies
    if len(self.cookies) == 0:
        raise LinkCheckerError("No cookies set by login URL %s" % url)
Check for a login URL and visit it .
7,422
def start_threads(self):
    """Spawn threads for URL checking and status printing.

    With a thread count of zero, checking runs synchronously in the
    current thread instead.
    """
    if self.config["status"]:
        # Periodic status printer thread.
        t = status.Status(self, self.config["status_wait_seconds"])
        t.start()
        self.threads.append(t)
    if self.config["maxrunseconds"]:
        # Watchdog thread that interrupts after the configured runtime.
        t = interrupt.Interrupt(self.config["maxrunseconds"])
        t.start()
        self.threads.append(t)
    num = self.config["threads"]
    if num > 0:
        for dummy in range(num):
            t = checker.Checker(self.urlqueue, self.logger, self.add_request_session)
            self.threads.append(t)
            t.start()
    else:
        # Synchronous mode: one request session for the current thread.
        self.request_sessions[thread.get_ident()] = new_request_session(self.config, self.cookies)
        checker.check_urls(self.urlqueue, self.logger)
Spawn threads for URL checking and status printing .
7,423
def add_request_session(self):
    """Add a request session for the current thread."""
    self.request_sessions[thread.get_ident()] = new_request_session(self.config, self.cookies)
Add a request session for current thread .
7,424
def wait_for_host(self, host):
    """Throttle requests to one host."""
    now = time.time()
    due_time = self.times.get(host)
    if due_time is not None and due_time > now:
        # Sleep until this host's earliest allowed request time.
        time.sleep(due_time - now)
        now = time.time()
    # Schedule the next allowed request with a randomized delay.
    self.times[host] = now + random.uniform(self.wait_time_min, self.wait_time_max)
Throttle requests to one host .
7,425
def print_active_threads(self):
    """Log all currently active checker threads."""
    debug = log.is_debug(LOG_CHECK)
    if debug:
        first = True
        for name in self.get_check_threads():
            if first:
                # Header only once, and only when at least one thread exists.
                log.info(LOG_CHECK, _("These URLs are still active:"))
                first = False
            # Strip the "CheckThread-" prefix (12 chars) to show the URL.
            log.info(LOG_CHECK, name[12:])
    args = dict(num=len([x for x in self.threads if x.getName().startswith("CheckThread-")]), timeout=strformat.strduration_long(self.config["aborttimeout"]), )
    log.info(LOG_CHECK, _("%(num)d URLs are still active. After a timeout of %(timeout)s the active URLs will stop.") % args)
Log all currently active threads .
7,426
def get_check_threads(self):
    """Yield the names of all checker threads."""
    for worker in self.threads:
        worker_name = worker.getName()
        if worker_name.startswith("CheckThread-"):
            yield worker_name
Return iterator of checker threads .
7,427
def abort(self):
    """Print still-active URLs and empty the URL queue."""
    self.print_active_threads()
    self.cancel()
    timeout = self.config["aborttimeout"]
    try:
        self.urlqueue.join(timeout=timeout)
    except urlqueue.Timeout:
        # Give up waiting and force application shutdown.
        log.warn(LOG_CHECK, "Abort timed out after %d seconds, stopping application." % timeout)
        raise KeyboardInterrupt()
Print still - active URLs and empty the URL queue .
7,428
def remove_stopped_threads(self):
    """Remove the stopped threads from the internal thread list."""
    alive = [worker for worker in self.threads if worker.is_alive()]
    self.threads = alive
Remove the stopped threads from the internal thread list .
7,429
def finish(self):
    """Wait for checker threads to finish."""
    # Cancel pending work first if anything is still queued.
    if not self.urlqueue.empty():
        self.cancel()
    for worker in self.threads:
        worker.stop()
Wait for checker threads to finish .
7,430
def end_log_output(self, **kwargs):
    """Print ending output to log."""
    # Add summary statistics before delegating to the logger.
    kwargs["downloaded_bytes"] = self.downloaded_bytes
    kwargs["num_urls"] = len(self.result_cache)
    self.logger.end_log_output(**kwargs)
Print ending output to log .
7,431
def x509_to_dict(x509):
    """Parse an x509 pyopenssl object to a dictionary with keys
    subject, subjectAltName and optional notAfter.
    """
    # Local import: pyopenssl support is optional in requests/urllib3.
    from requests.packages.urllib3.contrib.pyopenssl import get_subj_alt_name
    res = {'subject': ((('commonName', x509.get_subject().CN), ), ), 'subjectAltName': [('DNS', value) for value in get_subj_alt_name(x509)]}
    notAfter = x509.get_notAfter()
    if notAfter is not None:
        parsedtime = asn1_generaltime_to_seconds(notAfter)
        if parsedtime is not None:
            res['notAfter'] = parsedtime.strftime('%b %d %H:%M:%S %Y')
            if parsedtime.tzinfo is None:
                # Naive timestamps are assumed to be GMT per ASN.1 GENERALIZEDTIME.
                res['notAfter'] += ' GMT'
        else:
            # Could not parse; keep the raw ASN.1 string.
            res['notAfter'] = notAfter
    return res
Parse a x509 pyopenssl object to a dictionary with keys subject subjectAltName and optional notAfter .
7,432
def asn1_generaltime_to_seconds(timestr):
    """Parse an ASN.1 GENERALIZEDTIME string into a datetime, or None.

    The given string has one of the formats YYYYMMDDhhmmssZ,
    YYYYMMDDhhmmss+hhmm or YYYYMMDDhhmmss-hhmm.
    """
    base_format = "%Y%m%d%H%M%S"
    for suffix in ('Z', '%z'):
        try:
            return datetime.strptime(timestr, base_format + suffix)
        except ValueError:
            continue
    return None
The given string has one of the following formats YYYYMMDDhhmmssZ YYYYMMDDhhmmss + hhmm YYYYMMDDhhmmss - hhmm
7,433
def has_header_value(headers, name, value):
    """Look in headers for a specific header name and value.

    Both name and value are compared case-insensitively.
    """
    wanted_name = name.lower()
    wanted_value = value.lower()
    return any(hname.lower() == wanted_name and hvalue.lower() == wanted_value
               for hname, hvalue in headers)
Look in headers for a specific header name and value . Both name and value are case insensitive .
7,434
def http_persistent(response):
    """See if the HTTP connection can be kept open according to the
    header values found in the response object."""
    headers = response.getheaders()
    if response.version == 11:
        # HTTP/1.1 is persistent unless explicitly closed.
        return not has_header_value(headers, 'Connection', 'Close')
    # HTTP/1.0 needs an explicit keep-alive.
    return has_header_value(headers, "Connection", "Keep-Alive")
See if the HTTP connection can be kept open according to the header values found in the response object.
7,435
def http_keepalive(headers):
    """Get HTTP keepalive value, either from the Keep-Alive header or a
    default value."""
    value = headers.get("Keep-Alive")
    if value is None:
        return DEFAULT_KEEPALIVE
    try:
        # Expected form "timeout=NNN"; skip the 8-char "timeout=" prefix.
        return int(value[8:].strip())
    except (ValueError, OverflowError):
        return DEFAULT_KEEPALIVE
Get HTTP keepalive value either from the Keep - Alive header or a default value .
7,436
def print_plugins(folders, exit_code=0):
    """Print available plugins and exit."""
    modules = plugins.get_plugin_modules(folders)
    for cls in sorted(plugins.get_plugin_classes(modules), key=lambda c: c.__name__):
        print(cls.__name__)
        print(strformat.indent(strformat.wrap(cls.__doc__, 80)))
        print()
    sys.exit(exit_code)
Print available plugins and exit .
7,437
def print_usage(msg, exit_code=2):
    """Print a program msg text to stderr and exit."""
    prog = sys.argv[0]
    print(_("Error: %(msg)s") % {"msg": msg}, file=console.stderr)
    print(_("Execute '%(program)s -h' for help") % {"program": prog}, file=console.stderr)
    sys.exit(exit_code)
Print a program msg text to stderr and exit .
7,438
def aggregate_url(aggregate, url, err_exit_code=2):
    """Append given commandline URL to input queue."""
    # Normalize shorthand URLs (e.g. missing scheme) before queueing.
    url = checker.guess_url(url)
    url_data = checker.get_url_from(url, 0, aggregate, extern=(0, 0))
    aggregate.urlqueue.put(url_data)
Append given commandline URL to input queue .
7,439
def print_help(self, file=sys.stdout):
    """Print a help message to stdout, paginating on interactive terminals."""
    msg = console.encode(self.format_help())
    if not fileutil.is_tty(file):
        print(msg, file=file)
    else:
        strformat.paginate(msg)
Print a help message to stdout .
7,440
def from_wire(wire, keyring=None, request_mac='', xfr=False, origin=None, tsig_ctx=None, multi=False, first=True, question_only=False, one_rr_per_rrset=False):
    """Convert a DNS wire format message into a message object."""
    message = Message(id=0)
    message.keyring = keyring
    message.request_mac = request_mac
    message.xfr = xfr
    message.origin = origin
    message.tsig_ctx = tsig_ctx
    message.multi = multi
    message.first = first
    # The reader fills in header, question and sections from the wire bytes.
    _WireReader(wire, message, question_only, one_rr_per_rrset).read()
    return message
Convert a DNS wire format message into a message object .
7,441
def from_text(text):
    """Convert the text format message into a message object."""
    message = Message()
    _TextReader(text, message).read()
    return message
Convert the text format message into a message object .
7,442
def from_file(f):
    """Read the next text format message from the specified file.

    *f* may be a file name or an open file object; a name is opened and
    closed by this function.
    """
    if sys.hexversion >= 0x02030000:
        # Python >= 2.3: basestring covers str and unicode; 'U' enables
        # universal newline handling.
        str_type = basestring
        opts = 'rU'
    else:
        str_type = str
        opts = 'r'
    if isinstance(f, str_type):
        f = file(f, opts)
        want_close = True
    else:
        want_close = False
    try:
        m = from_text(f)
    finally:
        # Only close files this function opened itself.
        if want_close:
            f.close()
    return m
Read the next text format message from the specified file .
7,443
def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None, want_dnssec=False):
    """Make a query message."""
    text_types = (str, unicode)
    # Accept textual names/types/classes and convert them.
    if isinstance(qname, text_types):
        qname = dns.name.from_text(qname)
    if isinstance(rdtype, text_types):
        rdtype = dns.rdatatype.from_text(rdtype)
    if isinstance(rdclass, text_types):
        rdclass = dns.rdataclass.from_text(rdclass)
    m = Message()
    m.flags |= dns.flags.RD
    m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True)
    m.use_edns(use_edns)
    m.want_dnssec(want_dnssec)
    return m
Make a query message .
7,444
def make_response(query, recursion_available=False, our_payload=8192):
    """Make a message which is a response for the specified query.

    The returned message is a response skeleton: all infrastructure,
    no content.
    """
    if query.flags & dns.flags.QR:
        raise dns.exception.FormError('specified query message is not a query')
    response = dns.message.Message(query.id)
    # Copy the RD flag from the query and mark the message as a response.
    response.flags = dns.flags.QR | (query.flags & dns.flags.RD)
    if recursion_available:
        response.flags |= dns.flags.RA
    response.set_opcode(query.opcode())
    response.question = list(query.question)
    if query.edns >= 0:
        response.use_edns(0, 0, our_payload, query.payload)
    if query.keyname is not None:
        # Mirror TSIG parameters so the response can be signed/verified.
        response.keyname = query.keyname
        response.keyring = query.keyring
        response.request_mac = query.mac
    return response
Make a message which is a response for the specified query . The message returned is really a response skeleton ; it has all of the infrastructure required of a response but none of the content .
7,445
def to_text(self, origin=None, relativize=True, **kw):
    """Convert the message to text (Python 2 `print >>` syntax)."""
    s = cStringIO.StringIO()
    print >> s, 'id %d' % self.id
    print >> s, 'opcode %s' % dns.opcode.to_text(dns.opcode.from_flags(self.flags))
    rc = dns.rcode.from_flags(self.flags, self.ednsflags)
    print >> s, 'rcode %s' % dns.rcode.to_text(rc)
    print >> s, 'flags %s' % dns.flags.to_text(self.flags)
    if self.edns >= 0:
        # EDNS details only when EDNS is in use.
        print >> s, 'edns %s' % self.edns
        if self.ednsflags != 0:
            print >> s, 'eflags %s' % dns.flags.edns_to_text(self.ednsflags)
        print >> s, 'payload', self.payload
    # UPDATE messages use different section labels than queries.
    is_update = dns.opcode.is_update(self.flags)
    if is_update:
        print >> s, ';ZONE'
    else:
        print >> s, ';QUESTION'
    for rrset in self.question:
        print >> s, rrset.to_text(origin, relativize, **kw)
    if is_update:
        print >> s, ';PREREQ'
    else:
        print >> s, ';ANSWER'
    for rrset in self.answer:
        print >> s, rrset.to_text(origin, relativize, **kw)
    if is_update:
        print >> s, ';UPDATE'
    else:
        print >> s, ';AUTHORITY'
    for rrset in self.authority:
        print >> s, rrset.to_text(origin, relativize, **kw)
    print >> s, ';ADDITIONAL'
    for rrset in self.additional:
        print >> s, rrset.to_text(origin, relativize, **kw)
    # Strip the final newline.
    return s.getvalue()[:-1]
Convert the message to text .
7,446
def is_response(self, other):
    """Is *other* a response to this message?"""
    # Must be flagged as a response, with matching id and opcode.
    if other.flags & dns.flags.QR == 0 or self.id != other.id or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags):
        return False
    # Error responses match without comparing questions.
    if dns.rcode.from_flags(other.flags, other.ednsflags) != dns.rcode.NOERROR:
        return True
    # UPDATE responses are not required to echo the zone section.
    if dns.opcode.is_update(self.flags):
        return True
    # Otherwise the question sections must contain the same entries.
    for n in self.question:
        if n not in other.question:
            return False
    for n in other.question:
        if n not in self.question:
            return False
    return True
Is other a response to self?
7,447
def find_rrset(self, section, name, rdclass, rdtype, covers=dns.rdatatype.NONE, deleting=None, create=False, force_unique=False):
    """Find the RRset with the given attributes in the specified section.

    @raise KeyError: if no match exists and *create* is false.
    """
    key = (self.section_number(section), name, rdclass, rdtype, covers, deleting)
    if not force_unique:
        if not self.index is None:
            # Fast path: use the section index when available.
            rrset = self.index.get(key)
            if not rrset is None:
                return rrset
        else:
            # Slow path: linear scan of the section.
            for rrset in section:
                if rrset.match(name, rdclass, rdtype, covers, deleting):
                    return rrset
    if not create:
        raise KeyError
    # Create a fresh RRset and register it.
    rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting)
    section.append(rrset)
    if not self.index is None:
        self.index[key] = rrset
    return rrset
Find the RRset with the given attributes in the specified section .
7,448
def get_rrset(self, section, name, rdclass, rdtype, covers=dns.rdatatype.NONE, deleting=None, create=False, force_unique=False):
    """Get the RRset with the given attributes in the specified section,
    returning None instead of raising KeyError when absent."""
    try:
        return self.find_rrset(section, name, rdclass, rdtype, covers, deleting, create, force_unique)
    except KeyError:
        return None
Get the RRset with the given attributes in the specified section .
7,449
def to_wire(self, origin=None, max_size=0, **kw):
    """Return a string containing the message in DNS compressed wire format."""
    if max_size == 0:
        # Default to the requestor's payload size, else the protocol max.
        if self.request_payload != 0:
            max_size = self.request_payload
        else:
            max_size = 65535
    # Clamp to the legal DNS message size range.
    if max_size < 512:
        max_size = 512
    elif max_size > 65535:
        max_size = 65535
    r = dns.renderer.Renderer(self.id, self.flags, max_size, origin)
    for rrset in self.question:
        r.add_question(rrset.name, rrset.rdtype, rrset.rdclass)
    for rrset in self.answer:
        r.add_rrset(dns.renderer.ANSWER, rrset, **kw)
    for rrset in self.authority:
        r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw)
    if self.edns >= 0:
        r.add_edns(self.edns, self.ednsflags, self.payload, self.options)
    for rrset in self.additional:
        r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw)
    r.write_header()
    if not self.keyname is None:
        # Sign the rendered message with TSIG and remember the MAC.
        r.add_tsig(self.keyname, self.keyring[self.keyname], self.fudge, self.original_id, self.tsig_error, self.other_data, self.request_mac, self.keyalgorithm)
        self.mac = r.mac
    return r.get_wire()
Return a string containing the message in DNS compressed wire format .
7,450
def use_tsig(self, keyring, keyname=None, fudge=300, original_id=None, tsig_error=0, other_data='', algorithm=dns.tsig.default_algorithm):
    """Arrange for a TSIG signature with the specified keyring and
    keyname to be added when the message is sent."""
    self.keyring = keyring
    if keyname is None:
        # Default to an arbitrary key from the keyring (py2 keys() is a list).
        self.keyname = self.keyring.keys()[0]
    else:
        if isinstance(keyname, (str, unicode)):
            keyname = dns.name.from_text(keyname)
        self.keyname = keyname
    self.keyalgorithm = algorithm
    self.fudge = fudge
    if original_id is None:
        self.original_id = self.id
    else:
        self.original_id = original_id
    self.tsig_error = tsig_error
    self.other_data = other_data
When sending, a TSIG signature using the specified keyring and keyname will be added.
7,451
def use_edns(self, edns=0, ednsflags=0, payload=1280, request_payload=None, options=None):
    """Configure EDNS behavior.

    *edns* may be None/False (disable), True (version 0), or an EDNS
    version number; negative disables EDNS entirely.
    """
    if edns is None or edns is False:
        edns = -1
    if edns is True:
        edns = 0
    if request_payload is None:
        request_payload = payload
    if edns < 0:
        # EDNS disabled: zero out all related state.
        ednsflags = 0
        payload = 0
        request_payload = 0
        options = []
    else:
        # Store the EDNS version in the upper flag bits (py2 long literal).
        ednsflags &= 0xFF00FFFFL
        ednsflags |= (edns << 16)
        if options is None:
            options = []
    self.edns = edns
    self.ednsflags = ednsflags
    self.payload = payload
    self.options = options
    self.request_payload = request_payload
Configure EDNS behavior .
7,452
def want_dnssec(self, wanted=True):
    """Enable or disable the DNSSEC-desired (DO) flag in requests."""
    if not wanted:
        # Only clear the DO bit when EDNS is active at all.
        if self.edns >= 0:
            self.ednsflags &= ~dns.flags.DO
        return
    # DO requires EDNS; enable it with defaults if necessary.
    if self.edns < 0:
        self.use_edns()
    self.ednsflags |= dns.flags.DO
Enable or disable DNSSEC desired flag in requests .
7,453
def set_rcode(self, rcode):
    """Set the rcode, splitting it into header and EDNS flag bits."""
    (value, evalue) = dns.rcode.to_flags(rcode)
    # Replace the low 4 header bits with the rcode's flag portion.
    self.flags &= 0xFFF0
    self.flags |= value
    # Replace the extended-rcode byte in the EDNS flags (py2 long literal).
    self.ednsflags &= 0x00FFFFFFL
    self.ednsflags |= evalue
    if self.ednsflags != 0 and self.edns < 0:
        # An extended rcode implies EDNS must be enabled.
        self.edns = 0
Set the rcode .
7,454
def set_opcode(self, opcode):
    """Set the opcode."""
    # Clear the 4-bit opcode field (bits 11-14), then install the new value.
    self.flags = (self.flags & 0x87FF) | dns.opcode.to_flags(opcode)
Set the opcode .
7,455
def read(self):
    """Read a wire format DNS message and build a dns.message.Message object."""
    l = len(self.wire)
    if l < 12:
        # A DNS header is always 12 bytes.
        raise ShortHeader
    (self.message.id, self.message.flags, qcount, ancount, aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12])
    self.current = 12
    if dns.opcode.is_update(self.message.flags):
        self.updating = True
    self._get_question(qcount)
    if self.question_only:
        return
    self._get_section(self.message.answer, ancount)
    self._get_section(self.message.authority, aucount)
    self._get_section(self.message.additional, adcount)
    if self.current != l:
        # Bytes remain after all declared records were consumed.
        raise TrailingJunk
    if self.message.multi and self.message.tsig_ctx and not self.message.had_tsig:
        # Multi-message TSIG: unsigned messages still feed the running context.
        self.message.tsig_ctx.update(self.wire)
Read a wire format DNS message and build a dns . message . Message object .
7,456
def _header_line(self, section):
    """Process one line from the text format header section."""
    token = self.tok.get()
    what = token.value
    if what == 'id':
        self.message.id = self.tok.get_int()
    elif what == 'flags':
        # Consume flag identifiers until end of line.
        while True:
            token = self.tok.get()
            if not token.is_identifier():
                self.tok.unget(token)
                break
            self.message.flags = self.message.flags | dns.flags.from_text(token.value)
        if dns.opcode.is_update(self.message.flags):
            self.updating = True
    elif what == 'edns':
        self.message.edns = self.tok.get_int()
        # EDNS version lives in the upper bits of the EDNS flags.
        self.message.ednsflags = self.message.ednsflags | (self.message.edns << 16)
    elif what == 'eflags':
        if self.message.edns < 0:
            self.message.edns = 0
        while True:
            token = self.tok.get()
            if not token.is_identifier():
                self.tok.unget(token)
                break
            self.message.ednsflags = self.message.ednsflags | dns.flags.edns_from_text(token.value)
    elif what == 'payload':
        self.message.payload = self.tok.get_int()
        if self.message.edns < 0:
            self.message.edns = 0
    elif what == 'opcode':
        text = self.tok.get_string()
        self.message.flags = self.message.flags | dns.opcode.to_flags(dns.opcode.from_text(text))
    elif what == 'rcode':
        text = self.tok.get_string()
        self.message.set_rcode(dns.rcode.from_text(text))
    else:
        raise UnknownHeaderField
    self.tok.get_eol()
Process one line from the text format header section .
7,457
def _question_line(self, section):
    """Process one line from the text format question section."""
    token = self.tok.get(want_leading=True)
    if not token.is_whitespace():
        # A new owner name; otherwise reuse the previous line's name.
        self.last_name = dns.name.from_text(token.value, None)
    name = self.last_name
    token = self.tok.get()
    if not token.is_identifier():
        raise dns.exception.SyntaxError
    # The class is optional; if this token is not a class it is the type.
    try:
        rdclass = dns.rdataclass.from_text(token.value)
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
    except dns.exception.SyntaxError:
        raise dns.exception.SyntaxError
    except Exception:
        rdclass = dns.rdataclass.IN
    rdtype = dns.rdatatype.from_text(token.value)
    self.message.find_rrset(self.message.question, name, rdclass, rdtype, create=True, force_unique=True)
    if self.updating:
        # In UPDATE messages the question's class is the zone class.
        self.zone_rdclass = rdclass
    self.tok.get_eol()
Process one line from the text format question section .
7,458
def _rr_line(self, section):
    """Process one line from the text format answer, authority or
    additional data sections."""
    deleting = None
    token = self.tok.get(want_leading=True)
    if not token.is_whitespace():
        # A new owner name; otherwise reuse the previous line's name.
        self.last_name = dns.name.from_text(token.value, None)
    name = self.last_name
    token = self.tok.get()
    if not token.is_identifier():
        raise dns.exception.SyntaxError
    # TTL is optional; if this token is not an integer, default to 0.
    try:
        ttl = int(token.value, 0)
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
    except dns.exception.SyntaxError:
        raise dns.exception.SyntaxError
    except Exception:
        ttl = 0
    # Class is optional too; ANY/NONE mark an UPDATE delete operation.
    try:
        rdclass = dns.rdataclass.from_text(token.value)
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
        if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE:
            deleting = rdclass
            rdclass = self.zone_rdclass
    except dns.exception.SyntaxError:
        raise dns.exception.SyntaxError
    except Exception:
        rdclass = dns.rdataclass.IN
    rdtype = dns.rdatatype.from_text(token.value)
    token = self.tok.get()
    if not token.is_eol_or_eof():
        # Remaining tokens form the rdata.
        self.tok.unget(token)
        rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None)
        covers = rd.covers()
    else:
        rd = None
        covers = dns.rdatatype.NONE
    rrset = self.message.find_rrset(section, name, rdclass, rdtype, covers, deleting, True, self.updating)
    if not rd is None:
        rrset.add(rd, ttl)
Process one line from the text format answer authority or additional data sections .
7,459
def read(self):
    """Read a text format DNS message and build a dns.message.Message object."""
    # Section comments (e.g. ";QUESTION") switch the per-line parser.
    line_method = self._header_line
    section = None
    while 1:
        token = self.tok.get(True, True)
        if token.is_eol_or_eof():
            break
        if token.is_comment():
            u = token.value.upper()
            if u == 'HEADER':
                line_method = self._header_line
            elif u == 'QUESTION' or u == 'ZONE':
                line_method = self._question_line
                section = self.message.question
            elif u == 'ANSWER' or u == 'PREREQ':
                line_method = self._rr_line
                section = self.message.answer
            elif u == 'AUTHORITY' or u == 'UPDATE':
                line_method = self._rr_line
                section = self.message.authority
            elif u == 'ADDITIONAL':
                line_method = self._rr_line
                section = self.message.additional
            self.tok.get_eol()
            continue
        # Not a section marker: hand the line to the current parser.
        self.tok.unget(token)
        line_method(section)
Read a text format DNS message and build a dns . message . Message object .
7,460
def from_text(text):
    """Convert text into an opcode."""
    # Numeric opcodes are accepted directly when in the 4-bit range.
    if text.isdigit():
        num = int(text)
        if 0 <= num <= 15:
            return num
    opcode = _by_text.get(text.upper())
    if opcode is None:
        raise UnknownOpcode
    return opcode
Convert text into an opcode .
7,461
def chmod(config):
    """Set correct file permissions below config["output_dir"]:
    0o755 for directories, 0o644 for files.
    """
    # Fix: 0755/0644 are Python-2-only octal literals; 0o755/0o644 are
    # valid in Python 2.6+ and Python 3 with identical values.
    output_dir = config["output_dir"]
    for dirpath, dirnames, filenames in os.walk(output_dir):
        for dirname in dirnames:
            os.chmod(os.path.join(dirpath, dirname), 0o755)
        for filename in filenames:
            os.chmod(os.path.join(dirpath, filename), 0o644)
Set correct file permissions .
7,462
def trace_filter(patterns):
    """Add given patterns to the trace filter set, or clear the set if
    patterns is None."""
    if patterns is None:
        _trace_filter.clear()
        return
    for pattern in patterns:
        _trace_filter.add(re.compile(pattern))
Add given patterns to trace filter set or clear set if patterns is None .
7,463
def _trace(frame, event, arg):
    """Trace function calls and returns."""
    if event in ('call', 'c_call'):
        _trace_line(frame, event, arg)
    elif event in ('return', 'c_return'):
        _trace_line(frame, event, arg)
        # Show the returned value alongside the traced line.
        print(" return:", arg)
    # Returning the tracer keeps tracing active for nested scopes.
    return _trace
Trace function calls .
7,464
def _trace_full(frame, event, arg):
    """Trace every executed line."""
    if event != "line":
        # Delegate call/return events to the plain tracer.
        _trace(frame, event, arg)
    else:
        _trace_line(frame, event, arg)
    return _trace_full
Trace every executed line .
7,465
def _trace_line(frame, event, arg):
    """Print current executed line."""
    name = frame.f_globals["__name__"]
    if name in _trace_ignore:
        # Keep tracing, but skip ignored modules.
        return _trace_line
    for pat in _trace_filter:
        if not pat.match(name):
            return _trace_line
    lineno = frame.f_lineno
    filename = frame.f_globals["__file__"]
    if filename.endswith((".pyc", ".pyo")):
        # Map compiled filename back to the .py source.
        filename = filename[:-1]
    line = linecache.getline(filename, lineno)
    currentThread = threading.currentThread()
    tid = currentThread.ident
    tname = currentThread.getName()
    args = (tid, tname, time.time(), line.rstrip(), name, lineno)
    print("THREAD(%d) %r %.2f %s # %s:%d" % args)
Print current executed line .
7,466
def start_output(self):
    """Write start of checking info as DOT comment."""
    super(DOTLogger, self).start_output()
    if self.has_part("intro"):
        self.write_intro()
        self.writeln()
    # Open the graph and declare its charset.
    self.writeln(u"digraph G {")
    self.writeln(u" graph [")
    self.writeln(u" charset=\"%s\"," % self.get_charset_encoding())
    self.writeln(u" ];")
    self.flush()
Write start of checking info as DOT comment .
7,467
def comment(self, s, **args):
    """Write DOT comment."""
    self.write(u"// ")
    self.writeln(s=s, **args)
Write DOT comment .
7,468
def write_edge(self, node):
    """Write edge from parent to node."""
    # Node labels are the DOT identifiers; quote them for safety.
    source = dotquote(self.nodes[node["parent_url"]]["label"])
    target = dotquote(node["label"])
    self.writeln(u' "%s" -> "%s" [' % (source, target))
    self.writeln(u' label="%s",' % dotquote(node["edge"]))
    if self.has_part("result"):
        self.writeln(u" valid=%d," % node["valid"])
    self.writeln(u" ];")
Write edge from parent to node .
7,469
def log_url(self, url_data):
    """Log URL data in custom XML format.

    Writes one <urldata> element; each piece of information is only
    emitted when the corresponding output part is enabled via
    has_part() and the data is present on url_data.
    """
    self.xml_starttag(u'urldata')
    if self.has_part('url'):
        self.xml_tag(u"url", unicode(url_data.base_url))
    if url_data.name and self.has_part('name'):
        self.xml_tag(u"name", unicode(url_data.name))
    if url_data.parent_url and self.has_part('parenturl'):
        # Record where in the parent document this link was found.
        attrs = {
            u'line': u"%d" % url_data.line,
            u'column': u"%d" % url_data.column,
        }
        self.xml_tag(u"parent", unicode(url_data.parent_url), attrs=attrs)
    if url_data.base_ref and self.has_part('base'):
        self.xml_tag(u"baseref", unicode(url_data.base_ref))
    if self.has_part("realurl"):
        self.xml_tag(u"realurl", unicode(url_data.url))
    if self.has_part("extern"):
        self.xml_tag(u"extern", u"%d" % (1 if url_data.extern else 0))
    # Negative dltime/size mean "unknown" and are not logged.
    if url_data.dltime >= 0 and self.has_part("dltime"):
        self.xml_tag(u"dltime", u"%f" % url_data.dltime)
    if url_data.size >= 0 and self.has_part("dlsize"):
        self.xml_tag(u"dlsize", u"%d" % url_data.size)
    if url_data.checktime and self.has_part("checktime"):
        self.xml_tag(u"checktime", u"%f" % url_data.checktime)
    if self.has_part("level"):
        self.xml_tag(u"level", u"%d" % url_data.level)
    if url_data.info and self.has_part('info'):
        self.xml_starttag(u"infos")
        for info in url_data.info:
            self.xml_tag(u"info", info)
        self.xml_endtag(u"infos")
    if url_data.modified and self.has_part('modified'):
        self.xml_tag(u"modified", self.format_modified(url_data.modified))
    if url_data.warnings and self.has_part('warning'):
        self.xml_starttag(u"warnings")
        for tag, data in url_data.warnings:
            # The warning tag attribute is optional.
            attrs = {}
            if tag:
                attrs["tag"] = tag
            self.xml_tag(u"warning", data, attrs)
        self.xml_endtag(u"warnings")
    if self.has_part("result"):
        attrs = {}
        if url_data.result:
            attrs["result"] = url_data.result
        self.xml_tag(u"valid", u"%d" % (1 if url_data.valid else 0), attrs)
    self.xml_endtag(u'urldata')
    self.flush()
Log URL data in custom XML format .
7,470
def check_w3_errors(url_data, xml, w3type):
    """Add a warning to url_data for each W3C validation error found in
    the given SOAP XML response.  w3type ("W3C HTML" or "W3C CSS") is
    used in the warning text."""
    dom = parseString(xml)
    template = _("%(w3type)s validation error at line %(line)s col %(column)s: %(msg)s")
    for error in dom.getElementsByTagName('m:error'):
        url_data.add_warning(template % {
            "w3type": w3type,
            "line": getXmlText(error, "m:line"),
            "column": getXmlText(error, "m:col"),
            "msg": getXmlText(error, "m:message"),
        })
Add warnings for W3C HTML or CSS errors in xml format . w3type is either W3C HTML or W3C CSS .
7,471
def getXmlText(parent, tag):
    """Return the concatenated text content of the first child element
    of parent with the given tag name."""
    elem = parent.getElementsByTagName(tag)[0]
    # Join only direct text nodes; nested elements are ignored.
    return ''.join(node.data for node in elem.childNodes
                   if node.nodeType == node.TEXT_NODE)
Return XML content of given tag in parent element .
7,472
def check_w3_time(self):
    """Throttle W3C validator calls to at most one per
    W3Timer.SleepSeconds seconds.

    Sleeps only for the time remaining in the interval instead of a
    full interval (the original always slept SleepSeconds, delaying
    callers longer than necessary), then records the call time.
    """
    elapsed = time.time() - self.last_w3_call
    if elapsed < W3Timer.SleepSeconds:
        # Only wait out the rest of the interval.
        time.sleep(W3Timer.SleepSeconds - elapsed)
    self.last_w3_call = time.time()
Make sure the W3C validators are at most called once a second .
7,473
def check(self, url_data):
    """Check HTML syntax of the given URL via the W3C online validator."""
    self.timer.check_w3_time()
    session = url_data.session
    try:
        payload = {'uri': url_data.url, 'output': 'soap12'}
        response = session.post('http://validator.w3.org/check', data=payload)
        response.raise_for_status()
        status = response.headers.get('x-w3c-validator-status', 'Invalid')
        if status == 'Valid':
            url_data.add_info(u"W3C Validator: %s" % _("valid HTML syntax"))
            return
        check_w3_errors(url_data, response.text, "W3C HTML")
    except requests.exceptions.RequestException:
        # Network problems are ignored: validation is best-effort.
        pass
    except Exception as msg:
        log.warn(LOG_PLUGIN,
                 _("HTML syntax check plugin error: %(msg)s ") % {"msg": msg})
Check HTML syntax of given URL .
7,474
def log_filter_url(self, url_data, do_print):
    """Update accounting data and determine if URL should be included
    in the sitemap.

    The first logged URL establishes the sitemap prefix and gets
    priority 1.0; all later URLs get 0.5 unless a fixed priority is
    configured.  Only valid HTTP(S) URLs under the prefix with an HTML
    content type are written.
    """
    self.stats.log_url(url_data, do_print)
    if self.disabled:
        return
    if self.prefix is None:
        # First URL: it must be HTTP(S), otherwise disable the logger.
        if not url_data.url.startswith(HTTP_SCHEMES):
            log.warn(LOG_CHECK, "Sitemap URL %r does not start with http: or https:.", url_data.url)
            self.disabled = True
            return
        self.prefix = url_data.url
        priority = 1.0
    elif url_data.url == self.prefix:
        # The start URL itself was already written; skip duplicates.
        return
    else:
        priority = 0.5
    # A configured priority overrides the computed one.
    if self.priority is not None:
        priority = self.priority
    if (url_data.valid and url_data.url.startswith(HTTP_SCHEMES) and
            url_data.url.startswith(self.prefix) and
            url_data.content_type in HTML_TYPES):
        self.log_url(url_data, priority=priority)
Update accounting data and determine if URL should be included in the sitemap .
7,475
def log_url(self, url_data, priority=None):
    """Write one <url> entry in sitemap XML format.

    priority must be a float when given; callers (log_filter_url) are
    expected to always supply one.
    """
    self.xml_starttag(u'url')
    self.xml_tag(u'loc', url_data.url)
    modified = url_data.modified
    if modified:
        self.xml_tag(u'lastmod', self.format_modified(modified, sep="T"))
    self.xml_tag(u'changefreq', self.frequency)
    self.xml_tag(u'priority', "%.2f" % priority)
    self.xml_endtag(u'url')
    self.flush()
Log URL data in sitemap format .
7,476
def reset(self):
    """Initialize HTTP specific variables."""
    super(HttpUrl, self).reset()
    # response headers of the last request
    self.headers = {}
    # authentication credentials, if any
    self.auth = None
    # SSL information, filled in for https URLs
    self.ssl_cipher = None
    self.ssl_cert = None
Initialize HTTP specific variables .
7,477
def content_allows_robots(self):
    """Return False if the content of this URL forbids robots to
    search for recursive links.

    Only HTML content is inspected; anything else allows robots.
    """
    if not self.is_html():
        return True
    # Wire up a SAX handler that looks for the meta robots tag.
    handler = linkparse.MetaRobotsFinder()
    parser = htmlsax.parser(handler)
    handler.parser = parser
    if self.charset:
        parser.encoding = self.charset
    try:
        parser.feed(self.get_content())
        parser.flush()
    except linkparse.StopParse as msg:
        # The handler aborts parsing once it has its answer.
        log.debug(LOG_CHECK, "Stopped parsing: %s", msg)
        pass
    # Break the handler<->parser reference cycle.
    handler.parser = None
    parser.handler = None
    return handler.follow
Return False if the content of this URL forbids robots to search for recursive links .
7,478
def add_size_info(self):
    """Set self.size from the Content-Length header, or -1 if unknown.

    A Transfer-Encoding header makes Content-Length unreliable, so the
    size is treated as unknown in that case.  A malformed
    Content-Length value leaves self.size untouched.
    """
    headers = self.headers
    usable = (headers and "Content-Length" in headers
              and "Transfer-Encoding" not in headers)
    if not usable:
        self.size = -1
        return
    try:
        self.size = int(self.getheader("Content-Length"))
    except (ValueError, OverflowError):
        pass
Get size of URL content from HTTP header .
7,479
def build_request(self):
    """Build and return a prepared GET request for this URL."""
    headers = {}
    # Pass the parent URL as Referer for HTTP(S) parents.
    if (self.parent_url and
            self.parent_url.lower().startswith(HTTP_SCHEMAS)):
        headers["Referer"] = self.parent_url
    kwargs = dict(method='GET', url=self.url, headers=headers)
    if self.auth:
        kwargs['auth'] = self.auth
    log.debug(LOG_CHECK, "Prepare request with %s", kwargs)
    return self.session.prepare_request(requests.Request(**kwargs))
Build a prepared request object .
7,480
def send_request(self, request):
    """Send request and store response in self.url_connection.

    Redirects are not followed here; follow_redirections() handles
    them separately.
    """
    # Honor per-host rate limiting before contacting the server.
    self.aggregate.wait_for_host(self.urlparts[1])
    kwargs = self.get_request_kwargs()
    kwargs["allow_redirects"] = False
    self._send_request(request, **kwargs)
Send request and store response in self . url_connection .
7,481
def _send_request(self, request, **kwargs):
    """Send GET request.

    Stores the response in self.url_connection, copies its headers to
    self.headers and records SSL certificate info for https URLs.
    """
    log.debug(LOG_CHECK, "Send request %s with %s", request, kwargs)
    log.debug(LOG_CHECK, "Request headers %s", request.headers)
    self.url_connection = self.session.send(request, **kwargs)
    self.headers = self.url_connection.headers
    self._add_ssl_info()
Send GET request .
7,482
def _get_ssl_sock(self):
    """Get raw SSL socket.

    NOTE(review): reaches into the response's private
    raw._connection attribute; presumably fragile across
    requests/urllib3 versions -- confirm against the pinned versions.
    """
    assert self.scheme == u"https", self
    raw_connection = self.url_connection.raw._connection
    if raw_connection.sock is None:
        # The connection may not be established yet.
        raw_connection.connect()
    return raw_connection.sock
Get raw SSL socket .
7,483
def _add_ssl_info(self):
    """Store the peer SSL certificate in self.ssl_cert for https URLs,
    else set it to None.

    NOTE(review): despite the historical name, only the certificate is
    stored here; self.ssl_cipher (initialized in reset()) is never
    filled in -- confirm whether that is intentional.
    """
    if self.scheme == u'https':
        sock = self._get_ssl_sock()
        if hasattr(sock, 'cipher'):
            # Socket with the stdlib ssl interface.
            self.ssl_cert = sock.getpeercert()
        else:
            # Presumably a pyOpenSSL-style socket wrapper -- TODO confirm.
            cert = sock.connection.get_peer_certificate()
            self.ssl_cert = httputil.x509_to_dict(cert)
        log.debug(LOG_CHECK, "Got SSL certificate %s", self.ssl_cert)
    else:
        self.ssl_cert = None
Store SSL certificate info of the connection, or None for non-https URLs.
7,484
def get_redirects(self, request):
    """Return iterator of redirects for given request.

    Yields one response object per redirect hop.
    """
    kwargs = self.get_request_kwargs()
    return self.session.resolve_redirects(self.url_connection, request, **kwargs)
Return iterator of redirects for given request .
7,485
def follow_redirections(self, request):
    """Follow all redirections of http response.

    Each hop updates this URL object's state (aliases, extern flag,
    url parts, connection, headers, SSL info) and runs the connection
    plugins on intermediate redirect responses.
    """
    log.debug(LOG_CHECK, "follow all redirections")
    if self.is_redirect():
        self.aggregate.plugin_manager.run_connection_plugins(self)
    response = None
    for response in self.get_redirects(request):
        newurl = response.url
        log.debug(LOG_CHECK, "Redirected to %r", newurl)
        self.aliases.append(newurl)
        self.add_info(_("Redirected to `%(url)s'.") % {'url': newurl})
        # Reset and re-evaluate the extern status for the new URL.
        self.extern = None
        self.set_extern(newurl)
        # Rebuild all URL-derived state from the redirect target.
        self.urlparts = strformat.url_unicode_split(newurl)
        self.build_url_parts()
        self.url_connection = response
        self.headers = response.headers
        self.url = urlutil.urlunsplit(self.urlparts)
        self.scheme = self.urlparts[0].lower()
        self._add_ssl_info()
        self._add_response_info()
        if self.is_redirect():
            self.aggregate.plugin_manager.run_connection_plugins(self)
Follow all redirections of http response .
7,486
def getheader(self, name, default=None):
    """Get decoded header value.

    @param name: header name to look up
    @param default: value returned when the header is missing
    @return: header value decoded with HEADER_ENCODING, or default
    """
    value = self.headers.get(name)
    if value is None:
        return default
    return unicode_safe(value, encoding=HEADER_ENCODING)
Get decoded header value .
7,487
def check_response(self):
    """Check final result and log it.

    - status >= 400: mark the URL invalid with "<code> <reason>"
    - status 204: add an empty-content warning (still valid)
    - status >= 200: mark valid with "<code> <reason>"
    - informational statuses (< 200): plain "OK"
    """
    status = self.url_connection.status_code
    reason = self.url_connection.reason
    if status >= 400:
        self.set_result(u"%d %s" % (status, reason), valid=False)
    else:
        if status == 204:
            # 204 No Content: the link works but delivers nothing.
            self.add_warning(reason, tag=WARN_HTTP_EMPTY_CONTENT)
        if status >= 200:
            # Use %d as in the error branch (was %r; for integer status
            # codes repr() and %d render identically, so output is
            # unchanged -- this is a consistency fix only).
            self.set_result(u"%d %s" % (status, reason))
        else:
            self.set_result(_("OK"))
Check final result and log it .
7,488
def read_content(self):
    """Download and return the content of this URL as a string.
    Can be overridden in subclasses.

    @raise: LinkCheckerError when the content would exceed the
        configured "maxfilesizedownload" limit
    """
    maxbytes = self.aggregate.config["maxfilesizedownload"]
    buf = StringIO()
    for data in self.url_connection.iter_content(chunk_size=self.ReadChunkBytes):
        # Check the limit before buffering the next chunk.
        if buf.tell() + len(data) > maxbytes:
            raise LinkCheckerError(_("File size too large"))
        buf.write(data)
    return buf.getvalue()
Return the downloaded data for this URL as a string. Can be overridden in subclasses.
7,489
def get_file(type_str, open_files, basedir):
    """Return the output file for type_str, opening and initializing a
    new one on first use."""
    if type_str in open_files:
        return open_files[type_str]
    encoding = 'utf-8'
    path = os.path.join(basedir, type_str + ".html")
    fd = codecs.open(path, 'w', encoding)
    # New files get the standard HTML header written once.
    write_html_header(fd, type_str, encoding)
    open_files[type_str] = fd
    return fd
Get already opened file or open and initialize a new one .
7,490
def _get_loggers():
    """Return list of Logger classes.

    Scans all modules of the logger package for plugins derived
    from _Logger.
    """
    from .. import loader
    modules = loader.get_package_modules('logger')
    return list(loader.get_plugins(modules, [_Logger]))
Return list of Logger classes .
7,491
def reset(self):
    """Reset all log statistics to default values."""
    # total number of checked URLs
    self.number = 0
    # number of invalid URLs and how many of them were printed
    self.errors = 0
    self.errors_printed = 0
    # number of warnings and how many of them were printed
    self.warnings = 0
    self.warnings_printed = 0
    # number of internal errors during checking
    self.internal_errors = 0
    # per content-type link counters (copy keeps the template intact)
    self.link_types = ContentTypes.copy()
    # URL length extremes and running average
    self.max_url_length = 0
    self.min_url_length = 0
    self.avg_url_length = 0.0
    self.avg_number = 0
    # None means "unknown", as opposed to zero bytes downloaded
    self.downloaded_bytes = None
Reset all log statistics to default values .
7,492
def log_url(self, url_data, do_print):
    """Update URL statistics with one checked URL."""
    self.number += 1
    if not url_data.valid:
        self.errors += 1
        if do_print:
            self.errors_printed += 1
    num_warnings = len(url_data.warnings)
    self.warnings += num_warnings
    if do_print:
        self.warnings_printed += num_warnings
    # Classify the link by its major content type (or mailto scheme).
    if url_data.content_type:
        key = url_data.content_type.split('/', 1)[0].lower()
        if key not in self.link_types:
            key = "other"
    elif url_data.url.startswith(u"mailto:"):
        key = "mail"
    else:
        key = "other"
    self.link_types[key] += 1
    # Track URL length extremes and a running average.
    if url_data.url:
        length = len(url_data.url)
        self.max_url_length = max(length, self.max_url_length)
        if self.min_url_length == 0:
            self.min_url_length = length
        else:
            self.min_url_length = min(length, self.min_url_length)
        self.avg_number += 1
        self.avg_url_length += (length - self.avg_url_length) / self.avg_number
Log URL statistics .
7,493
def get_args(self, kwargs):
    """Return a new dict combining the default logger arguments with
    the user-supplied kwargs; user values take precedence."""
    merged = dict(self.LoggerArgs)
    merged.update(kwargs)
    return merged
Construct log configuration from default and user args .
7,494
def encode(self, s):
    """Encode unicode string s with the configured output encoding.

    The assert documents that only unicode input is supported (it is
    stripped under -O).
    """
    assert isinstance(s, unicode)
    return s.encode(self.output_encoding, self.codec_errors)
Encode string with output encoding .
7,495
def start_fileoutput(self):
    """Open the configured log file for writing, creating missing
    directories.  On failure, disable this logger and fall back to a
    dummy output object so later writes are harmless no-ops."""
    dirname = os.path.dirname(self.filename)
    try:
        if dirname and not os.path.isdir(dirname):
            os.makedirs(dirname)
        self.fd = self.create_fd()
        self.close_fd = True
    except IOError as msg:
        log.warn(LOG_CHECK,
            "Could not open file %r for writing: %s\n"
            "Disabling log output of %s", self.filename, msg, self)
        self.fd = dummy.Dummy()
        self.is_active = False
        self.filename = None
Start output to configured file .
7,496
def create_fd(self):
    """Create open file descriptor.

    When no filename is configured, return an encoded writer wrapping
    standard output; otherwise open the configured file for writing.
    """
    if self.filename is None:
        return i18n.get_encoded_writer(encoding=self.output_encoding, errors=self.codec_errors)
    return codecs.open(self.filename, "wb", self.output_encoding, self.codec_errors)
Create open file descriptor .
7,497
def close_fileoutput(self):
    """Flush pending output and close self.fd if this logger owns it."""
    if self.fd is None:
        return
    try:
        self.flush()
    except IOError:
        # A failed flush must not prevent closing the file.
        pass
    if self.close_fd:
        try:
            self.fd.close()
        except IOError:
            pass
    self.fd = None
Flush and close the file output denoted by self . fd .
7,498
def check_date(self):
    """Print a birthday greeting on LinkChecker's anniversary (Jan 7)."""
    today = datetime.date.today()
    if (today.month, today.day) == (1, 7):
        msg = _("Happy birthday for LinkChecker, I'm %d years old today!")
        self.comment(msg % (today.year - 2000))
Check for special dates .
7,499
def wrap(self, lines, width):
    """Join lines with blank-line separators and wrap to width,
    indenting all wrapped lines by self.max_indent (the leading indent
    of the very first line is stripped off)."""
    text = (os.linesep + os.linesep).join(lines)
    indent = " " * self.max_indent
    return strformat.wrap(text, width,
                          subsequent_indent=indent,
                          initial_indent=indent,
                          break_long_words=False,
                          break_on_hyphens=False).lstrip()
Return wrapped version of given lines .