7,200
def check_anchor ( self , url_data ) : log . debug ( LOG_PLUGIN , "checking anchor %r in %s" , url_data . anchor , self . anchors ) enc = lambda anchor : urlutil . url_quote_part ( anchor , encoding = url_data . encoding ) if any ( x for x in self . anchors if enc ( x [ 0 ] ) == url_data . anchor ) : return if self . anchors : anchornames = sorted ( set ( u"`%s'" % x [ 0 ] for x in self . anchors ) ) anchors = u", " . join ( anchornames ) else : anchors = u"-" args = { "name" : url_data . anchor , "anchors" : anchors } msg = u"%s %s" % ( _ ( "Anchor `%(name)s' not found." ) % args , _ ( "Available anchors: %(anchors)s." ) % args ) url_data . add_warning ( msg )
If URL is valid, parseable and has an anchor, check it. A warning is logged and True is returned if the anchor is not found.
7,201
def zone_for_name ( name , rdclass = dns . rdataclass . IN , tcp = False , resolver = None ) : if isinstance ( name , basestring ) : name = dns . name . from_text ( name , dns . name . root ) if resolver is None : resolver = get_default_resolver ( ) if not name . is_absolute ( ) : raise NotAbsolute ( name ) while 1 : try : answer = resolver . query ( name , dns . rdatatype . SOA , rdclass , tcp ) if answer . rrset . name == name : return name except ( dns . resolver . NXDOMAIN , dns . resolver . NoAnswer ) : pass try : name = name . parent ( ) except dns . name . NoParent : raise NoRootSOA
Find the name of the zone which contains the specified name .
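A minimal usage sketch of the zone walk above, assuming dnspython is importable and the network is reachable; the queried name is only an example.

import dns.resolver

# Walk up from the given name until an SOA record answers at that exact
# name; the result is the enclosing zone.
zone = dns.resolver.zone_for_name('www.dnspython.org')
print(zone)  # e.g. dnspython.org.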
7,202
def maybe_clean ( self ) : now = time . time ( ) if self . next_cleaning <= now : keys_to_delete = [ ] for ( k , v ) in self . data . iteritems ( ) : if v . expiration <= now : keys_to_delete . append ( k ) for k in keys_to_delete : del self . data [ k ] now = time . time ( ) self . next_cleaning = now + self . cleaning_interval
Clean the cache if it's time to do so.
7,203
def put ( self , key , value ) : self . maybe_clean ( ) self . data [ key ] = value
Associate key and value in the cache .
7,204
def flush ( self , key = None ) : if not key is None : if key in self . data : del self . data [ key ] else : self . data = { } self . next_cleaning = time . time ( ) + self . cleaning_interval
Flush the cache .
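A self-contained sketch of how maybe_clean, put and flush cooperate; the CacheEntry class and the 300-second interval are illustrative assumptions, not part of the original module.

import time

class CacheEntry:
    def __init__(self, value, ttl):
        self.value = value
        self.expiration = time.time() + ttl

class SimpleCache:
    def __init__(self, cleaning_interval=300.0):
        self.data = {}
        self.cleaning_interval = cleaning_interval
        self.next_cleaning = time.time() + cleaning_interval

    def maybe_clean(self):
        # Drop expired entries, but only once per cleaning interval.
        now = time.time()
        if self.next_cleaning <= now:
            for k in [k for k, v in self.data.items() if v.expiration <= now]:
                del self.data[k]
            self.next_cleaning = time.time() + self.cleaning_interval

    def put(self, key, value):
        self.maybe_clean()
        self.data[key] = value

    def flush(self, key=None):
        # Drop one entry, or everything and restart the cleaning timer.
        if key is not None:
            self.data.pop(key, None)
        else:
            self.data = {}
            self.next_cleaning = time.time() + self.cleaning_interval

cache = SimpleCache()
cache.put(('example.com', 'A'), CacheEntry('93.184.216.34', ttl=30))
cache.flush()  # empty the whole cache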
7,205
def reset ( self ) : self . domain = dns . name . Name ( dns . name . from_text ( socket . gethostname ( ) ) [ 1 : ] ) if len ( self . domain ) == 0 : self . domain = dns . name . root self . nameservers = [ ] self . localhosts = set ( [ 'localhost' , 'loopback' , '127.0.0.1' , '0.0.0.0' , '::1' , 'ip6-localhost' , 'ip6-loopback' , ] ) self . interfaces = set ( ) self . search = set ( ) self . search_patterns = [ 'www.%s.com' , 'www.%s.org' , 'www.%s.net' , ] self . port = 53 self . timeout = 2.0 self . lifetime = 30.0 self . keyring = None self . keyname = None self . keyalgorithm = dns . tsig . default_algorithm self . edns = - 1 self . ednsflags = 0 self . payload = 0 self . cache = None
Reset all resolver configuration to the defaults .
7,206
def read_local_ifaddrs ( self ) : if os . name != 'posix' : return [ ] try : from linkcheck . network import IfConfig except ImportError : return [ ] ifaddrs = [ ] ifc = IfConfig ( ) for iface in ifc . getInterfaceList ( flags = IfConfig . IFF_UP ) : addr = ifc . getAddr ( iface ) if addr : ifaddrs . append ( addr ) return ifaddrs
IP addresses for all active interfaces .
7,207
def _config_win32_nameservers ( self , nameservers ) : nameservers = str ( nameservers ) split_char = self . _determine_split_char ( nameservers ) ns_list = nameservers . split ( split_char ) for ns in ns_list : if not ns in self . nameservers : self . nameservers . append ( ns )
Configure a NameServer registry entry .
7,208
def _config_win32_domain ( self , domain ) : self . domain = dns . name . from_text ( str ( domain ) )
Configure a Domain registry entry .
7,209
def _config_win32_search ( self , search ) : search = str ( search ) split_char = self . _determine_split_char ( search ) search_list = search . split ( split_char ) for s in search_list : if not s in self . search : self . search . add ( dns . name . from_text ( s ) )
Configure a Search registry entry .
7,210
def _config_win32_add_ifaddr ( self , key , name ) : try : ip , rtype = _winreg . QueryValueEx ( key , name ) if isinstance ( ip , basestring ) and ip : ip = str ( ip ) . lower ( ) self . localhosts . add ( ip ) self . interfaces . add ( ip ) except WindowsError : pass
Add interface IP address to self.localhosts.
7,211
def _config_win32_fromkey ( self , key ) : try : enable_dhcp , rtype = _winreg . QueryValueEx ( key , 'EnableDHCP' ) except WindowsError : enable_dhcp = False if enable_dhcp : try : servers , rtype = _winreg . QueryValueEx ( key , 'DhcpNameServer' ) except WindowsError : servers = None if servers : self . _config_win32_nameservers ( servers ) try : dom , rtype = _winreg . QueryValueEx ( key , 'DhcpDomain' ) if dom : self . _config_win32_domain ( dom ) except WindowsError : pass self . _config_win32_add_ifaddr ( key , 'DhcpIPAddress' ) else : try : servers , rtype = _winreg . QueryValueEx ( key , 'NameServer' ) except WindowsError : servers = None if servers : self . _config_win32_nameservers ( servers ) try : dom , rtype = _winreg . QueryValueEx ( key , 'Domain' ) if dom : self . _config_win32_domain ( dom ) except WindowsError : pass self . _config_win32_add_ifaddr ( key , 'IPAddress' ) try : search , rtype = _winreg . QueryValueEx ( key , 'SearchList' ) except WindowsError : search = None if search : self . _config_win32_search ( search )
Extract DNS info from a registry key .
7,212
def read_registry ( self ) : lm = _winreg . ConnectRegistry ( None , _winreg . HKEY_LOCAL_MACHINE ) want_scan = False try : try : tcp_params = _winreg . OpenKey ( lm , r'SYSTEM\CurrentControlSet' r'\Services\Tcpip\Parameters' ) want_scan = True except EnvironmentError : tcp_params = _winreg . OpenKey ( lm , r'SYSTEM\CurrentControlSet' r'\Services\VxD\MSTCP' ) try : self . _config_win32_fromkey ( tcp_params ) finally : tcp_params . Close ( ) if want_scan : interfaces = _winreg . OpenKey ( lm , r'SYSTEM\CurrentControlSet' r'\Services\Tcpip\Parameters' r'\Interfaces' ) try : i = 0 while True : try : guid = _winreg . EnumKey ( interfaces , i ) i += 1 key = _winreg . OpenKey ( interfaces , guid ) if not self . _win32_is_nic_enabled ( lm , guid , key ) : continue try : self . _config_win32_fromkey ( key ) finally : key . Close ( ) except EnvironmentError : break finally : interfaces . Close ( ) finally : lm . Close ( )
Extract resolver configuration from the Windows registry .
7,213
def use_tsig ( self , keyring , keyname = None , algorithm = dns . tsig . default_algorithm ) : self . keyring = keyring if keyname is None : self . keyname = self . keyring . keys ( ) [ 0 ] else : self . keyname = keyname self . keyalgorithm = algorithm
Add a TSIG signature to the query .
7,214
def use_edns ( self , edns , ednsflags , payload ) : if edns is None : edns = - 1 self . edns = edns self . ednsflags = ednsflags self . payload = payload
Configure EDNS.
7,215
def init ( domain , directory , loc = None ) : global default_language , default_encoding , default_domain , default_directory default_directory = directory default_domain = domain if os . path . isdir ( directory ) : for lang in os . listdir ( directory ) : path = os . path . join ( directory , lang , 'LC_MESSAGES' ) mo_file = os . path . join ( path , '%s.mo' % domain ) if os . path . exists ( mo_file ) : supported_languages . add ( lang ) if loc is None : loc , encoding = get_locale ( ) else : encoding = get_locale ( ) [ 1 ] if loc in supported_languages : default_language = loc else : default_language = "en" default_encoding = encoding install_language ( default_language )
Initialize this gettext i18n module. Searches for supported languages and installs the gettext translator class.
7,216
def install_language ( language ) : translator = get_translator ( default_domain , default_directory , languages = [ get_lang ( language ) ] , fallback = True ) do_unicode = True translator . install ( do_unicode )
Install translation service routines into default namespace .
7,217
def get_translator ( domain , directory , languages = None , translatorklass = Translator , fallback = False , fallbackklass = NullTranslator ) : translator = gettext . translation ( domain , localedir = directory , languages = languages , class_ = translatorklass , fallback = fallback ) if not isinstance ( translator , gettext . GNUTranslations ) and fallbackklass : translator = fallbackklass ( ) return translator
Search the appropriate GNUTranslations class .
7,218
def get_headers_lang ( headers ) : if 'Accept-Language' not in headers : return default_language languages = headers [ 'Accept-Language' ] . split ( "," ) pref_languages = [ ] for lang in languages : pref = 1.0 if ";" in lang : lang , _pref = lang . split ( ';' , 1 ) try : pref = float ( _pref ) except ValueError : pass pref_languages . append ( ( pref , lang ) ) pref_languages . sort ( ) for lang in ( x [ 1 ] for x in pref_languages ) : if lang in supported_languages : return lang return default_language
Return preferred supported language in given HTTP headers .
7,219
def get_locale ( ) : try : loc , encoding = locale . getdefaultlocale ( ) except ValueError : loc , encoding = None , None if loc is None : loc = "C" else : loc = norm_locale ( loc ) if encoding is None : encoding = "ascii" return ( loc , encoding )
Search the default platform locale and normalize it.
7,220
def norm_locale ( loc ) : loc = locale . normalize ( loc ) pos = loc . find ( '@' ) if pos >= 0 : loc = loc [ : pos ] pos = loc . find ( '.' ) if pos >= 0 : loc = loc [ : pos ] pos = loc . find ( '_' ) if pos >= 0 : loc = loc [ : pos ] return loc
Normalize a locale .
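A hedged illustration of the stripping order above; locale.normalize() output is platform dependent, so the sample result is an assumption.

import locale

def norm_locale(loc):
    # Normalize, then cut at the modifier ('@'), encoding ('.') and
    # territory ('_') separators, in that order.
    loc = locale.normalize(loc)
    for sep in ('@', '.', '_'):
        pos = loc.find(sep)
        if pos >= 0:
            loc = loc[:pos]
    return loc

print(norm_locale('de_DE.UTF-8@euro'))  # usually 'de'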
7,221
def get_encoded_writer ( out = sys . stdout , encoding = None , errors = 'replace' ) : if encoding is None : encoding = default_encoding Writer = codecs . getwriter ( encoding ) return Writer ( out , errors )
Get wrapped output writer with given encoding and error handling .
7,222
def _add_rr ( self , name , ttl , rd , deleting = None , section = None ) : if section is None : section = self . authority covers = rd . covers ( ) rrset = self . find_rrset ( section , name , self . zone_rdclass , rd . rdtype , covers , deleting , True , True ) rrset . add ( rd , ttl )
Add a single RR to the update section .
7,223
def to_wire ( self , origin = None , max_size = 65535 ) : if origin is None : origin = self . origin return super ( Update , self ) . to_wire ( origin , max_size )
Return a string containing the update in DNS compressed wire format .
7,224
def comment ( self , s , ** args ) : self . write ( u"-- " ) self . writeln ( s = s , ** args )
Write SQL comment .
7,225
def start_output ( self ) : super ( SQLLogger , self ) . start_output ( ) if self . has_part ( "intro" ) : self . write_intro ( ) self . writeln ( ) self . flush ( )
Write start of checking info as sql comment .
7,226
def log_url ( self , url_data ) : self . writeln ( u"insert into %(table)s(urlname," "parentname,baseref,valid,result,warning,info,url,line,col," "name,checktime,dltime,size,cached,level,modified) values (" "%(base_url)s," "%(url_parent)s," "%(base_ref)s," "%(valid)d," "%(result)s," "%(warning)s," "%(info)s," "%(url)s," "%(line)d," "%(column)d," "%(name)s," "%(checktime)d," "%(dltime)d," "%(size)d," "%(cached)d," "%(level)d," "%(modified)s" ")%(separator)s" % { 'table' : self . dbname , 'base_url' : sqlify ( url_data . base_url ) , 'url_parent' : sqlify ( ( url_data . parent_url ) ) , 'base_ref' : sqlify ( ( url_data . base_ref ) ) , 'valid' : intify ( url_data . valid ) , 'result' : sqlify ( url_data . result ) , 'warning' : sqlify ( os . linesep . join ( x [ 1 ] for x in url_data . warnings ) ) , 'info' : sqlify ( os . linesep . join ( url_data . info ) ) , 'url' : sqlify ( urlutil . url_quote ( url_data . url ) ) , 'line' : url_data . line , 'column' : url_data . column , 'name' : sqlify ( url_data . name ) , 'checktime' : url_data . checktime , 'dltime' : url_data . dltime , 'size' : url_data . size , 'cached' : 0 , 'separator' : self . separator , "level" : url_data . level , "modified" : sqlify ( self . format_modified ( url_data . modified ) ) , } ) self . flush ( )
Store url check info into the database .
7,227
def write_memory_dump ( ) : gc . collect ( ) if gc . garbage : log . warn ( LOG_CHECK , "Unreachable objects: %s" , pprint . pformat ( gc . garbage ) ) from meliae import scanner fo , filename = get_temp_file ( mode = 'wb' , suffix = '.json' , prefix = 'lcdump_' ) try : scanner . dump_all_objects ( fo ) finally : fo . close ( ) return filename
Dump memory to a temporary filename with the meliae package .
7,228
def from_e164 ( text , origin = public_enum_domain ) : parts = [ d for d in text if d . isdigit ( ) ] parts . reverse ( ) return dns . name . from_text ( '.' . join ( parts ) , origin = origin )
Convert an E.164 number in textual form into a Name object whose value is the ENUM domain name for that number.
7,229
def to_e164 ( name , origin = public_enum_domain , want_plus_prefix = True ) : if not origin is None : name = name . relativize ( origin ) dlabels = [ d for d in name . labels if ( d . isdigit ( ) and len ( d ) == 1 ) ] if len ( dlabels ) != len ( name . labels ) : raise dns . exception . SyntaxError ( 'non-digit labels in ENUM domain name' ) dlabels . reverse ( ) text = '' . join ( dlabels ) if want_plus_prefix : text = '+' + text return text
Convert an ENUM domain name into an E.164 number.
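A round-trip sketch using dnspython's dns.e164 module, which exposes both helpers shown above; the phone number is made up.

import dns.e164

name = dns.e164.from_e164('+1 650 555 1212')
print(name)                    # 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
print(dns.e164.to_e164(name))  # +16505551212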
7,230
def query ( number , domains , resolver = None ) : if resolver is None : resolver = dns . resolver . get_default_resolver ( ) for domain in domains : if isinstance ( domain , ( str , unicode ) ) : domain = dns . name . from_text ( domain ) qname = dns . e164 . from_e164 ( number , domain ) try : return resolver . query ( qname , 'NAPTR' ) except dns . resolver . NXDOMAIN : pass raise dns . resolver . NXDOMAIN
Look for NAPTR RRs for the specified number in the specified domains .
7,231
def get_node ( self , url_data ) : if not url_data . url : return None elif url_data . url in self . nodes : return None node = { "url" : url_data . url , "parent_url" : url_data . parent_url , "id" : self . nodeid , "label" : quote ( url_data . title if url_data . title else url_data . name ) , "extern" : 1 if url_data . extern else 0 , "checktime" : url_data . checktime , "size" : url_data . size , "dltime" : url_data . dltime , "edge" : quote ( url_data . name ) , "valid" : 1 if url_data . valid else 0 , } self . nodes [ node [ "url" ] ] = node self . nodeid += 1 return node
Return new node data or None if node already exists .
7,232
def write_edges ( self ) : for node in self . nodes . values ( ) : if node [ "parent_url" ] in self . nodes : self . write_edge ( node ) self . flush ( )
Write all edges we can find in the graph in a brute-force manner.
7,233
def end_output ( self , ** kwargs ) : self . write_edges ( ) self . end_graph ( ) if self . has_part ( "outro" ) : self . write_outro ( ) self . close_fileoutput ( )
Write edges and end of checking info as gml comment .
7,234
def from_file ( filename ) : entries = [ ] with open ( filename ) as fd : lines = [ ] for line in fd . readlines ( ) : line = line . rstrip ( ) if not line : if lines : entries . append ( from_headers ( "\r\n" . join ( lines ) ) ) lines = [ ] else : lines . append ( line ) if lines : entries . append ( from_headers ( "\r\n" . join ( lines ) ) ) return entries
Parse cookie data from a text file in HTTP header format .
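A self-contained sketch of the block-splitting loop above; split_header_blocks stands in for from_file, and parsing each block is left to the from_headers helper defined elsewhere in the module.

def split_header_blocks(filename):
    # Collect blank-line separated blocks of header lines, each block
    # joined with CRLF, mirroring the from_file loop above.
    blocks, lines = [], []
    with open(filename) as fd:
        for line in fd:
            line = line.rstrip()
            if not line:
                if lines:
                    blocks.append("\r\n".join(lines))
                    lines = []
            else:
                lines.append(line)
    if lines:
        blocks.append("\r\n".join(lines))
    return blocks

# A matching input file would look like this (two blocks, blank line between):
#   Host: example.com
#   Set-Cookie: sessionid=abc123; path=/
#
#   Host: other.example.org
#   Set-Cookie: lang=en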
7,235
def GetConsoleScreenBufferInfo ( stream_id = STDOUT ) : handle = handles [ stream_id ] csbi = CONSOLE_SCREEN_BUFFER_INFO ( ) success = windll . kernel32 . GetConsoleScreenBufferInfo ( handle , byref ( csbi ) ) if not success : raise WinError ( ) return csbi
Get console screen buffer info object .
7,236
def SetConsoleTextAttribute ( stream_id , attrs ) : handle = handles [ stream_id ] return windll . kernel32 . SetConsoleTextAttribute ( handle , attrs )
Set a console text attribute .
7,237
def init ( ) : global _default_foreground , _default_background , _default_style try : attrs = GetConsoleScreenBufferInfo ( ) . wAttributes except ( ArgumentError , WindowsError ) : _default_foreground = GREY _default_background = BLACK _default_style = NORMAL else : _default_foreground = attrs & 7 _default_background = ( attrs >> 4 ) & 7 _default_style = attrs & BRIGHT
Initialize foreground and background attributes .
7,238
def set_console ( stream = STDOUT , foreground = None , background = None , style = None ) : if foreground is None : foreground = _default_foreground if background is None : background = _default_background if style is None : style = _default_style attrs = get_attrs ( foreground , background , style ) SetConsoleTextAttribute ( stream , attrs )
Set console foreground and background attributes .
7,239
def reset ( self ) : super ( FtpUrl , self ) . reset ( ) self . files = [ ] self . filename = None self . filename_encoding = 'iso-8859-1'
Initialize FTP url data .
7,240
def login ( self ) : self . url_connection = ftplib . FTP ( timeout = self . aggregate . config [ "timeout" ] ) if log . is_debug ( LOG_CHECK ) : self . url_connection . set_debuglevel ( 1 ) try : self . url_connection . connect ( self . host , self . port ) _user , _password = self . get_user_password ( ) if _user is None : self . url_connection . login ( ) elif _password is None : self . url_connection . login ( _user ) else : self . url_connection . login ( _user , _password ) info = self . url_connection . getwelcome ( ) if info : log . debug ( LOG_CHECK , "FTP info %s" , info ) pass else : raise LinkCheckerError ( _ ( "Got no answer from FTP server" ) ) except EOFError as msg : raise LinkCheckerError ( _ ( "Remote host has closed connection: %(msg)s" ) % str ( msg ) )
Log into ftp server and check the welcome message .
7,241
def negotiate_encoding ( self ) : try : features = self . url_connection . sendcmd ( "FEAT" ) except ftplib . error_perm as msg : log . debug ( LOG_CHECK , "Ignoring error when getting FTP features: %s" % msg ) pass else : log . debug ( LOG_CHECK , "FTP features %s" , features ) if " UTF-8" in features . splitlines ( ) : self . filename_encoding = "utf-8"
Check if server can handle UTF-8 encoded filenames. See also RFC 2640.
7,242
def cwd ( self ) : path = self . urlparts [ 2 ] . encode ( self . filename_encoding , 'replace' ) dirname = path . strip ( '/' ) dirs = dirname . split ( '/' ) filename = dirs . pop ( ) self . url_connection . cwd ( '/' ) for d in dirs : self . url_connection . cwd ( d ) return filename
Change to URL parent directory. Return filename of last path component.
7,243
def listfile ( self ) : if not self . filename : return files = self . get_files ( ) log . debug ( LOG_CHECK , "FTP files %s" , str ( files ) ) if self . filename in files : return if "%s/" % self . filename in files : if not self . url . endswith ( '/' ) : self . add_warning ( _ ( "Missing trailing directory slash in ftp url." ) , tag = WARN_FTP_MISSING_SLASH ) self . url += '/' return raise ftplib . error_perm ( "550 File not found" )
See if filename is in the current FTP directory .
7,244
def get_files ( self ) : files = [ ] def add_entry ( line ) : log . debug ( LOG_CHECK , "Directory entry %r" , line ) from . . ftpparse import ftpparse fpo = ftpparse ( line ) if fpo is not None and fpo [ "name" ] : name = fpo [ "name" ] if fpo [ "trycwd" ] : name += "/" if fpo [ "trycwd" ] or fpo [ "tryretr" ] : files . append ( name ) self . url_connection . dir ( add_entry ) return files
Get list of filenames in directory. Subdirectories have a trailing slash.
7,245
def is_parseable ( self ) : if self . is_directory ( ) : return True if self . content_type in self . ContentMimetypes : return True log . debug ( LOG_CHECK , "URL with content type %r is not parseable." , self . content_type ) return False
See if URL target is parseable for recursion .
7,246
def set_content_type ( self ) : self . content_type = mimeutil . guess_mimetype ( self . url , read = self . get_content )
Set URL content type or an empty string if content type could not be found .
7,247
def read_content ( self ) : if self . is_directory ( ) : self . url_connection . cwd ( self . filename ) self . files = self . get_files ( ) data = get_index_html ( self . files ) else : ftpcmd = "RETR %s" % self . filename buf = StringIO ( ) def stor_data ( s ) : if ( buf . tell ( ) + len ( s ) ) > self . max_size : raise LinkCheckerError ( _ ( "FTP file size too large" ) ) buf . write ( s ) self . url_connection . retrbinary ( ftpcmd , stor_data ) data = buf . getvalue ( ) buf . close ( ) return data
Return URL target content, or in case of directories a dummy HTML file with links to the files.
7,248
def close_connection ( self ) : if self . url_connection is not None : try : self . url_connection . quit ( ) except Exception : pass self . url_connection = None
Release the open connection from the connection pool .
7,249
def find_bookmark_file ( ) : try : dirname = get_profile_dir ( ) if os . path . isdir ( dirname ) : for name in OperaBookmarkFiles : fname = os . path . join ( dirname , name ) if os . path . isfile ( fname ) : return fname except Exception : pass return u""
Return the bookmark file of the Opera profile. Returns the absolute filename if found, or an empty string if no bookmark file could be found.
7,250
def main ( args ) : filename = args [ 0 ] marker = args [ 1 ] for line in fileinput . input ( filename , inplace = 1 ) : print ( line . rstrip ( ) ) if line . startswith ( marker ) : break
Remove lines after marker .
7,251
def parse_bookmark_json ( data ) : for entry in data [ "roots" ] . values ( ) : for url , name in parse_bookmark_node ( entry ) : yield url , name
Parse complete JSON data for Chromium Bookmarks .
7,252
def parse_bookmark_node ( node ) : if node [ "type" ] == "url" : yield node [ "url" ] , node [ "name" ] elif node [ "type" ] == "folder" : for child in node [ "children" ] : for entry in parse_bookmark_node ( child ) : yield entry
Parse one JSON node of Chromium Bookmarks .
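A runnable sketch of the JSON shape the two generators above expect; the sample bookmark data is invented, only the type/url/name/children keys match the code.

import json

sample = json.loads("""
{"roots": {"bookmark_bar": {"type": "folder", "name": "Bookmarks bar",
  "children": [
    {"type": "url", "url": "http://example.com/", "name": "Example"},
    {"type": "folder", "name": "Nested", "children": [
      {"type": "url", "url": "http://example.org/docs", "name": "Docs"}]}
  ]}}}
""")

def parse_bookmark_node(node):
    # Yield (url, name) pairs from a node, recursing into folders.
    if node["type"] == "url":
        yield node["url"], node["name"]
    elif node["type"] == "folder":
        for child in node["children"]:
            for entry in parse_bookmark_node(child):
                yield entry

for root in sample["roots"].values():
    for url, name in parse_bookmark_node(root):
        print(url, name)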
7,253
def to_text ( self , name , ** kw ) : s = StringIO . StringIO ( ) for rds in self . rdatasets : print >> s , rds . to_text ( name , ** kw ) return s . getvalue ( ) [ : - 1 ]
Convert a node to text format .
7,254
def find_rdataset ( self , rdclass , rdtype , covers = dns . rdatatype . NONE , create = False ) : for rds in self . rdatasets : if rds . match ( rdclass , rdtype , covers ) : return rds if not create : raise KeyError rds = dns . rdataset . Rdataset ( rdclass , rdtype ) self . rdatasets . append ( rds ) return rds
Find an rdataset matching the specified properties in the current node .
7,255
def get_rdataset ( self , rdclass , rdtype , covers = dns . rdatatype . NONE , create = False ) : try : rds = self . find_rdataset ( rdclass , rdtype , covers , create ) except KeyError : rds = None return rds
Get an rdataset matching the specified properties in the current node .
7,256
def delete_rdataset ( self , rdclass , rdtype , covers = dns . rdatatype . NONE ) : rds = self . get_rdataset ( rdclass , rdtype , covers ) if not rds is None : self . rdatasets . remove ( rds )
Delete the rdataset matching the specified properties in the current node .
7,257
def replace_rdataset ( self , replacement ) : self . delete_rdataset ( replacement . rdclass , replacement . rdtype , replacement . covers ) self . rdatasets . append ( replacement )
Replace an rdataset .
7,258
def poll ( self ) : recv_list = [ self . server_fileno ] for client in self . clients . values ( ) : if client . active : recv_list . append ( client . fileno ) else : self . on_disconnect ( client ) del self . clients [ client . fileno ] send_list = [ ] for client in self . clients . values ( ) : if client . send_pending : send_list . append ( client . fileno ) try : rlist , slist , elist = select . select ( recv_list , send_list , [ ] , self . timeout ) except select . error , err : print >> sys . stderr , ( "!! FATAL SELECT error '%d:%s'!" % ( err [ 0 ] , err [ 1 ] ) ) sys . exit ( 1 ) for sock_fileno in rlist : if sock_fileno == self . server_fileno : try : sock , addr_tup = self . server_socket . accept ( ) except socket . error , err : print >> sys . stderr , ( "!! ACCEPT error '%d:%s'." % ( err [ 0 ] , err [ 1 ] ) ) continue if self . client_count ( ) >= MAX_CONNECTIONS : print '?? Refusing new connection; maximum in use.' sock . close ( ) continue new_client = TelnetClient ( sock , addr_tup ) self . clients [ new_client . fileno ] = new_client self . on_connect ( new_client ) else : try : self . clients [ sock_fileno ] . socket_recv ( ) except BogConnectionLost : self . clients [ sock_fileno ] . deactivate ( ) for sock_fileno in slist : self . clients [ sock_fileno ] . socket_send ( )
Perform a non-blocking scan of recv and send states on the server and client connection sockets. Process new connection requests, read incoming data and send outgoing data. Sends and receives may be partial.
7,259
def add_loghandler ( handler ) : format = "%(levelname)s %(name)s %(asctime)s %(threadName)s %(message)s" handler . setFormatter ( logging . Formatter ( format ) ) logging . getLogger ( LOG_ROOT ) . addHandler ( handler ) logging . getLogger ( ) . addHandler ( handler )
Add log handler to root logger and LOG_ROOT and set formatting .
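A usage sketch with the standard logging module; LOG_ROOT is a placeholder for the package's logger namespace, which is an assumption here.

import logging

LOG_ROOT = 'linkcheck'  # placeholder namespace, assumed for this sketch

def add_loghandler(handler):
    # Attach the handler with the format string used above to both the
    # package logger and the root logger.
    fmt = "%(levelname)s %(name)s %(asctime)s %(threadName)s %(message)s"
    handler.setFormatter(logging.Formatter(fmt))
    logging.getLogger(LOG_ROOT).addHandler(handler)
    logging.getLogger().addHandler(handler)

add_loghandler(logging.StreamHandler())
logging.getLogger(LOG_ROOT).warning("example message")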
7,260
def remove_loghandler ( handler ) : logging . getLogger ( LOG_ROOT ) . removeHandler ( handler ) logging . getLogger ( ) . removeHandler ( handler )
Remove log handler from root logger and LOG_ROOT .
7,261
def set_loglevel ( loggers , level ) : if not loggers : return if 'all' in loggers : loggers = lognames . keys ( ) for key in loggers : logging . getLogger ( lognames [ key ] ) . setLevel ( level )
Set logging levels for given loggers .
7,262
def log_url ( self , url_data ) : node = self . get_node ( url_data ) if node : self . xml_starttag ( u'node' , attrs = { u"name" : u"%d" % node [ "id" ] } ) self . xml_tag ( u"label" , node [ "label" ] ) if self . has_part ( "realurl" ) : self . xml_tag ( u"url" , node [ "url" ] ) self . xml_starttag ( u"data" ) if node [ "dltime" ] >= 0 and self . has_part ( "dltime" ) : self . xml_tag ( u"dltime" , u"%f" % node [ "dltime" ] ) if node [ "size" ] >= 0 and self . has_part ( "dlsize" ) : self . xml_tag ( u"size" , u"%d" % node [ "size" ] ) if node [ "checktime" ] and self . has_part ( "checktime" ) : self . xml_tag ( u"checktime" , u"%f" % node [ "checktime" ] ) if self . has_part ( "extern" ) : self . xml_tag ( u"extern" , u"%d" % node [ "extern" ] ) self . xml_endtag ( u"data" ) self . xml_endtag ( u"node" )
Write one node and all possible edges .
7,263
def end_output ( self , ** kwargs ) : self . xml_endtag ( u"graph" ) self . xml_endtag ( u"GraphXML" ) self . xml_end_output ( ) self . close_fileoutput ( )
Finish graph output and print end of checking info as xml comment .
7,264
def from_text ( text ) : value = _by_text . get ( text . upper ( ) ) if value is None : match = _unknown_class_pattern . match ( text ) if match == None : raise UnknownRdataclass value = int ( match . group ( 1 ) ) if value < 0 or value > 65535 : raise ValueError ( "class must be between >= 0 and <= 65535" ) return value
Convert text into a DNS rdata class value .
7,265
def to_text ( value ) : if value < 0 or value > 65535 : raise ValueError ( "class must be between >= 0 and <= 65535" ) text = _by_value . get ( value ) if text is None : text = 'CLASS' + repr ( value ) return text
Convert a DNS rdata class to text .
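A usage sketch of the two converters with dnspython's dns.rdataclass module; IN maps to 1, and unknown classes use the generic CLASS<n> form.

import dns.rdataclass

print(dns.rdataclass.from_text('IN'))       # 1
print(dns.rdataclass.from_text('CLASS42'))  # 42
print(dns.rdataclass.to_text(42))           # CLASS42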
7,266
def my_on_connect ( client ) : client . send ( 'You connected from %s\n' % client . addrport ( ) ) if CLIENTS : client . send ( 'Also connected are:\n' ) for neighbor in CLIENTS : client . send ( '%s\n' % neighbor . addrport ( ) ) else : client . send ( 'Sadly, you are alone.\n' ) CLIENTS . append ( client )
Example on_connect handler .
7,267
def _reset ( self ) : self . entries = [ ] self . default_entry = None self . disallow_all = False self . allow_all = False self . last_checked = 0 self . sitemap_urls = [ ]
Reset internal flags and entry lists .
7,268
def set_url ( self , url ) : self . url = url self . host , self . path = urlparse . urlparse ( url ) [ 1 : 3 ]
Set the URL referring to a robots.txt file.
7,269
def read ( self ) : self . _reset ( ) kwargs = dict ( headers = { 'User-Agent' : configuration . UserAgent , 'Accept-Encoding' : ACCEPT_ENCODING , } ) if self . auth : kwargs [ "auth" ] = self . auth if self . proxies : kwargs [ "proxies" ] = self . proxies try : response = self . session . get ( self . url , ** kwargs ) response . raise_for_status ( ) content_type = response . headers . get ( 'content-type' ) if content_type and content_type . lower ( ) . startswith ( 'text/plain' ) : self . parse ( response . iter_lines ( ) ) else : log . debug ( LOG_CHECK , "%r allow all (no text content)" , self . url ) self . allow_all = True except requests . HTTPError as x : if x . response . status_code in ( 401 , 403 ) : self . disallow_all = True log . debug ( LOG_CHECK , "%r disallow all (code %d)" , self . url , x . response . status_code ) else : self . allow_all = True log . debug ( LOG_CHECK , "%r allow all (HTTP error)" , self . url ) except requests . exceptions . Timeout : raise except requests . exceptions . RequestException : self . allow_all = True log . debug ( LOG_CHECK , "%r allow all (request error)" , self . url )
Read the robots.txt URL and feed it to the parser.
7,270
def _add_entry ( self , entry ) : if "*" in entry . useragents : self . default_entry = entry else : self . entries . append ( entry )
Add a parsed entry to entry list .
7,271
def can_fetch ( self , useragent , url ) : log . debug ( LOG_CHECK , "%r check allowance for:\n user agent: %r\n url: %r ..." , self . url , useragent , url ) if not isinstance ( useragent , str ) : useragent = useragent . encode ( "ascii" , "ignore" ) if not isinstance ( url , str ) : url = url . encode ( "ascii" , "ignore" ) if self . disallow_all : log . debug ( LOG_CHECK , " ... disallow all." ) return False if self . allow_all : log . debug ( LOG_CHECK , " ... allow all." ) return True url = urllib . quote ( urlparse . urlparse ( urllib . unquote ( url ) ) [ 2 ] ) or "/" for entry in self . entries : if entry . applies_to ( useragent ) : return entry . allowance ( url ) if self . default_entry is not None : return self . default_entry . allowance ( url ) log . debug ( LOG_CHECK , " ... agent not found, allow." ) return True
Using the parsed robots.txt, decide if useragent can fetch url.
7,272
def get_crawldelay ( self , useragent ) : for entry in self . entries : if entry . applies_to ( useragent ) : return entry . crawldelay return 0
Look for a configured crawl delay .
7,273
def applies_to ( self , useragent ) : if not useragent : return True useragent = useragent . lower ( ) for agent in self . useragents : if agent == '*' : return True if agent . lower ( ) in useragent : return True return False
Check if this entry applies to the specified agent .
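A tiny self-contained check of the substring matching rule above; Entry is a minimal stand-in for the parsed robots.txt entry class, and the user-agent strings are examples.

class Entry:
    def __init__(self, useragents):
        self.useragents = useragents

    def applies_to(self, useragent):
        # '*' matches everyone; otherwise the agent token must occur
        # (case-insensitively) inside the client's User-Agent string.
        if not useragent:
            return True
        useragent = useragent.lower()
        return any(agent == '*' or agent.lower() in useragent
                   for agent in self.useragents)

entry = Entry(['LinkChecker'])
print(entry.applies_to('Mozilla/5.0 (compatible; LinkChecker/9.3)'))  # True
print(entry.applies_to('Googlebot/2.1'))                              # False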
7,274
def add_result ( self , key , result ) : if len ( self . cache ) > self . max_size : return if key is not None : self . cache [ key ] = result
Add result object to cache with given key. The request is ignored when the cache is already full or the key is None.
7,275
def filter_tree ( tree ) : to_remove = [ ] for elem in tree . findall ( 'urldata' ) : valid = elem . find ( 'valid' ) if valid is not None and valid . text == '0' and valid . attrib . get ( 'result' , '' ) . startswith ( '401' ) : to_remove . append ( elem ) root = tree . getroot ( ) for elem in to_remove : root . remove ( elem )
Filter all 401 errors .
7,276
def _connect_nntp ( self , nntpserver ) : tries = 0 nntp = None while tries < 2 : tries += 1 try : nntp = nntplib . NNTP ( nntpserver , usenetrc = False ) except nntplib . NNTPTemporaryError : self . wait ( ) except nntplib . NNTPPermanentError as msg : if re . compile ( "^50[45]" ) . search ( str ( msg ) ) : self . wait ( ) else : raise if nntp is None : raise LinkCheckerError ( _ ( "NNTP server too busy; tried more than %d times." ) % tries ) if log . is_debug ( LOG_CHECK ) : nntp . set_debuglevel ( 1 ) self . add_info ( nntp . getwelcome ( ) ) return nntp
This is done only once per checking task. Also the newly introduced error codes 504 and 505 (both indicating 'Too busy, retry later') are caught.
7,277
def init_fileoutput ( self , args ) : super ( TextLogger , self ) . init_fileoutput ( args ) if self . fd is not None : self . fd = ansicolor . Colorizer ( self . fd )
Colorize file output if possible .
7,278
def start_fileoutput ( self ) : init_color = self . fd is None super ( TextLogger , self ) . start_fileoutput ( ) if init_color : self . fd = ansicolor . Colorizer ( self . fd )
Needed to make file descriptor color aware .
7,279
def start_output ( self ) : super ( TextLogger , self ) . start_output ( ) if self . has_part ( 'intro' ) : self . write_intro ( ) self . flush ( )
Write generic start checking info .
7,280
def write_intro ( self ) : self . writeln ( configuration . AppInfo ) self . writeln ( configuration . Freeware ) self . writeln ( _ ( "Get the newest version at %(url)s" ) % { 'url' : configuration . Url } ) self . writeln ( _ ( "Write comments and bugs to %(url)s" ) % { 'url' : configuration . SupportUrl } ) self . writeln ( _ ( "Support this project at %(url)s" ) % { 'url' : configuration . DonateUrl } ) self . check_date ( ) self . writeln ( ) self . writeln ( _ ( "Start checking at %s" ) % strformat . strtime ( self . starttime ) )
Log introduction text .
7,281
def log_url ( self , url_data ) : self . writeln ( ) if self . has_part ( 'url' ) : self . write_url ( url_data ) if url_data . name and self . has_part ( 'name' ) : self . write_name ( url_data ) if url_data . parent_url and self . has_part ( 'parenturl' ) : self . write_parent ( url_data ) if url_data . base_ref and self . has_part ( 'base' ) : self . write_base ( url_data ) if url_data . url and self . has_part ( 'realurl' ) : self . write_real ( url_data ) if url_data . checktime and self . has_part ( 'checktime' ) : self . write_checktime ( url_data ) if url_data . dltime >= 0 and self . has_part ( 'dltime' ) : self . write_dltime ( url_data ) if url_data . size >= 0 and self . has_part ( 'dlsize' ) : self . write_size ( url_data ) if url_data . info and self . has_part ( 'info' ) : self . write_info ( url_data ) if url_data . modified and self . has_part ( 'modified' ) : self . write_modified ( url_data ) if url_data . warnings and self . has_part ( 'warning' ) : self . write_warning ( url_data ) if self . has_part ( 'result' ) : self . write_result ( url_data ) self . flush ( )
Write url checking info .
7,282
def write_id ( self ) : self . writeln ( ) self . write ( self . part ( 'id' ) + self . spaces ( 'id' ) ) self . writeln ( u"%d" % self . stats . number , color = self . colorinfo )
Write unique ID of url_data .
7,283
def write_warning ( self , url_data ) : self . write ( self . part ( "warning" ) + self . spaces ( "warning" ) ) warning_msgs = [ u"[%s] %s" % x for x in url_data . warnings ] self . writeln ( self . wrap ( warning_msgs , 65 ) , color = self . colorwarning )
Write url_data.warnings.
7,284
def write_outro ( self , interrupt = False ) : self . writeln ( ) if interrupt : self . writeln ( _ ( "The check has been interrupted; results are not complete." ) ) self . write ( _ ( "That's it." ) + " " ) self . write ( _n ( "%d link" , "%d links" , self . stats . number ) % self . stats . number ) self . write ( u" " ) if self . stats . num_urls is not None : self . write ( _n ( "in %d URL" , "in %d URLs" , self . stats . num_urls ) % self . stats . num_urls ) self . write ( u" checked. " ) warning_text = _n ( "%d warning found" , "%d warnings found" , self . stats . warnings_printed ) % self . stats . warnings_printed if self . stats . warnings_printed : warning_color = self . colorwarning else : warning_color = self . colorinfo self . write ( warning_text , color = warning_color ) if self . stats . warnings != self . stats . warnings_printed : self . write ( _ ( " (%d ignored or duplicates not printed)" ) % ( self . stats . warnings - self . stats . warnings_printed ) ) self . write ( u". " ) error_text = _n ( "%d error found" , "%d errors found" , self . stats . errors_printed ) % self . stats . errors_printed if self . stats . errors_printed : error_color = self . colorinvalid else : error_color = self . colorvalid self . write ( error_text , color = error_color ) if self . stats . errors != self . stats . errors_printed : self . write ( _ ( " (%d duplicates not printed)" ) % ( self . stats . errors - self . stats . errors_printed ) ) self . writeln ( u"." ) num = self . stats . internal_errors if num : self . writeln ( _n ( "There was %(num)d internal error." , "There were %(num)d internal errors." , num ) % { "num" : num } ) self . stoptime = time . time ( ) duration = self . stoptime - self . starttime self . writeln ( _ ( "Stopped checking at %(time)s (%(duration)s)" ) % { "time" : strformat . strtime ( self . stoptime ) , "duration" : strformat . strduration_long ( duration ) } )
Write end of checking message .
7,285
def write_stats ( self ) : self . writeln ( ) self . writeln ( _ ( "Statistics:" ) ) if self . stats . downloaded_bytes is not None : self . writeln ( _ ( "Downloaded: %s." ) % strformat . strsize ( self . stats . downloaded_bytes ) ) if self . stats . number > 0 : self . writeln ( _ ( "Content types: %(image)d image, %(text)d text, %(video)d video, " "%(audio)d audio, %(application)d application, %(mail)d mail" " and %(other)d other." ) % self . stats . link_types ) self . writeln ( _ ( "URL lengths: min=%(min)d, max=%(max)d, avg=%(avg)d." ) % dict ( min = self . stats . min_url_length , max = self . stats . max_url_length , avg = self . stats . avg_url_length ) ) else : self . writeln ( _ ( "No statistics available since no URLs were checked." ) )
Write check statistic info .
7,286
def end_output ( self , ** kwargs ) : self . stats . downloaded_bytes = kwargs . get ( "downloaded_bytes" ) self . stats . num_urls = kwargs . get ( "num_urls" ) if self . has_part ( 'stats' ) : self . write_stats ( ) if self . has_part ( 'outro' ) : self . write_outro ( interrupt = kwargs . get ( "interrupt" ) ) self . close_fileoutput ( )
Write end of output info and flush all output buffers .
7,287
def url_fix_host ( urlparts ) : if not urlparts [ 1 ] : urlparts [ 2 ] = urllib . unquote ( urlparts [ 2 ] ) return False userpass , netloc = urllib . splituser ( urlparts [ 1 ] ) if userpass : userpass = urllib . unquote ( userpass ) netloc , is_idn = idna_encode ( urllib . unquote ( netloc ) . lower ( ) ) i = netloc . find ( "\\" ) if i != - 1 : comps = netloc [ i : ] if not urlparts [ 2 ] or urlparts [ 2 ] == '/' : urlparts [ 2 ] = comps else : urlparts [ 2 ] = "%s%s" % ( comps , urllib . unquote ( urlparts [ 2 ] ) ) netloc = netloc [ : i ] else : i = netloc . find ( "?" ) if i != - 1 : netloc , urlparts [ 3 ] = netloc . split ( '?' , 1 ) urlparts [ 2 ] = urllib . unquote ( urlparts [ 2 ] ) if userpass : userpass += "@" else : userpass = "" if urlparts [ 0 ] in default_ports : dport = default_ports [ urlparts [ 0 ] ] host , port = splitport ( netloc , port = dport ) if host . endswith ( "." ) : host = host [ : - 1 ] if port != dport : host = "%s:%d" % ( host , port ) netloc = host urlparts [ 1 ] = userpass + netloc return is_idn
Unquote and fix hostname. Returns is_idn.
7,288
def url_fix_common_typos ( url ) : if url . startswith ( "http//" ) : url = "http://" + url [ 6 : ] elif url . startswith ( "https//" ) : url = "https://" + url [ 7 : ] return url
Fix common typos in the given URL, like a forgotten colon.
7,289
def url_parse_query ( query , encoding = None ) : if isinstance ( query , unicode ) : if encoding is None : encoding = url_encoding query = query . encode ( encoding , 'ignore' ) append = "" while '?' in query : query , rest = query . rsplit ( '?' , 1 ) append = '?' + url_parse_query ( rest ) + append l = [ ] for k , v , sep in parse_qsl ( query , keep_blank_values = True ) : k = url_quote_part ( k , '/-:,;' ) if v : v = url_quote_part ( v , '/-:,;' ) l . append ( "%s=%s%s" % ( k , v , sep ) ) elif v is None : l . append ( "%s%s" % ( k , sep ) ) else : l . append ( "%s=%s" % ( k , sep ) ) return '' . join ( l ) + append
Parse and re - join the given CGI query .
7,290
def urlunsplit ( urlparts ) : res = urlparse . urlunsplit ( urlparts ) if os . name == 'nt' and urlparts [ 0 ] == 'file' and '|' not in urlparts [ 2 ] : repl = 'file://' if urlparts [ 2 ] . startswith ( '//' ) else 'file:/' res = res . replace ( 'file:' , repl ) return res
Same as urlparse.urlunsplit but with extra UNC path handling for Windows OS.
7,291
def url_quote ( url ) : if not url_is_absolute ( url ) : return document_quote ( url ) urlparts = list ( urlparse . urlsplit ( url ) ) urlparts [ 0 ] = url_quote_part ( urlparts [ 0 ] ) urlparts [ 1 ] = url_quote_part ( urlparts [ 1 ] , ':' ) urlparts [ 2 ] = url_quote_part ( urlparts [ 2 ] , '/=,' ) urlparts [ 3 ] = url_quote_part ( urlparts [ 3 ] , '&=,' ) l = [ ] for k , v , sep in parse_qsl ( urlparts [ 3 ] , True ) : k = url_quote_part ( k , '/-:,;' ) if v : v = url_quote_part ( v , '/-:,;' ) l . append ( "%s=%s%s" % ( k , v , sep ) ) else : l . append ( "%s%s" % ( k , sep ) ) urlparts [ 3 ] = '' . join ( l ) urlparts [ 4 ] = url_quote_part ( urlparts [ 4 ] ) return urlunsplit ( urlparts )
Quote given URL .
7,292
def document_quote ( document ) : doc , query = urllib . splitquery ( document ) doc = url_quote_part ( doc , '/=,' ) if query : return "%s?%s" % ( doc , query ) return doc
Quote given document .
7,293
def match_host ( host , domainlist ) : if not host : return False for domain in domainlist : if domain . startswith ( '.' ) : if host . endswith ( domain ) : return True elif host == domain : return True return False
Return True if host matches an entry in given domain list .
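A short illustration of the leading-dot convention in the domain list; the hostnames are examples.

def match_host(host, domainlist):
    # Entries starting with '.' match any subdomain; other entries
    # must match the host exactly.
    if not host:
        return False
    for domain in domainlist:
        if domain.startswith('.'):
            if host.endswith(domain):
                return True
        elif host == domain:
            return True
    return False

domains = ['.example.com', 'localhost']
print(match_host('www.example.com', domains))  # True (suffix match)
print(match_host('example.com', domains))      # False (no leading-dot match)
print(match_host('localhost', domains))        # True (exact match)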
7,294
def url_unsplit ( parts ) : if parts [ 2 ] == default_ports . get ( parts [ 0 ] ) : return "%s://%s%s" % ( parts [ 0 ] , parts [ 1 ] , parts [ 3 ] ) return "%s://%s:%d%s" % parts
Rejoin URL parts to a string .
7,295
def splitport ( host , port = 0 ) : if ":" in host : shost , sport = host . split ( ":" , 1 ) iport = is_numeric_port ( sport ) if iport : host , port = shost , iport elif not sport : host = shost else : pass return host , port
Split optional port number from host. If host has no port number, the given default port is returned.
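A behaviour sketch of the port splitting above; is_numeric_port is replaced here by a simple inline check, which is an assumption about its semantics.

def splitport(host, port=0):
    # Split 'host:port'; keep the default port when the port part is
    # missing or not numeric.
    if ':' in host:
        shost, sport = host.split(':', 1)
        if sport.isdigit() and int(sport) > 0:
            host, port = shost, int(sport)
        elif not sport:
            host = shost
    return host, port

print(splitport('example.com:8080'))       # ('example.com', 8080)
print(splitport('example.com:', port=80))  # ('example.com', 80)
print(splitport('example.com', port=80))   # ('example.com', 80)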
7,296
def get_content ( url , user = None , password = None , proxy = None , data = None , addheaders = None ) : from . import configuration headers = { 'User-Agent' : configuration . UserAgent , } if addheaders : headers . update ( addheaders ) method = 'GET' kwargs = dict ( headers = headers ) if user and password : kwargs [ 'auth' ] = ( user , password ) if data : kwargs [ 'data' ] = data method = 'POST' if proxy : kwargs [ 'proxy' ] = dict ( http = proxy ) from . configuration import get_share_file try : kwargs [ "verify" ] = get_share_file ( 'cacert.pem' ) except ValueError : pass try : response = requests . request ( method , url , ** kwargs ) return response . text , response . headers except ( requests . exceptions . RequestException , requests . exceptions . BaseHTTPError ) as msg : log . warn ( LOG_CHECK , ( "Could not get content of URL %(url)s: %(msg)s." ) % { "url" : url , "msg" : str ( msg ) } ) return None , str ( msg )
Get URL content and info .
7,297
def shorten_duplicate_content_url ( url ) : if '#' in url : url = url . split ( '#' , 1 ) [ 0 ] if url . endswith ( 'index.html' ) : return url [ : - 10 ] if url . endswith ( 'index.htm' ) : return url [ : - 9 ] return url
Remove anchor part and trailing index.html from URL.
7,298
def is_duplicate_content_url ( url1 , url2 ) : if url1 == url2 : return True if url2 in url1 : url1 = shorten_duplicate_content_url ( url1 ) if not url2 . endswith ( '/' ) and url1 . endswith ( '/' ) : url2 += '/' return url1 == url2 if url1 in url2 : url2 = shorten_duplicate_content_url ( url2 ) if not url1 . endswith ( '/' ) and url2 . endswith ( '/' ) : url1 += '/' return url1 == url2 return False
Check if both URLs are allowed to point to the same content .
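A few hedged examples of the duplicate-content rule, restating both helpers above so the snippet runs on its own; the URLs are made up.

def shorten_duplicate_content_url(url):
    if '#' in url:
        url = url.split('#', 1)[0]
    if url.endswith('index.html'):
        return url[:-10]
    if url.endswith('index.htm'):
        return url[:-9]
    return url

def is_duplicate_content_url(url1, url2):
    # Two URLs may point at the same content if one is the other plus
    # an anchor, a trailing index.html/index.htm, or a trailing slash.
    if url1 == url2:
        return True
    if url2 in url1:
        url1 = shorten_duplicate_content_url(url1)
        if not url2.endswith('/') and url1.endswith('/'):
            url2 += '/'
        return url1 == url2
    if url1 in url2:
        url2 = shorten_duplicate_content_url(url2)
        if not url1.endswith('/') and url2.endswith('/'):
            url1 += '/'
        return url1 == url2
    return False

print(is_duplicate_content_url('http://example.com/a/index.html',
                               'http://example.com/a/'))  # True
print(is_duplicate_content_url('http://example.com/a#top',
                               'http://example.com/a'))   # True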
7,299
def write_file ( filename , content , backup = False , callback = None ) : f = file ( filename + ".tmp" , 'wb' ) if callback is None : f . write ( content ) else : callback ( f , content ) f . close ( ) if os . path . exists ( filename ) : os . rename ( filename , filename + ".bak" ) os . rename ( filename + ".tmp" , filename ) if not backup and os . path . exists ( filename + ".bak" ) : os . remove ( filename + ".bak" )
Overwrite a possibly existing file with new content. Do this in a manner that does not leave truncated or broken files behind.
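A Python 3 rendering of the tmp-then-rename pattern above; the original uses the Python 2 file() builtin, so this is a sketch rather than the shipped implementation, and the sample filename is invented.

import os

def write_file(filename, content, backup=False, callback=None):
    # Write to a temporary file first, keep the old file as .bak during
    # the swap, then drop the backup unless the caller wants it.
    tmp = filename + '.tmp'
    with open(tmp, 'wb') as f:
        if callback is None:
            f.write(content)
        else:
            callback(f, content)
    if os.path.exists(filename):
        os.rename(filename, filename + '.bak')
    os.rename(tmp, filename)
    if not backup and os.path.exists(filename + '.bak'):
        os.remove(filename + '.bak')

write_file('settings.json', b'{"checked": true}\n')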