idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
7,500
def write(self, s, **args):
    """Write string to the output descriptor.

    Opens the file output on demand; on I/O errors the logger output is
    disabled and the descriptor is replaced by a no-op dummy object.
    """
    if self.filename is not None:
        self.start_fileoutput()
    if self.fd is None:
        # fixed typo in original message ("unitialized")
        log.warn(LOG_CHECK, "writing to uninitialized or closed file")
    else:
        try:
            self.fd.write(s, **args)
        except IOError:
            msg = sys.exc_info()[1]
            log.warn(LOG_CHECK,
                "Could not write to output file: %s\n"
                "Disabling log output of %s", msg, self)
            self.close_fileoutput()
            # make all further writes harmless no-ops
            self.fd = dummy.Dummy()
            self.is_active = False
Write string to output descriptor . Strips control characters from string before writing .
7,501
def writeln(self, s=u"", **args):
    """Write string plus a platform newline to the output descriptor."""
    line = u"%s%s" % (s, unicode(os.linesep))
    self.write(line, **args)
Write string to output descriptor plus a newline .
7,502
def start_output(self):
    """Start log output: compute per-part indentation, reset statistics."""
    parts = Fields.keys() if self.logparts is None else self.logparts
    # widest part name plus one space determines the indent column
    self.max_indent = max(len(self.part(x)) for x in parts) + 1
    for key in parts:
        pad = self.max_indent - len(self.part(key))
        self.logspaces[key] = u" " * pad
    self.stats.reset()
    self.starttime = time.time()
Start log output .
7,503
def log_filter_url(self, url_data, do_print):
    """Update accounting data; additionally log the URL when do_print is True."""
    self.stats.log_url(url_data, do_print)
    if do_print:
        self.log_url(url_data)
Log a new url with this logger if do_print is True . Else only update accounting data .
7,504
def write_intro(self):
    """Write intro comments (creation time, project URLs)."""
    self.comment(_("created by %(app)s at %(time)s") %
                 {"app": configuration.AppName,
                  "time": strformat.strtime(self.starttime)})
    self.comment(_("Get the newest version at %(url)s") %
                 {'url': configuration.Url})
    self.comment(_("Write comments and bugs to %(url)s") %
                 {'url': configuration.SupportUrl})
    self.comment(_("Support this project at %(url)s") %
                 {'url': configuration.DonateUrl})
    self.check_date()
Write intro comments .
7,505
def write_outro(self):
    """Write outro comments (stop time and total duration)."""
    self.stoptime = time.time()
    duration = self.stoptime - self.starttime
    self.comment(_("Stopped checking at %(time)s (%(duration)s)") % {
        "time": strformat.strtime(self.stoptime),
        "duration": strformat.strduration_long(duration),
    })
Write outro comments .
7,506
def format_modified(self, modified, sep=" "):
    """Format a modification date in UTC, or return u"" when it is None."""
    if modified is None:
        return u""
    # sep separates the date part from the time part
    return modified.strftime("%Y-%m-%d{0}%H:%M:%S.%fZ".format(sep))
Format the modification date in UTC if it is not None.
7,507
def formvalue(form, key):
    """Get a scalar value for key from a WSGI form (first item if a list)."""
    field = form.get(key)
    return field[0] if isinstance(field, list) else field
Get value with given key from WSGI form .
7,508
def checklink(form=None, env=os.environ):
    """Validate the CGI form and check the given links.

    Yields encoded HTML chunks; on a form error yields the encoded error
    page and stops.
    """
    if form is None:
        form = {}
    try:
        checkform(form, env)
    except LCFormError as errmsg:
        log(env, errmsg)
        yield encode(format_error(errmsg))
        return
    out = ThreadsafeIO()
    config = get_configuration(form, out)
    url = strformat.stripurl(formvalue(form, "url"))
    aggregate = director.get_aggregate(config)
    url_data = checker.get_url_from(url, 0, aggregate, extern=(0, 0))
    aggregate.urlqueue.put(url_data)
    for chunk in start_check(aggregate, out):
        yield encode(chunk)
    out.close()
Validates the CGI form and checks the given links .
7,509
def start_check(aggregate, out):
    """Run checking in a background thread, yielding buffered output.

    Aborts the check after MAX_REQUEST_SECONDS of polling.
    """
    worker = threading.Thread(target=director.check_urls, args=(aggregate,))
    worker.start()
    sleep_seconds = 2
    run_seconds = 0
    while not aggregate.is_finished():
        yield out.get_data()
        time.sleep(sleep_seconds)
        run_seconds += sleep_seconds
        if run_seconds > MAX_REQUEST_SECONDS:
            director.abort(aggregate)
            break
    # flush any output produced after the loop ended
    yield out.get_data()
Start checking in background and write encoded output to out .
7,510
def get_configuration(form, out):
    """Initialize a CGI configuration from the submitted form values."""
    config = configuration.Configuration()
    config["recursionlevel"] = int(formvalue(form, "level"))
    config["logger"] = config.logger_new('html', fd=out, encoding=HTML_ENCODING)
    config["threads"] = 2
    if "anchors" in form:
        config["enabledplugins"].append("AnchorCheck")
    if "errors" not in form:
        config["verbose"] = True
    # restrict checking to safe URLs only
    pat = "!^%s$" % urlutil.safe_url_pattern
    config["externlinks"].append(get_link_pat(pat, strict=True))
    config.sanitize()
    return config
Initialize a CGI configuration .
7,511
def checkform(form, env):
    """Check form data; raise LCFormError on error.

    Only plain strings are used as exception text so no user-supplied
    data can end up rendered as HTML.
    """
    if "language" in form:
        lang = formvalue(form, 'language')
        if lang in _supported_langs:
            localestr = lang_locale[lang]
            try:
                locale.setlocale(locale.LC_ALL, localestr)
                init_i18n()
            except locale.Error as errmsg:
                log(env, "could not set locale %r: %s" % (localestr, errmsg))
        else:
            raise LCFormError(_("unsupported language %r") % lang)
    if "url" in form:
        url = formvalue(form, "url")
        if not url or url == "http://":
            raise LCFormError(_("empty url was given"))
        if not urlutil.is_safe_url(url):
            raise LCFormError(_("disallowed url %r was given") % url)
    else:
        raise LCFormError(_("no url was given"))
    if "level" in form:
        level = formvalue(form, "level")
        if not _is_level(level):
            raise LCFormError(_("invalid recursion level %r") % level)
    # checkbox options may only carry the value "on"
    for option in ("anchors", "errors", "intern"):
        if option in form:
            value = formvalue(form, option)
            if value != "on":
                raise LCFormError(_("invalid %s option %r") % (option, value))
Check form data; throw an exception on error. Be sure NOT to print out any user-given data as HTML code, so use only plain strings as exception text.
7,512
def dump(env, form):
    """Log all environment variables and form values."""
    for var, value in env.items():
        log(env, var + "=" + value)
    for key in form:
        log(env, str(formvalue(form, key)))
Log environment and form .
7,513
def write(self, data):
    """Append the given unicode data to the internal buffer."""
    assert isinstance(data, unicode)
    if self.closed:
        raise IOError("Write on closed I/O object")
    if data:
        self.buf.append(data)
Write given unicode data to buffer .
7,514
def get_data(self):
    """Return the buffered unicode data and clear the buffer."""
    data = u"".join(self.buf)
    self.buf = []
    return data
Get buffered unicode data.
7,515
def parse_opera(url_data):
    """Parse an Opera bookmark file for URLs."""
    from ..bookmarks.opera import parse_bookmark_data
    for url, name, lineno in parse_bookmark_data(url_data.get_content()):
        url_data.add_url(url, line=lineno, name=name)
Parse an opera bookmark file .
7,516
def parse_chromium(url_data):
    """Parse a Chromium or Google Chrome bookmark file for URLs."""
    from ..bookmarks.chromium import parse_bookmark_data
    for url, name in parse_bookmark_data(url_data.get_content()):
        url_data.add_url(url, name=name)
Parse a Chromium or Google Chrome bookmark file .
7,517
def parse_safari(url_data):
    """Parse a Safari bookmark file for URLs."""
    from ..bookmarks.safari import parse_bookmark_data
    for url, name in parse_bookmark_data(url_data.get_content()):
        url_data.add_url(url, name=name)
Parse a Safari bookmark file .
7,518
def parse_text(url_data):
    """Parse a text file with one URL per line.

    Blank lines and lines starting with '#' are ignored.
    """
    for lineno, line in enumerate(url_data.get_content().splitlines(), 1):
        line = line.strip()
        if line and not line.startswith('#'):
            url_data.add_url(line, line=lineno)
Parse a text file with one url per line ; comment and blank lines are ignored .
7,519
def parse_swf(url_data):
    """Parse a SWF file for URLs via the SWF URL regex."""
    for mo in linkparse.swf_url_re.finditer(url_data.get_content()):
        url_data.add_url(mo.group())
Parse a SWF file for URLs .
7,520
def find_links(url_data, callback, tags):
    """Parse content and report found URLs through the callback."""
    handler = linkparse.LinkFinder(callback, tags)
    parser = htmlsax.parser(handler)
    if url_data.charset:
        parser.encoding = url_data.charset
    handler.parser = parser
    try:
        parser.feed(url_data.get_content())
        parser.flush()
    except linkparse.StopParse as msg:
        log.debug(LOG_CHECK, "Stopped parsing: %s", msg)
    # break the parser<->handler reference cycle
    handler.parser = None
    parser.handler = None
Parse into content and search for URLs to check . Found URLs are added to the URL queue .
7,521
def parse_firefox(url_data):
    """Parse a Firefox 3 bookmark file for URLs."""
    filename = url_data.get_os_filename()
    for url, name in firefox.parse_bookmark_file(filename):
        url_data.add_url(url, name=name)
Parse a Firefox3 bookmark file .
7,522
def parse_itms_services(url_data):
    """Add the "url" CGI parameter value as a child URL."""
    query = url_data.urlparts[3]
    for key, value, sep in urlutil.parse_qsl(query, keep_blank_values=True):
        if key == "url":
            url_data.add_url(value)
            break
Get url CGI parameter value as child URL .
7,523
def get_package_modules(packagename):
    """Yield all importable modules of the given package.

    The package must be a folder next to this loader module; in frozen
    mode the module names are read from the enclosing zip archive.
    """
    if is_frozen():
        zipname = os.path.dirname(os.path.dirname(__file__))
        parentmodule = os.path.basename(os.path.dirname(__file__))
        with zipfile.ZipFile(zipname, 'r') as f:
            prefix = "%s/%s/" % (parentmodule, packagename)
            modnames = [os.path.splitext(n[len(prefix):])[0]
                        for n in f.namelist()
                        if n.startswith(prefix) and "__init__" not in n]
    else:
        dirname = os.path.join(os.path.dirname(__file__), packagename)
        # strip the ".py" extension
        modnames = [x[:-3] for x in get_importable_files(dirname)]
    for modname in modnames:
        try:
            name = "..%s.%s" % (packagename, modname)
            yield importlib.import_module(name, __name__)
        except ImportError as msg:
            print("WARN: could not load module %s: %s" % (modname, msg))
Find all valid modules in the given package which must be a folder in the same directory as this loader . py module . A valid module has a . py extension and is importable .
7,524
def get_lock(name, debug=False):
    """Return a new lock, optionally wrapped for debug logging."""
    lock = threading.Lock()
    return DebugLock(lock, name) if debug else lock
Get a new lock .
7,525
def get_semaphore(name, value=None, debug=False):
    """Return a new semaphore (bounded when a value is given)."""
    if value is None:
        sem = threading.Semaphore()
    else:
        sem = threading.BoundedSemaphore(value)
    return DebugLock(sem, name) if debug else sem
Get a new semaphore .
7,526
def acquire(self, blocking=1):
    """Acquire the wrapped lock, logging before and after.

    Bug fix: the original logged "...acquired" unconditionally, which is
    misleading when a non-blocking acquire fails. The underlying result
    is now checked and returned so non-blocking callers can detect
    failure (threading locks return a boolean from acquire()).
    """
    threadname = threading.currentThread().getName()
    log.debug(LOG_THREAD, "Acquire %s for %s", self.name, threadname)
    got = self.lock.acquire(blocking)
    if got:
        log.debug(LOG_THREAD, "...acquired %s for %s", self.name, threadname)
    else:
        log.debug(LOG_THREAD, "...failed to acquire %s for %s",
                  self.name, threadname)
    return got
Acquire lock .
7,527
def read_multiline(value):
    """Yield stripped, non-empty, non-comment lines of a multiline value."""
    for raw in value.splitlines():
        line = raw.strip()
        if line and not line.startswith('#'):
            yield line
Helper function reading multiline values .
7,528
def read_string_option(self, section, option, allowempty=False):
    """Read a string option into the configuration.

    Raises LinkCheckerError on an empty value unless allowempty is True.
    """
    if self.has_option(section, option):
        value = self.get(section, option)
        if not allowempty and not value:
            raise LinkCheckerError(
                _("invalid empty value for %s: %s\n") % (option, value))
        self.config[option] = value
Read a string option .
7,529
def read_boolean_option(self, section, option):
    """Read a boolean option into the configuration if present."""
    if self.has_option(section, option):
        self.config[option] = self.getboolean(section, option)
Read a boolean option .
7,530
def read_int_option(self, section, option, key=None, min=None, max=None):
    """Read an integer option, validating optional min/max bounds.

    Bug fix: the maximum check used ``num < max``, so the "must not be
    greater" error could never trigger (and values inside the valid
    range would raise instead); it now correctly uses ``num > max``.
    """
    if self.has_option(section, option):
        num = self.getint(section, option)
        if min is not None and num < min:
            raise LinkCheckerError(
                _("invalid value for %s: %d must not be less than %d")
                % (option, num, min))
        if max is not None and num > max:
            raise LinkCheckerError(
                _("invalid value for %s: %d must not be greater than %d")
                % (option, num, max))
        if key is None:
            key = option
        self.config[key] = num
Read an integer option .
7,531
def read_output_config(self):
    """Read configuration options in section "output"."""
    section = "output"
    from ..logger import LoggerClasses
    # per-logger sections: copy all options, parse the 'parts' list
    for klass in LoggerClasses:
        key = klass.LoggerName
        if self.has_section(key):
            for opt in self.options(key):
                self.config[key][opt] = self.get(key, opt)
            if self.has_option(key, 'parts'):
                val = self.get(key, 'parts')
                self.config[key]['parts'] = [
                    f.strip().lower() for f in val.split(',')]
    self.read_boolean_option(section, "warnings")
    if self.has_option(section, "verbose") and \
            self.getboolean(section, "verbose"):
        self.config["verbose"] = True
        self.config["warnings"] = True
    if self.has_option(section, "quiet") and \
            self.getboolean(section, "quiet"):
        self.config['output'] = 'none'
        self.config['quiet'] = True
    if self.has_option(section, "debug"):
        val = self.get(section, "debug")
        logconf.set_debug([f.strip().lower() for f in val.split(',')])
    self.read_boolean_option(section, "status")
    if self.has_option(section, "log"):
        self.config['output'] = self.get(section, "log").strip().lower()
    if self.has_option(section, "fileoutput"):
        names = (x.strip().lower()
                 for x in self.get(section, "fileoutput").split(","))
        from ..logger import LoggerNames
        # blacklist and none make no sense as file outputs
        valid = (x for x in names
                 if x in LoggerNames and x not in ("blacklist", "none"))
        for val in valid:
            output = self.config.logger_new(val, fileoutput=1)
            self.config['fileoutput'].append(output)
Read configuration options in section output .
7,532
def read_checking_config(self):
    """Read configuration options in section "checking"."""
    section = "checking"
    self.read_int_option(section, "threads", min=-1)
    self.config['threads'] = max(0, self.config['threads'])
    self.read_int_option(section, "timeout", min=1)
    self.read_int_option(section, "aborttimeout", min=1)
    self.read_int_option(section, "recursionlevel", min=-1)
    self.read_string_option(section, "nntpserver")
    self.read_string_option(section, "useragent")
    self.read_int_option(section, "maxrequestspersecond", min=1)
    self.read_int_option(section, "maxnumurls", min=0)
    self.read_int_option(section, "maxfilesizeparse", min=1)
    self.read_int_option(section, "maxfilesizedownload", min=1)
    if self.has_option(section, "allowedschemes"):
        self.config['allowedschemes'] = [
            x.strip().lower()
            for x in self.get(section, 'allowedschemes').split(',')]
    self.read_boolean_option(section, "debugmemory")
    self.read_string_option(section, "cookiefile")
    self.read_string_option(section, "localwebroot")
    try:
        self.read_boolean_option(section, "sslverify")
    except ValueError:
        # sslverify may also hold a CA bundle path instead of a boolean
        self.read_string_option(section, "sslverify")
    self.read_int_option(section, "maxrunseconds", min=0)
Read configuration options in section checking .
7,533
def read_authentication_config(self):
    """Read configuration options in section "authentication"."""
    section = "authentication"
    password_fields = []
    if self.has_option(section, "entry"):
        for val in read_multiline(self.get(section, "entry")):
            auth = val.split()
            if len(auth) == 3:
                self.config.add_auth(pattern=auth[0], user=auth[1],
                                     password=auth[2])
                password_fields.append("entry/%s/%s" % (auth[0], auth[1]))
            elif len(auth) == 2:
                self.config.add_auth(pattern=auth[0], user=auth[1])
            else:
                raise LinkCheckerError(
                    _("missing auth part in entry %(val)r") % {"val": val})
    if self.has_option(section, "loginurl"):
        val = self.get(section, "loginurl").strip()
        if not (val.lower().startswith("http:")
                or val.lower().startswith("https:")):
            raise LinkCheckerError(
                _("invalid login URL `%s'. Only "
                  "HTTP and HTTPS URLs are supported.") % val)
        self.config["loginurl"] = val
    self.read_string_option(section, "loginuserfield")
    self.read_string_option(section, "loginpasswordfield")
    if self.has_option(section, "loginextrafields"):
        for val in read_multiline(self.get(section, "loginextrafields")):
            name, value = val.split(":", 1)
            self.config["loginextrafields"][name] = value
    self.check_password_readable(section, password_fields)
Read configuration options in section authentication .
7,534
def check_password_readable(self, section, fields):
    """Warn when a config file containing passwords is readable by others."""
    if not fields:
        return
    # only warn when exactly one config file was read
    if len(self.read_ok) != 1:
        return
    fn = self.read_ok[0]
    if fileutil.is_accessable_by_others(fn):
        log.warn(LOG_CHECK,
            "The configuration file %s contains password information (in section [%s] and options %s) and the file is readable by others. Please make the file only readable by you.",
            fn, section, fields)
        if os.name == 'posix':
            log.warn(LOG_CHECK,
                _("For example execute 'chmod go-rw %s'.") % fn)
        elif os.name == 'nt':
            log.warn(LOG_CHECK,
                _("See http://support.microsoft.com/kb/308419 for more info on setting file permissions."))
Check if there is a readable configuration file and print a warning .
7,535
def read_filtering_config(self):
    """Read configuration options in section "filtering"."""
    section = "filtering"
    if self.has_option(section, "ignorewarnings"):
        self.config['ignorewarnings'] = [
            f.strip().lower()
            for f in self.get(section, 'ignorewarnings').split(',')]
    if self.has_option(section, "ignore"):
        for line in read_multiline(self.get(section, "ignore")):
            self.config["externlinks"].append(get_link_pat(line, strict=1))
    if self.has_option(section, "nofollow"):
        for line in read_multiline(self.get(section, "nofollow")):
            self.config["externlinks"].append(get_link_pat(line, strict=0))
    if self.has_option(section, "internlinks"):
        pat = get_link_pat(self.get(section, "internlinks"))
        self.config["internlinks"].append(pat)
    self.read_boolean_option(section, "checkextern")
Read configuration options in section filtering .
7,536
def read_plugin_config(self):
    """Read plugin-specific configuration values.

    A plugin is enabled when a section with its class name exists.
    """
    folders = self.config["pluginfolders"]
    modules = plugins.get_plugin_modules(folders)
    for pluginclass in plugins.get_plugin_classes(modules):
        section = pluginclass.__name__
        if self.has_section(section):
            self.config["enabledplugins"].append(section)
            self.config[section] = pluginclass.read_config(self)
Read plugin - specific configuration values .
7,537
def check(self, url_data):
    """Search the content for the configured warning regex and add a warning."""
    log.debug(LOG_PLUGIN, "checking content for warning regex")
    content = url_data.get_content()
    match = self.warningregex.search(content)
    if match:
        # NOTE(review): count of newlines before the match start is
        # zero-based; confirm whether callers expect 1-based line numbers.
        line = content.count('\n', 0, match.start())
        msg = _("Found %(match)r at line %(line)d in link contents.")
        url_data.add_warning(msg % {"match": match.group(), "line": line})
Check content .
7,538
def _set_section ( self , section ) : if self . section != section : if self . section > section : raise dns . exception . FormError self . section = section
Set the renderer's current section.
7,539
def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN):
    """Add a question to the message, rolling back if it gets too big."""
    self._set_section(QUESTION)
    before = self.output.tell()
    qname.to_wire(self.output, self.compress, self.origin)
    self.output.write(struct.pack("!HH", rdtype, rdclass))
    after = self.output.tell()
    if after >= self.max_size:
        self._rollback(before)
        raise dns.exception.TooBig
    self.counts[QUESTION] += 1
Add a question to the message .
7,540
def add_rrset(self, section, rrset, **kw):
    """Add the rrset to the specified section, rolling back on overflow."""
    self._set_section(section)
    before = self.output.tell()
    n = rrset.to_wire(self.output, self.compress, self.origin, **kw)
    after = self.output.tell()
    if after >= self.max_size:
        self._rollback(before)
        raise dns.exception.TooBig
    self.counts[section] += n
Add the rrset to the specified section .
7,541
def add_rdataset(self, section, name, rdataset, **kw):
    """Add the rdataset to the section with the given owner name."""
    self._set_section(section)
    before = self.output.tell()
    n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw)
    after = self.output.tell()
    if after >= self.max_size:
        self._rollback(before)
        raise dns.exception.TooBig
    self.counts[section] += n
Add the rdataset to the specified section using the specified name as the owner name .
7,542
def add_edns(self, edns, ednsflags, payload, options=None):
    """Add an EDNS OPT record to the message.

    Fixes: the flag mask used the Python-2-only long literal
    ``0xFF00FFFFL`` (a plain int literal suffices and is forward
    compatible), and ``not options is None`` is replaced by the
    idiomatic ``options is not None``.
    """
    # clear the EDNS version bits, then set them from edns
    ednsflags &= 0xFF00FFFF
    ednsflags |= (edns << 16)
    self._set_section(ADDITIONAL)
    before = self.output.tell()
    # OPT RR: root name (0), type OPT, class=payload size, ttl=flags,
    # rdlen written as 0 and patched below
    self.output.write(struct.pack('!BHHIH', 0, dns.rdatatype.OPT,
                                  payload, ednsflags, 0))
    if options is not None:
        lstart = self.output.tell()
        for opt in options:
            stuff = struct.pack("!HH", opt.otype, 0)
            self.output.write(stuff)
            start = self.output.tell()
            opt.to_wire(self.output)
            end = self.output.tell()
            assert end - start < 65536
            # patch the per-option length
            self.output.seek(start - 2)
            self.output.write(struct.pack("!H", end - start))
            self.output.seek(0, 2)
        lend = self.output.tell()
        assert lend - lstart < 65536
        # patch the OPT rdata length
        self.output.seek(lstart - 2)
        self.output.write(struct.pack("!H", lend - lstart))
        self.output.seek(0, 2)
    after = self.output.tell()
    if after >= self.max_size:
        self._rollback(before)
        raise dns.exception.TooBig
    self.counts[ADDITIONAL] += 1
Add an EDNS OPT record to the message .
7,543
def add_tsig(self, keyname, secret, fudge, id, tsig_error, other_data,
             request_mac, algorithm=dns.tsig.default_algorithm):
    """Add a TSIG signature record to the message."""
    self._set_section(ADDITIONAL)
    before = self.output.tell()
    s = self.output.getvalue()
    (tsig_rdata, self.mac, ctx) = dns.tsig.sign(
        s, keyname, secret, int(time.time()), fudge, id, tsig_error,
        other_data, request_mac, algorithm=algorithm)
    keyname.to_wire(self.output, self.compress, self.origin)
    # TSIG RR header: type, class ANY, ttl 0, rdlen 0 (patched below)
    self.output.write(struct.pack('!HHIH', dns.rdatatype.TSIG,
                                  dns.rdataclass.ANY, 0, 0))
    rdata_start = self.output.tell()
    self.output.write(tsig_rdata)
    after = self.output.tell()
    assert after - rdata_start < 65536
    if after >= self.max_size:
        self._rollback(before)
        raise dns.exception.TooBig
    # patch the rdata length written as 0 above
    self.output.seek(rdata_start - 2)
    self.output.write(struct.pack('!H', after - rdata_start))
    self.counts[ADDITIONAL] += 1
    # update ARCOUNT in the header (offset 10)
    self.output.seek(10)
    self.output.write(struct.pack('!H', self.counts[ADDITIONAL]))
    self.output.seek(0, 2)
Add a TSIG signature to the message .
7,544
def write_header(self):
    """Encode and write the DNS message header at the start of the buffer."""
    self.output.seek(0)
    self.output.write(struct.pack('!HHHHHH', self.id, self.flags,
                                  self.counts[0], self.counts[1],
                                  self.counts[2], self.counts[3]))
    # return to the end of the buffer
    self.output.seek(0, 2)
Write the DNS message header .
7,545
def option_from_wire(otype, wire, current, olen):
    """Build an EDNS option object of the right class from wire format."""
    cls = get_option_class(otype)
    return cls.from_wire(otype, wire, current, olen)
Build an EDNS option object from wire format
7,546
def comment(self, s, **args):
    """Write a CSV comment line (prefixed with '# ')."""
    self.writeln(s=u"# %s" % s, **args)
Write CSV comment .
7,547
def start_output(self):
    """Write checking start info as csv comment and the column header row."""
    super(CSVLogger, self).start_output()
    if self.has_part("intro"):
        self.write_intro()
        self.flush()
    else:
        self.write(u"")
    # csv rows are rendered through an in-memory queue
    self.queue = StringIO()
    self.writer = csv.writer(self.queue, dialect=self.dialect,
                             delimiter=self.separator,
                             lineterminator=self.linesep,
                             quotechar=self.quotechar)
    row = [s for s in Columns if self.has_part(s)]
    if row:
        self.writerow(row)
Write checking start info as csv comment .
7,548
def log_url(self, url_data):
    """Write one csv-formatted row of URL check info.

    Only the configured parts are emitted, in the canonical column order.
    """
    row = []
    if self.has_part("urlname"):
        row.append(url_data.base_url)
    if self.has_part("parentname"):
        row.append(url_data.parent_url)
    if self.has_part("baseref"):
        row.append(url_data.base_ref)
    if self.has_part("result"):
        row.append(url_data.result)
    if self.has_part("warningstring"):
        row.append(self.linesep.join(x[1] for x in url_data.warnings))
    if self.has_part("infostring"):
        row.append(self.linesep.join(url_data.info))
    if self.has_part("valid"):
        row.append(url_data.valid)
    if self.has_part("url"):
        row.append(url_data.url)
    if self.has_part("line"):
        row.append(url_data.line)
    if self.has_part("column"):
        row.append(url_data.column)
    if self.has_part("name"):
        row.append(url_data.name)
    if self.has_part("dltime"):
        row.append(url_data.dltime)
    if self.has_part("dlsize"):
        row.append(url_data.size)
    if self.has_part("checktime"):
        row.append(url_data.checktime)
    if self.has_part("cached"):
        # legacy column, always zero
        row.append(0)
    if self.has_part("level"):
        row.append(url_data.level)
    if self.has_part("modified"):
        row.append(self.format_modified(url_data.modified))
    self.writerow(map(strformat.unicode_safe, row))
    self.flush()
Write csv formatted url check info .
7,549
def check_urls(urlqueue, logger):
    """Check all queued URLs sequentially (no threading)."""
    while not urlqueue.empty():
        url_data = urlqueue.get()
        try:
            check_url(url_data, logger)
        finally:
            urlqueue.task_done(url_data)
Check URLs without threading .
7,550
def check_url(url_data, logger):
    """Check a single URL with logging, consulting the result cache."""
    if url_data.has_result:
        logger.log_url(url_data.to_wire())
        return
    cache = url_data.aggregate.result_cache
    key = url_data.cache_url
    result = cache.get_result(key)
    if result is None:
        check_start = time.time()
        try:
            url_data.check()
            do_parse = url_data.check_content()
            url_data.checktime = time.time() - check_start
            result = url_data.to_wire()
            cache.add_result(key, result)
            for alias in url_data.aliases:
                cache.add_result(alias, result)
            if do_parse:
                parser.parse_url(url_data)
        finally:
            url_data.close_connection()
    else:
        # reuse the cached result but fix up per-occurrence fields
        result = copy.copy(result)
        result.parent_url = url_data.parent_url
        result.base_ref = url_data.base_ref or u""
        result.base_url = url_data.base_url or u""
        result.line = url_data.line
        result.column = url_data.column
        result.level = url_data.recursion_level
        result.name = url_data.name
    logger.log_url(result)
Check a single URL with logging .
7,551
def check_url(self):
    """Try to get URL data from the queue and check it.

    An empty queue is not an error; any other exception is reported as an
    internal error.
    """
    try:
        url_data = self.urlqueue.get(timeout=QUEUE_POLL_INTERVALL_SECS)
        if url_data is not None:
            try:
                self.check_url_data(url_data)
            finally:
                self.urlqueue.task_done(url_data)
            # restore the original thread name after checking
            self.setName(self.origname)
    except urlqueue.Empty:
        pass
    except Exception:
        self.internal_error()
Try to get URL data from queue and check it .
7,552
def check_url_data(self, url_data):
    """Check one URL data instance, naming the thread after the URL."""
    if url_data.url is None:
        url = ""
    else:
        url = url_data.url.encode("ascii", "replace")
    self.setName("CheckThread-%s" % url)
    check_url(url_data, self.logger)
Check one URL data instance .
7,553
def udp(q, where, timeout=None, port=53, af=None, source=None,
        source_port=0, ignore_unexpected=False, one_rr_per_rrset=False):
    """Return the response obtained after sending query q via UDP."""
    wire = q.to_wire()
    if af is None:
        try:
            af = dns.inet.af_for_address(where)
        except Exception:
            af = dns.inet.AF_INET
    if af == dns.inet.AF_INET:
        destination = (where, port)
        if source is not None:
            source = (source, source_port)
    elif af == dns.inet.AF_INET6:
        destination = (where, port, 0, 0)
        if source is not None:
            source = (source, source_port, 0, 0)
    s = socket.socket(af, socket.SOCK_DGRAM, 0)
    try:
        expiration = _compute_expiration(timeout)
        s.setblocking(0)
        if source is not None:
            s.bind(source)
        _wait_for_writable(s, expiration)
        s.sendto(wire, destination)
        # loop until a response from the expected source arrives
        while 1:
            _wait_for_readable(s, expiration)
            (wire, from_address) = s.recvfrom(65535)
            if _addresses_equal(af, from_address, destination) or \
                    (dns.inet.is_multicast(where) and
                     from_address[1:] == destination[1:]):
                break
            if not ignore_unexpected:
                raise UnexpectedSource('got a response from '
                                       '%s instead of %s'
                                       % (from_address, destination))
    finally:
        s.close()
    r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
                              one_rr_per_rrset=one_rr_per_rrset)
    if not q.is_response(r):
        raise BadResponse
    return r
Return the response obtained after sending a query via UDP .
7,554
def _net_read(sock, count, expiration):
    """Read exactly count bytes from sock, retrying until done.

    Raises EOFError on EOF; a timeout is expected to be raised by
    _wait_for_readable past the expiration time.
    """
    data = ''
    while count > 0:
        _wait_for_readable(sock, expiration)
        chunk = sock.recv(count)
        if chunk == '':
            raise EOFError
        count -= len(chunk)
        data += chunk
    return data
Read the specified number of bytes from sock . Keep trying until we either get the desired amount or we hit EOF . A Timeout exception will be raised if the operation is not completed by the expiration time .
7,555
def _net_write(sock, data, expiration):
    """Write all of data to the socket, retrying partial sends.

    A timeout is expected to be raised by _wait_for_writable past the
    expiration time.
    """
    sent = 0
    total = len(data)
    while sent < total:
        _wait_for_writable(sock, expiration)
        sent += sock.send(data[sent:])
Write the specified data to the socket . A Timeout exception will be raised if the operation is not completed by the expiration time .
7,556
def tcp(q, where, timeout=None, port=53, af=None, source=None,
        source_port=0, one_rr_per_rrset=False):
    """Return the response obtained after sending query q via TCP."""
    wire = q.to_wire()
    if af is None:
        try:
            af = dns.inet.af_for_address(where)
        except Exception:
            af = dns.inet.AF_INET
    if af == dns.inet.AF_INET:
        destination = (where, port)
        if source is not None:
            source = (source, source_port)
    elif af == dns.inet.AF_INET6:
        destination = (where, port, 0, 0)
        if source is not None:
            source = (source, source_port, 0, 0)
    s = socket.socket(af, socket.SOCK_STREAM, 0)
    try:
        expiration = _compute_expiration(timeout)
        s.setblocking(0)
        if source is not None:
            s.bind(source)
        _connect(s, destination)
        # DNS over TCP is length-prefixed with a 16-bit big-endian size
        tcpmsg = struct.pack("!H", len(wire)) + wire
        _net_write(s, tcpmsg, expiration)
        ldata = _net_read(s, 2, expiration)
        (l,) = struct.unpack("!H", ldata)
        wire = _net_read(s, l, expiration)
    finally:
        s.close()
    r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
                              one_rr_per_rrset=one_rr_per_rrset)
    if not q.is_response(r):
        raise BadResponse
    return r
Return the response obtained after sending a query via TCP .
7,557
def from_text(text):
    """Convert text into a DNS rdata type value.

    Fix: the unknown-type match was compared with ``== None``; identity
    comparison with None must use ``is None`` (PEP 8).
    """
    value = _by_text.get(text.upper())
    if value is None:
        match = _unknown_type_pattern.match(text)
        if match is None:
            raise UnknownRdatatype
        value = int(match.group(1))
        if value < 0 or value > 65535:
            raise ValueError("type must be between >= 0 and <= 65535")
    return value
Convert text into a DNS rdata type value .
7,558
def is_metatype(rdtype):
    """Return True if the type is a metatype."""
    return (TKEY <= rdtype <= ANY) or rdtype in _metatypes
True if the type is a metatype .
7,559
def run_checked(self):
    """Print periodic status messages until stopped.

    The first status is logged after one second; subsequent ones use the
    configured wait interval.
    """
    self.start_time = time.time()
    self.setName("Status")
    wait_seconds = 1
    first_wait = True
    while not self.stopped(wait_seconds):
        self.log_status()
        if first_wait:
            wait_seconds = self.wait_seconds
            first_wait = False
Print periodic status messages .
7,560
def log_status(self):
    """Log a status message with queue statistics and elapsed time."""
    duration = time.time() - self.start_time
    checked, in_progress, queue = self.aggregator.urlqueue.status()
    num_urls = len(self.aggregator.result_cache)
    self.logger.log_status(checked, in_progress, queue, duration, num_urls)
Log a status message .
7,561
def init_mimedb():
    """Initialize the local MIME database with extra types."""
    global mimedb
    try:
        mimedb = mimetypes.MimeTypes(strict=False)
    except Exception as msg:
        log.error(LOG_CHECK, "could not initialize MIME database: %s" % msg)
        return
    add_mimetype(mimedb, 'text/plain', '.adr')
    add_mimetype(mimedb, 'application/x-httpd-php', '.php')
    add_mimetype(mimedb, 'text/vnd.wap.wml', '.wml')
Initialize the local MIME database .
7,562
def add_mimetype(mimedb, mimetype, extension):
    """Add or replace a mimetype to be used with the given extension.

    The entry goes into the strict map only when the extension is
    already registered there.
    """
    strict = extension in mimedb.types_map[True]
    mimedb.add_type(mimetype, extension, strict=strict)
Add or replace a mimetype to be used with the given extension .
7,563
def search_form(content, cgiuser, cgipassword, encoding='utf-8'):
    """Search for an HTML form in the given content.

    Returns the first form containing a field whose name matches one of
    the given CGI field names (case-insensitive), or None.

    @param content: HTML page content
    @param cgiuser: name of the user CGI field to look for
    @param cgipassword: name of the password CGI field to look for
    @param encoding: assumed encoding of the content
    """
    handler = FormFinder()
    parser = htmlsax.parser(handler)
    handler.parser = parser
    parser.encoding = encoding
    parser.feed(content)
    parser.flush()
    # Break the parser<->handler reference cycle after parsing.
    handler.parser = None
    parser.handler = None
    log.debug(LOG_CHECK, "Found forms %s", handler.forms)
    cginames = (cgiuser.lower(), cgipassword.lower())
    for form in handler.forms:
        for key, value in form.data.items():
            # Field names are matched case-insensitively.
            if key.lower() in cginames:
                return form
    return None
Search for an HTML form in the given HTML content that contains one of the given CGI field names. Return None if no matching form is found.
7,564
def end_element(self, tag):
    """On a closing form tag, record the collected form and reset state."""
    if tag != u'form':
        return
    self.forms.append(self.form)
    self.form = None
Handle a closing HTML tag; when a form element ends, store the collected form.
7,565
def init_i18n(loc=None):
    """Initialize i18n with the configured locale dir.

    The environment variable LOCPATH overrides the default locale dir.

    @param loc: locale name to use; autodetected if None
    """
    if 'LOCPATH' in os.environ:
        locdir = os.environ['LOCPATH']
    else:
        locdir = os.path.join(get_install_data(), 'share', 'locale')
    i18n.init(configdata.name.lower(), locdir, loc=loc)
    # Translate the logging level names so log output is localized too.
    import logging
    logging.addLevelName(logging.CRITICAL, _('CRITICAL'))
    logging.addLevelName(logging.ERROR, _('ERROR'))
    logging.addLevelName(logging.WARN, _('WARN'))
    logging.addLevelName(logging.WARNING, _('WARNING'))
    logging.addLevelName(logging.INFO, _('INFO'))
    logging.addLevelName(logging.DEBUG, _('DEBUG'))
    logging.addLevelName(logging.NOTSET, _('NOTSET'))
Initialize i18n with the configured locale dir . The environment variable LOCPATH can also specify a locale dir .
7,566
def drop_privileges():
    """Make sure to drop root privileges on POSIX systems.

    If running with an effective UID of 0, switch the effective user
    to 'nobody'. No-op on non-POSIX platforms.
    """
    if os.name != 'posix':
        return
    if os.geteuid() == 0:
        log.warn(LOG_CHECK, _("Running as root user; "
                 "dropping privileges by changing user to nobody."))
        import pwd
        # BUG FIX: the original used getpwnam('nobody')[3], which is
        # the group id (pw_gid); seteuid() needs the user id (pw_uid).
        os.seteuid(pwd.getpwnam('nobody').pw_uid)
Make sure to drop root privileges on POSIX systems .
7,567
def find_third_party_modules():
    """Add bundled third party modules to the python path if present."""
    grandparent = os.path.dirname(os.path.dirname(__file__))
    third_party = os.path.join(grandparent, "third_party")
    if not os.path.isdir(third_party):
        return
    sys.path.append(os.path.join(third_party, "dnspython"))
Find third party modules and add them to the python path .
7,568
def get_plist_data_from_file(filename):
    """Parse plist data from a file.

    Tries biplist first, falling back to plistlib. Returns an empty
    dict if neither parser can read the data.
    """
    if has_biplist:
        try:
            return biplist.readPlist(filename)
        except Exception:
            # BUG FIX: the original never fell back to plistlib even
            # though the documented contract promises it; errors from
            # biplist propagated to the caller instead.
            pass
    try:
        return plistlib.readPlist(filename)
    except Exception:
        return {}
Parse plist data for a file . Tries biplist falling back to plistlib .
7,569
def get_plist_data_from_string(data):
    """Parse plist data from a string.

    Tries biplist first, falling back to plistlib. Returns an empty
    dict if neither parser can read the data.
    """
    if has_biplist:
        try:
            return biplist.readPlistFromString(data)
        except Exception:
            # BUG FIX: fall back to plistlib as documented instead of
            # propagating biplist parse errors.
            pass
    try:
        return plistlib.readPlistFromString(data)
    except Exception:
        return {}
Parse plist data for a string . Tries biplist falling back to plistlib .
7,570
def parse_plist(entry):
    """Recursively yield (url, title) pairs from a plist bookmark entry.

    The title falls back to the URL itself when missing.
    """
    if is_leaf(entry):
        url = entry[KEY_URLSTRING]
        title = entry[KEY_URIDICTIONARY].get('title', url)
        yield (url, title)
    elif has_children(entry):
        for child in entry[KEY_CHILDREN]:
            for pair in parse_plist(child):
                yield pair
Parse a XML dictionary entry .
7,571
def algorithm_from_text(text):
    """Convert text into a DNSSEC algorithm value.

    Unknown mnemonics are interpreted as plain integers.
    """
    try:
        return _algorithm_by_text[text.upper()]
    except KeyError:
        return int(text)
Convert text into a DNSSEC algorithm value
7,572
def algorithm_to_text(value):
    """Convert a DNSSEC algorithm value to text.

    Unknown values are rendered as their decimal string.
    """
    text = _algorithm_by_value.get(value)
    return str(value) if text is None else text
Convert a DNSSEC algorithm value to text
7,573
def _validate(rrset, rrsigset, keys, origin=None, now=None):
    """Validate an RRset against its RRSIGs.

    @param rrset: the RRset (or a (name, rdataset) tuple)
    @param rrsigset: the RRSIG RRset (or a (name, rdataset) tuple)
    @param keys: the key dictionary
    @param origin: origin for relativization, as text or a dns.name
    @param now: validation time, or None for the current time
    @raises ValidationFailure: if the owner names differ or no RRSIG
        validates
    """
    if isinstance(origin, (str, unicode)):
        origin = dns.name.from_text(origin, dns.name.root)
    if isinstance(rrset, tuple):
        rrname = rrset[0]
    else:
        rrname = rrset.name
    if isinstance(rrsigset, tuple):
        rrsigname = rrsigset[0]
        rrsigrdataset = rrsigset[1]
    else:
        rrsigname = rrsigset.name
        rrsigrdataset = rrsigset
    rrname = rrname.choose_relativity(origin)
    # BUG FIX: the original computed rrsigname from rrname, which made
    # the owner-name equality check below always succeed.
    rrsigname = rrsigname.choose_relativity(origin)
    if rrname != rrsigname:
        # Use py2/py3-compatible raise syntax.
        raise ValidationFailure("owner names do not match")
    for rrsig in rrsigrdataset:
        try:
            _validate_rrsig(rrset, rrsig, keys, origin, now)
            return
        except ValidationFailure:
            # Try the next RRSIG; fail only if none validates.
            pass
    raise ValidationFailure("no RRSIGs validated")
Validate an RRset
7,574
def allows_url(self, url_data):
    """Check whether robots.txt allows checking the given URL."""
    robots_url = url_data.get_robots_txt_url()
    # Serialize access per robots.txt URL.
    with self.get_lock(robots_url):
        return self._allows_url(url_data, robots_url)
Ask robots . txt allowance .
7,575
def _allows_url(self, url_data, roboturl):
    """Ask robots.txt allowance, caching parsed robots.txt files.

    Assumes only a single thread per robots.txt URL calls this function.
    """
    with cache_lock:
        if roboturl in self.cache:
            self.hits += 1
            rp = self.cache[roboturl]
            return rp.can_fetch(self.useragent, url_data.url)
        self.misses += 1
    kwargs = dict(auth=url_data.auth, session=url_data.session)
    # BUG FIX: the attribute read below is "proxytype", but the original
    # guard tested hasattr(url_data, "proxy_type"), which either skipped
    # valid proxies or raised AttributeError.
    if hasattr(url_data, "proxy") and hasattr(url_data, "proxytype"):
        kwargs["proxies"] = {url_data.proxytype: url_data.proxy}
    rp = robotparser2.RobotFileParser(**kwargs)
    rp.set_url(roboturl)
    rp.read()
    with cache_lock:
        self.cache[roboturl] = rp
    self.add_sitemap_urls(rp, url_data, roboturl)
    return rp.can_fetch(self.useragent, url_data.url)
Ask robots.txt allowance. Assumes that only a single thread per robots.txt URL calls this function.
7,576
def add_sitemap_urls(self, rp, url_data, roboturl):
    """Queue sitemap URLs found in a parsed robots.txt."""
    if rp.sitemap_urls and url_data.allows_simple_recursion():
        for sitemap_url, line in rp.sitemap_urls:
            url_data.add_url(sitemap_url, line=line)
Add sitemap URLs to queue .
7,577
def start_output(self):
    """Write start of checking info as HTML."""
    super(HtmlLogger, self).start_output()
    # Template values for the static HTML header.
    header = {
        "encoding": self.get_charset_encoding(),
        "title": configuration.App,
        "body": self.colorbackground,
        "link": self.colorlink,
        "vlink": self.colorlink,
        "alink": self.colorlink,
        "url": self.colorurl,
        "error": self.colorerror,
        "valid": self.colorok,
        "warning": self.colorwarning,
    }
    self.write(HTML_HEADER % header)
    self.comment("Generated by %s" % configuration.App)
    if self.has_part('intro'):
        self.write(u"<h2>" + configuration.App + "</h2><br/><blockquote>" +
                   configuration.Freeware + "<br/><br/>" +
                   (_("Start checking at %s") %
                    strformat.strtime(self.starttime)) +
                   os.linesep + "<br/>")
        self.check_date()
    self.flush()
Write start of checking info .
7,578
def write_id(self):
    """Write the ID table row for the current URL."""
    self.writeln(u"<tr>")
    # Field label in the first column, running URL number in the second.
    self.writeln(u'<td>%s</td>' % self.part("id"))
    self.write(u"<td>%d</td></tr>" % self.stats.number)
Write ID for current URL .
7,579
def write_warning(self, url_data):
    """Write all warnings of url_data as one HTML table row."""
    separator = u"<br/>" + os.linesep
    # Each warning is a (tag, message) pair; escape the message text.
    escaped = [cgi.escape(warning[1]) for warning in url_data.warnings]
    self.writeln(u'<tr><td class="warning" ' +
                 u'valign="top">' +
                 self.part("warning") +
                 u'</td><td class="warning">' +
                 separator.join(escaped) +
                 u"</td></tr>")
Write url_data . warnings .
7,580
def write_stats(self):
    """Write check statistics as HTML."""
    self.writeln(u'<br/><i>%s</i><br/>' % _("Statistics"))
    if self.stats.number <= 0:
        self.writeln(
            _("No statistics available since no URLs were checked."))
    else:
        self.writeln(_(
            "Content types: %(image)d image, %(text)d text, %(video)d video, "
            "%(audio)d audio, %(application)d application, %(mail)d mail"
            " and %(other)d other.") % self.stats.link_types)
        self.writeln(u"<br/>")
        self.writeln(
            _("URL lengths: min=%(min)d, max=%(max)d, avg=%(avg)d.") %
            dict(min=self.stats.min_url_length,
                 max=self.stats.max_url_length,
                 avg=self.stats.avg_url_length))
    self.writeln(u"<br/>")
Write check statistic infos .
7,581
def write_outro(self):
    """Write end of check message: counts, duration and page footer."""
    self.writeln(u"<br/>")
    self.write(_("That's it.") + " ")
    # NOTE(review): this test is >= 0 and so the count is printed even
    # for zero checked links — presumably intentional; confirm.
    if self.stats.number >= 0:
        self.write(_n("%d link checked.", "%d links checked.",
                      self.stats.number) % self.stats.number)
        self.write(u" ")
    self.write(_n("%d warning found", "%d warnings found",
                  self.stats.warnings_printed) % self.stats.warnings_printed)
    if self.stats.warnings != self.stats.warnings_printed:
        # Some warnings were suppressed (ignored or duplicates).
        self.write(_(" (%d ignored or duplicates not printed)") %
                   (self.stats.warnings - self.stats.warnings_printed))
    self.write(u". ")
    self.write(_n("%d error found", "%d errors found",
                  self.stats.errors_printed) % self.stats.errors_printed)
    if self.stats.errors != self.stats.errors_printed:
        self.write(_(" (%d duplicates not printed)") %
                   (self.stats.errors - self.stats.errors_printed))
    self.writeln(u".")
    self.writeln(u"<br/>")
    num = self.stats.internal_errors
    if num:
        self.write(_n("There was %(num)d internal error.",
                      "There were %(num)d internal errors.",
                      num) % {"num": num})
        self.writeln(u"<br/>")
    self.stoptime = time.time()
    duration = self.stoptime - self.starttime
    self.writeln(_("Stopped checking at %(time)s (%(duration)s)") %
                 {"time": strformat.strtime(self.stoptime),
                  "duration": strformat.strduration_long(duration)})
    # Static HTML footer with project links.
    self.writeln(u'</blockquote><br/><hr><small>' +
                 configuration.HtmlAppInfo + u"<br/>")
    self.writeln(_("Get the newest version at %s") %
                 (u'<a href="' + configuration.Url + u'" target="_top">' +
                  configuration.Url + u"</a>.<br/>"))
    self.writeln(_("Write comments and bugs to %s") %
                 (u'<a href="' + configuration.SupportUrl + u'">' +
                  configuration.SupportUrl + u"</a>.<br/>"))
    self.writeln(_("Support this project at %s") %
                 (u'<a href="' + configuration.DonateUrl + u'">' +
                  configuration.DonateUrl + u"</a>."))
    self.writeln(u"</small></body></html>")
Write end of check message .
7,582
def end_output(self, **kwargs):
    """Write end of checking info as HTML and close the output file."""
    for part, writer in (("stats", self.write_stats),
                         ("outro", self.write_outro)):
        if self.has_part(part):
            writer()
    self.close_fileoutput()
Write end of checking info as HTML .
7,583
def is_valid_ipv4(ip):
    """Return True if the given ip is a valid dotted-quad IPv4 address."""
    if not _ipv4_re.match(ip):
        return False
    # The regex guarantees exactly four non-negative decimal octets;
    # only the upper bound remains to be checked.
    return all(int(octet) <= 255 for octet in ip.split("."))
Return True if given ip is a valid IPv4 address .
7,584
def is_valid_ipv6(ip):
    """Return True if the given ip is a valid IPv6 address."""
    patterns = (_ipv6_re, _ipv6_ipv4_re, _ipv6_abbr_re, _ipv6_ipv4_abbr_re)
    return any(pattern.match(ip) for pattern in patterns)
Return True if given ip is a valid IPv6 address .
7,585
def host_in_set(ip, hosts, nets):
    """Return True if the ip is in the host set or in one of the networks."""
    if ip in hosts:
        return True
    if not is_valid_ipv4(ip):
        return False
    ipnum = dq2num(ip)
    return any(dq_in_net(ipnum, net) for net in nets)
Return True if given ip is in host or network list .
7,586
def lookup_ips(ips):
    """Return the set of host names the given IPs reverse-resolve to.

    IPs that cannot be resolved are kept verbatim in the result.
    """
    hosts = set()
    for ip in ips:
        try:
            hostname = socket.gethostbyaddr(ip)[0]
        except socket.error:
            hostname = ip
        hosts.add(hostname)
    return hosts
Return set of host names that resolve to given ips .
7,587
def obfuscate_ip(ip):
    """Obfuscate the given IPv4 address as a single hexadecimal number.

    @raises ValueError: if ip is not a valid dotted-quad IPv4 address
    """
    if not is_valid_ipv4(ip):
        raise ValueError('Invalid IP value %r' % ip)
    # BUG FIX: each octet must be zero-padded to two hex digits.
    # The old hex(int(x))[2:] dropped leading zeros, so e.g.
    # "127.0.0.1" became "0x7f001", which decodes to a wrong address.
    res = "0x%s" % "".join("%02x" % int(octet) for octet in ip.split("."))
    assert is_obfuscated_ip(res), '%r obfuscation error' % res
    return res
Obfuscate given host in IP form .
7,588
def put(self, item):
    """Put an item into the queue.

    Blocks while acquiring the mutex, then wakes one consumer waiting
    on the not_empty condition.
    """
    with self.mutex:
        self._put(item)
        self.not_empty.notify()
Put an item into the queue . Block if necessary until a free slot is available .
7,589
def _put(self, url_data):
    """Put URL in queue and increase the number of unfinished tasks.

    Must be called with self.mutex held.
    """
    if self.shutdown or self.max_allowed_urls == 0:
        # Reject new URLs after shutdown or once the URL limit is used up.
        return
    log.debug(LOG_CACHE, "queueing %s", url_data.url)
    key = url_data.cache_url
    cache = url_data.aggregate.result_cache
    if url_data.has_result or cache.has_result(key):
        # URLs with cached results are processed first: front of queue.
        self.queue.appendleft(url_data)
    else:
        assert key is not None, "no result for None key: %s" % url_data
        # Only uncached URLs count against the URL limit.
        if self.max_allowed_urls is not None:
            self.max_allowed_urls -= 1
        self.num_puts += 1
        # Periodically move cached entries to the queue front.
        if self.num_puts >= NUM_PUTS_CLEANUP:
            self.cleanup()
        self.queue.append(url_data)
    self.unfinished_tasks += 1
Put URL in queue increase number of unfished tasks .
7,590
def cleanup(self):
    """Reset the put counter and move already-cached queue entries to the top."""
    self.num_puts = 0
    cached_positions = []
    for pos, url_data in enumerate(self.queue):
        cache = url_data.aggregate.result_cache
        if cache.has_result(url_data.cache_url):
            cached_positions.append(pos)
    for pos in cached_positions:
        self._move_to_top(pos)
Move cached elements to top .
7,591
def _move_to_top ( self , pos ) : if pos > 0 : self . queue . rotate ( - pos ) item = self . queue . popleft ( ) self . queue . rotate ( pos ) self . queue . appendleft ( item )
Move element at given position to top of queue .
7,592
def do_shutdown(self):
    """Shutdown the queue by not accepting any more URLs.

    Clears all queued-but-unstarted items; if no tasks remain in
    progress, wakes all threads waiting in join().
    """
    with self.mutex:
        # Tasks currently being worked on (not waiting in the queue).
        unfinished = self.unfinished_tasks - len(self.queue)
        self.queue.clear()
        if unfinished <= 0:
            if unfinished < 0:
                # More tasks finished than were ever queued: accounting bug.
                raise ValueError('shutdown is in error')
            self.all_tasks_done.notifyAll()
        self.unfinished_tasks = unfinished
        self.shutdown = True
Shutdown the queue by not accepting any more URLs .
7,593
def add(self, item):
    """Add an item to the set if it is not already present."""
    if item not in self.items:
        self.items.append(item)
Add an item to the set .
7,594
def union_update(self, other):
    """Update the set, adding any elements from other not already present."""
    if not isinstance(other, Set):
        raise ValueError('other must be a Set instance')
    if self is not other:
        for element in other.items:
            self.add(element)
Update the set adding any elements from other which are not already in the set .
7,595
def intersection_update(self, other):
    """Update the set, keeping only elements contained in both sets."""
    if not isinstance(other, Set):
        raise ValueError('other must be a Set instance')
    if self is other:
        return
    # Iterate over a snapshot so in-place removal is safe.
    for element in tuple(self.items):
        if element not in other.items:
            self.items.remove(element)
Update the set removing any elements from other which are not in both sets .
7,596
def difference_update(self, other):
    """Update the set, removing any elements that are in other."""
    if not isinstance(other, Set):
        raise ValueError('other must be a Set instance')
    if self is other:
        self.items = []
        return
    for element in other.items:
        self.discard(element)
Update the set removing any elements from other which are in the set .
7,597
def urljoin(parent, url):
    """Join parent and url if url is relative; else return url as-is."""
    return url if urlutil.url_is_absolute(url) else urlparse.urljoin(parent, url)
If url is relative join parent and url . Else leave url as - is .
7,598
def init(self, base_ref, base_url, parent_url, recursion_level,
         aggregate, line, column, page, name, url_encoding, extern):
    """Initialize internal data.

    All URL string arguments must be unicode; base_ref and parent_url
    must already be URL-quoted.
    """
    self.base_ref = base_ref
    if self.base_ref is not None:
        assert isinstance(self.base_ref, unicode), repr(self.base_ref)
    # Strip surrounding whitespace; a difference to the given value
    # triggers a warning below.
    self.base_url = base_url.strip() if base_url else base_url
    if self.base_url is not None:
        assert isinstance(self.base_url, unicode), repr(self.base_url)
    self.parent_url = parent_url
    if self.parent_url is not None:
        assert isinstance(self.parent_url, unicode), repr(self.parent_url)
    self.recursion_level = recursion_level
    self.aggregate = aggregate
    self.line = line
    self.column = column
    self.page = page
    self.name = name
    assert isinstance(self.name, unicode), repr(self.name)
    self.encoding = url_encoding
    self.charset = None
    self.extern = extern
    if self.base_ref:
        assert not urlutil.url_needs_quoting(self.base_ref), \
            "unquoted base reference URL %r" % self.base_ref
    if self.parent_url:
        assert not urlutil.url_needs_quoting(self.parent_url), \
            "unquoted parent URL %r" % self.parent_url
    # Determine the URL scheme; scheme-less URLs default to "file".
    url = absolute_url(self.base_url, base_ref, parent_url)
    self.scheme = url.split(":", 1)[0].lower() or "file"
    if self.base_url != base_url:
        self.add_warning(
            _("Leading or trailing whitespace in URL `%(url)s'.") %
            {"url": base_url}, tag=WARN_URL_WHITESPACE)
Initialize internal data .
7,599
def reset(self):
    """Reset all variables to their default values."""
    # URL and parse results
    self.url = None
    self.urlparts = None
    self.scheme = None
    self.host = None
    self.port = None
    self.anchor = None
    # check results
    self.result = u""
    self.has_result = False
    self.valid = True
    self.warnings = []
    self.info = []
    # download data
    self.size = -1
    self.modified = None
    self.dltime = -1
    self.checktime = 0
    self.url_connection = None
    self.data = None
    # caching and content flags
    self.cache_url = None
    self.extern = None
    self.caching = True
    self.title = None
    self.do_check_content = True
    self.content_type = u""
    self.aliases = []
Reset all variables to default values .