idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
7,100
def sanitize_loginurl(self):
    """Make the login configuration consistent.

    Validates the configured login URL and its companion settings;
    every failed check logs a warning, and if anything failed the
    login URL is disabled.
    """
    url = self["loginurl"]
    problems = []
    if not self["loginpasswordfield"]:
        problems.append(_("no CGI password fieldname given for login URL."))
    if not self["loginuserfield"]:
        problems.append(_("no CGI user fieldname given for login URL."))
    if self.get_user_password(url) == (None, None):
        problems.append(
            _("no user/password authentication data found for login URL."))
    if not url.lower().startswith(("http:", "https:")):
        problems.append(_("login URL is not a HTTP URL."))
    parts = urlparse.urlsplit(url)
    if not (parts[0] and parts[1] and parts[2]):
        problems.append(_("login URL is incomplete."))
    for message in problems:
        log.warn(LOG_CHECK, message)
    if problems:
        log.warn(LOG_CHECK, _("disabling login URL %(url)s.") % {"url": url})
        self["loginurl"] = None
Make login configuration consistent .
7,101
def sanitize_proxies(self):
    """Try to read additional proxy settings which urllib does not support.

    POSIX-only; fills in HTTP and FTP proxies from GNOME (gconf) or
    KDE settings when they are not already configured.
    """
    if os.name != 'posix':
        return
    proxies = self["proxy"]
    if "http" not in proxies:
        candidate = get_gconf_http_proxy() or get_kde_http_proxy()
        if candidate:
            proxies["http"] = candidate
    if "ftp" not in proxies:
        candidate = get_gconf_ftp_proxy() or get_kde_ftp_proxy()
        if candidate:
            proxies["ftp"] = candidate
Try to read additional proxy settings which urllib does not support .
7,102
def sanitize_ssl(self):
    """Use a locally installed certificate file if available.

    Tries the system certificate bundle first, then certifi, then the
    bundled cacert.pem; sslverify stays True when none is found.
    """
    if self["sslverify"] is not True:
        return
    # flat guard-clause chain instead of nested try blocks; each
    # fallback keeps the exact exception set of the original
    try:
        self["sslverify"] = get_system_cert_file()
        return
    except ValueError:
        pass
    try:
        self["sslverify"] = get_certifi_file()
        return
    except (ValueError, ImportError):
        pass
    try:
        self["sslverify"] = get_share_file('cacert.pem')
    except ValueError:
        pass
Use local installed certificate file if available . Tries to get system then certifi then the own installed certificate file .
7,103
def get_command(self):
    """Get a line of text that was received from the distant end.

    Returns None when no complete line is queued; clears cmd_ready
    once the last queued line has been handed out.
    """
    pending = len(self.command_list)
    if pending == 0:
        return None
    if pending == 1:
        self.cmd_ready = False
    return self.command_list.pop(0)
Get a line of text that was received from the distant end (DE). The class's cmd_ready attribute will be True if lines are available.
7,104
def send(self, text):
    """Send raw text to the distant end, translating LF into CRLF."""
    if not text:
        return
    self.send_buffer += text.replace('\n', '\r\n')
    self.send_pending = True
Send raw text to the distant end .
7,105
def send_wrapped(self, text):
    """Send text wrapped to the user's screen width, line by line."""
    for chunk in word_wrap(text, self.columns):
        self.send_cc(chunk + '\n')
Send text padded and wrapped to the user's screen width.
7,106
def request_will_echo(self):
    """Tell the distant end that we would like to echo their text.

    See RFC 857 (Telnet Echo option).
    """
    self._iac_will(ECHO)
    self._note_reply_pending(ECHO, True)
    self.telnet_echo = True
Tell the DE that we would like to echo their text . See RFC 857 .
7,107
def request_wont_echo(self):
    """Tell the distant end that we would like to stop echoing their text.

    See RFC 857 (Telnet Echo option).
    """
    self._iac_wont(ECHO)
    self._note_reply_pending(ECHO, True)
    self.telnet_echo = False
Tell the DE that we would like to stop echoing their text . See RFC 857 .
7,108
def socket_send(self):
    """Called by TelnetServer when send data is ready.

    Pushes as much of send_buffer as the socket accepts, keeping the
    remainder for the next call.  On a socket error the client is
    deactivated; once the buffer is drained send_pending is cleared.
    """
    if not self.send_buffer:
        self.send_pending = False
        return
    try:
        sent = self.sock.send(self.send_buffer)
    except socket.error as err:
        # `except ..., err` and err[0] indexing are py2-only;
        # err.args is (errno, message) for socket.error
        print("!! SEND error '%d:%s' from %s"
              % (err.args[0], err.args[1], self.addrport()))
        self.active = False
        return
    self.bytes_sent += sent
    # keep whatever the kernel did not accept
    self.send_buffer = self.send_buffer[sent:]
Called by TelnetServer when send data is ready .
7,109
def socket_recv(self):
    """Called by TelnetServer when recv data is ready.

    Reads from the socket, feeds every byte through the telnet IAC
    sniffer, and splits completed lines into command_list.

    Raises BogConnectionLost when the peer disconnects or on error.
    """
    try:
        data = self.sock.recv(2048)
    except socket.error as ex:
        # `except ..., ex` is py2-only syntax; ex.args is (errno, message)
        print("?? socket.recv() error '%d:%s' from %s"
              % (ex.args[0], ex.args[1], self.addrport()))
        raise BogConnectionLost()
    size = len(data)
    if size == 0:
        # orderly shutdown by the peer
        raise BogConnectionLost()
    self.last_input_time = time.time()
    self.bytes_received += size
    for byte in data:
        self._iac_sniffer(byte)
    # extract complete lines from the receive buffer
    while True:
        mark = self.recv_buffer.find('\n')
        if mark == -1:
            break
        cmd = self.recv_buffer[:mark].strip()
        self.command_list.append(cmd)
        self.cmd_ready = True
        self.recv_buffer = self.recv_buffer[mark + 1:]
Called by TelnetServer when recv data is ready .
7,110
def _recv_byte ( self , byte ) : if self . telnet_echo : self . _echo_byte ( byte ) self . recv_buffer += byte
Non - printable filtering currently disabled because it did not play well with extended character sets .
7,111
def _echo_byte ( self , byte ) : if byte == '\n' : self . send_buffer += '\r' if self . telnet_echo_password : self . send_buffer += '*' else : self . send_buffer += byte
Echo a character back to the client and convert LF into CR/LF.
7,112
def _two_byte_cmd(self, cmd):
    """Handle incoming Telnet commands that are two bytes long.

    SB starts sub-negotiation capture, SE ends it and runs the
    decoder; the remaining recognized commands are ignored.
    """
    if cmd == SB:
        self.telnet_got_sb = True
        self.telnet_sb_buffer = ''
    elif cmd == SE:
        self.telnet_got_sb = False
        self._sb_decoder()
    elif cmd in (NOP, DATMK, IP, AO, AYT, EC, EL, GA):
        # recognized but deliberately ignored
        pass
    else:
        # print() replaces the py2-only print statement
        print("2BC: Should not be here.")
    self.telnet_got_iac = False
    self.telnet_got_cmd = None
Handle incoming Telnet commands that are two bytes long .
7,113
def _sb_decoder(self):
    """Figure out what to do with a received sub-negotiation block.

    Handles TTYPE (terminal type) and NAWS (window size) replies;
    anything else is ignored.  The buffer is cleared afterwards.
    """
    bloc = self.telnet_sb_buffer
    if len(bloc) > 2:
        if bloc[0] == TTYPE and bloc[1] == IS:
            self.terminal_type = bloc[2:]
        if bloc[0] == NAWS:
            if len(bloc) != 5:
                # print() replaces the py2-only print statement; the
                # concatenation reproduces print's comma spacing
                print("Bad length on NAWS SB: " + str(len(bloc)))
            else:
                # two 16-bit big-endian values: columns then rows
                self.columns = (256 * ord(bloc[1])) + ord(bloc[2])
                self.rows = (256 * ord(bloc[3])) + ord(bloc[4])
    self.telnet_sb_buffer = ''
Figures out what to do with a received sub - negotiation block .
7,114
def _check_local_option ( self , option ) : if not self . telnet_opt_dict . has_key ( option ) : self . telnet_opt_dict [ option ] = TelnetOption ( ) return self . telnet_opt_dict [ option ] . local_option
Test the status of local negotiated Telnet options .
7,115
def _check_remote_option ( self , option ) : if not self . telnet_opt_dict . has_key ( option ) : self . telnet_opt_dict [ option ] = TelnetOption ( ) return self . telnet_opt_dict [ option ] . remote_option
Test the status of remote negotiated Telnet options .
7,116
def _check_reply_pending ( self , option ) : if not self . telnet_opt_dict . has_key ( option ) : self . telnet_opt_dict [ option ] = TelnetOption ( ) return self . telnet_opt_dict [ option ] . reply_pending
Test the status of requested Telnet options .
7,117
def _note_reply_pending ( self , option , state ) : if not self . telnet_opt_dict . has_key ( option ) : self . telnet_opt_dict [ option ] = TelnetOption ( ) self . telnet_opt_dict [ option ] . reply_pending = state
Record the status of requested Telnet options .
7,118
def pipecmd(cmd1, cmd2):
    """Return the output of ``cmd1 | cmd2``.

    @param cmd1: argument list of the producing command
    @param cmd2: argument list of the consuming command
    @return: captured stdout of cmd2
    """
    producer = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
    consumer = subprocess.Popen(cmd2, stdin=producer.stdout,
                                stdout=subprocess.PIPE)
    # let the producer receive SIGPIPE if the consumer exits early
    producer.stdout.close()
    return consumer.communicate()[0]
Return output of cmd1 | cmd2 .
7,119
def _getaddr(self, ifname, func):
    """Get an interface address via the given ioctl request.

    Returns the dotted-quad address string, or None on ioctl failure.
    """
    try:
        res = self._ioctl(func, self._getifreq(ifname))
    except IOError as msg:
        log.warn(LOG_CHECK,
                 "error getting addr for interface %r: %s", ifname, msg)
        return None
    # the address lives at bytes 20..23 of the returned ifreq struct
    return socket.inet_ntoa(res[20:24])
Get interface address .
7,120
def getInterfaceList(self, flags=0):
    """Get all interface names in a list.

    On macOS this shells out to ``ifconfig -l``; elsewhere it uses the
    SIOCGIFCONF ioctl, growing the buffer until the kernel is happy.

    @param flags: if non-zero, only interfaces whose flags contain
        these bits are returned.
    """
    if sys.platform == 'darwin':
        command = ['ifconfig', '-l']
        if flags & self.IFF_UP:
            command.append('-u')
        res = subprocess.Popen(command,
                               stdout=subprocess.PIPE).communicate()[0]
        return res.split()
    bufsize = 8192
    max_bufsize = bufsize * 10
    while True:
        buf = array.array('c', '\0' * bufsize)
        ifreq = struct.pack("iP", buf.buffer_info()[1],
                            buf.buffer_info()[0])
        try:
            result = self._ioctl(self.SIOCGIFCONF, ifreq)
            break
        except IOError as msg:
            # msg.args[0] instead of msg[0]: exception indexing is
            # py2-only
            if msg.args[0] != errno.EINVAL or bufsize == max_bufsize:
                raise
            bufsize += 8192
    data = buf.tostring()
    iflist = []
    size, _ptr = struct.unpack("iP", result)
    # Iterate fixed-size ifreq records with a for-loop so the offset
    # always advances: the old while-loop used `continue` for filtered
    # interfaces, which skipped `i += self.ifr_size` and looped forever.
    for i in range(0, size, self.ifr_size):
        ifconf = data[i:i + self.ifr_size]
        name = struct.unpack("16s%ds" % (self.ifr_size - 16), ifconf)[0]
        name = name.split('\0', 1)[0]
        if not name:
            continue
        if flags and not (self.getFlags(name) & flags):
            continue
        iflist.append(name)
    return iflist
Get all interface names in a list .
7,121
def getFlags(self, ifname):
    """Get the flags for an interface.

    Returns 0 when the SIOCGIFFLAGS ioctl fails.
    """
    try:
        res = self._ioctl(self.SIOCGIFFLAGS, self._getifreq(ifname))
    except IOError as msg:
        log.warn(LOG_CHECK,
                 "error getting flags for interface %r: %s", ifname, msg)
        return 0
    # the flags are a 16-bit short at offset 16 of the ifreq struct
    flags, = struct.unpack('H', res[16:18])
    return flags
Get the flags for an interface
7,122
def getAddr(self, ifname):
    """Get the inet address for an interface."""
    if sys.platform == 'darwin':
        # on macOS, parse ifconfig output instead of using ioctls
        return ifconfig_inet(ifname).get('address')
    return self._getaddr(ifname, self.SIOCGIFADDR)
Get the inet addr for an interface .
7,123
def getMask(self, ifname):
    """Get the netmask for an interface."""
    if sys.platform == 'darwin':
        # on macOS, parse ifconfig output instead of using ioctls
        return ifconfig_inet(ifname).get('netmask')
    return self._getaddr(ifname, self.SIOCGIFNETMASK)
Get the netmask for an interface .
7,124
def getBroadcast(self, ifname):
    """Get the broadcast address for an interface."""
    if sys.platform == 'darwin':
        # on macOS, parse ifconfig output instead of using ioctls
        return ifconfig_inet(ifname).get('broadcast')
    return self._getaddr(ifname, self.SIOCGIFBRDADDR)
Get the broadcast addr for an interface .
7,125
def isLoopback(self, ifname):
    """Check whether the interface is a loopback device."""
    # fast path by conventional name, otherwise ask the kernel flags
    return (ifname.startswith('lo')
            or (self.getFlags(ifname) & self.IFF_LOOPBACK) != 0)
Check whether interface is a loopback device .
7,126
def check(self, url_data):
    """Extract URLs from the content of url_data."""
    content = url_data.get_content()
    # regex-based extraction first, then markdown-style inline links
    self._check_by_re(url_data, content)
    self._check_inline_links(url_data, content)
Extracts urls from the file .
7,127
def _check_by_re ( self , url_data , content ) : for link_re in self . _link_res : for u in link_re . finditer ( content ) : self . _save_url ( url_data , content , u . group ( 1 ) , u . start ( 1 ) )
Finds urls by re .
7,128
def _find_balanced ( self , text , start , open_c , close_c ) : i = start l = len ( text ) count = 1 while count > 0 and i < l : if text [ i ] == open_c : count += 1 elif text [ i ] == close_c : count -= 1 i += 1 return i
Returns the index where the open_c and close_c characters balance out — the same number of open_c and close_c are encountered — or the end of the string if it is reached before the balance point is found.
7,129
def _extract_url_and_title(self, text, start):
    """Extract the URL from the tail of an inline link.

    *start* points at the character opening the link tail.  Returns
    (url, end_index), or (None, None) when no link can be parsed.
    """
    idx = self._whitespace.match(text, start + 1).end()
    if idx == len(text):
        return None, None
    end_idx = idx
    in_anglebrackets = text[idx] == "<"
    if in_anglebrackets:
        # skip over a <...>-wrapped URL first
        end_idx = self._find_balanced(text, end_idx + 1, "<", ">")
    end_idx = self._find_balanced(text, end_idx, "(", ")")
    # an optional title sits between the URL and the closing paren
    match = self._inline_link_title.search(text, idx, end_idx)
    if not match:
        return None, None
    url = text[idx:match.start()]
    if in_anglebrackets:
        url = self._strip_anglebrackets.sub(r'\1', url)
    return url, end_idx
Extracts the url from the tail of a link .
7,130
def _check_inline_links(self, url_data, content):
    """Check markdown-style inline links ``[text](url)`` in content."""
    # cap on how far we scan for the closing ']' of the link text
    MAX_LINK_TEXT_SENTINEL = 3000
    curr_pos = 0
    content_length = len(content)
    while True:
        try:
            start_idx = content.index('[', curr_pos)
        except ValueError:
            # no more '[' in the remaining content
            break
        bracket_depth = 0
        # scan for the ']' matching this '[', allowing nested brackets
        for p in range(start_idx + 1,
                       min(start_idx + MAX_LINK_TEXT_SENTINEL,
                           content_length)):
            if content[p] == ']':
                bracket_depth -= 1
                if bracket_depth < 0:
                    break
            elif content[p] == '[':
                bracket_depth += 1
        else:
            # for-else: no closing bracket found within the window;
            # skip past this '[' and retry
            curr_pos = start_idx + 1
            continue
        # p (leaked from the for loop) sits on the closing ']';
        # advance to the character after it
        p += 1
        if p >= content_length:
            return
        if content[p] == '(':
            url, url_end_idx = self._extract_url_and_title(content, p)
            if url is not None:
                self._save_url(url_data, content, url, p)
                start_idx = url_end_idx
        curr_pos = start_idx + 1
Checks inline links .
7,131
def is_meta_url(attr, attrs):
    """Check if the meta attributes contain a URL.

    @param attr: the attribute name being inspected
    @param attrs: tag attribute mapping with a get_true() accessor
    @return: True if the attribute value should be treated as a URL
    """
    if attr == "content":
        equiv = attrs.get_true('http-equiv', u'').lower()
        scheme = attrs.get_true('scheme', u'').lower()
        return equiv in (u'refresh',) or scheme in (u'dcterms.uri',)
    if attr == "href":
        rel = attrs.get_true('rel', u'').lower()
        return rel in (u'shortcut icon', u'icon')
    return False
Check if the meta attributes contain a URL .
7,132
def is_form_get(attr, attrs):
    """Check if this is a GET form action URL."""
    if attr != "action":
        return False
    method = attrs.get_true('method', u'').lower()
    # anything except an explicit POST counts as GET
    return method != 'post'
Check if this is a GET form action URL .
7,133
def start_element(self, tag, attrs):
    """Search for meta robots.txt nofollow and noindex flags.

    Parsing stops as soon as the robots meta tag or the body tag is
    seen, since nothing after that can change the flags.
    """
    if tag == 'meta' and attrs.get('name') == 'robots':
        directives = attrs.get_true('content', u'').lower().split(u',')
        self.follow = u'nofollow' not in directives
        self.index = u'noindex' not in directives
        raise StopParse("found <meta name=robots> tag")
    if tag == 'body':
        raise StopParse("found <body> tag")
Search for meta robots.txt nofollow and noindex flags.
7,134
def start_element(self, tag, attrs):
    """Search for links in the tag attributes and store found URLs."""
    log.debug(LOG_CHECK, "LinkFinder tag %s attrs %s", tag, attrs)
    log.debug(LOG_CHECK, "line %d col %d old line %d old col %d",
              self.parser.lineno(), self.parser.column(),
              self.parser.last_lineno(), self.parser.last_column())
    # remember the first <base href=...> for resolving relative URLs
    if tag == "base" and not self.base_ref:
        self.base_ref = attrs.get_true("href", u'')
    # only the attributes this tag is known to carry URLs in
    tagattrs = self.tags.get(tag, self.universal_attrs)
    for attr in tagattrs.intersection(attrs):
        # filter out non-URL meta/form attribute combinations
        if tag == "meta" and not is_meta_url(attr, attrs):
            continue
        if tag == "form" and not is_form_get(attr, attrs):
            continue
        # human-readable name of this link
        name = self.get_link_name(tag, attrs, attr)
        # applets may carry their own codebase; otherwise use base_ref
        base = u''
        if tag == 'applet':
            base = attrs.get_true('codebase', u'')
        if not base:
            base = self.base_ref
        value = attrs.get(attr)
        # rewrite dns-prefetch targets into dns: pseudo-URLs
        if tag == 'link' and attrs.get('rel') == 'dns-prefetch':
            if ':' in value:
                value = value.split(':', 1)[1]
            value = 'dns:' + value.rstrip('/')
        self.parse_tag(tag, attr, value, name, base)
    log.debug(LOG_CHECK, "LinkFinder finished tag %s", tag)
Search for links and store found URLs in a list .
7,135
def get_link_name(self, tag, attrs, attr):
    """Parse attrs for the human-readable name of a link.

    For anchors the text after the tag is used, for images the alt
    text; both fall back to the title attribute.  Other tags get an
    empty name.
    """
    if tag == 'a' and attr == 'href':
        peeked = self.parser.peek(MAX_NAMELEN)
        peeked = peeked.decode(self.parser.encoding, "ignore")
        name = linkname.href_name(peeked)
        if not name:
            name = attrs.get_true('title', u'')
    elif tag == 'img':
        name = attrs.get_true('alt', u'')
        if not name:
            name = attrs.get_true('title', u'')
    else:
        name = u""
    return name
Parse attrs for link name . Return name of link .
7,136
def parse_tag(self, tag, attr, value, name, base):
    """Add the given URL data to the URL list."""
    assert isinstance(tag, unicode), repr(tag)
    assert isinstance(attr, unicode), repr(attr)
    assert isinstance(name, unicode), repr(name)
    assert isinstance(base, unicode), repr(base)
    assert isinstance(value, unicode) or value is None, repr(value)
    # look for meta refresh redirects
    if tag == u'meta' and value:
        mo = refresh_re.match(value)
        if mo:
            self.found_url(mo.group("url"), name, base)
        elif attr != 'content':
            # non-refresh content attributes carry no URL
            self.found_url(value, name, base)
    elif attr == u'style' and value:
        # inline CSS can reference URLs via url(...)
        for mo in css_url_re.finditer(value):
            url = unquote(mo.group("url"), matching=True)
            self.found_url(url, name, base)
    elif attr == u'archive':
        # applet archives are a comma-separated list
        for url in value.split(u','):
            self.found_url(url, name, base)
    elif attr == u'srcset':
        # srcset entries are "url descriptor" pairs, comma-separated
        for img_candidate in value.split(u','):
            url = img_candidate.split()[0]
            self.found_url(url, name, base)
    else:
        self.found_url(value, name, base)
Add given url data to url list .
7,137
def found_url(self, url, name, base):
    """Hand a newly found URL to the callback for queueing."""
    assert isinstance(url, unicode) or url is None, repr(url)
    self.callback(url,
                  line=self.parser.last_lineno(),
                  column=self.parser.last_column(),
                  name=name, base=base)
Add newly found URL to queue .
7,138
def from_text_list(name, ttl, rdclass, rdtype, text_rdatas):
    """Create an RRset with the given name, TTL, class and type from
    a list of rdatas in text format.
    """
    # accept textual names, classes and types and convert them
    if isinstance(name, (str, unicode)):
        name = dns.name.from_text(name, None)
    if isinstance(rdclass, (str, unicode)):
        rdclass = dns.rdataclass.from_text(rdclass)
    if isinstance(rdtype, (str, unicode)):
        rdtype = dns.rdatatype.from_text(rdtype)
    rrset = RRset(name, rdclass, rdtype)
    rrset.update_ttl(ttl)
    for text in text_rdatas:
        rrset.add(dns.rdata.from_text(rrset.rdclass, rrset.rdtype, text))
    return rrset
Create an RRset with the specified name TTL class and type and with the specified list of rdatas in text format .
7,139
def from_text(name, ttl, rdclass, rdtype, *text_rdatas):
    """Create an RRset with the given rdatas passed as varargs in
    text format; thin wrapper around from_text_list().
    """
    return from_text_list(name, ttl, rdclass, rdtype, text_rdatas)
Create an RRset with the specified name TTL class and type and with the specified rdatas in text format .
7,140
def from_rdata_list(name, ttl, rdatas):
    """Create an RRset with the given name and TTL from a non-empty
    list of rdata objects.

    @raises ValueError: if the rdata list is empty
    """
    if isinstance(name, (str, unicode)):
        name = dns.name.from_text(name, None)
    if len(rdatas) == 0:
        raise ValueError("rdata list must not be empty")
    r = None
    for rd in rdatas:
        if r is None:
            # class and type are taken from the first rdata
            r = RRset(name, rd.rdclass, rd.rdtype)
            r.update_ttl(ttl)
            # (dropped the dead `first_time = False` local: it was
            # assigned but never read)
        r.add(rd)
    return r
Create an RRset with the specified name and TTL and with the specified list of rdata objects .
7,141
def match(self, name, rdclass, rdtype, covers, deleting=None):
    """Return True if this rrset matches the given name, class, type,
    covers and deletion state.
    """
    return (super(RRset, self).match(rdclass, rdtype, covers)
            and self.name == name
            and self.deleting == deleting)
Returns True if this rrset matches the specified class type covers and deletion state .
7,142
def to_text(self, origin=None, relativize=True, **kw):
    """Convert the RRset into DNS master file format."""
    return super(RRset, self).to_text(self.name, origin, relativize,
                                      self.deleting, **kw)
Convert the RRset into DNS master file format .
7,143
def to_wire(self, file, compress=None, origin=None, **kw):
    """Convert the RRset to wire format."""
    return super(RRset, self).to_wire(self.name, file, compress,
                                      origin, self.deleting, **kw)
Convert the RRset to wire format .
7,144
def log_url(self, url_data):
    """Record url_data in the blacklist bookkeeping.

    Invalid URLs get their failure count bumped; a URL that has
    become valid again is dropped from the blacklist.
    """
    key = repr((url_data.parent_url, url_data.cache_url))
    if key in self.blacklist:
        if url_data.valid:
            del self.blacklist[key]
        else:
            self.blacklist[key] += 1
    elif not url_data.valid:
        self.blacklist[key] = 1
Put invalid url in blacklist delete valid url from blacklist .
7,145
def read_blacklist(self):
    """Read a previously stored blacklist from self.filename.

    Lines have the form "<count> <key>"; empty lines and #-comments
    are skipped.
    """
    with codecs.open(self.filename, 'r', self.output_encoding,
                     self.codec_errors) as fd:
        for raw in fd:
            entry = raw.rstrip()
            if not entry or entry.startswith('#'):
                continue
            count, key = entry.split(None, 1)
            self.blacklist[key] = int(count)
Read a previously stored blacklist from file fd .
7,146
def write_blacklist(self):
    """Write the blacklist to the output file.

    The umask is temporarily tightened so the file is created
    owner-readable only.
    """
    # 0o077 is the py2.6+/py3 octal literal (was the py2-only 0077,
    # a syntax error in python 3)
    oldmask = os.umask(0o077)
    for key, value in self.blacklist.items():
        # NOTE(review): log_url() already stores repr()'d keys, so this
        # repr() escapes them a second time while read_blacklist()
        # reads them back verbatim -- the round-trip looks asymmetric;
        # confirm against the intended file format.
        self.write(u"%d %s%s" % (value, repr(key), os.linesep))
    self.close_fileoutput()
    os.umask(oldmask)
Write the blacklist .
7,147
def from_text(text, origin=None, rdclass=dns.rdataclass.IN,
              relativize=True, zone_factory=Zone, filename=None,
              allow_include=False, check_origin=True):
    """Build a zone object from a master-file-format string."""
    if filename is None:
        filename = '<string>'
    tokenizer = dns.tokenizer.Tokenizer(text, filename)
    reader = _MasterReader(tokenizer, origin, rdclass, relativize,
                           zone_factory, allow_include=allow_include,
                           check_origin=check_origin)
    reader.read()
    return reader.zone
Build a zone object from a master file format string .
7,148
def from_file(f, origin=None, rdclass=dns.rdataclass.IN,
              relativize=True, zone_factory=Zone, filename=None,
              allow_include=True, check_origin=True):
    """Read a master file and build a zone object.

    *f* may be a file object or a filename; filenames are opened
    (and closed) by this function.
    """
    if sys.hexversion >= 0x02030000:
        # allow Unicode filenames; turn on universal newline support
        str_type = basestring
        opts = 'rU'
    else:
        str_type = str
        opts = 'r'
    if isinstance(f, str_type):
        if filename is None:
            filename = f
        # open() instead of the py2-only file() builtin
        f = open(f, opts)
        want_close = True
    else:
        if filename is None:
            filename = '<file>'
        want_close = False
    try:
        z = from_text(f, origin, rdclass, relativize, zone_factory,
                      filename, allow_include, check_origin)
    finally:
        if want_close:
            f.close()
    return z
Read a master file and build a zone object .
7,149
def from_xfr(xfr, zone_factory=Zone, relativize=True):
    """Convert the output of a zone transfer generator into a zone
    object.
    """
    z = None
    for r in xfr:
        if z is None:
            # the first message determines origin and class
            if relativize:
                origin = r.origin
            else:
                origin = r.answer[0].name
            rdclass = r.answer[0].rdclass
            z = zone_factory(origin, rdclass, relativize=relativize)
        for rrset in r.answer:
            znode = z.nodes.get(rrset.name)
            if not znode:
                znode = z.node_factory()
                z.nodes[rrset.name] = znode
            zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype,
                                       rrset.covers, True)
            zrds.update_ttl(rrset.ttl)
            for rd in rrset:
                rd.choose_relativity(z.origin, relativize)
                zrds.add(rd)
    z.check_origin()
    return z
Convert the output of a zone transfer generator into a zone object .
7,150
def find_node(self, name, create=False):
    """Find a node in the zone, possibly creating it.

    @raises KeyError: if the node is absent and create is False
    """
    name = self._validate_name(name)
    existing = self.nodes.get(name)
    if existing is not None:
        return existing
    if not create:
        raise KeyError
    fresh = self.node_factory()
    self.nodes[name] = fresh
    return fresh
Find a node in the zone possibly creating it .
7,151
def get_node(self, name, create=False):
    """Get a node in the zone, possibly creating it.

    Unlike find_node(), returns None instead of raising KeyError when
    the node does not exist.
    """
    try:
        return self.find_node(name, create)
    except KeyError:
        return None
Get a node in the zone possibly creating it .
7,152
def delete_node(self, name):
    """Delete the specified node if it exists; no error otherwise."""
    name = self._validate_name(name)
    self.nodes.pop(name, None)
Delete the specified node if it exists .
7,153
def replace_rdataset(self, name, replacement):
    """Replace an rdataset at name, creating the node if needed.

    @raises ValueError: if the replacement's class differs from the
        zone's class
    """
    if replacement.rdclass != self.rdclass:
        raise ValueError('replacement.rdclass != zone.rdclass')
    self.find_node(name, True).replace_rdataset(replacement)
Replace an rdataset at name .
7,154
def to_file(self, f, sorted=True, relativize=True, nl=None):
    """Write a zone to a file.

    @param f: a file object or a filename (filenames are opened and
        closed by this function)
    @param sorted: write names in sorted order
    @param relativize: relativize names against the zone origin
    @param nl: explicit line terminator; None means text mode with
        a plain newline
    """
    if sys.hexversion >= 0x02030000:
        # allow Unicode filenames
        str_type = basestring
    else:
        str_type = str
    if nl is None:
        opts = 'w'
    else:
        opts = 'wb'
    if isinstance(f, str_type):
        # open() instead of the py2-only file() builtin
        f = open(f, opts)
        want_close = True
    else:
        want_close = False
    try:
        if sorted:
            names = self.keys()
            names.sort()
        else:
            names = self.iterkeys()
        for n in names:
            l = self[n].to_text(n, origin=self.origin,
                                relativize=relativize)
            if nl is None:
                # replaces the py2-only "print >> f, l" statement
                f.write(l)
                f.write('\n')
            else:
                f.write(l)
                f.write(nl)
    finally:
        if want_close:
            f.close()
Write a zone to a file .
7,155
def check_origin(self):
    """Do some simple checking of the zone's origin.

    @raises NoSOA: if there is no SOA RRset at the origin
    @raises NoNS: if there is no NS RRset at the origin
    """
    name = dns.name.empty if self.relativize else self.origin
    if self.get_rdataset(name, dns.rdatatype.SOA) is None:
        raise NoSOA
    if self.get_rdataset(name, dns.rdatatype.NS) is None:
        raise NoNS
Do some simple checking of the zone's origin.
7,156
def _rr_line(self):
    """Process one line from a DNS master file."""
    if self.current_origin is None:
        raise UnknownOrigin
    # Name: leading whitespace means "same name as the previous RR".
    token = self.tok.get(want_leading=True)
    if not token.is_whitespace():
        self.last_name = dns.name.from_text(token.value,
                                            self.current_origin)
    else:
        token = self.tok.get()
        if token.is_eol_or_eof():
            # a line containing only whitespace is ignored
            return
        self.tok.unget(token)
    name = self.last_name
    if not name.is_subdomain(self.zone.origin):
        # out-of-zone data is skipped
        self._eat_line()
        return
    if self.relativize:
        name = name.relativize(self.zone.origin)
    token = self.tok.get()
    if not token.is_identifier():
        raise dns.exception.SyntaxError
    # TTL (optional; falls back to the zone's current $TTL)
    try:
        ttl = dns.ttl.from_text(token.value)
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
    except dns.ttl.BadTTL:
        ttl = self.ttl
    # Class (optional; falls back to the zone's class)
    try:
        rdclass = dns.rdataclass.from_text(token.value)
        token = self.tok.get()
        if not token.is_identifier():
            raise dns.exception.SyntaxError
    except dns.exception.SyntaxError:
        raise
    except Exception:
        rdclass = self.zone.rdclass
    if rdclass != self.zone.rdclass:
        raise dns.exception.SyntaxError("RR class is not zone's class")
    # Type
    try:
        rdtype = dns.rdatatype.from_text(token.value)
    except Exception:
        raise dns.exception.SyntaxError("unknown rdatatype '%s'" %
                                        token.value)
    n = self.zone.nodes.get(name)
    if n is None:
        n = self.zone.node_factory()
        self.zone.nodes[name] = n
    try:
        rd = dns.rdata.from_text(rdclass, rdtype, self.tok,
                                 self.current_origin, False)
    except dns.exception.SyntaxError:
        # re-raise the original syntax error unchanged
        (ty, va) = sys.exc_info()[:2]
        raise va
    except Exception:
        # wrap any other parse failure in a SyntaxError
        (ty, va) = sys.exc_info()[:2]
        raise dns.exception.SyntaxError("caught exception %s: %s" %
                                        (str(ty), str(va)))
    rd.choose_relativity(self.zone.origin, self.relativize)
    covers = rd.covers()
    rds = n.find_rdataset(rdclass, rdtype, covers, True)
    rds.add(rd, ttl)
Process one line from a DNS master file .
7,157
def read(self):
    """Read a DNS master file and build a zone object."""
    try:
        while 1:
            token = self.tok.get(True, True).unescape()
            if token.is_eof():
                if not self.current_file is None:
                    self.current_file.close()
                # pop back out of a finished $INCLUDE file, if any
                if len(self.saved_state) > 0:
                    (self.tok,
                     self.current_origin,
                     self.last_name,
                     self.current_file,
                     self.ttl) = self.saved_state.pop(-1)
                    continue
                break
            elif token.is_eol():
                continue
            elif token.is_comment():
                self.tok.get_eol()
                continue
            elif token.value[0] == '$':
                # master file directive
                u = token.value.upper()
                if u == '$TTL':
                    token = self.tok.get()
                    if not token.is_identifier():
                        raise dns.exception.SyntaxError("bad $TTL")
                    self.ttl = dns.ttl.from_text(token.value)
                    self.tok.get_eol()
                elif u == '$ORIGIN':
                    self.current_origin = self.tok.get_name()
                    self.tok.get_eol()
                    if self.zone.origin is None:
                        self.zone.origin = self.current_origin
                elif u == '$INCLUDE' and self.allow_include:
                    token = self.tok.get()
                    if not token.is_quoted_string():
                        raise dns.exception.SyntaxError(
                            "bad filename in $INCLUDE")
                    filename = token.value
                    token = self.tok.get()
                    if token.is_identifier():
                        # optional explicit origin for the include
                        new_origin = dns.name.from_text(
                            token.value, self.current_origin)
                        self.tok.get_eol()
                    elif not token.is_eol_or_eof():
                        raise dns.exception.SyntaxError(
                            "bad origin in $INCLUDE")
                    else:
                        new_origin = self.current_origin
                    # save tokenizer state, then descend into the
                    # included file
                    self.saved_state.append((self.tok,
                                             self.current_origin,
                                             self.last_name,
                                             self.current_file,
                                             self.ttl))
                    self.current_file = file(filename, 'r')
                    self.tok = dns.tokenizer.Tokenizer(self.current_file,
                                                       filename)
                    self.current_origin = new_origin
                else:
                    raise dns.exception.SyntaxError(
                        "Unknown master file directive '" + u + "'")
                continue
            # an ordinary resource record line
            self.tok.unget(token)
            self._rr_line()
    except dns.exception.SyntaxError, detail:
        # annotate the error with file name and line number
        (filename, line_number) = self.tok.where()
        if detail is None:
            detail = "syntax error"
        raise dns.exception.SyntaxError("%s:%d: %s" %
                                        (filename, line_number, detail))
    if self.check_origin:
        self.zone.check_origin()
Read a DNS master file and build a zone object .
7,158
def cnormpath(path):
    """Normalize a path name to platform-specific notation and make it
    absolute (relative paths are anchored at sys.prefix).
    """
    normed = normpath(path)
    if os.name == 'nt':
        # normpath keeps forward slashes; force backslash notation
        normed = normed.replace("/", "\\")
    if os.path.isabs(normed):
        return normed
    return normpath(os.path.join(sys.prefix, normed))
Norm a path name to platform specific notation and make it absolute .
7,159
def cc_run(args):
    """Run the given compiler command with a trivial main program
    piped to stdin; return True if it exited successfully.
    """
    prog = b"int main(){}\n"
    pipe = subprocess.Popen(args, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, close_fds=True)
    pipe.communicate(input=prog)
    # Popen.returncode is already a decoded exit code (negative for
    # signals), not a raw wait status, so os.WIFEXITED/WEXITSTATUS
    # were the wrong tool here; a plain comparison is correct.
    return pipe.returncode == 0
Run the C compiler with a simple main program .
7,160
def list_message_files(package, suffix=".mo"):
    """Yield (source, install_path) pairs for all found message files."""
    domain_file = "%s.mo" % package.lower()
    for po_file in glob.glob("po/*" + suffix):
        locale = os.path.splitext(os.path.basename(po_file))[0]
        yield (po_file,
               os.path.join("share", "locale", locale,
                            "LC_MESSAGES", domain_file))
Return list of all found message files and their installation paths .
7,161
def check_manifest():
    """Check that the files listed in MANIFEST are present when the
    source is unpacked (snatched from roundup.sf.net).
    """
    try:
        f = open('MANIFEST')
    except Exception:
        print('\n*** SOURCE WARNING: The MANIFEST file is missing!')
        return
    try:
        manifest = [line.strip() for line in f.readlines()
                    if not line.startswith('#')]
    finally:
        f.close()
    missing = [entry for entry in manifest if not os.path.exists(entry)]
    if missing:
        total = len(manifest)
        print('\n*** SOURCE WARNING: There are files missing (%d/%d found)!'
              % (total - len(missing), total))
        print('\nMissing: '.join(missing))
Snatched from roundup . sf . net . Check that the files listed in the MANIFEST are present when the source is unpacked .
7,162
def install(self):
    """Install the package and the generated config file."""
    outputs = super(MyInstallLib, self).install()
    conf_src = self.create_conf_file()
    conf_dst = os.path.join(self.install_dir,
                            os.path.basename(conf_src))
    self.copy_file(conf_src, conf_dst)
    outputs.append(conf_dst)
    return outputs
Install the generated config file .
7,163
def create_conf_file(self):
    """Create the configuration file recording the install paths."""
    cmd_obj = self.distribution.get_command_obj("install")
    cmd_obj.ensure_finalized()
    data = []
    for d in ['purelib', 'platlib', 'lib', 'headers', 'scripts', 'data']:
        attr = 'install_%s' % d
        if cmd_obj.root:
            # cut off the root path prefix, keeping the leading
            # path separator
            cutoff = len(cmd_obj.root)
            if cmd_obj.root.endswith(os.sep):
                cutoff -= 1
            val = getattr(cmd_obj, attr)[cutoff:]
        else:
            val = getattr(cmd_obj, attr)
        if attr == 'install_data':
            cdir = os.path.join(val, "share", "linkchecker")
            data.append('config_dir = %r' % cnormpath(cdir))
        elif attr == 'install_lib':
            if cmd_obj.root:
                # strip any drive letter and leading separator so the
                # path can be re-anchored under root
                _drive, tail = os.path.splitdrive(val)
                if tail.startswith(os.sep):
                    tail = tail[1:]
                self.install_lib = os.path.join(cmd_obj.root, tail)
            else:
                self.install_lib = val
        data.append("%s = %r" % (attr, cnormpath(val)))
    self.distribution.create_conf_file(data,
                                       directory=self.install_lib)
    return self.get_conf_output()
Create configuration file .
7,164
def get_outputs(self):
    """Add the generated config file (and its bytecode when compiling)
    to the list of outputs.
    """
    outputs = super(MyInstallLib, self).get_outputs()
    conf = self.get_conf_output()
    outputs.append(conf)
    if self.compile:
        outputs.extend(self._bytecode_filenames([conf]))
    return outputs
Add the generated config file to the list of outputs .
7,165
def fix_permissions(self):
    """Set correct read permissions on POSIX systems.

    Adds group/other read bits to every output, plus execute bits on
    directories so they can be traversed.  Might also be possible by
    setting the umask.
    """
    if os.name != 'posix' or self.dry_run:
        return
    for path in self.get_outputs():
        mode = os.stat(path)[stat.ST_MODE]
        if stat.S_ISDIR(mode):
            mode |= 0o11
        mode |= 0o44
        os.chmod(path, mode)
Set correct read permissions on POSIX systems . Might also be possible by setting umask?
7,166
def run_commands(self):
    """Generate a config file pointing at the source tree, then run
    the distutils commands.
    """
    cwd = os.getcwd()
    settings = [
        'config_dir = %r' % os.path.join(cwd, "config"),
        "install_data = %r" % cwd,
        "install_scripts = %r" % cwd,
    ]
    self.create_conf_file(settings)
    super(MyDistribution, self).run_commands()
Generate config file and run commands .
7,167
def build_extensions(self):
    """Add -std=gnu99 to the build options if the compiler supports
    it, then build all extensions.
    """
    extra = []
    if self.compiler.compiler_type == 'unix':
        option = "-std=gnu99"
        if cc_supports_option(self.compiler.compiler, option):
            extra.append(option)
    self.check_extensions_list(self.extensions)
    for ext in self.extensions:
        for opt in extra:
            if opt not in ext.extra_compile_args:
                ext.extra_compile_args.append(opt)
        self.build_extension(ext)
Add - std = gnu99 to build options if supported .
7,168
def run(self):
    """Remove the built share directory on clean --all."""
    if self.all:
        share_dir = os.path.join("build", "share")
        if os.path.exists(share_dir):
            remove_tree(share_dir, dry_run=self.dry_run)
        else:
            log.warn("'%s' does not exist -- can't clean it", share_dir)
    clean.run(self)
Remove share directory on clean .
7,169
def from_address(text):
    """Convert an IPv4 or IPv6 address in textual form into a Name
    object whose value is the reverse-map domain name of the address.

    @param text: the textual address
    @type text: string
    @rtype: dns.name.Name object
    """
    try:
        # Try IPv6 first: one label per hex nibble under the IPv6
        # reverse domain.  (Python 2 only: str.encode('hex_codec').)
        parts = list(dns.ipv6.inet_aton(text).encode('hex_codec'))
        origin = ipv6_reverse_domain
    except Exception:
        # Fall back to IPv4: one decimal label per address byte.
        parts = ['%d' % ord(byte) for byte in dns.ipv4.inet_aton(text)]
        origin = ipv4_reverse_domain
    # Reverse-map names list the address components least significant first.
    parts.reverse()
    return dns.name.from_text('.'.join(parts), origin=origin)
Convert an IPv4 or IPv6 address in textual form into a Name object whose value is the reverse - map domain name of the address .
7,170
def to_address(name):
    """Convert a reverse map domain name into textual address form.

    @param name: a name below the IPv4 or IPv6 reverse-map domain
    @type name: dns.name.Name object
    @rtype: string
    @raises dns.exception.SyntaxError: name is in neither reverse-map domain
    """
    if name.is_subdomain(ipv4_reverse_domain):
        name = name.relativize(ipv4_reverse_domain)
        labels = list(name.labels)
        labels.reverse()
        text = '.'.join(labels)
        # Round-trip through inet_aton to validate and normalize.
        return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
    elif name.is_subdomain(ipv6_reverse_domain):
        name = name.relativize(ipv6_reverse_domain)
        labels = list(name.labels)
        labels.reverse()
        parts = []
        i = 0
        l = len(labels)
        while i < l:
            # Each group of 4 nibble labels forms one 16-bit field.
            parts.append(''.join(labels[i:i + 4]))
            i += 4
        text = ':'.join(parts)
        return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
    else:
        raise dns.exception.SyntaxError('unknown reverse-map address family')
Convert a reverse map domain name into textual address form .
7,171
def from_text(text):
    """Convert text into an rcode.

    Accepts either a known rcode mnemonic (case-insensitive) or a
    decimal number in the range 0..4095.

    @raises UnknownRcode: the rcode is unknown
    @rtype: int
    """
    if text.isdigit():
        number = int(text)
        if 0 <= number <= 4095:
            return number
    value = _by_text.get(text.upper())
    if value is None:
        raise UnknownRcode
    return value
Convert text into an rcode .
7,172
def from_flags(flags, ednsflags):
    """Return the rcode value encoded by flags and ednsflags.

    The low 4 bits come from the DNS header flags; the next 8 bits come
    from the EDNS extended rcode field (bits 24..31 of ednsflags).

    @raises ValueError: rcode is < 0 or > 4095
    @rtype: int
    """
    low = flags & 0x000f
    high = (ednsflags >> 20) & 0xff0
    value = high | low
    if not 0 <= value <= 4095:
        raise ValueError('rcode must be >= 0 and <= 4095')
    return value
Return the rcode value encoded by flags and ednsflags .
7,173
def to_text(value):
    """Convert rcode into text.

    Unknown rcodes are rendered as their decimal value.

    @rtype: string
    """
    name = _by_value.get(value)
    return str(value) if name is None else name
Convert rcode into text .
7,174
def _resolve_entity(mo):
    """Resolve a HTML entity.

    @param mo: regex match object with an "entity" group holding the
      entity name or number -- presumably without the &#/&#x prefix for
      numeric entities; TODO confirm against the pattern used
    @return: the resolved unicode character, or u'' when unresolvable
    """
    ent = mo.group("entity")
    s = mo.group()
    if s.startswith('&#'):
        # Numeric character reference; &#x../&#X.. is hexadecimal.
        if s[2] in 'xX':
            radix = 16
        else:
            radix = 10
        try:
            num = int(ent, radix)
        except (ValueError, OverflowError):
            return u''
    else:
        # Named entity, looked up in the HTML entity table.
        num = name2codepoint.get(ent)
    if num is None or num < 0:
        return u''
    try:
        return unichr(num)
    except ValueError:
        # Codepoint out of range for unichr on this build.
        return u''
Resolve an HTML entity match to its unicode character.
7,175
def get_geoip_dat():
    """Find a GeoIP database file, preferring city over country lookup.

    @return: path to the database file, or None if no file was found
    """
    datafiles = ("GeoIPCity.dat", "GeoIP.dat")
    if os.name == 'nt':
        search_dirs = (sys.exec_prefix, r"c:\geoip")
    else:
        search_dirs = ("/usr/local/share/GeoIP", "/usr/share/GeoIP")
    candidates = (os.path.join(directory, datafile)
                  for directory in search_dirs
                  for datafile in datafiles)
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
Find a GeoIP database preferring city over country lookup .
7,176
def get_location(host):
    """Get translated country and optional city name.

    @return: u"city, country" (either part may be missing), or None if
      the geoip module is not installed or the lookup failed
    """
    if geoip is None:
        # No geoip module installed.
        return None
    try:
        record = get_geoip_record(host)
    except (geoip_error, socket.error):
        log.debug(LOG_PLUGIN, "Geoip error for %r", host, exception=True)
        return None
    value = u""
    if record and record.get("city"):
        value += unicode_safe(record["city"])
    if record and record.get("country_name"):
        if value:
            value += u", "
        value += unicode_safe(record["country_name"])
    return value
Get translated country and optional city name .
7,177
def get_intern_pattern(url):
    """Return intern pattern for given URL.

    Redirections to the same domain with or without "www." prepended
    are allowed; for http/https URLs, both schemes are allowed.

    @return: regular expression pattern string, or None if scheme or
      domain could not be determined
    """
    parts = strformat.url_unicode_split(url)
    scheme = parts[0].lower()
    domain = parts[1].lower()
    # Encode internationalized domain names into their ASCII (IDNA) form.
    domain, is_idn = urlutil.idna_encode(domain)
    # Treat "www.example.com" and "example.com" as the same host.
    if domain.startswith('www.'):
        domain = domain[4:]
    if not (domain and scheme):
        return None
    # Strip URL parameters and the last path segment (the document name).
    path = urlutil.splitparams(parts[2])[0]
    segments = path.split('/')[:-1]
    path = "/".join(segments)
    if url.endswith('/'):
        path += '/'
    args = list(re.escape(x) for x in (scheme, domain, path))
    if args[0] in ('http', 'https'):
        # Allow redirections between http and https.
        args[0] = 'https?'
    args[1] = r"(www\.|)%s" % args[1]
    return "^%s://%s%s" % tuple(args)
Return intern pattern for given URL . Redirections to the same domain with or without www . prepended are allowed .
7,178
def setdefault(self, key, *args):
    """Remember insertion order for keys not seen before, then
    delegate to dict.setdefault."""
    is_new = key not in self
    if is_new:
        self._keys.append(key)
    return super(ListDict, self).setdefault(key, *args)
Remember key order if key not found .
7,179
def pop(self, key):
    """Remove key from dict and return its value.

    @raises KeyError: if key is not in the dict
    """
    if key in self._keys:
        self._keys.remove(key)
    # Bug fix: the result of dict.pop() was previously discarded, so
    # this method always returned None despite its documented contract.
    return super(ListDict, self).pop(key)
Remove key from dict and return value .
7,180
def popitem(self):
    """Remove the oldest key from the dict and return that item."""
    if not self._keys:
        raise KeyError("popitem() on empty dictionary")
    oldest = self._keys[0]
    value = self[oldest]
    del self[oldest]
    return (oldest, value)
Remove oldest key from dict and return item .
7,181
def get(self, key, def_val=None):
    """Return the value stored under the lowercased key, or def_val
    when absent."""
    assert isinstance(key, basestring)
    lowered = key.lower()
    return dict.get(self, lowered, def_val)
Return lowercase key value .
7,182
def setdefault(self, key, *args):
    """Set value under the lowercased key if absent and return the
    stored value."""
    assert isinstance(key, basestring)
    lowered = key.lower()
    return dict.setdefault(self, lowered, *args)
Set lowercase key value and return .
7,183
def fromkeys(cls, iterable, value=None):
    """Construct a new caseless dict from the given keys.

    All keys are lowercased and mapped to value.
    """
    result = cls()
    for key in iterable:
        # Bypass any overridden __setitem__ and store the lowercased key.
        dict.__setitem__(result, key.lower(), value)
    return result
Construct new caseless dict from given data .
7,184
def pop(self, key, *args):
    """Remove the lowercased key from the dict and return its value."""
    assert isinstance(key, basestring)
    lowered = key.lower()
    return dict.pop(self, lowered, *args)
Remove lowercase key from dict and return value .
7,185
def shrink(self):
    """Shrink ca. 5% of entries.

    Removes the entries with the lowest usage counts.  Entries are
    stored in the underlying dict as (usage_count, value) pairs.
    """
    trim = int(0.05 * len(self))
    if trim:
        items = super(LFUCache, self).items()
        # Sort by usage count (first element of the stored pair).
        keyfunc = lambda x: x[1][0]
        values = sorted(items, key=keyfunc)
        for item in values[0:trim]:
            del self[item[0]]
Shrink ca . 5% of entries .
7,186
def setdefault(self, key, def_val=None):
    """Return the stored value when the key exists, otherwise store
    and return the default."""
    if key not in self:
        self[key] = def_val
        return def_val
    # Reading via self[key] presumably updates the usage count in the
    # LFU cache's __getitem__ -- TODO confirm.
    return self[key]
Update key usage if found and return value else set and return default .
7,187
def items(self):
    """Return a list of (key, value) items without updating usage
    counts; the stored (count, value) pairs are unwrapped."""
    raw = super(LFUCache, self).items()
    return [(k, entry[1]) for k, entry in raw]
Return list of items not updating usage count .
7,188
def popitem(self):
    """Remove and return an item, unwrapping the stored (count, value)
    pair."""
    key, entry = super(LFUCache, self).popitem()
    return (key, entry[1])
Remove and return an item .
7,189
def unicode_safe(s, encoding=i18n.default_encoding, errors='replace'):
    """Get unicode string without raising encoding errors.

    Characters that cannot be decoded with the given encoding are
    replaced (errors='replace' by default).

    @param s: object to convert; must not be None
    @rtype: unicode
    """
    assert s is not None, "argument to unicode_safe was None"
    if isinstance(s, unicode):
        # Already unicode, nothing to do.
        return s
    return unicode(str(s), encoding, errors)
Get unicode string without raising encoding errors. Characters that cannot be decoded with the given encoding are replaced by default.
7,190
def unquote(s, matching=False):
    """Remove leading and ending single and double quotes.

    The quotes need to match if matching is True.  Only one quote from
    each end will be stripped.
    """
    if not s or len(s) < 2:
        return s
    quotes = "\"'"
    if matching:
        # Strip only a matched pair of identical quotes.
        if s[0] in quotes and s[0] == s[-1]:
            return s[1:-1]
        return s
    if s[0] in quotes:
        s = s[1:]
    if s[-1] in quotes:
        s = s[:-1]
    return s
Remove leading and ending single and double quotes . The quotes need to match if matching is True . Only one quote from each end will be stripped .
7,191
def indent(text, indent_string=" "):
    """Indent each line of text with the given indent string, joined
    with the platform line separator."""
    prefixed = ("%s%s" % (indent_string, line)
                for line in str(text).splitlines())
    return os.linesep.join(prefixed)
Indent each line of text with the given indent string .
7,192
def strtime(t, func=time.localtime):
    """Return ISO 8601 formatted time with timezone offset appended."""
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", func(t))
    return stamp + strtimezone()
Return ISO 8601 formatted time .
7,193
def strduration_long(duration, do_translate=True):
    """Turn a time value in seconds into "x hours, x minutes" etc.

    @param duration: duration in seconds; may be negative or fractional
    @param do_translate: if False, replace the gettext functions _ and
      _n with English-only fallbacks
    @return: duration string keeping at most the two most significant
      units, prefixed with "-" for negative durations
    """
    if do_translate:
        global _, _n
    else:
        # English-only fallbacks instead of the translation functions.
        _ = lambda x: x
        _n = lambda a, b, n: a if n == 1 else b
    if duration < 0:
        duration = abs(duration)
        prefix = "-"
    else:
        prefix = ""
    if duration < 1:
        # Sub-second durations are shown with two decimals.
        return _("%(prefix)s%(duration).02f seconds") % {"prefix": prefix, "duration": duration}
    # Dummy calls -- presumably so the message extractor picks up the
    # singular/plural message ids used below; TODO confirm.
    _n("%d second", "%d seconds", 1)
    _n("%d minute", "%d minutes", 1)
    _n("%d hour", "%d hours", 1)
    _n("%d day", "%d days", 1)
    _n("%d year", "%d years", 1)
    cutoffs = [
        (60, "%d second", "%d seconds"),
        (60, "%d minute", "%d minutes"),
        (24, "%d hour", "%d hours"),
        (365, "%d day", "%d days"),
        (None, "%d year", "%d years"),
    ]
    time_str = []
    for divisor, single, plural in cutoffs:
        if duration < 1:
            break
        if divisor is None:
            # Largest unit: everything left over counts as years.
            duration, unit = 0, duration
        else:
            duration, unit = divmod(duration, divisor)
        if unit:
            time_str.append(_n(single, plural, unit) % unit)
    time_str.reverse()
    if len(time_str) > 2:
        # Keep only the two most significant units.
        time_str.pop()
    return "%s%s" % (prefix, ", ".join(time_str))
Turn a time value in seconds into x hours x minutes etc .
7,194
def strtimezone():
    """Return the local timezone offset in hours, formatted with
    %+04d (a %z-like value; %z is not supported on all platforms)."""
    offset = time.altzone if time.daylight else time.timezone
    return "%+04d" % (-offset // SECONDS_PER_HOUR)
Return the timezone offset in hours (like strftime's %z, which is not supported on all platforms).
7,195
def create_socket(family, socktype, proto=0, timeout=60):
    """Create a socket with given family and type, setting a timeout
    and TCP_NODELAY for inet stream sockets.

    @param family: socket family, e.g. socket.AF_INET
    @param socktype: socket type, e.g. socket.SOCK_STREAM
    @param proto: protocol number (default 0)
    @param timeout: socket timeout in seconds
    @rtype: socket.socket
    """
    sock = socket.socket(family, socktype, proto=proto)
    sock.settimeout(timeout)
    socktypes_inet = [socket.AF_INET]
    if has_ipv6:
        socktypes_inet.append(socket.AF_INET6)
    if family in socktypes_inet and socktype == socket.SOCK_STREAM:
        # Disable Nagle's algorithm so small requests are sent at once.
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    return sock
Create a socket with the given family and type, set its timeout, and disable Nagle's algorithm for inet stream sockets.
7,196
def applies_to(self, url_data):
    """This plugin applies only to valid, non-extern https URLs that
    have an open URL connection."""
    return (url_data.valid
            and url_data.scheme == 'https'
            and not url_data.extern[0]
            and url_data.url_connection is not None)
Check validity, scheme, extern flag and url_connection.
7,197
def check(self, url_data):
    """Run all SSL certificate checks that have not yet been done.

    Each host is checked only once.  OpenSSL already checked the SSL
    notBefore and notAfter dates during the connection, so only the
    expiry-warning check remains here.
    """
    host = url_data.urlparts[1]
    if host in self.checked_hosts:
        return
    self.checked_hosts.add(host)
    cert = url_data.ssl_cert
    config = url_data.aggregate.config
    if cert and 'notAfter' in cert:
        self.check_ssl_valid_date(url_data, cert)
    elif config['sslverify']:
        # Verification is on, but the peer cert lacks expiry info.
        msg = _('certificate did not include "notAfter" information')
        url_data.add_warning(msg)
    else:
        # No cert info available because verification is switched off.
        msg = _('SSL verification is disabled; enable the sslverify option')
        url_data.add_warning(msg)
Run all SSL certificate checks that have not yet been done . OpenSSL already checked the SSL notBefore and notAfter dates .
7,198
def check_ssl_valid_date(self, url_data, cert):
    """Check if the certificate is still valid, and warn when it
    expires within the configured number of seconds.

    @param url_data: the URL being checked; warnings/infos are added to it
    @param cert: SSL certificate dict containing a 'notAfter' entry
    """
    import ssl
    try:
        notAfter = ssl.cert_time_to_seconds(cert['notAfter'])
    except ValueError:
        # Bug fix: corrected the typo "certficate" -> "certificate" in
        # the two messages below (translation catalogs must be updated
        # to match the new message ids).
        msg = _('Invalid SSL certificate "notAfter" value %r') % cert['notAfter']
        url_data.add_warning(msg)
        return
    curTime = time.time()
    secondsValid = notAfter - curTime
    args = dict(expire=cert['notAfter'])
    if secondsValid < 0:
        msg = _('SSL certificate is expired on %(expire)s.')
        url_data.add_warning(msg % args)
    else:
        args['valid'] = strformat.strduration_long(secondsValid)
        if secondsValid < self.warn_ssl_cert_secs_valid:
            # Expires soon: warn with the remaining validity period.
            msg = _('SSL certificate expires on %(expire)s and is only %(valid)s valid.')
            url_data.add_warning(msg % args)
        else:
            msg = _('SSL certificate expires on %(expire)s and is %(valid)s valid.')
            url_data.add_info(msg % args)
Check if the certificate is still valid, or, if configured, check if it is at least a number of days valid.
7,199
def add_anchor(self, url, line, column, name, base):
    """Record an anchor URL with its position, name and base."""
    anchor_info = (url, line, column, name, base)
    self.anchors.append(anchor_info)
Add anchor URL .