| idx (int64, 0-63k) | question (string, length 61-4.03k) | target (string, length 6-1.23k) |
|---|---|---|
7,300
|
def has_module ( name , without_error = True ) : try : importlib . import_module ( name ) return True except ImportError : return False except Exception : return not without_error
|
Test if given module can be imported .
|
7,301
|
def pathencode ( path ) : if isinstance ( path , unicode ) and not os . path . supports_unicode_filenames : path = path . encode ( FSCODING , "replace" ) return path
|
Encode a path string with the platform file system encoding .
|
7,302
|
def has_changed ( filename ) : key = os . path . abspath ( filename ) mtime = get_mtime ( key ) if key not in _mtime_cache : _mtime_cache [ key ] = mtime return True return mtime > _mtime_cache [ key ]
|
Check if filename has changed since the last check. If this is the first check, assume the file is changed.
|
7,303
|
def is_readable ( filename ) : return os . path . isfile ( filename ) and os . access ( filename , os . R_OK )
|
Check if file is a regular file and is readable .
|
7,304
|
def is_accessable_by_others ( filename ) : mode = os . stat ( filename ) [ stat . ST_MODE ] return mode & ( stat . S_IRWXG | stat . S_IRWXO )
|
Check if file is group- or world-accessible.
|
7,305
|
def is_writable_by_others ( filename ) : mode = os . stat ( filename ) [ stat . ST_MODE ] return mode & stat . S_IWOTH
|
Check if file or directory is world writable .
|
7,306
|
def is_writable ( filename ) : if not os . path . exists ( filename ) : parentdir = os . path . dirname ( filename ) return os . path . isdir ( parentdir ) and os . access ( parentdir , os . W_OK ) return os . path . isfile ( filename ) and os . access ( filename , os . W_OK )
|
Check if the file is a regular file and is writable, or the file does not exist and its parent directory exists and is writable.
|
7,307
|
def write ( self , data ) : self . tmpbuf . append ( data ) self . pos += len ( data )
|
Write data to buffer .
|
7,308
|
def flush ( self , overlap = 0 ) : self . buf += self . empty . join ( self . tmpbuf ) self . tmpbuf = [ ] if overlap and overlap < self . pos : data = self . buf [ : - overlap ] self . buf = self . buf [ - overlap : ] else : data = self . buf self . buf = self . empty return data
|
Flush buffered data and return it .
|
7,309
|
def guess_url ( url ) : if url . lower ( ) . startswith ( "www." ) : return "http://%s" % url elif url . lower ( ) . startswith ( "ftp." ) : return "ftp://%s" % url return url
|
Guess if the URL is an HTTP or FTP URL.
|
7,310
|
def absolute_url ( base_url , base_ref , parent_url ) : if base_url and urlutil . url_is_absolute ( base_url ) : return base_url elif base_ref and urlutil . url_is_absolute ( base_ref ) : return base_ref elif parent_url and urlutil . url_is_absolute ( parent_url ) : return parent_url return u""
|
Search for the absolute URL to detect the link type. This does not join any URL fragments together!
|
7,311
|
def get_url_from ( base_url , recursion_level , aggregate , parent_url = None , base_ref = None , line = 0 , column = 0 , page = 0 , name = u"" , parent_content_type = None , extern = None ) : if base_url is not None : base_url = strformat . unicode_safe ( base_url ) base_url_stripped = base_url . lstrip ( ) else : base_url_stripped = base_url if parent_url is not None : parent_url = strformat . unicode_safe ( parent_url ) if base_ref is not None : base_ref = strformat . unicode_safe ( base_ref ) name = strformat . unicode_safe ( name ) url = absolute_url ( base_url_stripped , base_ref , parent_url ) . lower ( ) if ":" in url : scheme = url . split ( ":" , 1 ) [ 0 ] . lower ( ) else : scheme = None if not ( url or name ) : name = base_url . replace ( "\\" , "/" ) allowed_schemes = aggregate . config [ "allowedschemes" ] local_php = ( parent_content_type == 'application/x-httpd-php' and '<?' in base_url and '?>' in base_url and scheme == 'file' ) if local_php or ( allowed_schemes and scheme not in allowed_schemes ) : klass = ignoreurl . IgnoreUrl else : assume_local_file = ( recursion_level == 0 ) klass = get_urlclass_from ( scheme , assume_local_file = assume_local_file ) log . debug ( LOG_CHECK , "%s handles url %s" , klass . __name__ , base_url ) return klass ( base_url , recursion_level , aggregate , parent_url = parent_url , base_ref = base_ref , line = line , column = column , page = page , name = name , extern = extern )
|
Get url data from given base data .
|
7,312
|
def get_urlclass_from ( scheme , assume_local_file = False ) : if scheme in ( "http" , "https" ) : klass = httpurl . HttpUrl elif scheme == "ftp" : klass = ftpurl . FtpUrl elif scheme == "file" : klass = fileurl . FileUrl elif scheme == "telnet" : klass = telneturl . TelnetUrl elif scheme == "mailto" : klass = mailtourl . MailtoUrl elif scheme in ( "nntp" , "news" , "snews" ) : klass = nntpurl . NntpUrl elif scheme == "dns" : klass = dnsurl . DnsUrl elif scheme == "itms-services" : klass = itmsservicesurl . ItmsServicesUrl elif scheme and unknownurl . is_unknown_scheme ( scheme ) : klass = unknownurl . UnknownUrl elif assume_local_file : klass = fileurl . FileUrl else : klass = unknownurl . UnknownUrl return klass
|
Return checker class for given URL scheme. If the scheme cannot be matched and assume_local_file is True, assume a local file.
|
7,313
|
def get_index_html ( urls ) : lines = [ "<html>" , "<body>" ] for entry in urls : name = cgi . escape ( entry ) try : url = cgi . escape ( urllib . quote ( entry ) ) except KeyError : url = name lines . append ( '<a href="%s">%s</a>' % ( url , name ) ) lines . extend ( [ "</body>" , "</html>" ] ) return os . linesep . join ( lines )
|
Construct an artificial index.html from the given URLs.
|
7,314
|
def run_checked ( self ) : self . start_time = time . time ( ) self . setName ( "Interrupt" ) while not self . stopped ( self . WaitSeconds ) : duration = time . time ( ) - self . start_time if duration > self . duration : log . warn ( LOG_CHECK , "Interrupt after %s" % strformat . strduration_long ( duration ) ) raise KeyboardInterrupt ( )
|
Wait and raise KeyboardInterrupt after the configured duration.
|
7,315
|
def esc_ansicolor ( color ) : control = '' if ";" in color : control , color = color . split ( ";" , 1 ) control = AnsiControl . get ( control , '' ) + ";" cnum = AnsiColor . get ( color , '0' ) return AnsiEsc % ( control + cnum )
|
Convert a named color definition to an escaped ANSI color.
|
7,316
|
def get_win_color ( color ) : foreground = background = style = None control = '' if ";" in color : control , color = color . split ( ";" , 1 ) if control == bold : style = colorama . BRIGHT if color in InverseColors : background = WinColor [ color ] else : foreground = WinColor . get ( color ) return foreground , background , style
|
Convert a named color definition to Windows console foreground, background, and style numbers.
|
7,317
|
def has_colors ( fp ) : if not is_tty ( fp ) : return False if os . name == 'nt' : return True elif has_curses : import curses try : curses . setupterm ( os . environ . get ( "TERM" ) , fp . fileno ( ) ) return curses . tigetnum ( "colors" ) >= 8 except curses . error : return False return False
|
Test if the given file is an ANSI-color-enabled tty.
|
7,318
|
def get_columns ( fp ) : if not is_tty ( fp ) : return 80 if os . name == 'nt' : return colorama . get_console_size ( ) . X if has_curses : import curses try : curses . setupterm ( os . environ . get ( "TERM" ) , fp . fileno ( ) ) return curses . tigetnum ( "cols" ) except curses . error : pass return 80
|
Return number of columns for given file .
|
7,319
|
def _write_color ( self , text , color = None ) : if color is None : self . fp . write ( text ) else : write_color ( self . fp , text , color )
|
Print text with the given color. If color is None, print text as-is.
|
7,320
|
def search_url ( obj , url_data , pageno , seen_objs ) : if isinstance ( obj , PDFObjRef ) : if obj . objid in seen_objs : return seen_objs . add ( obj . objid ) obj = obj . resolve ( ) if isinstance ( obj , dict ) : for key , value in obj . items ( ) : if key == 'URI' and isinstance ( value , basestring ) : url = strformat . unicode_safe ( value ) url_data . add_url ( url , page = pageno ) else : search_url ( value , url_data , pageno , seen_objs ) elif isinstance ( obj , list ) : for elem in obj : search_url ( elem , url_data , pageno , seen_objs ) elif isinstance ( obj , PDFStream ) : search_url ( obj . attrs , url_data , pageno , seen_objs )
|
Recurse through a PDF object searching for URLs .
|
7,321
|
def check ( self , url_data ) : password = '' data = url_data . get_content ( ) fp = StringIO ( data ) try : parser = PDFParser ( fp ) doc = PDFDocument ( parser , password = password ) for ( pageno , page ) in enumerate ( PDFPage . create_pages ( doc ) , start = 1 ) : if "Contents" in page . attrs : search_url ( page . attrs [ "Contents" ] , url_data , pageno , set ( ) ) if "Annots" in page . attrs : search_url ( page . attrs [ "Annots" ] , url_data , pageno , set ( ) ) except PSException as msg : if not msg . args : msg = repr ( msg ) log . warn ( LOG_PLUGIN , "Error parsing PDF file: %s" , msg )
|
Parse PDF data .
|
7,322
|
def _escapify ( label ) : text = '' for c in label : if c in _escaped : text += '\\' + c elif ord ( c ) > 0x20 and ord ( c ) < 0x7F : text += c else : text += '\\%03d' % ord ( c ) return text
|
Escape the characters in label which need it .
|
7,323
|
def _validate_labels ( labels ) : l = len ( labels ) total = 0 i = - 1 j = 0 for label in labels : ll = len ( label ) total += ll + 1 if ll > 63 : raise LabelTooLong if i < 0 and label == '' : i = j j += 1 if total > 255 : raise NameTooLong if i >= 0 and i != l - 1 : raise EmptyLabel
|
Check for empty labels in the middle of a label sequence, for labels that are too long, and for too many labels.
|
7,324
|
def from_unicode ( text , origin = root ) : if not isinstance ( text , unicode ) : raise ValueError ( "input to from_unicode() must be a unicode string" ) if not ( origin is None or isinstance ( origin , Name ) ) : raise ValueError ( "origin must be a Name or None" ) labels = [ ] label = u'' escaping = False edigits = 0 total = 0 if text == u'@' : text = u'' if text : if text == u'.' : return Name ( [ '' ] ) for c in text : if escaping : if edigits == 0 : if c . isdigit ( ) : total = int ( c ) edigits += 1 else : label += c escaping = False else : if not c . isdigit ( ) : raise BadEscape total *= 10 total += int ( c ) edigits += 1 if edigits == 3 : escaping = False label += chr ( total ) elif c == u'.' or c == u'\u3002' or c == u'\uff0e' or c == u'\uff61' : if len ( label ) == 0 : raise EmptyLabel labels . append ( encodings . idna . ToASCII ( label ) ) label = u'' elif c == u'\\' : escaping = True edigits = 0 total = 0 else : label += c if escaping : raise BadEscape if len ( label ) > 0 : labels . append ( encodings . idna . ToASCII ( label ) ) else : labels . append ( '' ) if ( len ( labels ) == 0 or labels [ - 1 ] != '' ) and not origin is None : labels . extend ( list ( origin . labels ) ) return Name ( labels )
|
Convert unicode text into a Name object .
|
7,325
|
def from_text ( text , origin = root ) : if not isinstance ( text , str ) : if isinstance ( text , unicode ) and sys . hexversion >= 0x02030000 : return from_unicode ( text , origin ) else : raise ValueError ( "input to from_text() must be a string" ) if not ( origin is None or isinstance ( origin , Name ) ) : raise ValueError ( "origin must be a Name or None" ) labels = [ ] label = '' escaping = False edigits = 0 total = 0 if text == '@' : text = '' if text : if text == '.' : return Name ( [ '' ] ) for c in text : if escaping : if edigits == 0 : if c . isdigit ( ) : total = int ( c ) edigits += 1 else : label += c escaping = False else : if not c . isdigit ( ) : raise BadEscape total *= 10 total += int ( c ) edigits += 1 if edigits == 3 : escaping = False label += chr ( total ) elif c == '.' : if len ( label ) == 0 : raise EmptyLabel labels . append ( label ) label = '' elif c == '\\' : escaping = True edigits = 0 total = 0 else : label += c if escaping : raise BadEscape if len ( label ) > 0 : labels . append ( label ) else : labels . append ( '' ) if ( len ( labels ) == 0 or labels [ - 1 ] != '' ) and not origin is None : labels . extend ( list ( origin . labels ) ) return Name ( labels )
|
Convert text into a Name object .
|
7,326
|
def from_wire ( message , current ) : if not isinstance ( message , str ) : raise ValueError ( "input to from_wire() must be a byte string" ) message = dns . wiredata . maybe_wrap ( message ) labels = [ ] biggest_pointer = current hops = 0 count = ord ( message [ current ] ) current += 1 cused = 1 while count != 0 : if count < 64 : labels . append ( message [ current : current + count ] . unwrap ( ) ) current += count if hops == 0 : cused += count elif count >= 192 : current = ( count & 0x3f ) * 256 + ord ( message [ current ] ) if hops == 0 : cused += 1 if current >= biggest_pointer : raise BadPointer biggest_pointer = current hops += 1 else : raise BadLabelType count = ord ( message [ current ] ) current += 1 if hops == 0 : cused += 1 labels . append ( '' ) return ( Name ( labels ) , cused )
|
Convert possibly compressed wire format into a Name .
|
7,327
|
def is_subdomain ( self , other ) : ( nr , o , nl ) = self . fullcompare ( other ) if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL : return True return False
|
Is self a subdomain of other?
|
7,328
|
def is_superdomain ( self , other ) : ( nr , o , nl ) = self . fullcompare ( other ) if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL : return True return False
|
Is self a superdomain of other?
|
7,329
|
def to_text ( self , omit_final_dot = False ) : if len ( self . labels ) == 0 : return '@' if len ( self . labels ) == 1 and self . labels [ 0 ] == '' : return '.' if omit_final_dot and self . is_absolute ( ) : l = self . labels [ : - 1 ] else : l = self . labels s = '.' . join ( map ( _escapify , l ) ) return s
|
Convert name to text format .
|
7,330
|
def to_unicode ( self , omit_final_dot = False ) : if len ( self . labels ) == 0 : return u'@' if len ( self . labels ) == 1 and self . labels [ 0 ] == '' : return u'.' if omit_final_dot and self . is_absolute ( ) : l = self . labels [ : - 1 ] else : l = self . labels s = u'.' . join ( [ encodings . idna . ToUnicode ( _escapify ( x ) ) for x in l ] ) return s
|
Convert name to Unicode text format .
|
7,331
|
def to_digestable ( self , origin = None ) : if not self . is_absolute ( ) : if origin is None or not origin . is_absolute ( ) : raise NeedAbsoluteNameOrOrigin labels = list ( self . labels ) labels . extend ( list ( origin . labels ) ) else : labels = self . labels dlabels = [ "%s%s" % ( chr ( len ( x ) ) , x . lower ( ) ) for x in labels ] return '' . join ( dlabels )
|
Convert name to a format suitable for digesting in hashes .
|
7,332
|
def to_wire ( self , file = None , compress = None , origin = None ) : if file is None : file = cStringIO . StringIO ( ) want_return = True else : want_return = False if not self . is_absolute ( ) : if origin is None or not origin . is_absolute ( ) : raise NeedAbsoluteNameOrOrigin labels = list ( self . labels ) labels . extend ( list ( origin . labels ) ) else : labels = self . labels i = 0 for label in labels : n = Name ( labels [ i : ] ) i += 1 if not compress is None : pos = compress . get ( n ) else : pos = None if not pos is None : value = 0xc000 + pos s = struct . pack ( '!H' , value ) file . write ( s ) break else : if not compress is None and len ( n ) > 1 : pos = file . tell ( ) if pos < 0xc000 : compress [ n ] = pos l = len ( label ) file . write ( chr ( l ) ) if l > 0 : file . write ( label ) if want_return : return file . getvalue ( )
|
Convert name to wire format, possibly compressing it.
|
7,333
|
def split ( self , depth ) : l = len ( self . labels ) if depth == 0 : return ( self , dns . name . empty ) elif depth == l : return ( dns . name . empty , self ) elif depth < 0 or depth > l : raise ValueError ( 'depth must be >= 0 and <= the length of the name' ) return ( Name ( self [ : - depth ] ) , Name ( self [ - depth : ] ) )
|
Split a name into a prefix and suffix at depth .
|
7,334
|
def concatenate ( self , other ) : if self . is_absolute ( ) and len ( other ) > 0 : raise AbsoluteConcatenation labels = list ( self . labels ) labels . extend ( list ( other . labels ) ) return Name ( labels )
|
Return a new name which is the concatenation of self and other .
|
7,335
|
def relativize ( self , origin ) : if not origin is None and self . is_subdomain ( origin ) : return Name ( self [ : - len ( origin ) ] ) else : return self
|
If self is a subdomain of origin, return a new name which is self relative to origin. Otherwise return self.
|
7,336
|
def choose_relativity ( self , origin = None , relativize = True ) : if origin : if relativize : return self . relativize ( origin ) else : return self . derelativize ( origin ) else : return self
|
Return a name with the relativity desired by the caller. If origin is None, then self is returned. Otherwise, if relativize is true the name is relativized, and if relativize is false the name is derelativized.
|
7,337
|
def parent ( self ) : if self == root or self == empty : raise NoParent return Name ( self . labels [ 1 : ] )
|
Return the parent of the name .
|
7,338
|
def init_win32com ( ) : global _initialized if _initialized : return import win32com . client if win32com . client . gencache . is_readonly : win32com . client . gencache . is_readonly = False win32com . client . gencache . Rebuild ( ) _initialized = True
|
Initialize the win32com.client cache.
|
7,339
|
def has_word ( ) : if not has_win32com : return False try : import _winreg as winreg except ImportError : import winreg try : key = winreg . OpenKey ( winreg . HKEY_CLASSES_ROOT , "Word.Application" ) winreg . CloseKey ( key ) return True except ( EnvironmentError , ImportError ) : pass return False
|
Determine if Word is available on the current system .
|
7,340
|
def get_word_app ( ) : if not has_word ( ) : return None pythoncom . CoInitialize ( ) import win32com . client app = win32com . client . gencache . EnsureDispatch ( "Word.Application" ) app . Visible = False return app
|
Return an open Word.Application handle, or None if Word is not available on this system.
|
7,341
|
def open_wordfile ( app , filename ) : return app . Documents . Open ( filename , ReadOnly = True , AddToRecentFiles = False , Visible = False , NoEncodingDialog = True )
|
Open given Word file with application object .
|
7,342
|
def get_line_number ( doc , wrange ) : lineno = 1 wrange . Select ( ) wdFirstCharacterLineNumber = constants ( "wdFirstCharacterLineNumber" ) wdGoToLine = constants ( "wdGoToLine" ) wdGoToPrevious = constants ( "wdGoToPrevious" ) while True : curline = doc . Selection . Information ( wdFirstCharacterLineNumber ) doc . Selection . GoTo ( wdGoToLine , wdGoToPrevious , Count = 1 , Name = "" ) lineno += 1 prevline = doc . Selection . Information ( wdFirstCharacterLineNumber ) if prevline == curline : break return lineno
|
Get line number for given range object .
|
7,343
|
def get_temp_filename ( content ) : fd , filename = fileutil . get_temp_file ( mode = 'wb' , suffix = '.doc' , prefix = 'lc_' ) try : fd . write ( content ) finally : fd . close ( ) return filename
|
Get temporary filename for content to parse .
|
7,344
|
def check ( self , url_data ) : content = url_data . get_content ( ) filename = get_temp_filename ( content ) try : app = get_word_app ( ) try : doc = open_wordfile ( app , filename ) if doc is None : raise Error ( "could not open word file %r" % filename ) try : for link in doc . Hyperlinks : line = get_line_number ( link . Range ) name = link . TextToDisplay url_data . add_url ( link . Address , name = name , line = line ) finally : close_wordfile ( doc ) finally : close_word_app ( app ) except Error as msg : log . warn ( LOG_PLUGIN , "Error parsing word file: %s" , msg )
|
Parse Word data .
|
7,345
|
def strip_caret_codes ( text ) : text = text . replace ( '^^' , '\x00' ) for token , foo in _ANSI_CODES : text = text . replace ( token , '' ) return text . replace ( '\x00' , '^' )
|
Strip out any caret codes from a string .
|
7,346
|
def word_wrap ( text , columns = 80 , indent = 4 , padding = 2 ) : paragraphs = _PARA_BREAK . split ( text ) lines = [ ] columns -= padding for para in paragraphs : if para . isspace ( ) : continue line = ' ' * indent for word in para . split ( ) : if ( len ( line ) + 1 + len ( word ) ) > columns : lines . append ( line ) line = ' ' * padding line += word else : line += ' ' + word if not line . isspace ( ) : lines . append ( line ) return lines
|
Given a block of text, break it into a list of lines wrapped to the given length.
|
7,347
|
def _get_char ( self ) : if self . ungotten_char is None : if self . eof : c = '' else : c = self . file . read ( 1 ) if c == '' : self . eof = True elif c == '\n' : self . line_number += 1 else : c = self . ungotten_char self . ungotten_char = None return c
|
Read a character from input .
|
7,348
|
def skip_whitespace ( self ) : skipped = 0 while True : c = self . _get_char ( ) if c != ' ' and c != '\t' : if ( c != '\n' ) or not self . multiline : self . _unget_char ( c ) return skipped skipped += 1
|
Consume input until a non-whitespace character is encountered.
|
7,349
|
def get_int ( self ) : token = self . get ( ) . unescape ( ) if not token . is_identifier ( ) : raise dns . exception . SyntaxError ( 'expecting an identifier' ) if not token . value . isdigit ( ) : raise dns . exception . SyntaxError ( 'expecting an integer' ) return int ( token . value )
|
Read the next token and interpret it as an integer .
|
7,350
|
def get_uint8 ( self ) : value = self . get_int ( ) if value < 0 or value > 255 : raise dns . exception . SyntaxError ( '%d is not an unsigned 8-bit integer' % value ) return value
|
Read the next token and interpret it as an 8-bit unsigned integer.
|
7,351
|
def get_uint16 ( self ) : value = self . get_int ( ) if value < 0 or value > 65535 : raise dns . exception . SyntaxError ( '%d is not an unsigned 16-bit integer' % value ) return value
|
Read the next token and interpret it as a 16-bit unsigned integer.
|
7,352
|
def get_uint32 ( self ) : token = self . get ( ) . unescape ( ) if not token . is_identifier ( ) : raise dns . exception . SyntaxError ( 'expecting an identifier' ) if not token . value . isdigit ( ) : raise dns . exception . SyntaxError ( 'expecting an integer' ) value = long ( token . value ) if value < 0 or value > 4294967296L : raise dns . exception . SyntaxError ( '%d is not an unsigned 32-bit integer' % value ) return value
|
Read the next token and interpret it as a 32-bit unsigned integer.
|
7,353
|
def get_string ( self , origin = None ) : token = self . get ( ) . unescape ( ) if not ( token . is_identifier ( ) or token . is_quoted_string ( ) ) : raise dns . exception . SyntaxError ( 'expecting a string' ) return token . value
|
Read the next token and interpret it as a string .
|
7,354
|
def get_identifier ( self , origin = None ) : token = self . get ( ) . unescape ( ) if not token . is_identifier ( ) : raise dns . exception . SyntaxError ( 'expecting an identifier' ) return token . value
|
Read the next token and raise an exception if it is not an identifier .
|
7,355
|
def get_name ( self , origin = None ) : token = self . get ( ) if not token . is_identifier ( ) : raise dns . exception . SyntaxError ( 'expecting an identifier' ) return dns . name . from_text ( token . value , origin )
|
Read the next token and interpret it as a DNS name .
|
7,356
|
def get_eol ( self ) : token = self . get ( ) if not token . is_eol_or_eof ( ) : raise dns . exception . SyntaxError ( 'expected EOL or EOF, got %d "%s"' % ( token . ttype , token . value ) ) return token . value
|
Read the next token and raise an exception if it isn't EOL or EOF.
|
7,357
|
def from_rdata_list ( ttl , rdatas ) : if len ( rdatas ) == 0 : raise ValueError ( "rdata list must not be empty" ) r = None for rd in rdatas : if r is None : r = Rdataset ( rd . rdclass , rd . rdtype ) r . update_ttl ( ttl ) first_time = False r . add ( rd ) return r
|
Create an rdataset with the specified TTL and with the specified list of rdata objects .
|
7,358
|
def update_ttl ( self , ttl ) : if len ( self ) == 0 : self . ttl = ttl elif ttl < self . ttl : self . ttl = ttl
|
Set the TTL of the rdataset to be the lesser of the set's current TTL and the specified TTL. If the set contains no rdatas, set the TTL to the specified TTL.
|
7,359
|
def add ( self , rd , ttl = None ) : if self . rdclass != rd . rdclass or self . rdtype != rd . rdtype : raise IncompatibleTypes if not ttl is None : self . update_ttl ( ttl ) if self . rdtype == dns . rdatatype . RRSIG or self . rdtype == dns . rdatatype . SIG : covers = rd . covers ( ) if len ( self ) == 0 and self . covers == dns . rdatatype . NONE : self . covers = covers elif self . covers != covers : raise DifferingCovers if dns . rdatatype . is_singleton ( rd . rdtype ) and len ( self ) > 0 : self . clear ( ) super ( Rdataset , self ) . add ( rd )
|
Add the specified rdata to the rdataset .
|
7,360
|
def update ( self , other ) : self . update_ttl ( other . ttl ) super ( Rdataset , self ) . update ( other )
|
Add all rdatas in other to self .
|
7,361
|
def to_text ( self , name = None , origin = None , relativize = True , override_rdclass = None , ** kw ) : if not name is None : name = name . choose_relativity ( origin , relativize ) ntext = str ( name ) pad = ' ' else : ntext = '' pad = '' s = StringIO . StringIO ( ) if not override_rdclass is None : rdclass = override_rdclass else : rdclass = self . rdclass if len ( self ) == 0 : print >> s , '%s%s%s %s' % ( ntext , pad , dns . rdataclass . to_text ( rdclass ) , dns . rdatatype . to_text ( self . rdtype ) ) else : for rd in self : print >> s , '%s%s%d %s %s %s' % ( ntext , pad , self . ttl , dns . rdataclass . to_text ( rdclass ) , dns . rdatatype . to_text ( self . rdtype ) , rd . to_text ( origin = origin , relativize = relativize , ** kw ) ) return s . getvalue ( ) [ : - 1 ]
|
Convert the rdataset into DNS master file format .
|
7,362
|
def to_wire ( self , name , file , compress = None , origin = None , override_rdclass = None , want_shuffle = True ) : if not override_rdclass is None : rdclass = override_rdclass want_shuffle = False else : rdclass = self . rdclass file . seek ( 0 , 2 ) if len ( self ) == 0 : name . to_wire ( file , compress , origin ) stuff = struct . pack ( "!HHIH" , self . rdtype , rdclass , 0 , 0 ) file . write ( stuff ) return 1 else : if want_shuffle : l = list ( self ) random . shuffle ( l ) else : l = self for rd in l : name . to_wire ( file , compress , origin ) stuff = struct . pack ( "!HHIH" , self . rdtype , rdclass , self . ttl , 0 ) file . write ( stuff ) start = file . tell ( ) rd . to_wire ( file , compress , origin ) end = file . tell ( ) assert end - start < 65536 file . seek ( start - 2 ) stuff = struct . pack ( "!H" , end - start ) file . write ( stuff ) file . seek ( 0 , 2 ) return len ( self )
|
Convert the rdataset to wire format .
|
7,363
|
def match ( self , rdclass , rdtype , covers ) : if self . rdclass == rdclass and self . rdtype == rdtype and self . covers == covers : return True return False
|
Returns True if this rdataset matches the specified class, type, and covers.
|
7,364
|
def get_plugin_modules ( folders , package = 'plugins' , parentpackage = 'linkcheck.dummy' ) : for folder in folders : for module in loader . get_folder_modules ( folder , parentpackage ) : yield module for module in loader . get_package_modules ( package ) : yield module
|
Get plugin modules for given folders .
|
7,365
|
def get_plugin_classes ( modules ) : classes = ( _ConnectionPlugin , _ContentPlugin , _ParserPlugin ) return loader . get_plugins ( modules , classes )
|
Get plugin classes for given modules .
|
7,366
|
def load_modules ( self , modules , config ) : for pluginclass in get_plugin_classes ( modules ) : name = pluginclass . __name__ if name in config [ "enabledplugins" ] : if issubclass ( pluginclass , _ConnectionPlugin ) : log . debug ( LOG_PLUGIN , "Enable connection plugin %s" , name ) self . connection_plugins . append ( pluginclass ( config [ name ] ) ) elif issubclass ( pluginclass , _ContentPlugin ) : log . debug ( LOG_PLUGIN , "Enable content plugin %s" , name ) self . content_plugins . append ( pluginclass ( config [ name ] ) ) elif issubclass ( pluginclass , _ParserPlugin ) : log . debug ( LOG_PLUGIN , "Enable parser plugin %s" , name ) self . parser_plugins . append ( pluginclass ( config [ name ] ) ) else : raise ValueError ( "Invalid plugin class %s" % pluginclass )
|
Load plugin modules .
|
7,367
|
def run_parser_plugins ( self , url_data , pagetype ) : run_plugins ( self . parser_plugins , url_data , stop_after_match = True , pagetype = pagetype )
|
Run parser plugins for given pagetype .
|
7,368
|
def check_urls ( aggregate ) : try : aggregate . visit_loginurl ( ) except Exception as msg : log . warn ( LOG_CHECK , _ ( "Error using login URL: %(msg)s." ) % dict ( msg = msg ) ) raise try : aggregate . logger . start_log_output ( ) except Exception as msg : log . error ( LOG_CHECK , _ ( "Error starting log output: %(msg)s." ) % dict ( msg = msg ) ) raise try : if not aggregate . urlqueue . empty ( ) : aggregate . start_threads ( ) check_url ( aggregate ) aggregate . finish ( ) aggregate . end_log_output ( ) except LinkCheckerInterrupt : raise except KeyboardInterrupt : interrupt ( aggregate ) except thread . error : log . warn ( LOG_CHECK , _ ( "Could not start a new thread. Check that the current user" " is allowed to start new threads." ) ) abort ( aggregate ) except Exception : console . internal_error ( ) aggregate . logger . log_internal_error ( ) abort ( aggregate )
|
Main check function; checks all configured URLs until interrupted with Ctrl-C.
|
7,369
|
def check_url ( aggregate ) : while True : try : aggregate . urlqueue . join ( timeout = 30 ) break except urlqueue . Timeout : aggregate . remove_stopped_threads ( ) if not any ( aggregate . get_check_threads ( ) ) : break
|
Helper function waiting for URL queue .
|
7,370
|
def interrupt ( aggregate ) : while True : try : log . warn ( LOG_CHECK , _ ( "interrupt; waiting for active threads to finish" ) ) log . warn ( LOG_CHECK , _ ( "another interrupt will exit immediately" ) ) abort ( aggregate ) break except KeyboardInterrupt : pass
|
Interrupt execution and shut down, ignoring any subsequent interrupts.
|
7,371
|
def abort ( aggregate ) : while True : try : aggregate . abort ( ) aggregate . finish ( ) aggregate . end_log_output ( interrupt = True ) break except KeyboardInterrupt : log . warn ( LOG_CHECK , _ ( "user abort; force shutdown" ) ) aggregate . end_log_output ( interrupt = True ) abort_now ( )
|
Helper function to ensure a clean shutdown .
|
7,372
|
def abort_now ( ) : if os . name == 'posix' : import signal os . kill ( os . getpid ( ) , signal . SIGTERM ) time . sleep ( 1 ) os . kill ( os . getpid ( ) , signal . SIGKILL ) elif os . name == 'nt' : os . abort ( ) else : os . _exit ( 3 )
|
Force exit of current process without cleanup .
|
7,373
|
def get_aggregate ( config ) : _urlqueue = urlqueue . UrlQueue ( max_allowed_urls = config [ "maxnumurls" ] ) _robots_txt = robots_txt . RobotsTxt ( config [ "useragent" ] ) plugin_manager = plugins . PluginManager ( config ) result_cache = results . ResultCache ( ) return aggregator . Aggregate ( config , _urlqueue , _robots_txt , plugin_manager , result_cache )
|
Get an aggregator instance with given configuration .
|
7,374
|
def inet_ntoa ( address ) : if len ( address ) != 16 : raise ValueError ( "IPv6 addresses are 16 bytes long" ) hex = address . encode ( 'hex_codec' ) chunks = [ ] i = 0 l = len ( hex ) while i < l : chunk = hex [ i : i + 4 ] m = _leading_zero . match ( chunk ) if not m is None : chunk = m . group ( 1 ) chunks . append ( chunk ) i += 4 best_start = 0 best_len = 0 start = - 1 last_was_zero = False for i in xrange ( 8 ) : if chunks [ i ] != '0' : if last_was_zero : end = i current_len = end - start if current_len > best_len : best_start = start best_len = current_len last_was_zero = False elif not last_was_zero : start = i last_was_zero = True if last_was_zero : end = 8 current_len = end - start if current_len > best_len : best_start = start best_len = current_len if best_len > 0 : if best_start == 0 and ( best_len == 6 or best_len == 5 and chunks [ 5 ] == 'ffff' ) : if best_len == 6 : prefix = '::' else : prefix = '::ffff:' hex = prefix + dns . ipv4 . inet_ntoa ( address [ 12 : ] ) else : hex = ':' . join ( chunks [ : best_start ] ) + '::' + ':' . join ( chunks [ best_start + best_len : ] ) else : hex = ':' . join ( chunks ) return hex
|
Convert a network format IPv6 address into text .
|
7,375
|
def inet_aton ( text ) : if text == '::' : text = '0::' m = _v4_ending . match ( text ) if not m is None : text = "%s:%04x:%04x" % ( m . group ( 1 ) , int ( m . group ( 2 ) ) * 256 + int ( m . group ( 3 ) ) , int ( m . group ( 4 ) ) * 256 + int ( m . group ( 5 ) ) ) m = _colon_colon_start . match ( text ) if not m is None : text = text [ 1 : ] else : m = _colon_colon_end . match ( text ) if not m is None : text = text [ : - 1 ] chunks = text . split ( ':' ) l = len ( chunks ) if l > 8 : raise dns . exception . SyntaxError seen_empty = False canonical = [ ] for c in chunks : if c == '' : if seen_empty : raise dns . exception . SyntaxError seen_empty = True for i in xrange ( 0 , 8 - l + 1 ) : canonical . append ( '0000' ) else : lc = len ( c ) if lc > 4 : raise dns . exception . SyntaxError if lc != 4 : c = ( '0' * ( 4 - lc ) ) + c canonical . append ( c ) if l < 8 and not seen_empty : raise dns . exception . SyntaxError text = '' . join ( canonical ) try : return text . decode ( 'hex_codec' ) except TypeError : raise dns . exception . SyntaxError
|
Convert a text format IPv6 address into network format .
|
7,376
|
def encode ( text ) : if isinstance ( text , unicode ) : return text . encode ( i18n . default_encoding , 'ignore' ) return text
|
Encode text with the default encoding if it is Unicode.
|
7,377
|
def print_locale_info ( out = stderr ) : for key in ( "LANGUAGE" , "LC_ALL" , "LC_CTYPE" , "LANG" ) : print_env_info ( key , out = out ) print ( _ ( "Default locale:" ) , i18n . get_locale ( ) , file = out )
|
Print locale info .
|
7,378
|
def log_status ( self , checked , in_progress , queue , duration , num_urls ) : msg = _n ( "%2d thread active" , "%2d threads active" , in_progress ) % in_progress self . write ( u"%s, " % msg ) msg = _n ( "%5d link queued" , "%5d links queued" , queue ) % queue self . write ( u"%s, " % msg ) msg = _n ( "%4d link" , "%4d links" , checked ) % checked self . write ( u"%s" % msg ) msg = _n ( "%3d URL" , "%3d URLs" , num_urls ) % num_urls self . write ( u" in %s checked, " % msg ) msg = _ ( "runtime %s" ) % strformat . strduration_long ( duration ) self . writeln ( msg ) self . flush ( )
|
Write status message to file descriptor .
|
7,379
|
def writeln ( self , msg ) : self . fd . write ( u"%s%s" % ( msg , unicode ( os . linesep ) ) )
|
Write status message and line break to file descriptor .
|
7,380
|
def deprecated ( func ) : def newfunc ( * args , ** kwargs ) : warnings . warn ( "Call to deprecated function %s." % func . __name__ , category = DeprecationWarning ) return func ( * args , ** kwargs ) return update_func_meta ( newfunc , func )
|
A decorator which can be used to mark functions as deprecated . It emits a warning when the function is called .
|
7,381
|
def synchronize ( lock , func , log_duration_secs = 0 ) : def newfunc ( * args , ** kwargs ) : t = time . time ( ) with lock : duration = time . time ( ) - t if duration > log_duration_secs > 0 : print ( "WARN:" , func . __name__ , "locking took %0.2f seconds" % duration , file = sys . stderr ) return func ( * args , ** kwargs ) return update_func_meta ( newfunc , func )
|
Return synchronized function acquiring the given lock .
|
7,382
|
def notimplemented ( func ) : def newfunc ( * args , ** kwargs ) : co = func . func_code attrs = ( co . co_name , co . co_filename , co . co_firstlineno ) raise NotImplementedError ( "function %s at %s:%d is not implemented" % attrs ) return update_func_meta ( newfunc , func )
|
Raises a NotImplementedError if the function is called .
|
7,383
|
def timeit ( func , log , limit ) : def newfunc ( * args , ** kwargs ) : t = time . time ( ) res = func ( * args , ** kwargs ) duration = time . time ( ) - t if duration > limit : print ( func . __name__ , "took %0.2f seconds" % duration , file = log ) print ( args , file = log ) print ( kwargs , file = log ) return res return update_func_meta ( newfunc , func )
|
Print the execution time of the function. For quick-and-dirty profiling.
|
7,384
|
def timed ( log = sys . stderr , limit = 2.0 ) : return lambda func : timeit ( func , log , limit )
|
Decorator to run a function with timing info .
|
7,385
|
def from_text ( text ) : if text . isdigit ( ) : total = long ( text ) else : if not text [ 0 ] . isdigit ( ) : raise BadTTL total = 0L current = 0L for c in text : if c . isdigit ( ) : current *= 10 current += long ( c ) else : c = c . lower ( ) if c == 'w' : total += current * 604800L elif c == 'd' : total += current * 86400L elif c == 'h' : total += current * 3600L elif c == 'm' : total += current * 60L elif c == 's' : total += current else : raise BadTTL ( "unknown unit '%s'" % c ) current = 0 if not current == 0 : raise BadTTL ( "trailing integer" ) if total < 0L or total > 2147483647L : raise BadTTL ( "TTL should be between 0 and 2^31 - 1 (inclusive)" ) return total
|
Convert the text form of a TTL to an integer .
|
7,386
|
def run ( self ) : try : self . run_checked ( ) except KeyboardInterrupt : thread . interrupt_main ( ) except Exception : self . internal_error ( )
|
Handle keyboard interrupt and other errors .
|
7,387
|
def parse ( self , url_data ) : self . url_data = url_data self . loc = False self . url = u"" data = url_data . get_content ( ) isfinal = True try : self . parser . Parse ( data , isfinal ) except ExpatError as expaterr : self . url_data . add_warning ( expaterr . message , tag = WARN_XML_PARSE_ERROR )
|
Parse XML URL data .
|
7,388
|
def start_element ( self , name , attrs ) : self . in_tag = ( name == self . tag ) self . url = u""
|
Set tag status for start element .
|
7,389
|
def add_url ( self ) : if self . url : self . url_data . add_url ( self . url , line = self . parser . CurrentLineNumber , column = self . parser . CurrentColumnNumber ) self . url = u""
|
Add non-empty URLs to the queue.
|
7,390
|
def get_shell_folder ( name ) : try : import _winreg as winreg except ImportError : import winreg lm = winreg . ConnectRegistry ( None , winreg . HKEY_CURRENT_USER ) try : key = winreg . OpenKey ( lm , r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" ) try : return winreg . QueryValueEx ( key , name ) [ 0 ] finally : key . Close ( ) finally : lm . Close ( )
|
Get Windows Shell Folder locations from the registry .
|
7,391
|
def on_connect ( client ) : print "++ Opened connection to %s" % client . addrport ( ) broadcast ( '%s joins the conversation.\n' % client . addrport ( ) ) CLIENT_LIST . append ( client ) client . send ( "Welcome to the Chat Server, %s.\n" % client . addrport ( ) )
|
Sample on_connect function . Handles new connections .
|
7,392
|
def on_disconnect ( client ) : print "-- Lost connection to %s" % client . addrport ( ) CLIENT_LIST . remove ( client ) broadcast ( '%s leaves the conversation.\n' % client . addrport ( ) )
|
Sample on_disconnect function . Handles lost connections .
|
7,393
|
def kick_idle ( ) : for client in CLIENT_LIST : if client . idle ( ) > IDLE_TIMEOUT : print ( '-- Kicking idle lobby client from %s' % client . addrport ( ) ) client . active = False
|
Look for idle clients and disconnect them by setting active to False.
|
7,394
|
def chat ( client ) : global SERVER_RUN msg = client . get_command ( ) print '%s says, "%s"' % ( client . addrport ( ) , msg ) for guest in CLIENT_LIST : if guest != client : guest . send ( '%s says, %s\n' % ( client . addrport ( ) , msg ) ) else : guest . send ( 'You say, %s\n' % msg ) cmd = msg . lower ( ) if cmd == 'bye' : client . active = False elif cmd == 'shutdown' : SERVER_RUN = False
|
Echo whatever client types to everyone .
|
7,395
|
def get_netloc ( self ) : if self . proxy : scheme = self . proxytype host = self . proxyhost port = self . proxyport else : scheme = self . scheme host = self . host port = self . port return ( scheme , host , port )
|
Determine scheme, host, and port for this connection, taking proxy data into account.
|
7,396
|
def validate ( wire , keyname , secret , now , request_mac , tsig_start , tsig_rdata , tsig_rdlen , ctx = None , multi = False , first = True ) : ( adcount , ) = struct . unpack ( "!H" , wire [ 10 : 12 ] ) if adcount == 0 : raise dns . exception . FormError adcount -= 1 new_wire = wire [ 0 : 10 ] + struct . pack ( "!H" , adcount ) + wire [ 12 : tsig_start ] current = tsig_rdata ( aname , used ) = dns . name . from_wire ( wire , current ) current = current + used ( upper_time , lower_time , fudge , mac_size ) = struct . unpack ( "!HIHH" , wire [ current : current + 10 ] ) time = ( ( upper_time + 0L ) << 32 ) + ( lower_time + 0L ) current += 10 mac = wire [ current : current + mac_size ] current += mac_size ( original_id , error , other_size ) = struct . unpack ( "!HHH" , wire [ current : current + 6 ] ) current += 6 other_data = wire [ current : current + other_size ] current += other_size if current != tsig_rdata + tsig_rdlen : raise dns . exception . FormError if error != 0 : if error == BADSIG : raise PeerBadSignature elif error == BADKEY : raise PeerBadKey elif error == BADTIME : raise PeerBadTime elif error == BADTRUNC : raise PeerBadTruncation else : raise PeerError ( 'unknown TSIG error code %d' % error ) time_low = time - fudge time_high = time + fudge if now < time_low or now > time_high : raise BadTime ( junk , our_mac , ctx ) = sign ( new_wire , keyname , secret , time , fudge , original_id , error , other_data , request_mac , ctx , multi , first , aname ) if ( our_mac != mac ) : raise BadSignature return ctx
|
Validate the specified TSIG rdata against the other input parameters .
|
7,397
|
def get_algorithm ( algorithm ) : global _hashes if _hashes is None : _setup_hashes ( ) if isinstance ( algorithm , ( str , unicode ) ) : algorithm = dns . name . from_text ( algorithm ) if sys . hexversion < 0x02050200 and ( algorithm == HMAC_SHA384 or algorithm == HMAC_SHA512 ) : raise NotImplementedError ( "TSIG algorithm " + str ( algorithm ) + " requires Python 2.5.2 or later" ) try : return ( algorithm . to_digestable ( ) , _hashes [ algorithm ] ) except KeyError : raise NotImplementedError ( "TSIG algorithm " + str ( algorithm ) + " is not supported" )
|
Returns the wire format string and the hash module to use for the specified TSIG algorithm.
|
7,398
|
def inet_pton ( family , text ) : if family == AF_INET : return dns . ipv4 . inet_aton ( text ) elif family == AF_INET6 : return dns . ipv6 . inet_aton ( text ) else : raise NotImplementedError
|
Convert the textual form of a network address into its binary form .
|
7,399
|
def inet_ntop ( family , address ) : if family == AF_INET : return dns . ipv4 . inet_ntoa ( address ) elif family == AF_INET6 : return dns . ipv6 . inet_ntoa ( address ) else : raise NotImplementedError
|
Convert the binary form of a network address into its textual form .
|