idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
49,700
def get_transaction(cls, txid):
    """Get the full transaction details for *txid*.

    Each backend in ``cls.GET_TX_MAIN`` is tried in order; errors listed
    in ``cls.IGNORED_ERRORS`` cause a fall-through to the next backend.

    Raises:
        ConnectionError: if every backend failed.
    """
    for backend in cls.GET_TX_MAIN:
        try:
            return backend(txid)
        except cls.IGNORED_ERRORS:
            continue
    raise ConnectionError('All APIs are unreachable.')
Gets the full transaction details .
49,701
def get_tx_amount(cls, txid, txindex):
    """Get the amount of transaction output *txindex* of *txid*.

    Each backend in ``cls.GET_TX_AMOUNT_MAIN`` is tried in order;
    errors in ``cls.IGNORED_ERRORS`` move on to the next backend.

    Raises:
        ConnectionError: if every backend failed.
    """
    for backend in cls.GET_TX_AMOUNT_MAIN:
        try:
            return backend(txid, txindex)
        except cls.IGNORED_ERRORS:
            continue
    raise ConnectionError('All APIs are unreachable.')
Gets the amount of a given transaction output .
49,702
def get_fee(speed=FEE_SPEED_MEDIUM):
    """Return the recommended satoshi-per-byte fee for *speed*.

    Raises:
        ValueError: if *speed* is not one of the known speed constants.
    """
    fee_by_speed = {
        FEE_SPEED_FAST: DEFAULT_FEE_FAST,
        FEE_SPEED_MEDIUM: DEFAULT_FEE_MEDIUM,
        FEE_SPEED_SLOW: DEFAULT_FEE_SLOW,
    }
    try:
        return fee_by_speed[speed]
    except KeyError:
        raise ValueError('Invalid speed argument.')
Gets the recommended satoshi per byte fee .
49,703
def find_binutils_libs(self, libdir, lib_ext):
    """Find Binutils libraries (libbfd / libopcodes) under *libdir*.

    Recursively walks *libdir* collecting files whose names match
    ``lib(bfd|opcodes)<version><lib_ext>`` and groups their full paths
    by the embedded version string.  "multiarch" builds are preferred
    when present; otherwise the unversioned group is returned.

    Returns:
        list: full paths of the selected libraries (possibly empty).
    """
    # "\%s" interpolates lib_ext (e.g. ".so") so the leading dot is escaped.
    bfd_expr = re.compile("(lib(?:bfd)|(?:opcodes))(.*?)\%s" % lib_ext)
    libs = {}
    for root, dirs, files in os.walk(libdir):
        for f in files:
            m = bfd_expr.search(f)
            if m:
                lib, version = m.groups()
                fp = os.path.join(root, f)
                # Group matching library paths by their version tag.
                if version in libs:
                    libs[version].append(fp)
                else:
                    libs[version] = [fp, ]
    # Prefer multiarch builds when available.
    multiarch_libs = dict([(v, _l) for v, _l in libs.items()
                           if v.find("multiarch") != -1])
    if len(multiarch_libs) > 1:
        # Python 2 only: print statement and subscripting dict.values().
        print "[W] Multiple binutils versions detected. Trying to build with default..."
        return multiarch_libs.values()[0]
    if len(multiarch_libs) == 1:
        return multiarch_libs.values()[0]
    # Fall back to the unversioned (empty version tag) group, if any.
    return libs.get("", [])
Find Binutils libraries .
49,704
def generate_source_files(self):
    """Generate the source files used while compiling the extension.

    Detects which architectures the libbfd/libopcodes being linked
    support (using ``nm`` when linking static binutils), writes
    ``gen_bfd_archs.c``, compiles and runs it to produce
    ``bfd_archs.py``, and finally writes ``supported_disasm.h``.

    Returns:
        The list of supported architectures.

    Raises:
        Exception: when no suitable ``nm`` is found or no supported
            architecture can be determined.
    """
    from pybfd.gen_supported_disasm import get_supported_architectures, \
        get_supported_machines, \
        generate_supported_architectures_source, \
        generate_supported_disassembler_header, \
        gen_supported_archs
    libs_dirs = [os.path.dirname(lib) for lib in self.libs]
    libopcodes = [lib for lib in self.libs
                  if os.path.basename(lib).startswith("libopcodes")][0]
    print "[+] Detecting libbfd/libopcodes compiled architectures"
    if self.with_static_binutils:
        # Look for nm/gnm next to the static binutils installation.
        # NOTE(review): `libs_dir` is not defined anywhere in this function
        # (the list built above is `libs_dirs`); this branch looks like it
        # would raise NameError -- confirm against upstream pybfd.
        nms = [
            os.path.join(libs_dir, "..", "bin", "nm"),
            os.path.join(libs_dir, "..", "bin", "gnm")
        ]
        path_to_nm = None
        for nm_fullpath in nms:
            if os.path.isfile(nm_fullpath):
                path_to_nm = nm_fullpath
                break
        if path_to_nm == None:
            raise Exception("no suitable 'nm' found.")
    else:
        path_to_nm = "nm"  # use the system nm
    path_to_bfd_header = os.path.join(self.includes, "bfd.h")
    supported_machines = get_supported_machines(path_to_bfd_header)
    supported_archs = get_supported_architectures(
        path_to_nm, libopcodes, supported_machines,
        self.with_static_binutils == None)
    source_bfd_archs_c = generate_supported_architectures_source(
        supported_archs, supported_machines)
    print "[+] Generating .C files..."
    gen_file = os.path.join(PACKAGE_DIR, "gen_bfd_archs.c")
    with open(gen_file, "w+") as fd:
        fd.write(source_bfd_archs_c)
    print "[+] %s" % gen_file
    if self.with_static_binutils:
        link_to_libs = []  # static libs are linked directly, not via -l
    else:
        link_to_libs = [self.prepare_libs_for_cc(os.path.basename(lib))
                        for lib in self.libs]
    c_compiler = new_compiler()
    objects = c_compiler.compile(
        [os.path.join(PACKAGE_DIR, "gen_bfd_archs.c"), ],
        include_dirs=[self.includes, ])
    program = c_compiler.link_executable(
        objects,
        libraries=link_to_libs,
        library_dirs=libs_dirs,
        output_progname="gen_bfd_archs",
        output_dir=PACKAGE_DIR)
    gen_tool = os.path.join(PACKAGE_DIR, "gen_bfd_archs")
    gen_file = os.path.join(self.build_lib, PACKAGE_DIR, "bfd_archs.py")
    # Run the freshly built helper and capture its stdout into the .py file.
    cmd = "%s > %s" % (gen_tool, gen_file)
    print "[+] Generating .py files..."
    os.system(cmd)
    with open(gen_file, "a") as f:
        f.write(gen_supported_archs(supported_archs))
    # Clean up the intermediate object files and the helper binary.
    for obj in objects:
        os.unlink(obj)
    os.unlink(gen_tool)
    print "[+] %s" % gen_file
    gen_source = generate_supported_disassembler_header(supported_archs)
    if len(supported_archs) == 0:
        raise Exception("Unable to determine libopcodes' supported "
                        "platforms from '%s'" % libopcodes)
    print "[+] Generating .h files..."
    gen_file = os.path.join(PACKAGE_DIR, "supported_disasm.h")
    with open(gen_file, "w+") as fd:
        fd.write(gen_source)
    print "[+] %s" % gen_file
    return supported_archs
Generate source files to be used during the compile process of the extension module. This is better than just hardcoding the values on python files because header definitions might change along different Binutils versions and we'll be able to catch the changes and keep the correct values.
49,705
def _darwin_current_arch ( self ) : if sys . platform == "darwin" : if sys . maxsize > 2 ** 32 : return platform . mac_ver ( ) [ 2 ] else : return platform . processor ( )
Add Mac OS X support .
49,706
def init_parser():
    """Initialize the objdump-like command line option parser.

    All dump/disassemble switches are mutually exclusive.  Because -h is
    taken by --section-headers (mirroring objdump), the parser is built
    with ``add_help=False`` and help is exposed as -H/--help.

    Returns:
        The configured ArgumentParser.
    """
    usage = "Usage: %(prog)s <option(s)> <file(s)>"
    description = " Display information from object <file(s)>.\n"
    description += " At least one of the following switches must be given:"
    parser = ArgumentParser(
        usage=usage, description=description, add_help=False)
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "-a", "--archive-headers",
        action=DumpArchieveHeadersAction,
        type=FileType("r"), nargs="+",
        help="Display archive header information")
    group.add_argument(
        "-f", "--file-headers",
        action=DumpFileHeadersAction,
        type=FileType("r"), nargs="+",
        help="Display the contents of the overall file header")
    group.add_argument(
        "-h", "--section-headers",
        action=DumpSectionHeadersAction,
        type=FileType("r"), nargs="+",
        help="Display the contents of the section headers")
    group.add_argument(
        "-d", "--disassemble",
        action=DisassembleSectionAction,
        type=FileType("r"), nargs="+",
        help="Display assembler contents of executable sections")
    group.add_argument(
        "-D", "--disassemble-all",
        action=DisassembleSectionsAction,
        type=FileType("r"), nargs="+",
        help="Display assembler contents of executable sections")
    group.add_argument(
        "-s", "--full-contents",
        action=DumpSectionContentAction,
        type=FileType("r"), nargs="+",
        help="Display the full contents of all sections requested")
    group.add_argument(
        "-t", "--syms",
        action=DumpFileSymbols,
        type=FileType("r"), nargs="+",
        help="Display the contents of the symbol table(s)")
    group.add_argument(
        "-v", "--version",
        action="version",
        version="%%(prog)s %s (%s)" % (__version__, __description__),
        help="Display this program's version number")
    group.add_argument(
        "-i", "--info",
        action=ListFormatAndArchitecturesInformationAction,
        nargs=REMAINDER,
        help="List object formats and architectures supported")
    group.add_argument(
        "-H", "--help",
        action="store_true", default=False,
        help="Display this information")
    return parser
Initialize option parser .
49,707
def dump(self, src, length=16, start=0, preffix=""):
    """Dump *src* in classic hex + ASCII format (Python 2 semantics).

    Args:
        src: byte string to dump.
        length: number of bytes shown per row.
        start: base offset printed in the address column.
        preffix: string prepended to every output row.

    Returns:
        str: the formatted multi-line hex dump.
    """
    # 256-entry translation table: printable byte -> itself, else '.'.
    # len(repr(chr(x))) == 3 holds exactly for bytes whose repr is 'c'.
    FILTER = "".join([(len(repr(chr(x))) == 3) and chr(x) or '.'
                      for x in xrange(256)])
    result = list()
    for i in xrange(0, len(src), length):
        s = src[i:i + length]
        hexa = " ".join(["%02X" % ord(x) for x in s])
        # Python 2 str.translate with a 256-char table string.
        printable = s.translate(FILTER)
        # %-*s pads the hex column to 3 chars per byte.
        result.append("%s%08X %-*s %s\n" %
                      (preffix, start + i, length * 3, hexa, printable))
    return ''.join(result)
Dump the specified buffer in hex + ASCII format .
49,708
def content(self):
    """The entire content of this section, read via the BFD backend."""
    offset, size = 0, self.size
    return _bfd.section_get_content(self.bfd, self._ptr, offset, size)
Return the entire section content .
49,709
def get_content(self, offset, size):
    """Return *size* bytes of this section starting at *offset*."""
    raw = _bfd.section_get_content(self.bfd, self._ptr, offset, size)
    return raw
Return the specified number of bytes from the current section .
49,710
def main():
    """Test case for simple opcode disassembly.

    Disassembles a small hard-coded code snippet for several
    architecture/machine/endianness combinations and prints each
    instruction's VMA, size and disassembly text.
    """
    test_targets = (
        # [architecture, machine, endianness, raw opcodes, starting VMA]
        [ARCH_I386, MACH_I386_I386_INTEL_SYNTAX, ENDIAN_MONO,
         "\x55\x89\xe5\xE8\xB8\xFF\xFF\xFF", 0x1000],
        [ARCH_I386, MACH_X86_64_INTEL_SYNTAX, ENDIAN_MONO,
         "\x55\x48\x89\xe5\xE8\xA3\xFF\xFF\xFF", 0x1000],
        [ARCH_ARM, MACH_ARM_2, ENDIAN_LITTLE,
         "\x04\xe0\x2d\xe5\xED\xFF\xFF\xEB", 0x1000],
        [ARCH_MIPS, MACH_MIPSISA32, ENDIAN_BIG,
         "\x0C\x10\x00\x97\x00\x00\x00\x00", 0x1000],
        [ARCH_POWERPC, MACH_PPC, ENDIAN_BIG,
         "\x94\x21\xFF\xE8\x7C\x08\x02\xA6", 0x1000],
    )
    for target_arch, target_mach, target_endian, binary, address in test_targets:
        opcodes = Opcodes(target_arch, target_mach, target_endian)
        print "\n[+] Architecture %s - Machine %d" % (
            opcodes.architecture_name, opcodes.machine)
        print "[+] Disassembly:"
        for vma, size, disasm in opcodes.disassemble(binary, address):
            print "0x%X (size=%d)\t %s" % (vma, size, disasm)
Test case for simple opcode disassembly .
49,711
def initialize_bfd(self, abfd):
    """Bind the underlying libopcodes disassembler to *abfd*.

    For i386, also selects the Intel-syntax machine matching the BFD's
    architecture word size (32- or 64-bit).
    """
    self._ptr = _opcodes.initialize_bfd(abfd._ptr)
    if self.architecture != ARCH_I386:
        return
    machine_by_size = {
        32: MACH_I386_I386_INTEL_SYNTAX,
        64: MACH_X86_64_INTEL_SYNTAX,
    }
    if abfd.arch_size in machine_by_size:
        self.machine = machine_by_size[abfd.arch_size]
Initialize underlying libOpcodes library using BFD .
49,712
def initialize_non_bfd(self, architecture=None, machine=None,
                       endian=ENDIAN_UNKNOWN):
    """Configure the disassembler without a BFD.

    All three values must be provided (non-None); otherwise the call is
    a silent no-op, leaving the current configuration untouched.
    """
    if architecture is None or machine is None or endian is None:
        return
    self.architecture = architecture
    self.machine = machine
    self.endian = endian
Initialize underlying libOpcodes library not using BFD .
49,713
def initialize_smart_disassemble(self, data, start_address=0):
    """Prime libopcodes with *data* and its base address for a
    step-by-step (instruction at a time) disassembly session."""
    _opcodes.initialize_smart_disassemble(
        self._ptr, data, start_address)
Set the binary buffer to disassemble with other related information ready for an instruction by instruction disassembly session .
49,714
def print_single_instruction_callback(self, address, size, branch_delay_insn,
                                      insn_type, target, target2,
                                      disassembly):
    """Per-instruction callback: print the instruction's info and tell
    the disassembler to continue with the next one."""
    line = "0x%X SZ=%d BD=%d IT=%d\t%s" % (
        address, size, branch_delay_insn, insn_type, disassembly)
    print(line)
    return PYBFD_DISASM_CONTINUE
Callback on each disassembled instruction to print its information.
49,715
def disassemble(self, data, start_address=0):
    """Disassemble *data*; return a list of (vma, length, text) entries."""
    instructions = _opcodes.disassemble(self._ptr, data, start_address)
    return instructions
Return a list containing the virtual memory address, instruction length and disassembly code for the given binary buffer.
49,716
def open(self, _file, target=DEFAULT_TARGET):
    """Open an existing file for reading.

    *_file* may be an open file object (reopened through a dup'ed file
    descriptor), a path string, or an already-created BFD pointer (int).
    After opening, the BFD format is detected and sections/symbols (or
    archive members) are populated.

    Raises:
        BfdException: on unopenable files or undetectable formats.
    """
    self.close()  # release any previously opened BFD first
    if type(_file) is FileType:
        filename = _file.name
        if islink(filename):
            raise BfdException("Symlinks file-descriptors are not valid")
        try:
            # dup() so the BFD owns its own descriptor lifetime.
            self._ptr = _bfd.fdopenr(filename, target, dup(_file.fileno()))
        except Exception, err:
            raise BfdException(
                "Unable to open file-descriptor %s : %s" % (filename, err))
    elif type(_file) is StringType:
        filename = _file
        # Probe for existence/readability before handing the path to BFD.
        try:
            with open(_file):
                pass
        except IOError:
            raise BfdException("File %s does not exist." % filename)
        try:
            self._ptr = _bfd.openr(filename, target)
        except (TypeError, IOError), err:
            raise BfdException(
                "Unable to open file %s : %s" % (filename, err))
    elif type(_file) is IntType:
        # Assume an already-initialized BFD pointer was passed in.
        self._ptr = _file
    else:
        raise BfdException(
            "Invalid file type specified for open operation (%r)" % _file)
    try:
        if _bfd.check_format(self._ptr, BfdFormat.ARCHIVE):
            self.file_format = BfdFormat.ARCHIVE
            self.__populate_archive_files()
        else:
            if _bfd.check_format(self._ptr, BfdFormat.OBJECT):
                self.file_format = BfdFormat.OBJECT
            elif _bfd.check_format(self._ptr, BfdFormat.CORE):
                self.file_format = BfdFormat.CORE
            else:
                pass
                # NOTE(review): the placement of this raise is ambiguous in
                # the flattened source; as written here it only fires when no
                # known format matched -- confirm against upstream pybfd.
                raise BfdException(_bfd.get_last_error_message())
    except TypeError, err:
        raise BfdException("Unable to initialize file format : %s" % err)
    if self._ptr is not None:
        if self.file_format in [BfdFormat.OBJECT, BfdFormat.CORE]:
            self.__populate_sections()
            self.__populate_symbols()
Open the existing file for reading .
49,717
def __populate_archive_files(self):
    """Build ``self.archive_files`` from the members of an archive BFD.

    Members that fail to wrap are silently skipped (best effort).
    """
    self.archive_files = []
    for member_ptr in _bfd.archive_list_files(self._ptr):
        try:
            self.archive_files.append(Bfd(member_ptr))
        except BfdException:
            continue
Store the list of files inside an archive file .
49,718
def archive_filenames(self):
    """Return the list of member filenames inside an archive BFD."""
    try:
        return _bfd.archive_list_filenames(self._ptr)
    except TypeError as err:
        raise BfdException(err)
Return the list of files inside an archive file .
49,719
def file_format_name(self):
    """Return the human-readable name of the open BFD's file format."""
    try:
        return BfdFormatNamesLong[self.file_format]
    except IndexError:
        raise BfdException(
            "Invalid format specified (%d)" % self.file_format)
Return the current format name of the open bfd.
49,720
def __populate_sections(self):
    """Fill ``self._sections`` (name -> BfdSection) from the open BFD.

    Sections that fail to wrap are skipped (best effort).
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    for raw_section in _bfd.get_sections_list(self._ptr):
        try:
            wrapped = BfdSection(self._ptr, raw_section)
            self._sections[wrapped.name] = wrapped
        except BfdSectionException:
            continue
Get a list of the section present in the bfd to populate our internal list .
49,721
def __populate_symbols(self):
    """Fill ``self._symbols`` keyed by absolute address (section VMA +
    symbol value) from the open BFD.

    Symbols whose section index is unknown are skipped; the raw flag
    word is decoded into a tuple of matching SYMBOL_FLAGS_LIST entries.

    Raises:
        BfdException: when the BFD is not initialized or parsing fails.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    try:
        symbols = _bfd.get_symbols(self._ptr)
        # Map section index -> BfdSection for quick symbol/section joins.
        sections = {}
        for section in self.sections:
            sections[self.sections[section].index] = self.sections[section]
        for symbol in symbols:
            # Each raw symbol is (section index, name, value, flags).
            symbol_section_index = symbol[0]
            symbol_name = symbol[1]
            symbol_value = symbol[2]
            symbol_flags = symbol[3]
            # Keep only the individual flag bits that are fully set.
            symbol_flags = tuple([f for f in SYMBOL_FLAGS_LIST
                                  if symbol_flags & f == f])
            new_symbol = Symbol(
                sections.get(symbol_section_index, None),
                symbol_name, symbol_value, symbol_flags)
            if new_symbol.section is None:
                # Skip symbols that don't belong to any known section.
                continue
            symbol_address = new_symbol.section.vma + new_symbol.value
            self._symbols[symbol_address] = new_symbol
        del sections
    except BfdSectionException, err:
        # (sic) the message typo is the original runtime string.
        raise BfdException("Exception on symbolic ifnormation parsing.")
Get a list of the symbols present in the bfd to populate our internal list .
49,722
def close(self):
    """Release the underlying BFD handle, if any.

    The pointer is cleared even when the close call fails, so a
    subsequent open() starts from a clean state.
    """
    if not self._ptr:
        return
    try:
        _bfd.close(self._ptr)
    except TypeError as err:
        raise BfdException("Unable to close bfd (%s)" % err)
    finally:
        self._ptr = None
Close any existing BFD structure before open a new one .
49,723
def filename(self):
    """Filename of the file this BFD is operating on."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FILENAME)
    raise BfdException("BFD not initialized")
Return the filename of the BFD file being processed .
49,724
def cacheable(self):
    """CACHEABLE attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.CACHEABLE)
    raise BfdException("BFD not initialized")
Return the cacheable attribute of the BFD file being processed .
49,725
def format(self):
    """FORMAT attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FORMAT)
    raise BfdException("BFD not initialized")
Return the format attribute of the BFD file being processed .
49,726
def target(self):
    """TARGET attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.TARGET)
    raise BfdException("BFD not initialized")
Return the target of the BFD file being processed .
49,727
def machine(self):
    """FLAVOUR attribute of the open BFD.

    NOTE(review): despite the name, this reads ``BfdAttributes.FLAVOUR``
    (not a machine attribute) -- confirm the naming is intentional.
    """
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FLAVOUR)
    raise BfdException("BFD not initialized")
Return the flavour attribute of the BFD file being processed .
49,728
def family_coff(self):
    """FAMILY_COFF attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FAMILY_COFF)
    raise BfdException("BFD not initialized")
Return the family_coff attribute of the BFD file being processed .
49,729
def big_endian(self):
    """IS_BIG_ENDIAN attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.IS_BIG_ENDIAN)
    raise BfdException("BFD not initialized")
Return the big endian attribute of the BFD file being processed .
49,730
def little_endian(self):
    """IS_LITTLE_ENDIAN attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.IS_LITTLE_ENDIAN)
    raise BfdException("BFD not initialized")
Return the little_endian attribute of the BFD file being processed .
49,731
def header_big_endian(self):
    """HEADER_BIG_ENDIAN attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.HEADER_BIG_ENDIAN)
    raise BfdException("BFD not initialized")
Return the header_big_endian attribute of the BFD file being processed .
49,732
def header_little_endian(self):
    """HEADER_LITTLE_ENDIAN attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.HEADER_LITTLE_ENDIAN)
    raise BfdException("BFD not initialized")
Return the header_little_endian attribute of the BFD file being processed .
49,733
def file_flags(self):
    """FILE_FLAGS attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FILE_FLAGS)
    raise BfdException("BFD not initialized")
Return the file flags attribute of the BFD file being processed .
49,734
def file_flags(self, _file_flags):
    """Set new file flags on the open BFD."""
    if self._ptr:
        return _bfd.set_file_flags(self._ptr, _file_flags)
    raise BfdException("BFD not initialized")
Set the new file flags attribute of the BFD file being processed .
49,735
def applicable_file_flags(self):
    """APPLICABLE_FILE_FLAGS attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.APPLICABLE_FILE_FLAGS)
    raise BfdException("BFD not initialized")
Return the applicable file flags attribute of the BFD file being processed .
49,736
def my_archieve(self):
    """MY_ARCHIEVE (sic) attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.MY_ARCHIEVE)
    raise BfdException("BFD not initialized")
Return the my archive attribute of the BFD file being processed.
49,737
def has_map(self):
    """HAS_MAP attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.HAS_MAP)
    raise BfdException("BFD not initialized")
Return the has map attribute of the BFD file being processed .
49,738
def is_thin_archieve(self):
    """IS_THIN_ARCHIEVE (sic) attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.IS_THIN_ARCHIEVE)
    raise BfdException("BFD not initialized")
Return the is thin archive attribute of the BFD file being processed.
49,739
def has_gap_in_elf_shndx(self):
    """HAS_GAP_IN_ELF_SHNDX attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.HAS_GAP_IN_ELF_SHNDX)
    raise BfdException("BFD not initialized")
Return the has gap in elf shndx attribute of the BFD file being processed .
49,740
def valid_reloction_types(self):
    """VALID_RELOC_TYPES attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.VALID_RELOC_TYPES)
    raise BfdException("BFD not initialized")
Return the valid_reloc_types attribute of the BFD file being processed .
49,741
def user_data(self):
    """USRDATA attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.USRDATA)
    raise BfdException("BFD not initialized")
Return the usrdata attribute of the BFD file being processed .
49,742
def start_address(self):
    """START_ADDRESS attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.START_ADDRESS)
    raise BfdException("BFD not initialized")
Return the start address attribute of the BFD file being processed .
49,743
def symbols_count(self):
    """SYMCOUNT attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.SYMCOUNT)
    raise BfdException("BFD not initialized")
Return the symcount attribute of the BFD file being processed .
49,744
def out_symbols(self):
    """OUTSYMBOLS attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.OUTSYMBOLS)
    raise BfdException("BFD not initialized")
Return the out symbols attribute of the BFD file being processed .
49,745
def sections_count(self):
    """COUNT_SECTIONS attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.COUNT_SECTIONS)
    raise BfdException("BFD not initialized")
Return the sections_count attribute of the BFD file being processed .
49,746
def dynamic_symbols_count(self):
    """DYNAMIC_SYMCOUNT attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.DYNAMIC_SYMCOUNT)
    raise BfdException("BFD not initialized")
Return the dynamic symbols count attribute of the BFD file being processed .
49,747
def symbol_leading_char(self):
    """SYMBOL_LEADING_CHAR attribute of the open BFD."""
    if self._ptr:
        return _bfd.get_bfd_attribute(
            self._ptr, BfdAttributes.SYMBOL_LEADING_CHAR)
    raise BfdException("BFD not initialized")
Return the symbol leading char attribute of the BFD file being processed .
49,748
def arch_size(self):
    """Architecture word size, in bits, of the open BFD."""
    if not self._ptr:
        raise BfdException("BFD not initialized")
    try:
        return _bfd.get_arch_size(self._ptr)
    except Exception:
        # (sic) message typo preserved from the original runtime string.
        raise BfdException("Unable to determine architeure size.")
Return the architecture size in bits.
49,749
def display_matrix(self, matrix, interval=2.0, brightness=1.0, fading=False,
                   ignore_duplicates=False):
    """Display an LED matrix on the Nuimo's LED matrix display.

    All rendering parameters are forwarded verbatim to the matrix
    writer.
    """
    self._matrix_writer.write(
        matrix=matrix,
        interval=interval,
        brightness=brightness,
        fading=fading,
        ignore_duplicates=ignore_duplicates,
    )
Displays an LED matrix on Nuimo's LED matrix display.
49,750
def get_asn_origin_whois(self, asn_registry='radb', asn=None,
                         retry_count=3, server=None, port=43):
    """Retrieve CIDR origin info for an ASN via a raw whois query.

    Args:
        asn_registry: key into ASN_ORIGIN_WHOIS for the default server.
        asn: the AS number to query.
        retry_count: retries remaining for rate limits / socket errors.
        server: whois server host (defaults per *asn_registry*).
        port: whois server TCP port.

    Returns:
        str: the raw whois response.

    Raises:
        WhoisRateLimitError: rate limited and out of retries.
        WhoisLookupError: any other failure.
        ValueError: on whois error 501/230 responses.
    """
    try:
        if server is None:
            server = ASN_ORIGIN_WHOIS[asn_registry]['server']
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        conn.settimeout(self.timeout)
        log.debug('ASN origin WHOIS query for {0} at {1}:{2}'.format(
            asn, server, port))
        conn.connect((server, port))
        # RADb-style inverse origin query.
        query = ' -i origin {0}{1}'.format(asn, '\r\n')
        conn.send(query.encode())
        # Read until the server closes the connection.
        response = ''
        while True:
            d = conn.recv(4096).decode()
            response += d
            if not d:
                break
        conn.close()
        if 'Query rate limit exceeded' in response:
            if retry_count > 0:
                log.debug('ASN origin WHOIS query rate limit exceeded. '
                          'Waiting...')
                sleep(1)
                return self.get_asn_origin_whois(
                    asn_registry=asn_registry, asn=asn,
                    retry_count=retry_count - 1,
                    server=server, port=port)
            else:
                raise WhoisRateLimitError(
                    'ASN origin Whois lookup failed for {0}. Rate limit '
                    'exceeded, wait and try again (possibly a '
                    'temporary block).'.format(asn))
        elif ('error 501' in response or 'error 230' in response):
            log.debug('ASN origin WHOIS query error: {0}'.format(response))
            raise ValueError
        return str(response)
    except (socket.timeout, socket.error) as e:
        log.debug('ASN origin WHOIS query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('ASN origin WHOIS query retrying (count: {0})'
                      ''.format(str(retry_count)))
            return self.get_asn_origin_whois(
                asn_registry=asn_registry, asn=asn,
                retry_count=retry_count - 1,
                server=server, port=port)
        else:
            raise WhoisLookupError(
                'ASN origin WHOIS lookup failed for {0}.'.format(asn))
    except WhoisRateLimitError:
        # Already the right exception; propagate unchanged.
        raise
    except:
        # Catch-all preserved: any other error becomes a lookup failure.
        raise WhoisLookupError(
            'ASN origin WHOIS lookup failed for {0}.'.format(asn))
The function for retrieving CIDR info for an ASN via whois .
49,751
def get_http_json(self, url=None, retry_count=3, rate_limit_timeout=120,
                  headers=None):
    """Retrieve and decode a JSON (RDAP) result via HTTP.

    Args:
        url: the full URL to query.
        retry_count: retries remaining for rate limits / socket errors.
        rate_limit_timeout: seconds to sleep before a rate-limit retry.
        headers: optional request headers (defaults to the RDAP Accept).

    Returns:
        dict: the decoded JSON response.

    Raises:
        HTTPRateLimitError: rate limited and out of retries.
        HTTPLookupError: any other failure.
    """
    if headers is None:
        headers = {'Accept': 'application/rdap+json'}
    try:
        log.debug('HTTP query for {0} at {1}'.format(
            self.address_str, url))
        conn = Request(url, headers=headers)
        data = self.opener.open(conn, timeout=self.timeout)
        try:
            # readall() existed on early Python 3 responses; fall back
            # to read() where it is absent.
            d = json.loads(data.readall().decode('utf-8', 'ignore'))
        except AttributeError:
            d = json.loads(data.read().decode('utf-8', 'ignore'))
        try:
            # Some registries signal throttling inside the JSON notices
            # rather than via HTTP 429.
            for tmp in d['notices']:
                if tmp['title'] == 'Rate Limit Notice':
                    log.debug('RDAP query rate limit exceeded.')
                    if retry_count > 0:
                        log.debug('Waiting {0} seconds...'.format(
                            str(rate_limit_timeout)))
                        sleep(rate_limit_timeout)
                        return self.get_http_json(
                            url=url, retry_count=retry_count - 1,
                            rate_limit_timeout=rate_limit_timeout,
                            headers=headers)
                    else:
                        raise HTTPRateLimitError(
                            'HTTP lookup failed for {0}. Rate limit '
                            'exceeded, wait and try again (possibly a '
                            'temporary block).'.format(url))
        except (KeyError, IndexError):
            pass
        return d
    except HTTPError as e:
        # HTTP 429 is the standard rate-limit status.
        if e.code == 429:
            log.debug('HTTP query rate limit exceeded.')
            if retry_count > 0:
                log.debug('Waiting {0} seconds...'.format(
                    str(rate_limit_timeout)))
                sleep(rate_limit_timeout)
                return self.get_http_json(
                    url=url, retry_count=retry_count - 1,
                    rate_limit_timeout=rate_limit_timeout,
                    headers=headers)
            else:
                raise HTTPRateLimitError(
                    'HTTP lookup failed for {0}. Rate limit '
                    'exceeded, wait and try again (possibly a '
                    'temporary block).'.format(url))
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0} with error '
                'code {1}.'.format(url, str(e.code)))
    except (URLError, socket.timeout, socket.error) as e:
        log.debug('HTTP query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('HTTP query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_http_json(
                url=url, retry_count=retry_count - 1,
                rate_limit_timeout=rate_limit_timeout,
                headers=headers)
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0}.'.format(url))
    except (HTTPLookupError, HTTPRateLimitError) as e:
        # Already the right exceptions; propagate unchanged.
        raise e
    except:
        # Catch-all preserved from the original.
        raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
The function for retrieving a json result via HTTP .
49,752
def get_host(self, retry_count=3):
    """Retrieve reverse-DNS host information for ``self.address_str``.

    Args:
        retry_count: number of times to retry on socket errors.

    Returns:
        namedtuple get_host_results(hostname, aliaslist, ipaddrlist).

    Raises:
        HostLookupError: when the lookup ultimately fails.
    """
    try:
        default_timeout_set = False
        if not socket.getdefaulttimeout():
            socket.setdefaulttimeout(self.timeout)
            default_timeout_set = True
        log.debug('Host query for {0}'.format(self.address_str))
        ret = socket.gethostbyaddr(self.address_str)
        if default_timeout_set:
            # Restore the global default so our timeout doesn't leak.
            socket.setdefaulttimeout(None)
        results = namedtuple('get_host_results', 'hostname, aliaslist, '
                                                 'ipaddrlist')
        # BUG FIX: gethostbyaddr returns a 3-tuple, which must be unpacked
        # into the 3-field namedtuple; results(ret) raised TypeError.
        return results(*ret)
    except (socket.timeout, socket.error) as e:
        log.debug('Host query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('Host query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_host(retry_count - 1)
        else:
            raise HostLookupError(
                'Host lookup failed for {0}.'.format(self.address_str))
    except:  # noqa -- catch-all preserved from the original behavior
        raise HostLookupError(
            'Host lookup failed for {0}.'.format(self.address_str))
The function for retrieving host information for an IP address .
49,753
def get_http_raw(self, url=None, retry_count=3, headers=None,
                 request_type='GET', form_data=None):
    """Retrieve a raw HTML/text result via HTTP.

    Args:
        url: the full URL to query.
        retry_count: retries remaining on socket errors.
        headers: optional request headers (defaults to Accept: text/html).
        request_type: HTTP method name passed to Request when supported.
        form_data: optional dict to urlencode and send as the body.

    Returns:
        str: the raw response body decoded as ASCII.

    Raises:
        HTTPLookupError: when the lookup ultimately fails.
    """
    if headers is None:
        headers = {'Accept': 'text/html'}
    enc_form_data = None
    if form_data:
        enc_form_data = urlencode(form_data)
        try:
            # Python 3 needs bytes for the request body.
            enc_form_data = bytes(enc_form_data, encoding='ascii')
        except TypeError:
            # Python 2 str is already bytes.
            pass
    try:
        log.debug('HTTP query for {0} at {1}'.format(
            self.address_str, url))
        try:
            # The `method` keyword only exists on Python 3's Request.
            conn = Request(url=url, data=enc_form_data, headers=headers,
                           **{'method': request_type})
        except TypeError:
            conn = Request(url=url, data=enc_form_data, headers=headers)
        data = self.opener.open(conn, timeout=self.timeout)
        try:
            # readall() existed on early Python 3 responses; fall back
            # to read() where it is absent.
            d = data.readall().decode('ascii', 'ignore')
        except AttributeError:
            d = data.read().decode('ascii', 'ignore')
        return str(d)
    except (URLError, socket.timeout, socket.error) as e:
        log.debug('HTTP query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('HTTP query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_http_raw(
                url=url, retry_count=retry_count - 1, headers=headers,
                request_type=request_type, form_data=form_data)
        else:
            raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
    except HTTPLookupError as e:
        # Already the right exception; propagate unchanged.
        raise e
    except Exception:
        raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
The function for retrieving a raw HTML result via HTTP .
49,754
def generate_output(line='0', short=None, name=None, value=None,
                    is_parent=False, colorize=True):
    """Format one CLI output line.

    Builds the tree-prefix, depth color, bold label (short and/or long
    name), separator and value; parent lines carry no value.
    """
    # Tree prefix: looked up with a 'C' suffix when colorizing.
    prefix = (LINES['{0}{1}'.format(line, 'C' if colorize else '')]
              if (line in LINES.keys()) else '')
    depth_color = COLOR_DEPTH[line] if (colorize and line in COLOR_DEPTH) else ''
    # Label: prefer the short form, fall back to the long name.
    label = short if short is not None else (name if (name is not None) else '')
    long_name = ('' if (name is None or short is None)
                 else ' ({0})'.format(name))
    separator = '' if (name is None and short is None) else ': '
    reset = ANSI['end'] if colorize else ''
    shown_value = '' if is_parent else value
    return '{0}{1}{2}{3}{4}{5}{6}{7}\n'.format(
        prefix, depth_color, ANSI['b'], label, long_name, separator,
        reset, shown_value)
The function for formatting CLI output results .
49,755
def generate_output_header(self, query_type='RDAP'):
    """Return the underlined, bold CLI header line for a query."""
    header = '\n{0}{1}{2} query for {3}:{4}\n\n'.format(
        ANSI['ul'], ANSI['b'], query_type, self.obj.address_str,
        ANSI['end'])
    return header
The function for generating the CLI output header .
49,756
def generate_output_newline(self, line='0', colorize=True):
    """Return a blank (parent-only) CLI output line at depth *line*."""
    return generate_output(line=line, is_parent=True, colorize=colorize)
The function for generating a CLI output new line .
49,757
def generate_output_asn(self, json_data=None, hr=True, show_name=False,
                        colorize=True):
    """Render ASN lookup results for CLI output.

    Only the known ASN fields present in *json_data* are rendered;
    missing, empty or 'NA' values display as 'None'.
    """
    if json_data is None:
        json_data = {}
    wanted = {'asn', 'asn_cidr', 'asn_country_code', 'asn_date',
              'asn_registry', 'asn_description'}
    output = ''
    for key in wanted.intersection(json_data):
        raw = json_data[key]
        has_value = raw is not None and len(raw) > 0 and raw != 'NA'
        output += generate_output(
            line='0',
            short=HR_ASN[key]['_short'] if hr else key,
            name=HR_ASN[key]['_name'] if (hr and show_name) else None,
            value=raw if has_value else 'None',
            colorize=colorize,
        )
    return output
The function for generating CLI output ASN results .
49,758
def generate_output_entities(self, json_data=None, hr=True, show_name=False,
                             colorize=True):
    """Render RDAP entity handles for CLI output.

    Emits a parent line for the entities field, then one child line per
    entity handle when any are present.
    """
    short = HR_RDAP['entities']['_short'] if hr else 'entities'
    name = HR_RDAP['entities']['_name'] if (hr and show_name) else None
    missing = json_data is None or json_data['entities'] is None
    output = generate_output(
        line='0', short=short, name=name,
        is_parent=not missing,
        value='None' if missing else None,
        colorize=colorize)
    if json_data is not None:
        for handle in json_data['entities']:
            output += generate_output(line='1', value=handle,
                                      colorize=colorize)
    return output
The function for generating CLI output RDAP entity results .
49,759
def generate_output_events(self, source, key, val, line='2', hr=True,
                           show_name=False, colorize=True):
    """Generate CLI output for RDAP event results.

    Args:
        source (str): The parent section key in HR_RDAP (e.g. 'network').
        key (str): The events field key.
        val (list): The list of event dicts (action/timestamp/actor), or None.
        line (str): Base tree depth for output lines.
        hr (bool): Use human-readable field labels.
        show_name (bool): Also show the long field name.
        colorize (bool): Colorize the output with ANSI codes.

    Returns:
        str: The formatted output block.
    """
    # Parent line; rendered as 'None' when there are no events.
    output = generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=False if (val is None or len(val) == 0) else True,
        value='None' if (val is None or len(val) == 0) else None,
        colorize=colorize
    )
    if val is not None:
        count = 0
        for item in val:
            # Each event field is optional; default to None if absent.
            try:
                action = item['action']
            except KeyError:
                action = None
            try:
                timestamp = item['timestamp']
            except KeyError:
                timestamp = None
            try:
                actor = item['actor']
            except KeyError:
                actor = None
            if count > 0:
                # Blank separator line between events.
                output += generate_output(line=str(int(line) + 1),
                                          is_parent=True, colorize=colorize)
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['action']['_short'] if hr
                else 'action',
                name=HR_RDAP_COMMON[key]['action']['_name'] if (
                    hr and show_name) else None,
                value=action,
                colorize=colorize
            )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['timestamp']['_short'] if hr
                else 'timestamp',
                name=HR_RDAP_COMMON[key]['timestamp']['_name'] if (
                    hr and show_name) else None,
                value=timestamp,
                colorize=colorize
            )
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['actor']['_short'] if hr
                else 'actor',
                name=HR_RDAP_COMMON[key]['actor']['_name'] if (
                    hr and show_name) else None,
                value=actor,
                colorize=colorize
            )
            count += 1
    return output
The function for generating CLI output RDAP events results .
49,760
def generate_output_list(self, source, key, val, line='2', hr=True,
                         show_name=False, colorize=True):
    """Generate a CLI block for a simple RDAP list field (one value per
    child line)."""
    empty = val is None or len(val) == 0
    output = generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=not empty,
        value='None' if empty else None,
        colorize=colorize
    )
    if val is not None:
        child_line = str(int(line) + 1)
        for entry in val:
            output += generate_output(line=child_line, value=entry,
                                      colorize=colorize)
    return output
The function for generating CLI output RDAP list results .
49,761
def generate_output_notices(self, source, key, val, line='1', hr=True,
                            show_name=False, colorize=True):
    """Generate CLI output for RDAP notices/remarks results.

    Args:
        source (str): The parent section key in HR_RDAP.
        key (str): The notices field key ('notices' or 'remarks').
        val (list): List of notice dicts (title/description/links), or None.
        line (str): Base tree depth for output lines.
        hr (bool): Use human-readable field labels.
        show_name (bool): Also show the long field name.
        colorize (bool): Colorize the output with ANSI codes.

    Returns:
        str: The formatted output block.
    """
    output = generate_output(
        line=line,
        short=HR_RDAP[source][key]['_short'] if hr else key,
        name=HR_RDAP[source][key]['_name'] if (hr and show_name) else None,
        is_parent=False if (val is None or len(val) == 0) else True,
        value='None' if (val is None or len(val) == 0) else None,
        colorize=colorize
    )
    if val is not None:
        count = 0
        for item in val:
            title = item['title']
            description = item['description']
            links = item['links']
            if count > 0:
                # Blank separator line between notices.
                output += generate_output(line=str(int(line) + 1),
                                          is_parent=True, colorize=colorize)
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['title']['_short'] if hr
                else ('title'),
                name=HR_RDAP_COMMON[key]['title']['_name'] if (
                    hr and show_name) else None,
                value=title,
                colorize=colorize
            )
            # Re-indent continuation lines of multi-line descriptions to the
            # child depth.
            output += generate_output(
                line=str(int(line) + 1),
                short=HR_RDAP_COMMON[key]['description']['_short'] if hr
                else 'description',
                name=HR_RDAP_COMMON[key]['description']['_name'] if (
                    hr and show_name) else None,
                value=description.replace(
                    '\n', '\n{0}'.format(generate_output(line='3'))),
                colorize=colorize
            )
            output += self.generate_output_list(
                source=source, key='links', val=links,
                line=str(int(line) + 1), hr=hr, show_name=show_name,
                colorize=colorize
            )
            count += 1
    return output
The function for generating CLI output RDAP notices results .
49,762
def generate_output_network(self, json_data=None, hr=True, show_name=False,
                            colorize=True):
    """Generate CLI output for RDAP network results.

    Args:
        json_data (dict): The RDAP lookup result (may be None).
        hr (bool): Use human-readable field labels.
        show_name (bool): Also show the long field name.
        colorize (bool): Colorize the output with ANSI codes.

    Returns:
        str: The formatted output block.
    """
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_RDAP['network']['_short'] if hr else 'network',
        name=HR_RDAP['network']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    # Bug fix: previously json_data['network'] was indexed directly, so the
    # None-default empty dict raised KeyError; an absent 'network' now just
    # yields the header line.
    for key, val in json_data.get('network', {}).items():
        if key in ['links', 'status']:
            output += self.generate_output_list(
                source='network', key=key, val=val, line='1', hr=hr,
                show_name=show_name, colorize=colorize)
        elif key in ['notices', 'remarks']:
            output += self.generate_output_notices(
                source='network', key=key, val=val, line='1', hr=hr,
                show_name=show_name, colorize=colorize)
        elif key == 'events':
            output += self.generate_output_events(
                source='network', key=key, val=val, line='1', hr=hr,
                show_name=show_name, colorize=colorize)
        elif key not in ['raw']:
            # Plain scalar fields; 'raw' is deliberately skipped.
            output += generate_output(
                line='1',
                short=HR_RDAP['network'][key]['_short'] if hr else key,
                name=HR_RDAP['network'][key]['_name'] if (
                    hr and show_name) else None,
                value=val,
                colorize=colorize
            )
    return output
The function for generating CLI output RDAP network results .
49,763
def generate_output_whois_nets(self, json_data=None, hr=True,
                               show_name=False, colorize=True):
    """Generate CLI output for legacy whois network results.

    Args:
        json_data (dict): The whois lookup result (may be None).
        hr (bool): Use human-readable field labels.
        show_name (bool): Also show the long field name.
        colorize (bool): Colorize the output with ANSI codes.

    Returns:
        str: The formatted output block.
    """
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_WHOIS['nets']['_short'] if hr else 'nets',
        name=HR_WHOIS['nets']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    count = 0
    # Bug fix: previously json_data['nets'] was indexed directly, so the
    # None-default empty dict raised KeyError.
    for net in json_data.get('nets', []):
        if count > 0:
            # Blank separator line between networks.
            output += self.generate_output_newline(line='1',
                                                   colorize=colorize)
        count += 1
        output += generate_output(line='1', short=net['handle'],
                                  is_parent=True, colorize=colorize)
        for key, val in net.items():
            if val and '\n' in val:
                # Multi-line values become a parent with one child line per
                # text line.
                output += generate_output(
                    line='2',
                    short=HR_WHOIS['nets'][key]['_short'] if hr else key,
                    name=HR_WHOIS['nets'][key]['_name'] if (
                        hr and show_name) else None,
                    is_parent=False if (val is None or
                                        len(val) == 0) else True,
                    value='None' if (val is None or
                                     len(val) == 0) else None,
                    colorize=colorize
                )
                for v in val.split('\n'):
                    output += generate_output(line='3', value=v,
                                              colorize=colorize)
            else:
                output += generate_output(
                    line='2',
                    short=HR_WHOIS['nets'][key]['_short'] if hr else key,
                    name=HR_WHOIS['nets'][key]['_name'] if (
                        hr and show_name) else None,
                    value=val,
                    colorize=colorize
                )
    return output
The function for generating CLI output Legacy Whois networks results .
49,764
def generate_output_nir(self, json_data=None, hr=True, show_name=False,
                        colorize=True):
    """Generate CLI output for NIR (national internet registry) results.

    Args:
        json_data (dict): The lookup result (may be None).
        hr (bool): Use human-readable field labels.
        show_name (bool): Also show the long field name.
        colorize (bool): Colorize the output with ANSI codes.

    Returns:
        str: The formatted output block.
    """
    # NOTE(review): when json_data is None it defaults to {}, and the
    # json_data['nir'] access below then raises KeyError — confirm whether
    # callers always supply a 'nir' key before relying on the None default.
    if json_data is None:
        json_data = {}
    output = generate_output(
        line='0',
        short=HR_WHOIS_NIR['nets']['_short'] if hr else 'nir_nets',
        name=HR_WHOIS_NIR['nets']['_name'] if (hr and show_name) else None,
        is_parent=True,
        colorize=colorize
    )
    count = 0
    if json_data['nir']:
        for net in json_data['nir']['nets']:
            if count > 0:
                # Blank separator line between networks.
                output += self.generate_output_newline(line='1',
                                                       colorize=colorize)
            count += 1
            output += generate_output(line='1', short=net['handle'],
                                      is_parent=True, colorize=colorize)
            for key, val in net.items():
                # Structured values (contact dicts, multi-line strings and
                # nameserver lists) get a parent line plus child lines.
                if val and (isinstance(val, dict) or '\n' in val or
                            key == 'nameservers'):
                    output += generate_output(
                        line='2',
                        short=(HR_WHOIS_NIR['nets'][key]['_short'] if (
                            hr) else key),
                        name=HR_WHOIS_NIR['nets'][key]['_name'] if (
                            hr and show_name) else None,
                        is_parent=False if (val is None or
                                            len(val) == 0) else True,
                        value='None' if (val is None or
                                         len(val) == 0) else None,
                        colorize=colorize
                    )
                    if key == 'contacts':
                        for k, v in val.items():
                            if v:
                                output += generate_output(
                                    line='3',
                                    is_parent=False if (
                                        len(v) == 0) else True,
                                    name=k,
                                    colorize=colorize
                                )
                                # One 'field: value' child line per contact
                                # attribute.
                                for contact_key, contact_val in v.items():
                                    if v is not None:
                                        tmp_out = '{0}{1}{2}'.format(
                                            contact_key, ': ', contact_val)
                                        output += generate_output(
                                            line='4', value=tmp_out,
                                            colorize=colorize)
                    elif key == 'nameservers':
                        for v in val:
                            output += generate_output(line='3', value=v,
                                                      colorize=colorize)
                    else:
                        for v in val.split('\n'):
                            output += generate_output(line='3', value=v,
                                                      colorize=colorize)
                else:
                    output += generate_output(
                        line='2',
                        short=(HR_WHOIS_NIR['nets'][key]['_short'] if (
                            hr) else key),
                        name=HR_WHOIS_NIR['nets'][key]['_name'] if (
                            hr and show_name) else None,
                        value=val,
                        colorize=colorize
                    )
    else:
        output += 'None'
    return output
The function for generating CLI output NIR network results .
49,765
def parse_fields_whois(self, response):
    """Parse a pipe-delimited ASN whois response into a result dict.

    Args:
        response (str): The raw whois response line.

    Returns:
        dict: asn, asn_cidr, asn_country_code, asn_date, asn_description,
            asn_registry.

    Raises:
        ASNRegistryError: The parsed registry is not a known RIR.
        ASNParseError: Any other parsing failure.
    """
    try:
        fields = response.split('|')
        registry = fields[4].strip(' \n')
        ret = {'asn_registry': registry}
        if registry not in self.rir_whois.keys():
            raise ASNRegistryError(
                'ASN registry {0} is not known.'.format(registry))
        ret['asn'] = fields[0].strip(' \n')
        ret['asn_cidr'] = fields[2].strip(' \n')
        ret['asn_country_code'] = fields[3].strip(' \n').upper()
        ret['asn_date'] = fields[5].strip(' \n')
        ret['asn_description'] = fields[6].strip(' \n')
    except ASNRegistryError:
        raise
    except Exception as e:
        raise ASNParseError('Parsing failed for "{0}" with exception: {1}.'
                            ''.format(response, e)[:100])
    return ret
The function for parsing ASN fields from a whois response .
49,766
def parse_fields_http(self, response, extra_org_map=None):
    """Parse ASN fields from an ARIN bootstrap HTTP (JSON) response.

    Args:
        response (dict): The parsed JSON response.
        extra_org_map (dict): Extra org handle -> RIR overrides merged over
            the default map; invalid values are silently ignored.

    Returns:
        dict: ASN fields; only asn_registry is populated here, the rest are
            initialized to None.

    Raises:
        ASNRegistryError: No org handle mapped to a known RIR.
        ASNParseError: Any other parsing failure.
    """
    org_map = self.org_map.copy()
    try:
        org_map.update(extra_org_map)
    except (TypeError, ValueError, IndexError, KeyError):
        pass
    try:
        asn_data = {
            'asn_registry': None,
            'asn': None,
            'asn_cidr': None,
            'asn_country_code': None,
            'asn_date': None,
            'asn_description': None
        }
        try:
            # A single net is returned as a dict; normalize to a list.
            net_list = response['nets']['net']
            if not isinstance(net_list, list):
                net_list = [net_list]
        except (KeyError, TypeError):
            log.debug('No networks found')
            net_list = []
        # Walk nets in reverse, stopping at the first org handle that maps
        # to a known RIR.
        for n in reversed(net_list):
            try:
                asn_data['asn_registry'] = (
                    org_map[n['orgRef']['@handle'].upper()])
            except KeyError as e:
                log.debug('Could not parse ASN registry via HTTP: '
                          '{0}'.format(str(e)))
                continue
            break
        if not asn_data['asn_registry']:
            log.debug('Could not parse ASN registry via HTTP')
            raise ASNRegistryError('ASN registry lookup failed.')
    except ASNRegistryError:
        raise
    except Exception as e:
        raise ASNParseError('Parsing failed for "{0}" with exception: {1}.'
                            ''.format(response, e)[:100])
    return asn_data
The function for parsing ASN fields from a http response .
49,767
def get_nets_radb(self, response, is_http=False):
    """Parse network blocks (route/route6 lines) from a RADb response.

    Args:
        response (str): The raw RADb response.
        is_http (bool): Whether the response came from the HTML interface.

    Returns:
        list: One dict per network, with cidr/start/end populated.
    """
    if is_http:
        pattern = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
    else:
        pattern = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'
    nets = []
    for match in re.finditer(pattern, response, re.MULTILINE):
        try:
            net = copy.deepcopy(BASE_NET)
            net['cidr'] = match.group(1).strip()
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except ValueError:
            pass
    return nets
The function for parsing network blocks from ASN origin data .
49,768
def get_nets_jpnic(self, response):
    """Parse network blocks from JPNIC HTML whois data.

    Args:
        response (str): The raw JPNIC HTML response.

    Returns:
        list: One dict per network, with range/cidr/start/end populated.
    """
    nets = []
    for match in re.finditer(
        r'^.*?(\[Network Number\])[^\S\n]+.+?>(?P<val>.+?)</A>$',
        response,
        re.MULTILINE
    ):
        try:
            net = copy.deepcopy(BASE_NET)
            tmp = ip_network(match.group(2))
            # Handle both py3 ipaddress and py2 ipaddr attribute names.
            try:
                network_address = tmp.network_address
            except AttributeError:
                network_address = tmp.ip
                pass
            try:
                broadcast_address = tmp.broadcast_address
            except AttributeError:
                broadcast_address = tmp.broadcast
                pass
            # The displayed range starts one above the network address.
            net['range'] = '{0} - {1}'.format(
                network_address + 1, broadcast_address
            )
            cidr = ip_network(match.group(2).strip()).__str__()
            net['cidr'] = cidr
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except (ValueError, TypeError):
            pass
    return nets
The function for parsing network blocks from jpnic whois data .
49,769
def get_contact(self, response=None, nir=None, handle=None, retry_count=3,
                dt_format=None):
    """Retrieve (if needed) and parse NIR whois contact data.

    Args:
        response (str): A pre-fetched raw response, if available.
        nir (str): The NIR key in NIR_WHOIS.
        handle (str): The contact handle used to build the request URL.
        retry_count (int): HTTP retry attempts.
        dt_format (str): Datetime parsing format, if any.

    Returns:
        dict: The parsed contact fields.
    """
    if response or nir == 'krnic':
        raw = response
    else:
        raw = self._net.get_http_raw(
            url=str(NIR_WHOIS[nir]['url']).format(handle),
            retry_count=retry_count,
            headers=NIR_WHOIS[nir]['request_headers'],
            request_type=NIR_WHOIS[nir]['request_type']
        )
    return self.parse_fields(
        response=raw,
        fields_dict=NIR_WHOIS[nir]['contact_fields'],
        dt_format=dt_format,
        hourdelta=int(NIR_WHOIS[nir]['dt_hourdelta']),
        is_contact=True
    )
The function for retrieving and parsing NIR whois data based on NIR_WHOIS contact_fields .
49,770
def _parse_address ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( KeyError , ValueError , TypeError ) : pass try : ret [ 'value' ] = val [ 1 ] [ 'label' ] except ( KeyError , ValueError , TypeError ) : ret [ 'value' ] = '\n' . join ( val [ 3 ] ) . strip ( ) try : self . vars [ 'address' ] . append ( ret ) except AttributeError : self . vars [ 'address' ] = [ ] self . vars [ 'address' ] . append ( ret )
The function for parsing the vcard address .
49,771
def _parse_phone ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( IndexError , KeyError , ValueError , TypeError ) : pass ret [ 'value' ] = val [ 3 ] . strip ( ) try : self . vars [ 'phone' ] . append ( ret ) except AttributeError : self . vars [ 'phone' ] = [ ] self . vars [ 'phone' ] . append ( ret )
The function for parsing the vcard phone numbers .
49,772
def _parse_email ( self , val ) : ret = { 'type' : None , 'value' : None } try : ret [ 'type' ] = val [ 1 ] [ 'type' ] except ( KeyError , ValueError , TypeError ) : pass ret [ 'value' ] = val [ 3 ] . strip ( ) try : self . vars [ 'email' ] . append ( ret ) except AttributeError : self . vars [ 'email' ] = [ ] self . vars [ 'email' ] . append ( ret )
The function for parsing the vcard email addresses .
49,773
def parse(self):
    """Dispatch each vCard element to its field-specific parser.

    Unknown element types fall through to a None handler and are silently
    skipped via the broad except clause.
    """
    dispatch = {
        'fn': self._parse_name,
        'kind': self._parse_kind,
        'adr': self._parse_address,
        'tel': self._parse_phone,
        'email': self._parse_email,
        'role': self._parse_role,
        'title': self._parse_title,
    }
    for element in self.vcard:
        try:
            handler = dispatch.get(element[0])
            handler(element)
        except (KeyError, ValueError, TypeError):
            pass
The function for parsing the vcard to the vars dictionary .
49,774
def ipv4_lstrip_zeros(address):
    """Strip leading zeros from each octet of an IPv4 address string.

    Any '/suffix' (e.g. a CIDR prefix length) on an octet is discarded.

    Args:
        address (str): The IPv4 address.

    Returns:
        str: The address with leading zeros removed from every octet.
    """
    octets = []
    for octet in address.strip().split('.'):
        trimmed = octet.split('/')[0].lstrip('0')
        # An all-zeros octet strips to '', which must render as '0'.
        octets.append(trimmed if trimmed else '0')
    return '.'.join(octets)
Strips leading zeros from each octet of an IPv4 address.
49,775
def get_countries(is_legacy_xml=False):
    """Build a dict mapping ISO 3166-1 alpha-2 codes to country names.

    Args:
        is_legacy_xml (bool): Load the legacy XML file instead of the CSV.

    Returns:
        dict: {alpha-2 code: country name}; empty if the XML file is empty.
    """
    countries = {}
    # Frozen Windows executables keep the data directory next to the binary.
    if sys.platform == 'win32' and getattr(sys, 'frozen', False):
        data_dir = path.dirname(sys.executable)
    else:
        data_dir = path.dirname(__file__)
    if is_legacy_xml:
        xml_path = str(data_dir) + '/data/iso_3166-1_list_en.xml'
        log.debug('Opening country code legacy XML: {0}'.format(xml_path))
        # Bug fix: the file handle was previously never closed.
        with io.open(xml_path, 'r', encoding='ISO-8859-1') as f:
            data = f.read()
        if not data:
            return {}
        dom = parseString(data)
        for entry in dom.getElementsByTagName('ISO_3166-1_Entry'):
            code = entry.getElementsByTagName(
                'ISO_3166-1_Alpha-2_Code_element')[0].firstChild.data
            name = entry.getElementsByTagName(
                'ISO_3166-1_Country_name')[0].firstChild.data
            countries[code] = name.title()
    else:
        csv_path = str(data_dir) + '/data/iso_3166-1.csv'
        # Bug fix: the debug message previously reported the XML path here,
        # and the file handle was never closed.
        log.debug('Opening country code CSV: {0}'.format(csv_path))
        with io.open(csv_path, 'r', encoding='utf-8') as f:
            for row in csv.reader(f, delimiter=',', quotechar='"'):
                countries[row[0]] = row[1]
    return countries
The function to generate a dictionary containing ISO_3166 - 1 country codes to names .
49,776
def unique_everseen(iterable, key=None):
    """Yield unique elements of `iterable` in order of first appearance.

    Remembers every element (or key(element)) ever seen. Based on the
    itertools recipes.
    """
    seen = set()
    remember = seen.add
    if key is None:
        for item in iterable:
            if item not in seen:
                remember(item)
                yield item
    else:
        for item in iterable:
            marker = key(item)
            if marker not in seen:
                remember(marker)
                yield item
A generator that yields unique elements while preserving their order, remembering every element ever seen. Taken from the itertools recipes.
49,777
def get_nets_arin(self, response):
    """Parse network blocks from ARIN whois data.

    Pairs each CIDR line with the nearest preceding/associated NetRange
    line to populate the human-readable range.

    Args:
        response (str): The raw ARIN whois response.

    Returns:
        list: One dict per network, with range/cidr/start/end populated.
    """
    nets = []
    pattern = re.compile(
        r'^NetRange:[^\S\n]+(.+)$',
        re.MULTILINE
    )
    # Seed the range from the first NetRange line, if any.
    temp = pattern.search(response)
    net_range = None
    net_range_start = None
    if temp is not None:
        net_range = temp.group(1).strip()
        net_range_start = temp.start()
    for match in re.finditer(
        r'^CIDR:[^\S\n]+(.+?,[^\S\n].+|.+)$',
        response,
        re.MULTILINE
    ):
        try:
            net = copy.deepcopy(BASE_NET)
            if len(nets) > 0:
                # For subsequent nets, re-search for the NetRange following
                # this CIDR line.
                temp = pattern.search(response, match.start())
                net_range = None
                net_range_start = None
                if temp is not None:
                    net_range = temp.group(1).strip()
                    net_range_start = temp.start()
            if net_range is not None:
                if net_range_start < match.start() or len(nets) > 0:
                    # Expand 'a.b.c.d/n' to 'first - last'; keep the raw
                    # text if it is already a range or fails to parse.
                    try:
                        net['range'] = '{0} - {1}'.format(
                            ip_network(net_range)[0].__str__(),
                            ip_network(net_range)[-1].__str__()
                        ) if '/' in net_range else net_range
                    except ValueError:
                        net['range'] = net_range
            # Normalize each comma-separated CIDR.
            net['cidr'] = ', '.join(
                [ip_network(c.strip()).__str__()
                 for c in match.group(1).split(', ')]
            )
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except ValueError:
            pass
    return nets
The function for parsing network blocks from ARIN whois data .
49,778
def get_nets_lacnic(self, response):
    """Parse network blocks from LACNIC whois data.

    Args:
        response (str): The raw LACNIC whois response.

    Returns:
        list: One dict per network, with range/cidr/start/end populated.
    """
    nets = []
    for match in re.finditer(
        r'^(inetnum|inet6num|route):[^\S\n]+(.+?,[^\S\n].+|.+)$',
        response,
        re.MULTILINE
    ):
        try:
            net = copy.deepcopy(BASE_NET)
            net_range = match.group(2).strip()
            # Expand 'a.b.c.d/n' to 'first - last'; keep the raw text if it
            # is already a range or fails to parse. (A redundant duplicated
            # assignment was removed here.)
            try:
                net['range'] = '{0} - {1}'.format(
                    ip_network(net_range)[0].__str__(),
                    ip_network(net_range)[-1].__str__()
                ) if '/' in net_range else net_range
            except ValueError:
                net['range'] = net_range
            temp = []
            for addr in net_range.split(', '):
                count = addr.count('.')
                # Bug fix: this was "count is not 0", an identity comparison
                # against an int literal (a SyntaxWarning on CPython 3.8+
                # and implementation-dependent behavior).
                if count != 0 and count < 4:
                    # Pad truncated IPv4 addresses (e.g. '10.0/16') with
                    # '.0' octets before parsing.
                    addr_split = addr.strip().split('/')
                    for i in range(count + 1, 4):
                        addr_split[0] += '.0'
                    addr = '/'.join(addr_split)
                temp.append(ip_network(addr.strip()).__str__())
            net['cidr'] = ', '.join(temp)
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except ValueError:
            pass
    return nets
The function for parsing network blocks from LACNIC whois data .
49,779
def get_nets_other(self, response):
    """Parse network blocks from generic (RIPE-style) whois data.

    Handles both 'start - end' ranges and single CIDR values on
    inetnum/inet6num/route lines.
    """
    nets = []
    expr = (r'^(inetnum|inet6num|route):[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+)|'
            '.+)$')
    for match in re.finditer(expr, response, re.MULTILINE):
        try:
            net = copy.deepcopy(BASE_NET)
            net_range = match.group(2).strip()
            try:
                net['range'] = '{0} - {1}'.format(
                    ip_network(net_range)[0].__str__(),
                    ip_network(net_range)[-1].__str__()
                ) if '/' in net_range else net_range
            except ValueError:
                net['range'] = net_range
            if match.group(3) and match.group(4):
                # Explicit 'start - end' range: summarize and collapse it
                # into the minimal CIDR list.
                span = list(summarize_address_range(
                    ip_address(match.group(3).strip()),
                    ip_address(match.group(4).strip())))
                cidr = ', '.join(
                    block.__str__() for block in collapse_addresses(span))
            else:
                cidr = ip_network(net_range).__str__()
            net['cidr'] = cidr
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except (ValueError, TypeError):
            pass
    return nets
The function for parsing network blocks from generic whois data .
49,780
def convert_default(self, field, **params):
    """Return the marshmallow field mapped from the given model field type,
    falling back to a Raw field when no mapping matches."""
    matched = next(
        (ma_field for klass, ma_field in self.TYPE_MAPPING
         if isinstance(field, klass)),
        None)
    if matched is not None:
        return matched(**params)
    return fields.Raw(**params)
Returns the marshmallow field mapped from the model field type, falling back to a raw field.
49,781
def make_instance(self, data):
    """Build a model object from deserialized data.

    Returns the data unchanged when no model is configured; updates
    self.instance in place when one is set; otherwise constructs a fresh
    model instance.
    """
    model = self.opts.model
    if not model:
        return data
    if self.instance is not None:
        for attr, value in data.items():
            setattr(self.instance, attr, value)
        return self.instance
    return model(**data)
Build object from data .
49,782
def _extract(self, stim):
    """Extract every word element with its onset and duration."""
    triples = [(el.text, el.onset, el.duration) for el in stim.elements]
    texts, onsets, durations = (list(seq) for seq in zip(*triples))
    return ExtractorResult(texts, stim, self, ['word'], onsets, durations)
Returns all words .
49,783
def get_filename(self):
    """Yield a filename for the current Stim.

    If the stim has no on-disk source, saves it to a temporary file, yields
    that path, and deletes the file after the caller resumes; otherwise
    yields the existing filename.
    """
    if self.filename is None or not os.path.exists(self.filename):
        # Security fix: tempfile.mktemp() is race-prone (the name can be
        # claimed between creation and use); mkstemp() creates the file
        # atomically with owner-only permissions.
        fd, tf = tempfile.mkstemp(suffix=self._default_file_extension)
        os.close(fd)
        self.save(tf)
        yield tf
        os.remove(tf)
    else:
        yield self.filename
Return the source filename of the current Stim .
49,784
def get_stim(self, type_, return_all=False):
    """Return component elements matching the given Stim type.

    Args:
        type_: A Stim class or its string name.
        return_all (bool): Return every match (list) instead of the first.

    Returns:
        The first match (or None) when return_all is False; otherwise a
        list of matches (possibly empty).
    """
    if isinstance(type_, string_types):
        type_ = _get_stim_class(type_)
    matches = []
    for elem in self.elements:
        if isinstance(elem, type_):
            if not return_all:
                return elem
            matches.append(elem)
    if matches:
        return matches
    return [] if return_all else None
Returns component elements of the specified type .
49,785
def has_types(self, types, all_=True):
    """Check whether components match all (or any) of the given types."""
    found = [self.get_stim(t) for t in listify(types)]
    return all(found) if all_ else any(found)
Check whether the current component list matches all Stim types in the types argument .
49,786
def save(self, path):
    """Write the audio clip to `path` at this stim's sampling rate."""
    self.clip.write_audiofile(path, fps=self.sampling_rate)
Save clip data to file .
49,787
def get_converter(in_type, out_type, *args, **kwargs):
    """Return an instance of the first available Converter whose input and
    output types match those passed in.

    Args:
        in_type: The input Stim class.
        out_type: One output Stim class or a list of them.
        *args, **kwargs: Forwarded to the Converter constructor.

    Returns:
        A Converter instance, or None if no suitable converter is found.
    """
    convs = pliers.converters.__all__
    # Reversed so that converters configured for earlier out_type entries
    # end up earliest in the search order below.
    out_type = listify(out_type)[::-1]
    default_convs = config.get_option('default_converters')
    for ot in out_type:
        conv_str = '%s->%s' % (in_type.__name__, ot.__name__)
        if conv_str in default_convs:
            # Configured defaults take priority over the package listing.
            convs = list(default_convs[conv_str]) + convs
    for name in convs:
        cls = getattr(pliers.converters, name)
        if not issubclass(cls, Converter):
            continue
        # Converters requiring environment keys are only used if available.
        available = cls.available if issubclass(
            cls, EnvironmentKeyMixin) else True
        if cls._input_type == in_type and cls._output_type in out_type \
                and available:
            conv = cls(*args, **kwargs)
            return conv
    return None
Scans the list of available Converters and returns an instantiation of the first one whose input and output types match those passed in .
49,788
def create_graph():
    """Create the default TF graph from the saved Inception GraphDef file
    in FLAGS.model_dir."""
    with tf.gfile.FastGFile(os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb'), 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        # Import into the current default graph with no name prefix.
        _ = tf.import_graph_def(graph_def, name='')
Creates a graph from saved GraphDef file and returns a saver .
49,789
def run_inference_on_image(image):
    """Run Inception inference on an image file and print the top
    predictions.

    Args:
        image (str): Path to the image file.
    """
    if not tf.gfile.Exists(image):
        tf.logging.fatal('File does not exist %s', image)
    image_data = tf.gfile.FastGFile(image, 'rb').read()
    create_graph()
    with tf.Session() as sess:
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        # Feed the raw JPEG bytes; the graph decodes them itself.
        predictions = sess.run(softmax_tensor,
                               {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)
        node_lookup = NodeLookup()
        # Indices of the top-N scores, highest first.
        top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
        for node_id in top_k:
            human_string = node_lookup.id_to_string(node_id)
            score = predictions[node_id]
            print('%s (score = %.5f)' % (human_string, score))
Runs inference on an image .
49,790
def load(self, label_lookup_path, uid_lookup_path):
    """Load a human-readable English name for each softmax node.

    Args:
        label_lookup_path: File mapping integer node ids to UID strings.
        uid_lookup_path: File mapping UID strings to human-readable names.

    Returns:
        dict: {integer node id: human-readable name}.
    """
    if not tf.gfile.Exists(uid_lookup_path):
        tf.logging.fatal('File does not exist %s', uid_lookup_path)
    if not tf.gfile.Exists(label_lookup_path):
        tf.logging.fatal('File does not exist %s', label_lookup_path)
    # First pass: UID string -> human-readable name.
    proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
    uid_to_human = {}
    p = re.compile(r'[n\d]*[ \S,]*')
    for line in proto_as_ascii_lines:
        parsed_items = p.findall(line)
        uid = parsed_items[0]
        human_string = parsed_items[2]
        uid_to_human[uid] = human_string
    # Second pass: integer node id -> UID string.
    node_id_to_uid = {}
    proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
    for line in proto_as_ascii:
        if line.startswith(' target_class:'):
            target_class = int(line.split(': ')[1])
        if line.startswith(' target_class_string:'):
            target_class_string = line.split(': ')[1]
            # Strip the surrounding quote and trailing quote+newline.
            node_id_to_uid[target_class] = target_class_string[1:-2]
    # Compose the two maps: node id -> human-readable name.
    node_id_to_name = {}
    for key, val in node_id_to_uid.items():
        if val not in uid_to_human:
            tf.logging.fatal('Failed to locate: %s', val)
        name = uid_to_human[val]
        node_id_to_name[key] = name
    return node_id_to_name
Loads a human readable English name for each softmax node .
49,791
def fetch_dictionary(name, url=None, format=None, index=0, rename=None,
                     save=True, force_retrieve=False):
    """Retrieve a dictionary of text norms from the web or local storage.

    Args:
        name (str): Dataset name; used for the cache file and to look up
            presets in the `datasets` mapping.
        url (str): Download URL if the dataset is not a known preset.
        format (str): Source file format hint for the downloader.
        index (int or str): Column (position or name) to use as the index.
        rename (dict): Column rename mapping passed to the downloader.
        save (bool): Cache the downloaded data as CSV locally.
        force_retrieve (bool): Re-download even if a local cache exists.

    Returns:
        pandas.DataFrame indexed by the chosen column.

    Raises:
        ValueError: The dataset is unknown and no URL was provided.
    """
    file_path = os.path.join(_get_dictionary_path(), name + '.csv')
    if not force_retrieve and os.path.exists(file_path):
        df = pd.read_csv(file_path)
        index = datasets[name].get('index', df.columns[index])
        return df.set_index(index)
    if name in datasets:
        url = datasets[name]['url']
        format = datasets[name].get('format', format)
        index = datasets[name].get('index', index)
        # Bug fix: this previously read datasets.get('rename', rename),
        # i.e. looked up the literal key 'rename' in the top-level datasets
        # mapping, so a preset's rename spec was never applied.
        rename = datasets[name].get('rename', rename)
    if url is None:
        raise ValueError("Dataset '%s' not found in local storage or "
                         "presets, and no download URL provided." % name)
    data = _download_dictionary(url, format=format, rename=rename)
    if isinstance(index, int):
        index = data.columns[index]
    data = data.set_index(index)
    if save:
        data.to_csv(file_path, encoding='utf-8')
    return data
Retrieve a dictionary of text norms from the web or local storage .
49,792
def _to_df(self, result, handle_annotations=None):
    """Convert a Google Vision API face-annotation response into a pandas
    DataFrame (one row per face annotation).

    Args:
        result: The API result object; its _data holds the annotations.
        handle_annotations (str): If 'first', keep only the first face.

    Returns:
        pandas.DataFrame of flattened face attributes.
    """
    annotations = result._data
    if handle_annotations == 'first':
        annotations = [annotations[0]]
    face_results = []
    for i, annotation in enumerate(annotations):
        data_dict = {}
        for field, val in annotation.items():
            if 'Confidence' in field:
                data_dict['face_' + field] = val
            elif 'oundingPoly' in field:
                # Matches both boundingPoly and fdBoundingPoly; flatten
                # each vertex into <field>_vertexN_x/_y columns.
                for j, vertex in enumerate(val['vertices']):
                    for dim in ['x', 'y']:
                        name = '%s_vertex%d_%s' % (field, j + 1, dim)
                        # NOTE(review): this rebinds the loop variable
                        # `val`; harmless only because this branch makes no
                        # further use of it — confirm before refactoring.
                        val = vertex[dim] if dim in vertex else np.nan
                        data_dict[name] = val
            elif field == 'landmarks':
                # Flatten each landmark position into
                # landmark_<type>_x/_y/_z columns.
                for lm in val:
                    name = 'landmark_' + lm['type'] + '_%s'
                    lm_pos = {name %
                              k: v for (k, v) in lm['position'].items()}
                    data_dict.update(lm_pos)
            else:
                data_dict[field] = val
        face_results.append(data_dict)
    return pd.DataFrame(face_results)
Converts a Google API Face JSON response into a Pandas Dataframe .
49,793
def correlation_matrix(df):
    """Return a DataFrame of pairwise correlations between df's columns."""
    cols = list(df.columns)
    values = np.corrcoef(df, rowvar=0)
    return pd.DataFrame(values, index=cols, columns=cols)
Returns a pandas DataFrame with the pair - wise correlations of the columns .
49,794
def eigenvalues(df):
    """Return a Series of eigenvalues of df's column correlation matrix."""
    eigvals = np.linalg.eigvals(np.corrcoef(df, rowvar=0))
    return pd.Series(eigvals, index=df.columns, name='Eigenvalue')
Returns a pandas Series with eigenvalues of the correlation matrix .
49,795
def condition_indices(df):
    """Return a Series of condition indices, sqrt(max eigenvalue / each
    eigenvalue), for df's columns."""
    eigvals = eigenvalues(df)
    ratios = eigvals.max() / eigvals
    return pd.Series(np.sqrt(ratios), df.columns, name='Condition index')
Returns a pandas Series with condition indices of the df columns .
49,796
def mahalanobis_distances(df, axis=0):
    """Return a Series of Mahalanobis distances of each sample from the
    column means, along the given axis.

    Returns all-NaN when the covariance matrix is singular.
    """
    if axis == 1:
        df = df.transpose()
    means = df.mean()
    try:
        inv_cov = np.linalg.inv(df.cov())
    except LinAlgError:
        return pd.Series([np.NAN] * len(df.index), df.index,
                         name='Mahalanobis')
    dists = [mahalanobis(row, means, inv_cov)
             for _, row in df.iterrows()]
    return pd.Series(dists, df.index, name='Mahalanobis')
Returns a pandas Series with Mahalanobis distances for each sample on the axis .
49,797
def summary(self, stdout=True, plot=False):
    """Display collinearity, outlier, and validity diagnostics.

    Args:
        stdout (bool): Print the result tables to standard output.
        plot (bool): Also render each result as a seaborn/matplotlib plot.
    """
    if stdout:
        print('Collinearity summary:')
        print(pd.concat([self.results['Eigenvalues'],
                         self.results['ConditionIndices'],
                         self.results['VIFs'],
                         self.results['CorrelationMatrix']], axis=1))
        print('Outlier summary:')
        print(self.results['RowMahalanobisDistances'])
        print(self.results['ColumnMahalanobisDistances'])
        print('Validity summary:')
        print(self.results['Variances'])
    if plot:
        verify_dependencies('seaborn')
        for key, result in self.results.items():
            if key == 'CorrelationMatrix':
                ax = plt.axes()
                sns.heatmap(result, cmap='Blues', ax=ax)
                ax.set_title(key)
                # NOTE(review): sns.plt.show() relies on seaborn
                # re-exporting pyplot, which newer seaborn versions do not
                # do — confirm the pinned seaborn version.
                sns.plt.show()
            else:
                result.plot(kind='bar', title=key)
                plt.show()
Displays collinearity, outlier, and validity diagnostics to the user.
49,798
def add_nodes(self, nodes, parent=None, mode='horizontal'):
    """Add several nodes to the graph.

    In 'horizontal' mode every node shares the given parent; in 'vertical'
    mode each node becomes the parent of the next.
    """
    for spec in nodes:
        node_args = self._parse_node_args(spec)
        if mode == 'horizontal':
            self.add_node(parent=parent, **node_args)
        elif mode == 'vertical':
            parent = self.add_node(parent=parent, return_node=True,
                                   **node_args)
        else:
            raise ValueError("Invalid mode for adding nodes to a graph:"
                             "%s" % mode)
Adds one or more nodes to the current graph .
49,799
def add_node(self, transformer, name=None, children=None, parent=None,
             parameters=None, return_node=False):
    """Add a single node to the graph.

    Args:
        transformer: The transformer the node wraps.
        name (str): Optional node name.
        children: Optional child node specs, added under the new node.
        parent: Optional parent node; must already be registered.
        parameters (dict): Keyword arguments forwarded to Node.
        return_node (bool): Return the created Node.

    Returns:
        The created Node when return_node is True, otherwise None.
    """
    # Bug fix: `parameters={}` was a shared mutable default argument; use
    # None as the sentinel instead (backward compatible for all callers).
    if parameters is None:
        parameters = {}
    node = Node(transformer, name, **parameters)
    self.nodes[node.id] = node
    if parent is None:
        self.roots.append(node)
    else:
        # Re-resolve the parent through the registry before linking.
        parent = self.nodes[parent.id]
        parent.add_child(node)
    if children is not None:
        self.add_nodes(children, parent=node)
    if return_node:
        return node
Adds a node to the current graph .