idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
49,400
def decompress(data):
    """Decompress a complete Brotli-compressed string.

    :param data: bytestring containing an entire compressed stream.
    :returns: the decompressed bytestring.
    """
    decompressor = Decompressor()
    result = decompressor.decompress(data)
    # finish() validates that the stream was complete.
    decompressor.finish()
    return result
Decompress a complete Brotli - compressed string .
49,401
def compress(data,
             mode=DEFAULT_MODE,
             quality=lib.BROTLI_DEFAULT_QUALITY,
             lgwin=lib.BROTLI_DEFAULT_WINDOW,
             lgblock=0,
             dictionary=b''):
    """Compress a string using Brotli.

    Builds a one-shot Compressor, feeds all data with a FINISH operation and
    sanity-checks that the encoder is fully drained afterwards.
    """
    compressor = Compressor(mode=mode,
                            quality=quality,
                            lgwin=lgwin,
                            lgblock=lgblock,
                            dictionary=dictionary)
    compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH)
    # A FINISH operation must leave the encoder finished with no pending output.
    assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE
    assert lib.BrotliEncoderHasMoreOutput(compressor._encoder) == lib.BROTLI_FALSE
    return compressed_data
Compress a string using Brotli .
49,402
def _compress(self, data, operation):
    """
    This private method compresses some data in a given mode. This is used
    because almost all of the code uses the exact same setup. It wouldn't
    have to, but it doesn't hurt at all.

    :param data: byte string fed to the Brotli encoder.
    :param operation: a ``lib.BROTLI_OPERATION_*`` constant (PROCESS, FLUSH
        or FINISH) controlling how the encoder treats this chunk.
    :returns: the compressed bytes produced by this single stream call.
    :raises Error: if the encoder reports a failure.
    """
    # Size the output buffer generously (input + 25% + 10 KiB) so a single
    # call can always absorb the whole input chunk.
    original_output_size = int(math.ceil(len(data) + (len(data) >> 2) + 10240))
    available_out = ffi.new("size_t *")
    available_out[0] = original_output_size
    output_buffer = ffi.new("uint8_t []", available_out[0])
    ptr_to_output_buffer = ffi.new("uint8_t **", output_buffer)
    input_size = ffi.new("size_t *", len(data))
    input_buffer = ffi.new("uint8_t []", data)
    ptr_to_input_buffer = ffi.new("uint8_t **", input_buffer)
    rc = lib.BrotliEncoderCompressStream(self._encoder,
                                         operation,
                                         input_size,
                                         ptr_to_input_buffer,
                                         available_out,
                                         ptr_to_output_buffer,
                                         ffi.NULL)
    if rc != lib.BROTLI_TRUE:
        raise Error("Error encountered compressing data.")
    # Because of the generous buffer above, the encoder should always have
    # consumed every input byte in one call.
    assert not input_size[0]
    # Bytes actually written = requested capacity minus what is left.
    size_of_output = original_output_size - available_out[0]
    return ffi.buffer(output_buffer, size_of_output)[:]
This private method compresses some data in a given mode . This is used because almost all of the code uses the exact same setup . It wouldn t have to but it doesn t hurt at all .
49,403
def flush(self):
    """Flush the compressor.

    Emits all pending output without destroying the compressor, e.g. to
    make already-fed chunks immediately decompressible.

    :returns: the flushed bytes.
    """
    # One FLUSH is always issued; keep flushing while the encoder says
    # more output is pending.
    pieces = [self._compress(b'', lib.BROTLI_OPERATION_FLUSH)]
    while lib.BrotliEncoderHasMoreOutput(self._encoder) == lib.BROTLI_TRUE:
        pieces.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
    return b''.join(pieces)
Flush the compressor . This will emit the remaining output data but will not destroy the compressor . It can be used for example to ensure that given chunks of content will decompress immediately .
49,404
def finish(self):
    """Finish the compressor.

    Emits the remaining output and transitions the encoder to its completed
    state; the compressor cannot be reused afterwards.

    :returns: the final compressed bytes.
    """
    pieces = []
    # Keep issuing FINISH operations until the encoder reports completion.
    while lib.BrotliEncoderIsFinished(self._encoder) == lib.BROTLI_FALSE:
        pieces.append(self._compress(b'', lib.BROTLI_OPERATION_FINISH))
    return b''.join(pieces)
Finish the compressor . This will emit the remaining output data and transition the compressor to a completed state . The compressor cannot be used again after this point and must be replaced .
49,405
def decompress(self, data):
    """
    Decompress part of a complete Brotli-compressed string.

    :param data: bytestring with the next chunk of compressed input.
    :returns: all decompressed bytes produced from this chunk.
    :raises Error: if the decoder reports a corrupt stream.
    """
    chunks = []
    available_in = ffi.new("size_t *", len(data))
    in_buffer = ffi.new("uint8_t[]", data)
    next_in = ffi.new("uint8_t **", in_buffer)
    while True:
        # A fresh output buffer each pass; 5x the input size is a heuristic
        # for the expected decompression ratio.
        buffer_size = 5 * len(data)
        available_out = ffi.new("size_t *", buffer_size)
        out_buffer = ffi.new("uint8_t[]", buffer_size)
        next_out = ffi.new("uint8_t **", out_buffer)
        rc = lib.BrotliDecoderDecompressStream(self._decoder,
                                               available_in,
                                               next_in,
                                               available_out,
                                               next_out,
                                               ffi.NULL)
        if rc == lib.BROTLI_DECODER_RESULT_ERROR:
            error_code = lib.BrotliDecoderGetErrorCode(self._decoder)
            error_message = lib.BrotliDecoderErrorString(error_code)
            raise Error("Decompression error: %s" % ffi.string(error_message))
        # Bytes written this round = capacity minus what the decoder left.
        chunk = ffi.buffer(out_buffer, buffer_size - available_out[0])[:]
        chunks.append(chunk)
        if rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:
            # The decoder consumed everything we had: stop until more data.
            assert available_in[0] == 0
            break
        elif rc == lib.BROTLI_DECODER_RESULT_SUCCESS:
            break
        else:
            # Output buffer was filled: loop again to drain the decoder.
            assert rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT
    return b''.join(chunks)
Decompress part of a complete Brotli - compressed string .
49,406
def finish(self):
    """Finish the decompressor.

    Decompression happens eagerly, so this never emits data; it only raises
    if the stream was truncated or damaged.

    :returns: an empty bytestring.
    :raises Error: if the compressed stream is incomplete.
    """
    # Eager decompression guarantees no output is ever pending here.
    assert lib.BrotliDecoderHasMoreOutput(self._decoder) == lib.BROTLI_FALSE
    if lib.BrotliDecoderIsFinished(self._decoder) == lib.BROTLI_FALSE:
        raise Error("Decompression error: incomplete compressed stream.")
    return b''
Finish the decompressor . As the decompressor decompresses eagerly this will never actually emit any data . However it will potentially throw errors if a truncated or damaged data stream has been used .
49,407
def hsv_to_rgb(hsv):
    """Converts a tuple of hue, saturation, value to a tuple of red, green,
    blue.

    Hue should be an angle from 0.0 to 359.0; saturation and value are in
    [0.0, 1.0], where saturation controls the intensity of the hue and
    value controls the brightness.
    """
    h, s, v = hsv
    if s == 0:
        # Achromatic: all three channels equal the brightness.
        return (v, v, v)
    h = h / 60.0
    sector = math.floor(h)
    f = h - sector
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    # One (r, g, b) permutation per 60-degree sector of the color wheel.
    by_sector = {
        0: (v, t, p),
        1: (q, v, p),
        2: (p, v, t),
        3: (p, q, v),
        4: (t, p, v),
    }
    return by_sector.get(sector, (v, p, q))
Converts a tuple of hue saturation value to a tuple of red green blue . Hue should be an angle from 0 . 0 to 359 . 0 . Saturation and value should be a value from 0 . 0 to 1 . 0 where saturation controls the intensity of the hue and value controls the brightness .
49,408
def set_cursor(self, col, row):
    """Move the cursor to an explicit column and row position.

    Rows are 0-indexed, so valid rows are 0 .. self._lines - 1.

    BUG FIX: the original clamp used ``row > self._lines``, which let
    ``row == self._lines`` through and indexed LCD_ROW_OFFSETS one past the
    last physical row; ``>=`` clamps it to the final row instead.
    """
    if row >= self._lines:
        row = self._lines - 1
    # DDRAM address = column plus the controller's per-row base offset.
    self.write8(LCD_SETDDRAMADDR | (col + LCD_ROW_OFFSETS[row]))
Move the cursor to an explicit column and row position .
49,409
def enable_display(self, enable):
    """Enable or disable the display. Set enable to True to enable."""
    if enable:
        self.displaycontrol = self.displaycontrol | LCD_DISPLAYON
    else:
        self.displaycontrol = self.displaycontrol & ~LCD_DISPLAYON
    # Push the updated control byte out to the controller.
    self.write8(LCD_DISPLAYCONTROL | self.displaycontrol)
Enable or disable the display . Set enable to True to enable .
49,410
def show_cursor(self, show):
    """Show or hide the cursor. Cursor is shown if show is True."""
    if show:
        self.displaycontrol = self.displaycontrol | LCD_CURSORON
    else:
        self.displaycontrol = self.displaycontrol & ~LCD_CURSORON
    # Send the refreshed display-control byte to the LCD.
    self.write8(LCD_DISPLAYCONTROL | self.displaycontrol)
Show or hide the cursor . Cursor is shown if show is True .
49,411
def blink(self, blink):
    """Turn on or off cursor blinking. Set blink to True to enable blinking."""
    if blink:
        self.displaycontrol = self.displaycontrol | LCD_BLINKON
    else:
        self.displaycontrol = self.displaycontrol & ~LCD_BLINKON
    # Commit the new control flags to the controller.
    self.write8(LCD_DISPLAYCONTROL | self.displaycontrol)
Turn on or off cursor blinking . Set blink to True to enable blinking .
49,412
def set_left_to_right(self):
    """Set text direction left to right."""
    # Raise the ENTRYLEFT bit and commit the entry-mode byte.
    self.displaymode = self.displaymode | LCD_ENTRYLEFT
    self.write8(LCD_ENTRYMODESET | self.displaymode)
Set text direction left to right .
49,413
def set_right_to_left(self):
    """Set text direction right to left."""
    # Clear the ENTRYLEFT bit and commit the entry-mode byte.
    self.displaymode = self.displaymode & ~LCD_ENTRYLEFT
    self.write8(LCD_ENTRYMODESET | self.displaymode)
Set text direction right to left .
49,414
def autoscroll(self, autoscroll):
    """Autoscroll will 'right justify' text from the cursor if set True,
    otherwise it will 'left justify' the text."""
    if autoscroll:
        self.displaymode = self.displaymode | LCD_ENTRYSHIFTINCREMENT
    else:
        self.displaymode = self.displaymode & ~LCD_ENTRYSHIFTINCREMENT
    self.write8(LCD_ENTRYMODESET | self.displaymode)
Autoscroll will right justify text from the cursor if set True otherwise it will left justify the text .
49,415
def message(self, text):
    """Write text to display. Note that text can include newlines."""
    line = 0
    for ch in text:
        if ch == '\n':
            # Newline: move to the next row. The starting column depends on
            # the text direction (ENTRYLEFT = left-to-right).
            line += 1
            if self.displaymode & LCD_ENTRYLEFT > 0:
                col = 0
            else:
                col = self._cols - 1
            self.set_cursor(col, line)
        else:
            # Ordinary character: send as data (char_mode=True).
            self.write8(ord(ch), True)
Write text to display . Note that text can include newlines .
49,416
def findExp(self, data):
    """
    Method to look for the current regular expression in the provided string.

    :param data: text scanned with every pattern in self.reg_exp.
    :returns: whatever self.getResults() builds from the validated matches.
    """
    temp = []
    for r in self.reg_exp:
        try:
            temp += re.findall(r, data)
        except:
            # NOTE(review): a failing pattern is only reported to stdout and
            # then skipped; nothing is re-raised.
            print self.name
            print r
            print "CABOOOOM!"
    # Keep only matches accepted by isValidExp, preserving first-seen order
    # and dropping duplicates.
    verifiedExp = []
    for t in temp:
        if self.isValidExp(t):
            if t not in verifiedExp:
                verifiedExp.append(t)
    return self.getResults(verifiedExp)
Method to look for the current regular expression in the provided string .
49,417
def exportUsufy(data, ext, fileH):
    """Method that exports the different structures onto different formats.

    :param data: entities to serialise.
    :param ext: output format: "csv", "gml", "json", "ods", "png", "txt",
        "xls" or "xlsx". Any other value is silently ignored.
    :param fileH: output path without the extension.
    """
    # Dispatch table instead of an if/elif ladder; unknown formats fall
    # through to a no-op, matching the original behaviour.
    exporters = {
        "csv": usufyToCsvExport,
        "gml": usufyToGmlExport,
        "json": usufyToJsonExport,
        "ods": usufyToOdsExport,
        "png": usufyToPngExport,
        "txt": usufyToTextExport,
        "xls": usufyToXlsExport,
        "xlsx": usufyToXlsxExport,
    }
    exporter = exporters.get(ext)
    if exporter is not None:
        exporter(data, fileH + "." + ext)
Method that exports the different structures onto different formats .
49,418
def _generateTabularData(res, oldTabularData={}, isTerminal=False, canUnicode=True):
    """
    Method that recovers the values and columns from the current structure.

    :param res: list of i3visio entity dicts ({"value", "attributes": [...]}) .
    :param oldTabularData: previously exported data, expected to hold a
        header row at oldTabularData["OSRFramework"][0]. NOTE(review): this
        default is a mutable dict; it is only read, never mutated, so it is
        harmless but worth confirming.
    :param isTerminal: when True, only the columns in allowedInTerminal are
        kept.
    :param canUnicode: choose unicode() vs str() when rendering cells.
    :returns: {"OSRFramework": [...rows...]} with the header row first.
    """
    def _grabbingNewHeader(h):
        # Normalise attribute names: "@x" -> "_x", "i3visio.x" -> "i3visio_x".
        if h[0] == "@":
            h = h.replace("@", "_")
        elif "i3visio." in h:
            h = h.replace("i3visio.", "i3visio_")
        return h
    # Columns that make sense on a terminal-width table.
    allowedInTerminal = ["i3visio_alias", "i3visio_uri", "i3visio_platform", "i3visio_email", "i3visio_ipv4", "i3visio_phone", "i3visio_dni", "i3visio_domain", "i3visio_platform_leaked", ]
    values = {}
    headers = ["_id"]
    try:
        if not isTerminal:
            headers = oldTabularData["OSRFramework"][0]
        else:
            oldHeaders = oldTabularData["OSRFramework"][0]
            headers = []
            for h in oldHeaders:
                h = _grabbingNewHeader(h)
                if h in allowedInTerminal:
                    # NOTE(review): "old_headers" is undefined (probably meant
                    # "oldHeaders"); when this branch runs, the NameError is
                    # swallowed by the broad except below, which resets
                    # headers to ["_id"]. Confirm intended behaviour.
                    if h in ["i3visio_domain", "i3visio_alias"] and "_source" in old_headers:
                        pass
                    else:
                        headers.append(h)
        for i, h in enumerate(headers):
            h = _grabbingNewHeader(h)
            headers[i] = h
    except:
        headers = ["_id"]
    # Collect per-profile attribute values, extending headers on the fly.
    for p in res:
        values[p["value"]] = {}
        attributes = p["attributes"]
        for a in attributes:
            h = _grabbingNewHeader(a["type"])
            if not isTerminal:
                values[p["value"]][h] = a["value"]
                if str(h) not in headers:
                    headers.append(str(h))
            else:
                if h in allowedInTerminal:
                    values[p["value"]][h] = a["value"]
                    if str(h) not in headers:
                        headers.append(str(h))
    data = {}
    workingSheet = []
    workingSheet.append(headers)
    try:
        # Carry over previously exported rows, padding short rows with
        # "[N/A]" so every row matches the (possibly grown) header count.
        for dataRow in oldTabularData["OSRFramework"][1:]:
            newRow = []
            for cell in dataRow:
                newRow.append(cell)
            for i in range(len(headers) - len(dataRow)):
                newRow.append("[N/A]")
            workingSheet.append(newRow)
    except Exception, e:
        pass
    # Append one row per recovered profile; "_id" is the 1-based row index.
    for prof in values.keys():
        newRow = []
        for i, col in enumerate(headers):
            try:
                if col == "_id":
                    newRow.append(len(workingSheet))
                else:
                    if canUnicode:
                        newRow.append(unicode(values[prof][col]))
                    else:
                        newRow.append(str(values[prof][col]))
            except UnicodeEncodeError as e:
                newRow.append("[WARNING: Unicode Encode]")
            except:
                # Missing column for this profile.
                newRow.append("[N/A]")
        workingSheet.append(newRow)
    data.update({"OSRFramework": workingSheet})
    return data
Method that recovers the values and columns from the current structure
49,419
def usufyToJsonExport(d, fPath):
    """Workaround to export to a json file.

    Appends the new entities to whatever valid JSON list the file already
    holds; a missing or unreadable file starts from an empty list.
    """
    previous = []
    try:
        with open(fPath) as inputFile:
            raw = inputFile.read()
        if raw != "":
            previous = json.loads(raw)
    except:
        # Best effort: no prior data available.
        pass
    serialized = json.dumps(previous + d, indent=2, sort_keys=True)
    with open(fPath, "w") as outputFile:
        outputFile.write(serialized)
Workaround to export to a json file .
49,420
def usufyToTextExport(d, fPath=None):
    """
    Workaround to export to a .txt file or to show the information.

    :param d: list of entity dicts to render.
    :param fPath: output path; when None the rendered table is returned
        instead of written.
    :returns: a rendered table string when no file could be written (the
        terminal case), otherwise None.
    """
    # No data: return a pre-rendered empty table without touching pyexcel.
    if d == []:
        return "+------------------+\n| No data found... |\n+------------------+"
    import pyexcel as pe
    import pyexcel.ext.text as text
    if fPath == None:
        isTerminal = True
    else:
        isTerminal = False
    try:
        oldData = get_data(fPath)
    except:
        oldData = {"OSRFramework": []}
    # NOTE(review): oldData and isTerminal are computed but never used; the
    # sheet is always rebuilt from an empty structure in terminal mode.
    tabularData = _generateTabularData(d, {"OSRFramework": [[]]}, True, canUnicode=False)
    sheet = pe.Sheet(tabularData["OSRFramework"])
    sheet.name = "Profiles recovered (" + getCurrentStrDatetime() + ")."
    sheet.name_columns_by_row(0)
    text.TABLEFMT = "grid"
    try:
        with open(fPath, "w") as oF:
            oF.write(str(sheet))
    except Exception as e:
        # fPath is None (or unwritable): return the rendered table instead.
        return unicode(sheet)
Workaround to export to a . txt file or to show the information .
49,421
def usufyToCsvExport(d, fPath):
    """Workaround to export to a CSV file.

    Merges the new entities with any data already present in fPath.
    """
    from pyexcel_io import get_data
    try:
        previous = {"OSRFramework": get_data(fPath)}
    except:
        # No readable previous export: start empty.
        previous = {"OSRFramework": []}
    merged = _generateTabularData(d, previous)
    from pyexcel_io import save_data
    # The CSV backend expects the bare sheet, not the {name: rows} mapping.
    save_data(fPath, merged["OSRFramework"])
Workaround to export to a CSV file .
49,422
def usufyToOdsExport(d, fPath):
    """Workaround to export to a .ods file.

    Merges the new entities with any data already present in fPath.
    """
    from pyexcel_ods import get_data
    try:
        previous = {"OSRFramework": get_data(fPath)}
    except:
        previous = {"OSRFramework": []}
    merged = _generateTabularData(d, previous)
    from pyexcel_ods import save_data
    save_data(fPath, merged)
Workaround to export to a . ods file .
49,423
def usufyToXlsExport(d, fPath):
    """Workaround to export to a .xls file.

    Merges the new entities with any data already present in fPath.
    """
    from pyexcel_xls import get_data
    try:
        previous = {"OSRFramework": get_data(fPath)}
    except:
        previous = {"OSRFramework": []}
    merged = _generateTabularData(d, previous)
    from pyexcel_xls import save_data
    save_data(fPath, merged)
Workaround to export to a . xls file .
49,424
def usufyToXlsxExport(d, fPath):
    """Workaround to export to a .xlsx file.

    Merges the new entities with any data already present in fPath.
    """
    from pyexcel_xlsx import get_data
    try:
        previous = {"OSRFramework": get_data(fPath)}
    except:
        previous = {"OSRFramework": []}
    merged = _generateTabularData(d, previous)
    from pyexcel_xlsx import save_data
    save_data(fPath, merged)
Workaround to export to a . xlsx file .
49,425
def _generateGraphData(data, oldData=None):
    """
    Processing the data from i3visio structures to generate nodes and edges.

    :param data: list of i3visio entity dicts ({"value", "type",
        "attributes"}).
    :param oldData: optional networkx Graph to extend; a fresh graph is
        created when omitted. BUG FIX: the original used a mutable default
        argument (``oldData=nx.Graph()``), so every call without an explicit
        graph silently accumulated nodes into the same shared instance.
    :returns: the updated networkx Graph.
    """
    def _addNewNode(ent, g):
        # The node label is the entity value; values that cannot be coerced
        # to unicode fall back to str().
        try:
            label = unicode(ent["value"])
        except UnicodeEncodeError as e:
            label = str(ent["value"])
        g.add_node(label)
        g.node[label]["type"] = ent["type"]
        return label

    def _processAttributes(elems, g):
        # Split attributes into plain node attributes ("@x", key without the
        # leading marker/underscores) and linked entities ("i3visio.*"),
        # recursing into nested attribute lists.
        newAtts = {}
        newEntities = []
        for att in elems:
            if att["type"][0] == "@":
                attName = str(att["type"][1:]).replace('_', '')
                try:
                    newAtts[attName] = int(att["value"])
                except:
                    newAtts[attName] = att["value"]
            elif att["type"][:8] == "i3visio.":
                ent = {
                    "value": att["value"],
                    "type": att["type"].replace("i3visio.", "i3visio_"),
                }
                newEntities.append(ent)
                hashLabel = _addNewNode(ent, g)
                newAttsInAttributes, newEntitiesInAttributes = _processAttributes(att["attributes"], g)
                g.node[hashLabel].update(newAttsInAttributes)
                for new in newEntitiesInAttributes:
                    graphData.add_edge(hashLabel, json.dumps(new))
        return newAtts, newEntities

    # BUG FIX: build a fresh graph per call unless the caller supplies one.
    graphData = oldData if oldData is not None else nx.Graph()
    for elem in data:
        ent = {
            "value": elem["value"],
            "type": elem["type"],
        }
        new_node = _addNewNode(ent, graphData)
        newAtts, newEntities = _processAttributes(elem["attributes"], graphData)
        graphData.node[new_node].update(newAtts)
        for other_node in newEntities:
            try:
                other_node = unicode(other_node["value"])
            except UnicodeEncodeError as e:
                other_node = str(other_node["value"])
            graphData.add_edge(new_node, other_node)
    return graphData
Processing the data from i3visio structures to generate nodes and edges
49,426
def usufyToGmlExport(d, fPath):
    """Workaround to export data to a .gml file.

    Existing data in fPath is merged; a file with broken Unicode is left
    untouched and the output is redirected to a timestamped path instead.
    """
    try:
        previousGraph = nx.read_gml(fPath)
    except UnicodeDecodeError as e:
        print("UnicodeDecodeError:\t" + str(e))
        print("Something went wrong when reading the .gml file relating to the decoding of UNICODE.")
        import time as time
        fPath += "_" + str(time.time())
        print("To avoid losing data, the output file will be renamed to use the timestamp as:\n" + fPath + "_" + str(time.time()))
        print()
        previousGraph = nx.Graph()
    except Exception as e:
        # Unreadable or missing file: start from an empty graph.
        previousGraph = nx.Graph()
    mergedGraph = _generateGraphData(d, previousGraph)
    nx.write_gml(mergedGraph, fPath)
Workaround to export data to a . gml file .
49,427
def usufyToPngExport(d, fPath):
    """Workaround to export to a png file.

    Renders the entity graph with matplotlib and saves it at fPath.
    """
    graph = _generateGraphData(d)
    import matplotlib.pyplot as plt
    nx.draw(graph)
    plt.savefig(fPath)
Workaround to export to a png file .
49,428
def fileToMD5(filename, block_size=256 * 128, binary=False):
    """A function that calculates the MD5 hash of a file.

    :param filename: path of the file to hash.
    :param block_size: chunk size used while streaming the file.
    :param binary: when True return the raw digest bytes, otherwise hex.
    :returns: the MD5 digest as hex string or bytes.
    """
    digest = hashlib.md5()
    with open(filename, 'rb') as handle:
        # Stream in fixed-size chunks so large files never load fully.
        for chunk in iter(lambda: handle.read(block_size), b''):
            digest.update(chunk)
    return digest.digest() if binary else digest.hexdigest()
A function that calculates the MD5 hash of a file .
49,429
def getCurrentStrDatetime():
    """Generating the current Datetime with a given format.

    Returns e.g. "2016-3-5_9h7m"; note the fields are NOT zero-padded,
    matching the original "%s"-based formatting.
    """
    now = datetime.datetime.now()
    return "{0}-{1}-{2}_{3}h{4}m".format(now.year, now.month, now.day, now.hour, now.minute)
Generating the current Datetime with a given format
49,430
def getFilesFromAFolder(path):
    """Getting all the files in a folder.

    Non-recursive: subdirectories are skipped, only file names are returned.
    """
    from os import listdir
    from os.path import isfile, join
    return [entry for entry in listdir(path) if isfile(join(path, entry))]
Getting all the files in a folder .
49,431
def urisToBrowser(uris=[], autoraise=True):
    """Method that launches the URI in the default browser of the system.

    stdout/stderr are temporarily redirected to /dev/null so the browser
    launcher cannot pollute the terminal; ".onion" addresses are rewritten
    to the ".onion.city" gateway.

    BUG FIX: the duplicated fds (savout1/savout2) were never closed, leaking
    two file descriptors per call; they are now closed after restoration.
    """
    savout1 = os.dup(1)
    savout2 = os.dup(2)
    os.close(1)
    os.close(2)
    # Reopens the lowest free fd (1) onto /dev/null; dup2 below reclaims it.
    os.open(os.devnull, os.O_RDWR)
    try:
        for uri in uris:
            if ".onion" in uri:
                wb.open(uri.replace(".onion", ".onion.city"), new=2, autoraise=autoraise)
            else:
                wb.open(uri, new=2, autoraise=autoraise)
    finally:
        os.dup2(savout1, 1)
        os.dup2(savout2, 2)
        os.close(savout1)
        os.close(savout2)
Method that launches the URI in the default browser of the system
49,432
def openResultsInBrowser(res):
    """Method that collects the URI from a list of entities and opens them."""
    print(emphasis("\n\tOpening URIs in the default web browser..."))
    # Warm-up launch so subsequent tabs open without raising the window.
    urisToBrowser(["https://github.com/i3visio/osrframework"], autoraise=False)
    time.sleep(2)
    collected = [att["value"]
                 for r in res
                 for att in r["attributes"]
                 if att["type"] == "i3visio.uri"]
    urisToBrowser(collected)
Method that collects the URI from a list of entities and opens them
49,433
def colorize(text, messageType=None):
    """Function that colorizes a message.

    :param text: value to render (converted with str()).
    :param messageType: substring-matched tag such as "ERROR", "WARNING",
        "SUCCESS", "INFO"; may also contain "BOLD". None means no styling.
    :returns: the text wrapped in the matching colorama codes plus a reset.

    BUG FIX: the default ``messageType=None`` made ``"ERROR" in messageType``
    raise TypeError; None is now treated as "no decoration".
    """
    formattedText = str(text)
    messageType = messageType or ""
    if "ERROR" in messageType:
        formattedText = colorama.Fore.RED + formattedText
    elif "WARNING" in messageType:
        formattedText = colorama.Fore.YELLOW + formattedText
    elif "SUCCESS" in messageType:
        formattedText = colorama.Fore.GREEN + formattedText
    elif "INFO" in messageType:
        formattedText = colorama.Fore.BLUE + formattedText
    if "BOLD" in messageType:
        formattedText = colorama.Style.BRIGHT + formattedText
    return formattedText + colorama.Style.RESET_ALL
Function that colorizes a message .
49,434
def showLicense():
    """
    Method that prints the license if requested.

    Downloads the license text from LICENSE_URL and prints it after the
    user presses ENTER; any failure is reported but never raised.
    """
    print("Trying to recover the contents of the license...\n")
    try:
        text = urllib.urlopen(LICENSE_URL).read()
        print("License retrieved from " + emphasis(LICENSE_URL) + ".")
        raw_input("\n\tPress " + emphasis("<ENTER>") + " to print it.\n")
        print(text)
    except:
        # Network or rendering failure: warn instead of raising.
        print(warning("The license could not be downloaded and printed."))
Method that prints the license if requested .
49,435
def expandEntitiesFromEmail(e):
    """Method that receives an email and creates linked entities.

    :param e: email address like "user@example.com".
    :returns: [email, alias, domain] i3visio entity dicts derived from it.
    """
    parts = e.split("@")
    email = {
        "type": "i3visio.email",
        "value": e,
        "attributes": [],
    }
    alias = {
        "type": "i3visio.alias",
        "value": parts[0],
        "attributes": [],
    }
    domain = {
        "type": "i3visio.domain",
        "value": parts[1],
        "attributes": [],
    }
    return [email, alias, domain]
Method that receives an email and creates linked entities .
49,436
def getNumberTLD():
    """Counting the total number of TLD being processed.

    Sums the lengths of every per-type TLD list in the TLD mapping.
    """
    return sum(len(tldList) for tldList in TLD.values())
Counting the total number of TLD being processed .
49,437
def getWhoisInfo(domain):
    """
    Method that tries to recover the whois info from a domain.

    :param domain: domain name to query.
    :returns: list of i3visio entity dicts (alias, email, country,
        registrar, fullname); any field that cannot be extracted is
        silently skipped.
    :raises Exception: when the whois lookup returns no status (unresolved
        domain).
    """
    new = []
    try:
        # The leftmost label of the domain doubles as an alias entity.
        emails = {}
        emails["type"] = "i3visio.alias"
        emails["value"] = str(domain.split(".")[0])
        emails["attributes"] = []
        new.append(emails)
    except:
        pass
    info = whois.whois(domain)
    if info.status == None:
        raise Exception("UnknownDomainError: " + domain + " could not be resolved.")
    try:
        emails = {}
        emails["type"] = "i3visio.email"
        # Normalise to a JSON-encoded list whether one or many emails came back.
        if type(info.emails) is not list:
            aux = [info.emails]
            emails["value"] = json.dumps(aux)
        else:
            emails["value"] = json.dumps(info.emails)
        emails["attributes"] = []
        new.append(emails)
    except:
        pass
    try:
        tmp = {}
        tmp["type"] = "i3visio.location.country"
        tmp["value"] = str(info.country)
        tmp["attributes"] = []
        new.append(tmp)
    except:
        pass
    try:
        tmp = {}
        tmp["type"] = "i3visio.registrar"
        tmp["value"] = str(info.registrar)
        tmp["attributes"] = []
        new.append(tmp)
    except:
        pass
    try:
        tmp = {}
        tmp["type"] = "i3visio.fullname"
        try:
            tmp["value"] = str(info.name)
        except:
            # Non-ASCII names cannot be str()'d under Python 2; keep the raw value.
            tmp["value"] = info.name
        tmp["attributes"] = []
        new.append(tmp)
    except:
        pass
    return new
Method that tries to recover the whois info from a domain .
49,438
def createDomains(tlds, nicks=None, nicksFile=None):
    """Method that globally permits to generate the domains to be checked.

    :param tlds: list of {"tld", "type"} dicts.
    :param nicks: optional list of nicknames; takes precedence over the file.
    :param nicksFile: optional path to a file with one nickname per line.
    :returns: list of {"domain", "type", "tld"} candidate dicts.
    """
    if nicks is None and nicksFile is not None:
        with open(nicksFile, "r") as source:
            nicks = source.read().splitlines()
    candidates = []
    for nick in (nicks or []):
        for entry in tlds:
            candidates.append({
                "domain": nick + entry["tld"],
                "type": entry["type"],
                "tld": entry["tld"],
            })
    return candidates
Method that globally permits to generate the domains to be checked .
49,439
def weCanCheckTheseDomains(email):
    """Method that verifies if a domain can be safely verified.

    :param email: email address to check.
    :returns: True when the address belongs to a verifiable provider,
        False otherwise (a notice is printed in that case).

    BUG FIX: the original list was missing a comma between "@ukr.net" and
    "@yahoo.", so Python concatenated them into the single, never-matching
    literal "@ukr.net@yahoo." and neither provider was ever blocked.
    """
    notWorking = ["@aol.com", "@bk.ru", "@breakthru.com", "@gmx.",
                  "@hotmail.co", "@inbox.com", "@latinmail.com", "@libero.it",
                  "@mail.ru", "@mail2tor.com", "@outlook.com", "@rambler.ru",
                  "@rocketmail.com", "@starmedia.com", "@ukr.net", "@yahoo.",
                  "@ymail."]
    for blocked in notWorking:
        if blocked in email:
            print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
            return False
    # Only providers known to mailfy can be verified at all.
    for known in EMAIL_DOMAINS:
        if known in email:
            return True
    print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
    return False
Method that verifies if a domain can be safely verified .
49,440
def grabEmails(emails=None, emailsFile=None, nicks=None, nicksFile=None, domains=EMAIL_DOMAINS, excludeDomains=[]):
    """Method that generates a list of emails.

    Sources are tried in priority order: an explicit list, a file of
    addresses, a list of nicknames, a file of nicknames. Nicknames are
    combined with every domain not excluded.
    """
    candidates = []
    if emails is not None:
        candidates = emails
    elif emailsFile is not None:
        with open(emailsFile, "r") as source:
            candidates = source.read().splitlines()
    else:
        if nicks is None and nicksFile is not None:
            with open(nicksFile, "r") as source:
                nicks = source.read().splitlines()
        if nicks is not None:
            for nick in nicks:
                for dom in domains:
                    if dom not in excludeDomains:
                        candidates.append(nick + "@" + dom)
    return candidates
Method that generates a list of emails .
49,441
def processMailList(platformNames=[], emails=[]):
    """Method to perform the email search.

    Queries every selected mailfy platform for every address and merges
    the JSON-decoded entities into a single list.
    """
    platforms = platform_selection.getPlatformsByName(platformNames, mode="mailfy")
    results = []
    for address in emails:
        for pla in platforms:
            entities = pla.getInfo(query=address, mode="mailfy")
            # getInfo returns a JSON string; empty results are skipped.
            if entities != {}:
                results += json.loads(entities)
    return results
Method to perform the email search .
49,442
def pool_function(args):
    """
    A wrapper for being able to launch all the threads.

    :param args: email address to verify over SMTP.
    :returns: dict with "platform", "status" and "data" keys; "data" is an
        empty dict when the address could not be verified.
    """
    is_valid = True
    try:
        checker = emailahoy.VerifyEmail()
        # SMTP-level verification; 250 means the mailbox was accepted.
        status, message = checker.verify_email_smtp(args, from_host='gmail.com', from_email='sample@gmail.com')
        if status == 250:
            print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.success(args), general.success("SUCCESS ({})".format(str(status))), message.replace('\n', '\n\t\t')))
            is_valid = True
        else:
            print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.error(args), general.error("FAILED ({})".format(str(status))), message.replace('\n', '\n\t\t')))
            is_valid = False
    except Exception, e:
        # Any SMTP/network failure is reported and treated as "not valid".
        print(general.warning("WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
        is_valid = False
    aux = {}
    aux["type"] = "i3visio.profile"
    aux["value"] = "Email - " + args
    aux["attributes"] = general.expandEntitiesFromEmail(args)
    # attributes[2] is the domain entity; its value names the platform.
    platform = aux["attributes"][2]["value"].title()
    aux["attributes"].append({"type": "i3visio.platform", "value": platform, "attributes": []})
    if is_valid:
        return {"platform": platform, "status": "DONE", "data": aux}
    else:
        return {"platform": platform, "status": "DONE", "data": {}}
A wrapper for being able to launch all the threads .
49,443
def recoverURL(self, url):
    """Public method to recover a resource.

    Rotates the User-Agent, picks the proxy matching the URL scheme,
    rewrites ".onion" addresses to the ".onion.cab" gateway, and returns
    the response body — or None when the download fails.

    FIX: removed a dead ``try: pass / except: pass`` block that the
    original carried inside the .onion branch.
    """
    self.setUserAgent()
    if "https://" in url:
        self.setProxy(protocol="https")
    else:
        self.setProxy(protocol="http")
    if ".onion" in url:
        # Tor hidden services are fetched through a clearnet gateway.
        url = url.replace(".onion", ".onion.cab")
    try:
        recurso = self.br.open(url)
    except:
        return None
    html = recurso.read()
    return html
Public method to recover a resource .
49,444
def setNewPassword(self, url, username, password):
    """Public method to manually set the credentials for a url in the browser.

    Delegates directly to the wrapped browser's add_password.
    """
    self.br.add_password(url, username, password)
Public method to manually set the credentials for a url in the browser .
49,445
def setProxy(self, protocol="http"):
    """Public method to set a proxy for the browser.

    Best effort: if no proxy is configured for the protocol, the browser
    is left untouched.
    """
    try:
        self.br.set_proxies({protocol: self.proxies[protocol]})
    except:
        # Missing configuration is silently ignored.
        pass
Public method to set a proxy for the browser .
49,446
def setUserAgent(self, uA=None):
    """This method will be called whenever a new query will be executed.

    :param uA: explicit User-Agent string; when None a random one is picked
        from self.userAgents.
    :returns: True when a User-Agent was installed, False when none was
        available.

    BUG FIX: the original did ``logger = logging.debug(...)``, rebinding
    ``logger`` to None instead of emitting the message through the logger.
    """
    logger = logging.getLogger("osrframework.utils")
    if not uA:
        if self.userAgents:
            logger.debug("Selecting a new random User Agent.")
            uA = random.choice(self.userAgents)
        else:
            logger.debug("No user agent was inserted.")
            return False
    self.br.addheaders = [('User-agent', uA), ]
    return True
This method will be called whenever a new query will be executed .
49,447
def main(args):
    """
    Query manager.

    Dispatches args.type to the matching TwitterAPIWrapper method; follower
    and friend queries additionally dump the ids to a per-query CSV file.

    :param args: namespace with at least "type" and "query" attributes.
    :returns: whatever the selected wrapper method produced.
    """
    tAW = TwitterAPIWrapper()
    if args.type == "get_all_docs":
        results = tAW.get_all_docs(args.query)
    elif args.type == "get_user":
        results = tAW.get_user(args.query)
    elif args.type == "get_followers":
        results = tAW.get_followers(args.query)
        print "... %s followers downloaded... " % (len(results))
        with open('%s_followers.csv' % args.query, 'wb') as f:
            writer = csv.writer(f)
            for r in results:
                writer.writerow([args.query, str(r)])
    elif args.type == "get_friends":
        results = tAW.get_friends(args.query)
        print "... %s friends downloaded... " % (len(results))
        with open('%s_friends.csv' % args.query, 'wb') as f:
            writer = csv.writer(f)
            for r in results:
                writer.writerow([args.query, str(r)])
    elif args.type == "search_users":
        results = tAW.search_users(args.query)
    # NOTE(review): an unknown args.type leaves "results" unbound and raises
    # NameError on return — confirm callers validate the type beforehand.
    return results
Query manager .
49,448
def _rate_limit_status(self, api=None, mode=None):
    """
    Verifying the API limits.

    Without a mode, pretty-prints the full rate-limit status and waits for
    the user. With a mode, blocks (sleeping in 60-second steps) until at
    least one query remains for that endpoint.

    :param api: tweepy API handle; created on demand when None.
    :param mode: one of "get_user", "get_followers", "get_friends",
        "search_users", or None for the interactive dump.
    :returns: 0 always.
    """
    if api == None:
        # NOTE(review): siblings call self._connectToAPI(); this calls
        # self.connectToAPI() (no underscore) — confirm which one exists.
        api = self.connectToAPI()
    if mode == None:
        print json.dumps(api.rate_limit_status(), indent=2)
        raw_input("<Press ENTER>")
    else:
        while True:
            allLimits = api.rate_limit_status()
            # Pick the limit/remaining/reset triple for the chosen endpoint.
            if mode == "get_user":
                limit = allLimits["resources"]["users"]["/users/show/:id"]["limit"]
                remaining = allLimits["resources"]["users"]["/users/show/:id"]["remaining"]
                reset = allLimits["resources"]["users"]["/users/show/:id"]["reset"]
            elif mode == "get_followers":
                limit = allLimits["resources"]["followers"]["/followers/ids"]["limit"]
                remaining = allLimits["resources"]["followers"]["/followers/ids"]["remaining"]
                reset = allLimits["resources"]["followers"]["/followers/ids"]["reset"]
            elif mode == "get_friends":
                limit = allLimits["resources"]["friends"]["/friends/ids"]["limit"]
                remaining = allLimits["resources"]["friends"]["/friends/ids"]["remaining"]
                reset = allLimits["resources"]["friends"]["/friends/ids"]["reset"]
            elif mode == "search_users":
                limit = allLimits["resources"]["users"]["/users/search"]["limit"]
                remaining = allLimits["resources"]["users"]["/users/search"]["remaining"]
                reset = allLimits["resources"]["users"]["/users/search"]["reset"]
            else:
                # Unknown mode: pretend a query remains so we never block.
                remaining = 1
            if remaining > 0:
                break
            else:
                waitTime = 60
                print "No more queries remaining, sleeping for " + str(waitTime) + " seconds..."
                time.sleep(waitTime)
    return 0
Verifying the API limits
49,449
def get_followers(self, query):
    """Method to get the followers of a user.

    :param query: screen name or id of the target user.
    :returns: list of follower ids, or [] on any API error.
    """
    api = self._connectToAPI()
    # Block until the followers endpoint has queries available.
    self._rate_limit_status(api=api, mode="get_followers")
    try:
        return api.followers_ids(query)
    except:
        return []
Method to get the followers of a user .
49,450
def get_friends(self, query):
    """Method to get the friends of a user.

    :param query: screen name or id of the target user.
    :returns: list of friend ids, or [] on any API error.
    """
    api = self._connectToAPI()
    # Block until the friends endpoint has queries available.
    self._rate_limit_status(api=api, mode="get_friends")
    try:
        return api.friends_ids(query)
    except:
        return []
Method to get the friends of a user .
49,451
def get_user(self, screen_name):
    """Method to perform the usufy searches.

    :param screen_name: Twitter handle to look up.
    :returns: list with the processed user dict, or [] when the lookup fails.
    """
    api = self._connectToAPI()
    self._rate_limit_status(api=api, mode="get_user")
    raw = []
    try:
        user = api.get_user(screen_name)
        raw.append(user._json)
    except tweepy.error.TweepError as e:
        # Unknown/suspended account: fall through with no results.
        pass
    return [self._processUser(u) for u in raw]
Method to perform the usufy searches .
49,452
def search_users(self, query, n=20, maxUsers=60):
    """Method to perform the searchfy searches.

    :param query: free-text user search.
    :param n: page size requested from the API.
    :param maxUsers: stop paginating once this many users are collected.
    :returns: list of processed user dicts.

    BUG FIXES: (1) the loop condition used bitwise '&' where logical 'and'
    was intended, so pagination almost never ran (Python parses it as a
    chained comparison against ``maxUsers & len(newUsers)``); (2) the page
    loop reused the page-size parameter ``n`` as its loop variable,
    clobbering it; (3) later pages appended raw user objects instead of
    their ``._json`` payloads, unlike the first page.
    """
    api = self._connectToAPI()
    self._rate_limit_status(api=api, mode="search_users")
    aux = []
    page = 0
    try:
        newUsers = api.search_users(query, n, page)
        for user in newUsers:
            aux.append(user._json)
        while len(aux) < maxUsers and len(newUsers) > 0:
            page += 1
            print("Getting page %s of new users..." % page)
            newUsers = api.search_users(query, n, page)
            for user in newUsers:
                aux.append(user._json)
    except:
        pass
    res = []
    for a in aux:
        res.append(self._processUser(a))
    return res
Method to perform the searchfy searches .
49,453
def validate_categories(categories):
    """Take an iterable of source categories and raise ValueError if some of
    them are invalid."""
    unknown = set(categories) - Source.categories
    if unknown:
        raise ValueError('Invalid categories: %s' % list(unknown))
Take an iterable of source categories and raise ValueError if some of them are invalid .
49,454
def checkIfHashIsCracked(hash=None):
    """Method that checks if the given hash is stored in the md5db.net website.

    :param hash: MD5 hash (any case) to look up.
    :returns: the raw response body on success, or [] on any failure.
    """
    apiURL = "http://md5db.net/api/" + str(hash).lower()
    try:
        return urllib2.urlopen(apiURL).read()
    except:
        # Network failures degrade to an empty result.
        return []
Method that checks if the given hash is stored in the md5db . net website .
49,455
def fuzzUsufy(fDomains=None, fFuzzStruct=None):
    """Guess the usufy profile path for a list of domains or subdomains.

    :param fDomains: open file whose lines are "<domain> <nickname>" pairs.
    :param fFuzzStruct: optional open file with URL templates (one per
        line, using <DOMAIN> and <USERNAME> tokens); when None a built-in
        catalogue of common profile URL patterns is used.
    :return: dict mapping each domain to the list of candidate URLs whose
        downloaded page contained the nickname.
    """
    if fFuzzStruct == None:
        # Default catalogue of URL patterns where user profiles usually live.
        fuzzingStructures = [
            "http://<DOMAIN>/<USERNAME>",
            "http://<DOMAIN>/~<USERNAME>",
            "http://<DOMAIN>/?action=profile;user=<USERNAME>",
            "http://<DOMAIN>/causes/author/<USERNAME>",
            "http://<DOMAIN>/channel/<USERNAME>",
            "http://<DOMAIN>/community/profile/<USERNAME>",
            "http://<DOMAIN>/component/comprofiler/userprofiler/<USERNAME>",
            "http://<DOMAIN>/details/@<USERNAME>",
            "http://<DOMAIN>/foros/member.php?username=<USERNAME>",
            "http://<DOMAIN>/forum/member/<USERNAME>",
            "http://<DOMAIN>/forum/member.php?username=<USERNAME>",
            "http://<DOMAIN>/forum/profile.php?mode=viewprofile&u=<USERNAME>",
            "http://<DOMAIN>/home/<USERNAME>",
            "http://<DOMAIN>/index.php?action=profile;user=<USERNAME>",
            "http://<DOMAIN>/member_profile.php?u=<USERNAME>",
            "http://<DOMAIN>/member.php?username=<USERNAME>",
            "http://<DOMAIN>/members/?username=<USERNAME>",
            "http://<DOMAIN>/members/<USERNAME>",
            "http://<DOMAIN>/members/view/<USERNAME>",
            "http://<DOMAIN>/mi-espacio/<USERNAME>",
            "http://<DOMAIN>/u<USERNAME>",
            "http://<DOMAIN>/u/<USERNAME>",
            "http://<DOMAIN>/user-<USERNAME>",
            "http://<DOMAIN>/user/<USERNAME>",
            "http://<DOMAIN>/user/<USERNAME>.html",
            "http://<DOMAIN>/users/<USERNAME>",
            "http://<DOMAIN>/usr/<USERNAME>",
            "http://<DOMAIN>/usuario/<USERNAME>",
            "http://<DOMAIN>/usuarios/<USERNAME>",
            "http://<DOMAIN>/en/users/<USERNAME>",
            "http://<DOMAIN>/people/<USERNAME>",
            "http://<DOMAIN>/profil/<USERNAME>",
            "http://<DOMAIN>/profile/<USERNAME>",
            "http://<DOMAIN>/profile/page/<USERNAME>",
            "http://<DOMAIN>/rapidforum/index.php?action=profile;user=<USERNAME>",
            "http://<DOMAIN>/social/usuarios/<USERNAME>",
            "http://<USERNAME>.<DOMAIN>",
            "http://<USERNAME>.<DOMAIN>/user/"
        ]
    else:
        try:
            fuzzingStructures = fFuzzStruct.read().splitlines()
        except:
            # NOTE(review): on failure fuzzingStructures stays undefined and
            # the loop below would raise NameError — confirm intended flow.
            print("Usufy could NOT open the following file: " + fFuzzStruct)
    res = {}
    lines = fDomains.read().splitlines()
    for l in lines:
        # Each line is expected to be "<domain> <nickname>".
        domain = l.split()[0]
        print("Performing tests for" + domain + "...")
        nick = l.split()[1]
        possibleURL = []
        for struct in fuzzingStructures:
            # Instantiate the template for this domain/nick pair.
            urlToTry = struct.replace("<DOMAIN>", domain)
            test = urlToTry.replace("<USERNAME>", nick.lower())
            print("Processing " + test + "...")
            i3Browser = browser.Browser()
            try:
                html = i3Browser.recoverURL(test)
                # A page that echoes the nickname is a likely profile page.
                if nick in html:
                    possibleURL.append(test)
                    print(general.success("\tPossible usufy found!!!\n"))
            except:
                print("The resource could not be downloaded.")
        res[domain] = possibleURL
    print(json.dumps(res, indent=2))
    return res
Method to guess the usufy path against a list of domains or subdomains .
49,456
def _prepare_filtering_params ( domain = None , category = None , sponsored_source = None , has_field = None , has_fields = None , query_params_match = None , query_person_match = None , ** kwargs ) : if query_params_match not in ( None , True ) : raise ValueError ( 'query_params_match can only be `True`' ) if query_person_match not in ( None , True ) : raise ValueError ( 'query_person_match can only be `True`' ) params = [ ] if domain is not None : params . append ( 'domain:%s' % domain ) if category is not None : Source . validate_categories ( [ category ] ) params . append ( 'category:%s' % category ) if sponsored_source is not None : params . append ( 'sponsored_source:%s' % sponsored_source ) if query_params_match is not None : params . append ( 'query_params_match' ) if query_person_match is not None : params . append ( 'query_person_match' ) has_fields = has_fields or [ ] if has_field is not None : has_fields . append ( has_field ) for has_field in has_fields : params . append ( 'has_field:%s' % has_field . __name__ ) return params
Transform the params to the API format return a list of params .
49,457
def validate_query_params(self, strict=True):
    """Check that the request is valid and can be sent; raise ValueError if not.

    :param strict: with True every invalid query parameter raises; with
        False only conditions that make the search impossible raise.
    """
    def _fail(message):
        raise ValueError(message)

    if not (self.api_key or default_api_key):
        _fail('API key is missing')
    if strict and self.query_params_mode not in (None, 'and', 'or'):
        _fail('query_params_match should be one of "and"/"or"')
    if not self.person.is_searchable:
        _fail('No valid name/username/phone/email in request')
    if strict and self.person.unsearchable_fields:
        _fail('Some fields are unsearchable: %s' % self.person.unsearchable_fields)
Check if the request is valid and can be sent raise ValueError if not . strict is a boolean argument that defaults to True which means an exception is raised on every invalid query parameter if set to False an exception is raised only when the search request cannot be performed because required query params are missing .
49,458
def group_records_by_domain(self):
    """Return a dict mapping each source domain to its list of records."""
    return self.group_records(lambda record: record.source.domain)
Return the records grouped by the domain they came from . The return value is a dict a key in this dict is a domain and the value is a list of all the records with this domain .
49,459
def group_records_by_category(self):
    """Return a dict mapping each source category to its list of records.

    Bugfix: the original started with `Source.validate_categories(categories)`
    where `categories` was undefined, so every call raised NameError; the
    spurious validation has been removed.
    """
    return self.group_records(lambda record: record.source.category)
Return the records grouped by the category of their source . The return value is a dict a key in this dict is a category and the value is a list of all the records with this category .
49,460
def from_dict(d):
    """Build a SearchAPIResponse out of its dict representation."""
    def _load_person(value):
        return Person.from_dict(value) if value else None

    query = _load_person(d.get('query') or None)
    person = _load_person(d.get('person') or None)
    records = d.get('records')
    if records:
        records = [Record.from_dict(record) for record in records]
    suggested = d.get('suggested_searches')
    if suggested:
        suggested = [Record.from_dict(record) for record in suggested]
    return SearchAPIResponse(query=query, person=person, records=records,
                             suggested_searches=suggested,
                             warnings_=d.get('warnings', []))
Transform the dict to a response object and return the response .
49,461
def to_dict(self):
    """Serialise the response into a plain dict, omitting empty members."""
    out = {}
    if self.warnings:
        out['warnings'] = self.warnings
    for attr in ('query', 'person'):
        value = getattr(self, attr)
        if value is not None:
            out[attr] = value.to_dict()
    for attr in ('records', 'suggested_searches'):
        items = getattr(self, attr)
        if items:
            out[attr] = [item.to_dict() for item in items]
    return out
Return a dict representation of the response .
49,462
def from_dict(cls, d):
    """Build a field object of type `cls` from its dict representation.

    Keys starting with 'display' are derived values and are skipped; a
    leading '@' marks an attribute and is stripped; 'type' is renamed to
    'type_' to avoid the builtin name.
    """
    kwargs = {}
    # .items() replaces the Python-2-only .iteritems().
    for key, val in d.items():
        if key.startswith('display'):
            continue
        if key.startswith('@'):
            key = key[1:]
        if key == 'type':
            key = 'type_'
        elif key == 'valid_since':
            val = str_to_datetime(val)
        elif key == 'date_range':
            val = DateRange.from_dict(val)
        # str() keeps keyword names native strings on both Python versions.
        # The original's key.encode('ascii') yields bytes on Python 3, which
        # are not usable as **kwargs keys.
        kwargs[str(key)] = val
    return cls(**kwargs)
Transform the dict to a field object and return the field .
49,463
def to_dict(self):
    """Return a dict representation of the field.

    Attribute names are prefixed with '@'; child names are not. Falsy
    values are skipped unless they are numeric/boolean zeros.
    """
    d = {}
    if self.valid_since is not None:
        d['@valid_since'] = datetime_to_str(self.valid_since)
    for attr_list, prefix in [(self.attributes, '@'), (self.children, '')]:
        for attr in attr_list:
            value = getattr(self, attr)
            if isinstance(value, Serializable):
                value = value.to_dict()
            # `long` existed only on Python 2; `int` covers it on Python 3.
            if value or isinstance(value, (bool, int)):
                d[prefix + attr] = value
    if hasattr(self, 'display') and self.display:
        d['display'] = self.display
    return d
Return a dict representation of the field .
49,464
def is_searchable(self):
    """True when there is enough of a name to search by: both a first and
    a last name of at least 2 letters, or a raw name of at least 4."""
    first_len = len(alpha_chars(self.first or u''))
    last_len = len(alpha_chars(self.last or u''))
    raw_len = len(alpha_chars(self.raw or u''))
    return (first_len >= 2 and last_len >= 2) or raw_len >= 4
A bool value that indicates whether the name is a valid name to search by .
49,465
def is_searchable(self):
    """A value indicating whether the address can be searched by: a raw
    address, or a valid country with either no state or a valid state."""
    if self.raw:
        return self.raw
    return self.is_valid_country and (not self.state or self.is_valid_state)
A bool value that indicates whether the address is a valid address to search by .
49,466
def is_valid_state(self):
    """A bool-ish value indicating whether `state` holds a valid state
    code for the object's (valid) country."""
    country_known = self.is_valid_country and self.country.upper() in STATES
    return (country_known
            and self.state is not None
            and self.state.upper() in STATES[self.country.upper()])
A bool value that indicates whether the object s state is a valid state code .
49,467
def to_dict(self):
    """Extend Field.to_dict with the display_international attribute."""
    serialized = Field.to_dict(self)
    if self.display_international:
        serialized['display_international'] = self.display_international
    return serialized
Extend Field . to_dict take the display_international attribute .
49,468
def is_valid_email(self):
    """A bool indicating whether `address` matches the (very basic) email
    regular expression in Email.re_email."""
    if not self.address:
        return False
    return Email.re_email.match(self.address) is not None
A bool value that indicates whether the address is a valid email address. Note that the check is done by matching against the regular expression at Email.re_email, which is very basic and far from covering edge cases...
49,469
def age(self):
    """Estimated age in whole years, or None when no date-range is known.

    The middle of the DOB date-range stands in for the exact birth date.
    """
    if self.date_range is None:
        return None
    born = self.date_range.middle
    today = datetime.date.today()
    years = today.year - born.year
    # Subtract one year when this year's birthday has not happened yet.
    if (today.month, today.day) < (born.month, born.day):
        years -= 1
    return years
int, the estimated age of the person. Note that a DOB object is based on a date-range and the exact date is usually unknown, so for age calculation the middle of the range is assumed to be the real date-of-birth.
49,470
def age_range(self):
    """A (min_age, max_age) tuple; (None, None) without a date-range."""
    if self.date_range is None:
        return None, None
    # A person born at the *end* of the range is the youngest, so the
    # minimum age comes from the end date, and vice versa.
    youngest = DOB(date_range=DateRange(self.date_range.end, self.date_range.end))
    oldest = DOB(date_range=DateRange(self.date_range.start, self.date_range.start))
    return youngest.age, oldest.age
A tuple of two ints - the minimum and maximum age of the person .
49,471
def from_age_range(start_age, end_age):
    """Build a DOB whose date-range covers every birth date consistent
    with the given age interval (ages may be passed in either order).

    :raises ValueError: when either age is negative.
    """
    if start_age < 0 or end_age < 0:
        raise ValueError("start_age and end_age can't be negative")
    if start_age > end_age:
        start_age, end_age = end_age, start_age
    today = datetime.date.today()

    def _years_back(years):
        # Shift `today` back by `years`, clamping Feb-29 to Feb-28.
        try:
            return today.replace(year=today.year - years)
        except ValueError:
            return today.replace(year=today.year - years, day=28)

    start_date = _years_back(end_age + 1) + datetime.timedelta(days=1)
    end_date = _years_back(start_age)
    return DOB(date_range=DateRange(start_date, end_date))
Take a person s minimal and maximal age and return a new DOB object suitable for him .
49,472
def from_dict(cls, d):
    """Extend Field.from_dict and also deserialise the nested name."""
    rel = super(cls, cls).from_dict(d)
    name_dict = rel.name
    if name_dict is not None:
        rel.name = Name.from_dict(name_dict)
    return rel
Extend Field . from_dict and also load the name from the dict .
49,473
def from_dict(d):
    """Build a DateRange from a dict with 'start' and 'end' keys.

    :raises ValueError: when either bound is missing or empty.
    """
    raw_start = d.get('start')
    raw_end = d.get('end')
    if not (raw_start and raw_end):
        raise ValueError('DateRange must have both start and end')
    return DateRange(str_to_date(raw_start), str_to_date(raw_end))
Transform the dict to a DateRange object .
49,474
def to_dict(self):
    """Serialise the date-range into a {'start', 'end'} dict."""
    return {'start': date_to_str(self.start),
            'end': date_to_str(self.end)}
Transform the date - range to a dict .
49,475
def enumerateURL(urlDict, outputFolder, startIndex=0, maxErrors=100):
    """Enumerate profile URLs by replacing <INDEX> with growing integers.

    :param urlDict: dict mapping URL templates (containing an <INDEX>
        token) to an optional validation string that must appear in the
        downloaded page; None stores every page.
    :param outputFolder: folder where downloaded resources are written.
    :param startIndex: first index tried for each template.
    :param maxErrors: consecutive failures tolerated before moving on.
    """
    for url in urlDict.keys():
        domain = re.findall("://(.*)/", url)[0]
        index = startIndex
        consecutiveErrors = 0
        i3Browser = browser.Browser()
        # Bugfix: the original never updated consecutiveErrors, so this loop
        # could not terminate. Failures now count towards the limit and a
        # successful hit resets the counter.
        while consecutiveErrors <= maxErrors:
            newQuery = url.replace("<INDEX>", str(index))
            print(newQuery)
            try:
                data = i3Browser.recoverURL(newQuery)
                filename = domain.replace("/", "|") + "_" + "-profile_" + str(index).rjust(10, "0") + ".html"
                if urlDict[url] is not None and urlDict[url] not in data:
                    # Downloaded, but the validation marker is missing.
                    consecutiveErrors += 1
                else:
                    print(general.info("Storing resource as:\t" + filename + "..."))
                    with open(outputFolder + "/" + filename, "w") as oF:
                        oF.write(data)
                    consecutiveErrors = 0
            except:
                consecutiveErrors += 1
            index += 1
Function that performs the enumeration itself .
49,476
def checkIfEmailWasHacked(email=None, sleepSeconds=1):
    """Check whether the given email appears in the HIBP breach database.

    :param email: email address to look up.
    :param sleepSeconds: delay before each request, to respect rate limits.
    :return: a list of i3visio-profile entity dicts (one per breach); an
        empty list when nothing is found or the API call fails.
    """
    time.sleep(sleepSeconds)
    # HIBP sits behind Cloudflare; obtain valid cookies and UA first.
    print("\t[*] Bypassing Cloudflare Restriction...")
    ua = 'osrframework 0.18'
    useragent = {'User-Agent': ua}
    cookies, user_agent = cfscrape.get_tokens('https://haveibeenpwned.com/api/v2/breachedaccount/test@example.com', user_agent=ua)
    leaks = []
    apiURL = "https://haveibeenpwned.com/api/v2/breachedaccount/{}".format(email)
    time.sleep(sleepSeconds)
    data = requests.get(apiURL, headers=useragent, cookies=cookies, verify=True).text
    try:
        jsonData = json.loads(data)
        # Build one i3visio.profile entity per breached platform.
        for e in jsonData:
            new = {}
            new["value"] = "(HIBP) " + e.get("Name") + " - " + email
            new["type"] = "i3visio.profile"
            new["attributes"] = [
                {"type": "i3visio.platform_leaked", "value": e.get("Name"), "attributes": []},
                {"type": "@source", "value": "haveibeenpwned.com", "attributes": []},
                {"type": "@source_uri", "value": apiURL, "attributes": []},
                {"type": "@pwn_count", "value": e.get("PwnCount"), "attributes": []},
                {"type": "@added_date", "value": e.get("AddedDate"), "attributes": []},
                {"type": "@breach_date", "value": e.get("BreachDate"), "attributes": []},
                {"type": "@description", "value": e.get("Description"), "attributes": []}
            ] + general.expandEntitiesFromEmail(email)
            leaks.append(new)
    except ValueError:
        # Non-JSON response: the address was not found in any breach.
        return []
    except Exception:
        print("ERROR: Something happenned when using HIBP API.")
        return []
    return leaks
Method that checks if the given email is stored in the HIBP website .
49,477
def get_page(url):
    """Fetch a URL through the shared cookie jar (with a spoofed IE user
    agent) and return the response body."""
    request = Request(url)
    request.add_header('User-Agent',
                       'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)')
    cookie_jar.add_cookie_header(request)
    response = urlopen(request)
    cookie_jar.extract_cookies(response, request)
    body = response.read()
    response.close()
    cookie_jar.save()
    return body
Request the given URL and return the response page using the cookie jar .
49,478
def search(query, tld='com', lang='en', num=10, start=0, stop=None, pause=2.0, only_standard=False):
    """Search the given query string using Google; yield result URLs.

    :param query: query string (URL-quoted before use).
    :param tld: top level domain of the Google host to query.
    :param lang: language of the results.
    :param num: results requested per page.
    :param start: index of the first result to retrieve.
    :param stop: index of the last result to retrieve (None = unbounded).
    :param pause: seconds slept between page fetches.
    :param only_standard: when True, yield only standard results (anchors
        directly inside an <h3> element).

    NOTE: the URL templates (url_home, url_search, ...) are filled with
    `% vars()`, so they depend on this function's *local variable names*;
    do not rename locals here.
    """
    global BeautifulSoup
    if BeautifulSoup is None:
        # Lazy import: prefer bs4, fall back to the legacy BeautifulSoup.
        try:
            from bs4 import BeautifulSoup
        except ImportError:
            from BeautifulSoup import BeautifulSoup
    hashes = set()  # hashes of links already yielded, for deduplication
    query = quote_plus(query)
    # Fetch the home page first so the cookie jar gets Google's cookies.
    get_page(url_home % vars())
    # Choose the proper URL template for the first request.
    if start:
        if num == 10:
            url = url_next_page % vars()
        else:
            url = url_next_page_num % vars()
    else:
        if num == 10:
            url = url_search % vars()
        else:
            url = url_search_num % vars()
    while not stop or start < stop:
        time.sleep(pause)  # be polite / avoid getting blocked
        html = get_page(url)
        soup = BeautifulSoup(html)
        anchors = soup.find(id='search').findAll('a')
        for a in anchors:
            # Standard results have their anchor directly inside an <h3>.
            if only_standard and (not a.parent or a.parent.name.lower() != "h3"):
                continue
            try:
                link = a['href']
            except KeyError:
                continue
            link = filter_result(link)
            if not link:
                continue
            h = hash(link)
            if h in hashes:
                continue
            hashes.add(h)
            yield link
        # No navigation bar means this was the last result page.
        if not soup.find(id='nav'):
            break
        # Advance to the next page.
        start += num
        if num == 10:
            url = url_next_page % vars()
        else:
            url = url_next_page_num % vars()
Search the given query string using Google .
49,479
def add_fields(self, fields):
    """Append each field to its matching container attribute.

    :param fields: iterable of field objects.
    :raises ValueError: when a field's type has no known container.
    """
    for field in fields:
        field_cls = field.__class__
        if field_cls not in FieldsContainer.class_container:
            raise ValueError('Object of type %s is an invalid field' % field_cls)
        container_name = FieldsContainer.class_container[field_cls]
        getattr(self, container_name).append(field)
Add the fields to their corresponding container . fields is an iterable of field objects from osrframework . thirdparties . pipl_com . lib . fields .
49,480
def all_fields(self):
    """A flat list of every field held by this object's containers."""
    fields = []
    for container in FieldsContainer.class_container.values():
        fields.extend(getattr(self, container))
    return fields
A list with all the fields contained in this object .
49,481
def fields_from_dict(d):
    """Load all fields found in the dict `d`; return them as one list."""
    class_container = FieldsContainer.class_container
    # .items() replaces the Python-2-only .iteritems() call.
    return [field_cls.from_dict(field_dict)
            for field_cls, container in class_container.items()
            for field_dict in d.get(container, [])]
Load the fields from the dict return a list with all the fields .
49,482
def fields_to_dict(self):
    """Serialise all non-empty field containers into a dict."""
    serialized = {}
    for container in FieldsContainer.class_container.values():
        fields = getattr(self, container)
        if fields:
            serialized[container] = [field.to_dict() for field in fields]
    return serialized
Transform the object to a dict and return the dict .
49,483
def from_dict(d):
    """Build a Record from its dict representation."""
    valid_since = d.get('@valid_since')
    if valid_since:
        valid_since = str_to_datetime(valid_since)
    return Record(source=Source.from_dict(d.get('source', {})),
                  fields=Record.fields_from_dict(d),
                  query_params_match=d.get('@query_params_match'),
                  query_person_match=d.get('@query_person_match'),
                  valid_since=valid_since)
Transform the dict to a record object and return the record .
49,484
def to_dict(self):
    """Return a dict representation of the record, skipping unset flags."""
    d = {}
    for key, value in (('@query_params_match', self.query_params_match),
                       ('@query_person_match', self.query_person_match)):
        if value is not None:
            d[key] = value
    if self.valid_since is not None:
        d['@valid_since'] = datetime_to_str(self.valid_since)
    if self.source is not None:
        d['source'] = self.source.to_dict()
    d.update(self.fields_to_dict())
    return d
Return a dict representation of the record .
49,485
def is_searchable(self):
    """A bool indicating whether the person has enough searchable data
    (name/email/phone/username) to be sent as an API query.

    Bugfix: the original returned `bool(filter(...) or ...)`; on Python 3
    a filter object is always truthy, so the check always passed. `any()`
    is correct on both Python versions.
    """
    for group in (self.names, self.emails, self.phones, self.usernames):
        if any(field.is_searchable for field in group):
            return True
    return False
A bool value that indicates whether the person has enough data and can be sent as a query to the API .
49,486
def from_dict(d):
    """Build a Person from its dict representation."""
    sources = [Source.from_dict(source) for source in d.get('sources', [])]
    return Person(fields=Person.fields_from_dict(d),
                  sources=sources,
                  query_params_match=d.get('@query_params_match'))
Transform the dict to a person object and return the person .
49,487
def to_dict(self):
    """Return a dict representation of the person."""
    serialized = {}
    if self.query_params_match is not None:
        serialized['@query_params_match'] = self.query_params_match
    if self.sources:
        serialized['sources'] = [source.to_dict() for source in self.sources]
    serialized.update(self.fields_to_dict())
    return serialized
Return a dict representation of the person .
49,488
def processPhoneList(platformNames=None, numbers=None, excludePlatformNames=None):
    """Perform searches on a series of phone numbers.

    :param platformNames: platform names to include (None = default set).
    :param numbers: phone numbers to look up.
    :param excludePlatformNames: platform names to skip.
    :return: list of entity dicts collected from every platform.
    """
    # None defaults replace the original mutable-list default arguments.
    platformNames = platformNames or []
    numbers = numbers or []
    excludePlatformNames = excludePlatformNames or []
    platforms = platform_selection.getPlatformsByName(platformNames,
                                                      mode="phonefy",
                                                      excludePlatformNames=excludePlatformNames)
    results = []
    for num in numbers:
        for pla in platforms:
            # getInfo returns a JSON *string*; the original compared it
            # against {} (always unequal). Decode before testing emptiness.
            entities = pla.getInfo(query=num, process=True, mode="phonefy")
            decoded = json.loads(entities) if entities else []
            if decoded:
                results += decoded
    return results
Method to perform searches on a series of numbers.
49,489
def createURL(self, word, mode="phonefy"):
    """Create the mode's URL by replacing `word` in the URL template.

    NOTE(review): in "base" mode the first branch returns a *tuple*
    (url, word) while the second returns a plain string — callers of
    mode="base" must cope with both; confirm before unifying.
    Uses the Python-2-only urllib.pathname2url to escape the word.
    """
    try:
        # New-style wrappers: templates live in self.modes[mode]["url"].
        return self.modes[mode]["url"].format(placeholder=urllib.pathname2url(word))
    except:
        if mode == "base":
            if word[0] == "/":
                # Avoid a double slash when the word already starts with one.
                return self.baseURL + word[1:], word
            else:
                return self.baseURL + word
        else:
            # Legacy wrappers: templates live in self.url[mode] with a
            # "<mode>" placeholder token.
            try:
                return self.url[mode].replace("<" + mode + ">", urllib.pathname2url(word))
            except:
                pass
    # No template matched the requested mode.
    return None
Method to create the URL replacing the word in the appropriate URL .
49,490
def launchQueryForMode(self, query=None, mode=None):
    """Launch an i3Browser to collect data for the given query and mode.

    :return: the downloaded data, or None when the mode is unknown.
    """
    qURL = self.createURL(word=query, mode=mode)
    i3Browser = browser.Browser()
    try:
        if self.needsCredentials[mode]:
            self._getAuthenticated(i3Browser, qURL)
        return i3Browser.recoverURL(qURL)
    except KeyError:
        print(general.error("[*] '{}' is not a valid mode for this wrapper ({}).".format(mode, self.__class__.__name__)))
        return None
Method that launches an i3Browser to collect data .
49,491
def getInfo(self, query=None, process=False, mode="phonefy", qURI=None):
    """Check the presence of `query` on this platform.

    :return: the recovered entities serialised as a JSON string; an empty
        JSON list when the mode or the query is not valid.
    :raises NotImplementedModeError: when the wrapper lacks a do_<mode>.
    """
    results = []
    if self._modeIsValid(mode=mode) and self._isValidQuery(query, mode=mode):
        if mode in ("mailfy", "phonefy", "searchfy", "usufy"):
            try:
                results = getattr(self, "do_{}".format(mode))(query)
            except AttributeError:
                raise NotImplementedModeError(str(self), mode)
    return json.dumps(results)
Method that checks the presence of a given query and recovers the first list of complains .
49,492
def _modeIsValid ( self , mode ) : try : return mode in self . modes . keys ( ) except AttributeError as e : if mode in self . isValidMode . keys ( ) : if mode in self . isValidMode . keys ( ) : return True return False
Verification of whether the mode is a correct option to be used .
49,493
def _getAuthenticated(self, browser, url):
    """Authenticate the browser against `url` with one of the wrapper's
    stored credentials, chosen at random.

    :raises NoCredentialsException: when no credentials are configured.
    :raises BadImplementationError: when the wrapper is missing the
        expected credential attributes.
    """
    try:
        if len(self.creds) == 0:
            raise NoCredentialsException(str(self))
        cred = random.choice(self.creds)[0]
        browser.setNewPassword(url, cred.user, cred.password)
        return True
    except AttributeError as e:
        raise BadImplementationError(str(e))
Getting authenticated .
49,494
def _isValidQuery ( self , query , mode = "phonefy" ) : try : validator = self . modes [ mode ] . get ( "query_validator" ) if validator : try : compiledRegexp = re . compile ( "^{expr}$" . format ( expr = validator ) ) return compiledRegexp . match ( query ) except AttributeError as e : return True except AttributeError as e : compiledRegexp = re . compile ( "^{r}$" . format ( r = self . validQuery [ mode ] ) ) return compiledRegexp . match ( query )
Method to verify if a given query is processable by the platform .
49,495
def _somethingFound ( self , data , mode = "phonefy" ) : if data : try : for text in self . notFoundText [ mode ] : if text in data : return False return True except AttributeError as e : verifier = self . modes . get ( mode ) if verifier : if verifier . get ( "not_found_text" , "" ) in data : return False else : return True return False
Verifying if something was found .
49,496
def do_phonefy(self, query, **kwargs):
    """Verify a phonefy query on this platform and build its entities.

    :return: a list with a single i3visio.phone entity when the number is
        found, an empty list otherwise.
    """
    found = self.check_phonefy(query, kwargs)
    if not found:
        return []
    entity = {
        "type": "i3visio.phone",
        "value": self.platformName + " - " + query,
        "attributes": [],
    }
    try:
        entity["attributes"].append({
            "type": "i3visio.uri",
            "value": self.createURL(query, mode="phonefy"),
            "attributes": [],
        })
    except:
        # Not every wrapper can build a URL for the number.
        pass
    entity["attributes"].append({
        "type": "i3visio.platform",
        "value": self.platformName,
        "attributes": [],
    })
    entity["attributes"] += self.process_phonefy(found)
    return [entity]
Verifying a phonefy query in this platform .
49,497
def process_usufy(self, data):
    """Process and extract the entities of a usufy page.

    :param data: raw HTML of the downloaded profile page.
    :return: list of {"type", "value", "attributes"} entity dicts.
    """
    mode = "usufy"
    info = []
    try:
        # New-style wrappers: one regexp per extra field in self.modes.
        verifier = self.modes.get(mode, {}).get("extra_fields", {})
        for field in verifier.keys():
            regexp = verifier[field]
            values = re.findall(regexp, data)
            for val in values:
                aux = {}
                aux["type"] = field
                aux["value"] = val
                aux["attributes"] = []
                if aux not in info:  # deduplicate entities
                    info.append(aux)
    except AttributeError as e:
        # Legacy wrappers: "start"/"end" delimiters in self.fieldsRegExp.
        for field in self.fieldsRegExp[mode].keys():
            try:
                # Build a regexp from the start/end delimiters; trim any
                # trailing text after the end marker from each match.
                regexp = self.fieldsRegExp[mode][field]["start"] + "([^\)]+)" + self.fieldsRegExp[mode][field]["end"]
                tmp = re.findall(regexp, data)
                values = []
                for t in tmp:
                    if self.fieldsRegExp[mode][field]["end"] in t:
                        values.append(t.split(self.fieldsRegExp[mode][field]["end"])[0])
                    else:
                        values.append(t)
            except:
                # Plain regexp for this field (no delimiters declared).
                regexp = self.fieldsRegExp[mode][field]
                values = re.findall(regexp, data)
            for val in values:
                aux = {}
                aux["type"] = field
                aux["value"] = val
                aux["attributes"] = []
                if aux not in info:  # deduplicate entities
                    info.append(aux)
    return info
Method to process a downloaded page and extract the entities of a usufy response.
49,498
def doBenchmark(plats):
    """Measure how long recovering webpages takes with growing thread pools.

    :param plats: platforms to benchmark. NOTE(review): currently unused —
        the worker argument list `args` below is never populated, so every
        pool.map() call runs over an empty task list; confirm intent.
    :return: dict mapping the number of threads to the elapsed seconds.

    NOTE(review): Python-2-only code (print statements) and time.clock(),
    which was removed in Python 3.8.
    """
    logger = logging.getLogger("osrframework.utils")
    res = {}
    # Arguments for the pool workers (never filled in; see note above).
    args = []
    # Pool sizes to try.
    tries = [1, 4, 8, 16, 24, 32, 40, 48, 56, 64]
    logger.info("The test is starting recovering webpages by creating the following series of threads: " + str(tries))
    for i in tries:
        print "Testing creating " + str(i) + " simultaneous threads..."
        t0 = time.clock()
        pool = Pool(i)
        poolResults = pool.map(multi_run_wrapper, args)
        t1 = time.clock()
        res[i] = t1 - t0
        print str(i) + "\t" + str(res[i]) + "\n"
    return res
Perform the benchmark ...
49,499
def changePermissionsRecursively(path, uid, gid):
    """Recursively change owner/group of `path` and tighten permissions.

    Files become 0o600 (owner read/write) and directories 0o700 (owner
    only, executable so they can be traversed). Bugfix: the original
    passed the *decimal* literals 600 and 6600 to os.chmod, which encode
    meaningless permission bit patterns; they were almost certainly meant
    to be octal modes.

    :param path: root directory to process.
    :param uid: numeric user id to assign.
    :param gid: numeric group id to assign.
    """
    os.chown(path, uid, gid)
    for item in os.listdir(path):
        itempath = os.path.join(path, item)
        if os.path.isfile(itempath):
            try:
                os.chown(itempath, uid, gid)
            except Exception:
                # Best effort: keep going even if ownership cannot change.
                pass
            os.chmod(itempath, 0o600)
        elif os.path.isdir(itempath):
            try:
                os.chown(itempath, uid, gid)
            except Exception:
                pass
            os.chmod(itempath, 0o700)
            changePermissionsRecursively(itempath, uid, gid)
Function to recursively change the user id and group id .