idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
57,600 | def _trampoline ( name , module , * args , ** kwargs ) : function = _function_lookup ( name , module ) return function ( * args , ** kwargs ) | Trampoline function for decorators . |
57,601 | def _function_lookup ( name , module ) : try : return _registered_functions [ name ] except KeyError : __import__ ( module ) mod = sys . modules [ module ] getattr ( mod , name ) return _registered_functions [ name ] | Searches the function between the registered ones . If not found it imports the module forcing its registration . |
57,602 | def worker_process ( params , channel ) : signal ( SIGINT , SIG_IGN ) if params . initializer is not None : if not run_initializer ( params . initializer , params . initargs ) : os . _exit ( 1 ) try : for task in worker_get_next_task ( channel , params . max_tasks ) : payload = task . payload result = process_execute ( payload . function , * payload . args , ** payload . kwargs ) send_result ( channel , Result ( task . id , result ) ) except ( EnvironmentError , OSError , RuntimeError ) as error : os . _exit ( error . errno if error . errno else 1 ) except EOFError : os . _exit ( 0 ) | The worker process routines . |
57,603 | def task_transaction ( channel ) : with channel . lock : if channel . poll ( 0 ) : task = channel . recv ( ) channel . send ( Acknowledgement ( os . getpid ( ) , task . id ) ) else : raise RuntimeError ( "Race condition between workers" ) return task | Ensures a task is fetched and acknowledged atomically . |
57,604 | def schedule ( self , task ) : self . task_manager . register ( task ) self . worker_manager . dispatch ( task ) | Schedules a new Task in the PoolManager . |
57,605 | def process_next_message ( self , timeout ) : message = self . worker_manager . receive ( timeout ) if isinstance ( message , Acknowledgement ) : self . task_manager . task_start ( message . task , message . worker ) elif isinstance ( message , Result ) : self . task_manager . task_done ( message . task , message . result ) | Processes the next message coming from the workers . |
57,606 | def update_tasks ( self ) : for task in self . task_manager . timeout_tasks ( ) : self . task_manager . task_done ( task . id , TimeoutError ( "Task timeout" , task . timeout ) ) self . worker_manager . stop_worker ( task . worker_id ) for task in self . task_manager . cancelled_tasks ( ) : self . task_manager . task_done ( task . id , CancelledError ( ) ) self . worker_manager . stop_worker ( task . worker_id ) | Handles timing out Tasks . |
57,607 | def update_workers ( self ) : for expiration in self . worker_manager . inspect_workers ( ) : self . handle_worker_expiration ( expiration ) self . worker_manager . create_workers ( ) | Handles unexpected processes termination . |
57,608 | def task_done ( self , task_id , result ) : try : task = self . tasks . pop ( task_id ) except KeyError : return else : if task . future . cancelled ( ) : task . set_running_or_notify_cancel ( ) elif isinstance ( result , BaseException ) : task . future . set_exception ( result ) else : task . future . set_result ( result ) self . task_done_callback ( ) | Set the tasks result and run the callback . |
57,609 | def inspect_workers ( self ) : workers = tuple ( self . workers . values ( ) ) expired = tuple ( w for w in workers if not w . is_alive ( ) ) for worker in expired : self . workers . pop ( worker . pid ) return ( ( w . pid , w . exitcode ) for w in expired if w . exitcode != 0 ) | Updates the workers status . |
57,610 | def iter_chunks ( chunksize , * iterables ) : iterables = iter ( zip ( * iterables ) ) while 1 : chunk = tuple ( islice ( iterables , chunksize ) ) if not chunk : return yield chunk | Iterates over zipped iterables in chunks . |
57,611 | def run_initializer ( initializer , initargs ) : try : initializer ( * initargs ) return True except Exception as error : logging . exception ( error ) return False | Runs the Pool initializer dealing with errors . |
57,612 | def join ( self , timeout = None ) : if self . _context . state == RUNNING : raise RuntimeError ( 'The Pool is still running' ) if self . _context . state == CLOSED : self . _wait_queue_depletion ( timeout ) self . stop ( ) self . join ( ) else : self . _context . task_queue . put ( None ) self . _stop_pool ( ) | Joins the pool waiting until all workers exited . |
57,613 | def thread ( function ) : @ wraps ( function ) def wrapper ( * args , ** kwargs ) : future = Future ( ) launch_thread ( _function_handler , function , args , kwargs , future ) return future return wrapper | Runs the decorated function within a concurrent thread taking care of the result and error management . |
57,614 | def _function_handler ( function , args , kwargs , future ) : future . set_running_or_notify_cancel ( ) try : result = function ( * args , ** kwargs ) except BaseException as error : error . traceback = format_exc ( ) future . set_exception ( error ) else : future . set_result ( result ) | Runs the actual function in separate thread and returns its result . |
57,615 | def create_cities_csv ( filename = "places2k.txt" , output = "cities.csv" ) : with open ( filename , 'r' ) as city_file : with open ( output , 'w' ) as out : for line in city_file : if line [ 0 : 2 ] == "PR" : continue out . write ( " " . join ( line [ 9 : 72 ] . split ( ) [ : - 1 ] ) + '\n' ) | Takes the places2k . txt from USPS and creates a simple file of all cities . |
57,616 | def parse_address ( self , address , line_number = - 1 ) : return Address ( address , self , line_number , self . logger ) | Return an Address object from the given address . Passes itself to the Address constructor to use all the custom loaded suffixes cities etc . |
57,617 | def load_cities ( self , filename ) : with open ( filename , 'r' ) as f : for line in f : self . cities . append ( line . strip ( ) . lower ( ) ) | Load up all cities in lowercase for easier matching . The file should have one city per line with no extra characters . This isn t strictly required but will vastly increase the accuracy . |
57,618 | def load_streets ( self , filename ) : with open ( filename , 'r' ) as f : for line in f : self . streets . append ( line . strip ( ) . lower ( ) ) | Load up all streets in lowercase for easier matching . The file should have one street per line with no extra characters . This isn t strictly required but will vastly increase the accuracy . |
57,619 | def preprocess_address ( self , address ) : address = address . replace ( "# " , "#" ) address = address . replace ( " & " , "&" ) if re . search ( r"-?-?\w+ units" , address , re . IGNORECASE ) : address = re . sub ( r"-?-?\w+ units" , "" , address , flags = re . IGNORECASE ) apartment_regexes = [ r'#\w+ & \w+' , '#\w+ rm \w+' , "#\w+-\w" , r'apt #{0,1}\w+' , r'apartment #{0,1}\w+' , r'#\w+' , r'# \w+' , r'rm \w+' , r'unit #?\w+' , r'units #?\w+' , r'- #{0,1}\w+' , r'no\s?\d+\w*' , r'style\s\w{1,2}' , r'townhouse style\s\w{1,2}' ] for regex in apartment_regexes : apartment_match = re . search ( regex , address , re . IGNORECASE ) if apartment_match : self . apartment = self . _clean ( apartment_match . group ( ) ) address = re . sub ( regex , "" , address , flags = re . IGNORECASE ) address = re . sub ( r"\,\s*\," , "," , address ) return address | Takes a basic address and attempts to clean it up extract reasonably assured bits that may throw off the rest of the parsing and return the cleaned address . |
57,620 | def check_state ( self , token ) : if len ( token ) == 2 and self . state is None : if token . capitalize ( ) in self . parser . states . keys ( ) : self . state = self . _clean ( self . parser . states [ token . capitalize ( ) ] ) return True elif token . upper ( ) in self . parser . states . values ( ) : self . state = self . _clean ( token . upper ( ) ) return True if self . state is None and self . street_suffix is None and len ( self . comma_separated_address ) > 1 : if token . capitalize ( ) in self . parser . states . keys ( ) : self . state = self . _clean ( self . parser . states [ token . capitalize ( ) ] ) return True elif token . upper ( ) in self . parser . states . values ( ) : self . state = self . _clean ( token . upper ( ) ) return True return False | Check if state is in either the keys or values of our states list . Must come before the suffix . |
57,621 | def check_city ( self , token ) : shortened_cities = { 'saint' : 'st.' } if self . city is None and self . state is not None and self . street_suffix is None : if token . lower ( ) in self . parser . cities : self . city = self . _clean ( token . capitalize ( ) ) return True return False if self . city is None and self . apartment is None and self . street_suffix is None and len ( self . comma_separated_address ) > 1 : if token . lower ( ) in self . parser . cities : self . city = self . _clean ( token . capitalize ( ) ) return True return False if self . city is not None and self . street_suffix is None and self . street is None : print "Checking for multi part city" , token . lower ( ) , token . lower ( ) in shortened_cities . keys ( ) if token . lower ( ) + ' ' + self . city in self . parser . cities : self . city = self . _clean ( ( token . lower ( ) + ' ' + self . city ) . capitalize ( ) ) return True if token . lower ( ) in shortened_cities . keys ( ) : token = shortened_cities [ token . lower ( ) ] print "Checking for shorted multi part city" , token . lower ( ) + ' ' + self . city if token . lower ( ) + ' ' + self . city . lower ( ) in self . parser . cities : self . city = self . _clean ( token . capitalize ( ) + ' ' + self . city . capitalize ( ) ) return True | Check if there is a known city from our city list . Must come before the suffix . |
57,622 | def check_street_suffix ( self , token ) : if self . street_suffix is None and self . street is None : if token . upper ( ) in self . parser . suffixes . keys ( ) : suffix = self . parser . suffixes [ token . upper ( ) ] self . street_suffix = self . _clean ( suffix . capitalize ( ) + '.' ) return True elif token . upper ( ) in self . parser . suffixes . values ( ) : self . street_suffix = self . _clean ( token . capitalize ( ) + '.' ) return True return False | Attempts to match a street suffix . If found it will return the abbreviation with the first letter capitalized and a period after it . E . g . St . or Ave . |
57,623 | def check_street ( self , token ) : if self . street is None and self . street_suffix is not None and self . street_prefix is None and self . house_number is None : self . street = self . _clean ( token . capitalize ( ) ) return True elif self . street is not None and self . street_suffix is not None and self . street_prefix is None and self . house_number is None : self . street = self . _clean ( token . capitalize ( ) + ' ' + self . street ) return True if not self . street_suffix and not self . street and token . lower ( ) in self . parser . streets : self . street = self . _clean ( token ) return True return False | Let s assume a street comes before a prefix and after a suffix . This isn t always the case but we ll deal with that in our guessing game . Also two word street names ... well ... |
57,624 | def check_street_prefix ( self , token ) : if self . street and not self . street_prefix and token . lower ( ) . replace ( '.' , '' ) in self . parser . prefixes . keys ( ) : self . street_prefix = self . _clean ( self . parser . prefixes [ token . lower ( ) . replace ( '.' , '' ) ] ) return True return False | Finds street prefixes such as N . or Northwest before a street name . Standardizes to 1 or two letters followed by a period . |
57,625 | def check_house_number ( self , token ) : if self . street and self . house_number is None and re . match ( street_num_regex , token . lower ( ) ) : if '/' in token : token = token . split ( '/' ) [ 0 ] if '-' in token : token = token . split ( '-' ) [ 0 ] self . house_number = self . _clean ( str ( token ) ) return True return False | Attempts to find a house number generally the first thing in an address . If anything is in front of it we assume it is a building name . |
57,626 | def check_building ( self , token ) : if self . street and self . house_number : if not self . building : self . building = self . _clean ( token ) else : self . building = self . _clean ( token + ' ' + self . building ) return True return False | Building name check . If we have leftover and everything else is set probably building names . Allows for multi word building names . |
57,627 | def guess_unmatched ( self , token ) : if token . lower ( ) in [ 'apt' , 'apartment' ] : return False if token . strip ( ) == '-' : return True if len ( token ) <= 2 : return False if self . street_suffix is None and self . street is None and self . street_prefix is None and self . house_number is None : if re . match ( r"[A-Za-z]" , token ) : if self . line_number >= 0 : pass else : pass self . street = self . _clean ( token . capitalize ( ) ) return True return False | When we find something that doesn t match we can make an educated guess and log it as such . |
57,628 | def full_address ( self ) : addr = "" if self . house_number : addr = addr + self . house_number if self . street_prefix : addr = addr + " " + self . street_prefix if self . street : addr = addr + " " + self . street if self . street_suffix : addr = addr + " " + self . street_suffix if self . apartment : addr = addr + " " + self . apartment if self . city : addr = addr + ", " + self . city if self . state : addr = addr + ", " + self . state if self . zip : addr = addr + " " + self . zip return addr | Print the address in a human readable format |
57,629 | def _get_dstk_intersections ( self , address , dstk_address ) : normalized_address = self . _normalize ( address ) normalized_dstk_address = self . _normalize ( dstk_address ) address_uniques = set ( normalized_address ) - set ( normalized_dstk_address ) dstk_address_uniques = set ( normalized_dstk_address ) - set ( normalized_address ) if self . logger : self . logger . debug ( "Address Uniques {0}" . format ( address_uniques ) ) if self . logger : self . logger . debug ( "DSTK Address Uniques {0}" . format ( dstk_address_uniques ) ) return ( len ( address_uniques ) , len ( dstk_address_uniques ) ) | Find the unique tokens in the original address and the returned address . |
57,630 | def _normalize ( self , address ) : normalized_address = [ ] if self . logger : self . logger . debug ( "Normalizing Address: {0}" . format ( address ) ) for token in address . split ( ) : if token . upper ( ) in self . parser . suffixes . keys ( ) : normalized_address . append ( self . parser . suffixes [ token . upper ( ) ] . lower ( ) ) elif token . upper ( ) in self . parser . suffixes . values ( ) : normalized_address . append ( token . lower ( ) ) elif token . upper ( ) . replace ( '.' , '' ) in self . parser . suffixes . values ( ) : normalized_address . append ( token . lower ( ) . replace ( '.' , '' ) ) elif token . lower ( ) in self . parser . prefixes . keys ( ) : normalized_address . append ( self . parser . prefixes [ token . lower ( ) ] . lower ( ) ) elif token . upper ( ) in self . parser . prefixes . values ( ) : normalized_address . append ( token . lower ( ) [ : - 1 ] ) elif token . upper ( ) + '.' in self . parser . prefixes . values ( ) : normalized_address . append ( token . lower ( ) ) else : normalized_address . append ( token . lower ( ) ) return normalized_address | Normalize prefixes suffixes and other to make matching original to returned easier . |
57,631 | def empty ( ) : if not hasattr ( empty , '_instance' ) : empty . _instance = Interval ( AtomicInterval ( OPEN , inf , - inf , OPEN ) ) return empty . _instance | Create an empty set . |
57,632 | def from_data ( data , conv = None , pinf = float ( 'inf' ) , ninf = float ( '-inf' ) ) : intervals = [ ] conv = ( lambda v : v ) if conv is None else conv def _convert ( bound ) : if bound == pinf : return inf elif bound == ninf : return - inf else : return conv ( bound ) for item in data : left , lower , upper , right = item intervals . append ( AtomicInterval ( left , _convert ( lower ) , _convert ( upper ) , right ) ) return Interval ( * intervals ) | Import an interval from a piece of data . |
57,633 | def is_empty ( self ) : return ( self . _lower > self . _upper or ( self . _lower == self . _upper and ( self . _left == OPEN or self . _right == OPEN ) ) ) | Test interval emptiness . |
57,634 | def to_atomic ( self ) : lower = self . _intervals [ 0 ] . lower left = self . _intervals [ 0 ] . left upper = self . _intervals [ - 1 ] . upper right = self . _intervals [ - 1 ] . right return AtomicInterval ( left , lower , upper , right ) | Return the smallest atomic interval containing this interval . |
57,635 | def register_graphql_handlers ( app : "Application" , engine_sdl : str = None , engine_schema_name : str = "default" , executor_context : dict = None , executor_http_endpoint : str = "/graphql" , executor_http_methods : List [ str ] = None , engine : Engine = None , subscription_ws_endpoint : Optional [ str ] = None , graphiql_enabled : bool = False , graphiql_options : Optional [ Dict [ str , Any ] ] = None , ) -> "Application" : if ( not engine_sdl and not engine ) or ( engine and engine_sdl ) : raise Exception ( "an engine OR an engine_sdl should be passed here, not both, not none" ) if not executor_context : executor_context = { } executor_context [ "app" ] = app if not executor_http_methods : executor_http_methods = [ "GET" , "POST" ] if not engine : engine = Engine ( engine_sdl , engine_schema_name ) app [ "ttftt_engine" ] = engine for method in executor_http_methods : try : app . router . add_route ( method , executor_http_endpoint , partial ( getattr ( Handlers , "handle_%s" % method . lower ( ) ) , executor_context , ) , ) except AttributeError : raise Exception ( "Unsupported < %s > http method" % method ) _set_subscription_ws_handler ( app , subscription_ws_endpoint , engine ) _set_graphiql_handler ( app , graphiql_enabled , graphiql_options , executor_http_endpoint , executor_http_methods , subscription_ws_endpoint , ) return app | Register a Tartiflette Engine to an app |
57,636 | async def on_shutdown ( app ) : for method in app . get ( "close_methods" , [ ] ) : logger . debug ( "Calling < %s >" , method ) if asyncio . iscoroutinefunction ( method ) : await method ( ) else : method ( ) | app SHUTDOWN event handler |
57,637 | def _load_from_file ( path ) : config = [ ] try : with open ( path , 'r' ) as config_file : config = yaml . load ( config_file ) [ 'normalizations' ] except EnvironmentError as e : raise ConfigError ( 'Problem while loading file: %s' % e . args [ 1 ] if len ( e . args ) > 1 else e ) except ( TypeError , KeyError ) as e : raise ConfigError ( 'Config file has an unexpected structure: %s' % e ) except yaml . YAMLError : raise ConfigError ( 'Invalid YAML file syntax' ) return config | Load a config file from the given path . |
57,638 | def _parse_normalization ( normalization ) : parsed_normalization = None if isinstance ( normalization , dict ) : if len ( normalization . keys ( ) ) == 1 : items = list ( normalization . items ( ) ) [ 0 ] if len ( items ) == 2 : if items [ 1 ] and isinstance ( items [ 1 ] , dict ) : parsed_normalization = items else : parsed_normalization = items [ 0 ] elif isinstance ( normalization , STR_TYPE ) : parsed_normalization = normalization return parsed_normalization | Parse a normalization item . |
57,639 | def _parse_normalizations ( self , normalizations ) : parsed_normalizations = [ ] if isinstance ( normalizations , list ) : for item in normalizations : normalization = self . _parse_normalization ( item ) if normalization : parsed_normalizations . append ( normalization ) else : raise ConfigError ( 'List expected. Found %s' % type ( normalizations ) ) return parsed_normalizations | Returns a list of parsed normalizations . |
57,640 | def initialize_logger ( debug ) : level = logging . DEBUG if debug else logging . INFO logger = logging . getLogger ( 'cucco' ) logger . setLevel ( level ) formatter = logging . Formatter ( '%(asctime)s %(levelname).1s %(message)s' ) console_handler = logging . StreamHandler ( ) console_handler . setLevel ( level ) console_handler . setFormatter ( formatter ) logger . addHandler ( console_handler ) return logger | Set up logger to be used by the library . |
57,641 | def batch ( ctx , path , recursive , watch ) : batch = Batch ( ctx . obj [ 'config' ] , ctx . obj [ 'cucco' ] ) if os . path . exists ( path ) : if watch : batch . watch ( path , recursive ) elif os . path . isfile ( path ) : batch . process_file ( path ) else : batch . process_files ( path , recursive ) else : click . echo ( 'Error: Specified path doesn\'t exists' , err = True ) sys . exit ( - 1 ) | Normalize files in a path . |
57,642 | def normalize ( ctx , text ) : if text : click . echo ( ctx . obj [ 'cucco' ] . normalize ( text ) ) else : for line in sys . stdin : click . echo ( ctx . obj [ 'cucco' ] . normalize ( line ) ) | Normalize text or piped input . |
57,643 | def cli ( ctx , config , debug , language , verbose ) : ctx . obj = { } try : ctx . obj [ 'config' ] = Config ( normalizations = config , language = language , debug = debug , verbose = verbose ) except ConfigError as e : click . echo ( e . message ) sys . exit ( - 1 ) ctx . obj [ 'cucco' ] = Cucco ( ctx . obj [ 'config' ] ) | Cucco allows to apply normalizations to a given text or file . This normalizations include among others removal of accent marks stop words an extra white spaces replacement of punctuation symbols emails emojis etc . |
57,644 | def files_generator ( path , recursive ) : if recursive : for ( path , _ , files ) in os . walk ( path ) : for file in files : if not file . endswith ( BATCH_EXTENSION ) : yield ( path , file ) else : for file in os . listdir ( path ) : if ( os . path . isfile ( os . path . join ( path , file ) ) and not file . endswith ( BATCH_EXTENSION ) ) : yield ( path , file ) | Yield files found in a given path . |
57,645 | def process_file ( self , path ) : if self . _config . verbose : self . _logger . info ( 'Processing file "%s"' , path ) output_path = '%s%s' % ( path , BATCH_EXTENSION ) with open ( output_path , 'w' ) as file : for line in lines_generator ( path ) : file . write ( '%s\n' % self . _cucco . normalize ( line . encode ( ) . decode ( 'utf-8' ) ) ) self . _logger . debug ( 'Created file "%s"' , output_path ) | Process a file applying normalizations . |
57,646 | def process_files ( self , path , recursive = False ) : self . _logger . info ( 'Processing files in "%s"' , path ) for ( path , file ) in files_generator ( path , recursive ) : if not file . endswith ( BATCH_EXTENSION ) : self . process_file ( os . path . join ( path , file ) ) | Apply normalizations over all files in the given directory . |
57,647 | def stop_watching ( self ) : self . _watch = False if self . _observer : self . _logger . info ( 'Stopping watcher' ) self . _observer . stop ( ) self . _logger . info ( 'Watcher stopped' ) | Stop watching for files . |
57,648 | def watch ( self , path , recursive = False ) : self . _logger . info ( 'Initializing watcher for path "%s"' , path ) handler = FileHandler ( self ) self . _observer = Observer ( ) self . _observer . schedule ( handler , path , recursive ) self . _logger . info ( 'Starting watcher' ) self . _observer . start ( ) self . _watch = True try : self . _logger . info ( 'Waiting for file events' ) while self . _watch : time . sleep ( 1 ) except KeyboardInterrupt : self . stop_watching ( ) self . _observer . join ( ) | Watch for files in a directory and apply normalizations . |
57,649 | def _process_event ( self , event ) : if ( not event . is_directory and not event . src_path . endswith ( BATCH_EXTENSION ) ) : self . _logger . info ( 'Detected file change: %s' , event . src_path ) self . _batch . process_file ( event . src_path ) | Process received events . |
57,650 | def on_created ( self , event ) : self . _logger . debug ( 'Detected create event on watched path: %s' , event . src_path ) self . _process_event ( event ) | Function called everytime a new file is created . |
57,651 | def on_modified ( self , event ) : self . _logger . debug ( 'Detected modify event on watched path: %s' , event . src_path ) self . _process_event ( event ) | Function called everytime a new file is modified . |
57,652 | def _parse_normalizations ( normalizations ) : str_type = str if sys . version_info [ 0 ] > 2 else ( str , unicode ) for normalization in normalizations : yield ( normalization , { } ) if isinstance ( normalization , str_type ) else normalization | Parse and yield normalizations . |
57,653 | def _parse_stop_words_file ( self , path ) : language = None loaded = False if os . path . isfile ( path ) : self . _logger . debug ( 'Loading stop words in %s' , path ) language = path . split ( '-' ) [ - 1 ] if not language in self . __stop_words : self . __stop_words [ language ] = set ( ) with codecs . open ( path , 'r' , 'UTF-8' ) as file : loaded = True for word in file : self . __stop_words [ language ] . add ( word . strip ( ) ) return loaded | Load stop words from the given path . |
57,654 | def normalize ( self , text , normalizations = None ) : for normalization , kwargs in self . _parse_normalizations ( normalizations or self . _config . normalizations ) : try : text = getattr ( self , normalization ) ( text , ** kwargs ) except AttributeError as e : self . _logger . debug ( 'Invalid normalization: %s' , e ) return text | Normalize a given text applying all normalizations . |
57,655 | def remove_accent_marks ( text , excluded = None ) : if excluded is None : excluded = set ( ) return unicodedata . normalize ( 'NFKC' , '' . join ( c for c in unicodedata . normalize ( 'NFKD' , text ) if unicodedata . category ( c ) != 'Mn' or c in excluded ) ) | Remove accent marks from input text . |
57,656 | def replace_characters ( self , text , characters , replacement = '' ) : if not characters : return text characters = '' . join ( sorted ( characters ) ) if characters in self . _characters_regexes : characters_regex = self . _characters_regexes [ characters ] else : characters_regex = re . compile ( "[%s]" % re . escape ( characters ) ) self . _characters_regexes [ characters ] = characters_regex return characters_regex . sub ( replacement , text ) | Remove characters from text . |
57,657 | def replace_punctuation ( self , text , excluded = None , replacement = '' ) : if excluded is None : excluded = set ( ) elif not isinstance ( excluded , set ) : excluded = set ( excluded ) punct = '' . join ( self . __punctuation . difference ( excluded ) ) return self . replace_characters ( text , characters = punct , replacement = replacement ) | Replace punctuation symbols in text . |
57,658 | def replace_symbols ( text , form = 'NFKD' , excluded = None , replacement = '' ) : if excluded is None : excluded = set ( ) categories = set ( [ 'Mn' , 'Sc' , 'Sk' , 'Sm' , 'So' ] ) return '' . join ( c if unicodedata . category ( c ) not in categories or c in excluded else replacement for c in unicodedata . normalize ( form , text ) ) | Replace symbols in text . |
57,659 | def get_idb_graph ( ) : digraph = nx . DiGraph ( ) for function in functions ( ) : for xref in itertools . chain ( function . xrefs_from , function . xrefs_to ) : frm = _try_get_function_start ( xref . frm ) to = _try_get_function_start ( xref . to ) digraph . add_edge ( frm , to ) return digraph | Export IDB to a NetworkX graph . |
57,660 | def name ( self ) : return self . TYPES . get ( self . _type , self . TYPES [ idaapi . o_idpspec0 ] ) | Name of the xref type . |
57,661 | def reg ( self ) : if self . type . is_displ or self . type . is_phrase : size = core . get_native_size ( ) return base . get_register_name ( self . reg_id , size ) if self . type . is_reg : return base . get_register_name ( self . reg_id , self . size ) else : raise exceptions . SarkOperandWithoutReg ( "Operand does not have a register." ) | Name of the register used in the operand . |
57,662 | def has_reg ( self , reg_name ) : return any ( operand . has_reg ( reg_name ) for operand in self . operands ) | Check if a register is used in the instruction . |
57,663 | def regs ( self ) : regs = set ( ) for operand in self . operands : if not operand . type . has_reg : continue regs . update ( operand . regs ) return regs | Names of all registers used by the instruction . |
57,664 | def _pad ( self , text ) : top_bottom = ( "\n" * self . _padding ) + " " right_left = " " * self . _padding * self . PAD_WIDTH return top_bottom + right_left + text + right_left + top_bottom | Pad the text . |
57,665 | def _make_unique_title ( self , title ) : unique_title = title for counter in itertools . count ( ) : unique_title = "{}-{}" . format ( title , counter ) if not idaapi . find_tform ( unique_title ) : break return unique_title | Make the title unique . |
57,666 | def _get_handler ( self , node_id ) : handler = self . _get_attrs ( node_id ) . get ( self . HANDLER , self . _default_handler ) if not isinstance ( handler , BasicNodeHandler ) : idaapi . msg ( ( "Invalid handler for node {}: {}. All handlers must inherit from" "`BasicNodeHandler`." ) . format ( node_id , handler ) ) handler = self . _default_handler return handler | Get the handler of a given node . |
57,667 | def _OnNodeInfo ( self , node_id ) : handler , value , attrs = self . _get_handling_triplet ( node_id ) frame_color = handler . on_frame_color ( value , attrs ) node_info = idaapi . node_info_t ( ) if frame_color is not None : node_info . frame_color = frame_color flags = node_info . get_flags_for_valid ( ) self . SetNodeInfo ( node_id , node_info , flags ) | Sets the node info based on its attributes . |
57,668 | def get_string ( ea ) : string_type = idc . GetStringType ( idaapi . get_item_head ( ea ) ) if string_type is None : raise exceptions . SarkNoString ( "No string at 0x{:08X}" . format ( ea ) ) string = idc . GetString ( ea , strtype = string_type ) if not string : raise exceptions . SarkNoString ( "No string at 0x{:08X}" . format ( ea ) ) return string | Read the string at the given ea . |
57,669 | def copy_current_file_offset ( ) : start , end = sark . get_selection ( ) try : file_offset = sark . core . get_fileregion_offset ( start ) clipboard . copy ( "0x{:08X}" . format ( file_offset ) ) except sark . exceptions . NoFileOffset : message ( "The current address cannot be mapped to a valid offset of the input file." ) | Get the file - offset mapped to the current address . |
57,670 | def fix_addresses ( start = None , end = None ) : if start in ( None , idaapi . BADADDR ) : start = idaapi . cvar . inf . minEA if end in ( None , idaapi . BADADDR ) : end = idaapi . cvar . inf . maxEA return start , end | Set missing addresses to start and end of IDB . |
57,671 | def set_name ( address , name , anyway = False ) : success = idaapi . set_name ( address , name , idaapi . SN_NOWARN | idaapi . SN_NOCHECK ) if success : return if anyway : success = idaapi . do_name_anyway ( address , name ) if success : return raise exceptions . SarkSetNameFailed ( "Failed renaming 0x{:08X} to {!r}." . format ( address , name ) ) raise exceptions . SarkErrorNameAlreadyExists ( "Can't rename 0x{:08X}. Name {!r} already exists." . format ( address , name ) ) | Set the name of an address . |
57,672 | def is_same_function ( ea1 , ea2 ) : func1 = idaapi . get_func ( ea1 ) func2 = idaapi . get_func ( ea2 ) if any ( func is None for func in ( func1 , func2 ) ) : return False return func1 . startEA == func2 . startEA | Are both addresses in the same function? |
57,673 | def get_nx_graph ( ea ) : nx_graph = networkx . DiGraph ( ) func = idaapi . get_func ( ea ) flowchart = FlowChart ( func ) for block in flowchart : nx_graph . add_node ( block . startEA ) for pred in block . preds ( ) : nx_graph . add_edge ( pred . startEA , block . startEA ) for succ in block . succs ( ) : nx_graph . add_edge ( block . startEA , succ . startEA ) return nx_graph | Convert an IDA flowchart to a NetworkX graph . |
def codeblocks(start=None, end=None, full=True):
    """Get all CodeBlocks in a given range.

    With full=True, yields blocks of every function intersecting the
    range (whole-function flowcharts); otherwise yields blocks from a
    bounds-limited flowchart only.
    """
    if not full:
        start, end = fix_addresses(start, end)
        for block in FlowChart(bounds=(start, end)):
            yield block
        return
    for function in functions(start, end):
        for block in FlowChart(f=function.func_t):
            yield block
def struct_member_error(err, sid, name, offset, size):
    """Create and format a struct member exception from an IDA error code."""
    exception, msg = STRUCT_ERROR_MAP[err]
    template = 'AddStructMember(struct="{}", member="{}", offset={}, size={}) failed: {}'
    return exception(template.format(idc.GetStrucName(sid), name, offset, size, msg))
def create_struct(name):
    """Create a structure and return its struct ID.

    Raises:
        SarkStructAlreadyExists: a struct with this name exists.
        SarkStructCreationFailed: IDA refused to create the struct.
    """
    sid = idc.GetStrucIdByName(name)
    if sid != idaapi.BADADDR:
        # Fixed message typo: "A struct names" -> "A struct named".
        raise exceptions.SarkStructAlreadyExists(
            "A struct named {!r} already exists.".format(name))
    sid = idc.AddStrucEx(-1, name, 0)
    if sid == idaapi.BADADDR:
        raise exceptions.SarkStructCreationFailed("Struct creation failed.")
    return sid
def get_struct(name):
    """Get a struct ID by its name; raise SarkStructNotFound if absent."""
    sid = idc.GetStrucIdByName(name)
    if sid == idaapi.BADADDR:
        raise exceptions.SarkStructNotFound()
    return sid
def get_common_register(start, end):
    """Get the register most commonly used in accessing structs.

    Counts the base register of every phrase operand in [start, end)
    and returns the most frequent one.

    Raises ValueError (from max) if no phrase operand is found.
    """
    registers = defaultdict(int)
    for line in lines(start, end):
        for operand in line.insn.operands:
            if not operand.type.has_phrase:
                continue
            if not operand.base:
                continue
            registers[operand.base] += 1
    # .items() instead of the Python-2-only .iteritems(); identical
    # result under max() on both Python 2 and 3.
    return max(registers.items(), key=operator.itemgetter(1))[0]
def _enum_member_error(err, eid, name, value, bitmask):
    """Format an enum member error from an IDA error code."""
    exception, msg = ENUM_ERROR_MAP[err]
    template = 'add_enum_member(enum="{}", member="{}", value={}, bitmask=0x{:08X}) failed: {}'
    return exception(template.format(idaapi.get_enum_name(eid), name, value, bitmask, msg))
def _get_enum(name):
    """Get an existing enum ID; raise EnumNotFound if it does not exist."""
    eid = idaapi.get_enum(name)
    if eid == idaapi.BADADDR:
        raise exceptions.EnumNotFound('Enum "{}" does not exist.'.format(name))
    return eid
def add_enum(name=None, index=None, flags=idaapi.hexflag(), bitfield=False):
    """Create a new enum and return it wrapped in an Enum object.

    Raises EnumAlreadyExists when `name` is taken, EnumCreationFailed
    when IDA refuses to create it.
    """
    if name is not None:
        # If _get_enum raises EnumNotFound the name is free and we
        # proceed; if it succeeds, the name is taken.
        with ignored(exceptions.EnumNotFound):
            _get_enum(name)
            raise exceptions.EnumAlreadyExists()
    if index is None or index < 0:
        index = idaapi.get_enum_qty()
    eid = idaapi.add_enum(index, name, flags)
    if eid == idaapi.BADADDR:
        raise exceptions.EnumCreationFailed('Failed creating enum "{}"'.format(name))
    if bitfield:
        idaapi.set_enum_bf(eid, bitfield)
    return Enum(eid=eid)
def _add_enum_member(enum, name, value, bitmask=DEFMASK):
    """Add an enum member; translate a non-zero IDA error into an exception."""
    error = idaapi.add_enum_member(enum, name, value, bitmask)
    if error:
        raise _enum_member_error(error, enum, name, value, bitmask)
def _iter_bitmasks(eid):
    """Iterate all bitmasks in a given enum, ending with DEFMASK itself."""
    bitmask = idaapi.get_first_bmask(eid)
    while True:
        yield bitmask
        if bitmask == DEFMASK:
            break
        bitmask = idaapi.get_next_bmask(eid, bitmask)
def _iter_enum_member_values(eid, bitmask):
    """Iterate member values with the given bitmask, ending with DEFMASK itself."""
    value = idaapi.get_first_enum_member(eid, bitmask)
    while True:
        yield value
        if value == DEFMASK:
            break
        value = idaapi.get_next_enum_member(eid, value, bitmask)
def _iter_serial_enum_member(eid, value, bitmask):
    """Iterate (cid, serial) of enum members with the given value and bitmask."""
    cid, serial = idaapi.get_first_serial_enum_member(eid, value, bitmask)
    # BADNODE marks the end of the serial chain.
    while cid != idaapi.BADNODE:
        yield cid, serial
        cid, serial = idaapi.get_next_serial_enum_member(cid, serial)
def _iter_enum_constant_ids(eid):
    """Iterate the constant IDs of all members in the given enum."""
    for bitmask in _iter_bitmasks(eid):
        for value in _iter_enum_member_values(eid, bitmask):
            for cid, _serial in _iter_serial_enum_member(eid, value, bitmask):
                yield cid
def add(self, name, value, bitmask=DEFMASK):
    """Add an enum member."""
    _add_enum_member(self._eid, name, value, bitmask)
def remove(self, name):
    """Remove an enum member by name; raise CantDeleteEnumMember on failure."""
    member = self[name]
    # Capture the member's identifying triple before deletion.
    serial, value, bmask = member.serial, member.value, member.bmask
    if not idaapi.del_enum_member(self._eid, value, serial, bmask):
        raise exceptions.CantDeleteEnumMember(
            "Can't delete enum member {!r}.".format(name))
def name(self, name):
    """Set the enum name; raise CantRenameEnum on failure."""
    if idaapi.set_enum_name(self.eid, name):
        return
    raise exceptions.CantRenameEnum(
        "Cant rename enum {!r} to {!r}.".format(self.name, name))
def name(self, name):
    """Set the member name; raise CantRenameEnumMember on failure."""
    if idaapi.set_enum_member_name(self.cid, name):
        return
    raise exceptions.CantRenameEnumMember(
        "Failed renaming {!r} to {!r}. Does the name exist somewhere else?".format(self.name, name))
def functions(start=None, end=None):
    """Iterate all functions in range as Function objects."""
    start, end = fix_addresses(start, end)
    for ea in idautils.Functions(start, end):
        yield Function(ea)
def xrefs_from(self):
    """Xrefs from the function, skipping flow xrefs and internal code xrefs."""
    for line in self.lines:
        for xref in line.xrefs_from:
            if xref.type.is_flow:
                continue
            # Code xrefs staying inside this function are not interesting.
            # (Membership is tested first, matching the original order.)
            if xref.to in self and xref.iscode:
                continue
            yield xref
def set_name(self, name, anyway=False):
    """Set the function name (delegates to the module-level set_name)."""
    set_name(self.startEA, name, anyway=anyway)
def color(self):
    """Function color in IDA View; None when no color is set."""
    color = idc.GetColor(self.ea, idc.CIC_FUNC)
    # 0xFFFFFFFF is IDA's "no color" sentinel.
    return None if color == 0xFFFFFFFF else color
def color(self, color):
    """Set the function color in IDA View; None clears the color."""
    idc.SetColor(self.ea, idc.CIC_FUNC, 0xFFFFFFFF if color is None else color)
def lines(start=None, end=None, reverse=False, selection=False):
    """Iterate Line objects in range, optionally in reverse or over the selection."""
    if selection:
        start, end = get_selection()
    else:
        start, end = fix_addresses(start, end)
    if reverse:
        item = idaapi.get_item_head(end - 1)
        while item >= start:
            yield Line(item)
            item = idaapi.get_item_head(item - 1)
    else:
        item = idaapi.get_item_head(start)
        while item < end:
            yield Line(item)
            item += idaapi.get_item_size(item)
def type(self):
    """Return the type of the Line ("code", "data", "string", "tail", "unknown"),
    or None if no property matches.

    The original built a dict keyed on the *boolean values* of the
    properties, so all truthy keys collapsed onto the single key True
    and later literal entries overwrote earlier ones. The explicit
    checks below replicate that "last truthy wins" semantics
    deterministically.
    """
    checks = (
        (self.is_unknown, "unknown"),
        (self.is_tail, "tail"),
        (self.is_string, "string"),
        (self.is_data, "data"),
        (self.is_code, "code"),
    )
    for is_set, kind in checks:
        if is_set:
            return kind
    return None
def color(self):
    """Line color in IDA View; None when no color is set."""
    color = idc.GetColor(self.ea, idc.CIC_ITEM)
    # 0xFFFFFFFF is IDA's "no color" sentinel.
    return None if color == 0xFFFFFFFF else color
def color(self, color):
    """Set the line color in IDA View; None clears the color."""
    idc.SetColor(self.ea, idc.CIC_ITEM, 0xFFFFFFFF if color is None else color)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.