idx (int64): 0 to 63k
question (string): 61 to 4.03k characters
target (string): 6 to 1.23k characters
6,800
def main ( ) : description = ( 'PATH can be either the path to a scratch file, or a ' 'directory containing scratch files. Multiple PATH ' 'arguments can be provided.' ) parser = OptionParser ( usage = '%prog -p PLUGIN_NAME [options] PATH...' , description = description , version = '%prog {}' . format ( __version__ ) ) parser . add_option ( '-d' , '--plugin-dir' , metavar = 'DIR' , help = ( 'Specify the path to a directory containing ' 'plugins. Plugins in this directory take ' 'precedence over similarly named plugins ' 'included with Hairball.' ) ) parser . add_option ( '-p' , '--plugin' , action = 'append' , help = ( 'Use the named plugin to perform analysis. ' 'This option can be provided multiple times.' ) ) parser . add_option ( '-k' , '--kurt-plugin' , action = 'append' , help = ( 'Provide either a python import path (e.g, ' 'kelp.octopi) to a package/module, or the path' ' to a python file, which will be loaded as a ' 'Kurt plugin. This option can be provided ' 'multiple times.' ) ) parser . add_option ( '-q' , '--quiet' , action = 'store_true' , help = ( 'Prevent output from Hairball. Plugins may still ' 'produce output.' ) ) parser . add_option ( '-C' , '--no-cache' , action = 'store_true' , help = 'Do not use Hairball\'s cache.' , default = False ) options , args = parser . parse_args ( sys . argv [ 1 : ] ) if not options . plugin : parser . error ( 'At least one plugin must be specified via -p.' ) if not args : parser . error ( 'At least one PATH must be provided.' ) if options . plugin_dir : if os . path . isdir ( options . plugin_dir ) : sys . path . append ( options . plugin_dir ) else : parser . error ( '{} is not a directory' . format ( options . plugin_dir ) ) hairball = Hairball ( options , args , cache = not options . no_cache ) hairball . initialize_plugins ( ) hairball . process ( ) hairball . finalize ( )
The entrypoint for the hairball command installed via setup.py.
6,801
def key_to_path ( self , key ) : return os . path . join ( self . cache_dir , key [ : 2 ] , key [ 2 : 4 ] , key [ 4 : ] + '.pkl' )
Return the fullpath to the file with sha1sum key .
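A minimal standalone sketch of the cache-path layout used by key_to_path above, assuming a hypothetical cache directory name; the 40-character sha1 hex digest is split into two 2-character shard directories plus a '.pkl' filename, exactly as the method does.

    import hashlib
    import os

    def key_to_path(cache_dir, key):
        # Shard the 40-char sha1 hex digest: aa/bb/<rest>.pkl
        return os.path.join(cache_dir, key[:2], key[2:4], key[4:] + '.pkl')

    key = hashlib.sha1(b'example scratch file contents').hexdigest()
    print(key_to_path('.hairball_cache', key))
    # e.g. .hairball_cache/3f/ab/<remaining 36 hex characters>.pkl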
6,802
def load ( self , filename ) : with open ( filename ) as fp : key = sha1 ( fp . read ( ) ) . hexdigest ( ) path = self . key_to_path ( key ) if key in self . hashes : try : with open ( path ) as fp : return cPickle . load ( fp ) except EOFError : os . unlink ( path ) self . hashes . remove ( key ) except IOError : self . hashes . remove ( key ) try : os . makedirs ( os . path . dirname ( path ) ) except OSError as exc : if exc . errno != errno . EEXIST : raise scratch = kurt . Project . load ( filename ) with os . fdopen ( os . open ( path , os . O_WRONLY | os . O_CREAT , 0400 ) , 'w' ) as fp : cPickle . dump ( scratch , fp , cPickle . HIGHEST_PROTOCOL ) self . hashes . add ( key ) return scratch
Optimized load and return the parsed version of filename .
6,803
def hairball_files ( self , paths , extensions ) : def add_file ( filename ) : return os . path . splitext ( filename ) [ 1 ] in extensions while paths : arg_path = paths . pop ( 0 ) if os . path . isdir ( arg_path ) : found = False for path , dirs , files in os . walk ( arg_path ) : dirs . sort ( ) for filename in sorted ( files ) : if add_file ( filename ) : yield os . path . join ( path , filename ) found = True if not found : if not self . options . quiet : print ( 'No files found in {}' . format ( arg_path ) ) elif add_file ( arg_path ) : yield arg_path elif not self . options . quiet : print ( 'Invalid file {}' . format ( arg_path ) ) print ( 'Did you forget to load a Kurt plugin (-k)?' )
Yield filepath to files with the proper extension within paths .
6,804
def initialize_plugins ( self ) : for plugin_name in self . options . plugin : parts = plugin_name . split ( '.' ) if len ( parts ) > 1 : module_name = '.' . join ( parts [ : - 1 ] ) class_name = parts [ - 1 ] else : module_name = parts [ 0 ] class_name = parts [ 0 ] . title ( ) plugin = None for package in ( None , 'hairball.plugins' ) : if package : module_name = '{}.{}' . format ( package , module_name ) try : module = __import__ ( module_name , fromlist = [ class_name ] ) plugin = getattr ( module , class_name ) ( ) if not isinstance ( plugin , HairballPlugin ) : sys . stderr . write ( 'Invalid type for plugin {}: {}\n' . format ( plugin_name , type ( plugin ) ) ) plugin = None else : break except ( ImportError , AttributeError ) : pass if plugin : self . plugins . append ( plugin ) else : sys . stderr . write ( 'Cannot find plugin {}\n' . format ( plugin_name ) ) if not self . plugins : sys . stderr . write ( 'No plugins loaded. Goodbye!\n' ) sys . exit ( 1 )
Attempt to load and initialize all the plugins.
6,805
def process ( self ) : for filename in self . hairball_files ( self . paths , self . extensions ) : if not self . options . quiet : print ( filename ) try : if self . cache : scratch = self . cache . load ( filename ) else : scratch = kurt . Project . load ( filename ) except Exception : traceback . print_exc ( ) continue for plugin in self . plugins : plugin . _process ( scratch , filename = filename )
Run the analysis across all files found in the given paths .
6,806
def maybe_dotted ( module , throw = True ) : try : return Configurator ( ) . maybe_dotted ( module ) except ImportError as e : err = '%s not found. %s' % ( module , e ) if throw : raise ImportError ( err ) else : log . error ( err ) return None
If module is a dotted string pointing to a module, import and return the module object.
6,807
def issequence ( arg ) : string_behaviour = ( isinstance ( arg , six . string_types ) or isinstance ( arg , six . text_type ) ) list_behaviour = hasattr ( arg , '__getitem__' ) or hasattr ( arg , '__iter__' ) return not string_behaviour and list_behaviour
Return True if arg acts as a list and does not look like a string .
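A Python 3-only sketch of the same duck-typing check without the six dependency (an assumption; the original supports Python 2 via six): anything with __getitem__ or __iter__ that is not a str or bytes counts as a sequence.

    def issequence(arg):
        # Strings and bytes iterate, but should not be treated as sequences here.
        string_behaviour = isinstance(arg, (str, bytes))
        list_behaviour = hasattr(arg, '__getitem__') or hasattr(arg, '__iter__')
        return not string_behaviour and list_behaviour

    assert issequence([1, 2, 3])
    assert issequence((1, 2))
    assert not issequence('abc')
    assert issequence({'a': 1})  # dicts iterate, so they count as sequences here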
6,808
def drop_reserved_params ( params ) : from nefertari import RESERVED_PARAMS params = params . copy ( ) for reserved_param in RESERVED_PARAMS : if reserved_param in params : params . pop ( reserved_param ) return params
Drops reserved params
6,809
def check_results ( tmp_ ) : if tmp_ [ 't' ] > 0 : if tmp_ [ 'l' ] > 0 : if tmp_ [ 'rr' ] > 0 or tmp_ [ 'ra' ] > 1 : print 1 , 3 , tmp_ return 3 elif tmp_ [ 'cr' ] > 0 or tmp_ [ 'ca' ] > 1 : print 2 , 3 , tmp_ return 3 elif tmp_ [ 'mr' ] > 0 or tmp_ [ 'ma' ] > 1 : print 3 , 2 , tmp_ return 2 if tmp_ [ 'cr' ] > 1 or tmp_ [ 'ca' ] > 2 : print 4 , 2 , tmp_ return 2 if tmp_ [ 'mr' ] > 0 or tmp_ [ 'ma' ] > 1 : if tmp_ [ 'cr' ] > 0 or tmp_ [ 'ca' ] > 1 : print 6 , 0 , tmp_ return 0 if tmp_ [ 'rr' ] > 1 or tmp_ [ 'ra' ] > 2 : print 7 , 0 , tmp_ return 0 if tmp_ [ 'sr' ] > 1 or tmp_ [ 'sa' ] > 2 : print 8 , 0 , tmp_ return 0 if tmp_ [ 'l' ] > 0 : if tmp_ [ 'rr' ] > 0 or tmp_ [ 'ra' ] > 1 : print 9 , 2 , tmp_ return 2 if tmp_ [ 'cr' ] > 0 or tmp_ [ 'ca' ] > 1 : print 10 , 0 , tmp_ return 0 return - 1
Return an animation score of 0, 2, or 3 based on the accumulated block counters, or -1 if no rule matches.
6,810
def _check_animation ( self , last , last_level , gen ) : tmp_ = Counter ( ) results = Counter ( ) name , level , block = last , last_level , last others = False while name in self . ANIMATION and level >= last_level : if name in self . LOOP : if block != last : count = self . check_results ( tmp_ ) if count > - 1 : results [ count ] += 1 tmp_ . clear ( ) tmp_ [ 'last' ] += 1 for attribute in ( 'costume' , 'orientation' , 'position' , 'size' ) : if ( name , 'relative' ) in self . BLOCKMAPPING [ attribute ] : tmp_ [ ( attribute , 'relative' ) ] += 1 elif ( name , 'absolute' ) in self . BLOCKMAPPING [ attribute ] : tmp_ [ ( attribute , 'absolute' ) ] += 1 if name in self . TIMING : tmp_ [ 'timing' ] += 1 last_level = level name , level , block = next ( gen , ( '' , 0 , '' ) ) if name not in self . ANIMATION and name != '' : if not others : if block . type . shape != 'stack' : last_level = level ( name , level , block ) = next ( gen , ( '' , 0 , '' ) ) others = True count = self . check_results ( tmp_ ) if count > - 1 : results [ count ] += 1 return gen , results
Internal helper function to check the animation .
6,811
def analyze ( self , scratch , ** kwargs ) : results = Counter ( ) for script in self . iter_scripts ( scratch ) : gen = self . iter_blocks ( script . blocks ) name = 'start' level = None while name != '' : if name in self . ANIMATION : gen , count = self . _check_animation ( name , level , gen ) results . update ( count ) name , level , _ = next ( gen , ( '' , 0 , '' ) ) return { 'animation' : results }
Run and return the results from the Animation plugin .
6,812
def get_receive ( self , script_list ) : events = defaultdict ( set ) for script in script_list : if self . script_start_type ( script ) == self . HAT_WHEN_I_RECEIVE : event = script . blocks [ 0 ] . args [ 0 ] . lower ( ) events [ event ] . add ( script ) return events
Return a mapping of received event names to the scripts in script_list that handle them.
6,813
def analyze ( self , scratch , ** kwargs ) : all_scripts = list ( self . iter_scripts ( scratch ) ) results = defaultdict ( set ) broadcast = dict ( ( x , self . get_broadcast_events ( x ) ) for x in all_scripts ) correct = self . get_receive ( all_scripts ) results [ 'never broadcast' ] = set ( correct . keys ( ) ) for script , events in broadcast . items ( ) : for event in events . keys ( ) : if event is True : results [ 'dynamic broadcast' ] . add ( script . morph . name ) del events [ event ] elif event in correct : results [ 'never broadcast' ] . discard ( event ) else : results [ 'never received' ] . add ( event ) for event in correct . keys ( ) : if event in results [ 'never broadcast' ] : del correct [ event ] for events in broadcast . values ( ) : if len ( events ) > 1 : for event in events : if event in correct : results [ 'parallel broadcasts' ] . add ( event ) del correct [ event ] for event , scripts in correct . items ( ) : if len ( scripts ) > 1 : for script in scripts : for _ , _ , block in self . iter_blocks ( script . blocks ) : if block . type . shape == 'stack' : results [ 'multiple receivers with delay' ] . add ( event ) if event in correct : del correct [ event ] results [ 'success' ] = set ( correct . keys ( ) ) return { 'broadcast' : results }
Run and return the results from the BroadcastReceive plugin .
6,814
def analyze ( self , scratch , ** kwargs ) : errors = Counter ( ) for script in self . iter_scripts ( scratch ) : prev_name , prev_depth , prev_block = '' , 0 , script . blocks [ 0 ] gen = self . iter_blocks ( script . blocks ) for name , depth , block in gen : if prev_depth == depth : if prev_name in self . SAY_THINK : if name == 'play sound %s until done' : if not self . is_blank ( prev_block . args [ 0 ] ) : errors += self . check ( gen ) elif prev_name in self . SAY_THINK_DURATION and 'play sound %s' in name : errors [ '1' ] += 1 elif prev_name == 'play sound %s' : if name in self . SAY_THINK : errors [ self . INCORRECT ] += 1 elif name in self . SAY_THINK_DURATION : if self . is_blank ( block . args [ 0 ] ) : errors [ self . ERROR ] += 1 else : errors [ self . HACKISH ] += 1 elif prev_name == 'play sound %s until done' and name in self . ALL_SAY_THINK : if not self . is_blank ( block . args [ 0 ] ) : errors [ self . INCORRECT ] += 1 prev_name , prev_depth , prev_block = name , depth , block return { 'sound' : errors }
Categorize instances of attempted say and sound synchronization .
6,815
def check ( self , gen ) : retval = Counter ( ) name , _ , block = next ( gen , ( '' , 0 , '' ) ) if name in self . SAY_THINK : if self . is_blank ( block . args [ 0 ] ) : retval [ self . CORRECT ] += 1 else : name , _ , block = next ( gen , ( '' , 0 , '' ) ) if name == 'play sound %s until done' : retval [ self . CORRECT ] += 1 retval += self . check ( gen ) else : retval [ self . INCORRECT ] += 1 else : retval [ self . INCORRECT ] += 1 return retval
Check that the last part of the chain matches .
6,816
def _setup_stdout ( self ) : if self . buffer : if self . _stderr_buffer is None : self . _stderr_buffer = StringIO ( ) self . _stdout_buffer = StringIO ( ) sys . stdout = self . _stdout_buffer sys . stderr = self . _stderr_buffer
Hook stdout and stderr if buffering is enabled .
6,817
def _restore_stdout ( self ) : if self . buffer : if self . _mirror_output : output = sys . stdout . getvalue ( ) error = sys . stderr . getvalue ( ) if output : if not output . endswith ( '\n' ) : output += '\n' self . _original_stdout . write ( STDOUT_LINE % output ) if error : if not error . endswith ( '\n' ) : error += '\n' self . _original_stderr . write ( STDERR_LINE % error ) sys . stdout = self . _original_stdout sys . stderr = self . _original_stderr self . _stdout_buffer . seek ( 0 ) self . _stdout_buffer . truncate ( ) self . _stderr_buffer . seek ( 0 ) self . _stderr_buffer . truncate ( )
Unhook stdout and stderr if buffering is enabled .
6,818
def add_result_handler ( self , handler ) : self . _result_handlers . append ( handler ) if self . _sorted_handlers : self . _sorted_handlers = None
Register a new result handler .
6,819
def addError ( self , test , exception ) : result = self . _handle_result ( test , TestCompletionStatus . error , exception = exception ) self . errors . append ( result ) self . _mirror_output = True
Register that a test ended in an error .
6,820
def addFailure ( self , test , exception ) : result = self . _handle_result ( test , TestCompletionStatus . failure , exception = exception ) self . failures . append ( result ) self . _mirror_output = True
Register that a test ended with a failure .
6,821
def addSkip ( self , test , reason ) : result = self . _handle_result ( test , TestCompletionStatus . skipped , message = reason ) self . skipped . append ( result )
Register that a test was skipped.
6,822
def addExpectedFailure ( self , test , exception ) : result = self . _handle_result ( test , TestCompletionStatus . expected_failure , exception = exception ) self . expectedFailures . append ( result )
Register that a test failed and was expected to fail.
6,823
def addUnexpectedSuccess ( self , test ) : result = self . _handle_result ( test , TestCompletionStatus . unexpected_success ) self . unexpectedSuccesses . append ( result )
Register a test that passed unexpectedly .
6,824
def set_terminal_key ( self , encrypted_key ) : if encrypted_key : try : new_key = bytes . fromhex ( encrypted_key ) if len ( self . terminal_key ) != len ( new_key ) : return False self . terminal_key = self . tmk_cipher . decrypt ( new_key ) self . store_terminal_key ( raw2str ( self . terminal_key ) ) self . tpk_cipher = DES3 . new ( self . terminal_key , DES3 . MODE_ECB ) self . print_keys ( ) return True except ValueError : return False return False
Change the terminal key. encrypted_key is a hex string and is expected to be encrypted under the master key.
6,825
def get_encrypted_pin ( self , clear_pin , card_number ) : if not self . terminal_key : print ( 'Terminal key is not set' ) return '' if self . pinblock_format == '01' : try : pinblock = bytes . fromhex ( get_pinblock ( clear_pin , card_number ) ) except TypeError : return '' encrypted_pinblock = self . tpk_cipher . encrypt ( pinblock ) return raw2str ( encrypted_pinblock ) else : print ( 'Unsupported PIN Block format' ) return ''
Get PIN block in ISO 0 format encrypted with the terminal key
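The get_pinblock helper is not shown above; as an illustration only, here is a sketch of ISO 9564 format 0 clear PIN-block construction under its usual definition (an assumption, not necessarily what get_pinblock does): a padded PIN field is XORed with a field built from the rightmost 12 PAN digits excluding the check digit.

    def iso0_pinblock(clear_pin, card_number):
        # Field 1: '0' + PIN length (1 hex digit) + PIN, right-padded with 'F' to 16 digits.
        pin_field = '0{:01X}{}'.format(len(clear_pin), clear_pin).ljust(16, 'F')
        # Field 2: '0000' + rightmost 12 PAN digits excluding the check digit.
        pan_field = '0000' + card_number[-13:-1]
        return '{:016X}'.format(int(pin_field, 16) ^ int(pan_field, 16))

    print(iso0_pinblock('1234', '4000001234562000'))  # 16 hex characters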
6,826
def is_url ( string , allowed_schemes = None ) : if not is_full_string ( string ) : return False valid = bool ( URL_RE . search ( string ) ) if allowed_schemes : return valid and any ( [ string . startswith ( s ) for s in allowed_schemes ] ) return valid
Check if a string is a valid url .
6,827
def is_credit_card ( string , card_type = None ) : if not is_full_string ( string ) : return False if card_type : if card_type not in CREDIT_CARDS : raise KeyError ( 'Invalid card type "{}". Valid types are: {}' . format ( card_type , ', ' . join ( CREDIT_CARDS . keys ( ) ) ) ) return bool ( CREDIT_CARDS [ card_type ] . search ( string ) ) for c in CREDIT_CARDS : if CREDIT_CARDS [ c ] . search ( string ) : return True return False
Checks if a string is a valid credit card number. If a card type is provided it checks that specific type; otherwise any known credit card number will be accepted.
6,828
def is_json ( string ) : if not is_full_string ( string ) : return False if bool ( JSON_WRAPPER_RE . search ( string ) ) : try : return isinstance ( json . loads ( string ) , dict ) except ( TypeError , ValueError , OverflowError ) : return False return False
Check if a string is a valid json .
6,829
def is_slug ( string , sign = '-' ) : if not is_full_string ( string ) : return False rex = r'^([a-z\d]+' + re . escape ( sign ) + r'?)*[a-z\d]$' return re . match ( rex , string ) is not None
Checks if a given string is a slug .
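A quick usage sketch of the slug pattern above: lowercase alphanumeric runs separated by single sign characters, with no leading or trailing separator. The is_full_string helper is not shown, so a plain truthiness check stands in for it here.

    import re

    def is_slug(string, sign='-'):
        if not string:
            return False
        rex = r'^([a-z\d]+' + re.escape(sign) + r'?)*[a-z\d]$'
        return re.match(rex, string) is not None

    assert is_slug('hello-world-123')
    assert not is_slug('Hello World')    # capitals and spaces are rejected
    assert not is_slug('-leading-dash')  # must start with an alphanumeric run
    assert is_slug('c_plus_plus', sign='_')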
6,830
def shuffle ( string ) : s = sorted ( string ) random . shuffle ( s ) return '' . join ( s )
Return a new string containing shuffled items .
6,831
def strip_html ( string , keep_tag_content = False ) : r = HTML_TAG_ONLY_RE if keep_tag_content else HTML_RE return r . sub ( '' , string )
Remove HTML code contained in the given string.
6,832
def parse ( self , inputstring , document ) : link = json . loads ( inputstring ) env = document . settings . env source_dir = os . path . dirname ( env . doc2path ( env . docname ) ) abs_path = os . path . normpath ( os . path . join ( source_dir , link [ 'path' ] ) ) path = utils . relative_path ( None , abs_path ) path = nodes . reprunicode ( path ) document . settings . record_dependencies . add ( path ) env . note_dependency ( path ) target_root = env . config . nbsphinx_link_target_root target = utils . relative_path ( target_root , abs_path ) target = nodes . reprunicode ( target ) . replace ( os . path . sep , '/' ) env . metadata [ env . docname ] [ 'nbsphinx-link-target' ] = target try : formats = env . config . nbsphinx_custom_formats except AttributeError : pass else : formats . setdefault ( '.nblink' , lambda s : nbformat . reads ( s , as_version = _ipynbversion ) ) try : include_file = io . FileInput ( source_path = path , encoding = 'utf8' ) except UnicodeEncodeError as error : raise NotebookError ( u'Problems with linked notebook "%s" path:\n' 'Cannot encode input file path "%s" ' '(wrong locale?).' % ( env . docname , SafeString ( path ) ) ) except IOError as error : raise NotebookError ( u'Problems with linked notebook "%s" path:\n%s.' % ( env . docname , ErrorString ( error ) ) ) try : rawtext = include_file . read ( ) except UnicodeError as error : raise NotebookError ( u'Problem with linked notebook "%s":\n%s' % ( env . docname , ErrorString ( error ) ) ) return super ( LinkedNotebookParser , self ) . parse ( rawtext , document )
Parse the nblink file .
6,833
def finalize ( self ) : if self . total_duplicate > 0 : print ( '{} duplicate scripts found' . format ( self . total_duplicate ) ) for duplicate in self . list_duplicate : print ( duplicate )
Output the duplicate scripts detected .
6,834
def analyze ( self , scratch , ** kwargs ) : scripts_set = set ( ) for script in self . iter_scripts ( scratch ) : if script [ 0 ] . type . text == 'define %s' : continue blocks_list = [ ] for name , _ , _ in self . iter_blocks ( script . blocks ) : blocks_list . append ( name ) blocks_tuple = tuple ( blocks_list ) if blocks_tuple in scripts_set : if len ( blocks_list ) > 3 : self . total_duplicate += 1 self . list_duplicate . append ( blocks_list ) else : scripts_set . add ( blocks_tuple )
Run and return the results from the DuplicateScripts plugin .
6,835
def _set_content_type ( self , system ) : request = system . get ( 'request' ) if request : response = request . response ct = response . content_type if ct == response . default_content_type : response . content_type = 'application/json'
Set response content type
6,836
def _render_response ( self , value , system ) : view = system [ 'view' ] enc_class = getattr ( view , '_json_encoder' , None ) if enc_class is None : enc_class = get_json_encoder ( ) return json . dumps ( value , cls = enc_class )
Render a response
6,837
def _get_common_kwargs ( self , system ) : enc_class = getattr ( system [ 'view' ] , '_json_encoder' , None ) if enc_class is None : enc_class = get_json_encoder ( ) return { 'request' : system [ 'request' ] , 'encoder' : enc_class , }
Get kwargs common for all methods .
6,838
def _get_create_update_kwargs ( self , value , common_kw ) : kw = common_kw . copy ( ) kw [ 'body' ] = value if '_self' in value : kw [ 'headers' ] = [ ( 'Location' , value [ '_self' ] ) ] return kw
Get kwargs common to create, update, and replace.
6,839
def _render_response ( self , value , system ) : super_call = super ( DefaultResponseRendererMixin , self ) . _render_response try : method_name = 'render_{}' . format ( system [ 'request' ] . action ) except ( KeyError , AttributeError ) : return super_call ( value , system ) method = getattr ( self , method_name , None ) if method is not None : common_kw = self . _get_common_kwargs ( system ) response = method ( value , system , common_kw ) system [ 'request' ] . response = response return return super_call ( value , system )
Handle response rendering .
6,840
def remember ( self , request , username , ** kw ) : if self . credentials_callback : token = self . credentials_callback ( username , request ) api_key = 'ApiKey {}:{}' . format ( username , token ) return [ ( 'WWW-Authenticate' , api_key ) ]
Returns a WWW-Authenticate header with a value that should be used in the Authorization header.
6,841
def _get_credentials ( self , request ) : authorization = request . headers . get ( 'Authorization' ) if not authorization : return None try : authmeth , authbytes = authorization . split ( ' ' , 1 ) except ValueError : return None if authmeth . lower ( ) != 'apikey' : return None if six . PY2 or isinstance ( authbytes , bytes ) : try : auth = authbytes . decode ( 'utf-8' ) except UnicodeDecodeError : auth = authbytes . decode ( 'latin-1' ) else : auth = authbytes try : username , api_key = auth . split ( ':' , 1 ) except ValueError : return None return username , api_key
Extract username and api key token from Authorization header
6,842
def _get_event_kwargs ( view_obj ) : request = view_obj . request view_method = getattr ( view_obj , request . action ) do_trigger = not ( getattr ( view_method , '_silent' , False ) or getattr ( view_obj , '_silent' , False ) ) if do_trigger : event_kwargs = { 'view' : view_obj , 'model' : view_obj . Model , 'fields' : FieldData . from_dict ( view_obj . _json_params , view_obj . Model ) } ctx = view_obj . context if hasattr ( ctx , 'pk_field' ) or isinstance ( ctx , DataProxy ) : event_kwargs [ 'instance' ] = ctx return event_kwargs
Helper function to get event kwargs .
6,843
def _get_event_cls ( view_obj , events_map ) : request = view_obj . request view_method = getattr ( view_obj , request . action ) event_action = ( getattr ( view_method , '_event_action' , None ) or request . action ) return events_map [ event_action ]
Helper function to get event class .
6,844
def subscribe_to_events ( config , subscriber , events , model = None ) : kwargs = { } if model is not None : kwargs [ 'model' ] = model for evt in events : config . add_subscriber ( subscriber , evt , ** kwargs )
Helper function to subscribe to group of events .
6,845
def add_field_processors ( config , processors , model , field ) : before_change_events = ( BeforeCreate , BeforeUpdate , BeforeReplace , BeforeUpdateMany , BeforeRegister , ) def wrapper ( event , _processors = processors , _field = field ) : proc_kw = { 'new_value' : event . field . new_value , 'instance' : event . instance , 'field' : event . field , 'request' : event . view . request , 'model' : event . model , 'event' : event , } for proc_func in _processors : proc_kw [ 'new_value' ] = proc_func ( ** proc_kw ) event . field . new_value = proc_kw [ 'new_value' ] event . set_field_value ( _field , proc_kw [ 'new_value' ] ) for evt in before_change_events : config . add_subscriber ( wrapper , evt , model = model , field = field )
Add processors for model field .
6,846
def set_field_value ( self , field_name , value ) : self . view . _json_params [ field_name ] = value if field_name in self . fields : self . fields [ field_name ] . new_value = value return fields = FieldData . from_dict ( { field_name : value } , self . model ) self . fields . update ( fields )
Set value of request field named field_name .
6,847
def set_field_value ( self , field_name , value ) : if self . response is None : return if 'data' in self . response : items = self . response [ 'data' ] else : items = [ self . response ] for item in items : item [ field_name ] = value
Set value of response field named field_name .
6,848
def process_fields_param ( fields ) : if not fields : return fields if isinstance ( fields , six . string_types ) : fields = split_strip ( fields ) if '_type' not in fields : fields . append ( '_type' ) return { '_source_include' : fields , '_source' : True , }
Process fields ES param .
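A rough standalone equivalent of the fields handling above, assuming split_strip simply splits on commas and trims whitespace (an assumption; that helper is not shown):

    def process_fields_param(fields):
        if not fields:
            return fields
        if isinstance(fields, str):
            # Stand-in for split_strip: split on commas and trim whitespace.
            fields = [f.strip() for f in fields.split(',') if f.strip()]
        if '_type' not in fields:
            fields.append('_type')
        return {'_source_include': fields, '_source': True}

    print(process_fields_param('name, email'))
    # {'_source_include': ['name', 'email', '_type'], '_source': True}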
6,849
def _catch_index_error ( self , response ) : code , headers , raw_data = response if not raw_data : return data = json . loads ( raw_data ) if not data or not data . get ( 'errors' ) : return try : error_dict = data [ 'items' ] [ 0 ] [ 'index' ] message = error_dict [ 'error' ] except ( KeyError , IndexError ) : return raise exception_response ( 400 , detail = message )
Catch and raise index errors which are not critical and thus not raised by elasticsearch-py.
6,850
def setup_mappings ( cls , force = False ) : if getattr ( cls , '_mappings_setup' , False ) and not force : log . debug ( 'ES mappings have been already set up for currently ' 'running application. Call `setup_mappings` with ' '`force=True` to perform mappings set up again.' ) return log . info ( 'Setting up ES mappings for all existing models' ) models = engine . get_document_classes ( ) try : for model_name , model_cls in models . items ( ) : if getattr ( model_cls , '_index_enabled' , False ) : es = cls ( model_cls . __name__ ) es . put_mapping ( body = model_cls . get_es_mapping ( ) ) except JHTTPBadRequest as ex : raise Exception ( ex . json [ 'extra' ] [ 'data' ] ) cls . _mappings_setup = True
Setup ES mappings for all existing models .
6,851
def process_chunks ( self , documents , operation ) : chunk_size = self . chunk_size start = end = 0 count = len ( documents ) while count : if count < chunk_size : chunk_size = count end += chunk_size bulk = documents [ start : end ] operation ( documents_actions = bulk ) start += chunk_size count -= chunk_size
Apply operation to chunks of documents of size self . chunk_size .
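The chunking logic above, isolated as a plain function over lists so it can be exercised without an Elasticsearch client; batches of chunk_size are passed to the supplied operation, with a smaller final batch for any remainder.

    def process_chunks(documents, operation, chunk_size=3):
        start = end = 0
        count = len(documents)
        while count:
            if count < chunk_size:
                chunk_size = count
            end += chunk_size
            operation(documents[start:end])
            start += chunk_size
            count -= chunk_size

    process_chunks(list(range(7)), print)
    # [0, 1, 2]
    # [3, 4, 5]
    # [6]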
6,852
def index_missing_documents ( self , documents , request = None ) : log . info ( 'Trying to index documents of type `{}` missing from ' '`{}` index' . format ( self . doc_type , self . index_name ) ) if not documents : log . info ( 'No documents to index' ) return query_kwargs = dict ( index = self . index_name , doc_type = self . doc_type , fields = [ '_id' ] , body = { 'ids' : [ d [ '_pk' ] for d in documents ] } , ) try : response = self . api . mget ( ** query_kwargs ) except IndexNotFoundException : indexed_ids = set ( ) else : indexed_ids = set ( d [ '_id' ] for d in response [ 'docs' ] if d . get ( 'found' ) ) documents = [ d for d in documents if str ( d [ '_pk' ] ) not in indexed_ids ] if not documents : log . info ( 'No documents of type `{}` are missing from ' 'index `{}`' . format ( self . doc_type , self . index_name ) ) return self . _bulk ( 'index' , documents , request )
Index documents that are missing from ES index .
6,853
def get_version ( path = "src/devpy/__init__.py" ) : init_content = open ( path , "rt" ) . read ( ) pattern = r"^__version__ = ['\"]([^'\"]*)['\"]" return re . search ( pattern , init_content , re . M ) . group ( 1 )
Return the version of devpy with a regex instead of importing it.
6,854
def add_plugin_arguments ( self , parser ) : for manager in self . hook_managers . values ( ) : if len ( list ( manager ) ) == 0 : continue manager . map ( self . _add_hook_extension_arguments , parser ) for namespace , manager in self . driver_managers . items ( ) : choices = list ( sorted ( manager . names ( ) ) ) if len ( choices ) == 0 : continue option , dest = self . _namespace_to_option ( namespace ) parser . add_argument ( option , help = self . _help [ namespace ] , dest = dest , choices = choices , default = 'default' ) option_prefix = '{0}-' . format ( option ) dest_prefix = '{0}_' . format ( dest ) manager . map ( self . _add_driver_extension_arguments , parser , option_prefix , dest_prefix )
Add plugin arguments to argument parser .
6,855
def get_enabled_hook_plugins ( self , hook , args , ** kwargs ) : manager = self . hook_managers [ hook ] if len ( list ( manager ) ) == 0 : return [ ] return [ plugin for plugin in manager . map ( self . _create_hook_plugin , args , ** kwargs ) if plugin is not None ]
Get enabled plugins for specified hook name .
6,856
def get_driver ( self , namespace , parsed_args , ** kwargs ) : option , dest = self . _namespace_to_option ( namespace ) dest_prefix = '{0}_' . format ( dest ) driver_name = getattr ( parsed_args , dest , 'default' ) driver_extension = self . driver_managers [ namespace ] [ driver_name ] return driver_extension . plugin . from_args ( parsed_args , dest_prefix , ** kwargs )
Get the mutually-exclusive plugin for the given plugin namespace.
6,857
def set_amount ( self , amount ) : if amount : try : self . IsoMessage . FieldData ( 4 , int ( amount ) ) except ValueError : self . IsoMessage . FieldData ( 4 , 0 ) self . rebuild ( )
Set transaction amount
6,858
def finalize ( self ) : for name , count in sorted ( self . blocks . items ( ) , key = lambda x : x [ 1 ] ) : print ( '{:3} {}' . format ( count , name ) ) print ( '{:3} total' . format ( sum ( self . blocks . values ( ) ) ) )
Output the aggregate block count results .
6,859
def analyze ( self , scratch , ** kwargs ) : file_blocks = Counter ( ) for script in self . iter_scripts ( scratch ) : for name , _ , _ in self . iter_blocks ( script . blocks ) : file_blocks [ name ] += 1 self . blocks . update ( file_blocks ) return { 'types' : file_blocks }
Run and return the results from the BlockCounts plugin .
6,860
def analyze ( self , scratch , ** kwargs ) : self . total_instances += 1 sprites = { } for sprite , script in self . iter_sprite_scripts ( scratch ) : if not script . reachable : sprites . setdefault ( sprite , [ ] ) . append ( script ) if sprites : self . dead_code_instances += 1 import pprint pprint . pprint ( sprites ) variable_event = any ( True in self . get_broadcast_events ( x ) for x in self . iter_scripts ( scratch ) ) return { 'dead_code' : { 'sprites' : sprites , 'variable_event' : variable_event } }
Run and return the results from the DeadCode plugin.
6,861
def finalize ( self ) : if self . total_instances > 1 : print ( '{} of {} instances contained dead code.' . format ( self . dead_code_instances , self . total_instances ) )
Output the number of instances that contained dead code .
6,862
def show_help ( name ) : print ( 'Usage: python3 {} [OPTIONS]... ' . format ( name ) ) print ( 'ISO8583 message client' ) print ( ' -v, --verbose\t\tRun transactions verbosely' ) print ( ' -p, --port=[PORT]\t\tTCP port to connect to, 1337 by default' ) print ( ' -s, --server=[IP]\t\tIP of the ISO host to connect to, 127.0.0.1 by default' ) print ( ' -t, --terminal=[ID]\t\tTerminal ID (used in DE 41 ISO field, 10001337 by default)' ) print ( ' -m, --merchant=[ID]\t\tMerchant ID (used in DE 42 ISO field, 999999999999001 by default)' ) print ( ' -k, --terminal-key=[KEY]\t\tTerminal key (\'DEADBEEF DEADBEEF DEADBEEF DEADBEEF\' by default)' ) print ( ' -K, --master-key=[KEY]\t\Master key (\'ABABABAB CDCDCDCD EFEFEFEF AEAEAEAE\' by default)' ) print ( ' -f, --file=[file.xml]\t\tUse transaction data from the given XML-file' )
Show help and basic usage
6,863
def find_top_level_directory ( start_directory ) : top_level = start_directory while os . path . isfile ( os . path . join ( top_level , '__init__.py' ) ) : top_level = os . path . dirname ( top_level ) if top_level == os . path . dirname ( top_level ) : raise ValueError ( "Can't find top level directory" ) return os . path . abspath ( top_level )
Finds the top-level directory of a project given a start directory inside the project.
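A small self-contained demonstration of the walk-up logic (the function is copied here so the demo runs on its own): build a nested package in a temporary directory and confirm the walk stops at the first directory without an __init__.py.

    import os
    import tempfile

    def find_top_level_directory(start_directory):
        top_level = start_directory
        while os.path.isfile(os.path.join(top_level, '__init__.py')):
            top_level = os.path.dirname(top_level)
            if top_level == os.path.dirname(top_level):
                raise ValueError("Can't find top level directory")
        return os.path.abspath(top_level)

    with tempfile.TemporaryDirectory() as root:
        sub = os.path.join(root, 'pkg', 'sub')
        os.makedirs(sub)
        for d in (os.path.join(root, 'pkg'), sub):
            open(os.path.join(d, '__init__.py'), 'w').close()
        assert find_top_level_directory(sub) == os.path.abspath(root)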
6,864
def discover ( self , start , top_level_directory = None , pattern = 'test*.py' ) : logger . debug ( 'Starting test discovery' ) if os . path . isdir ( start ) : start_directory = start return self . discover_by_directory ( start_directory , top_level_directory = top_level_directory , pattern = pattern ) elif os . path . isfile ( start ) : start_filepath = start return self . discover_by_file ( start_filepath , top_level_directory = top_level_directory ) else : package_or_module = start return self . discover_by_module ( package_or_module , top_level_directory = top_level_directory , pattern = pattern )
Do test case discovery .
6,865
def discover_by_module ( self , module_name , top_level_directory = None , pattern = 'test*.py' ) : if top_level_directory is not None and top_level_directory not in sys . path : sys . path . insert ( 0 , top_level_directory ) logger . debug ( 'Discovering tests by module: module_name=%r, ' 'top_level_directory=%r, pattern=%r' , module_name , top_level_directory , pattern ) try : module , case_attributes = find_module_by_name ( module_name ) except ImportError : return self . discover_filtered_tests ( module_name , top_level_directory = top_level_directory , pattern = pattern ) dirname , basename = os . path . split ( module . __file__ ) basename = os . path . splitext ( basename ) [ 0 ] if len ( case_attributes ) == 0 and basename == '__init__' : return self . discover_by_directory ( dirname , top_level_directory , pattern = pattern ) elif len ( case_attributes ) == 0 : return self . _loader . load_module ( module ) return self . discover_single_case ( module , case_attributes )
Find all tests in a package or module or load a single test case if a class or test inside a module was specified .
6,866
def discover_single_case ( self , module , case_attributes ) : case = module loader = self . _loader for index , component in enumerate ( case_attributes ) : case = getattr ( case , component , None ) if case is None : return loader . create_suite ( ) elif loader . is_test_case ( case ) : rest = case_attributes [ index + 1 : ] if len ( rest ) > 1 : raise ValueError ( 'Too many components in module path' ) elif len ( rest ) == 1 : return loader . create_suite ( [ loader . load_test ( case , * rest ) ] ) return loader . load_case ( case ) return loader . create_suite ( )
Find and load a single TestCase or TestCase method from a module .
6,867
def discover_by_directory ( self , start_directory , top_level_directory = None , pattern = 'test*.py' ) : start_directory = os . path . abspath ( start_directory ) if top_level_directory is None : top_level_directory = find_top_level_directory ( start_directory ) logger . debug ( 'Discovering tests in directory: start_directory=%r, ' 'top_level_directory=%r, pattern=%r' , start_directory , top_level_directory , pattern ) assert_start_importable ( top_level_directory , start_directory ) if top_level_directory not in sys . path : sys . path . insert ( 0 , top_level_directory ) tests = self . _discover_tests ( start_directory , top_level_directory , pattern ) return self . _loader . create_suite ( list ( tests ) )
Run test discovery in a directory .
6,868
def discover_by_file ( self , start_filepath , top_level_directory = None ) : start_filepath = os . path . abspath ( start_filepath ) start_directory = os . path . dirname ( start_filepath ) if top_level_directory is None : top_level_directory = find_top_level_directory ( start_directory ) logger . debug ( 'Discovering tests in file: start_filepath=%r, ' 'top_level_directory=' , start_filepath , top_level_directory ) assert_start_importable ( top_level_directory , start_directory ) if top_level_directory not in sys . path : sys . path . insert ( 0 , top_level_directory ) tests = self . _load_from_file ( start_filepath , top_level_directory ) return self . _loader . create_suite ( list ( tests ) )
Run test discovery on a single file .
6,869
def _set_options_headers ( self , methods ) : request = self . request response = request . response response . headers [ 'Allow' ] = ', ' . join ( sorted ( methods ) ) if 'Access-Control-Request-Method' in request . headers : response . headers [ 'Access-Control-Allow-Methods' ] = ', ' . join ( sorted ( methods ) ) if 'Access-Control-Request-Headers' in request . headers : response . headers [ 'Access-Control-Allow-Headers' ] = 'origin, x-requested-with, content-type' return response
Set proper headers .
6,870
def _get_handled_methods ( self , actions_map ) : methods = ( 'OPTIONS' , ) defined_actions = [ ] for action_name in actions_map . keys ( ) : view_method = getattr ( self , action_name , None ) method_exists = view_method is not None method_defined = view_method != self . not_allowed_action if method_exists and method_defined : defined_actions . append ( action_name ) for action in defined_actions : methods += actions_map [ action ] return methods
Get names of HTTP methods that can be used at requested URI .
6,871
def item_options ( self , ** kwargs ) : actions = self . _item_actions . copy ( ) if self . _resource . is_singular : actions [ 'create' ] = ( 'POST' , ) methods = self . _get_handled_methods ( actions ) return self . _set_options_headers ( methods )
Handle collection item OPTIONS request.
6,872
def collection_options ( self , ** kwargs ) : methods = self . _get_handled_methods ( self . _collection_actions ) return self . _set_options_headers ( methods )
Handle collection OPTIONS request.
6,873
def pop_aggregations_params ( self ) : from nefertari . view import BaseView self . _query_params = BaseView . convert_dotted ( self . view . _query_params ) for key in self . _aggregations_keys : if key in self . _query_params : return self . _query_params . pop ( key ) else : raise KeyError ( 'Missing aggregation params' )
Pop and return aggregation params from query string params .
6,874
def get_aggregations_fields ( cls , params ) : fields = [ ] for key , val in params . items ( ) : if isinstance ( val , dict ) : fields += cls . get_aggregations_fields ( val ) if key == 'field' : fields . append ( val ) return fields
Recursively get values under the field key .
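A quick illustration of the recursive field collection above, using a nested Elasticsearch-style aggregation body with hypothetical field names:

    def get_aggregations_fields(params):
        fields = []
        for key, val in params.items():
            if isinstance(val, dict):
                fields += get_aggregations_fields(val)
            if key == 'field':
                fields.append(val)
        return fields

    params = {
        'per_status': {
            'terms': {'field': 'status'},
            'aggs': {'avg_price': {'avg': {'field': 'price'}}},
        },
    }
    print(sorted(get_aggregations_fields(params)))  # ['price', 'status']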
6,875
def check_aggregations_privacy ( self , aggregations_params ) : fields = self . get_aggregations_fields ( aggregations_params ) fields_dict = dictset . fromkeys ( fields ) fields_dict [ '_type' ] = self . view . Model . __name__ try : validate_data_privacy ( self . view . request , fields_dict ) except wrappers . ValidationError as ex : raise JHTTPForbidden ( 'Not enough permissions to aggregate on ' 'fields: {}' . format ( ex ) )
Check per-field privacy rules in aggregations.
6,876
def aggregate ( self ) : from nefertari . elasticsearch import ES aggregations_params = self . pop_aggregations_params ( ) if self . view . _auth_enabled : self . check_aggregations_privacy ( aggregations_params ) self . stub_wrappers ( ) return ES ( self . view . Model . __name__ ) . aggregate ( _aggregations_params = aggregations_params , ** self . _query_params )
Perform aggregation and return response .
6,877
def get_random_hex ( length ) : if length <= 0 : return '' return hexify ( random . randint ( pow ( 2 , length * 2 ) , pow ( 2 , length * 4 ) ) ) [ 0 : length ]
Return random hex string of a given length
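For comparison, a sketch of the same idea using the standard library; this is an alternative, not what the code above does. secrets.token_hex(n) yields 2*n hex characters, so the result is over-generated and sliced to the requested length, and secrets is cryptographically stronger than random.

    import secrets

    def get_random_hex(length):
        if length <= 0:
            return ''
        # token_hex(n) returns 2*n hex characters; over-generate and slice.
        return secrets.token_hex((length + 1) // 2)[:length]

    print(get_random_hex(16))  # 16 random hex characters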
6,878
def load_case ( self , testcase ) : tests = [ self . load_test ( testcase , name ) for name in self . find_test_method_names ( testcase ) ] return self . create_suite ( tests )
Load a TestSuite containing all TestCase instances for all tests in a TestCase subclass .
6,879
def load_module ( self , module ) : cases = self . get_test_cases_from_module ( module ) suites = [ self . load_case ( case ) for case in cases ] return self . create_suite ( suites )
Create and return a test suite containing all cases loaded from the provided module .
6,880
def Field ( self , field , Value = None ) : if Value == None : try : return self . __Bitmap [ field ] except KeyError : return None elif Value == 1 or Value == 0 : self . __Bitmap [ field ] = Value else : raise ValueError
Get or set a field flag in the bitmap.
6,881
def FieldData ( self , field , Value = None ) : if Value == None : try : return self . __FieldData [ field ] except KeyError : return None else : if len ( str ( Value ) ) > self . __IsoSpec . MaxLength ( field ) : raise ValueError ( 'Value length larger than field maximum ({0})' . format ( self . __IsoSpec . MaxLength ( field ) ) ) self . Field ( field , Value = 1 ) self . __FieldData [ field ] = Value
Get or set the data for a field.
6,882
def authenticated_userid ( request ) : user = getattr ( request , 'user' , None ) key = user . pk_field ( ) return getattr ( user , key )
Helper function that can be used in db_key to support self as a collection key .
6,883
def iter_blocks ( block_list ) : queue = [ ( block , 0 ) for block in block_list if isinstance ( block , kurt . Block ) ] while queue : block , depth = queue . pop ( 0 ) assert block . type . text yield block . type . text , depth , block for arg in block . args : if hasattr ( arg , '__iter__' ) : queue [ 0 : 0 ] = [ ( x , depth + 1 ) for x in arg if isinstance ( x , kurt . Block ) ] elif isinstance ( arg , kurt . Block ) : queue . append ( ( arg , depth ) )
A generator for blocks contained in a block list .
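The traversal above is essentially a breadth-first walk over nested block lists that tracks nesting depth. Here is the same idea over plain nested lists standing in for kurt.Block arguments, which may help when reading the generator:

    def iter_nested(items, depth=0):
        # Queue of (item, depth) pairs, mirroring the iter_blocks traversal.
        queue = [(item, depth) for item in items]
        while queue:
            item, depth = queue.pop(0)
            if isinstance(item, list):
                # Nested lists (like C-block bodies) are visited one level deeper,
                # and are spliced onto the front of the queue as in iter_blocks.
                queue[0:0] = [(child, depth + 1) for child in item]
            else:
                yield item, depth

    script = ['when green flag clicked', 'repeat', ['move', 'turn'], 'stop']
    print(list(iter_nested(script)))
    # [('when green flag clicked', 0), ('repeat', 0), ('move', 1), ('turn', 1), ('stop', 0)]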
6,884
def script_start_type ( script ) : if script [ 0 ] . type . text == 'when @greenFlag clicked' : return HairballPlugin . HAT_GREEN_FLAG elif script [ 0 ] . type . text == 'when I receive %s' : return HairballPlugin . HAT_WHEN_I_RECEIVE elif script [ 0 ] . type . text == 'when this sprite clicked' : return HairballPlugin . HAT_MOUSE elif script [ 0 ] . type . text == 'when %s key pressed' : return HairballPlugin . HAT_KEY else : return HairballPlugin . NO_HAT
Return the type of block the script begins with .
6,885
def get_broadcast_events ( cls , script ) : events = Counter ( ) for name , _ , block in cls . iter_blocks ( script ) : if 'broadcast %s' in name : if isinstance ( block . args [ 0 ] , kurt . Block ) : events [ True ] += 1 else : events [ block . args [ 0 ] . lower ( ) ] += 1 return events
Return a Counter of event names that were broadcast.
6,886
def tag_reachable_scripts ( cls , scratch ) : if getattr ( scratch , 'hairball_prepared' , False ) : return reachable = set ( ) untriggered_events = { } for script in cls . iter_scripts ( scratch ) : if not isinstance ( script , kurt . Comment ) : starting_type = cls . script_start_type ( script ) if starting_type == cls . NO_HAT : script . reachable = False elif starting_type == cls . HAT_WHEN_I_RECEIVE : script . reachable = False message = script [ 0 ] . args [ 0 ] . lower ( ) untriggered_events . setdefault ( message , set ( ) ) . add ( script ) else : script . reachable = True reachable . add ( script ) while reachable : for event in cls . get_broadcast_events ( reachable . pop ( ) ) : if event in untriggered_events : for script in untriggered_events . pop ( event ) : script . reachable = True reachable . add ( script ) scratch . hairball_prepared = True
Tag each script with attribute reachable .
6,887
def description ( self ) : lines = [ ] for line in self . __doc__ . split ( '\n' ) [ 2 : ] : line = line . strip ( ) if line : lines . append ( line ) return ' ' . join ( lines )
Attribute that returns the plugin description from its docstring .
6,888
def _process ( self , scratch , filename , ** kwargs ) : self . tag_reachable_scripts ( scratch ) return self . analyze ( scratch , filename = filename , ** kwargs )
Internal hook that marks reachable scripts before calling analyze .
6,889
def _format_kwargs ( func ) : formats = { } formats [ 'blk' ] = [ "blank" ] formats [ 'dft' ] = [ "default" ] formats [ 'hdr' ] = [ "header" ] formats [ 'hlp' ] = [ "help" ] formats [ 'msg' ] = [ "message" ] formats [ 'shw' ] = [ "show" ] formats [ 'vld' ] = [ "valid" ] @ wraps ( func ) def inner ( * args , ** kwargs ) : for k in formats . keys ( ) : for v in formats [ k ] : if v in kwargs : kwargs [ k ] = kwargs [ v ] kwargs . pop ( v ) return func ( * args , ** kwargs ) return inner
Decorator that renames kwargs to the short names expected by the wrapped function. Each key in the formats dictionary is an expected kwarg name, and any kwarg matching one of the alias strings in its value list is renamed to that key.
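A self-contained usage sketch of the kwarg-renaming idea: long aliases such as message= and default= are rewritten to the short names the wrapped function expects. The alias table here is trimmed to two entries for brevity.

    from functools import wraps

    def _format_kwargs(func):
        formats = {'msg': ['message'], 'dft': ['default']}
        @wraps(func)
        def inner(*args, **kwargs):
            for short, aliases in formats.items():
                for alias in aliases:
                    if alias in kwargs:
                        kwargs[short] = kwargs.pop(alias)
            return func(*args, **kwargs)
        return inner

    @_format_kwargs
    def ask(msg='Enter input', dft=None):
        return '{} [{}]'.format(msg, dft)

    print(ask(message='Proceed?', default='yes'))  # Proceed? [yes]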
6,890
def show_menu ( entries , ** kwargs ) : global _AUTO hdr = kwargs . get ( 'hdr' , "" ) note = kwargs . get ( 'note' , "" ) dft = kwargs . get ( 'dft' , "" ) fzf = kwargs . pop ( 'fzf' , True ) compact = kwargs . get ( 'compact' , False ) returns = kwargs . get ( 'returns' , "name" ) limit = kwargs . get ( 'limit' , None ) dft = kwargs . get ( 'dft' , None ) msg = [ ] if limit : return show_limit ( entries , ** kwargs ) def show_banner ( ) : banner = "-- MENU" if hdr : banner += ": " + hdr banner += " --" msg . append ( banner ) if _AUTO : return for i in entries : msg . append ( " (%s) %s" % ( i . name , i . desc ) ) valid = [ i . name for i in entries ] if type ( dft ) == int : dft = str ( dft ) if dft not in valid : dft = None if not compact : show_banner ( ) if note and not _AUTO : msg . append ( "[!] " + note ) if fzf : valid . append ( FCHR ) msg . append ( QSTR + kwargs . get ( 'msg' , "Enter menu selection" ) ) msg = os . linesep . join ( msg ) entry = None while entry not in entries : choice = ask ( msg , vld = valid , dft = dft , qstr = False ) if choice == FCHR and fzf : try : from iterfzf import iterfzf choice = iterfzf ( reversed ( [ "%s\t%s" % ( i . name , i . desc ) for i in entries ] ) ) . strip ( "\0" ) . split ( "\t" , 1 ) [ 0 ] except : warn ( "Issue encountered during fzf search." ) match = [ i for i in entries if i . name == choice ] if match : entry = match [ 0 ] if entry . func : fresult = run_func ( entry ) if "func" == returns : return fresult try : return getattr ( entry , returns ) except : return getattr ( entry , "name" )
Shows a menu with the given list of MenuEntry items .
6,891
def run_func ( entry ) : if entry . func : if entry . args and entry . krgs : return entry . func ( * entry . args , ** entry . krgs ) if entry . args : return entry . func ( * entry . args ) if entry . krgs : return entry . func ( ** entry . krgs ) return entry . func ( )
Runs the function associated with the given MenuEntry .
6,892
def enum_menu ( strs , menu = None , * args , ** kwargs ) : if not menu : menu = Menu ( * args , ** kwargs ) for s in strs : menu . enum ( s ) return menu
Enumerates the given list of strings into returned menu .
6,893
def ask ( msg = "Enter input" , fmt = None , dft = None , vld = None , shw = True , blk = False , hlp = None , qstr = True ) : global _AUTO def print_help ( ) : lst = [ v for v in vld if not callable ( v ) ] if blk : lst . remove ( "" ) for v in vld : if not callable ( v ) : continue if int == v : lst . append ( "<int>" ) elif float == v : lst . append ( "<float>" ) elif str == v : lst . append ( "<str>" ) else : lst . append ( "(" + v . __name__ + ")" ) if lst : echo ( "[HELP] Valid input: %s" % ( " | " . join ( [ str ( l ) for l in lst ] ) ) ) if hlp : echo ( "[HELP] Extra notes: " + hlp ) if blk : echo ( "[HELP] Input may be blank." ) vld = vld or [ ] hlp = hlp or "" if not hasattr ( vld , "__iter__" ) : vld = [ vld ] if not hasattr ( fmt , "__call__" ) : fmt = lambda x : x msg = "%s%s" % ( QSTR if qstr else "" , msg ) dft = fmt ( dft ) if dft != None else None if dft != None : msg += " [%s]" % ( dft if type ( dft ) is str else repr ( dft ) ) vld . append ( dft ) blk = False if vld : vld = list ( set ( [ fmt ( v ) if fmt ( v ) else v for v in vld ] ) ) if blk and "" not in vld : vld . append ( "" ) try : vld = sorted ( vld ) except : pass msg += ISTR ans = None while ans is None : get_input = _input if shw else getpass ans = get_input ( msg ) if _AUTO : echo ( ans ) if "?" == ans : print_help ( ) ans = None continue if "" == ans : if dft != None : ans = dft if not fmt else fmt ( dft ) break if "" not in vld : ans = None continue try : ans = ans if not fmt else fmt ( ans ) except : ans = None if vld : for v in vld : if type ( v ) is type and cast ( ans , v ) is not None : ans = cast ( ans , v ) break elif hasattr ( v , "__call__" ) : try : if v ( ans ) : break except : pass elif ans in vld : break else : ans = None return ans
Prompts the user for input and returns the given answer . Optionally checks if answer is valid .
6,894
def ask_yesno ( msg = "Proceed?" , dft = None ) : yes = [ "y" , "yes" , "Y" , "YES" ] no = [ "n" , "no" , "N" , "NO" ] if dft != None : dft = yes [ 0 ] if ( dft in yes or dft == True ) else no [ 0 ] return ask ( msg , dft = dft , vld = yes + no ) in yes
Prompts the user for a yes or no answer. Returns True for yes, False for no.
6,895
def ask_int ( msg = "Enter an integer" , dft = None , vld = None , hlp = None ) : vld = vld or [ int ] return ask ( msg , dft = dft , vld = vld , fmt = partial ( cast , typ = int ) , hlp = hlp )
Prompts the user for an integer .
6,896
def ask_float ( msg = "Enter a float" , dft = None , vld = None , hlp = None ) : vld = vld or [ float ] return ask ( msg , dft = dft , vld = vld , fmt = partial ( cast , typ = float ) , hlp = hlp )
Prompts the user for a float .
6,897
def ask_str ( msg = "Enter a string" , dft = None , vld = None , shw = True , blk = True , hlp = None ) : vld = vld or [ str ] return ask ( msg , dft = dft , vld = vld , shw = shw , blk = blk , hlp = hlp )
Prompts the user for a string .
6,898
def ask_captcha ( length = 4 ) : captcha = "" . join ( random . choice ( string . ascii_lowercase ) for _ in range ( length ) ) ask_str ( 'Enter the following letters, "%s"' % ( captcha ) , vld = [ captcha , captcha . upper ( ) ] , blk = False )
Prompts the user to retype a randomly generated captcha string.
6,899
def clear ( ) : if sys . platform . startswith ( "win" ) : call ( "cls" , shell = True ) else : call ( "clear" , shell = True )
Clears the console .