idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
55,000 | def print_error_to_io_stream ( err : Exception , io : TextIOBase , print_big_traceback : bool = True ) : if print_big_traceback : traceback . print_tb ( err . __traceback__ , file = io , limit = - GLOBAL_CONFIG . multiple_errors_tb_limit ) else : traceback . print_tb ( err . __traceback__ , file = io , limit = - 1 ) io . writelines ( ' ' + str ( err . __class__ . __name__ ) + ' : ' + str ( err ) ) | Utility method to print an exception s content to a stream |
55,001 | def should_hide_traceback ( e ) : if type ( e ) in { WrongTypeCreatedError , CascadeError , TypeInformationRequiredError } : return True elif type ( e ) . __name__ in { 'InvalidAttributeNameForConstructorError' , 'MissingMandatoryAttributeFiles' } : return True else : return False | Returns True if we can hide the error traceback in the warnings messages |
55,002 | def _create_parsing_plan ( self , desired_type : Type [ T ] , filesystem_object : PersistedObject , logger : Logger , log_only_last : bool = False ) -> ParsingPlan [ T ] : logger . debug ( '(B) ' + get_parsing_plan_log_str ( filesystem_object , desired_type , log_only_last = log_only_last , parser = self ) ) return CascadingParser . CascadingParsingPlan ( desired_type , filesystem_object , self , self . _parsers_list , logger = logger ) | Creates a parsing plan to parse the given filesystem object into the given desired_type . This overrides the method in AnyParser in order to provide a cascading parsing plan |
55,003 | def are_worth_chaining ( base_parser : Parser , to_type : Type [ S ] , converter : Converter [ S , T ] ) -> bool : if isinstance ( converter , ConversionChain ) : for conv in converter . _converters_list : if not Parser . are_worth_chaining ( base_parser , to_type , conv ) : return False return True else : return Parser . are_worth_chaining ( base_parser , to_type , converter ) | Utility method to check if it makes sense to chain this parser configured with the given to_type with this converter . It is an extension of ConverterChain . are_worth_chaining |
55,004 | def set_mode ( self , mode ) : _LOGGER . debug ( 'State change called from alarm device' ) if not mode : _LOGGER . info ( 'No mode supplied' ) elif mode not in CONST . ALL_MODES : _LOGGER . warning ( 'Invalid mode' ) response_object = self . _lupusec . set_mode ( CONST . MODE_TRANSLATION [ mode ] ) if response_object [ 'result' ] != 1 : _LOGGER . warning ( 'Mode setting unsuccessful' ) self . _json_state [ 'mode' ] = mode _LOGGER . info ( 'Mode set to: %s' , mode ) return True | Set Lupusec alarm mode . |
55,005 | def to_utf8 ( value ) : if isinstance ( value , unicode ) : return value . encode ( 'utf-8' ) assert isinstance ( value , str ) return value | Returns a string encoded using UTF - 8 . |
55,006 | def to_unicode ( value ) : if isinstance ( value , str ) : return value . decode ( 'utf-8' ) assert isinstance ( value , unicode ) return value | Returns a unicode string from a string using UTF - 8 to decode if needed . |
55,007 | def get_commands ( ) : commands = { } try : from percept . conf . base import settings apps = settings . INSTALLED_APPS except KeyError : apps = [ ] for app_name in apps : try : path = find_commands_module ( app_name ) commands . update ( dict ( [ ( name , app_name ) for name in find_all_commands ( path ) ] ) ) except ImportError as e : pass return commands | Get all valid commands return - all valid commands in dictionary form |
55,008 | def execute ( self ) : parser = LaxOptionParser ( usage = "%prog subcommand [options] [args]" , option_list = BaseCommand . option_list ) options , args = parser . parse_args ( self . argv ) options = handle_default_options ( options ) try : subcommand = self . argv [ 1 ] except IndexError : subcommand = 'help' if subcommand == 'help' : if len ( args ) <= 2 : parser . print_help ( ) sys . stdout . write ( self . help_text + '\n' ) else : self . fetch_command ( subcommand ) . run_from_argv ( self . argv ) | Run the command with the command line arguments |
55,009 | def help_text ( self ) : help_text = '\n' . join ( sorted ( get_commands ( ) . keys ( ) ) ) help_text = "\nCommands:\n" + help_text return help_text | Formats and prints the help text from the command list |
55,010 | def register ( self ) : register_url = self . base_url + "api/0.1.0/register" register_headers = { "apikey" : str ( self . owner_api_key ) , "resourceID" : str ( self . entity_id ) , "serviceType" : "publish,subscribe,historicData" } with self . no_ssl_verification ( ) : r = requests . get ( register_url , { } , headers = register_headers ) response = r . content . decode ( "utf-8" ) if "APIKey" in str ( r . content . decode ( "utf-8" ) ) : response = json . loads ( response [ : - 331 ] + "}" ) response [ "Registration" ] = "success" else : response = json . loads ( response ) response [ "Registration" ] = "failure" return response | Registers a new device with the name entity_id . This device has permissions for services like subscribe publish and access historical data . |
55,011 | def no_ssl_verification ( self ) : try : from functools import partialmethod except ImportError : from functools import partial class partialmethod ( partial ) : def __get__ ( self , instance , owner ) : if instance is None : return self return partial ( self . func , instance , * ( self . args or ( ) ) , ** ( self . keywords or { } ) ) old_request = requests . Session . request requests . Session . request = partialmethod ( old_request , verify = False ) warnings . filterwarnings ( 'ignore' , 'Unverified HTTPS request' ) yield warnings . resetwarnings ( ) requests . Session . request = old_request | Requests module fails due to lets encrypt ssl encryption . Will be fixed in the future release . |
55,012 | def publish ( self , data ) : if self . entity_api_key == "" : return { 'status' : 'failure' , 'response' : 'No API key found in request' } publish_url = self . base_url + "api/0.1.0/publish" publish_headers = { "apikey" : self . entity_api_key } publish_data = { "exchange" : "amq.topic" , "key" : str ( self . entity_id ) , "body" : str ( data ) } with self . no_ssl_verification ( ) : r = requests . post ( publish_url , json . dumps ( publish_data ) , headers = publish_headers ) response = dict ( ) if "No API key" in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "failure" r = json . loads ( r . content . decode ( "utf-8" ) ) [ 'message' ] elif 'publish message ok' in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "success" r = r . content . decode ( "utf-8" ) else : response [ "status" ] = "failure" r = r . content . decode ( "utf-8" ) response [ "response" ] = str ( r ) return response | This function allows an entity to publish data to the middleware . |
55,013 | def db ( self , entity , query_filters = "size=10" ) : if self . entity_api_key == "" : return { 'status' : 'failure' , 'response' : 'No API key found in request' } historic_url = self . base_url + "api/0.1.0/historicData?" + query_filters historic_headers = { "apikey" : self . entity_api_key , "Content-Type" : "application/json" } historic_query_data = json . dumps ( { "query" : { "match" : { "key" : entity } } } ) with self . no_ssl_verification ( ) : r = requests . get ( historic_url , data = historic_query_data , headers = historic_headers ) response = dict ( ) if "No API key" in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "failure" else : r = r . content . decode ( "utf-8" ) response = r return response | This function allows an entity to access the historic data . |
55,014 | def bind ( self , devices_to_bind ) : if self . entity_api_key == "" : return { 'status' : 'failure' , 'response' : 'No API key found in request' } url = self . base_url + "api/0.1.0/subscribe/bind" headers = { "apikey" : self . entity_api_key } data = { "exchange" : "amq.topic" , "keys" : devices_to_bind , "queue" : self . entity_id } with self . no_ssl_verification ( ) : r = requests . post ( url , json = data , headers = headers ) response = dict ( ) if "No API key" in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "failure" r = json . loads ( r . content . decode ( "utf-8" ) ) [ 'message' ] elif 'bind queue ok' in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "success" r = r . content . decode ( "utf-8" ) else : response [ "status" ] = "failure" r = r . content . decode ( "utf-8" ) response [ "response" ] = str ( r ) return response | This function allows an entity to list the devices to subscribe for data . This function must be called at least once before doing a subscribe . Subscribe function will listen to devices that are bound here . |
55,015 | def unbind ( self , devices_to_unbind ) : if self . entity_api_key == "" : return { 'status' : 'failure' , 'response' : 'No API key found in request' } url = self . base_url + "api/0.1.0/subscribe/unbind" headers = { "apikey" : self . entity_api_key } data = { "exchange" : "amq.topic" , "keys" : devices_to_unbind , "queue" : self . entity_id } with self . no_ssl_verification ( ) : r = requests . delete ( url , json = data , headers = headers ) print ( r ) response = dict ( ) if "No API key" in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "failure" r = json . loads ( r . content . decode ( "utf-8" ) ) [ 'message' ] elif 'unbind' in str ( r . content . decode ( "utf-8" ) ) : response [ "status" ] = "success" r = r . content . decode ( "utf-8" ) else : response [ "status" ] = "failure" r = r . content . decode ( "utf-8" ) response [ "response" ] = str ( r ) return response | This function allows an entity to unbound devices that are already bound . |
55,016 | def subscribe ( self , devices_to_bind = [ ] ) : if self . entity_api_key == "" : return { 'status' : 'failure' , 'response' : 'No API key found in request' } self . bind ( devices_to_bind ) loop = asyncio . new_event_loop ( ) t1 = threading . Thread ( target = self . start_subscribe_worker , args = ( loop , ) ) t1 . daemon = True t1 . start ( ) | This function allows an entity to subscribe for data from the devices specified in the bind operation . It creates a thread with an event loop to manager the tasks created in start_subscribe_worker . |
55,017 | def start_subscribe_worker ( self , loop ) : url = self . base_url + "api/0.1.0/subscribe" task = loop . create_task ( self . asynchronously_get_data ( url + "?name={0}" . format ( self . entity_id ) ) ) asyncio . set_event_loop ( loop ) loop . run_until_complete ( task ) self . event_loop = loop | Switch to new event loop as a thread and run until complete . |
55,018 | def stop_subscribe ( self ) : asyncio . gather ( * asyncio . Task . all_tasks ( ) ) . cancel ( ) self . event_loop . stop ( ) self . event_loop . close ( ) | This function is used to stop the event loop created when subscribe is called . But this function doesn't stop the thread and should be avoided until it's completely developed . |
55,019 | def timeuntil ( d , now = None ) : if not now : if getattr ( d , 'tzinfo' , None ) : now = datetime . datetime . now ( LocalTimezone ( d ) ) else : now = datetime . datetime . now ( ) return timesince ( now , d ) | Like timesince but returns a string measuring the time until the given time . |
55,020 | def delete_all_eggs ( self ) : path_to_delete = os . path . join ( self . egg_directory , "lib" , "python" ) if os . path . exists ( path_to_delete ) : shutil . rmtree ( path_to_delete ) | delete all the eggs in the directory specified |
55,021 | def install_egg ( self , egg_name ) : if not os . path . exists ( self . egg_directory ) : os . makedirs ( self . egg_directory ) self . requirement_set . add_requirement ( InstallRequirement . from_line ( egg_name , None ) ) try : self . requirement_set . prepare_files ( self . finder ) self . requirement_set . install ( [ '--prefix=' + self . egg_directory ] , [ ] ) except DistributionNotFound : self . requirement_set . requirements . _keys . remove ( egg_name ) raise PipException ( ) | Install an egg into the egg directory |
55,022 | def chunks ( cls , iterable , n , fill = None ) : return cls ( itertools . zip_longest ( * [ iter ( iterable ) ] * n , fillvalue = fill ) ) | Collects elements in fixed - length chunks . |
55,023 | def partition ( cls , iterable , pred ) : t1 , t2 = itertools . tee ( iterable ) return cls ( itertools . filterfalse ( pred , t1 ) , filter ( pred , t2 ) ) | Use a predicate to partition items into false and true entries . |
55,024 | def count ( cls , iterable ) : iterable = iter ( iterable ) count = 0 while True : try : next ( iterable ) except StopIteration : break count += 1 return count | Returns the number of items in an iterable . |
55,025 | def column_names ( self , table ) : table_info = self . execute ( u'PRAGMA table_info(%s)' % quote ( table ) ) return ( column [ 'name' ] for column in table_info ) | An iterable of column names for a particular table or view . |
55,026 | def execute ( self , sql , * args , ** kwargs ) : try : self . cursor . execute ( sql , * args ) except self . sqlite3 . InterfaceError , msg : raise self . sqlite3 . InterfaceError ( unicode ( msg ) + '\nTry converting types or pickling.' ) rows = self . cursor . fetchall ( ) self . __commit_if_necessary ( kwargs ) if None == self . cursor . description : return None else : colnames = [ d [ 0 ] . decode ( 'utf-8' ) for d in self . cursor . description ] rawdata = [ OrderedDict ( zip ( colnames , row ) ) for row in rows ] return rawdata | Run raw SQL on the database and receive relaxing output . This is sort of the foundational method that most of the others build on . |
55,027 | def create_table ( self , data , table_name , error_if_exists = False , ** kwargs ) : 'Create a table based on the data, but don\'t insert anything.' converted_data = convert ( data ) if len ( converted_data ) == 0 or converted_data [ 0 ] == [ ] : raise ValueError ( u'You passed no sample values, or all the values you passed were null.' ) else : startdata = OrderedDict ( converted_data [ 0 ] ) for k , v in startdata . items ( ) : if v != None : break else : v = None if_not_exists = u'' if error_if_exists else u'IF NOT EXISTS' if v != None : try : sql = u % ( if_not_exists , quote ( table_name ) , quote ( k ) , get_column_type ( startdata [ k ] ) ) self . execute ( sql , commit = False ) except : raise else : self . commit ( ) for row in converted_data : self . __check_and_add_columns ( table_name , row ) | Create a table based on the data but don \ t insert anything . |
55,028 | def get_var ( self , key ) : 'Retrieve one saved variable from the database.' vt = quote ( self . __vars_table ) data = self . execute ( u'SELECT * FROM %s WHERE `key` = ?' % vt , [ key ] , commit = False ) if data == [ ] : raise NameError ( u'The DumpTruck variables table doesn\'t have a value for %s.' % key ) else : tmp = quote ( self . __vars_table_tmp ) row = data [ 0 ] self . execute ( u'DROP TABLE IF EXISTS %s' % tmp , commit = False ) self . execute ( u'CREATE TEMPORARY TABLE %s (`value` %s)' % ( tmp , row [ 'type' ] ) , commit = False ) self . execute ( u'INSERT INTO %s (`value`) VALUES (?)' % tmp , [ row [ 'value' ] ] , commit = False ) value = self . dump ( tmp ) [ 0 ] [ 'value' ] self . execute ( u'DROP TABLE %s' % tmp , commit = False ) return value | Retrieve one saved variable from the database . |
55,029 | def save_var ( self , key , value , ** kwargs ) : 'Save one variable to the database.' self . __check_or_create_vars_table ( ) column_type = get_column_type ( value ) tmp = quote ( self . __vars_table_tmp ) self . execute ( u'DROP TABLE IF EXISTS %s' % tmp , commit = False ) self . execute ( u'CREATE TABLE %s (`value` %s)' % ( tmp , column_type ) , commit = False ) self . execute ( u'INSERT INTO %s (`value`) VALUES (?)' % tmp , [ value ] , commit = False ) table = ( quote ( self . __vars_table ) , tmp ) params = [ key , column_type ] self . execute ( u % table , params ) self . execute ( u'DROP TABLE %s' % tmp , commit = False ) self . __commit_if_necessary ( kwargs ) | Save one variable to the database . |
55,030 | def drop ( self , table_name = 'dumptruck' , if_exists = False , ** kwargs ) : 'Drop a table.' return self . execute ( u'DROP TABLE %s %s;' % ( 'IF EXISTS' if if_exists else '' , quote ( table_name ) ) , ** kwargs ) | Drop a table . |
55,031 | def __install_perforce ( self , config ) : if not system . is_64_bit ( ) : self . logger . warn ( "Perforce formula is only designed for 64 bit systems! Not install executables..." ) return False version = config . get ( 'version' , 'r13.2' ) key = 'osx' if system . is_osx ( ) else 'linux' perforce_packages = package_dict [ version ] [ key ] d = self . directory . install_directory ( self . feature_name ) if not os . path . exists ( d ) : os . makedirs ( d ) self . logger . info ( "Downloading p4 executable..." ) with open ( os . path . join ( d , "p4" ) , 'wb+' ) as fh : fh . write ( lib . cleaned_request ( 'get' , url_prefix + perforce_packages [ 'p4' ] ) . content ) self . directory . symlink_to_bin ( "p4" , os . path . join ( d , "p4" ) ) self . p4_command = os . path . join ( d , "p4" ) self . logger . info ( "Installing p4v..." ) if system . is_osx ( ) : return self . _install_p4v_osx ( url_prefix + perforce_packages [ 'p4v' ] ) else : return self . _install_p4v_linux ( url_prefix + perforce_packages [ 'p4v' ] ) | install perforce binary |
55,032 | def _install_p4v_osx ( self , url , overwrite = False ) : package_exists = False root_dir = os . path . expanduser ( os . path . join ( "~" , "Applications" ) ) package_exists = len ( [ x for x in P4V_APPLICATIONS if os . path . exists ( os . path . join ( root_dir , x ) ) ] ) if not package_exists or overwrite : lib . extract_dmg ( url , root_dir ) else : self . logger . warn ( "P4V exists already in %s! Not overwriting..." % root_dir ) return True | Install perforce applications and binaries for mac |
55,033 | def _install_p4v_linux ( self , url ) : lib . extract_targz ( url , self . directory . install_directory ( self . feature_name ) , remove_common_prefix = True ) bin_path = os . path . join ( self . directory . install_directory ( self . feature_name ) , 'bin' ) if os . path . exists ( bin_path ) : for f in os . listdir ( bin_path ) : self . directory . symlink_to_bin ( f , os . path . join ( bin_path , f ) ) return True | Install perforce applications and binaries for linux |
55,034 | def __write_p4settings ( self , config ) : self . logger . info ( "Writing p4settings..." ) root_dir = os . path . expanduser ( config . get ( 'root_path' ) ) p4settings_path = os . path . join ( root_dir , ".p4settings" ) if os . path . exists ( p4settings_path ) : if self . target . get ( 'overwrite_p4settings' , False ) : self . logger . info ( "Overwriting existing p4settings..." ) os . remove ( p4settings_path ) else : return with open ( p4settings_path , "w+" ) as p4settings_file : p4settings_file . write ( p4settings_template % config . to_dict ( ) ) if config . get ( 'write_password_p4settings' , 'no' ) : p4settings_file . write ( "\nP4PASSWD=%s" % config [ 'password' ] ) | write perforce settings |
55,035 | def __configure_client ( self , config ) : self . logger . info ( "Configuring p4 client..." ) client_dict = config . to_dict ( ) client_dict [ 'root_path' ] = os . path . expanduser ( config . get ( 'root_path' ) ) os . chdir ( client_dict [ 'root_path' ] ) client_dict [ 'hostname' ] = system . NODE client_dict [ 'p4view' ] = config [ 'p4view' ] % self . environment . target . get_context_dict ( ) client = re . sub ( '//depot' , ' //depot' , p4client_template % client_dict ) self . logger . info ( lib . call ( "%s client -i" % self . p4_command , stdin = client , env = self . p4environ , cwd = client_dict [ 'root_path' ] ) ) | write the perforce client |
55,036 | def __install_eggs ( self , config ) : egg_carton = ( self . directory . install_directory ( self . feature_name ) , 'requirements.txt' ) eggs = self . __gather_eggs ( config ) self . logger . debug ( "Installing eggs %s..." % eggs ) self . __load_carton ( egg_carton , eggs ) self . __prepare_eggs ( egg_carton , config ) | Install eggs for a particular configuration |
55,037 | def __add_paths ( self , config ) : bin_path = os . path . join ( self . directory . install_directory ( self . feature_name ) , 'bin' ) whitelist_executables = self . _get_whitelisted_executables ( config ) for f in os . listdir ( bin_path ) : for pattern in BLACKLISTED_EXECUTABLES : if re . match ( pattern , f ) : continue if whitelist_executables and f not in whitelist_executables : continue self . directory . symlink_to_bin ( f , os . path . join ( bin_path , f ) ) | add the proper resources into the environment |
55,038 | def amended_commits ( commits ) : amended_sha1s = [ ] for message in commits . values ( ) : amended_sha1s . extend ( re . findall ( r'AMENDS\s([0-f]+)' , message ) ) return amended_sha1s | Return those git commit sha1s that have been amended later . |
55,039 | def enrich_git_log_dict ( messages , labels ) : for commit_sha1 , message in messages . items ( ) : component = None title = message . split ( '\n' ) [ 0 ] try : component , title = title . split ( ":" , 1 ) component = component . strip ( ) except ValueError : pass paragraphs = [ analyse_body_paragraph ( p , labels ) for p in message . split ( '\n\n' ) ] yield { 'sha1' : commit_sha1 , 'component' : component , 'title' : title . strip ( ) , 'tickets' : re . findall ( r'\s(#\d+)' , message ) , 'paragraphs' : [ ( label , remove_ticket_directives ( message ) ) for label , message in paragraphs ] , } | Enrich git log with related information on tickets . |
55,040 | def release ( obj , commit = 'HEAD' , components = False ) : options = obj . options repository = obj . repository try : sha = 'oid' commits = _pygit2_commits ( commit , repository ) except ImportError : try : sha = 'hexsha' commits = _git_commits ( commit , repository ) except ImportError : click . echo ( 'To use this feature, please install pygit2. ' 'GitPython will also work but is not recommended ' '(python <= 2.7 only).' , file = sys . stderr ) return 2 messages = OrderedDict ( [ ( getattr ( c , sha ) , c . message ) for c in commits ] ) for commit_sha1 in amended_commits ( messages ) : if commit_sha1 in messages : del messages [ commit_sha1 ] full_messages = list ( enrich_git_log_dict ( messages , options . get ( 'commit_msg_labels' ) ) ) indent = ' ' if components else '' wrapper = textwrap . TextWrapper ( width = 70 , initial_indent = indent + '- ' , subsequent_indent = indent + ' ' , ) for label , section in options . get ( 'commit_msg_labels' ) : if section is None : continue bullets = [ ] for commit in full_messages : bullets += [ { 'text' : bullet , 'component' : commit [ 'component' ] } for lbl , bullet in commit [ 'paragraphs' ] if lbl == label and bullet is not None ] if len ( bullets ) > 0 : click . echo ( section ) click . echo ( '~' * len ( section ) ) click . echo ( ) if components : def key ( cmt ) : return cmt [ 'component' ] for component , bullets in itertools . groupby ( sorted ( bullets , key = key ) , key ) : bullets = list ( bullets ) if len ( bullets ) > 0 : click . echo ( '+ {}' . format ( component ) ) click . echo ( ) for bullet in bullets : click . echo ( wrapper . fill ( bullet [ 'text' ] ) ) click . echo ( ) else : for bullet in bullets : click . echo ( wrapper . fill ( bullet [ 'text' ] ) ) click . echo ( ) return 0 | Generate release notes . |
55,041 | def redirect ( view = None , url = None , ** kwargs ) : if view : if url : kwargs [ "url" ] = url url = flask . url_for ( view , ** kwargs ) current_context . exit ( flask . redirect ( url ) ) | Redirects to the specified view or url |
55,042 | def statsd_metric ( name , count , elapsed ) : with statsd . pipeline ( ) as pipe : pipe . incr ( name , count ) pipe . timing ( name , int ( round ( 1000 * elapsed ) ) ) | Metric that records to statsd & graphite |
55,043 | def compile_file ( env , src_path , dst_path , encoding = 'utf-8' , base_dir = '' ) : src_file = file ( src_path , 'r' ) source = src_file . read ( ) . decode ( encoding ) name = src_path . replace ( base_dir , '' ) raw = env . compile ( source , name = name , filename = name , raw = True ) src_file . close ( ) dst_file = open ( dst_path , 'w' ) dst_file . write ( raw ) dst_file . close ( ) | Compiles a Jinja2 template to python code . |
55,044 | def _pre_dump ( cls ) : shutil . rmtree ( cls . outdir , ignore_errors = True ) os . makedirs ( cls . outdir ) super ( PlotMetric , cls ) . _pre_dump ( ) | Output all recorded stats |
55,045 | def _histogram ( self , which , mu , sigma , data ) : weights = np . ones_like ( data ) / len ( data ) n , bins , patches = plt . hist ( data , bins = 25 , weights = weights , facecolor = 'blue' , alpha = 0.5 ) plt . title ( r'%s %s: $\mu=%.2f$, $\sigma=%.2f$' % ( self . name , which . capitalize ( ) , mu , sigma ) ) plt . xlabel ( 'Items' if which == 'count' else 'Seconds' ) plt . ylabel ( 'Frequency' ) plt . gca ( ) . yaxis . set_major_formatter ( FuncFormatter ( lambda y , position : "{:.1f}%" . format ( y * 100 ) ) ) | plot a histogram . For internal use only |
55,046 | def _scatter ( self ) : plt . scatter ( self . count_arr , self . elapsed_arr ) plt . title ( '{}: Count vs. Elapsed' . format ( self . name ) ) plt . xlabel ( 'Items' ) plt . ylabel ( 'Seconds' ) | plot a scatter plot of count vs . elapsed . For internal use only |
55,047 | def bind ( self , __fun , * args , ** kwargs ) : with self . _lock : if self . _running or self . _completed or self . _cancelled : raise RuntimeError ( 'Future object can not be reused' ) if self . _worker : raise RuntimeError ( 'Future object is already bound' ) self . _worker = functools . partial ( __fun , * args , ** kwargs ) return self | Bind a worker function to the future . This worker function will be executed when the future is executed . |
55,048 | def set_result ( self , result ) : with self . _lock : if self . _enqueued : raise RuntimeError ( 'can not set result of enqueued Future' ) self . _result = result self . _completed = True callbacks = self . _prepare_done_callbacks ( ) callbacks ( ) | Allows you to set the result of the future without requiring the future to actually be executed . This can be used if the result is available before the future is run allowing you to keep the future as the interface for retrieving the result data . |
55,049 | def set_exception ( self , exc_info ) : if not isinstance ( exc_info , tuple ) : if not isinstance ( exc_info , BaseException ) : raise TypeError ( 'expected BaseException instance' ) try : raise exc_info except : exc_info = sys . exc_info ( ) exc_info = ( exc_info [ 0 ] , exc_info [ 1 ] , exc_info [ 2 ] ) with self . _lock : if self . _enqueued : raise RuntimeError ( 'can not set exception of enqueued Future' ) self . _exc_info = exc_info self . _completed = True callbacks = self . _prepare_done_callbacks ( ) callbacks ( ) | This method allows you to set an exception in the future without requring that exception to be raised from the futures worker . This method can be called on an unbound future . |
55,050 | def enqueue ( self , future ) : future . enqueue ( ) with self . _lock : if self . _shutdown : raise RuntimeError ( 'ThreadPool has been shut down and can no ' 'longer accept futures.' ) self . _queue . append ( future ) if len ( self . _running ) == len ( self . _workers ) : self . _new_worker ( ) self . _lock . notify_all ( ) | Enqueue a future to be processed by one of the threads in the pool . The future must be bound to a worker and not have been started yet . |
55,051 | def submit ( self , __fun , * args , ** kwargs ) : future = Future ( ) . bind ( __fun , * args , ** kwargs ) self . enqueue ( future ) return future | Creates a new future and enqueues it . Returns the future . |
55,052 | def timeit ( func ) : @ wraps ( func ) def timer_wrapper ( * args , ** kwargs ) : with Timer ( ) as timer : result = func ( * args , ** kwargs ) return result , timer return timer_wrapper | Returns the number of seconds that a function took along with the result |
55,053 | def timeout ( seconds ) : def _timeout_error ( signal , frame ) : raise TimeoutError ( "Operation did not finish within \ {} seconds" . format ( seconds ) ) def timeout_decorator ( func ) : @ wraps ( func ) def timeout_wrapper ( * args , ** kwargs ) : signal . signal ( signal . SIGALRM , _timeout_error ) signal . alarm ( seconds ) try : return func ( * args , ** kwargs ) finally : signal . alarm ( 0 ) return timeout_wrapper return timeout_decorator | Raises a TimeoutError if a function does not terminate within specified seconds . |
55,054 | def create_persisted_object ( self , location : str , logger : Logger ) -> PersistedObject : logger . debug ( 'Checking all files under [{loc}]' . format ( loc = location ) ) obj = FileMappingConfiguration . RecursivePersistedObject ( location = location , file_mapping_conf = self , logger = logger ) logger . debug ( 'File checks done' ) return obj | Creates a PersistedObject representing the object at location location and recursively creates all of its children |
55,055 | def is_multifile_object_without_children ( self , location : str ) -> bool : return isdir ( location ) and len ( self . find_multifile_object_children ( location ) ) == 0 | Returns True if an item with this location is present as a multifile object without children . For this implementation this means that there is a folder without any files in it |
55,056 | def get_multifile_object_child_location ( self , parent_item_prefix : str , child_name : str ) -> str : check_var ( parent_item_prefix , var_types = str , var_name = 'parent_item_prefix' ) check_var ( child_name , var_types = str , var_name = 'item_name' ) if not isdir ( parent_item_prefix ) : raise ValueError ( 'Cannot get attribute item in non-flat mode, parent item path is not a folder : ' + parent_item_prefix ) return join ( parent_item_prefix , child_name ) | Implementation of the parent abstract method . In this mode the attribute is a file inside the parent object folder |
55,057 | def is_multifile_object_without_children ( self , location : str ) -> bool : if isdir ( location ) : return len ( self . find_multifile_object_children ( location ) ) == 0 else : if exists ( location ) : return True else : return False | Returns True if an item with this location is present as a multifile object without children . For this implementation this means that there is a file with the appropriate name but without extension |
55,058 | def special_links_replace ( text , urls ) : match_number = r'([A-Za-z0-9]+)' + r'(\+*)' reference_list = [ ( r'regulations:article:' + match_number , urls [ 'regulations' ] ) , ( r'regulations:regulation:' + match_number , urls [ 'regulations' ] ) , ( r'guidelines:article:' + match_number , urls [ 'guidelines' ] ) , ( r'guidelines:guideline:' + match_number , urls [ 'guidelines' ] ) , ] anchor_list = [ ( r'regulations:contents' , urls [ 'regulations' ] + r'#contents' ) , ( r'guidelines:contents' , urls [ 'guidelines' ] + r'#contents' ) , ( r'regulations:top' , urls [ 'regulations' ] + r'#' ) , ( r'guidelines:top' , urls [ 'guidelines' ] + r'#' ) , ( r'link:pdf' , urls [ 'pdf' ] + '.pdf' ) , ] retval = text for match , repl in reference_list : retval = re . sub ( match , repl + r'#\1\2' , retval ) for match , repl in anchor_list : retval = re . sub ( match , repl , retval ) return retval | Replace simplified Regulations and Guidelines links into actual links . urls dictionary is expected to provide actual links to the targeted Regulations and Guidelines as well as to the PDF file . |
55,059 | def link2html ( text ) : match = r'\[([^\]]+)\]\(([^)]+)\)' replace = r'<a href="\2">\1</a>' return re . sub ( match , replace , text ) | Turns md links to html |
55,060 | def simple_md2html ( text , urls ) : retval = special_links_replace ( text , urls ) retval = re . sub ( r'\n\n' , r'</p><p>' , retval ) retval = re . sub ( r'\n' , r'<br />\n' , retval ) retval = re . sub ( r'"' , r'"' , retval ) retval = list2html ( retval ) return link2html ( retval ) | Convert a text from md to html |
def generate_ul(self, a_list):
    """Return True when *a_list* should be wrapped in a ``<ul>``.

    That is the case when the list is non-empty and its first element is a
    Rule or a LabelDecl.
    """
    # A single isinstance call with a tuple replaces the two chained
    # isinstance checks (idiomatic, one attribute lookup).
    return len(a_list) > 0 and isinstance(a_list[0], (Rule, LabelDecl))
def get_version_info():
    """Execute refcycle/version.py in a fresh namespace and return that
    namespace as a dictionary of version information."""
    namespace = {}
    with open(os.path.join("refcycle", "version.py"), 'r') as f:
        code = compile(f.read(), "version.py", 'exec')
        exec(code, namespace)
    return namespace
def div_filter(key: str, value: list, format: str, meta: Any) -> Optional[list]:
    """Filter the JSON value for alert divs.

    For ``Div`` elements in latex output carrying a ``*-<type>`` class,
    returns the contents wrapped in the matching LaTeX alert-box
    environment; returns None to leave the element untouched.
    """
    if key != "Div" or format != "latex":
        return None
    [[_, classes, _], contents] = value
    # The alert type is the suffix of the first hyphenated class name.
    hyphen_suffixes = [name.split("-")[1] for name in classes if "-" in name]
    if not hyphen_suffixes:
        return None
    alert_type = hyphen_suffixes[0]
    if alert_type not in ALLOWED_ALERT_TYPES.__members__:
        return None
    begin = RawBlock("latex", rf"\begin{{{alert_type}box}}")
    end = RawBlock("latex", rf"\end{{{alert_type}box}}")
    return [begin, *contents, end]
def convert_div(text: str, format: Optional[str] = None) -> "applyJSONFilters":
    """Apply the div_filter action to *text*."""
    filters = [div_filter]
    return applyJSONFilters(filters, text, format=format)
def raw_html_filter(key: str, value: list, format: str, meta: Any) -> Optional[list]:
    """Filter the JSON value for raw inline HTML to convert to LaTeX.

    Maps ``<sup>``/``</sup>`` and ``<sub>``/``</sub>`` tags onto the
    corresponding LaTeX commands; anything else is left untouched (None).
    """
    # Dispatch table replaces the if/elif chain; same four mappings.
    tag_to_latex = {
        "<sup>": r"\textsuperscript{",
        "</sup>": "}",
        "<sub>": r"\textsubscript{",
        "</sub>": "}",
    }
    if key == "RawInline" and format == "latex" and value[0] == "html":
        latex = tag_to_latex.get(value[1])
        if latex is not None:
            return [RawInline("latex", latex)]
    return None
def convert_raw_html(text: str, format: Optional[str] = None) -> "applyJSONFilters":
    """Apply the raw_html_filter action to *text*."""
    filters = [raw_html_filter]
    return applyJSONFilters(filters, text, format=format)
def add(self, element):
    """Add *element* to this set.

    Elements are keyed by their transform; the first element seen for a
    given key wins and later duplicates are ignored.
    """
    # setdefault only inserts when the key is absent — same first-wins
    # semantics as the original membership check plus assignment.
    self._elements.setdefault(self._transform(element), element)
def add_items_to_message(msg, log_dict):
    """Return *msg* with each *log_dict* item appended as '' key=value''."""
    suffix = "".join(" {}={}".format(key, value) for key, value in log_dict.items())
    return msg + suffix
def metric(cls, name, count, elapsed):
    """A metric function that writes multiple CSV files, one per metric name.

    :param name: metric name; None is ignored with a warning
    :param count: item count to record
    :param elapsed: elapsed seconds, recorded with 6-decimal precision
    """
    if name is None:
        warnings.warn("Ignoring unnamed metric", stacklevel=3)
        return
    with cls.lock:
        # First metric ever recorded: start from a clean output directory
        # and (optionally) register the dump hook to run at interpreter exit.
        if not cls.instances:
            shutil.rmtree(cls.outdir, ignore_errors=True)
            os.makedirs(cls.outdir)
            if cls.dump_atexit:
                atexit.register(cls.dump)
        try:
            self = cls.instances[name]
        except KeyError:
            # Lazily create one writer instance per metric name.
            self = cls.instances[name] = cls(name)
        self.writer.writerow((count, "%f" % elapsed))
def metric(self, name, count, elapsed):
    """A metric function that appends one row per call to a single CSV file.

    :param name: metric name; None is ignored with a warning
    :param count: item count to record
    :param elapsed: elapsed seconds, recorded with 6-decimal precision
    """
    if name is None:
        warnings.warn("Ignoring unnamed metric", stacklevel=3)
        return
    row = (name, count, "%f" % elapsed)
    # Serialize writes: the underlying csv writer is not thread-safe.
    with self.lock:
        self.writer.writerow(row)
def read(parts):
    """Build an absolute path from the *parts* sequence (relative to this
    file's directory) and return the contents of the resulting file,
    decoded as UTF-8."""
    here = os.path.abspath(os.path.dirname(__file__))
    target = os.path.join(here, *parts)
    with codecs.open(target, "rb", "utf-8") as f:
        return f.read()
def ensure_clean_git(operation='operation'):
    """Exit the process unless the git working tree has no uncommitted changes.

    :param operation: human-readable name used in the abort message
    """
    # git diff-index returns non-zero when there are staged/unstaged changes.
    dirty = os.system('git diff-index --quiet HEAD --')
    if dirty:
        message = "Unstaged or uncommitted changes detected. {} aborted."
        print(message.format(operation.capitalize()))
        sys.exit()
def hasmethod(obj, meth):
    """Return True if *obj* has a callable attribute (method) named *meth*."""
    # getattr with a default collapses the hasattr/getattr pair into one
    # lookup; None is never callable, so a missing attribute yields False.
    return callable(getattr(obj, meth, None))
def hasvar(obj, var):
    """Return True if *obj* has a non-callable attribute (variable) named
    *var*; False when the attribute is missing or callable."""
    # The hasattr guard is required: a missing attribute must be False,
    # which a getattr-with-default would misreport.
    return hasattr(obj, var) and not callable(getattr(obj, var))
def getmethattr(obj, meth):
    """Return the result of invoking method *meth*, the value of variable
    *meth*, or None if *obj* has no such attribute."""
    if hasmethod(obj, meth):
        return getattr(obj, meth)()
    if hasvar(obj, meth):
        return getattr(obj, meth)
    return None
def assure_obj_child_dict(obj, var):
    """Ensure obj[var] exists and is a dict, resetting it to {} otherwise.

    :returns: *obj*, for convenient chaining.
    """
    # isinstance replaces the exact type(...) comparison so dict subclasses
    # (e.g. OrderedDict) are kept instead of being clobbered; 'not in' is
    # the idiomatic membership test.
    if var not in obj or not isinstance(obj[var], dict):
        obj[var] = {}
    return obj
def warmup(f):
    """Decorator: call self.warmup() before running *f* unless the instance
    is already warmed up."""
    @wraps(f)
    def inner(self, *args, **kwargs):
        if not self.warmed_up:
            self.warmup()
        return f(self, *args, **kwargs)
    return inner
def install_required(f):
    """Decorator: raise an exception if the namespace is not yet installed
    (its directory is still new) before running *f*."""
    @wraps(f)
    def inner(self, *args, **kwargs):
        if self.directory.new:
            raise SprinterException("Namespace %s is not yet installed!" % self.namespace)
        return f(self, *args, **kwargs)
    return inner
def install(self):
    """Install the environment.

    If the namespace directory already exists, the call falls through to
    update() instead. On failure the partial installation is rolled back
    and the original exception re-raised, unless self.ignore_errors is set.
    """
    self.phase = PHASE.INSTALL
    if not self.directory.new:
        # Already installed: reload the persisted manifest and update.
        self.logger.info("Namespace %s directory already exists!" % self.namespace)
        self.source = load_manifest(self.directory.manifest_path)
        return self.update()
    try:
        self.logger.info("Installing environment %s..." % self.namespace)
        self.directory.initialize()
        self.install_sandboxes()
        self.instantiate_features()
        self.grab_inputs()
        self._specialize()
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        self.logger.info("An error occured during installation!")
        if not self.ignore_errors:
            # Roll back the partial install before re-raising.
            self.clear_all()
            self.logger.info("Removing installation %s..." % self.namespace)
            self.directory.remove()
            et, ei, tb = sys.exc_info()
            reraise(et, ei, tb)
def update(self, reconfigure=False):
    """Update the environment.

    :param reconfigure: when True, re-prompt for all inputs instead of
        copying the existing source configuration over to the target.
    Any exception is logged at debug level and then re-raised.
    """
    try:
        self.phase = PHASE.UPDATE
        self.logger.info("Updating environment %s..." % self.namespace)
        self.install_sandboxes()
        self.instantiate_features()
        if reconfigure:
            self.grab_inputs(reconfigure=True)
        else:
            self._copy_source_to_target()
        self._specialize(reconfigure=reconfigure)
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
def remove(self):
    """Remove the environment.

    Runs each feature's 'sync' action best-effort, clears injected
    configuration, deletes the namespace directory and commits the
    injection removal. A warning is emitted if any error occurred.
    Any exception is logged at debug level and then re-raised.
    """
    try:
        self.phase = PHASE.REMOVE
        self.logger.info("Removing environment %s..." % self.namespace)
        self.instantiate_features()
        self._specialize()
        for feature in self.features.run_order:
            try:
                self.run_action(feature, 'sync')
            except FormulaException:
                # Best-effort removal: a failing formula must not stop it.
                pass
        self.clear_all()
        self.directory.remove()
        self.injections.commit()
        if self.error_occured:
            self.logger.error(warning_template)
            self.logger.error(REMOVE_WARNING)
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
def deactivate(self):
    """Deactivate the environment.

    Runs each feature's 'deactivate' action, clears injected configuration
    and finalizes. Any exception is logged at debug level and re-raised.
    """
    try:
        self.phase = PHASE.DEACTIVATE
        self.logger.info("Deactivating environment %s..." % self.namespace)
        # Do not rewrite the rc configuration while deactivating.
        self.directory.rewrite_config = False
        self.instantiate_features()
        self._specialize()
        for feature in self.features.run_order:
            self.logger.info("Deactivating %s..." % feature[0])
            self.run_action(feature, 'deactivate')
        self.clear_all()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
def validate(self):
    """Validate the target environment.

    Builds the per-section context (install root dirs, config root, node)
    for the target manifest, then runs every feature's 'validate' action —
    including features that have already recorded errors.
    """
    self.phase = PHASE.VALIDATE
    self.logger.info("Validating %s..." % self.namespace)
    self.instantiate_features()
    context_dict = {}
    if self.target:
        for s in self.target.formula_sections():
            context_dict["%s:root_dir" % s] = self.directory.install_directory(s)
        context_dict['config:root_dir'] = self.directory.root_dir
        context_dict['config:node'] = system.NODE
        self.target.add_additional_context(context_dict)
    for feature in self.features.run_order:
        self.run_action(feature, 'validate', run_if_error=True)
def clear_all(self):
    """Clear every injection, including those placed in the user's home
    config files."""
    self.injections.clear_all()
    for name in CONFIG_FILES:
        target = os.path.join("~", name)
        self.injections.clear(target)
def write_debug_log(self, file_path):
    """Write the debug log to *file_path*.

    The file receives the system info, the captured debug stream, and all
    recorded errors (global and per-feature), all UTF-8 encoded.
    """
    with open(file_path, "wb+") as fh:
        def write(text):
            # Single encoding point for every piece of output.
            fh.write(text.encode('utf-8'))
        write(system.get_system_info())
        self._debug_stream.seek(0)
        write(self._debug_stream.read())
        write("The following errors occured:\n")
        for error in self._errors:
            write(error + "\n")
        for key, messages in self._error_dict.items():
            if len(messages) > 0:
                write("Error(s) in %s with formula %s:\n" % key)
                for error in messages:
                    write(error + "\n")
def write_manifest(self):
    """Persist the main manifest to the namespace's manifest file.

    No-op when the manifest file does not exist yet.
    """
    if os.path.exists(self.directory.manifest_path):
        # 'with' fixes the original file-handle leak: the handle returned
        # by open() was never closed.
        with open(self.directory.manifest_path, "w+") as fh:
            self.main_manifest.write(fh)
def message_failure(self):
    """Return the configured failure message, or None when no manifest is
    loaded or no message is configured."""
    manifest = self.main_manifest
    if not isinstance(manifest, Manifest):
        return None
    return manifest.get('config', 'message_failure', default=None)
def warmup(self):
    """Initialize the variables necessary to perform a sprinter action.

    Loads source/target manifests (when given as paths/URLs), resolves the
    namespace, sets up the directory and injection handlers, and prepends
    the environment's bin path to PATH. Sets self.warmed_up on success.

    :raises SprinterException: on bad manifest credentials, or when no
        environment name can be determined.
    """
    self.logger.debug("Warming up...")
    try:
        # Accept either Manifest instances or loadable paths/URLs.
        if not isinstance(self.source, Manifest) and self.source:
            self.source = load_manifest(self.source)
        if not isinstance(self.target, Manifest) and self.target:
            self.target = load_manifest(self.target)
        self.main_manifest = self.target or self.source
    except lib.BadCredentialsException:
        e = sys.exc_info()[1]
        self.logger.error(str(e))
        raise SprinterException("Fatal error! Bad credentials to grab manifest!")
    if not getattr(self, 'namespace', None):
        # Prefer the target's namespace, fall back to the source's.
        if self.target:
            self.namespace = self.target.namespace
        elif not self.namespace and self.source:
            self.namespace = self.source.namespace
        else:
            raise SprinterException("No environment name has been specified!")
    self.directory_root = self.custom_directory_root
    if not self.directory:
        if not self.directory_root:
            self.directory_root = os.path.join(self.root, self.namespace)
        self.directory = Directory(self.directory_root,
                                   shell_util_path=self.shell_util_path)
    if not self.injections:
        self.injections = Injections(wrapper="%s_%s" % (self.sprinter_namespace.upper(),
                                                        self.namespace),
                                     override="SPRINTER_OVERRIDES")
    if not self.global_injections:
        self.global_injections = Injections(wrapper="%s" % self.sprinter_namespace.upper() + "GLOBALS",
                                            override="SPRINTER_OVERRIDES")
    # Make the environment's binaries take precedence for this process.
    os.environ['PATH'] = self.directory.bin_path() + ":" + os.environ['PATH']
    self.warmed_up = True
def _inject_config_source(self, source_filename, files_to_inject):
    """Inject existing environmental config with namespace sourcing.

    Tries each candidate in *files_to_inject* and injects the source line
    into the first one that already exists; when none exist, the first
    candidate is created instead (for/else).

    :returns: tuple of the config file name and its expanded path.
    """
    # Guarded shell line: only sources the file when it is readable.
    src_exec = "[ -r {0} ] && . {0}".format(os.path.join(self.directory.root_dir,
                                                         source_filename))
    for config_file in files_to_inject:
        config_path = os.path.expanduser(os.path.join("~", config_file))
        if os.path.exists(config_path):
            self.injections.inject(config_path, src_exec)
            break
    else:
        # No existing config file found: create the first candidate.
        config_file = files_to_inject[0]
        config_path = os.path.expanduser(os.path.join("~", config_file))
        self.logger.info("No config files found to source %s, creating ~/%s!"
                         % (source_filename, config_file))
        self.injections.inject(config_path, src_exec)
    return (config_file, config_path)
def _finalize(self):
    """Command to run at the end of sprinter's run.

    Persists the manifest, updates rc/env files, commits all injections,
    writes the shared shell utility file and reports success or failure.

    :raises SprinterException: if any feature reported an error.
    """
    self.logger.info("Finalizing...")
    self.write_manifest()
    if self.directory.rewrite_config:
        # Touch the rc file so it gets regenerated.
        self.directory.add_to_rc('')
        # Prepend global package-manager paths when configured (OSX only).
        if system.is_osx() and self.main_manifest.is_affirmative('config', 'use_global_packagemanagers'):
            self.directory.add_to_env('__sprinter_prepend_path "%s" PATH' % '/usr/local/bin')
        self.directory.add_to_env('__sprinter_prepend_path "%s" PATH' % self.directory.bin_path())
        self.directory.add_to_env('__sprinter_prepend_path "%s" LIBRARY_PATH' % self.directory.lib_path())
        self.directory.add_to_env('__sprinter_prepend_path "%s" C_INCLUDE_PATH' % self.directory.include_path())
    self.directory.finalize()
    self.injections.commit()
    self.global_injections.commit()
    if not os.path.exists(os.path.join(self.root, ".global")):
        self.logger.debug("Global directory doesn't exist! creating...")
        os.makedirs(os.path.join(self.root, ".global"))
    self.logger.debug("Writing shell util file...")
    with open(self.shell_util_path, 'w+') as fh:
        fh.write(shell_utils_template)
    if self.error_occured:
        raise SprinterException("Error occured!")
    if self.message_success():
        self.logger.info(self.message_success())
    self.logger.info("Done!")
    self.logger.info("NOTE: Please remember to open new shells/terminals to use the modified environment")
55,091 | def _build_logger ( self , level = logging . INFO ) : self . _debug_stream = StringIO ( ) logger = logging . getLogger ( 'sprinter' ) out_hdlr = logging . StreamHandler ( sys . stdout ) out_hdlr . setLevel ( level ) logger . addHandler ( out_hdlr ) debug_hdlr = logging . StreamHandler ( self . _debug_stream ) debug_hdlr . setFormatter ( logging . Formatter ( '%(asctime)s %(message)s' ) ) debug_hdlr . setLevel ( logging . DEBUG ) logger . addHandler ( debug_hdlr ) logger . setLevel ( logging . DEBUG ) return logger | return a logger . if logger is none generate a logger from stdout |
def run_action(self, feature, action, run_if_error=False, raise_exception=True):
    """Run an action and log its output in case of errors.

    :param feature: feature key into self.features
    :param action: method name to invoke on the feature instance
    :param run_if_error: run even if the feature already recorded errors
    :param raise_exception: re-raise failures (FormulaException during the
        remove phase, SprinterException otherwise)
    :returns: the error message string, or None on success
    """
    # Skip features that already failed, unless explicitly overridden.
    if len(self._error_dict[feature]) > 0 and not run_if_error:
        return
    error = None
    instance = self.features[feature]
    try:
        getattr(instance, action)()
    except Exception as e:
        e = sys.exc_info()[1]
        self.logger.info("An exception occurred with action %s in feature %s!" % (action, feature))
        self.logger.debug("Exception", exc_info=sys.exc_info())
        error = str(e)
        self.log_feature_error(feature, str(e))
    if error is not None and raise_exception:
        exception_msg = "%s action failed for feature %s: %s" % (action, feature, error)
        # During removal a softer FormulaException lets callers continue.
        if self.phase == PHASE.REMOVE:
            raise FormulaException(exception_msg)
        else:
            raise SprinterException(exception_msg)
    return error
def _specialize(self, reconfigure=False):
    """Add variables and specialize contexts.

    Injects per-section install directories plus config root/node into
    each manifest's context, validates the manifests, then resolves and
    prompts each feature (resolution is skipped when reconfiguring).
    """
    for manifest in [self.source, self.target]:
        context_dict = {}
        if manifest:
            for s in manifest.formula_sections():
                context_dict["%s:root_dir" % s] = self.directory.install_directory(s)
            context_dict['config:root_dir'] = self.directory.root_dir
            context_dict['config:node'] = system.NODE
            manifest.add_additional_context(context_dict)
    self._validate_manifest()
    for feature in self.features.run_order:
        if not reconfigure:
            self.run_action(feature, 'resolve')
        instance = self.features[feature]
        # Only prompt for features present in the target manifest.
        if instance.target:
            self.run_action(feature, 'prompt')
55,094 | def _copy_source_to_target ( self ) : if self . source and self . target : for k , v in self . source . items ( 'config' ) : self . target . set_input ( k , v ) | copy source user configuration to target |
def grab_inputs(self, reconfigure=False):
    """Resolve the source and target config sections.

    Copies the source config onto the target, then asks the target
    manifest to gather its inputs (forcing a re-prompt when *reconfigure*
    is set).
    """
    self._copy_source_to_target()
    if self.target:
        self.target.grab_inputs(force=reconfigure)
def parse_domain(url):
    """Parse and return the domain portion of *url*; None when it does not
    match the domain pattern."""
    match = lib.DOMAIN_REGEX.match(url)
    return match.group() if match else None
def get_credentials(options, environment):
    """Get credentials from *options*, prompting for any that are missing.

    Prompting only happens when --username or --auth was passed; the
    (possibly updated) options mapping is returned.
    """
    wants_credentials = options['--username'] or options['--auth']
    if wants_credentials:
        if not options['--username']:
            options['<username>'] = lib.prompt("Please enter the username for %s..." % environment)
        if not options['--password']:
            options['<password>'] = lib.prompt("Please enter the password for %s..." % environment,
                                               secret=True)
    return options
def full_name(self):
    """The full name of the field.

    This is the field's entity name concatenated with the field's name.
    '<unbound>' and '<unnamed>' stand in for a missing entity or name.
    """
    entity_part = self.entity.__name__ if self.entity is not None else None
    name_part = self.name if self.name is not None else None
    left = entity_part if entity_part else '<unbound>'
    right = name_part if name_part else '<unnamed>'
    return left + '.' + right
def type_name(self):
    """Return the full type identifier of the field (module-qualified
    unless the type is a builtin)."""
    name = self.type.__name__
    module = self.type.__module__
    if module in ('__builtin__', 'builtins'):
        return name
    return module + '.' + name
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.