idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
50,000
def default_ubuntu_tr(mod):
    """Default translation function for Ubuntu based systems.

    Returns (unversioned, py2, py3) package names for the module.
    """
    base = 'python-%s' % mod.lower()
    return (base, base, 'python3-%s' % mod.lower())
Default translation function for Ubuntu based systems
50,001
def default_suse_tr(mod):
    """Default translation function for SUSE based systems.

    Returns (unversioned, py2, py3) package names for the module.
    """
    return tuple(prefix + mod for prefix in ('python-', 'python2-', 'python3-'))
Default translation function for openSUSE, SLES, and other SUSE based systems
50,002
def module2package(mod, dist, pkg_map=None, py_vers=('py',)):
    """Return a corresponding package name for a python module.

    :param mod: python module name.
    :param dist: distribution style used to look up mapping rules.
    :param pkg_map: optional list of rule callables ``rule(mod, dist)``.
    :param py_vers: which variants to return ('py', 'py2', 'py3').
    :returns: a single name when one variant is requested, else a list.
    """
    if not pkg_map:
        pkg_map = get_pkg_map(dist)
    pkglist = None
    for rule in pkg_map:
        pkglist = rule(mod, dist)
        if pkglist:
            break
    if not pkglist:
        # No rule matched; fall back to the distro default translation.
        pkglist = get_default_tr_func(dist)(mod)
    index_by_version = {'py': 0, 'py2': 1, 'py3': 2}
    output = []
    for v in py_vers:
        if v not in index_by_version:
            raise Exception('Invalid version "%s"' % (v))
        output.append(pkglist[index_by_version[v]])
    return output[0] if len(output) == 1 else output
Return a corresponding package name for a python module .
50,003
def module2upstream(mod):
    """Return a corresponding OpenStack upstream name for a python module.

    Falls back to the module name itself when no mapping rule matches.
    """
    for rule in OPENSTACK_UPSTREAM_PKG_MAP:
        match = rule(mod, dist=None)
        if match:
            return match[0]
    return mod
Return a corresponding OpenStack upstream name for a python module .
50,004
def main():
    """Command-line entry point: map a python module name to package names."""
    try:
        # platform.linux_distribution() was deprecated in 3.5 and removed in
        # Python 3.8; fall back to an empty default so --dist can still be
        # passed explicitly on modern interpreters.
        default_dist = platform.linux_distribution()[0]
    except AttributeError:
        default_dist = ''
    parser = argparse.ArgumentParser(description='Python module name to'
                                     'package name')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--dist', help='distribution style '
                       '(default: %(default)s)',
                       default=default_dist)
    group.add_argument('--upstream', help='map to OpenStack project name',
                       action='store_true')
    parser.add_argument('--pyver', help='Python versions to return. "py" is '
                        'the unversioned name',
                        action='append', choices=['py', 'py2', 'py3'],
                        default=[])
    parser.add_argument('modulename', help='python module name')
    args = vars(parser.parse_args())
    pyversions = args['pyver'] if args['pyver'] else ['py']
    if args['upstream']:
        print(module2upstream(args['modulename']))
    else:
        pylist = module2package(args['modulename'], args['dist'],
                                py_vers=pyversions)
        # module2package returns a bare string for a single version.
        if type(pylist) is list:
            print(' '.join(pylist))
        else:
            print(pylist)
for resolving names from command line
50,005
def get_paging(self):
    """Get the paging values passed through the query string params.

    :returns: a ``(page_num, page_size)`` tuple.  Each value falls back
        to the view's current attribute when the query param is missing,
        non-numeric, or less than 1.
    """
    page_num = self.page_num
    page_size = self.page_size
    try:
        requested_num = int(self.request.GET.get(self.page_kwarg or 'p'))
        if requested_num >= 1:
            page_num = requested_num
    except (TypeError, ValueError):
        # Missing param -> int(None) -> TypeError; junk input -> ValueError.
        # The original used bare ``except:``, which also swallowed things
        # like KeyboardInterrupt.
        pass
    try:
        requested_size = int(
            self.request.GET.get(self.page_size_kwarg or 'ps'))
        if requested_size >= 1:
            page_size = requested_size
    except (TypeError, ValueError):
        pass
    return page_num, page_size
Gets the paging values passed through the query string params .
50,006
def make_call_types(f, globals_d):
    """Make a call_types dictionary that describes what arguments to pass to f.

    :returns: ``(call_types, return_type)`` where call_types maps each
        argument name (excluding self) to an Anno, and return_type is an
        Anno or None.
    """
    arg_spec = getargspec(f)
    args = [k for k in arg_spec.args if k != "self"]
    defaults = {}
    if arg_spec.defaults:
        # Defaults align with the *last* len(defaults) arguments.
        default_args = args[-len(arg_spec.defaults):]
        for a, default in zip(default_args, arg_spec.defaults):
            defaults[a] = default
    if not getattr(f, "__annotations__", None):
        # No function annotations: fall back to Python 2 type comments.
        annotations = make_annotations(f, globals_d)
    else:
        annotations = f.__annotations__
    call_types = OrderedDict()
    for a in args:
        anno = anno_with_default(annotations[a], defaults.get(a, NO_DEFAULT))
        assert isinstance(anno, Anno), \
            "Argument %r has type %r which is not an Anno" % (a, anno)
        call_types[a] = anno
    return_type = anno_with_default(annotations.get("return", None))
    if return_type is Any:
        # Wrap a bare Any return into a proper Anno.
        return_type = Anno("Any return value", Any, "return")
    assert return_type is None or isinstance(return_type, Anno), \
        "Return has type %r which is not an Anno" % (return_type,)
    return call_types, return_type
Make a call_types dictionary that describes what arguments to pass to f
50,007
def make_annotations(f, globals_d=None):
    """Create an annotations dictionary from Python 2 type comments.

    Tokenizes f's source looking for a ``# type: (...) -> ...`` comment,
    evaluates the argument and return type expressions, and returns a dict
    mapping argument names (plus "return") to the evaluated types.
    Raises ValueError when an expression fails to evaluate or when a type
    comment is found without a ``->`` return part.
    """
    locals_d = {}
    if globals_d is None:
        globals_d = {}
        # EchoDict echoes names back, so unknown names still "evaluate".
        locals_d = EchoDict()
    lines, _ = inspect.getsourcelines(f)
    arg_spec = getargspec(f)
    args = list(arg_spec.args)
    if arg_spec.varargs is not None:
        args.append(arg_spec.varargs)
    if arg_spec.keywords is not None:
        args.append(arg_spec.keywords)
    it = iter(lines)
    types = []
    found = None
    for token in tokenize.generate_tokens(lambda: next(it)):
        typ, string, start, end, line = token
        if typ == tokenize.COMMENT:
            found = type_re.match(string)
            if found:
                parts = found.groups()
                if parts[0] != "(...)":
                    # Evaluate the argument type tuple; "*" markers for
                    # varargs/kwargs are stripped before evaluation.
                    expr = parts[0].replace("*", "")
                    try:
                        ob = eval(expr, globals_d, locals_d)
                    except Exception as e:
                        raise ValueError(
                            "Error evaluating %r: %s" % (expr, e))
                    if isinstance(ob, tuple):
                        types += list(ob)
                    else:
                        types.append(ob)
                if parts[1]:
                    # The "-> return_type" part is present; evaluate it and
                    # finish.
                    try:
                        ob = eval(parts[2], globals_d, locals_d)
                    except Exception as e:
                        raise ValueError(
                            "Error evaluating %r: %s" % (parts[2], e))
                    if args and args[0] in ["self", "cls"]:
                        # Drop self/cls when it has no annotated type.
                        if len(args) == len(types) + 1:
                            args = args[1:]
                    assert len(args) == len(types), \
                        "Args %r Types %r length mismatch" % (args, types)
                    ret = dict(zip(args, types))
                    ret["return"] = ob
                    return ret
    if found:
        raise ValueError("Got to the end of the function without seeing ->")
    return {}
Create an annotations dictionary from Python2 type comments
50,008
def get_object(self, **kwargs):
    """Return the cached object if present, otherwise fetch and cache it.

    Preprocessing of a view sometimes runs before the object attribute is
    set; caching here avoids a second query later in the request.
    """
    cached = getattr(self, 'object', None)
    if cached:
        return cached
    self.object = super(CommonSingleObjectViewMixin, self).get_object(**kwargs)
    return self.object
Sometimes preprocessing of a view needs to happen before the object attribute has been set for the view. In that case, just return the object if it has already been set by the time it's called later, since there's no need to make another query.
50,009
def hex_timestamp_to_datetime(hex_timestamp):
    """Convert a hex timestamp to a datetime object.

    ``int(s, 16)`` accepts both '1a2b' and '0x1a2b', so the original
    manual '0x' prefixing was redundant and has been removed.
    """
    return datetime.fromtimestamp(int(hex_timestamp, 16))
Converts hex timestamp to a datetime object .
50,010
def now_by_tz(tz='US/Central', ignoretz=True):
    """Get the current datetime in the given timezone.

    :param tz: timezone name or pytz timezone object.
    :param ignoretz: when True, strip tzinfo from the result.
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    current = datetime.now(tz)
    return current.replace(tzinfo=None) if ignoretz else current
Gets the current datetime object by timezone .
50,011
def tz_to_utc(dt, tz, ignoretz=True):
    """Convert a naive datetime in the given timezone to UTC.

    :param tz: timezone name or pytz timezone object.
    :param ignoretz: when True, strip tzinfo from the result.
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    localized = tz.localize(dt)
    utc_dt = localized.astimezone(pytz.timezone('UTC'))
    return utc_dt.replace(tzinfo=None) if ignoretz else utc_dt
Converts a datetime object from the specified timezone to a UTC datetime .
50,012
def utc_to_tz(dt, tz, ignoretz=True):
    """Convert a naive UTC datetime to the given timezone.

    :param tz: timezone name or pytz timezone object.
    :param ignoretz: when True, strip tzinfo from the result.
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    localized = pytz.utc.localize(dt)
    converted = localized.astimezone(tz)
    return converted.replace(tzinfo=None) if ignoretz else converted
Converts UTC datetime object to the specific timezone .
50,013
def turn_emails_off(view_func):
    """Decorator that swaps in the dummy email backend around a view.

    The original did not restore the backend if the wrapped view raised;
    the try/finally guarantees restoration on every path.
    """
    EMAIL_BACKEND_DUMMY = 'django.core.mail.backends.dummy.EmailBackend'

    def decorated(request, *args, **kwargs):
        orig_email_backend = settings.EMAIL_BACKEND
        settings.EMAIL_BACKEND = EMAIL_BACKEND_DUMMY
        try:
            return view_func(request, *args, **kwargs)
        finally:
            # Restore even when the wrapped view raises.
            settings.EMAIL_BACKEND = orig_email_backend
    return decorated
Turns emails off so no emails will be sent .
50,014
def get_md5_for_file(file):
    """Get the md5 hex digest for a file-like object.

    Reads in 64 KiB chunks: the original used ``md5.block_size`` (64
    bytes), which makes per-read overhead dominate on large files.  The
    digest returned is identical.
    """
    md5 = hashlib.md5()
    while True:
        chunk = file.read(65536)
        if not chunk:
            break
        md5.update(chunk)
    return md5.hexdigest()
Get the md5 hash for a file .
50,015
def get_dict_from_json_file(path, encoding='utf-8'):
    """Load and return the JSON content of the file at ``path``."""
    with open(path, encoding=encoding) as data_file:
        return json.load(data_file)
Gets a dict of data from a JSON file.
50,016
def linebreaks_safe(value, autoescape=True):
    """Apply the linebreaks filter only to strings containing a newline."""
    is_multiline = isinstance(value, string_types) and '\n' in value
    if not is_multiline:
        return value
    return linebreaks_filter(value, autoescape=autoescape)
Adds linebreaks only for text that has a newline character .
50,017
def add_sub_resource(self, relative_id, sub_resource):
    """Register ``sub_resource`` under its path template and relative id."""
    path = sub_resource.RELATIVE_PATH_TEMPLATE
    sub_resources = self.resources.get(path, defaultdict(list))
    sub_resources[relative_id].append(sub_resource)
    self.resources.update({path: sub_resources})
Add sub resource
50,018
def cloudshell_model_name(self):
    """Return the CloudShell model name, namespaced by shell name if set."""
    model = self.RESOURCE_MODEL
    if not self.shell_name:
        return model
    return "{shell_name}.{resource_model}".format(
        shell_name=self.shell_name, resource_model=model.replace(" ", ""))
Return the name of the CloudShell model
50,019
def parse_expression(source: str) -> ExpressionSource:
    """Return a (type, body) tuple for a binding expression.

    Raises ExpressionError when the source does not match the expression
    grammar.
    """
    if not is_expression(source):
        msg = ('Expression is not valid. Expression should be matched with '
               'regular expression: {0}').format(EXPRESSION_REGEX)
        raise ExpressionError(msg, source)
    if not source.startswith('{'):
        # Explicit "type:{body}" form.
        type_, source = source.split(':', 1)
    elif source.endswith('}}'):
        type_ = 'twoways'
    else:
        type_ = 'oneway'
    return (type_, source[1:-1])
Returns tuple with expression type and expression body
50,020
def _server_whitelist ( self ) : whitelist = [ ] for server in self . servers : if ( server not in self . last_error or self . last_error [ server ] < time . time ( ) - self . PENALTY_BOX_TIME ) : whitelist . append ( server ) if not whitelist : whitelist . append ( sorted ( self . last_error . items ( ) , key = lambda kv : kv [ 1 ] ) [ 0 ] [ 0 ] ) return whitelist
Returns list of servers that have not errored in the last five minutes . If all servers have errored in the last five minutes returns list with one item the server that errored least recently .
50,021
def _parse_array(value):
    """Coerce value into a list, or return None if it does not look like one."""
    stripped = value.lstrip()
    if not stripped or stripped[0] not in _bracket_strings:
        return None
    return json.loads(stripped)
Coerce value into a list.
50,022
def _parse_boolean(value):
    """Coerce value into a bool, or return None when unrecognized."""
    lowered = value.lower()
    if lowered in _true_strings:
        return True
    if lowered in _false_strings:
        return False
    return None
Coerce value into a bool.
50,023
def _parse_object(value):
    """Coerce value into a dict, or return None if it does not look like one."""
    stripped = value.lstrip()
    if not stripped or stripped[0] not in _brace_strings:
        return None
    return json.loads(stripped)
Coerce value into a dict .
50,024
def parse_value(value, allowed_types, name='value'):
    """Parse a string value into one of a number of allowed JSON types.

    :returns: a ``(type, parsed_value)`` tuple.
    :raises ValueError: when value is not a string.
    :raises ParseError: when the value matches none of the allowed types.
    """
    if not isinstance(value, str):
        raise ValueError('value for %r must be a string' % name)
    if isinstance(allowed_types, str):
        allowed_types = [allowed_types]
    if 'null' in allowed_types and value == '':
        return 'null', None
    for allowed_type, parser in _parser_funcs:
        if allowed_type not in allowed_types:
            continue
        try:
            parsed_value = parser(value)
        except (TypeError, ValueError):
            # Parser rejected the value; try the next allowed type.
            continue
        if parsed_value is not None:
            return allowed_type, parsed_value
    raise ParseError('%s must be a valid type (%s)' %
                     (name, ', '.join(allowed_types)))
Parse a value into one of a number of types .
50,025
def parse_json(value: str, sig_params: List[inspect.Parameter] = None) -> dict:
    """Parse a value as JSON, optionally remapping keys to signature names."""
    try:
        loaded = json.loads(value)
    except Exception as e:
        message = 'Error parsing JSON: %r error: %s' % (value, e)
        logging.debug(message, exc_info=e)
        raise ParseError(message)
    if sig_params is None:
        return loaded
    return map_param_names(loaded, sig_params)
Parse a value as JSON .
50,026
def map_param_names(req_params: dict,
                    sig_params: 'Dict[str, inspect.Parameter]') -> dict:
    """Map request param names to logic function parameter names.

    The original annotated ``sig_params`` as ``List[inspect.Parameter]``,
    but the body iterates ``sig_params.items()`` -- it is a mapping of
    parameter name to Parameter; the annotation is corrected (as a string
    so no new import is required at module level).
    """
    new_request_params = {}
    for name, param in sig_params.items():
        # A parameter's annotation may carry an alternate wire name.
        wire_name = getattr(param.annotation, 'param_name', None)
        key = name if wire_name is None else wire_name
        if key in req_params:
            new_request_params[name] = req_params[key]
    return new_request_params
Maps request param names to match logic function param names .
50,027
def parse_form_and_query_params(req_params: dict, sig_params: dict) -> dict:
    """Use the parameter annotations to coerce string params.

    Only params whose annotation is a doctor SuperType are parsed; a
    custom parser on the annotation wins over the default JSON-type
    parsing.  All parse failures are collected and raised together as a
    single TypeSystemError.
    """
    from doctor.types import SuperType, UnionType
    errors = {}
    parsed_params = {}
    for param, value in req_params.items():
        if param not in sig_params:
            continue
        if not issubclass(sig_params[param].annotation, SuperType):
            # Non-doctor annotations are passed through untouched
            # (i.e. skipped here).
            continue
        custom_parser = sig_params[param].annotation.parser
        if custom_parser is not None:
            if not callable(custom_parser):
                warnings.warn(
                    'Parser `{}` is not callable, using default parser.'.format(
                        custom_parser))
                custom_parser = None
        try:
            if custom_parser is not None:
                parsed_params[param] = custom_parser(value)
            else:
                if issubclass(sig_params[param].annotation, UnionType):
                    # A union accepts any of its member native types.
                    json_type = [_native_type_to_json[_type.native_type]
                                 for _type in sig_params[param].annotation.types]
                else:
                    native_type = sig_params[param].annotation.native_type
                    json_type = [_native_type_to_json[native_type]]
                if sig_params[param].annotation.nullable:
                    json_type.append('null')
                _, parsed_params[param] = parse_value(value, json_type)
        except ParseError as e:
            errors[param] = str(e)
    if errors:
        raise TypeSystemError(errors, errors=errors)
    return parsed_params
Uses the parameter annotations to coerce string params .
50,028
def _handle_task(self):
    """Pass request info to the async framework and write the response."""
    message = None
    try:
        status_code, output = process_async_task(self.request.headers,
                                                 self.request.body)
    except AbortAndRestart as restart:
        # 549 is a custom status code signalling the task queue to retry.
        status_code = 549
        message = 'Retry Async Task'
        output = str(restart)
    self.response.set_status(status_code, message)
    self.response.out.write(output)
Pass request info to the async framework .
50,029
def migrate_autoload_details(autoload_details, shell_name, shell_type):
    """Migrate autoload details: add namespaces to models and attributes.

    Resource models get the shell name prefix; attributes get either the
    shell type prefix (root level) or their resource's namespaced model.
    """
    model_by_address = {}
    for resource in autoload_details.resources:
        resource.model = "{shell_name}.{model}".format(
            shell_name=shell_name, model=resource.model)
        model_by_address[resource.relative_address] = resource.model
    for attribute in autoload_details.attributes:
        if not attribute.relative_address:
            # Root-level attribute: namespace with the shell type.
            attribute.attribute_name = "{shell_type}.{attr_name}".format(
                shell_type=shell_type, attr_name=attribute.attribute_name)
        else:
            attribute.attribute_name = "{model}.{attr_name}".format(
                model=model_by_address[attribute.relative_address],
                attr_name=attribute.attribute_name)
    return autoload_details
Migrate autoload details . Add namespace for attributes
50,030
def _get_convert_method(self):
    """Get the right method to convert the value.

    Iterates self._map (type -> converter method).  bool values are
    handled specially because bool is a subclass of int and would
    otherwise match an int entry first.  Falls back to _raw for SQL
    instances, else _undefined.
    """
    for type_, method in self._map.items():
        # Skip non-bool entries for bool values: isinstance(True, int)
        # is True, so the plain isinstance check below would mis-match.
        if type(self.value) is bool and type_ is not bool:
            continue
        if isinstance(self.value, type_):
            return method
    if is_sql_instance(self.value):
        return self._raw
    return self._undefined
Get right method to convert of the value .
50,031
def flatten(rho, pval, sortby="cor"):
    """Flatten correlation and p-value matrices into a tidy DataFrame.

    :param rho: (n, n) correlation matrix.
    :param pval: (n, n) p-value matrix.
    :param sortby: "cor" sorts by absolute correlation descending,
        "pval" sorts by p-value ascending.
    :returns: DataFrame with columns i, j, cor, pval for the strict
        upper triangle.
    """
    n = rho.shape[0]
    rows, cols = np.triu_indices(n, k=1)
    data = np.c_[rows, cols, rho[rows, cols], pval[rows, cols]]
    tab = pd.DataFrame(columns=['i', 'j', 'cor', 'pval'], data=data)
    tab[['i', 'j']] = tab[['i', 'j']].astype(int)
    if sortby == "cor":
        tab['abscor'] = np.abs(tab['cor'])
        tab.sort_values(by='abscor', ascending=False, inplace=True)
    elif sortby == "pval":
        tab.sort_values(by='pval', ascending=True, inplace=True)
    return tab[["i", "j", "cor", "pval"]]
Flatten correlation and p - value matrix
50,032
def max(self) -> int:
    """Get the maximum value this immediate can have."""
    effective_bits = self.bits - 1 if self.signed else self.bits
    v = (1 << effective_bits) - 1
    if self.lsb0:
        # The lowest bit is implicitly zero, so force the value even.
        v -= v % 2
    return v
Get the maximum value this immediate can have
50,033
def set_from_bits(self, value: int):
    """Set the immediate value from raw machine-code bits.

    The bits are not sign extended by the decoder, so signed immediates
    are sign-extended here using the two's-complement mask.
    """
    if self.signed:
        sign_part = value & self.tcmask
        magnitude_part = value & ~self.tcmask
        value = magnitude_part - sign_part
    self.set(value)
Set the immediate value from machine code bits. The bits are not sign extended, so this method takes care of the proper sign handling.
50,034
def randomize(self):
    """Randomize this immediate to a legal value."""
    value = randint(self.min(), self.max())
    if self.lsb0:
        # lsb0 immediates must be even.
        value -= value % 2
    self.value = value
Randomize this immediate to a legal value
50,035
def _init():
    """Initialize the furious context and registry.

    Re-initialization is skipped (returning None) when the recorded
    request-id hash matches the current environment's.
    """
    request_hash = os.environ.get('REQUEST_ID_HASH')
    already_done = (hasattr(_local_context, '_initialized') and
                    _local_context._initialized == request_hash)
    if already_done:
        return
    _local_context.registry = []
    _local_context._executing_async_context = None
    _local_context._executing_async = []
    _local_context._initialized = request_hash
    return _local_context
Initialize the furious context and registry .
50,036
def _handle_results(options):
    """Process the results of executing the Async's target.

    Uses the configured results processor, falling back to the default;
    when the processor returns an Async or Context, start it.
    """
    results_processor = options.get('_process_results') or _process_results
    processor_result = results_processor()
    if isinstance(processor_result, (Async, Context)):
        processor_result.start()
Process the results of executing the Async s target .
50,037
def encode_exception(exception):
    """Encode an exception into a serializable AsyncException.

    NOTE(review): the original called the Python 2-only ``unicode``
    builtin, which raises NameError on Python 3; ``str`` produces the
    same text there.
    """
    import sys
    return AsyncException(str(exception), exception.args, sys.exc_info(),
                          exception)
Encode exception to a form that can be passed around and serialized .
50,038
def _execute_callback(async, callback):
    """Execute the given callback, or start it when it is an Async, or
    return the async result payload when no callback is given.

    NOTE(review): ``async`` became a reserved word in Python 3.7, and
    ``furious.async`` is not an importable module name there either --
    as written this function is Python 2 only.
    """
    from furious.async import Async
    if not callback:
        # No callback configured: just hand back the finished payload.
        return async.result.payload
    if isinstance(callback, Async):
        return callback.start()
    return callback()
Execute the given callback or insert the Async callback or if no callback is given return the async . result .
50,039
def complex_state_generator_bravo(last_state=''):
    """Pick the next state, never repeating the previous one.

    'BRAVO' appears twice in the pool, so it is twice as likely to be
    drawn as the other states.
    """
    from random import choice
    candidates = ['ALPHA', 'BRAVO', 'BRAVO', 'DONE']
    if last_state:
        candidates.remove(last_state)
    chosen = choice(candidates)
    logging.info('Generating a state... %s', chosen)
    return chosen
Pick a state .
50,040
def state_machine_success ( ) : from furious . async import Async from furious . context import get_current_async result = get_current_async ( ) . result if result == 'ALPHA' : logging . info ( 'Inserting continuation for state %s.' , result ) return Async ( target = complex_state_generator_alpha , args = [ result ] ) elif result == 'BRAVO' : logging . info ( 'Inserting continuation for state %s.' , result ) return Async ( target = complex_state_generator_bravo , args = [ result ] ) logging . info ( 'Done working, stop now.' )
A positive result! Iterate!
50,041
def get_example_curl_lines(method: str, url: str, params: dict,
                           headers: dict) -> List[str]:
    """Render a cURL command for the given request, wrapped at ~80 columns.

    Uses shlex.quote: the original used pipes.quote, which was always
    undocumented and the pipes module is removed in Python 3.13.  On
    Python 3, pipes.quote *was* shlex.quote, so output is identical.
    """
    import shlex
    parts = ['curl {}'.format(shlex.quote(url))]
    parts.append('-X {}'.format(method))
    for header in headers:
        parts.append("-H '{}: {}'".format(header, headers[header]))
    if method not in ('DELETE', 'GET') and params:
        parts.append("-H 'Content-Type: application/json' -d")
        pretty_json = json.dumps(params, separators=(',', ': '), indent=4,
                                 sort_keys=True)
        json_lines = pretty_json.split('\n')
        # Indent the closing brace so it lines up under the payload.
        json_lines[-1] = ' ' + json_lines[-1]
        parts.append(shlex.quote('\n'.join(json_lines)))
    wrapped = [parts.pop(0)]
    for part in parts:
        if len(wrapped[-1]) + len(part) < 80:
            wrapped[-1] += ' ' + part
        else:
            wrapped[-1] += ' \\'
            wrapped.append(' ' + part)
    return wrapped
Render a cURL command for the given request .
50,042
def get_example_lines(headers: Dict[str, str], method: str, url: str,
                      params: Dict[str, Any], response: str) -> List[str]:
    """Render a reStructuredText example for the given request and response."""
    lines = ['', 'Example Request:', '', '.. code-block:: bash', '']
    curl_lines = get_example_curl_lines(method, url, params, headers)
    lines.extend(prefix_lines(curl_lines, ' '))
    lines.extend(['', 'Example Response:', ''])
    try:
        # Pretty-print when the response body is valid JSON.
        response = json.dumps(json.loads(response), indent=2,
                              separators=(',', ': '), sort_keys=True)
        lines.extend(['.. code-block:: json', ''])
    except Exception:
        lines.extend(['.. code-block:: text', ''])
    lines.extend(prefix_lines(response, ' '))
    return lines
Render a reStructuredText example for the given request and response .
50,043
def get_object_reference(obj: Object) -> str:
    """Register obj in ALL_RESOURCES and return an rst reference to it."""
    resource_name = obj.title
    if resource_name is None:
        # Derive a display name from the class name.
        resource_name = class_name_to_resource_name(obj.__name__)
    ALL_RESOURCES[resource_name] = obj
    slug = '-'.join(resource_name.split(' ')).lower().strip()
    return ' See :ref:`resource-{}`.'.format(slug)
Gets an object reference string from the obj instance .
50,044
def get_array_items_description(item: Array) -> str:
    """Return a description for an array's items.

    Handles both a positional list of item types and a single items
    type.  Note: the loop variable deliberately shadows the ``item``
    parameter inside the positional branch.
    """
    desc = ''
    if isinstance(item.items, list):
        desc = ''
        item_pos_template = (
            ' *Item {pos} must be*: {description}{enum}{ref}')
        for pos, item in enumerate(item.items):
            _enum = ''
            ref = ''
            if issubclass(item, Enum):
                _enum = ' Must be one of: `{}`'.format(item.enum)
                if item.case_insensitive:
                    _enum += ' (case-insensitive)'
                _enum += '.'
            elif issubclass(item, Object):
                # Registers the object and yields a cross-reference.
                ref = get_object_reference(item)
            desc += item_pos_template.format(
                pos=pos, description=item.description, enum=_enum, ref=ref)
    else:
        desc = item.items.description
        _enum = ''
        ref = ''
        if issubclass(item.items, Enum):
            _enum = ' Must be one of: `{}`'.format(item.items.enum)
            if item.items.case_insensitive:
                _enum += ' (case-insensitive)'
            _enum += '.'
        elif issubclass(item.items, Object):
            ref = get_object_reference(item.items)
        desc = ' *Items must be*: {description}{enum}{ref}'.format(
            description=desc, enum=_enum, ref=ref)
    return desc
Returns a description for an array's items.
50,045
def get_json_types(annotated_type: SuperType) -> List[str]:
    """Return the json type names for the provided annotated type."""
    if issubclass(annotated_type, UnionType):
        return [str(t.native_type.__name__) for t in annotated_type.types]
    if issubclass(annotated_type, Array):
        if annotated_type.items is None:
            return ['list']
        if isinstance(annotated_type.items, list):
            # Positional item types: list them all.
            _types = [str(t.native_type.__name__)
                      for t in annotated_type.items]
            return ['list[{}]'.format(','.join(_types))]
        return ['list[{}]'.format(
            str(annotated_type.items.native_type.__name__))]
    return [str(annotated_type.native_type.__name__)]
Returns the json types for the provided annotated type .
50,046
def get_json_object_lines(annotation: ResourceAnnotation,
                          properties: Dict[str, Any], field: str,
                          url_params: Dict, request: bool = False,
                          object_property: bool = False) -> List[str]:
    """Generate documentation for the given object annotation.

    Emits one ``:<field> <types> <name>:`` line per property, with
    required properties listed before optional ones.
    """
    sig_params = annotation.logic._doctor_signature.parameters
    required_lines = []
    lines = []
    default_field = field
    for prop in sorted(properties.keys()):
        annotated_type = properties[prop]
        field = default_field
        if request and prop in url_params:
            # URL path parameters are documented as :param instead.
            field = 'param'
        types = get_json_types(annotated_type)
        description = annotated_type.description
        obj_ref = ''
        if issubclass(annotated_type, Object):
            obj_ref = get_object_reference(annotated_type)
        elif (issubclass(annotated_type, Array) and
                annotated_type.items is not None and
                not isinstance(annotated_type.items, list) and
                issubclass(annotated_type.items, Object)):
            obj_ref = get_object_reference(annotated_type.items)
        elif (issubclass(annotated_type, Array) and
                isinstance(annotated_type.items, list)):
            # Register every positional Object item so it gets its own
            # documentation section.
            for item in annotated_type.items:
                if issubclass(item, Object):
                    get_object_reference(item)
        enum = ''
        if issubclass(annotated_type, Enum):
            enum = ' Must be one of: `{}`'.format(annotated_type.enum)
            if annotated_type.case_insensitive:
                enum += ' (case-insensitive)'
            enum += '.'
        if (issubclass(annotated_type, Array) and
                annotated_type.items is not None):
            array_description = get_array_items_description(annotated_type)
            # Avoid a duplicate reference when the items description
            # already contains it.
            if obj_ref in array_description:
                obj_ref = ''
            description += array_description
        default = ''
        if (request and prop in sig_params and
                sig_params[prop].default != Signature.empty):
            default = ' (Defaults to `{}`) '.format(sig_params[prop].default)
        field_prop = prop
        line_template = (
            ':{field} {types} {prop}: {description}{enum}{default}{obj_ref}')
        if request and prop in annotation.params.required:
            description = '**Required**. ' + description
            required_lines.append(line_template.format(
                field=field, types=','.join(types), prop=field_prop,
                description=description, enum=enum, obj_ref=obj_ref,
                default=default))
        else:
            lines.append(line_template.format(
                field=field, types=','.join(types), prop=field_prop,
                description=description, enum=enum, obj_ref=obj_ref,
                default=default))
    return required_lines + lines
Generate documentation for the given object annotation .
50,047
def get_json_lines(annotation: ResourceAnnotation, field: str, route: str,
                   request: bool = False) -> List:
    """Generate documentation lines for the given annotation.

    For responses, documents the return annotation's properties; for
    requests, documents the request object type or the annotated
    parameters.  Returns [] when the return type has no documentable
    properties.
    """
    url_params = URL_PARAMS_RE.findall(route)
    if not request:
        return_type = annotation.logic._doctor_signature.return_annotation
        if issubclass(return_type, Response):
            # Unwrap Response[T] to T when parameterized.
            if return_type.__args__ is not None:
                return_type = return_type.__args__[0]
        if issubclass(return_type, Array):
            if issubclass(return_type.items, Object):
                properties = return_type.items.properties
                field += 'arr'
            else:
                return []
        elif issubclass(return_type, Object):
            properties = return_type.properties
        else:
            return []
    else:
        if annotation.logic._doctor_req_obj_type:
            properties = annotation.logic._doctor_req_obj_type.properties
        else:
            parameters = annotation.annotated_parameters
            properties = {k: p.annotation for k, p in parameters.items()}
    return get_json_object_lines(annotation, properties, field, url_params,
                                 request)
Generate documentation lines for the given annotation .
50,048
def get_resource_object_doc_lines() -> List[str]:
    """Generate documentation lines for all collected resource objects.

    First walks the already-registered resources and registers any nested
    Object (or Array-of-Object) property types so they get their own
    section, then emits a "Resource Objects" section with attributes and
    a JSON example for each resource, sorted by name.
    """
    # Pass 1: pull nested object types into ALL_RESOURCES.  Iterate a
    # copy because the dict is mutated during the walk.
    for resource_name, a_type in ALL_RESOURCES.copy().items():
        for prop_a_type in a_type.properties.values():
            if issubclass(prop_a_type, Object):
                resource_name = prop_a_type.title
                if resource_name is None:
                    class_name = prop_a_type.__name__
                    resource_name = class_name_to_resource_name(class_name)
                ALL_RESOURCES[resource_name] = prop_a_type
            elif (issubclass(prop_a_type, Array) and
                    prop_a_type.items is not None and
                    not isinstance(prop_a_type.items, list) and
                    issubclass(prop_a_type.items, Object)):
                resource_name = prop_a_type.items.title
                if resource_name is None:
                    class_name = prop_a_type.items.__name__
                    resource_name = class_name_to_resource_name(class_name)
                ALL_RESOURCES[resource_name] = prop_a_type.items
    if not ALL_RESOURCES:
        return []
    # Pass 2: emit one documentation section per resource.
    lines = ['Resource Objects', '----------------']
    for resource_name in sorted(ALL_RESOURCES.keys()):
        a_type = ALL_RESOURCES[resource_name]
        resource_ref = '_resource-{}'.format(
            '-'.join(resource_name.lower().split(' ')))
        lines.extend(['.. {}:'.format(resource_ref), ''])
        lines.extend([resource_name, '#' * len(resource_name)])
        lines.extend([a_type.description, ''])
        if a_type.properties:
            lines.extend(['Attributes', '**********'])
            for prop in a_type.properties:
                prop_a_type = a_type.properties[prop]
                description = a_type.properties[prop].description.strip()
                obj_ref = ''
                if issubclass(prop_a_type, Object):
                    obj_ref = get_object_reference(prop_a_type)
                elif (issubclass(prop_a_type, Array) and
                        prop_a_type.items is not None and
                        not isinstance(prop_a_type.items, list) and
                        issubclass(prop_a_type.items, Object)):
                    obj_ref = get_object_reference(prop_a_type.items)
                elif (issubclass(prop_a_type, Array) and
                        prop_a_type.items is not None):
                    description += get_array_items_description(prop_a_type)
                native_type = a_type.properties[prop].native_type.__name__
                if prop in a_type.required:
                    description = '**Required**. ' + description
                lines.append('* **{}** (*{}*) - {}{}'.format(
                    prop, native_type, description, obj_ref).strip())
            lines.append('')
        lines.extend(['Example', '*******'])
        example = a_type.get_example()
        pretty_json = json.dumps(example, separators=(',', ': '), indent=4,
                                 sort_keys=True)
        pretty_json_lines = prefix_lines(pretty_json, ' ')
        lines.extend(['.. code-block:: json', ''])
        lines.extend(pretty_json_lines)
    return lines
Generate documentation lines for all collected resource objects .
50,049
def get_name(value) -> str:
    """Return a best guess at the qualified name for a class or function.

    Builtins are returned bare.  The original compared only against the
    Python 2 module name '__builtin__'; on Python 3 the module is named
    'builtins', so both are accepted.
    """
    if value.__module__ in ('__builtin__', 'builtins'):
        return value.__name__
    return '.'.join((value.__module__, value.__name__))
Return a best guess at the qualified name for a class or function .
50,050
def normalize_route(route: str) -> str:
    """Strip regexp anchors from the route and convert <var> markers to (var)."""
    stripped = str(route).lstrip('^').rstrip('$').rstrip('?')
    return stripped.replace('<', '(').replace('>', ')')
Strip some of the ugly regexp characters from the given pattern .
50,051
def prefix_lines(lines, prefix):
    """Add the prefix to each line; accepts bytes, str, or a list of lines."""
    if isinstance(lines, bytes):
        lines = lines.decode('utf-8')
    if isinstance(lines, str):
        lines = lines.splitlines()
    return ['{}{}'.format(prefix, line) for line in lines]
Add the prefix to each of the lines .
50,052
def class_name_to_resource_name(class_name: str) -> str:
    """Convert a camel case class name to a resource name with spaces.

    e.g. 'FooBarObject' -> 'Foo Bar Object'.
    """
    spaced = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', class_name)
    return re.sub('([a-z0-9])([A-Z])', r'\1 \2', spaced)
Converts a camel case class name to a resource name with spaces .
50,053
def _prepare_env(self):
    """Set up the document's environment if necessary.

    :returns: ``(env, state)`` where state is this directive's
        DirectiveState, created and attached to env on first use.
    """
    env = self.state.document.settings.env
    if not hasattr(env, self.directive_name):
        state = DirectiveState()
        setattr(env, self.directive_name, state)
    else:
        state = getattr(env, self.directive_name)
    return env, state
Setup the document s environment if necessary .
50,054
def run(self):
    """Called by Sphinx to generate documentation for this directive."""
    if self.directive_name is None:
        raise NotImplementedError('directive_name must be implemented by '
                                  'subclasses of BaseDirective')
    env, state = self._prepare_env()
    # Track which documents used this directive for incremental builds.
    state.doc_names.add(env.docname)
    directive_name = '<{}>'.format(self.directive_name)
    node = nodes.section()
    node.document = self.state.document
    result = ViewList()
    for line in self._render_rst():
        if line.startswith(HEADING_TOKEN):
            # Convert our heading marker into an rst section title.
            heading = line[HEADING_TOKEN_LENGTH:]
            result.append(heading, directive_name)
            result.append('-' * len(heading), directive_name)
        else:
            result.append(line, directive_name)
    nested_parse_with_titles(self.state, result, node)
    return node.children
Called by Sphinx to generate documentation for this directive .
50,055
def get_outdated_docs(cls, app, env, added, changed, removed):
    """Handler for Sphinx's env-get-outdated event.

    Returns the sorted doc names that used this directive, or [] when
    none are recorded.
    """
    state = getattr(env, cls.directive_name, None)
    if not (state and state.doc_names):
        return []
    return sorted(state.doc_names)
Handler for Sphinx s env - get - outdated event .
50,056
def purge_docs(cls, app, env, docname):
    """Handler for Sphinx's env-purge-doc event.

    Removes the purged document from this directive's recorded doc names.
    """
    state = getattr(env, cls.directive_name, None)
    if state and docname in state.doc_names:
        state.doc_names.remove(docname)
Handler for Sphinx s env - purge - doc event .
50,057
def setup(cls, app):
    """Called by Sphinx to set up an extension.

    Registers the directive, its ``<name>_harness`` config value, and the
    event handlers that keep per-document state current.  Installs
    sphinxcontrib-httpdomain when it is not already loaded.
    """
    if cls.directive_name is None:
        raise NotImplementedError('directive_name must be set by '
                                  'subclasses of BaseDirective')
    if not app.registry.has_domain('http'):
        # The rendered output relies on httpdomain roles/directives.
        setup_httpdomain(app)
    app.add_config_value('{}_harness'.format(cls.directive_name), None, 'env')
    app.add_directive(cls.directive_name, cls)
    app.connect('builder-inited', cls.run_setup)
    app.connect('build-finished', cls.run_teardown)
    app.connect('env-get-outdated', cls.get_outdated_docs)
    app.connect('env-purge-doc', cls.purge_docs)
Called by Sphinx to set up an extension.
50,058
def define_header_values(self, http_method, route, values, update=False):
    """Define header values for a given request.

    When ``update`` is true the values are later merged into the default
    headers; otherwise they replace them entirely.
    """
    key = (http_method.lower(), route)
    self.defined_header_values[key] = {'update': update, 'values': values}
Define header values for a given request .
50,059
def define_example_values(self, http_method, route, values, update=False):
    """Define example values for a given request.

    When ``update`` is true the values are later merged into the
    generated examples; otherwise they replace them entirely.
    """
    key = (http_method.lower(), route)
    self.defined_example_values[key] = {'update': update, 'values': values}
Define example values for a given request .
50,060
def _get_annotation_heading(self, handler, route, heading=None):
    """Return the heading text for an annotation.

    An explicit ``_doctor_heading`` on the handler wins.  Otherwise the
    heading is derived either from the handler's class name (when the
    module is named ``handler*``) or from the module name itself, with an
    ``(Internal)`` suffix for internal handlers/routes.
    """
    if hasattr(handler, '_doctor_heading'):
        return handler._doctor_heading
    heading = ''
    handler_path = str(handler)
    try:
        # Second-to-last dotted component is the module (file) name.
        handler_file_name = handler_path.split('.')[-2]
    except IndexError:
        handler_file_name = 'handler'
    if handler_file_name.startswith('handler'):
        # Build the heading from the camel-case words of the class name.
        class_name = handler_path.split('.')[-1]
        internal = False
        for word in CAMEL_CASE_RE.findall(class_name):
            if word == 'Internal':
                internal = True
                continue
            elif word.startswith(('List', 'Handler', 'Resource')):
                # Suffix words like ListX/HandlerX end the heading.
                break
            heading += '%s ' % (word,)
        if internal:
            heading = heading.strip()
            heading += ' (Internal)'
    else:
        # Fall back to a title-cased version of the module name.
        heading = ' '.join(handler_file_name.split('_')).title()
        if 'internal' in route:
            heading += ' (Internal)'
    return heading.strip()
Returns the heading text for an annotation .
50,061
def _get_headers(self, route: str, annotation: ResourceAnnotation) -> Dict:
    """Get the headers to send for *route*.

    Starts from the default ``self.headers`` and applies any values
    registered via ``define_header_values`` — merging when that entry was
    defined with ``update=True``, replacing otherwise.
    """
    headers = self.headers.copy()
    key = (annotation.http_method.lower(), str(route))
    overrides = self.defined_header_values.get(key)
    if overrides is None:
        return headers
    if overrides['update']:
        headers.update(overrides['values'])
        return headers
    return overrides['values']
Gets headers for the provided route .
50,062
def _get_example_values(self, route: str,
                        annotation: ResourceAnnotation) -> Dict[str, Any]:
    """Get example values for all properties in the annotation's schema.

    Values registered via ``define_example_values`` either replace the
    generated examples outright or are merged over them (``update=True``).
    For GET requests, list/dict values are JSON-encoded since they travel
    in the query string.
    """
    defined_values = self.defined_example_values.get(
        (annotation.http_method.lower(), str(route)))
    if defined_values and not defined_values['update']:
        # Full replacement: skip example generation entirely.
        return defined_values['values']
    if annotation.logic._doctor_req_obj_type:
        values = annotation.logic._doctor_req_obj_type.get_example()
    else:
        values = {k: v.annotation.get_example()
                  for k, v in annotation.annotated_parameters.items()}
    if defined_values:
        values.update(defined_values['values'])
    if annotation.http_method == 'GET':
        # Query-string parameters cannot carry structured types directly.
        for k, v in values.items():
            if isinstance(v, (list, dict)):
                values[k] = json.dumps(v)
    return values
Gets example values for all properties in the annotation's schema.
50,063
def get_device_name(file_name, sys_obj_id, delimiter=":"):
    """Look up a device name by its SNMP sysObjectID in a delimited map file.

    Each row of *file_name* is ``<sysObjectID><delimiter><device name>``.
    Returns the mapped name, or *sys_obj_id* itself when the file cannot
    be read or contains no matching row.
    """
    try:
        # BUG FIX: the file was opened in binary mode ("rb"), which the
        # Python 3 csv module rejects; it requires text mode, newline=''.
        with open(file_name, newline="") as csv_file:
            for row in csv.reader(csv_file, delimiter=delimiter):
                if len(row) >= 2 and row[0] == sys_obj_id:
                    return row[1]
    except IOError:
        # Missing/unreadable map file: fall back to the raw OID below.
        pass
    return sys_obj_id
Get device name by its SNMP sysObjectID property from the file map
50,064
def context_complete(context_id):
    """Log that the context is complete, along with each task's result.

    Returns *context_id*, or None when the current context cannot be
    loaded.
    """
    logging.info('Context %s is.......... DONE.', context_id)
    # Imported lazily; furious is only needed inside task execution.
    from furious.context import get_current_async_with_context
    _, context = get_current_async_with_context()
    if not context:
        logging.error("Could not load context")
        return
    for task_id, result in context.result.items():
        logging.info("#########################")
        logging.info("Task Id: %s and Result: %s", task_id, result)
    return context_id
Log out that the context is complete .
50,065
def process_messages(tag, retries=0):
    """Process messages pulled from the queue selected by *tag*.

    Folds each message's value into a memcache-backed stats object (via
    ``set_stats``) and re-inserts another processor, resetting the retry
    counter whenever any work was done.  After 5 consecutive empty pulls
    the chain stops re-inserting itself.
    """
    from furious.batcher import bump_batch
    from furious.batcher import MESSAGE_DEFAULT_QUEUE
    from furious.batcher import MessageIterator
    from furious.batcher import MessageProcessor
    from google.appengine.api import memcache
    if retries > 5:
        # Too many consecutive empty pulls: end the processor chain.
        logging.info("Process messages hit max retry and is exiting")
        return
    message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500)
    client = memcache.Client()
    # gets() (not get()) so a later cas() can detect concurrent updates.
    stats = client.gets(tag)
    stats = json.loads(stats) if stats else get_default_stats()
    work_processed = False
    for message in message_iterator:
        work_processed = True
        value = int(message.get("value", 0))
        color = message.get("color").lower()
        set_stats(stats["totals"], value)
        set_stats(stats["colors"][color], value)
    json_stats = json.dumps(stats)
    # add() only succeeds for a fresh key; otherwise compare-and-set must
    # win or we abort rather than silently lose another worker's update.
    if not client.add(tag, json_stats):
        if not client.cas(tag, json_stats):
            raise Exception("Transaction Collision.")
    bump_batch(tag)
    if work_processed:
        retries = 0
    else:
        retries += 1
    processor = MessageProcessor(target=process_messages, args=("colors",),
                                 kwargs={'retries': retries}, tag="colors")
    processor.start()
Processes the messages pulled from a queue based off the tag passed in. Will insert another processor if any work was processed or the retry count is under the max retry count. Will update an aggregated stats object with the data in the payload of the messages processed.
50,066
def set_stats(stats, value):
    """Fold *value* into the running *stats* dict in place.

    Updates ``total_count``, ``value`` (running sum), ``average``,
    ``max`` and ``min``.  A stored min of 0 is treated as "unset" and is
    replaced by any incoming value.
    """
    stats["total_count"] += 1
    stats["value"] += value
    stats["average"] = stats["value"] / stats["total_count"]
    stats["max"] = max(stats["max"], value)
    if stats["min"] == 0 or value < stats["min"]:
        stats["min"] = value
Updates the stats with the value passed in .
50,067
def serialize_to_json(result, unpicklable=False):
    """Serialize *result* to a JSON string via jsonpickle.

    ``unpicklable=False`` produces plain JSON without python type tags.
    """
    encoded = jsonpickle.encode(result, unpicklable=unpicklable)
    return str(encoded)
Serializes output as JSON and writes it to console output wrapped with special prefix and suffix
50,068
def orchestration_save(self, mode="shallow", custom_params=None):
    """Orchestration Save command.

    Saves the running configuration (optionally customised through the
    JSON string *custom_params*) and returns the serialized
    OrchestrationSaveResult describing the saved artifact.

    NOTE(review): *mode* is currently unused here — kept for interface
    compatibility; confirm against the orchestration contract.
    """
    save_params = {'folder_path': '', 'configuration_type': 'running',
                   'return_artifact': True}
    params = dict()
    if custom_params:
        params = jsonpickle.decode(custom_params)
    # Caller-supplied custom_params override the defaults above.
    save_params.update(params.get('custom_params', {}))
    save_params['folder_path'] = self.get_path(save_params['folder_path'])
    saved_artifact = self.save(**save_params)
    saved_artifact_info = OrchestrationSavedArtifactInfo(
        resource_name=self.resource_config.name,
        created_date=datetime.datetime.now(),
        restore_rules=self.get_restore_rules(),
        saved_artifact=saved_artifact)
    save_response = OrchestrationSaveResult(
        saved_artifacts_info=saved_artifact_info)
    # Fail loudly if the artifact info is missing mandatory fields.
    self._validate_artifact_info(saved_artifact_info)
    return serialize_to_json(save_response)
Orchestration Save command
50,069
def get_path(self, path=''):
    """Validate the incoming path and return a full URL.

    When *path* is empty it is built from the resource's backup
    attributes.  Credentials from the resource config are filled in for
    storage schemes that require authorization.

    :raises Exception: when a URL cannot be built from the path.
    """
    if not path:
        host = self.resource_config.backup_location
        if ':' not in host:
            scheme = self.resource_config.backup_type
            if not scheme or scheme.lower() == self.DEFAULT_FILE_SYSTEM.lower():
                scheme = self.file_system
            # Strip everything from the first ':' or '/' onwards.
            # BUG FIX: re.DOTALL was passed as the positional `count`
            # argument of re.sub; it must be passed as `flags`.
            scheme = re.sub('(:|/+).*$', '', scheme, flags=re.DOTALL)
            host = re.sub('^/+', '', host)
            host = '{}://{}'.format(scheme, host)
        path = host
    url = UrlParser.parse_url(path)
    if url[UrlParser.SCHEME].lower() in AUTHORIZATION_REQUIRED_STORAGE:
        # Fill in credentials from the resource config when absent.
        if UrlParser.USERNAME not in url or not url[UrlParser.USERNAME]:
            url[UrlParser.USERNAME] = self.resource_config.backup_user
        if UrlParser.PASSWORD not in url or not url[UrlParser.PASSWORD]:
            url[UrlParser.PASSWORD] = self._api.DecryptPassword(
                self.resource_config.backup_password).Value
    try:
        result = UrlParser.build_url(url)
    except Exception as e:
        self._logger.error('Failed to build url: {}'.format(e))
        raise Exception('ConfigurationOperations',
                        'Failed to build path url to remote host')
    return result
Validate the incoming path; if the path is empty, build it from resource attributes. If the path is invalid, raise an exception.
50,070
def _validate_configuration_type ( self , configuration_type ) : if configuration_type . lower ( ) != 'running' and configuration_type . lower ( ) != 'startup' : raise Exception ( self . __class__ . __name__ , 'Configuration Type is invalid. Should be startup or running' )
Validate configuration type
50,071
def _validate_artifact_info ( self , saved_config ) : is_fail = False fail_attribute = '' for class_attribute in self . REQUIRED_SAVE_ATTRIBUTES_LIST : if type ( class_attribute ) is tuple : if not hasattr ( saved_config , class_attribute [ 0 ] ) : is_fail = True fail_attribute = class_attribute [ 0 ] elif not hasattr ( getattr ( saved_config , class_attribute [ 0 ] ) , class_attribute [ 1 ] ) : is_fail = True fail_attribute = class_attribute [ 1 ] else : if not hasattr ( saved_config , class_attribute ) : is_fail = True fail_attribute = class_attribute if is_fail : raise Exception ( 'ConfigurationOperations' , 'Mandatory field {0} is missing in Saved Artifact Info request json' . format ( fail_attribute ) )
Validate OrchestrationSavedArtifactInfo object for key components
50,072
def register(key, initializer: callable, param=None):
    """Add a resolver to the container of the current global scope."""
    get_current_scope().container.register(key, initializer, param)
Adds resolver to global container
50,073
def register_single(key, value, param=None):
    """Register a resolver that always returns the singleton *value*.

    The value is captured in a lambda so the container treats it like any
    other initializer.
    """
    get_current_scope().container.register(key, lambda: value, param)
Generates resolver to return singleton value and adds it to global container
50,074
def wrap_with_scope(func, scope_name=None):
    """Wrap *func* so it always executes inside the named scope.

    When *scope_name* is None, the current scope's name is captured at
    wrap time.  The name is bound as a lambda default argument so later
    scope changes cannot affect the wrapper (avoids late binding).
    """
    if scope_name is None:
        scope_name = get_current_scope().name
    return lambda *args, scope=scope_name, **kwargs: \
        _call_with_scope(func, scope, args, kwargs)
Wraps function with scope . If scope_name is None current scope is used
50,075
def inject(*injections):
    """Decorator that resolves the named dependencies from the global
    container and passes them to the wrapped function as keyword
    arguments.

    Keyword arguments supplied explicitly by the caller take precedence
    over injected values.
    """
    from functools import wraps

    def _decorate(func):
        # IMPROVEMENT: wraps() preserves the wrapped function's metadata
        # (__name__, __doc__, ...), which the original decorator lost.
        @wraps(func)
        def _decorated(*args, **kwargs):
            for key in injections:
                if key not in kwargs:
                    kwargs[key] = get_current_scope().container.get(key)
            return func(*args, **kwargs)
        return _decorated
    return _decorate
Resolves dependencies using global container and passed it with optional parameters
50,076
def register(self, key, initializer: callable, param=None):
    """Add a resolver to the container.

    Resolvers are stored per ``(key, param)`` pair; registering the same
    pair again overwrites the previous initializer.

    :raises DependencyError: when *initializer* is not callable.
    """
    if not callable(initializer):
        raise DependencyError('Initializer {0} is not callable'.format(initializer))
    self._initializers.setdefault(key, {})[param] = initializer
Add resolver to container
50,077
def create_routes(routes: Tuple[Route]) -> List[Tuple[str, Resource]]:
    """A thin wrapper around doctor's create_routes that supplies the
    Flask-specific request handler and base resource class.
    """
    return doctor_create_routes(
        routes, handle_http, default_base_handler_class=Resource)
A thin wrapper around create_routes that passes in flask specific values .
50,078
def confusion_to_mcc(*args):
    """Convert a confusion matrix to the Matthews correlation coefficient.

    Accepts either one 2x2 confusion matrix (anything with ``ravel``,
    e.g. a numpy array, ordered tn, fp, fn, tp) or the four scalar
    counts ``tn, fp, fn, tp``.

    :raises Exception: for any other number of arguments.
    """
    # BUG FIX: `len(args) is 1` relied on CPython small-int interning;
    # identity comparison on ints is not guaranteed — use ==.
    if len(args) == 1:
        tn, fp, fn, tp = args[0].ravel().astype(float)
    elif len(args) == 4:
        tn, fp, fn, tp = [float(a) for a in args]
    else:
        raise Exception(("Input argument is not an 2x2 matrix, "
                         "nor 4 elements tn, fp, fn, tp."))
    return (tp * tn - fp * fn) / np.sqrt(
        (tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
Convert the confusion matrix to the Matthews correlation coefficient
50,079
def create_node(xml_node: XmlNode, **init_args):
    """Create a node from *xml_node*, using its namespace as the module
    path and its tag name as the class name.

    Instances that are not already ``Node``s are wrapped in an
    ``InstanceNode``.
    """
    node_type = get_inst_type(xml_node)
    init_args['xml_node'] = xml_node
    instance = create_inst(node_type, **init_args)
    if isinstance(instance, Node):
        return instance
    return convert_to_node(instance, **init_args)
Creates node from xml node using namespace as module and tag name as class name
50,080
def get_inst_type(xml_node: XmlNode):
    """Resolve the type referenced by *xml_node*: its namespace is the
    module path and its tag name is the attribute (class) name.

    :raises RenderingError: when the module or attribute cannot be found.
    """
    module_path, class_name = xml_node.namespace, xml_node.name
    try:
        module = import_module(module_path)
        return module.__dict__[class_name]
    except (KeyError, ImportError, ModuleNotFoundError):
        message = 'Import "{0}.{1}" is failed.'.format(module_path, class_name)
        raise RenderingError(message, xml_node.view_info)
Returns type by xml node
50,081
def create_inst(inst_type, **init_args):
    """Create an instance of *inst_type*, selecting from *init_args* the
    constructor arguments its signature accepts.
    """
    args, kwargs = get_init_args(inst_type, init_args)
    return inst_type(*args, **kwargs)
Creates class instance with args
50,082
def get_init_args(inst_type, init_args: dict, add_kwargs=False) -> Tuple[List, Dict]:
    """Return ``(args, kwargs)`` to pass to the *inst_type* constructor.

    Required positional parameters (no default) are pulled from
    *init_args* by name; keyword arguments are collected by
    ``_get_var_kwargs`` (which also forwards extras when *add_kwargs* is
    true) or ``_get_kwargs``.

    :raises RenderingError: when a required argument is missing.
    """
    try:
        parameters = signature(inst_type).parameters.values()
        # Names of required positional parameters (those with no default).
        args_keys = [p.name for p in parameters
                     if p.kind in [Parameter.POSITIONAL_ONLY,
                                   Parameter.POSITIONAL_OR_KEYWORD]
                     and p.default == Parameter.empty]
        args = [init_args[key] for key in args_keys]
        kwargs = _get_var_kwargs(parameters, args_keys, init_args) \
            if add_kwargs else _get_kwargs(parameters, init_args)
    except KeyError as key_error:
        msg_format = 'parameter with key "{0}" is not found in node args'
        raise RenderingError(msg_format.format(key_error.args[0]))
    return (args, kwargs)
Returns tuple with args and kwargs to pass it to inst_type constructor
50,083
def convert_to_node(instance, xml_node: XmlNode,
                    node_globals: InheritedDict = None) -> InstanceNode:
    """Wrap *instance* in an InstanceNode."""
    return InstanceNode(instance, xml_node, node_globals)
Wraps passed instance with InstanceNode
50,084
def _auto_insert_check ( self ) : if not self . batch_size : return if len ( self . _tasks ) >= self . batch_size : self . _handle_tasks ( )
Automatically insert tasks asynchronously . Depending on batch_size insert or wait until next call .
50,085
def isa(mnemonic: str, opcode: int, funct3: int = None, funct7: int = None, *,
        variant=RV32I, extension=None):
    """Decorator for instruction classes.

    Attaches the static encoding information (mnemonic, opcode, funct3,
    funct7, ISA variant and extension) to the decorated class and gives
    it a ``_match`` predicate that tests a machine-code word against the
    funct3/funct7 fields.
    """
    def wrapper(wrapped):
        class WrappedClass(wrapped):
            _mnemonic = mnemonic
            _opcode = opcode
            _funct3 = funct3
            _funct7 = funct7
            _variant = variant
            _extension = extension

            @staticmethod
            def _match(machinecode: int):
                # Extract funct3 (bits 12-14) and funct7 (bits 25-31).
                f3 = (machinecode >> 12) & 0x7
                f7 = (machinecode >> 25) & 0x7f
                if funct3 is not None and f3 != funct3:
                    return False
                if funct7 is not None and f7 != funct7:
                    return False
                return True

        # Masquerade as the wrapped class for repr/debugging purposes.
        WrappedClass.__name__ = wrapped.__name__
        WrappedClass.__module__ = wrapped.__module__
        WrappedClass.__qualname__ = wrapped.__qualname__
        return WrappedClass
    return wrapper
Decorator for the instructions . The decorator contains the static information for the instructions in particular the encoding parameters and the assembler mnemonic .
50,086
def get_insns(cls=None):
    """Collect all generatable instruction classes.

    Walks the subclass tree rooted at *cls* (the ``Instruction`` base
    when omitted) and returns every class that defines ``_mnemonic`` in
    its own ``__dict__`` — i.e. classes carrying full encoding info.
    """
    if cls is None:
        cls = Instruction
    found = [cls] if "_mnemonic" in cls.__dict__ else []
    for child in cls.__subclasses__():
        found.extend(get_insns(child))
    return found
Get all Instructions. This is based on all known subclasses of cls. If none is given, all Instructions are returned. Only such instructions are returned that can be generated, i.e. that have a mnemonic, opcode, etc. So other classes in the hierarchy are not matched.
50,087
def reverse_lookup(mnemonic: str):
    """Find the instruction class matching *mnemonic*, or None."""
    for insn in get_insns():
        if "_mnemonic" in insn.__dict__ and insn._mnemonic == mnemonic:
            return insn
    return None
Find instruction that matches the mnemonic .
50,088
def get_value_from_schema(schema, definition: dict, key: str,
                          definition_key: str):
    """Get the value of *key* from a schema definition.

    ``$ref`` indirection is resolved first.  When *key* is missing at
    this level, the lookup recurses into array ``items`` and object
    ``properties``.

    :raises TypeSystemError: when a $ref fails to resolve, or when *key*
        is missing from a scalar definition.
    """
    # Copy so resolving $ref never mutates the caller's definition.
    resolved_definition = definition.copy()
    if '$ref' in resolved_definition:
        try:
            resolved_definition = schema.resolve(definition['$ref'])
        except SchemaError as e:
            raise TypeSystemError(str(e))
    try:
        value = resolved_definition[key]
    except KeyError:
        if resolved_definition['type'] == 'array':
            # A single example item stands in for the whole array.
            return [get_value_from_schema(
                schema, resolved_definition['items'], key, definition_key)]
        elif resolved_definition['type'] == 'object':
            value = {}
            for prop, definition in resolved_definition['properties'].items():
                value[prop] = get_value_from_schema(
                    schema, definition, key, definition_key)
            return value
        raise TypeSystemError(
            'Definition `{}` is missing a {}.'.format(definition_key, key))
    return value
Gets a value from a schema and definition .
50,089
def get_types(json_type: StrOrList) -> typing.Tuple[str, str]:
    """Return the ``(json type, native python type)`` pair for *json_type*.

    A list input (e.g. ``['string', 'null']``) collapses to its first
    non-null entry before the lookup.
    """
    if isinstance(json_type, list):
        non_null = [t for t in json_type if t != 'null']
        if non_null:
            json_type = non_null[0]
    return (json_type, JSON_TYPES_TO_NATIVE[json_type])
Returns the json and native python type based on the json_type input .
50,090
def new_type(cls, **kwargs) -> typing.Type:
    """Create a user-defined subtype of *cls*.

    The new type copies the class attributes of *cls* and overrides
    them with any keyword arguments given; the original class is left
    untouched.
    """
    attrs = dict(cls.__dict__)
    attrs.update(kwargs)
    return type(cls.__name__, (cls,), attrs)
Create a user defined type .
50,091
def get_example(cls) -> dict:
    """Return an example value for the Dict type.

    The explicit ``example`` override wins; otherwise the example is
    built from the examples of each declared property.
    """
    if cls.example is not None:
        return cls.example
    return {name: prop.get_example() for name, prop in cls.properties.items()}
Returns an example value for the Dict type .
50,092
def get_example(cls) -> list:
    """Return an example value for the Array type.

    Preference order: the explicit ``example`` override, examples built
    from ``items`` (per-position when items is a list), then ``[1]``.
    """
    if cls.example is not None:
        return cls.example
    if cls.items is None:
        return [1]
    if isinstance(cls.items, list):
        return [item.get_example() for item in cls.items]
    return [cls.items.get_example()]
Returns an example value for the Array type .
50,093
def render_node(xml_node: XmlNode, **args) -> Node:
    """Render a node tree from *xml_node*.

    Creates the node, looks up its rendering pipeline and runs the
    pipeline steps.  Known ``CoreError``s are re-raised with view info
    attached; any other exception is wrapped in a ``RenderingError``
    that preserves the original as its cause.
    """
    try:
        node = create_node(xml_node, **args)
        pipeline = get_pipeline(node)
        run_steps(node, pipeline, **args)
        return node
    except CoreError as error:
        error.add_view_info(xml_node.view_info)
        raise
    except:
        # Deliberately broad: any failure is reported as a rendering
        # error while chaining the original exception for debugging.
        info = exc_info()
        msg = 'Unknown error occurred during rendering'
        error = RenderingError(msg, xml_node.view_info)
        error.add_cause(info[1])
        raise error from info[1]
Renders node from xml node
50,094
def get_pipeline(node: Node) -> RenderingPipeline:
    """Get the rendering pipeline registered for *node*.

    :raises RenderingError: when no pipeline was registered.
    """
    pipeline = _get_registered_pipeline(node)
    if pipeline is not None:
        return pipeline
    raise RenderingError(_get_pipeline_registration_error_message(node))
Gets rendering pipeline for passed node
50,095
def run_steps(node: Node, pipeline: RenderingPipeline, **args):
    """Run the pipeline's rendering steps against *node*.

    A step may return a dict of extra arguments, which is merged into
    the arguments passed to subsequent steps.
    """
    for step in pipeline.steps:
        outcome = step(node, pipeline=pipeline, **args)
        if isinstance(outcome, dict):
            args = {**args, **outcome}
Runs instance node rendering steps
50,096
def apply_attributes(node: Node, **_):
    """Apply every xml attribute of *node* to it, setting up bindings."""
    for attribute in node.xml_node.attrs:
        apply_attribute(node, attribute)
Applies xml attributes to instance node and setups bindings
50,097
def apply_attribute(node: Node, attr: XmlAttr):
    """Map one xml attribute onto the node.

    Expression values (per ``is_expression``) are routed through the
    binder to create a binding; plain values are assigned directly via
    the attribute's setter.
    """
    setter = get_setter(attr)
    # Guard against attributes with a None value before stripping.
    stripped_value = attr.value.strip() if attr.value else ''
    if is_expression(stripped_value):
        (binding_type, expr_body) = parse_expression(stripped_value)
        binder().apply(binding_type, node=node, attr=attr, modifier=setter,
                       expr_body=expr_body)
    else:
        # Note: the original (unstripped) value is what gets assigned.
        setter(node, attr.name, attr.value)
Maps xml attribute to instance node property and setups bindings
50,098
def call_set_attr(node: Node, key: str, value):
    """Default modifier: delegate to the node's ``set_attr`` setter."""
    node.set_attr(key, value)
Calls node setter
50,099
def render_children(node: Node, **child_args):
    """Render every child xml node and attach it to *node*."""
    for child_xml in node.xml_node.children:
        node.add_child(render(child_xml, **child_args))
Render node children