| idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
5,500
|
def on_failure ( self , exc , task_id , args , kwargs , einfo ) : key = self . _get_cache_key ( args , kwargs ) _ , penalty = cache . get ( key , ( 0 , 0 ) ) if penalty < self . MAX_PENALTY : penalty += 1 logger . debug ( 'The task %s is penalized and will be executed on %d run.' % ( self . name , penalty ) ) cache . set ( key , ( penalty , penalty ) , self . CACHE_LIFETIME ) return super ( PenalizedBackgroundTask , self ) . on_failure ( exc , task_id , args , kwargs , einfo )
|
Increases penalty for the task and resets the counter.
|
5,501
|
def on_success ( self , retval , task_id , args , kwargs ) : key = self . _get_cache_key ( args , kwargs ) if cache . get ( key ) is not None : cache . delete ( key ) logger . debug ( 'Penalty for the task %s has been removed.' % self . name ) return super ( PenalizedBackgroundTask , self ) . on_success ( retval , task_id , args , kwargs )
|
Clears cache for the task.
|
5,502
|
def log_backend_action ( action = None ) : def decorator ( func ) : @ functools . wraps ( func ) def wrapped ( self , instance , * args , ** kwargs ) : action_name = func . func_name . replace ( '_' , ' ' ) if action is None else action logger . debug ( 'About to %s `%s` (PK: %s).' , action_name , instance , instance . pk ) result = func ( self , instance , * args , ** kwargs ) logger . debug ( 'Action `%s` was executed successfully for `%s` (PK: %s).' , action_name , instance , instance . pk ) return result return wrapped return decorator
|
Logging for backend method.
|
5,503
|
def get_service_resources ( cls , model ) : key = cls . get_model_key ( model ) return cls . get_service_name_resources ( key )
|
Get resource models by service model
|
5,504
|
def get_service_name_resources ( cls , service_name ) : from django . apps import apps resources = cls . _registry [ service_name ] [ 'resources' ] . keys ( ) return [ apps . get_model ( resource ) for resource in resources ]
|
Get resource models by service name
|
5,505
|
def _is_active_model ( cls , model ) : return ( '.' . join ( model . __module__ . split ( '.' ) [ : 2 ] ) in settings . INSTALLED_APPS or '.' . join ( model . __module__ . split ( '.' ) [ : 1 ] ) in settings . INSTALLED_APPS )
|
Check whether the model's app name is in the list of INSTALLED_APPS
|
5,506
|
def get_context_data_from_headers ( request , headers_schema ) : if not headers_schema : return None env = request . parsed_data . xml . xpath ( '/soap:Envelope' , namespaces = SoapProtocol . namespaces ) [ 0 ] header = env . xpath ( './soap:Header/*' , namespaces = SoapProtocol . namespaces ) if len ( header ) < 1 : return None return headers_schema . validate ( xml2obj ( header [ 0 ] , headers_schema ) )
|
Extracts context data from request headers according to specified schema.
|
5,507
|
def lazy_constant ( fn ) : class NewLazyConstant ( LazyConstant ) : @ functools . wraps ( fn ) def __call__ ( self ) : return self . get_value ( ) return NewLazyConstant ( fn )
|
Decorator to make a function that takes no arguments use the LazyConstant class.
|
5,508
|
def lru_cache ( maxsize = 128 , key_fn = None ) : def decorator ( fn ) : cache = LRUCache ( maxsize ) argspec = inspect2 . getfullargspec ( fn ) arg_names = argspec . args [ 1 : ] + argspec . kwonlyargs kwargs_defaults = get_kwargs_defaults ( argspec ) cache_key = key_fn if cache_key is None : def cache_key ( args , kwargs ) : return get_args_tuple ( args , kwargs , arg_names , kwargs_defaults ) @ functools . wraps ( fn ) def wrapper ( * args , ** kwargs ) : key = cache_key ( args , kwargs ) try : return cache [ key ] except KeyError : value = fn ( * args , ** kwargs ) cache [ key ] = value return value wrapper . clear = cache . clear return wrapper return decorator
|
Decorator that adds an LRU cache of size maxsize to the decorated function.
|
5,509
|
def cached_per_instance ( ) : def cache_fun ( fun ) : argspec = inspect2 . getfullargspec ( fun ) arg_names = argspec . args [ 1 : ] + argspec . kwonlyargs kwargs_defaults = get_kwargs_defaults ( argspec ) cache = { } def cache_key ( args , kwargs ) : return get_args_tuple ( args , kwargs , arg_names , kwargs_defaults ) def clear_cache ( instance_key , ref ) : del cache [ instance_key ] @ functools . wraps ( fun ) def new_fun ( self , * args , ** kwargs ) : instance_key = id ( self ) if instance_key not in cache : ref = weakref . ref ( self , functools . partial ( clear_cache , instance_key ) ) cache [ instance_key ] = ( ref , { } ) instance_cache = cache [ instance_key ] [ 1 ] k = cache_key ( args , kwargs ) if k not in instance_cache : instance_cache [ k ] = fun ( self , * args , ** kwargs ) return instance_cache [ k ] new_fun . __cached_per_instance_cache__ = cache return new_fun return cache_fun
|
Decorator that adds caching to an instance method.
|
5,510
|
def get_args_tuple ( args , kwargs , arg_names , kwargs_defaults ) : args_list = list ( args ) args_len = len ( args ) all_args_len = len ( arg_names ) try : while args_len < all_args_len : arg_name = arg_names [ args_len ] if arg_name in kwargs_defaults : args_list . append ( kwargs . get ( arg_name , kwargs_defaults [ arg_name ] ) ) else : args_list . append ( kwargs [ arg_name ] ) args_len += 1 except KeyError as e : raise TypeError ( "Missing argument %r" % ( e . args [ 0 ] , ) ) return tuple ( args_list )
|
Generates a cache key from the passed-in arguments.
|
5,511
|
def get_kwargs_defaults ( argspec ) : arg_names = tuple ( argspec . args ) defaults = argspec . defaults or ( ) num_args = len ( argspec . args ) - len ( defaults ) kwargs_defaults = { } for i , default_value in enumerate ( defaults ) : kwargs_defaults [ arg_names [ num_args + i ] ] = default_value if getattr ( argspec , "kwonlydefaults" , None ) : kwargs_defaults . update ( argspec . kwonlydefaults ) return kwargs_defaults
|
Computes a kwargs_defaults dictionary for use by get_args_tuple given an argspec.
|
5,512
|
def memoize ( fun ) : argspec = inspect2 . getfullargspec ( fun ) arg_names = argspec . args + argspec . kwonlyargs kwargs_defaults = get_kwargs_defaults ( argspec ) def cache_key ( args , kwargs ) : return get_args_tuple ( args , kwargs , arg_names , kwargs_defaults ) @ functools . wraps ( fun ) def new_fun ( * args , ** kwargs ) : k = cache_key ( args , kwargs ) if k not in new_fun . __cache : new_fun . __cache [ k ] = fun ( * args , ** kwargs ) return new_fun . __cache [ k ] def clear_cache ( ) : new_fun . __cache . clear ( ) new_fun . __cache = { } new_fun . clear_cache = clear_cache return new_fun
|
Memoizes return values of the decorated function.
|
5,513
|
def memoize_with_ttl ( ttl_secs = 60 * 60 * 24 ) : error_msg = ( "Incorrect usage of qcore.caching.memoize_with_ttl: " "ttl_secs must be a positive integer." ) assert_is_instance ( ttl_secs , six . integer_types , error_msg ) assert_gt ( ttl_secs , 0 , error_msg ) def cache_fun ( fun ) : argspec = inspect2 . getfullargspec ( fun ) arg_names = argspec . args + argspec . kwonlyargs kwargs_defaults = get_kwargs_defaults ( argspec ) def cache_key ( args , kwargs ) : return repr ( get_args_tuple ( args , kwargs , arg_names , kwargs_defaults ) ) @ functools . wraps ( fun ) def new_fun ( * args , ** kwargs ) : k = cache_key ( args , kwargs ) current_time = int ( time . time ( ) ) if k not in new_fun . __cache or k not in new_fun . __cache_times : new_fun . __cache [ k ] = fun ( * args , ** kwargs ) new_fun . __cache_times [ k ] = current_time return new_fun . __cache [ k ] cache_time = new_fun . __cache_times [ k ] if current_time - cache_time > ttl_secs : new_fun . __cache [ k ] = fun ( * args , ** kwargs ) new_fun . __cache_times [ k ] = current_time return new_fun . __cache [ k ] def clear_cache ( ) : new_fun . __cache . clear ( ) new_fun . __cache_times . clear ( ) def dirty ( * args , ** kwargs ) : k = cache_key ( args , kwargs ) new_fun . __cache . pop ( k , None ) new_fun . __cache_times . pop ( k , None ) new_fun . __cache = { } new_fun . __cache_times = { } new_fun . clear_cache = clear_cache new_fun . dirty = dirty return new_fun return cache_fun
|
Memoizes return values of the decorated function for a given time-to-live.
|
5,514
|
def get_value ( self ) : if self . value is not_computed : self . value = self . value_provider ( ) if self . value is not_computed : return None return self . value
|
Returns the value of the constant.
|
5,515
|
def compute ( self ) : self . value = self . value_provider ( ) if self . value is not_computed : return None else : return self . value
|
Computes the value. Does not look at the cache.
|
5,516
|
def get ( self , key , default = miss ) : if key not in self . _dict : return default return self [ key ]
|
Return the value for given key if it exists.
|
5,517
|
def clear ( self , omit_item_evicted = False ) : if not omit_item_evicted : items = self . _dict . items ( ) for key , value in items : self . _evict_item ( key , value ) self . _dict . clear ( )
|
Empty the cache and optionally invoke item_evicted callback.
|
5,518
|
def get_permitted_objects_uuids ( cls , user ) : uuids = filter_queryset_for_user ( cls . objects . all ( ) , user ) . values_list ( 'uuid' , flat = True ) key = core_utils . camel_case_to_underscore ( cls . __name__ ) + '_uuid' return { key : uuids }
|
Return query dictionary to search objects available to user.
|
5,519
|
def from_buffer ( self , buf ) : with self . lock : try : if isinstance ( buf , str ) and str != bytes : buf = buf . encode ( 'utf-8' , errors = 'replace' ) return maybe_decode ( magic_buffer ( self . cookie , buf ) ) except MagicException as e : return self . _handle509Bug ( e )
|
Identify the contents of buf
|
5,520
|
def start ( self , timeout = None , root_object = None ) : if self . _is_running : return if timeout : self . _timeout = timeout self . _start_time = int ( time ( ) ) pushcenter_logger . debug ( "[NURESTPushCenter] Starting push center on url %s ..." % self . url ) self . _is_running = True self . __root_object = root_object from . nurest_session import NURESTSession current_session = NURESTSession . get_current_session ( ) args_session = { 'session' : current_session } self . _thread = StoppableThread ( target = self . _listen , name = 'push-center' , kwargs = args_session ) self . _thread . daemon = True self . _thread . start ( )
|
Starts listening to events.
|
5,521
|
def stop ( self ) : if not self . _is_running : return pushcenter_logger . debug ( "[NURESTPushCenter] Stopping..." ) self . _thread . stop ( ) self . _thread . join ( ) self . _is_running = False self . _current_connection = None self . _start_time = None self . _timeout = None
|
Stops listening for events.
|
5,522
|
def wait_until_exit ( self ) : if self . _timeout is None : raise Exception ( "Thread will never exit. Use stop or specify timeout when starting it!" ) self . _thread . join ( ) self . stop ( )
|
Wait until the thread exits
|
5,523
|
def _did_receive_event ( self , connection ) : if not self . _is_running : return if connection . has_timeouted : return response = connection . response data = None if response . status_code != 200 : pushcenter_logger . error ( "[NURESTPushCenter]: Connection failure [%s] %s" % ( response . status_code , response . errors ) ) else : data = response . data if len ( self . _delegate_methods ) > 0 : for m in self . _delegate_methods : try : m ( data ) except Exception as exc : pushcenter_logger . error ( "[NURESTPushCenter] Delegate method %s failed:\n%s" % ( m , exc ) ) elif data : events = data [ 'events' ] self . nb_events_received += len ( events ) self . nb_push_received += 1 pushcenter_logger . info ( "[NURESTPushCenter] Received Push #%s (total=%s, latest=%s)\n%s" % ( self . nb_push_received , self . nb_events_received , len ( events ) , json . dumps ( events , indent = 4 ) ) ) self . _last_events . extend ( events ) if self . _is_running : uuid = None if data and 'uuid' in data : uuid = data [ 'uuid' ] self . _listen ( uuid )
|
Receive an event from connection
|
5,524
|
def _listen ( self , uuid = None , session = None ) : if self . url is None : raise Exception ( "NURESTPushCenter needs to have a valid URL. please use setURL: before starting it." ) events_url = "%s/events" % self . url if uuid : events_url = "%s?uuid=%s" % ( events_url , uuid ) request = NURESTRequest ( method = 'GET' , url = events_url ) connection = NURESTConnection ( request = request , async = True , callback = self . _did_receive_event , root_object = self . _root_object ) if self . _timeout : if int ( time ( ) ) - self . _start_time >= self . _timeout : pushcenter_logger . debug ( "[NURESTPushCenter] Timeout (timeout=%ss)." % self . _timeout ) return else : connection . timeout = self . _timeout pushcenter_logger . info ( 'Bambou Sending >>>>>>\n%s %s' % ( request . method , request . url ) ) connection . start ( )
|
Listen to a connection UUID
|
5,525
|
def add_delegate ( self , callback ) : if callback in self . _delegate_methods : return self . _delegate_methods . append ( callback )
|
Registers a new delegate callback
|
5,526
|
def remove_delegate ( self , callback ) : if callback not in self . _delegate_methods : return self . _delegate_methods . remove ( callback )
|
Unregisters a registered delegate function or a method.
|
5,527
|
def _read_config ( cls ) : cls . _config_parser = configparser . ConfigParser ( ) cls . _config_parser . read ( cls . _default_attribute_values_configuration_file_path )
|
Reads the configuration file if any
|
5,528
|
def get_default_attribute_value ( cls , object_class , property_name , attr_type = str ) : if not cls . _default_attribute_values_configuration_file_path : return None if not cls . _config_parser : cls . _read_config ( ) class_name = object_class . __name__ if not cls . _config_parser . has_section ( class_name ) : return None if not cls . _config_parser . has_option ( class_name , property_name ) : return None if sys . version_info < ( 3 , ) : integer_types = ( int , long , ) else : integer_types = ( int , ) if isinstance ( attr_type , integer_types ) : return cls . _config_parser . getint ( class_name , property_name ) elif attr_type is bool : return cls . _config_parser . getboolean ( class_name , property_name ) else : return cls . _config_parser . get ( class_name , property_name )
|
Gets the default value of a given property for a given object.
|
5,529
|
def filter ( self , request , queryset , view ) : summary_queryset = queryset filtered_querysets = [ ] for queryset in summary_queryset . querysets : filter_class = self . _get_filter ( queryset ) queryset = filter_class ( request . query_params , queryset = queryset ) . qs filtered_querysets . append ( queryset ) summary_queryset . querysets = filtered_querysets return summary_queryset
|
Filter each resource separately using its own filter
|
5,530
|
def TypeFactory ( type_ ) : if isinstance ( type_ , type ) and issubclass ( type_ , Type ) : return type_ for x in __types__ : if x . represents ( type_ ) : return x . get ( type_ ) raise UnknownType ( type_ )
|
This function creates a standard form type from a simplified form.
|
5,531
|
def dummy_image ( filetype = 'gif' ) : GIF = 'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7' tmp_file = tempfile . NamedTemporaryFile ( suffix = '.%s' % filetype ) tmp_file . write ( base64 . b64decode ( GIF ) ) return open ( tmp_file . name , 'rb' )
|
Generate empty image in temporary file for testing
|
5,532
|
def utime_delta ( days = 0 , hours = 0 , minutes = 0 , seconds = 0 ) : return ( days * DAY ) + ( hours * HOUR ) + ( minutes * MINUTE ) + ( seconds * SECOND )
|
Gets time delta in microseconds.
|
5,533
|
def execute_with_timeout ( fn , args = None , kwargs = None , timeout = None , fail_if_no_timer = True , signal_type = _default_signal_type , timer_type = _default_timer_type , timeout_exception_cls = TimeoutError , ) : if args is None : args = empty_tuple if kwargs is None : kwargs = empty_dict if timeout is None or timeout == 0 or signal_type is None or timer_type is None : return fn ( * args , ** kwargs ) def signal_handler ( signum , frame ) : raise timeout_exception_cls ( inspection . get_function_call_str ( fn , args , kwargs ) ) old_signal_handler = none timer_is_set = False try : try : old_signal_handler = signal . signal ( signal_type , signal_handler ) signal . setitimer ( timer_type , timeout ) timer_is_set = True except ValueError : if fail_if_no_timer : raise NotSupportedError ( "Timer is not available; the code is probably invoked from outside the main " "thread." ) return fn ( * args , ** kwargs ) finally : if timer_is_set : signal . setitimer ( timer_type , 0 ) if old_signal_handler is not none : signal . signal ( signal_type , old_signal_handler )
|
Executes specified function with timeout. Uses SIGALRM to interrupt it.
|
5,534
|
def get_original_fn ( fn ) : fn_type = type ( fn ) if fn_type is classmethod or fn_type is staticmethod : return get_original_fn ( fn . __func__ ) if hasattr ( fn , "original_fn" ) : return fn . original_fn if hasattr ( fn , "fn" ) : fn . original_fn = get_original_fn ( fn . fn ) return fn . original_fn return fn
|
Gets the very original function of a decorated one.
|
5,535
|
def get_full_name ( src ) : if hasattr ( src , "_full_name_" ) : return src . _full_name_ if hasattr ( src , "is_decorator" ) : if hasattr ( src , "decorator" ) : _full_name_ = str ( src . decorator ) else : _full_name_ = str ( src ) try : src . _full_name_ = _full_name_ except AttributeError : pass except TypeError : pass elif hasattr ( src , "im_class" ) : cls = src . im_class _full_name_ = get_full_name ( cls ) + "." + src . __name__ elif hasattr ( src , "__module__" ) and hasattr ( src , "__name__" ) : _full_name_ = ( ( "<unknown module>" if src . __module__ is None else src . __module__ ) + "." + src . __name__ ) try : src . _full_name_ = _full_name_ except AttributeError : pass except TypeError : pass else : _full_name_ = str ( get_original_fn ( src ) ) return _full_name_
|
Gets full class or function name.
|
5,536
|
def getargspec ( func ) : if inspect . ismethod ( func ) : func = func . __func__ try : code = func . __code__ except AttributeError : raise TypeError ( "{!r} is not a Python function" . format ( func ) ) if hasattr ( code , "co_kwonlyargcount" ) and code . co_kwonlyargcount > 0 : raise ValueError ( "keyword-only arguments are not supported by getargspec()" ) args , varargs , varkw = inspect . getargs ( code ) return inspect . ArgSpec ( args , varargs , varkw , func . __defaults__ )
|
Variation of inspect.getargspec that works for more functions.
|
5,537
|
def is_cython_or_generator ( fn ) : if hasattr ( fn , "__func__" ) : fn = fn . __func__ if inspect . isgeneratorfunction ( fn ) : return True name = type ( fn ) . __name__ return ( name == "generator" or name == "method_descriptor" or name == "cython_function_or_method" or name == "builtin_function_or_method" )
|
Returns whether this function is either a generator function or a Cythonized function.
|
5,538
|
def is_classmethod ( fn ) : if not inspect . ismethod ( fn ) : return False if not hasattr ( fn , "__self__" ) : return False im_self = fn . __self__ if im_self is None : return False return isinstance ( im_self , six . class_types )
|
Returns whether f is a classmethod.
|
5,539
|
def wraps ( wrapped , assigned = functools . WRAPPER_ASSIGNMENTS , updated = functools . WRAPPER_UPDATES ) : if not is_cython_function ( wrapped ) : return functools . wraps ( wrapped , assigned , updated ) else : return lambda wrapper : wrapper
|
Cython-compatible functools.wraps implementation.
|
5,540
|
def DictOf ( name , * fields ) : ret = type ( name , ( Dict , ) , { 'fields' : [ ] } ) ret . add_fields ( * fields ) return ret
|
This function creates a dict type with the specified name and fields.
|
5,541
|
def ListOf ( element_type , element_none_value = None ) : from pyws . functions . args . types import TypeFactory element_type = TypeFactory ( element_type ) return type ( element_type . __name__ + 'List' , ( List , ) , { 'element_type' : element_type , 'element_none_value' : element_none_value } )
|
This function creates a list type with element type element_type and an empty element value element_none_value.
|
5,542
|
def get_actions ( self , request , view ) : metadata = OrderedDict ( ) actions = self . get_resource_actions ( view ) resource = view . get_object ( ) for action_name , action in actions . items ( ) : if action_name == 'update' : view . request = clone_request ( request , 'PUT' ) else : view . action = action_name data = ActionSerializer ( action , action_name , request , view , resource ) metadata [ action_name ] = data . serialize ( ) if not metadata [ action_name ] [ 'enabled' ] : continue fields = self . get_action_fields ( view , action_name , resource ) if not fields : metadata [ action_name ] [ 'type' ] = 'button' else : metadata [ action_name ] [ 'type' ] = 'form' metadata [ action_name ] [ 'fields' ] = fields view . action = None view . request = request return metadata
|
Return metadata for resource-specific actions such as start, stop, unlink
|
5,543
|
def get_action_fields ( self , view , action_name , resource ) : serializer = view . get_serializer ( resource ) fields = OrderedDict ( ) if not isinstance ( serializer , view . serializer_class ) or action_name == 'update' : fields = self . get_fields ( serializer . fields ) return fields
|
Get fields exposed by action's serializer
|
5,544
|
def get_fields ( self , serializer_fields ) : fields = OrderedDict ( ) for field_name , field in serializer_fields . items ( ) : if field_name == 'tags' : continue info = self . get_field_info ( field , field_name ) if info : fields [ field_name ] = info return fields
|
Get fields metadata skipping empty fields
|
5,545
|
def recalculate_estimate ( recalculate_total = False ) : CostTrackingRegister . autodiscover ( ) for resource_model in CostTrackingRegister . registered_resources : for resource in resource_model . objects . all ( ) : _update_resource_consumed ( resource , recalculate_total = recalculate_total ) ancestors_models = [ m for m in models . PriceEstimate . get_estimated_models ( ) if not issubclass ( m , structure_models . ResourceMixin ) ] for model in ancestors_models : for ancestor in model . objects . all ( ) : _update_ancestor_consumed ( ancestor )
|
Recalculate price of consumables that were used by resource until now.
|
5,546
|
def get_data ( self , path ) : return LineCacheNotebookDecoder ( code = self . code , raw = self . raw , markdown = self . markdown ) . decode ( self . decode ( ) , self . path )
|
Needs to return the string source for the module.
|
5,547
|
def loader ( self ) : loader = super ( ) . loader if self . _lazy and ( sys . version_info . major , sys . version_info . minor ) != ( 3 , 4 ) : loader = LazyLoader . factory ( loader ) return partial ( loader , ** { object . lstrip ( "_" ) : getattr ( self , object ) for object in self . __slots__ } )
|
Create a lazy loader source file loader.
|
5,548
|
def get_urls ( self ) : urls = [ ] for action in self . get_extra_actions ( ) : regex = r'^{}/$' . format ( self . _get_action_href ( action ) ) view = self . admin_site . admin_view ( action ) urls . append ( url ( regex , view ) ) return urls + super ( ExtraActionsMixin , self ) . get_urls ( )
|
Inject extra action URLs.
|
5,549
|
def changelist_view ( self , request , extra_context = None ) : links = [ ] for action in self . get_extra_actions ( ) : links . append ( { 'label' : self . _get_action_label ( action ) , 'href' : self . _get_action_href ( action ) } ) extra_context = extra_context or { } extra_context [ 'extra_links' ] = links return super ( ExtraActionsMixin , self ) . changelist_view ( request , extra_context = extra_context , )
|
Inject extra links into template context.
|
5,550
|
def start ( self ) : if NURESTSession . session_stack : bambou_logger . critical ( "Starting a session inside a with statement is not supported." ) raise Exception ( "Starting a session inside a with statement is not supported." ) NURESTSession . current_session = self self . _authenticate ( ) return self
|
Starts the session.
|
5,551
|
def init_quotas ( sender , instance , created = False , ** kwargs ) : if not created : return for field in sender . get_quotas_fields ( ) : try : field . get_or_create_quota ( scope = instance ) except CreationConditionFailedQuotaError : pass
|
Initialize quotas for a new instance
|
5,552
|
def handle_aggregated_quotas ( sender , instance , ** kwargs ) : quota = instance if quota . scope is None : return quota_field = quota . get_field ( ) if isinstance ( quota_field , fields . UsageAggregatorQuotaField ) or quota_field is None : return signal = kwargs [ 'signal' ] for aggregator_quota in quota_field . get_aggregator_quotas ( quota ) : field = aggregator_quota . get_field ( ) if signal == signals . post_save : field . post_child_quota_save ( aggregator_quota . scope , child_quota = quota , created = kwargs . get ( 'created' ) ) elif signal == signals . pre_delete : field . pre_child_quota_delete ( aggregator_quota . scope , child_quota = quota )
|
Call update methods of aggregated quota fields
|
5,553
|
def get_settings ( self , link ) : return reverse ( 'servicesettings-detail' , kwargs = { 'uuid' : link . service . settings . uuid } , request = self . context [ 'request' ] )
|
URL of service settings
|
5,554
|
def get_url ( self , link ) : view_name = SupportedServices . get_detail_view_for_model ( link . service ) return reverse ( view_name , kwargs = { 'uuid' : link . service . uuid . hex } , request = self . context [ 'request' ] )
|
URL of service
|
5,555
|
def get_resources_count ( self , link ) : total = 0 for model in SupportedServices . get_service_resources ( link . service ) : query = { model . Permissions . project_path . split ( '__' ) [ 0 ] : link } total += model . objects . filter ( ** query ) . count ( ) return total
|
Count total number of all resources connected to link
|
5,556
|
def drop_columns ( self , max_na_values : int = None , max_unique_values : int = None ) : step = { } if max_na_values is not None : step = { 'data-set' : self . iid , 'operation' : 'drop-na' , 'expression' : '{"max_na_values":%s, "axis": 1}' % max_na_values } if max_unique_values is not None : step = { 'data-set' : self . iid , 'operation' : 'drop-unique' , 'expression' : '{"max_unique_values":%s}' % max_unique_values } self . attr_update ( attr = 'steps' , value = [ step ] )
|
When max_na_values is given, remove columns where the proportion of total NA values is more than the max_na_values threshold.
|
5,557
|
def get_sorted_dependencies ( service_model ) : app_models = list ( service_model . _meta . app_config . get_models ( ) ) dependencies = { model : set ( ) for model in app_models } relations = ( relation for model in app_models for relation in model . _meta . related_objects if relation . on_delete in ( models . PROTECT , models . CASCADE ) ) for rel in relations : dependencies [ rel . model ] . add ( rel . related_model ) return stable_topological_sort ( app_models , dependencies )
|
Returns list of application models in topological order. It is used in order to correctly delete dependent resources.
|
5,558
|
def update_pulled_fields ( instance , imported_instance , fields ) : modified = False for field in fields : pulled_value = getattr ( imported_instance , field ) current_value = getattr ( instance , field ) if current_value != pulled_value : setattr ( instance , field , pulled_value ) logger . info ( "%s's with PK %s %s field updated from value '%s' to value '%s'" , instance . __class__ . __name__ , instance . pk , field , current_value , pulled_value ) modified = True error_message = getattr ( imported_instance , 'error_message' , '' ) or getattr ( instance , 'error_message' , '' ) if error_message and instance . error_message != error_message : instance . error_message = imported_instance . error_message modified = True if modified : instance . save ( )
|
Update instance fields based on data imported from the backend. Save changes to the DB only if one or more fields were changed.
|
5,559
|
def handle_resource_update_success ( resource ) : update_fields = [ ] if resource . state == resource . States . ERRED : resource . recover ( ) update_fields . append ( 'state' ) if resource . state in ( resource . States . UPDATING , resource . States . CREATING ) : resource . set_ok ( ) update_fields . append ( 'state' ) if resource . error_message : resource . error_message = '' update_fields . append ( 'error_message' ) if update_fields : resource . save ( update_fields = update_fields ) logger . warning ( '%s %s (PK: %s) was successfully updated.' % ( resource . __class__ . __name__ , resource , resource . pk ) )
|
Recover resource if its state is ERRED and clear error message.
|
5,560
|
def set_header ( self , header , value ) : if not isinstance ( value , ( str , bytes ) ) : raise TypeError ( "header values must be str or bytes, but %s value has type %s" % ( header , type ( value ) ) ) self . _headers [ header ] = value
|
Set header value
|
5,561
|
def set_instance_erred ( self , instance , error_message ) : instance . set_erred ( ) instance . error_message = error_message instance . save ( update_fields = [ 'state' , 'error_message' ] )
|
Mark instance as erred and save error message
|
5,562
|
def map_language ( language , dash3 = True ) : if dash3 : from iso639 import languages else : from pycountry import languages if '_' in language : language = language . split ( '_' ) [ 0 ] if len ( language ) == 2 : try : return languages . get ( alpha2 = language . lower ( ) ) except KeyError : pass elif len ( language ) == 3 : if dash3 : try : return languages . get ( part3 = language . lower ( ) ) except KeyError : pass try : return languages . get ( terminology = language . lower ( ) ) except KeyError : pass try : return languages . get ( bibliographic = language . lower ( ) ) except KeyError : pass else : try : return languages . get ( name = language . title ( ) ) except KeyError : pass if dash3 : try : return languages . get ( inverted = language . title ( ) ) except KeyError : pass for l in re . split ( '[,.;: ]+' , language ) : try : return languages . get ( name = l . title ( ) ) except KeyError : pass
|
Use ISO 639-3 ??
|
5,563
|
def get_task_signature ( cls , instance , serialized_instance , ** kwargs ) : cleanup_tasks = [ ProjectResourceCleanupTask ( ) . si ( core_utils . serialize_class ( executor_cls ) , core_utils . serialize_class ( model_cls ) , serialized_instance , ) for ( model_cls , executor_cls ) in cls . executors ] if not cleanup_tasks : return core_tasks . EmptyTask ( ) return chain ( cleanup_tasks )
|
Delete each resource using a specific executor. Convert executors to tasks and combine all deletion tasks into a single sequential task.
|
5,564
|
def format_time_and_value_to_segment_list ( time_and_value_list , segments_count , start_timestamp , end_timestamp , average = False ) : segment_list = [ ] time_step = ( end_timestamp - start_timestamp ) / segments_count for i in range ( segments_count ) : segment_start_timestamp = start_timestamp + time_step * i segment_end_timestamp = segment_start_timestamp + time_step value_list = [ value for time , value in time_and_value_list if time >= segment_start_timestamp and time < segment_end_timestamp ] segment_value = sum ( value_list ) if average and len ( value_list ) != 0 : segment_value /= len ( value_list ) segment_list . append ( { 'from' : segment_start_timestamp , 'to' : segment_end_timestamp , 'value' : segment_value , } ) return segment_list
|
Format time_and_value_list to time segments
|
5,565
|
def serialize_instance ( instance ) : model_name = force_text ( instance . _meta ) return '{}:{}' . format ( model_name , instance . pk )
|
Serialize Django model instance
|
5,566
|
def deserialize_instance ( serialized_instance ) : model_name , pk = serialized_instance . split ( ':' ) model = apps . get_model ( model_name ) return model . _default_manager . get ( pk = pk )
|
Deserialize Django model instance
|
5,567
|
def deserialize_class ( serilalized_cls ) : module_name , cls_name = serilalized_cls . split ( ':' ) module = importlib . import_module ( module_name ) return getattr ( module , cls_name )
|
Deserialize Python class
|
5,568
|
def instance_from_url ( url , user = None ) : from waldur_core . structure . managers import filter_queryset_for_user url = clear_url ( url ) match = resolve ( url ) model = get_model_from_resolve_match ( match ) queryset = model . objects . all ( ) if user is not None : queryset = filter_queryset_for_user ( model . objects . all ( ) , user ) return queryset . get ( ** match . kwargs )
|
Restore instance from URL
|
5,569
|
def commit ( self ) : if not self . _parent . _is_active : raise exc . InvalidRequestError ( "This transaction is inactive" ) yield from self . _do_commit ( ) self . _is_active = False
|
Commit this transaction.
|
5,570
|
def _get_app_config ( self , app_name ) : matches = [ app_config for app_config in apps . get_app_configs ( ) if app_config . name == app_name ] if not matches : return return matches [ 0 ]
|
Returns an app config for the given name, not by label.
|
5,571
|
def _get_app_version ( self , app_config ) : base_name = app_config . __module__ . split ( '.' ) [ 0 ] module = __import__ ( base_name ) return getattr ( module , '__version__' , 'N/A' )
|
Some plugins ship multiple applications and extensions. However, all of them have the same version because they are released together. That's why only the top-level module is used to fetch version information.
|
5,572
|
def _get_erred_shared_settings_module ( self ) : result_module = modules . LinkList ( title = _ ( 'Shared provider settings in erred state' ) ) result_module . template = 'admin/dashboard/erred_link_list.html' erred_state = structure_models . SharedServiceSettings . States . ERRED queryset = structure_models . SharedServiceSettings . objects settings_in_erred_state = queryset . filter ( state = erred_state ) . count ( ) if settings_in_erred_state : result_module . title = '%s (%s)' % ( result_module . title , settings_in_erred_state ) for service_settings in queryset . filter ( state = erred_state ) . iterator ( ) : module_child = self . _get_link_to_instance ( service_settings ) module_child [ 'error' ] = service_settings . error_message result_module . children . append ( module_child ) else : result_module . pre_content = _ ( 'Nothing found.' ) return result_module
|
Returns a LinkList-based module which contains links to shared service settings instances in ERRED state.
|
5,573
|
def _get_erred_resources_module ( self ) : result_module = modules . LinkList ( title = _ ( 'Resources in erred state' ) ) erred_state = structure_models . NewResource . States . ERRED children = [ ] resource_models = SupportedServices . get_resource_models ( ) resources_in_erred_state_overall = 0 for resource_type , resource_model in resource_models . items ( ) : queryset = resource_model . objects . filter ( service_project_link__service__settings__shared = True ) erred_amount = queryset . filter ( state = erred_state ) . count ( ) if erred_amount : resources_in_erred_state_overall = resources_in_erred_state_overall + erred_amount link = self . _get_erred_resource_link ( resource_model , erred_amount , erred_state ) children . append ( link ) if resources_in_erred_state_overall : result_module . title = '%s (%s)' % ( result_module . title , resources_in_erred_state_overall ) result_module . children = children else : result_module . pre_content = _ ( 'Nothing found.' ) return result_module
|
Returns a list of links to resources which are in ERRED state and linked to shared service settings.
|
5,574
|
def fetcher_with_object ( cls , parent_object , relationship = "child" ) : fetcher = cls ( ) fetcher . parent_object = parent_object fetcher . relationship = relationship rest_name = cls . managed_object_rest_name ( ) parent_object . register_fetcher ( fetcher , rest_name ) return fetcher
|
Register the fetcher for a served object.
|
5,575
|
def _prepare_headers ( self , request , filter = None , order_by = None , group_by = [ ] , page = None , page_size = None ) : if filter : request . set_header ( 'X-Nuage-Filter' , filter ) if order_by : request . set_header ( 'X-Nuage-OrderBy' , order_by ) if page is not None : request . set_header ( 'X-Nuage-Page' , str ( page ) ) if page_size : request . set_header ( 'X-Nuage-PageSize' , str ( page_size ) ) if len ( group_by ) > 0 : header = ", " . join ( group_by ) request . set_header ( 'X-Nuage-GroupBy' , 'true' ) request . set_header ( 'X-Nuage-Attributes' , header )
|
Prepare headers for the given request
|
5,576
|
def fetch ( self , filter = None , order_by = None , group_by = [ ] , page = None , page_size = None , query_parameters = None , commit = True , async = False , callback = None ) : request = NURESTRequest ( method = HTTP_METHOD_GET , url = self . _prepare_url ( ) , params = query_parameters ) self . _prepare_headers ( request = request , filter = filter , order_by = order_by , group_by = group_by , page = page , page_size = page_size ) if async : return self . parent_object . send_request ( request = request , async = async , local_callback = self . _did_fetch , remote_callback = callback , user_info = { 'commit' : commit } ) connection = self . parent_object . send_request ( request = request , user_info = { 'commit' : commit } ) return self . _did_fetch ( connection = connection )
|
Fetch objects according to given filter and page.
|
5,577
|
def _did_fetch ( self , connection ) : self . current_connection = connection response = connection . response should_commit = 'commit' not in connection . user_info or connection . user_info [ 'commit' ] if connection . response . status_code >= 400 and BambouConfig . _should_raise_bambou_http_error : raise BambouHTTPError ( connection = connection ) if response . status_code != 200 : if should_commit : self . current_total_count = 0 self . current_page = 0 self . current_ordered_by = '' return self . _send_content ( content = None , connection = connection ) results = response . data fetched_objects = list ( ) current_ids = list ( ) if should_commit : if 'X-Nuage-Count' in response . headers and response . headers [ 'X-Nuage-Count' ] : self . current_total_count = int ( response . headers [ 'X-Nuage-Count' ] ) if 'X-Nuage-Page' in response . headers and response . headers [ 'X-Nuage-Page' ] : self . current_page = int ( response . headers [ 'X-Nuage-Page' ] ) if 'X-Nuage-OrderBy' in response . headers and response . headers [ 'X-Nuage-OrderBy' ] : self . current_ordered_by = response . headers [ 'X-Nuage-OrderBy' ] if results : for result in results : nurest_object = self . new ( ) nurest_object . from_dict ( result ) nurest_object . parent = self . parent_object fetched_objects . append ( nurest_object ) if not should_commit : continue current_ids . append ( nurest_object . id ) if nurest_object in self : idx = self . index ( nurest_object ) current_object = self [ idx ] current_object . from_dict ( nurest_object . to_dict ( ) ) else : self . append ( nurest_object ) if should_commit : for obj in self : if obj . id not in current_ids : self . remove ( obj ) return self . _send_content ( content = fetched_objects , connection = connection )
|
Called when fetching objects is done
|
5,578
|
def get ( self , filter = None , order_by = None , group_by = [ ] , page = None , page_size = None , query_parameters = None , commit = True , async = False , callback = None ) : return self . fetch ( filter = filter , order_by = order_by , group_by = group_by , page = page , page_size = page_size , query_parameters = query_parameters , commit = commit ) [ 2 ]
|
Fetch objects and directly return them
|
5,579
|
def get_first ( self , filter = None , order_by = None , group_by = [ ] , query_parameters = None , commit = False , async = False , callback = None ) : objects = self . get ( filter = filter , order_by = order_by , group_by = group_by , page = 0 , page_size = 1 , query_parameters = query_parameters , commit = commit ) return objects [ 0 ] if len ( objects ) else None
|
Fetch objects and directly return the first one
|
5,580
|
def _did_count ( self , connection ) : self . current_connection = connection response = connection . response count = 0 callback = None if 'X-Nuage-Count' in response . headers : count = int ( response . headers [ 'X-Nuage-Count' ] ) if 'remote' in connection . callbacks : callback = connection . callbacks [ 'remote' ] if connection . async : if callback : callback ( self , self . parent_object , count ) self . current_connection . reset ( ) self . current_connection = None else : if connection . response . status_code >= 400 and BambouConfig . _should_raise_bambou_http_error : raise BambouHTTPError ( connection = connection ) return ( self , self . parent_object , count )
|
Called when the count is finished
|
5,581
|
def _send_content ( self , content , connection ) : if connection : if connection . async : callback = connection . callbacks [ 'remote' ] if callback : callback ( self , self . parent_object , content ) self . current_connection . reset ( ) self . current_connection = None else : return ( self , self . parent_object , content )
|
Send a content array from the connection
|
5,582
|
def update_configuration ( self , new_configuration ) : if new_configuration == self . configuration : return False now = timezone . now ( ) if now . month != self . price_estimate . month : raise ConsumptionDetailUpdateError ( 'It is possible to update consumption details only for current month.' ) minutes_from_last_update = self . _get_minutes_from_last_update ( now ) for consumable_item , usage in self . configuration . items ( ) : consumed_after_modification = usage * minutes_from_last_update self . consumed_before_update [ consumable_item ] = ( self . consumed_before_update . get ( consumable_item , 0 ) + consumed_after_modification ) self . configuration = new_configuration self . last_update_time = now self . save ( ) return True
|
Save how many consumables were used and update the current configuration.
|
5,583
|
def _get_minutes_from_last_update ( self , time ) : time_from_last_update = time - self . last_update_time return int ( time_from_last_update . total_seconds ( ) / 60 )
|
How many minutes passed from the last update to the given time
|
5,584
|
def get_for_resource ( resource ) : resource_content_type = ContentType . objects . get_for_model ( resource ) default_items = set ( DefaultPriceListItem . objects . filter ( resource_content_type = resource_content_type ) ) service = resource . service_project_link . service items = set ( PriceListItem . objects . filter ( default_price_list_item__in = default_items , service = service ) . select_related ( 'default_price_list_item' ) ) rewrited_defaults = set ( [ i . default_price_list_item for i in items ] ) return items | ( default_items - rewrited_defaults )
|
Get list of all price list items that should be used for resource.
|
5,585
|
def get_extensions ( cls ) : assemblies = [ ] for waldur_extension in pkg_resources . iter_entry_points ( 'waldur_extensions' ) : extension_module = waldur_extension . load ( ) if inspect . isclass ( extension_module ) and issubclass ( extension_module , cls ) : if not extension_module . is_assembly ( ) : yield extension_module else : assemblies . append ( extension_module ) for assembly in assemblies : yield assembly
|
Get a list of available extensions
|
5,586
|
def ellipsis ( source , max_length ) : if max_length == 0 or len ( source ) <= max_length : return source return source [ : max ( 0 , max_length - 3 ) ] + "..."
|
Truncates a string to be at most max_length long.
|
5,587
|
def dict_to_object ( source ) : target = inspectable_class . InspectableClass ( ) for k , v in source . items ( ) : setattr ( target , k , v ) return target
|
Returns an object with the key-value pairs in source as attributes.
|
5,588
|
def copy_public_attrs ( source_obj , dest_obj ) : for name , value in inspect . getmembers ( source_obj ) : if not any ( name . startswith ( x ) for x in [ "_" , "func" , "im" ] ) : setattr ( dest_obj , name , value )
|
Shallow copies all public attributes from source_obj to dest_obj.
|
5,589
|
def object_from_string ( name ) : if six . PY3 : if not isinstance ( name , str ) : raise TypeError ( "name must be str, not %r" % type ( name ) ) else : if isinstance ( name , unicode ) : name = name . encode ( "ascii" ) if not isinstance ( name , ( str , unicode ) ) : raise TypeError ( "name must be bytes or unicode, got %r" % type ( name ) ) pos = name . rfind ( "." ) if pos < 0 : raise ValueError ( "Invalid function or class name %s" % name ) module_name = name [ : pos ] func_name = name [ pos + 1 : ] try : mod = __import__ ( module_name , fromlist = [ func_name ] , level = 0 ) except ImportError : parts = name . split ( "." ) mod = __import__ ( parts [ 0 ] , level = 0 ) for i in range ( 1 , len ( parts ) ) : mod = getattr ( mod , parts [ i ] ) return mod else : return getattr ( mod , func_name )
|
Creates a Python class or function from its fully qualified name.
|
5,590
|
def catchable_exceptions ( exceptions ) : if isinstance ( exceptions , type ) and issubclass ( exceptions , BaseException ) : return True if ( isinstance ( exceptions , tuple ) and exceptions and all ( issubclass ( it , BaseException ) for it in exceptions ) ) : return True return False
|
Returns True if exceptions can be caught in the except clause.
|
5,591
|
def override ( self , value ) : if self . _value is not value : return _ScopedValueOverrideContext ( self , value ) else : return empty_context
|
Temporarily overrides the old value with the new one.
|
5,592
|
def validate_object_action ( self , action_name , obj = None ) : action_method = getattr ( self , action_name ) if not getattr ( action_method , 'detail' , False ) and action_name not in ( 'update' , 'partial_update' , 'destroy' ) : return validators = getattr ( self , action_name + '_validators' , [ ] ) for validator in validators : validator ( obj or self . get_object ( ) )
|
Execute validation for actions that are related to a particular object
|
5,593
|
def row_dict ( self , row ) : d = { } for field_name , index in self . field_map . items ( ) : d [ field_name ] = self . field_value ( row , field_name ) return d
|
Returns dictionary version of row using keys from self.field_map
|
5,594
|
def _get_content_type_queryset ( models_list ) : content_type_ids = { c . id for c in ContentType . objects . get_for_models ( * models_list ) . values ( ) } return ContentType . objects . filter ( id__in = content_type_ids )
|
Get list of services' content types
|
5,595
|
def init_registered ( self , request ) : created_items = models . DefaultPriceListItem . init_from_registered_resources ( ) if created_items : message = ungettext ( _ ( 'Price item was created: %s.' ) % created_items [ 0 ] . name , _ ( 'Price items were created: %s.' ) % ', ' . join ( item . name for item in created_items ) , len ( created_items ) ) self . message_user ( request , message ) else : self . message_user ( request , _ ( 'Price items for all registered resources have been updated.' ) ) return redirect ( reverse ( 'admin:cost_tracking_defaultpricelistitem_changelist' ) )
|
Create default price list items for each registered resource.
|
5,596
|
def reinit_configurations ( self , request ) : now = timezone . now ( ) changed_resources = [ ] for resource_model in CostTrackingRegister . registered_resources : for resource in resource_model . objects . all ( ) : try : pe = models . PriceEstimate . objects . get ( scope = resource , month = now . month , year = now . year ) except models . PriceEstimate . DoesNotExist : changed_resources . append ( resource ) else : new_configuration = CostTrackingRegister . get_configuration ( resource ) if new_configuration != pe . consumption_details . configuration : changed_resources . append ( resource ) for resource in changed_resources : models . PriceEstimate . update_resource_estimate ( resource , CostTrackingRegister . get_configuration ( resource ) ) message = _ ( 'Configuration was reinitialized for %(count)s resources' ) % { 'count' : len ( changed_resources ) } self . message_user ( request , message ) return redirect ( reverse ( 'admin:cost_tracking_defaultpricelistitem_changelist' ) )
|
Re-initialize configuration for resource if it has been changed.
|
5,597
|
def encrypt ( self , message ) : if not isinstance ( message , ( bytes , str ) ) : raise TypeError return hashlib . sha1 ( message . encode ( 'utf-8' ) ) . hexdigest ( )
|
Encrypt the given message
|
5,598
|
def get_version_fields ( self ) : options = reversion . _get_options ( self ) return options . fields or [ f . name for f in self . _meta . fields if f not in options . exclude ]
|
Get fields that are tracked in object history versions.
|
5,599
|
def _is_version_duplicate ( self ) : if self . id is None : return False try : latest_version = Version . objects . get_for_object ( self ) . latest ( 'revision__date_created' ) except Version . DoesNotExist : return False latest_version_object = latest_version . _object_version . object fields = self . get_version_fields ( ) return all ( [ getattr ( self , f ) == getattr ( latest_version_object , f ) for f in fields ] )
|
Define whether a new version should be created for the object or not.
|