repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
quora/qcore
qcore/caching.py
lru_cache
def lru_cache(maxsize=128, key_fn=None):
    """Decorator adding an LRU cache of size maxsize to the decorated function.

    maxsize is the number of distinct keys the cache can accommodate.
    key_fn builds the cache key from the call arguments; by default a
    tuple of args and kwargs is used. With the default key function
    there is no reason not to use functools.lru_cache directly.

    Possible use cases:
    - Your cache key is very large, so you don't want to keep the whole
      key in memory.
    - The function takes some arguments that don't affect the result.
    """

    def decorator(fn):
        cache = LRUCache(maxsize)
        argspec = inspect2.getfullargspec(fn)
        # Drop the first positional name (self) from the key names.
        arg_names = argspec.args[1:] + argspec.kwonlyargs
        kwargs_defaults = get_kwargs_defaults(argspec)

        make_key = key_fn
        if make_key is None:

            def make_key(args, kwargs):
                return get_args_tuple(args, kwargs, arg_names, kwargs_defaults)

        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            key = make_key(args, kwargs)
            try:
                return cache[key]
            except KeyError:
                result = fn(*args, **kwargs)
                cache[key] = result
                return result

        wrapper.clear = cache.clear
        return wrapper

    return decorator
python
def lru_cache(maxsize=128, key_fn=None): """Decorator that adds an LRU cache of size maxsize to the decorated function. maxsize is the number of different keys cache can accomodate. key_fn is the function that builds key from args. The default key function creates a tuple out of args and kwargs. If you use the default, there is no reason not to use functools.lru_cache directly. Possible use cases: - Your cache key is very large, so you don't want to keep the whole key in memory. - The function takes some arguments that don't affect the result. """ def decorator(fn): cache = LRUCache(maxsize) argspec = inspect2.getfullargspec(fn) arg_names = argspec.args[1:] + argspec.kwonlyargs # remove self kwargs_defaults = get_kwargs_defaults(argspec) cache_key = key_fn if cache_key is None: def cache_key(args, kwargs): return get_args_tuple(args, kwargs, arg_names, kwargs_defaults) @functools.wraps(fn) def wrapper(*args, **kwargs): key = cache_key(args, kwargs) try: return cache[key] except KeyError: value = fn(*args, **kwargs) cache[key] = value return value wrapper.clear = cache.clear return wrapper return decorator
[ "def", "lru_cache", "(", "maxsize", "=", "128", ",", "key_fn", "=", "None", ")", ":", "def", "decorator", "(", "fn", ")", ":", "cache", "=", "LRUCache", "(", "maxsize", ")", "argspec", "=", "inspect2", ".", "getfullargspec", "(", "fn", ")", "arg_names", "=", "argspec", ".", "args", "[", "1", ":", "]", "+", "argspec", ".", "kwonlyargs", "# remove self", "kwargs_defaults", "=", "get_kwargs_defaults", "(", "argspec", ")", "cache_key", "=", "key_fn", "if", "cache_key", "is", "None", ":", "def", "cache_key", "(", "args", ",", "kwargs", ")", ":", "return", "get_args_tuple", "(", "args", ",", "kwargs", ",", "arg_names", ",", "kwargs_defaults", ")", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "key", "=", "cache_key", "(", "args", ",", "kwargs", ")", "try", ":", "return", "cache", "[", "key", "]", "except", "KeyError", ":", "value", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "cache", "[", "key", "]", "=", "value", "return", "value", "wrapper", ".", "clear", "=", "cache", ".", "clear", "return", "wrapper", "return", "decorator" ]
Decorator that adds an LRU cache of size maxsize to the decorated function. maxsize is the number of different keys cache can accommodate. key_fn is the function that builds key from args. The default key function creates a tuple out of args and kwargs. If you use the default, there is no reason not to use functools.lru_cache directly. Possible use cases: - Your cache key is very large, so you don't want to keep the whole key in memory. - The function takes some arguments that don't affect the result.
[ "Decorator", "that", "adds", "an", "LRU", "cache", "of", "size", "maxsize", "to", "the", "decorated", "function", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L238-L278
quora/qcore
qcore/caching.py
cached_per_instance
def cached_per_instance():
    """Decorator that adds caching to an instance method.

    The cached value is stored so that it gets garbage collected together
    with the instance. The cached values are not stored when the object
    is pickled.
    """

    def cache_fun(fun):
        argspec = inspect2.getfullargspec(fun)
        arg_names = argspec.args[1:] + argspec.kwonlyargs  # remove self
        kwargs_defaults = get_kwargs_defaults(argspec)
        cache = {}

        def make_key(args, kwargs):
            return get_args_tuple(args, kwargs, arg_names, kwargs_defaults)

        def on_collect(instance_key, ref):
            # Weakref callback: drop the dead instance's cache entry.
            del cache[instance_key]

        @functools.wraps(fun)
        def new_fun(self, *args, **kwargs):
            instance_key = id(self)
            if instance_key not in cache:
                ref = weakref.ref(self, functools.partial(on_collect, instance_key))
                cache[instance_key] = (ref, {})
            per_instance = cache[instance_key][1]
            key = make_key(args, kwargs)
            if key not in per_instance:
                per_instance[key] = fun(self, *args, **kwargs)
            return per_instance[key]

        # just so unit tests can check that this is cleaned up correctly
        new_fun.__cached_per_instance_cache__ = cache
        return new_fun

    return cache_fun
python
def cached_per_instance(): """Decorator that adds caching to an instance method. The cached value is stored so that it gets garbage collected together with the instance. The cached values are not stored when the object is pickled. """ def cache_fun(fun): argspec = inspect2.getfullargspec(fun) arg_names = argspec.args[1:] + argspec.kwonlyargs # remove self kwargs_defaults = get_kwargs_defaults(argspec) cache = {} def cache_key(args, kwargs): return get_args_tuple(args, kwargs, arg_names, kwargs_defaults) def clear_cache(instance_key, ref): del cache[instance_key] @functools.wraps(fun) def new_fun(self, *args, **kwargs): instance_key = id(self) if instance_key not in cache: ref = weakref.ref(self, functools.partial(clear_cache, instance_key)) cache[instance_key] = (ref, {}) instance_cache = cache[instance_key][1] k = cache_key(args, kwargs) if k not in instance_cache: instance_cache[k] = fun(self, *args, **kwargs) return instance_cache[k] # just so unit tests can check that this is cleaned up correctly new_fun.__cached_per_instance_cache__ = cache return new_fun return cache_fun
[ "def", "cached_per_instance", "(", ")", ":", "def", "cache_fun", "(", "fun", ")", ":", "argspec", "=", "inspect2", ".", "getfullargspec", "(", "fun", ")", "arg_names", "=", "argspec", ".", "args", "[", "1", ":", "]", "+", "argspec", ".", "kwonlyargs", "# remove self", "kwargs_defaults", "=", "get_kwargs_defaults", "(", "argspec", ")", "cache", "=", "{", "}", "def", "cache_key", "(", "args", ",", "kwargs", ")", ":", "return", "get_args_tuple", "(", "args", ",", "kwargs", ",", "arg_names", ",", "kwargs_defaults", ")", "def", "clear_cache", "(", "instance_key", ",", "ref", ")", ":", "del", "cache", "[", "instance_key", "]", "@", "functools", ".", "wraps", "(", "fun", ")", "def", "new_fun", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "instance_key", "=", "id", "(", "self", ")", "if", "instance_key", "not", "in", "cache", ":", "ref", "=", "weakref", ".", "ref", "(", "self", ",", "functools", ".", "partial", "(", "clear_cache", ",", "instance_key", ")", ")", "cache", "[", "instance_key", "]", "=", "(", "ref", ",", "{", "}", ")", "instance_cache", "=", "cache", "[", "instance_key", "]", "[", "1", "]", "k", "=", "cache_key", "(", "args", ",", "kwargs", ")", "if", "k", "not", "in", "instance_cache", ":", "instance_cache", "[", "k", "]", "=", "fun", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "instance_cache", "[", "k", "]", "# just so unit tests can check that this is cleaned up correctly", "new_fun", ".", "__cached_per_instance_cache__", "=", "cache", "return", "new_fun", "return", "cache_fun" ]
Decorator that adds caching to an instance method. The cached value is stored so that it gets garbage collected together with the instance. The cached values are not stored when the object is pickled.
[ "Decorator", "that", "adds", "caching", "to", "an", "instance", "method", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L281-L319
quora/qcore
qcore/caching.py
get_args_tuple
def get_args_tuple(args, kwargs, arg_names, kwargs_defaults):
    """Generates a cache key from the passed in arguments.

    Positional args are taken as-is; each remaining name in arg_names is
    filled from kwargs, falling back to kwargs_defaults when present.
    Raises TypeError when a required argument is missing.
    """
    key_parts = list(args)
    try:
        for name in arg_names[len(args):]:
            if name in kwargs_defaults:
                key_parts.append(kwargs.get(name, kwargs_defaults[name]))
            else:
                key_parts.append(kwargs[name])
    except KeyError as e:
        raise TypeError("Missing argument %r" % (e.args[0],))
    return tuple(key_parts)
python
def get_args_tuple(args, kwargs, arg_names, kwargs_defaults): """Generates a cache key from the passed in arguments.""" args_list = list(args) args_len = len(args) all_args_len = len(arg_names) try: while args_len < all_args_len: arg_name = arg_names[args_len] if arg_name in kwargs_defaults: args_list.append(kwargs.get(arg_name, kwargs_defaults[arg_name])) else: args_list.append(kwargs[arg_name]) args_len += 1 except KeyError as e: raise TypeError("Missing argument %r" % (e.args[0],)) return tuple(args_list)
[ "def", "get_args_tuple", "(", "args", ",", "kwargs", ",", "arg_names", ",", "kwargs_defaults", ")", ":", "args_list", "=", "list", "(", "args", ")", "args_len", "=", "len", "(", "args", ")", "all_args_len", "=", "len", "(", "arg_names", ")", "try", ":", "while", "args_len", "<", "all_args_len", ":", "arg_name", "=", "arg_names", "[", "args_len", "]", "if", "arg_name", "in", "kwargs_defaults", ":", "args_list", ".", "append", "(", "kwargs", ".", "get", "(", "arg_name", ",", "kwargs_defaults", "[", "arg_name", "]", ")", ")", "else", ":", "args_list", ".", "append", "(", "kwargs", "[", "arg_name", "]", ")", "args_len", "+=", "1", "except", "KeyError", "as", "e", ":", "raise", "TypeError", "(", "\"Missing argument %r\"", "%", "(", "e", ".", "args", "[", "0", "]", ",", ")", ")", "return", "tuple", "(", "args_list", ")" ]
Generates a cache key from the passed in arguments.
[ "Generates", "a", "cache", "key", "from", "the", "passed", "in", "arguments", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L322-L337
quora/qcore
qcore/caching.py
get_kwargs_defaults
def get_kwargs_defaults(argspec):
    """Computes a kwargs_defaults dictionary for use by get_args_tuple given an argspec."""
    defaults = argspec.defaults or ()
    # Defaults align with the last len(defaults) positional arg names.
    first_default = len(argspec.args) - len(defaults)
    kwargs_defaults = dict(zip(argspec.args[first_default:], defaults))
    if getattr(argspec, "kwonlydefaults", None):
        kwargs_defaults.update(argspec.kwonlydefaults)
    return kwargs_defaults
python
def get_kwargs_defaults(argspec): """Computes a kwargs_defaults dictionary for use by get_args_tuple given an argspec.""" arg_names = tuple(argspec.args) defaults = argspec.defaults or () num_args = len(argspec.args) - len(defaults) kwargs_defaults = {} for i, default_value in enumerate(defaults): kwargs_defaults[arg_names[num_args + i]] = default_value if getattr(argspec, "kwonlydefaults", None): kwargs_defaults.update(argspec.kwonlydefaults) return kwargs_defaults
[ "def", "get_kwargs_defaults", "(", "argspec", ")", ":", "arg_names", "=", "tuple", "(", "argspec", ".", "args", ")", "defaults", "=", "argspec", ".", "defaults", "or", "(", ")", "num_args", "=", "len", "(", "argspec", ".", "args", ")", "-", "len", "(", "defaults", ")", "kwargs_defaults", "=", "{", "}", "for", "i", ",", "default_value", "in", "enumerate", "(", "defaults", ")", ":", "kwargs_defaults", "[", "arg_names", "[", "num_args", "+", "i", "]", "]", "=", "default_value", "if", "getattr", "(", "argspec", ",", "\"kwonlydefaults\"", ",", "None", ")", ":", "kwargs_defaults", ".", "update", "(", "argspec", ".", "kwonlydefaults", ")", "return", "kwargs_defaults" ]
Computes a kwargs_defaults dictionary for use by get_args_tuple given an argspec.
[ "Computes", "a", "kwargs_defaults", "dictionary", "for", "use", "by", "get_args_tuple", "given", "an", "argspec", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L340-L350
quora/qcore
qcore/caching.py
memoize
def memoize(fun):
    """Memoizes return values of the decorated function.

    Similar to l0cache, but the cache persists for the duration of the
    process, unless clear_cache() is called on the function.
    """
    argspec = inspect2.getfullargspec(fun)
    arg_names = argspec.args + argspec.kwonlyargs
    kwargs_defaults = get_kwargs_defaults(argspec)

    def make_key(args, kwargs):
        return get_args_tuple(args, kwargs, arg_names, kwargs_defaults)

    @functools.wraps(fun)
    def new_fun(*args, **kwargs):
        key = make_key(args, kwargs)
        cache = new_fun.__cache
        if key not in cache:
            cache[key] = fun(*args, **kwargs)
        return cache[key]

    def clear_cache():
        """Removes all cached values for this function."""
        new_fun.__cache.clear()

    new_fun.__cache = {}
    new_fun.clear_cache = clear_cache
    return new_fun
python
def memoize(fun): """Memoizes return values of the decorated function. Similar to l0cache, but the cache persists for the duration of the process, unless clear_cache() is called on the function. """ argspec = inspect2.getfullargspec(fun) arg_names = argspec.args + argspec.kwonlyargs kwargs_defaults = get_kwargs_defaults(argspec) def cache_key(args, kwargs): return get_args_tuple(args, kwargs, arg_names, kwargs_defaults) @functools.wraps(fun) def new_fun(*args, **kwargs): k = cache_key(args, kwargs) if k not in new_fun.__cache: new_fun.__cache[k] = fun(*args, **kwargs) return new_fun.__cache[k] def clear_cache(): """Removes all cached values for this function.""" new_fun.__cache.clear() new_fun.__cache = {} new_fun.clear_cache = clear_cache return new_fun
[ "def", "memoize", "(", "fun", ")", ":", "argspec", "=", "inspect2", ".", "getfullargspec", "(", "fun", ")", "arg_names", "=", "argspec", ".", "args", "+", "argspec", ".", "kwonlyargs", "kwargs_defaults", "=", "get_kwargs_defaults", "(", "argspec", ")", "def", "cache_key", "(", "args", ",", "kwargs", ")", ":", "return", "get_args_tuple", "(", "args", ",", "kwargs", ",", "arg_names", ",", "kwargs_defaults", ")", "@", "functools", ".", "wraps", "(", "fun", ")", "def", "new_fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "k", "=", "cache_key", "(", "args", ",", "kwargs", ")", "if", "k", "not", "in", "new_fun", ".", "__cache", ":", "new_fun", ".", "__cache", "[", "k", "]", "=", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "new_fun", ".", "__cache", "[", "k", "]", "def", "clear_cache", "(", ")", ":", "\"\"\"Removes all cached values for this function.\"\"\"", "new_fun", ".", "__cache", ".", "clear", "(", ")", "new_fun", ".", "__cache", "=", "{", "}", "new_fun", ".", "clear_cache", "=", "clear_cache", "return", "new_fun" ]
Memoizes return values of the decorated function. Similar to l0cache, but the cache persists for the duration of the process, unless clear_cache() is called on the function.
[ "Memoizes", "return", "values", "of", "the", "decorated", "function", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L353-L380
quora/qcore
qcore/caching.py
memoize_with_ttl
def memoize_with_ttl(ttl_secs=60 * 60 * 24):
    """Memoizes return values of the decorated function for a given time-to-live.

    Similar to l0cache, but the cache persists for the duration of the
    process, unless clear_cache() is called on the function or the
    time-to-live expires. By default, the time-to-live is set to 24 hours.
    """
    error_msg = (
        "Incorrect usage of qcore.caching.memoize_with_ttl: "
        "ttl_secs must be a positive integer."
    )
    assert_is_instance(ttl_secs, six.integer_types, error_msg)
    assert_gt(ttl_secs, 0, error_msg)

    def cache_fun(fun):
        argspec = inspect2.getfullargspec(fun)
        arg_names = argspec.args + argspec.kwonlyargs
        kwargs_defaults = get_kwargs_defaults(argspec)

        def make_key(args, kwargs):
            # repr() turns the args tuple into a string cache key.
            return repr(get_args_tuple(args, kwargs, arg_names, kwargs_defaults))

        @functools.wraps(fun)
        def new_fun(*args, **kwargs):
            key = make_key(args, kwargs)
            now = int(time.time())

            # Recompute when the key was never cached or its ttl expired;
            # the short-circuit keeps __cache_times[key] from raising.
            missing = key not in new_fun.__cache or key not in new_fun.__cache_times
            if missing or now - new_fun.__cache_times[key] > ttl_secs:
                new_fun.__cache[key] = fun(*args, **kwargs)
                new_fun.__cache_times[key] = now

            return new_fun.__cache[key]

        def clear_cache():
            """Removes all cached values for this function."""
            new_fun.__cache.clear()
            new_fun.__cache_times.clear()

        def dirty(*args, **kwargs):
            """Dirties the function for a given set of arguments."""
            key = make_key(args, kwargs)
            new_fun.__cache.pop(key, None)
            new_fun.__cache_times.pop(key, None)

        new_fun.__cache = {}
        new_fun.__cache_times = {}
        new_fun.clear_cache = clear_cache
        new_fun.dirty = dirty
        return new_fun

    return cache_fun
python
def memoize_with_ttl(ttl_secs=60 * 60 * 24): """Memoizes return values of the decorated function for a given time-to-live. Similar to l0cache, but the cache persists for the duration of the process, unless clear_cache() is called on the function or the time-to-live expires. By default, the time-to-live is set to 24 hours. """ error_msg = ( "Incorrect usage of qcore.caching.memoize_with_ttl: " "ttl_secs must be a positive integer." ) assert_is_instance(ttl_secs, six.integer_types, error_msg) assert_gt(ttl_secs, 0, error_msg) def cache_fun(fun): argspec = inspect2.getfullargspec(fun) arg_names = argspec.args + argspec.kwonlyargs kwargs_defaults = get_kwargs_defaults(argspec) def cache_key(args, kwargs): return repr(get_args_tuple(args, kwargs, arg_names, kwargs_defaults)) @functools.wraps(fun) def new_fun(*args, **kwargs): k = cache_key(args, kwargs) current_time = int(time.time()) # k is not in the cache; perform the function and cache the result. if k not in new_fun.__cache or k not in new_fun.__cache_times: new_fun.__cache[k] = fun(*args, **kwargs) new_fun.__cache_times[k] = current_time return new_fun.__cache[k] # k is in the cache at this point. Check if the ttl has expired; # if so, recompute the value and cache it. cache_time = new_fun.__cache_times[k] if current_time - cache_time > ttl_secs: new_fun.__cache[k] = fun(*args, **kwargs) new_fun.__cache_times[k] = current_time # finally, return the cached result. return new_fun.__cache[k] def clear_cache(): """Removes all cached values for this function.""" new_fun.__cache.clear() new_fun.__cache_times.clear() def dirty(*args, **kwargs): """Dirties the function for a given set of arguments.""" k = cache_key(args, kwargs) new_fun.__cache.pop(k, None) new_fun.__cache_times.pop(k, None) new_fun.__cache = {} new_fun.__cache_times = {} new_fun.clear_cache = clear_cache new_fun.dirty = dirty return new_fun return cache_fun
[ "def", "memoize_with_ttl", "(", "ttl_secs", "=", "60", "*", "60", "*", "24", ")", ":", "error_msg", "=", "(", "\"Incorrect usage of qcore.caching.memoize_with_ttl: \"", "\"ttl_secs must be a positive integer.\"", ")", "assert_is_instance", "(", "ttl_secs", ",", "six", ".", "integer_types", ",", "error_msg", ")", "assert_gt", "(", "ttl_secs", ",", "0", ",", "error_msg", ")", "def", "cache_fun", "(", "fun", ")", ":", "argspec", "=", "inspect2", ".", "getfullargspec", "(", "fun", ")", "arg_names", "=", "argspec", ".", "args", "+", "argspec", ".", "kwonlyargs", "kwargs_defaults", "=", "get_kwargs_defaults", "(", "argspec", ")", "def", "cache_key", "(", "args", ",", "kwargs", ")", ":", "return", "repr", "(", "get_args_tuple", "(", "args", ",", "kwargs", ",", "arg_names", ",", "kwargs_defaults", ")", ")", "@", "functools", ".", "wraps", "(", "fun", ")", "def", "new_fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "k", "=", "cache_key", "(", "args", ",", "kwargs", ")", "current_time", "=", "int", "(", "time", ".", "time", "(", ")", ")", "# k is not in the cache; perform the function and cache the result.", "if", "k", "not", "in", "new_fun", ".", "__cache", "or", "k", "not", "in", "new_fun", ".", "__cache_times", ":", "new_fun", ".", "__cache", "[", "k", "]", "=", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "new_fun", ".", "__cache_times", "[", "k", "]", "=", "current_time", "return", "new_fun", ".", "__cache", "[", "k", "]", "# k is in the cache at this point. 
Check if the ttl has expired;", "# if so, recompute the value and cache it.", "cache_time", "=", "new_fun", ".", "__cache_times", "[", "k", "]", "if", "current_time", "-", "cache_time", ">", "ttl_secs", ":", "new_fun", ".", "__cache", "[", "k", "]", "=", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "new_fun", ".", "__cache_times", "[", "k", "]", "=", "current_time", "# finally, return the cached result.", "return", "new_fun", ".", "__cache", "[", "k", "]", "def", "clear_cache", "(", ")", ":", "\"\"\"Removes all cached values for this function.\"\"\"", "new_fun", ".", "__cache", ".", "clear", "(", ")", "new_fun", ".", "__cache_times", ".", "clear", "(", ")", "def", "dirty", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Dirties the function for a given set of arguments.\"\"\"", "k", "=", "cache_key", "(", "args", ",", "kwargs", ")", "new_fun", ".", "__cache", ".", "pop", "(", "k", ",", "None", ")", "new_fun", ".", "__cache_times", ".", "pop", "(", "k", ",", "None", ")", "new_fun", ".", "__cache", "=", "{", "}", "new_fun", ".", "__cache_times", "=", "{", "}", "new_fun", ".", "clear_cache", "=", "clear_cache", "new_fun", ".", "dirty", "=", "dirty", "return", "new_fun", "return", "cache_fun" ]
Memoizes return values of the decorated function for a given time-to-live. Similar to l0cache, but the cache persists for the duration of the process, unless clear_cache() is called on the function or the time-to-live expires. By default, the time-to-live is set to 24 hours.
[ "Memoizes", "return", "values", "of", "the", "decorated", "function", "for", "a", "given", "time", "-", "to", "-", "live", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L383-L445
quora/qcore
qcore/caching.py
LazyConstant.get_value
def get_value(self):
    """Returns the value of the constant, computing it on first access.

    Returns None if the value provider itself yields not_computed.
    """
    if self.value is not_computed:
        self.value = self.value_provider()
    return None if self.value is not_computed else self.value
python
def get_value(self): """Returns the value of the constant.""" if self.value is not_computed: self.value = self.value_provider() if self.value is not_computed: return None return self.value
[ "def", "get_value", "(", "self", ")", ":", "if", "self", ".", "value", "is", "not_computed", ":", "self", ".", "value", "=", "self", ".", "value_provider", "(", ")", "if", "self", ".", "value", "is", "not_computed", ":", "return", "None", "return", "self", ".", "value" ]
Returns the value of the constant.
[ "Returns", "the", "value", "of", "the", "constant", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L64-L70
quora/qcore
qcore/caching.py
LazyConstant.compute
def compute(self):
    """Computes the value. Does not look at the cache.

    Returns None if the value provider yields not_computed.
    """
    result = self.value_provider()
    self.value = result
    return None if result is not_computed else result
python
def compute(self): """Computes the value. Does not look at the cache.""" self.value = self.value_provider() if self.value is not_computed: return None else: return self.value
[ "def", "compute", "(", "self", ")", ":", "self", ".", "value", "=", "self", ".", "value_provider", "(", ")", "if", "self", ".", "value", "is", "not_computed", ":", "return", "None", "else", ":", "return", "self", ".", "value" ]
Computes the value. Does not look at the cache.
[ "Computes", "the", "value", ".", "Does", "not", "look", "at", "the", "cache", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L72-L78
quora/qcore
qcore/caching.py
LRUCache.get
def get(self, key, default=miss):
    """Return the value for given key if it exists."""
    if key in self._dict:
        # Goes through __getitem__, which marks the entry recently used.
        return self[key]
    return default
python
def get(self, key, default=miss): """Return the value for given key if it exists.""" if key not in self._dict: return default # invokes __getitem__, which updates the item return self[key]
[ "def", "get", "(", "self", ",", "key", ",", "default", "=", "miss", ")", ":", "if", "key", "not", "in", "self", ".", "_dict", ":", "return", "default", "# invokes __getitem__, which updates the item", "return", "self", "[", "key", "]" ]
Return the value for given key if it exists.
[ "Return", "the", "value", "for", "given", "key", "if", "it", "exists", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L187-L193
quora/qcore
qcore/caching.py
LRUCache.clear
def clear(self, omit_item_evicted=False):
    """Empty the cache and optionally invoke item_evicted callback.

    :param omit_item_evicted: when True, skip invoking the eviction
        callback for the dropped entries.
    """
    if not omit_item_evicted:
        # Materialize the items first: _evict_item callbacks may touch
        # the cache, and mutating a dict while iterating its live
        # items() view raises RuntimeError in Python 3.
        items = list(self._dict.items())
        for key, value in items:
            self._evict_item(key, value)
    self._dict.clear()
python
def clear(self, omit_item_evicted=False): """Empty the cache and optionally invoke item_evicted callback.""" if not omit_item_evicted: items = self._dict.items() for key, value in items: self._evict_item(key, value) self._dict.clear()
[ "def", "clear", "(", "self", ",", "omit_item_evicted", "=", "False", ")", ":", "if", "not", "omit_item_evicted", ":", "items", "=", "self", ".", "_dict", ".", "items", "(", ")", "for", "key", ",", "value", "in", "items", ":", "self", ".", "_evict_item", "(", "key", ",", "value", ")", "self", ".", "_dict", ".", "clear", "(", ")" ]
Empty the cache and optionally invoke item_evicted callback.
[ "Empty", "the", "cache", "and", "optionally", "invoke", "item_evicted", "callback", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/caching.py#L209-L215
zeromake/aiosqlite3
aiosqlite3/utils.py
create_future
def create_future(loop):  # pragma: no cover
    """Compatibility wrapper for the loop.create_future() call introduced in 3.5.2."""
    factory = getattr(loop, 'create_future', None)
    if factory is not None:
        return factory()
    return asyncio.Future(loop=loop)
python
def create_future(loop): # pragma: no cover """Compatibility wrapper for the loop.create_future() call introduced in 3.5.2.""" if hasattr(loop, 'create_future'): return loop.create_future() return asyncio.Future(loop=loop)
[ "def", "create_future", "(", "loop", ")", ":", "# pragma: no cover", "if", "hasattr", "(", "loop", ",", "'create_future'", ")", ":", "return", "loop", ".", "create_future", "(", ")", "return", "asyncio", ".", "Future", "(", "loop", "=", "loop", ")" ]
Compatibility wrapper for the loop.create_future() call introduced in 3.5.2.
[ "Compatibility", "wrapper", "for", "the", "loop", ".", "create_future", "()", "call", "introduced", "in", "3", ".", "5", ".", "2", "." ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/utils.py#L17-L23
zeromake/aiosqlite3
aiosqlite3/utils.py
create_task
def create_task(coro, loop):  # pragma: no cover
    """Compatibility wrapper for the loop.create_task() call introduced in 3.4.2."""
    if not hasattr(loop, 'create_task'):
        return asyncio.Task(coro, loop=loop)
    return loop.create_task(coro)
python
def create_task(coro, loop): # pragma: no cover """Compatibility wrapper for the loop.create_task() call introduced in 3.4.2.""" if hasattr(loop, 'create_task'): return loop.create_task(coro) return asyncio.Task(coro, loop=loop)
[ "def", "create_task", "(", "coro", ",", "loop", ")", ":", "# pragma: no cover", "if", "hasattr", "(", "loop", ",", "'create_task'", ")", ":", "return", "loop", ".", "create_task", "(", "coro", ")", "return", "asyncio", ".", "Task", "(", "coro", ",", "loop", "=", "loop", ")" ]
Compatibility wrapper for the loop.create_task() call introduced in 3.4.2.
[ "Compatibility", "wrapper", "for", "the", "loop", ".", "create_task", "()", "call", "introduced", "in", "3", ".", "4", ".", "2", "." ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/utils.py#L26-L32
zeromake/aiosqlite3
aiosqlite3/utils.py
proxy_property_directly
def proxy_property_directly(bind_attr, attrs):
    """Class decorator factory that adds proxy properties to a class.

    Each name in attrs is set on the decorated class as the property
    returned by _make_proxy_property(bind_attr, name).
    """

    def cls_builder(cls):
        """Attach one proxy property per requested attribute name."""
        for name in attrs:
            setattr(cls, name, _make_proxy_property(bind_attr, name))
        return cls

    return cls_builder
python
def proxy_property_directly(bind_attr, attrs): """ 为类添加代理属性 """ def cls_builder(cls): """ 添加到类 """ for attr_name in attrs: setattr(cls, attr_name, _make_proxy_property(bind_attr, attr_name)) return cls return cls_builder
[ "def", "proxy_property_directly", "(", "bind_attr", ",", "attrs", ")", ":", "def", "cls_builder", "(", "cls", ")", ":", "\"\"\"\n 添加到类\n \"\"\"", "for", "attr_name", "in", "attrs", ":", "setattr", "(", "cls", ",", "attr_name", ",", "_make_proxy_property", "(", "bind_attr", ",", "attr_name", ")", ")", "return", "cls", "return", "cls_builder" ]
为类添加代理属性
[ "为类添加代理属性" ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/utils.py#L265-L276
opennode/waldur-core
waldur_core/structure/models.py
StructureLoggableMixin.get_permitted_objects_uuids
def get_permitted_objects_uuids(cls, user):
    """ Return query dictionary to search objects available to user. """
    visible = filter_queryset_for_user(cls.objects.all(), user)
    uuids = visible.values_list('uuid', flat=True)
    # Key format: e.g. MyModel -> 'my_model_uuid'.
    lookup_key = core_utils.camel_case_to_underscore(cls.__name__) + '_uuid'
    return {lookup_key: uuids}
python
def get_permitted_objects_uuids(cls, user): """ Return query dictionary to search objects available to user. """ uuids = filter_queryset_for_user(cls.objects.all(), user).values_list('uuid', flat=True) key = core_utils.camel_case_to_underscore(cls.__name__) + '_uuid' return {key: uuids}
[ "def", "get_permitted_objects_uuids", "(", "cls", ",", "user", ")", ":", "uuids", "=", "filter_queryset_for_user", "(", "cls", ".", "objects", ".", "all", "(", ")", ",", "user", ")", ".", "values_list", "(", "'uuid'", ",", "flat", "=", "True", ")", "key", "=", "core_utils", ".", "camel_case_to_underscore", "(", "cls", ".", "__name__", ")", "+", "'_uuid'", "return", "{", "key", ":", "uuids", "}" ]
Return query dictionary to search objects available to user.
[ "Return", "query", "dictionary", "to", "search", "objects", "available", "to", "user", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/models.py#L79-L85
opennode/waldur-core
waldur_core/structure/models.py
PermissionMixin.has_user
def has_user(self, user, role=None, timestamp=False): """ Checks whether user has role in entity. `timestamp` can have following values: - False - check whether user has role in entity at the moment. - None - check whether user has permanent role in entity. - Datetime object - check whether user will have role in entity at specific timestamp. """ permissions = self.permissions.filter(user=user, is_active=True) if role is not None: permissions = permissions.filter(role=role) if timestamp is None: permissions = permissions.filter(expiration_time=None) elif timestamp: permissions = permissions.filter(Q(expiration_time=None) | Q(expiration_time__gte=timestamp)) return permissions.exists()
python
def has_user(self, user, role=None, timestamp=False): """ Checks whether user has role in entity. `timestamp` can have following values: - False - check whether user has role in entity at the moment. - None - check whether user has permanent role in entity. - Datetime object - check whether user will have role in entity at specific timestamp. """ permissions = self.permissions.filter(user=user, is_active=True) if role is not None: permissions = permissions.filter(role=role) if timestamp is None: permissions = permissions.filter(expiration_time=None) elif timestamp: permissions = permissions.filter(Q(expiration_time=None) | Q(expiration_time__gte=timestamp)) return permissions.exists()
[ "def", "has_user", "(", "self", ",", "user", ",", "role", "=", "None", ",", "timestamp", "=", "False", ")", ":", "permissions", "=", "self", ".", "permissions", ".", "filter", "(", "user", "=", "user", ",", "is_active", "=", "True", ")", "if", "role", "is", "not", "None", ":", "permissions", "=", "permissions", ".", "filter", "(", "role", "=", "role", ")", "if", "timestamp", "is", "None", ":", "permissions", "=", "permissions", ".", "filter", "(", "expiration_time", "=", "None", ")", "elif", "timestamp", ":", "permissions", "=", "permissions", ".", "filter", "(", "Q", "(", "expiration_time", "=", "None", ")", "|", "Q", "(", "expiration_time__gte", "=", "timestamp", ")", ")", "return", "permissions", ".", "exists", "(", ")" ]
Checks whether user has role in entity. `timestamp` can have following values: - False - check whether user has role in entity at the moment. - None - check whether user has permanent role in entity. - Datetime object - check whether user will have role in entity at specific timestamp.
[ "Checks", "whether", "user", "has", "role", "in", "entity", ".", "timestamp", "can", "have", "following", "values", ":", "-", "False", "-", "check", "whether", "user", "has", "role", "in", "entity", "at", "the", "moment", ".", "-", "None", "-", "check", "whether", "user", "has", "permanent", "role", "in", "entity", ".", "-", "Datetime", "object", "-", "check", "whether", "user", "will", "have", "role", "in", "entity", "at", "specific", "timestamp", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/models.py#L191-L209
opennode/waldur-core
waldur_core/core/magic.py
Magic.from_buffer
def from_buffer(self, buf): """ Identify the contents of `buf` """ with self.lock: try: # if we're on python3, convert buf to bytes # otherwise this string is passed as wchar* # which is not what libmagic expects if isinstance(buf, str) and str != bytes: buf = buf.encode('utf-8', errors='replace') return maybe_decode(magic_buffer(self.cookie, buf)) except MagicException as e: return self._handle509Bug(e)
python
def from_buffer(self, buf): """ Identify the contents of `buf` """ with self.lock: try: # if we're on python3, convert buf to bytes # otherwise this string is passed as wchar* # which is not what libmagic expects if isinstance(buf, str) and str != bytes: buf = buf.encode('utf-8', errors='replace') return maybe_decode(magic_buffer(self.cookie, buf)) except MagicException as e: return self._handle509Bug(e)
[ "def", "from_buffer", "(", "self", ",", "buf", ")", ":", "with", "self", ".", "lock", ":", "try", ":", "# if we're on python3, convert buf to bytes", "# otherwise this string is passed as wchar*", "# which is not what libmagic expects", "if", "isinstance", "(", "buf", ",", "str", ")", "and", "str", "!=", "bytes", ":", "buf", "=", "buf", ".", "encode", "(", "'utf-8'", ",", "errors", "=", "'replace'", ")", "return", "maybe_decode", "(", "magic_buffer", "(", "self", ".", "cookie", ",", "buf", ")", ")", "except", "MagicException", "as", "e", ":", "return", "self", ".", "_handle509Bug", "(", "e", ")" ]
Identify the contents of `buf`
[ "Identify", "the", "contents", "of", "buf" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/magic.py#L71-L84
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter.start
def start(self, timeout=None, root_object=None): """ Starts listening to events. Args: timeout (int): number of seconds before timeout. Used for testing purpose only. root_object (bambou.NURESTRootObject): NURESTRootObject object that is listening. Used for testing purpose only. """ if self._is_running: return if timeout: self._timeout = timeout self._start_time = int(time()) pushcenter_logger.debug("[NURESTPushCenter] Starting push center on url %s ..." % self.url) self._is_running = True self.__root_object = root_object from .nurest_session import NURESTSession current_session = NURESTSession.get_current_session() args_session = {'session': current_session} self._thread = StoppableThread(target=self._listen, name='push-center', kwargs=args_session) self._thread.daemon = True self._thread.start()
python
def start(self, timeout=None, root_object=None): """ Starts listening to events. Args: timeout (int): number of seconds before timeout. Used for testing purpose only. root_object (bambou.NURESTRootObject): NURESTRootObject object that is listening. Used for testing purpose only. """ if self._is_running: return if timeout: self._timeout = timeout self._start_time = int(time()) pushcenter_logger.debug("[NURESTPushCenter] Starting push center on url %s ..." % self.url) self._is_running = True self.__root_object = root_object from .nurest_session import NURESTSession current_session = NURESTSession.get_current_session() args_session = {'session': current_session} self._thread = StoppableThread(target=self._listen, name='push-center', kwargs=args_session) self._thread.daemon = True self._thread.start()
[ "def", "start", "(", "self", ",", "timeout", "=", "None", ",", "root_object", "=", "None", ")", ":", "if", "self", ".", "_is_running", ":", "return", "if", "timeout", ":", "self", ".", "_timeout", "=", "timeout", "self", ".", "_start_time", "=", "int", "(", "time", "(", ")", ")", "pushcenter_logger", ".", "debug", "(", "\"[NURESTPushCenter] Starting push center on url %s ...\"", "%", "self", ".", "url", ")", "self", ".", "_is_running", "=", "True", "self", ".", "__root_object", "=", "root_object", "from", ".", "nurest_session", "import", "NURESTSession", "current_session", "=", "NURESTSession", ".", "get_current_session", "(", ")", "args_session", "=", "{", "'session'", ":", "current_session", "}", "self", ".", "_thread", "=", "StoppableThread", "(", "target", "=", "self", ".", "_listen", ",", "name", "=", "'push-center'", ",", "kwargs", "=", "args_session", ")", "self", ".", "_thread", ".", "daemon", "=", "True", "self", ".", "_thread", ".", "start", "(", ")" ]
Starts listening to events. Args: timeout (int): number of seconds before timeout. Used for testing purpose only. root_object (bambou.NURESTRootObject): NURESTRootObject object that is listening. Used for testing purpose only.
[ "Starts", "listening", "to", "events", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L103-L128
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter.stop
def stop(self): """ Stops listening for events. """ if not self._is_running: return pushcenter_logger.debug("[NURESTPushCenter] Stopping...") self._thread.stop() self._thread.join() self._is_running = False self._current_connection = None self._start_time = None self._timeout = None
python
def stop(self): """ Stops listening for events. """ if not self._is_running: return pushcenter_logger.debug("[NURESTPushCenter] Stopping...") self._thread.stop() self._thread.join() self._is_running = False self._current_connection = None self._start_time = None self._timeout = None
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "_is_running", ":", "return", "pushcenter_logger", ".", "debug", "(", "\"[NURESTPushCenter] Stopping...\"", ")", "self", ".", "_thread", ".", "stop", "(", ")", "self", ".", "_thread", ".", "join", "(", ")", "self", ".", "_is_running", "=", "False", "self", ".", "_current_connection", "=", "None", "self", ".", "_start_time", "=", "None", "self", ".", "_timeout", "=", "None" ]
Stops listening for events.
[ "Stops", "listening", "for", "events", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L130-L144
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter.wait_until_exit
def wait_until_exit(self): """ Wait until thread exit Used for testing purpose only """ if self._timeout is None: raise Exception("Thread will never exit. Use stop or specify timeout when starting it!") self._thread.join() self.stop()
python
def wait_until_exit(self): """ Wait until thread exit Used for testing purpose only """ if self._timeout is None: raise Exception("Thread will never exit. Use stop or specify timeout when starting it!") self._thread.join() self.stop()
[ "def", "wait_until_exit", "(", "self", ")", ":", "if", "self", ".", "_timeout", "is", "None", ":", "raise", "Exception", "(", "\"Thread will never exit. Use stop or specify timeout when starting it!\"", ")", "self", ".", "_thread", ".", "join", "(", ")", "self", ".", "stop", "(", ")" ]
Wait until thread exit Used for testing purpose only
[ "Wait", "until", "thread", "exit" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L146-L156
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter._did_receive_event
def _did_receive_event(self, connection): """ Receive an event from connection """ if not self._is_running: return if connection.has_timeouted: return response = connection.response data = None if response.status_code != 200: pushcenter_logger.error("[NURESTPushCenter]: Connection failure [%s] %s" % (response.status_code, response.errors)) else: data = response.data if len(self._delegate_methods) > 0: for m in self._delegate_methods: try: m(data) except Exception as exc: pushcenter_logger.error("[NURESTPushCenter] Delegate method %s failed:\n%s" % (m, exc)) elif data: events = data['events'] self.nb_events_received += len(events) self.nb_push_received += 1 pushcenter_logger.info("[NURESTPushCenter] Received Push #%s (total=%s, latest=%s)\n%s" % (self.nb_push_received, self.nb_events_received, len(events), json.dumps(events, indent=4))) self._last_events.extend(events) if self._is_running: uuid = None if data and 'uuid' in data: uuid = data['uuid'] self._listen(uuid)
python
def _did_receive_event(self, connection): """ Receive an event from connection """ if not self._is_running: return if connection.has_timeouted: return response = connection.response data = None if response.status_code != 200: pushcenter_logger.error("[NURESTPushCenter]: Connection failure [%s] %s" % (response.status_code, response.errors)) else: data = response.data if len(self._delegate_methods) > 0: for m in self._delegate_methods: try: m(data) except Exception as exc: pushcenter_logger.error("[NURESTPushCenter] Delegate method %s failed:\n%s" % (m, exc)) elif data: events = data['events'] self.nb_events_received += len(events) self.nb_push_received += 1 pushcenter_logger.info("[NURESTPushCenter] Received Push #%s (total=%s, latest=%s)\n%s" % (self.nb_push_received, self.nb_events_received, len(events), json.dumps(events, indent=4))) self._last_events.extend(events) if self._is_running: uuid = None if data and 'uuid' in data: uuid = data['uuid'] self._listen(uuid)
[ "def", "_did_receive_event", "(", "self", ",", "connection", ")", ":", "if", "not", "self", ".", "_is_running", ":", "return", "if", "connection", ".", "has_timeouted", ":", "return", "response", "=", "connection", ".", "response", "data", "=", "None", "if", "response", ".", "status_code", "!=", "200", ":", "pushcenter_logger", ".", "error", "(", "\"[NURESTPushCenter]: Connection failure [%s] %s\"", "%", "(", "response", ".", "status_code", ",", "response", ".", "errors", ")", ")", "else", ":", "data", "=", "response", ".", "data", "if", "len", "(", "self", ".", "_delegate_methods", ")", ">", "0", ":", "for", "m", "in", "self", ".", "_delegate_methods", ":", "try", ":", "m", "(", "data", ")", "except", "Exception", "as", "exc", ":", "pushcenter_logger", ".", "error", "(", "\"[NURESTPushCenter] Delegate method %s failed:\\n%s\"", "%", "(", "m", ",", "exc", ")", ")", "elif", "data", ":", "events", "=", "data", "[", "'events'", "]", "self", ".", "nb_events_received", "+=", "len", "(", "events", ")", "self", ".", "nb_push_received", "+=", "1", "pushcenter_logger", ".", "info", "(", "\"[NURESTPushCenter] Received Push #%s (total=%s, latest=%s)\\n%s\"", "%", "(", "self", ".", "nb_push_received", ",", "self", ".", "nb_events_received", ",", "len", "(", "events", ")", ",", "json", ".", "dumps", "(", "events", ",", "indent", "=", "4", ")", ")", ")", "self", ".", "_last_events", ".", "extend", "(", "events", ")", "if", "self", ".", "_is_running", ":", "uuid", "=", "None", "if", "data", "and", "'uuid'", "in", "data", ":", "uuid", "=", "data", "[", "'uuid'", "]", "self", ".", "_listen", "(", "uuid", ")" ]
Receive an event from connection
[ "Receive", "an", "event", "from", "connection" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L173-L210
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter._listen
def _listen(self, uuid=None, session=None): """ Listen a connection uuid """ if self.url is None: raise Exception("NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.") events_url = "%s/events" % self.url if uuid: events_url = "%s?uuid=%s" % (events_url, uuid) request = NURESTRequest(method='GET', url=events_url) # Force async to False so the push center will have only 1 thread running connection = NURESTConnection(request=request, async=True, callback=self._did_receive_event, root_object=self._root_object) if self._timeout: if int(time()) - self._start_time >= self._timeout: pushcenter_logger.debug("[NURESTPushCenter] Timeout (timeout=%ss)." % self._timeout) return else: connection.timeout = self._timeout pushcenter_logger.info('Bambou Sending >>>>>>\n%s %s' % (request.method, request.url)) # connection.ignore_request_idle = True connection.start()
python
def _listen(self, uuid=None, session=None): """ Listen a connection uuid """ if self.url is None: raise Exception("NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.") events_url = "%s/events" % self.url if uuid: events_url = "%s?uuid=%s" % (events_url, uuid) request = NURESTRequest(method='GET', url=events_url) # Force async to False so the push center will have only 1 thread running connection = NURESTConnection(request=request, async=True, callback=self._did_receive_event, root_object=self._root_object) if self._timeout: if int(time()) - self._start_time >= self._timeout: pushcenter_logger.debug("[NURESTPushCenter] Timeout (timeout=%ss)." % self._timeout) return else: connection.timeout = self._timeout pushcenter_logger.info('Bambou Sending >>>>>>\n%s %s' % (request.method, request.url)) # connection.ignore_request_idle = True connection.start()
[ "def", "_listen", "(", "self", ",", "uuid", "=", "None", ",", "session", "=", "None", ")", ":", "if", "self", ".", "url", "is", "None", ":", "raise", "Exception", "(", "\"NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.\"", ")", "events_url", "=", "\"%s/events\"", "%", "self", ".", "url", "if", "uuid", ":", "events_url", "=", "\"%s?uuid=%s\"", "%", "(", "events_url", ",", "uuid", ")", "request", "=", "NURESTRequest", "(", "method", "=", "'GET'", ",", "url", "=", "events_url", ")", "# Force async to False so the push center will have only 1 thread running", "connection", "=", "NURESTConnection", "(", "request", "=", "request", ",", "async", "=", "True", ",", "callback", "=", "self", ".", "_did_receive_event", ",", "root_object", "=", "self", ".", "_root_object", ")", "if", "self", ".", "_timeout", ":", "if", "int", "(", "time", "(", ")", ")", "-", "self", ".", "_start_time", ">=", "self", ".", "_timeout", ":", "pushcenter_logger", ".", "debug", "(", "\"[NURESTPushCenter] Timeout (timeout=%ss).\"", "%", "self", ".", "_timeout", ")", "return", "else", ":", "connection", ".", "timeout", "=", "self", ".", "_timeout", "pushcenter_logger", ".", "info", "(", "'Bambou Sending >>>>>>\\n%s %s'", "%", "(", "request", ".", "method", ",", "request", ".", "url", ")", ")", "# connection.ignore_request_idle = True", "connection", ".", "start", "(", ")" ]
Listen a connection uuid
[ "Listen", "a", "connection", "uuid" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L212-L238
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter.add_delegate
def add_delegate(self, callback): """ Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events """ if callback in self._delegate_methods: return self._delegate_methods.append(callback)
python
def add_delegate(self, callback): """ Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events """ if callback in self._delegate_methods: return self._delegate_methods.append(callback)
[ "def", "add_delegate", "(", "self", ",", "callback", ")", ":", "if", "callback", "in", "self", ".", "_delegate_methods", ":", "return", "self", ".", "_delegate_methods", ".", "append", "(", "callback", ")" ]
Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events
[ "Registers", "a", "new", "delegate", "callback" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L240-L252
nuagenetworks/bambou
bambou/nurest_push_center.py
NURESTPushCenter.remove_delegate
def remove_delegate(self, callback): """ Unregisters a registered delegate function or a method. Args: callback(function): method to trigger when push center receives events """ if callback not in self._delegate_methods: return self._delegate_methods.remove(callback)
python
def remove_delegate(self, callback): """ Unregisters a registered delegate function or a method. Args: callback(function): method to trigger when push center receives events """ if callback not in self._delegate_methods: return self._delegate_methods.remove(callback)
[ "def", "remove_delegate", "(", "self", ",", "callback", ")", ":", "if", "callback", "not", "in", "self", ".", "_delegate_methods", ":", "return", "self", ".", "_delegate_methods", ".", "remove", "(", "callback", ")" ]
Unregisters a registered delegate function or a method. Args: callback(function): method to trigger when push center receives events
[ "Unregisters", "a", "registered", "delegate", "function", "or", "a", "method", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_push_center.py#L254-L264
nuagenetworks/bambou
bambou/config.py
BambouConfig._read_config
def _read_config(cls): """ Reads the configuration file if any """ cls._config_parser = configparser.ConfigParser() cls._config_parser.read(cls._default_attribute_values_configuration_file_path)
python
def _read_config(cls): """ Reads the configuration file if any """ cls._config_parser = configparser.ConfigParser() cls._config_parser.read(cls._default_attribute_values_configuration_file_path)
[ "def", "_read_config", "(", "cls", ")", ":", "cls", ".", "_config_parser", "=", "configparser", ".", "ConfigParser", "(", ")", "cls", ".", "_config_parser", ".", "read", "(", "cls", ".", "_default_attribute_values_configuration_file_path", ")" ]
Reads the configuration file if any
[ "Reads", "the", "configuration", "file", "if", "any" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/config.py#L108-L113
nuagenetworks/bambou
bambou/config.py
BambouConfig.get_default_attribute_value
def get_default_attribute_value(cls, object_class, property_name, attr_type=str): """ Gets the default value of a given property for a given object. These properties can be set in a config INI file looking like .. code-block:: ini [NUEntity] default_behavior = THIS speed = 1000 [NUOtherEntity] attribute_name = a value This will be used when creating a :class:`bambou.NURESTObject` when no parameter or data is provided """ if not cls._default_attribute_values_configuration_file_path: return None if not cls._config_parser: cls._read_config() class_name = object_class.__name__ if not cls._config_parser.has_section(class_name): return None if not cls._config_parser.has_option(class_name, property_name): return None if sys.version_info < (3,): integer_types = (int, long,) else: integer_types = (int,) if isinstance(attr_type, integer_types): return cls._config_parser.getint(class_name, property_name) elif attr_type is bool: return cls._config_parser.getboolean(class_name, property_name) else: return cls._config_parser.get(class_name, property_name)
python
def get_default_attribute_value(cls, object_class, property_name, attr_type=str): """ Gets the default value of a given property for a given object. These properties can be set in a config INI file looking like .. code-block:: ini [NUEntity] default_behavior = THIS speed = 1000 [NUOtherEntity] attribute_name = a value This will be used when creating a :class:`bambou.NURESTObject` when no parameter or data is provided """ if not cls._default_attribute_values_configuration_file_path: return None if not cls._config_parser: cls._read_config() class_name = object_class.__name__ if not cls._config_parser.has_section(class_name): return None if not cls._config_parser.has_option(class_name, property_name): return None if sys.version_info < (3,): integer_types = (int, long,) else: integer_types = (int,) if isinstance(attr_type, integer_types): return cls._config_parser.getint(class_name, property_name) elif attr_type is bool: return cls._config_parser.getboolean(class_name, property_name) else: return cls._config_parser.get(class_name, property_name)
[ "def", "get_default_attribute_value", "(", "cls", ",", "object_class", ",", "property_name", ",", "attr_type", "=", "str", ")", ":", "if", "not", "cls", ".", "_default_attribute_values_configuration_file_path", ":", "return", "None", "if", "not", "cls", ".", "_config_parser", ":", "cls", ".", "_read_config", "(", ")", "class_name", "=", "object_class", ".", "__name__", "if", "not", "cls", ".", "_config_parser", ".", "has_section", "(", "class_name", ")", ":", "return", "None", "if", "not", "cls", ".", "_config_parser", ".", "has_option", "(", "class_name", ",", "property_name", ")", ":", "return", "None", "if", "sys", ".", "version_info", "<", "(", "3", ",", ")", ":", "integer_types", "=", "(", "int", ",", "long", ",", ")", "else", ":", "integer_types", "=", "(", "int", ",", ")", "if", "isinstance", "(", "attr_type", ",", "integer_types", ")", ":", "return", "cls", ".", "_config_parser", ".", "getint", "(", "class_name", ",", "property_name", ")", "elif", "attr_type", "is", "bool", ":", "return", "cls", ".", "_config_parser", ".", "getboolean", "(", "class_name", ",", "property_name", ")", "else", ":", "return", "cls", ".", "_config_parser", ".", "get", "(", "class_name", ",", "property_name", ")" ]
Gets the default value of a given property for a given object. These properties can be set in a config INI file looking like .. code-block:: ini [NUEntity] default_behavior = THIS speed = 1000 [NUOtherEntity] attribute_name = a value This will be used when creating a :class:`bambou.NURESTObject` when no parameter or data is provided
[ "Gets", "the", "default", "value", "of", "a", "given", "property", "for", "a", "given", "object", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/config.py#L116-L157
opennode/waldur-core
waldur_core/core/filters.py
SummaryFilter.filter
def filter(self, request, queryset, view): """ Filter each resource separately using its own filter """ summary_queryset = queryset filtered_querysets = [] for queryset in summary_queryset.querysets: filter_class = self._get_filter(queryset) queryset = filter_class(request.query_params, queryset=queryset).qs filtered_querysets.append(queryset) summary_queryset.querysets = filtered_querysets return summary_queryset
python
def filter(self, request, queryset, view): """ Filter each resource separately using its own filter """ summary_queryset = queryset filtered_querysets = [] for queryset in summary_queryset.querysets: filter_class = self._get_filter(queryset) queryset = filter_class(request.query_params, queryset=queryset).qs filtered_querysets.append(queryset) summary_queryset.querysets = filtered_querysets return summary_queryset
[ "def", "filter", "(", "self", ",", "request", ",", "queryset", ",", "view", ")", ":", "summary_queryset", "=", "queryset", "filtered_querysets", "=", "[", "]", "for", "queryset", "in", "summary_queryset", ".", "querysets", ":", "filter_class", "=", "self", ".", "_get_filter", "(", "queryset", ")", "queryset", "=", "filter_class", "(", "request", ".", "query_params", ",", "queryset", "=", "queryset", ")", ".", "qs", "filtered_querysets", ".", "append", "(", "queryset", ")", "summary_queryset", ".", "querysets", "=", "filtered_querysets", "return", "summary_queryset" ]
Filter each resource separately using its own filter
[ "Filter", "each", "resource", "separately", "using", "its", "own", "filter" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/filters.py#L275-L285
stepank/pyws
src/pyws/functions/args/types/__init__.py
TypeFactory
def TypeFactory(type_): """ This function creates a standard form type from a simplified form. >>> from datetime import date, datetime >>> from pyws.functions.args import TypeFactory >>> from pyws.functions.args import String, Integer, Float, Date, DateTime >>> TypeFactory(str) == String True >>> TypeFactory(float) == Float True >>> TypeFactory(date) == Date True >>> TypeFactory(datetime) == DateTime True >>> from operator import attrgetter >>> from pyws.functions.args import Dict >>> dct = TypeFactory({0: 'HelloWorldDict', 'hello': str, 'world': int}) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> fields = sorted(dct.fields, key=attrgetter('name')) >>> len(dct.fields) 2 >>> fields[0].name == 'hello' True >>> fields[0].type == String True >>> fields[1].name == 'world' True >>> fields[1].type == Integer True >>> from pyws.functions.args import List >>> lst = TypeFactory([int]) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True """ if isinstance(type_, type) and issubclass(type_, Type): return type_ for x in __types__: if x.represents(type_): return x.get(type_) raise UnknownType(type_)
python
def TypeFactory(type_): """ This function creates a standard form type from a simplified form. >>> from datetime import date, datetime >>> from pyws.functions.args import TypeFactory >>> from pyws.functions.args import String, Integer, Float, Date, DateTime >>> TypeFactory(str) == String True >>> TypeFactory(float) == Float True >>> TypeFactory(date) == Date True >>> TypeFactory(datetime) == DateTime True >>> from operator import attrgetter >>> from pyws.functions.args import Dict >>> dct = TypeFactory({0: 'HelloWorldDict', 'hello': str, 'world': int}) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> fields = sorted(dct.fields, key=attrgetter('name')) >>> len(dct.fields) 2 >>> fields[0].name == 'hello' True >>> fields[0].type == String True >>> fields[1].name == 'world' True >>> fields[1].type == Integer True >>> from pyws.functions.args import List >>> lst = TypeFactory([int]) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True """ if isinstance(type_, type) and issubclass(type_, Type): return type_ for x in __types__: if x.represents(type_): return x.get(type_) raise UnknownType(type_)
[ "def", "TypeFactory", "(", "type_", ")", ":", "if", "isinstance", "(", "type_", ",", "type", ")", "and", "issubclass", "(", "type_", ",", "Type", ")", ":", "return", "type_", "for", "x", "in", "__types__", ":", "if", "x", ".", "represents", "(", "type_", ")", ":", "return", "x", ".", "get", "(", "type_", ")", "raise", "UnknownType", "(", "type_", ")" ]
This function creates a standard form type from a simplified form. >>> from datetime import date, datetime >>> from pyws.functions.args import TypeFactory >>> from pyws.functions.args import String, Integer, Float, Date, DateTime >>> TypeFactory(str) == String True >>> TypeFactory(float) == Float True >>> TypeFactory(date) == Date True >>> TypeFactory(datetime) == DateTime True >>> from operator import attrgetter >>> from pyws.functions.args import Dict >>> dct = TypeFactory({0: 'HelloWorldDict', 'hello': str, 'world': int}) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> fields = sorted(dct.fields, key=attrgetter('name')) >>> len(dct.fields) 2 >>> fields[0].name == 'hello' True >>> fields[0].type == String True >>> fields[1].name == 'world' True >>> fields[1].type == Integer True >>> from pyws.functions.args import List >>> lst = TypeFactory([int]) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True
[ "This", "function", "creates", "a", "standard", "form", "type", "from", "a", "simplified", "form", "." ]
train
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/functions/args/types/__init__.py#L21-L70
opennode/waldur-core
waldur_core/structure/images.py
dummy_image
def dummy_image(filetype='gif'): """ Generate empty image in temporary file for testing """ # 1x1px Transparent GIF GIF = 'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7' tmp_file = tempfile.NamedTemporaryFile(suffix='.%s' % filetype) tmp_file.write(base64.b64decode(GIF)) return open(tmp_file.name, 'rb')
python
def dummy_image(filetype='gif'): """ Generate empty image in temporary file for testing """ # 1x1px Transparent GIF GIF = 'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7' tmp_file = tempfile.NamedTemporaryFile(suffix='.%s' % filetype) tmp_file.write(base64.b64decode(GIF)) return open(tmp_file.name, 'rb')
[ "def", "dummy_image", "(", "filetype", "=", "'gif'", ")", ":", "# 1x1px Transparent GIF", "GIF", "=", "'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7'", "tmp_file", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.%s'", "%", "filetype", ")", "tmp_file", ".", "write", "(", "base64", ".", "b64decode", "(", "GIF", ")", ")", "return", "open", "(", "tmp_file", ".", "name", ",", "'rb'", ")" ]
Generate empty image in temporary file for testing
[ "Generate", "empty", "image", "in", "temporary", "file", "for", "testing" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/images.py#L14-L20
quora/qcore
qcore/microtime.py
utime_delta
def utime_delta(days=0, hours=0, minutes=0, seconds=0): """Gets time delta in microseconds. Note: Do NOT use this function without keyword arguments. It will become much-much harder to add extra time ranges later if positional arguments are used. """ return (days * DAY) + (hours * HOUR) + (minutes * MINUTE) + (seconds * SECOND)
python
def utime_delta(days=0, hours=0, minutes=0, seconds=0): """Gets time delta in microseconds. Note: Do NOT use this function without keyword arguments. It will become much-much harder to add extra time ranges later if positional arguments are used. """ return (days * DAY) + (hours * HOUR) + (minutes * MINUTE) + (seconds * SECOND)
[ "def", "utime_delta", "(", "days", "=", "0", ",", "hours", "=", "0", ",", "minutes", "=", "0", ",", "seconds", "=", "0", ")", ":", "return", "(", "days", "*", "DAY", ")", "+", "(", "hours", "*", "HOUR", ")", "+", "(", "minutes", "*", "MINUTE", ")", "+", "(", "seconds", "*", "SECOND", ")" ]
Gets time delta in microseconds. Note: Do NOT use this function without keyword arguments. It will become much-much harder to add extra time ranges later if positional arguments are used.
[ "Gets", "time", "delta", "in", "microseconds", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/microtime.py#L74-L81
quora/qcore
qcore/microtime.py
execute_with_timeout
def execute_with_timeout( fn, args=None, kwargs=None, timeout=None, fail_if_no_timer=True, signal_type=_default_signal_type, timer_type=_default_timer_type, timeout_exception_cls=TimeoutError, ): """ Executes specified function with timeout. Uses SIGALRM to interrupt it. :type fn: function :param fn: function to execute :type args: tuple :param args: function args :type kwargs: dict :param kwargs: function kwargs :type timeout: float :param timeout: timeout, seconds; 0 or None means no timeout :type fail_if_no_timer: bool :param fail_if_no_timer: fail, if timer is nor available; normally it's available only in the main thread :type signal_type: signalnum :param signal_type: type of signal to use (see signal module) :type timer_type: signal.ITIMER_REAL, signal.ITIMER_VIRTUAL or signal.ITIMER_PROF :param timer_type: type of timer to use (see signal module) :type timeout_exception_cls: class :param timeout_exception_cls: exception to throw in case of timeout :return: fn call result. """ if args is None: args = empty_tuple if kwargs is None: kwargs = empty_dict if timeout is None or timeout == 0 or signal_type is None or timer_type is None: return fn(*args, **kwargs) def signal_handler(signum, frame): raise timeout_exception_cls(inspection.get_function_call_str(fn, args, kwargs)) old_signal_handler = none timer_is_set = False try: try: old_signal_handler = signal.signal(signal_type, signal_handler) signal.setitimer(timer_type, timeout) timer_is_set = True except ValueError: if fail_if_no_timer: raise NotSupportedError( "Timer is not available; the code is probably invoked from outside the main " "thread." ) return fn(*args, **kwargs) finally: if timer_is_set: signal.setitimer(timer_type, 0) if old_signal_handler is not none: signal.signal(signal_type, old_signal_handler)
python
def execute_with_timeout( fn, args=None, kwargs=None, timeout=None, fail_if_no_timer=True, signal_type=_default_signal_type, timer_type=_default_timer_type, timeout_exception_cls=TimeoutError, ): """ Executes specified function with timeout. Uses SIGALRM to interrupt it. :type fn: function :param fn: function to execute :type args: tuple :param args: function args :type kwargs: dict :param kwargs: function kwargs :type timeout: float :param timeout: timeout, seconds; 0 or None means no timeout :type fail_if_no_timer: bool :param fail_if_no_timer: fail, if timer is nor available; normally it's available only in the main thread :type signal_type: signalnum :param signal_type: type of signal to use (see signal module) :type timer_type: signal.ITIMER_REAL, signal.ITIMER_VIRTUAL or signal.ITIMER_PROF :param timer_type: type of timer to use (see signal module) :type timeout_exception_cls: class :param timeout_exception_cls: exception to throw in case of timeout :return: fn call result. """ if args is None: args = empty_tuple if kwargs is None: kwargs = empty_dict if timeout is None or timeout == 0 or signal_type is None or timer_type is None: return fn(*args, **kwargs) def signal_handler(signum, frame): raise timeout_exception_cls(inspection.get_function_call_str(fn, args, kwargs)) old_signal_handler = none timer_is_set = False try: try: old_signal_handler = signal.signal(signal_type, signal_handler) signal.setitimer(timer_type, timeout) timer_is_set = True except ValueError: if fail_if_no_timer: raise NotSupportedError( "Timer is not available; the code is probably invoked from outside the main " "thread." ) return fn(*args, **kwargs) finally: if timer_is_set: signal.setitimer(timer_type, 0) if old_signal_handler is not none: signal.signal(signal_type, old_signal_handler)
[ "def", "execute_with_timeout", "(", "fn", ",", "args", "=", "None", ",", "kwargs", "=", "None", ",", "timeout", "=", "None", ",", "fail_if_no_timer", "=", "True", ",", "signal_type", "=", "_default_signal_type", ",", "timer_type", "=", "_default_timer_type", ",", "timeout_exception_cls", "=", "TimeoutError", ",", ")", ":", "if", "args", "is", "None", ":", "args", "=", "empty_tuple", "if", "kwargs", "is", "None", ":", "kwargs", "=", "empty_dict", "if", "timeout", "is", "None", "or", "timeout", "==", "0", "or", "signal_type", "is", "None", "or", "timer_type", "is", "None", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "def", "signal_handler", "(", "signum", ",", "frame", ")", ":", "raise", "timeout_exception_cls", "(", "inspection", ".", "get_function_call_str", "(", "fn", ",", "args", ",", "kwargs", ")", ")", "old_signal_handler", "=", "none", "timer_is_set", "=", "False", "try", ":", "try", ":", "old_signal_handler", "=", "signal", ".", "signal", "(", "signal_type", ",", "signal_handler", ")", "signal", ".", "setitimer", "(", "timer_type", ",", "timeout", ")", "timer_is_set", "=", "True", "except", "ValueError", ":", "if", "fail_if_no_timer", ":", "raise", "NotSupportedError", "(", "\"Timer is not available; the code is probably invoked from outside the main \"", "\"thread.\"", ")", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "timer_is_set", ":", "signal", ".", "setitimer", "(", "timer_type", ",", "0", ")", "if", "old_signal_handler", "is", "not", "none", ":", "signal", ".", "signal", "(", "signal_type", ",", "old_signal_handler", ")" ]
Executes specified function with timeout. Uses SIGALRM to interrupt it. :type fn: function :param fn: function to execute :type args: tuple :param args: function args :type kwargs: dict :param kwargs: function kwargs :type timeout: float :param timeout: timeout, seconds; 0 or None means no timeout :type fail_if_no_timer: bool :param fail_if_no_timer: fail, if timer is nor available; normally it's available only in the main thread :type signal_type: signalnum :param signal_type: type of signal to use (see signal module) :type timer_type: signal.ITIMER_REAL, signal.ITIMER_VIRTUAL or signal.ITIMER_PROF :param timer_type: type of timer to use (see signal module) :type timeout_exception_cls: class :param timeout_exception_cls: exception to throw in case of timeout :return: fn call result.
[ "Executes", "specified", "function", "with", "timeout", ".", "Uses", "SIGALRM", "to", "interrupt", "it", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/microtime.py#L132-L202
quora/qcore
qcore/inspection.py
get_original_fn
def get_original_fn(fn): """Gets the very original function of a decorated one.""" fn_type = type(fn) if fn_type is classmethod or fn_type is staticmethod: return get_original_fn(fn.__func__) if hasattr(fn, "original_fn"): return fn.original_fn if hasattr(fn, "fn"): fn.original_fn = get_original_fn(fn.fn) return fn.original_fn return fn
python
def get_original_fn(fn): """Gets the very original function of a decorated one.""" fn_type = type(fn) if fn_type is classmethod or fn_type is staticmethod: return get_original_fn(fn.__func__) if hasattr(fn, "original_fn"): return fn.original_fn if hasattr(fn, "fn"): fn.original_fn = get_original_fn(fn.fn) return fn.original_fn return fn
[ "def", "get_original_fn", "(", "fn", ")", ":", "fn_type", "=", "type", "(", "fn", ")", "if", "fn_type", "is", "classmethod", "or", "fn_type", "is", "staticmethod", ":", "return", "get_original_fn", "(", "fn", ".", "__func__", ")", "if", "hasattr", "(", "fn", ",", "\"original_fn\"", ")", ":", "return", "fn", ".", "original_fn", "if", "hasattr", "(", "fn", ",", "\"fn\"", ")", ":", "fn", ".", "original_fn", "=", "get_original_fn", "(", "fn", ".", "fn", ")", "return", "fn", ".", "original_fn", "return", "fn" ]
Gets the very original function of a decorated one.
[ "Gets", "the", "very", "original", "function", "of", "a", "decorated", "one", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L27-L38
quora/qcore
qcore/inspection.py
get_full_name
def get_full_name(src): """Gets full class or function name.""" if hasattr(src, "_full_name_"): return src._full_name_ if hasattr(src, "is_decorator"): # Our own decorator or binder if hasattr(src, "decorator"): # Our own binder _full_name_ = str(src.decorator) # It's a short-living object, so we don't cache result else: # Our own decorator _full_name_ = str(src) try: src._full_name_ = _full_name_ except AttributeError: pass except TypeError: pass elif hasattr(src, "im_class"): # Bound method cls = src.im_class _full_name_ = get_full_name(cls) + "." + src.__name__ # It's a short-living object, so we don't cache result elif hasattr(src, "__module__") and hasattr(src, "__name__"): # Func or class _full_name_ = ( ("<unknown module>" if src.__module__ is None else src.__module__) + "." + src.__name__ ) try: src._full_name_ = _full_name_ except AttributeError: pass except TypeError: pass else: # Something else _full_name_ = str(get_original_fn(src)) return _full_name_
python
def get_full_name(src): """Gets full class or function name.""" if hasattr(src, "_full_name_"): return src._full_name_ if hasattr(src, "is_decorator"): # Our own decorator or binder if hasattr(src, "decorator"): # Our own binder _full_name_ = str(src.decorator) # It's a short-living object, so we don't cache result else: # Our own decorator _full_name_ = str(src) try: src._full_name_ = _full_name_ except AttributeError: pass except TypeError: pass elif hasattr(src, "im_class"): # Bound method cls = src.im_class _full_name_ = get_full_name(cls) + "." + src.__name__ # It's a short-living object, so we don't cache result elif hasattr(src, "__module__") and hasattr(src, "__name__"): # Func or class _full_name_ = ( ("<unknown module>" if src.__module__ is None else src.__module__) + "." + src.__name__ ) try: src._full_name_ = _full_name_ except AttributeError: pass except TypeError: pass else: # Something else _full_name_ = str(get_original_fn(src)) return _full_name_
[ "def", "get_full_name", "(", "src", ")", ":", "if", "hasattr", "(", "src", ",", "\"_full_name_\"", ")", ":", "return", "src", ".", "_full_name_", "if", "hasattr", "(", "src", ",", "\"is_decorator\"", ")", ":", "# Our own decorator or binder", "if", "hasattr", "(", "src", ",", "\"decorator\"", ")", ":", "# Our own binder", "_full_name_", "=", "str", "(", "src", ".", "decorator", ")", "# It's a short-living object, so we don't cache result", "else", ":", "# Our own decorator", "_full_name_", "=", "str", "(", "src", ")", "try", ":", "src", ".", "_full_name_", "=", "_full_name_", "except", "AttributeError", ":", "pass", "except", "TypeError", ":", "pass", "elif", "hasattr", "(", "src", ",", "\"im_class\"", ")", ":", "# Bound method", "cls", "=", "src", ".", "im_class", "_full_name_", "=", "get_full_name", "(", "cls", ")", "+", "\".\"", "+", "src", ".", "__name__", "# It's a short-living object, so we don't cache result", "elif", "hasattr", "(", "src", ",", "\"__module__\"", ")", "and", "hasattr", "(", "src", ",", "\"__name__\"", ")", ":", "# Func or class", "_full_name_", "=", "(", "(", "\"<unknown module>\"", "if", "src", ".", "__module__", "is", "None", "else", "src", ".", "__module__", ")", "+", "\".\"", "+", "src", ".", "__name__", ")", "try", ":", "src", ".", "_full_name_", "=", "_full_name_", "except", "AttributeError", ":", "pass", "except", "TypeError", ":", "pass", "else", ":", "# Something else", "_full_name_", "=", "str", "(", "get_original_fn", "(", "src", ")", ")", "return", "_full_name_" ]
Gets full class or function name.
[ "Gets", "full", "class", "or", "function", "name", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L41-L82
quora/qcore
qcore/inspection.py
get_function_call_str
def get_function_call_str(fn, args, kwargs): """Converts method call (function and its arguments) to a str(...)-like string.""" def str_converter(v): try: return str(v) except Exception: try: return repr(v) except Exception: return "<n/a str raised>" result = get_full_name(fn) + "(" first = True for v in args: if first: first = False else: result += "," result += str_converter(v) for k, v in kwargs.items(): if first: first = False else: result += "," result += str(k) + "=" + str_converter(v) result += ")" return result
python
def get_function_call_str(fn, args, kwargs): """Converts method call (function and its arguments) to a str(...)-like string.""" def str_converter(v): try: return str(v) except Exception: try: return repr(v) except Exception: return "<n/a str raised>" result = get_full_name(fn) + "(" first = True for v in args: if first: first = False else: result += "," result += str_converter(v) for k, v in kwargs.items(): if first: first = False else: result += "," result += str(k) + "=" + str_converter(v) result += ")" return result
[ "def", "get_function_call_str", "(", "fn", ",", "args", ",", "kwargs", ")", ":", "def", "str_converter", "(", "v", ")", ":", "try", ":", "return", "str", "(", "v", ")", "except", "Exception", ":", "try", ":", "return", "repr", "(", "v", ")", "except", "Exception", ":", "return", "\"<n/a str raised>\"", "result", "=", "get_full_name", "(", "fn", ")", "+", "\"(\"", "first", "=", "True", "for", "v", "in", "args", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "str_converter", "(", "v", ")", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "str", "(", "k", ")", "+", "\"=\"", "+", "str_converter", "(", "v", ")", "result", "+=", "\")\"", "return", "result" ]
Converts method call (function and its arguments) to a str(...)-like string.
[ "Converts", "method", "call", "(", "function", "and", "its", "arguments", ")", "to", "a", "str", "(", "...", ")", "-", "like", "string", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L85-L112
quora/qcore
qcore/inspection.py
get_function_call_repr
def get_function_call_repr(fn, args, kwargs): """Converts method call (function and its arguments) to a repr(...)-like string.""" result = get_full_name(fn) + "(" first = True for v in args: if first: first = False else: result += "," result += repr(v) for k, v in kwargs.items(): if first: first = False else: result += "," result += str(k) + "=" + repr(v) result += ")" return result
python
def get_function_call_repr(fn, args, kwargs): """Converts method call (function and its arguments) to a repr(...)-like string.""" result = get_full_name(fn) + "(" first = True for v in args: if first: first = False else: result += "," result += repr(v) for k, v in kwargs.items(): if first: first = False else: result += "," result += str(k) + "=" + repr(v) result += ")" return result
[ "def", "get_function_call_repr", "(", "fn", ",", "args", ",", "kwargs", ")", ":", "result", "=", "get_full_name", "(", "fn", ")", "+", "\"(\"", "first", "=", "True", "for", "v", "in", "args", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "repr", "(", "v", ")", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "str", "(", "k", ")", "+", "\"=\"", "+", "repr", "(", "v", ")", "result", "+=", "\")\"", "return", "result" ]
Converts method call (function and its arguments) to a repr(...)-like string.
[ "Converts", "method", "call", "(", "function", "and", "its", "arguments", ")", "to", "a", "repr", "(", "...", ")", "-", "like", "string", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L115-L133
quora/qcore
qcore/inspection.py
getargspec
def getargspec(func): """Variation of inspect.getargspec that works for more functions. This function works for Cythonized, non-cpdef functions, which expose argspec information but are not accepted by getargspec. It also works for Python 3 functions that use annotations, which are simply ignored. However, keyword-only arguments are not supported. """ if inspect.ismethod(func): func = func.__func__ # Cythonized functions have a .__code__, but don't pass inspect.isfunction() try: code = func.__code__ except AttributeError: raise TypeError("{!r} is not a Python function".format(func)) if hasattr(code, "co_kwonlyargcount") and code.co_kwonlyargcount > 0: raise ValueError("keyword-only arguments are not supported by getargspec()") args, varargs, varkw = inspect.getargs(code) return inspect.ArgSpec(args, varargs, varkw, func.__defaults__)
python
def getargspec(func): """Variation of inspect.getargspec that works for more functions. This function works for Cythonized, non-cpdef functions, which expose argspec information but are not accepted by getargspec. It also works for Python 3 functions that use annotations, which are simply ignored. However, keyword-only arguments are not supported. """ if inspect.ismethod(func): func = func.__func__ # Cythonized functions have a .__code__, but don't pass inspect.isfunction() try: code = func.__code__ except AttributeError: raise TypeError("{!r} is not a Python function".format(func)) if hasattr(code, "co_kwonlyargcount") and code.co_kwonlyargcount > 0: raise ValueError("keyword-only arguments are not supported by getargspec()") args, varargs, varkw = inspect.getargs(code) return inspect.ArgSpec(args, varargs, varkw, func.__defaults__)
[ "def", "getargspec", "(", "func", ")", ":", "if", "inspect", ".", "ismethod", "(", "func", ")", ":", "func", "=", "func", ".", "__func__", "# Cythonized functions have a .__code__, but don't pass inspect.isfunction()", "try", ":", "code", "=", "func", ".", "__code__", "except", "AttributeError", ":", "raise", "TypeError", "(", "\"{!r} is not a Python function\"", ".", "format", "(", "func", ")", ")", "if", "hasattr", "(", "code", ",", "\"co_kwonlyargcount\"", ")", "and", "code", ".", "co_kwonlyargcount", ">", "0", ":", "raise", "ValueError", "(", "\"keyword-only arguments are not supported by getargspec()\"", ")", "args", ",", "varargs", ",", "varkw", "=", "inspect", ".", "getargs", "(", "code", ")", "return", "inspect", ".", "ArgSpec", "(", "args", ",", "varargs", ",", "varkw", ",", "func", ".", "__defaults__", ")" ]
Variation of inspect.getargspec that works for more functions. This function works for Cythonized, non-cpdef functions, which expose argspec information but are not accepted by getargspec. It also works for Python 3 functions that use annotations, which are simply ignored. However, keyword-only arguments are not supported.
[ "Variation", "of", "inspect", ".", "getargspec", "that", "works", "for", "more", "functions", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L136-L154
quora/qcore
qcore/inspection.py
is_cython_or_generator
def is_cython_or_generator(fn): """Returns whether this function is either a generator function or a Cythonized function.""" if hasattr(fn, "__func__"): fn = fn.__func__ # Class method, static method if inspect.isgeneratorfunction(fn): return True name = type(fn).__name__ return ( name == "generator" or name == "method_descriptor" or name == "cython_function_or_method" or name == "builtin_function_or_method" )
python
def is_cython_or_generator(fn): """Returns whether this function is either a generator function or a Cythonized function.""" if hasattr(fn, "__func__"): fn = fn.__func__ # Class method, static method if inspect.isgeneratorfunction(fn): return True name = type(fn).__name__ return ( name == "generator" or name == "method_descriptor" or name == "cython_function_or_method" or name == "builtin_function_or_method" )
[ "def", "is_cython_or_generator", "(", "fn", ")", ":", "if", "hasattr", "(", "fn", ",", "\"__func__\"", ")", ":", "fn", "=", "fn", ".", "__func__", "# Class method, static method", "if", "inspect", ".", "isgeneratorfunction", "(", "fn", ")", ":", "return", "True", "name", "=", "type", "(", "fn", ")", ".", "__name__", "return", "(", "name", "==", "\"generator\"", "or", "name", "==", "\"method_descriptor\"", "or", "name", "==", "\"cython_function_or_method\"", "or", "name", "==", "\"builtin_function_or_method\"", ")" ]
Returns whether this function is either a generator function or a Cythonized function.
[ "Returns", "whether", "this", "function", "is", "either", "a", "generator", "function", "or", "a", "Cythonized", "function", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L157-L169
quora/qcore
qcore/inspection.py
is_cython_function
def is_cython_function(fn): """Checks if a function is compiled w/Cython.""" if hasattr(fn, "__func__"): fn = fn.__func__ # Class method, static method name = type(fn).__name__ return ( name == "method_descriptor" or name == "cython_function_or_method" or name == "builtin_function_or_method" )
python
def is_cython_function(fn): """Checks if a function is compiled w/Cython.""" if hasattr(fn, "__func__"): fn = fn.__func__ # Class method, static method name = type(fn).__name__ return ( name == "method_descriptor" or name == "cython_function_or_method" or name == "builtin_function_or_method" )
[ "def", "is_cython_function", "(", "fn", ")", ":", "if", "hasattr", "(", "fn", ",", "\"__func__\"", ")", ":", "fn", "=", "fn", ".", "__func__", "# Class method, static method", "name", "=", "type", "(", "fn", ")", ".", "__name__", "return", "(", "name", "==", "\"method_descriptor\"", "or", "name", "==", "\"cython_function_or_method\"", "or", "name", "==", "\"builtin_function_or_method\"", ")" ]
Checks if a function is compiled w/Cython.
[ "Checks", "if", "a", "function", "is", "compiled", "w", "/", "Cython", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L172-L181
quora/qcore
qcore/inspection.py
is_classmethod
def is_classmethod(fn): """Returns whether f is a classmethod.""" # This is True for bound methods if not inspect.ismethod(fn): return False if not hasattr(fn, "__self__"): return False im_self = fn.__self__ # This is None for instance methods on classes, but True # for instance methods on instances. if im_self is None: return False # This is True for class methods of new- and old-style classes, respectively return isinstance(im_self, six.class_types)
python
def is_classmethod(fn): """Returns whether f is a classmethod.""" # This is True for bound methods if not inspect.ismethod(fn): return False if not hasattr(fn, "__self__"): return False im_self = fn.__self__ # This is None for instance methods on classes, but True # for instance methods on instances. if im_self is None: return False # This is True for class methods of new- and old-style classes, respectively return isinstance(im_self, six.class_types)
[ "def", "is_classmethod", "(", "fn", ")", ":", "# This is True for bound methods", "if", "not", "inspect", ".", "ismethod", "(", "fn", ")", ":", "return", "False", "if", "not", "hasattr", "(", "fn", ",", "\"__self__\"", ")", ":", "return", "False", "im_self", "=", "fn", ".", "__self__", "# This is None for instance methods on classes, but True", "# for instance methods on instances.", "if", "im_self", "is", "None", ":", "return", "False", "# This is True for class methods of new- and old-style classes, respectively", "return", "isinstance", "(", "im_self", ",", "six", ".", "class_types", ")" ]
Returns whether f is a classmethod.
[ "Returns", "whether", "f", "is", "a", "classmethod", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L189-L202
quora/qcore
qcore/inspection.py
wraps
def wraps( wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES ): """Cython-compatible functools.wraps implementation.""" if not is_cython_function(wrapped): return functools.wraps(wrapped, assigned, updated) else: return lambda wrapper: wrapper
python
def wraps( wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES ): """Cython-compatible functools.wraps implementation.""" if not is_cython_function(wrapped): return functools.wraps(wrapped, assigned, updated) else: return lambda wrapper: wrapper
[ "def", "wraps", "(", "wrapped", ",", "assigned", "=", "functools", ".", "WRAPPER_ASSIGNMENTS", ",", "updated", "=", "functools", ".", "WRAPPER_UPDATES", ")", ":", "if", "not", "is_cython_function", "(", "wrapped", ")", ":", "return", "functools", ".", "wraps", "(", "wrapped", ",", "assigned", ",", "updated", ")", "else", ":", "return", "lambda", "wrapper", ":", "wrapper" ]
Cython-compatible functools.wraps implementation.
[ "Cython", "-", "compatible", "functools", ".", "wraps", "implementation", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L205-L212
quora/qcore
qcore/inspection.py
get_subclass_tree
def get_subclass_tree(cls, ensure_unique=True): """Returns all subclasses (direct and recursive) of cls.""" subclasses = [] # cls.__subclasses__() fails on classes inheriting from type for subcls in type.__subclasses__(cls): subclasses.append(subcls) subclasses.extend(get_subclass_tree(subcls, ensure_unique)) return list(set(subclasses)) if ensure_unique else subclasses
python
def get_subclass_tree(cls, ensure_unique=True): """Returns all subclasses (direct and recursive) of cls.""" subclasses = [] # cls.__subclasses__() fails on classes inheriting from type for subcls in type.__subclasses__(cls): subclasses.append(subcls) subclasses.extend(get_subclass_tree(subcls, ensure_unique)) return list(set(subclasses)) if ensure_unique else subclasses
[ "def", "get_subclass_tree", "(", "cls", ",", "ensure_unique", "=", "True", ")", ":", "subclasses", "=", "[", "]", "# cls.__subclasses__() fails on classes inheriting from type", "for", "subcls", "in", "type", ".", "__subclasses__", "(", "cls", ")", ":", "subclasses", ".", "append", "(", "subcls", ")", "subclasses", ".", "extend", "(", "get_subclass_tree", "(", "subcls", ",", "ensure_unique", ")", ")", "return", "list", "(", "set", "(", "subclasses", ")", ")", "if", "ensure_unique", "else", "subclasses" ]
Returns all subclasses (direct and recursive) of cls.
[ "Returns", "all", "subclasses", "(", "direct", "and", "recursive", ")", "of", "cls", "." ]
train
https://github.com/quora/qcore/blob/fa5cd438eea554db35fd29cbc8dfbde69f09961c/qcore/inspection.py#L215-L222
stepank/pyws
src/pyws/functions/args/types/complex.py
DictOf
def DictOf(name, *fields): """ This function creates a dict type with the specified name and fields. >>> from pyws.functions.args import DictOf, Field >>> dct = DictOf( ... 'HelloWorldDict', Field('hello', str), Field('hello', int)) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> len(dct.fields) 2 """ ret = type(name, (Dict,), {'fields': []}) #noinspection PyUnresolvedReferences ret.add_fields(*fields) return ret
python
def DictOf(name, *fields): """ This function creates a dict type with the specified name and fields. >>> from pyws.functions.args import DictOf, Field >>> dct = DictOf( ... 'HelloWorldDict', Field('hello', str), Field('hello', int)) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> len(dct.fields) 2 """ ret = type(name, (Dict,), {'fields': []}) #noinspection PyUnresolvedReferences ret.add_fields(*fields) return ret
[ "def", "DictOf", "(", "name", ",", "*", "fields", ")", ":", "ret", "=", "type", "(", "name", ",", "(", "Dict", ",", ")", ",", "{", "'fields'", ":", "[", "]", "}", ")", "#noinspection PyUnresolvedReferences", "ret", ".", "add_fields", "(", "*", "fields", ")", "return", "ret" ]
This function creates a dict type with the specified name and fields. >>> from pyws.functions.args import DictOf, Field >>> dct = DictOf( ... 'HelloWorldDict', Field('hello', str), Field('hello', int)) >>> issubclass(dct, Dict) True >>> dct.__name__ 'HelloWorldDict' >>> len(dct.fields) 2
[ "This", "function", "creates", "a", "dict", "type", "with", "the", "specified", "name", "and", "fields", "." ]
train
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/functions/args/types/complex.py#L88-L105
stepank/pyws
src/pyws/functions/args/types/complex.py
ListOf
def ListOf(element_type, element_none_value=None): """ This function creates a list type with element type ``element_type`` and an empty element value ``element_none_value``. >>> from pyws.functions.args import Integer, ListOf >>> lst = ListOf(int) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True """ from pyws.functions.args.types import TypeFactory element_type = TypeFactory(element_type) return type(element_type.__name__ + 'List', (List,), { 'element_type': element_type, 'element_none_value': element_none_value})
python
def ListOf(element_type, element_none_value=None): """ This function creates a list type with element type ``element_type`` and an empty element value ``element_none_value``. >>> from pyws.functions.args import Integer, ListOf >>> lst = ListOf(int) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True """ from pyws.functions.args.types import TypeFactory element_type = TypeFactory(element_type) return type(element_type.__name__ + 'List', (List,), { 'element_type': element_type, 'element_none_value': element_none_value})
[ "def", "ListOf", "(", "element_type", ",", "element_none_value", "=", "None", ")", ":", "from", "pyws", ".", "functions", ".", "args", ".", "types", "import", "TypeFactory", "element_type", "=", "TypeFactory", "(", "element_type", ")", "return", "type", "(", "element_type", ".", "__name__", "+", "'List'", ",", "(", "List", ",", ")", ",", "{", "'element_type'", ":", "element_type", ",", "'element_none_value'", ":", "element_none_value", "}", ")" ]
This function creates a list type with element type ``element_type`` and an empty element value ``element_none_value``. >>> from pyws.functions.args import Integer, ListOf >>> lst = ListOf(int) >>> issubclass(lst, List) True >>> lst.__name__ 'IntegerList' >>> lst.element_type == Integer True
[ "This", "function", "creates", "a", "list", "type", "with", "element", "type", "element_type", "and", "an", "empty", "element", "value", "element_none_value", "." ]
train
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/functions/args/types/complex.py#L108-L126
opennode/waldur-core
waldur_core/structure/metadata.py
ActionsMetadata.get_actions
def get_actions(self, request, view): """ Return metadata for resource-specific actions, such as start, stop, unlink """ metadata = OrderedDict() actions = self.get_resource_actions(view) resource = view.get_object() for action_name, action in actions.items(): if action_name == 'update': view.request = clone_request(request, 'PUT') else: view.action = action_name data = ActionSerializer(action, action_name, request, view, resource) metadata[action_name] = data.serialize() if not metadata[action_name]['enabled']: continue fields = self.get_action_fields(view, action_name, resource) if not fields: metadata[action_name]['type'] = 'button' else: metadata[action_name]['type'] = 'form' metadata[action_name]['fields'] = fields view.action = None view.request = request return metadata
python
def get_actions(self, request, view): """ Return metadata for resource-specific actions, such as start, stop, unlink """ metadata = OrderedDict() actions = self.get_resource_actions(view) resource = view.get_object() for action_name, action in actions.items(): if action_name == 'update': view.request = clone_request(request, 'PUT') else: view.action = action_name data = ActionSerializer(action, action_name, request, view, resource) metadata[action_name] = data.serialize() if not metadata[action_name]['enabled']: continue fields = self.get_action_fields(view, action_name, resource) if not fields: metadata[action_name]['type'] = 'button' else: metadata[action_name]['type'] = 'form' metadata[action_name]['fields'] = fields view.action = None view.request = request return metadata
[ "def", "get_actions", "(", "self", ",", "request", ",", "view", ")", ":", "metadata", "=", "OrderedDict", "(", ")", "actions", "=", "self", ".", "get_resource_actions", "(", "view", ")", "resource", "=", "view", ".", "get_object", "(", ")", "for", "action_name", ",", "action", "in", "actions", ".", "items", "(", ")", ":", "if", "action_name", "==", "'update'", ":", "view", ".", "request", "=", "clone_request", "(", "request", ",", "'PUT'", ")", "else", ":", "view", ".", "action", "=", "action_name", "data", "=", "ActionSerializer", "(", "action", ",", "action_name", ",", "request", ",", "view", ",", "resource", ")", "metadata", "[", "action_name", "]", "=", "data", ".", "serialize", "(", ")", "if", "not", "metadata", "[", "action_name", "]", "[", "'enabled'", "]", ":", "continue", "fields", "=", "self", ".", "get_action_fields", "(", "view", ",", "action_name", ",", "resource", ")", "if", "not", "fields", ":", "metadata", "[", "action_name", "]", "[", "'type'", "]", "=", "'button'", "else", ":", "metadata", "[", "action_name", "]", "[", "'type'", "]", "=", "'form'", "metadata", "[", "action_name", "]", "[", "'fields'", "]", "=", "fields", "view", ".", "action", "=", "None", "view", ".", "request", "=", "request", "return", "metadata" ]
Return metadata for resource-specific actions, such as start, stop, unlink
[ "Return", "metadata", "for", "resource", "-", "specific", "actions", "such", "as", "start", "stop", "unlink" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/metadata.py#L96-L125
opennode/waldur-core
waldur_core/structure/metadata.py
ActionsMetadata.get_action_fields
def get_action_fields(self, view, action_name, resource): """ Get fields exposed by action's serializer """ serializer = view.get_serializer(resource) fields = OrderedDict() if not isinstance(serializer, view.serializer_class) or action_name == 'update': fields = self.get_fields(serializer.fields) return fields
python
def get_action_fields(self, view, action_name, resource): """ Get fields exposed by action's serializer """ serializer = view.get_serializer(resource) fields = OrderedDict() if not isinstance(serializer, view.serializer_class) or action_name == 'update': fields = self.get_fields(serializer.fields) return fields
[ "def", "get_action_fields", "(", "self", ",", "view", ",", "action_name", ",", "resource", ")", ":", "serializer", "=", "view", ".", "get_serializer", "(", "resource", ")", "fields", "=", "OrderedDict", "(", ")", "if", "not", "isinstance", "(", "serializer", ",", "view", ".", "serializer_class", ")", "or", "action_name", "==", "'update'", ":", "fields", "=", "self", ".", "get_fields", "(", "serializer", ".", "fields", ")", "return", "fields" ]
Get fields exposed by action's serializer
[ "Get", "fields", "exposed", "by", "action", "s", "serializer" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/metadata.py#L150-L158
opennode/waldur-core
waldur_core/structure/metadata.py
ActionsMetadata.get_serializer_info
def get_serializer_info(self, serializer): """ Given an instance of a serializer, return a dictionary of metadata about its fields. """ if hasattr(serializer, 'child'): # If this is a `ListSerializer` then we want to examine the # underlying child serializer instance instead. serializer = serializer.child return self.get_fields(serializer.fields)
python
def get_serializer_info(self, serializer): """ Given an instance of a serializer, return a dictionary of metadata about its fields. """ if hasattr(serializer, 'child'): # If this is a `ListSerializer` then we want to examine the # underlying child serializer instance instead. serializer = serializer.child return self.get_fields(serializer.fields)
[ "def", "get_serializer_info", "(", "self", ",", "serializer", ")", ":", "if", "hasattr", "(", "serializer", ",", "'child'", ")", ":", "# If this is a `ListSerializer` then we want to examine the", "# underlying child serializer instance instead.", "serializer", "=", "serializer", ".", "child", "return", "self", ".", "get_fields", "(", "serializer", ".", "fields", ")" ]
Given an instance of a serializer, return a dictionary of metadata about its fields.
[ "Given", "an", "instance", "of", "a", "serializer", "return", "a", "dictionary", "of", "metadata", "about", "its", "fields", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/metadata.py#L160-L169
opennode/waldur-core
waldur_core/structure/metadata.py
ActionsMetadata.get_fields
def get_fields(self, serializer_fields): """ Get fields metadata skipping empty fields """ fields = OrderedDict() for field_name, field in serializer_fields.items(): # Skip tags field in action because it is needed only for resource creation # See also: WAL-1223 if field_name == 'tags': continue info = self.get_field_info(field, field_name) if info: fields[field_name] = info return fields
python
def get_fields(self, serializer_fields): """ Get fields metadata skipping empty fields """ fields = OrderedDict() for field_name, field in serializer_fields.items(): # Skip tags field in action because it is needed only for resource creation # See also: WAL-1223 if field_name == 'tags': continue info = self.get_field_info(field, field_name) if info: fields[field_name] = info return fields
[ "def", "get_fields", "(", "self", ",", "serializer_fields", ")", ":", "fields", "=", "OrderedDict", "(", ")", "for", "field_name", ",", "field", "in", "serializer_fields", ".", "items", "(", ")", ":", "# Skip tags field in action because it is needed only for resource creation", "# See also: WAL-1223", "if", "field_name", "==", "'tags'", ":", "continue", "info", "=", "self", ".", "get_field_info", "(", "field", ",", "field_name", ")", "if", "info", ":", "fields", "[", "field_name", "]", "=", "info", "return", "fields" ]
Get fields metadata skipping empty fields
[ "Get", "fields", "metadata", "skipping", "empty", "fields" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/metadata.py#L171-L184
opennode/waldur-core
waldur_core/structure/metadata.py
ActionsMetadata.get_field_info
def get_field_info(self, field, field_name): """ Given an instance of a serializer field, return a dictionary of metadata about it. """ field_info = OrderedDict() field_info['type'] = self.label_lookup[field] field_info['required'] = getattr(field, 'required', False) attrs = [ 'label', 'help_text', 'default_value', 'placeholder', 'required', 'min_length', 'max_length', 'min_value', 'max_value', 'many' ] if getattr(field, 'read_only', False): return None for attr in attrs: value = getattr(field, attr, None) if value is not None and value != '': field_info[attr] = force_text(value, strings_only=True) if 'label' not in field_info: field_info['label'] = field_name.replace('_', ' ').title() if hasattr(field, 'view_name'): list_view = field.view_name.replace('-detail', '-list') base_url = reverse(list_view, request=self.request) field_info['type'] = 'select' field_info['url'] = base_url if hasattr(field, 'query_params'): field_info['url'] += '?%s' % urlencode(field.query_params) field_info['value_field'] = getattr(field, 'value_field', 'url') field_info['display_name_field'] = getattr(field, 'display_name_field', 'display_name') if hasattr(field, 'choices') and not hasattr(field, 'queryset'): field_info['choices'] = [ { 'value': choice_value, 'display_name': force_text(choice_name, strings_only=True) } for choice_value, choice_name in field.choices.items() ] return field_info
python
def get_field_info(self, field, field_name): """ Given an instance of a serializer field, return a dictionary of metadata about it. """ field_info = OrderedDict() field_info['type'] = self.label_lookup[field] field_info['required'] = getattr(field, 'required', False) attrs = [ 'label', 'help_text', 'default_value', 'placeholder', 'required', 'min_length', 'max_length', 'min_value', 'max_value', 'many' ] if getattr(field, 'read_only', False): return None for attr in attrs: value = getattr(field, attr, None) if value is not None and value != '': field_info[attr] = force_text(value, strings_only=True) if 'label' not in field_info: field_info['label'] = field_name.replace('_', ' ').title() if hasattr(field, 'view_name'): list_view = field.view_name.replace('-detail', '-list') base_url = reverse(list_view, request=self.request) field_info['type'] = 'select' field_info['url'] = base_url if hasattr(field, 'query_params'): field_info['url'] += '?%s' % urlencode(field.query_params) field_info['value_field'] = getattr(field, 'value_field', 'url') field_info['display_name_field'] = getattr(field, 'display_name_field', 'display_name') if hasattr(field, 'choices') and not hasattr(field, 'queryset'): field_info['choices'] = [ { 'value': choice_value, 'display_name': force_text(choice_name, strings_only=True) } for choice_value, choice_name in field.choices.items() ] return field_info
[ "def", "get_field_info", "(", "self", ",", "field", ",", "field_name", ")", ":", "field_info", "=", "OrderedDict", "(", ")", "field_info", "[", "'type'", "]", "=", "self", ".", "label_lookup", "[", "field", "]", "field_info", "[", "'required'", "]", "=", "getattr", "(", "field", ",", "'required'", ",", "False", ")", "attrs", "=", "[", "'label'", ",", "'help_text'", ",", "'default_value'", ",", "'placeholder'", ",", "'required'", ",", "'min_length'", ",", "'max_length'", ",", "'min_value'", ",", "'max_value'", ",", "'many'", "]", "if", "getattr", "(", "field", ",", "'read_only'", ",", "False", ")", ":", "return", "None", "for", "attr", "in", "attrs", ":", "value", "=", "getattr", "(", "field", ",", "attr", ",", "None", ")", "if", "value", "is", "not", "None", "and", "value", "!=", "''", ":", "field_info", "[", "attr", "]", "=", "force_text", "(", "value", ",", "strings_only", "=", "True", ")", "if", "'label'", "not", "in", "field_info", ":", "field_info", "[", "'label'", "]", "=", "field_name", ".", "replace", "(", "'_'", ",", "' '", ")", ".", "title", "(", ")", "if", "hasattr", "(", "field", ",", "'view_name'", ")", ":", "list_view", "=", "field", ".", "view_name", ".", "replace", "(", "'-detail'", ",", "'-list'", ")", "base_url", "=", "reverse", "(", "list_view", ",", "request", "=", "self", ".", "request", ")", "field_info", "[", "'type'", "]", "=", "'select'", "field_info", "[", "'url'", "]", "=", "base_url", "if", "hasattr", "(", "field", ",", "'query_params'", ")", ":", "field_info", "[", "'url'", "]", "+=", "'?%s'", "%", "urlencode", "(", "field", ".", "query_params", ")", "field_info", "[", "'value_field'", "]", "=", "getattr", "(", "field", ",", "'value_field'", ",", "'url'", ")", "field_info", "[", "'display_name_field'", "]", "=", "getattr", "(", "field", ",", "'display_name_field'", ",", "'display_name'", ")", "if", "hasattr", "(", "field", ",", "'choices'", ")", "and", "not", "hasattr", "(", "field", ",", "'queryset'", ")", ":", "field_info", "[", 
"'choices'", "]", "=", "[", "{", "'value'", ":", "choice_value", ",", "'display_name'", ":", "force_text", "(", "choice_name", ",", "strings_only", "=", "True", ")", "}", "for", "choice_value", ",", "choice_name", "in", "field", ".", "choices", ".", "items", "(", ")", "]", "return", "field_info" ]
Given an instance of a serializer field, return a dictionary of metadata about it.
[ "Given", "an", "instance", "of", "a", "serializer", "field", "return", "a", "dictionary", "of", "metadata", "about", "it", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/metadata.py#L186-L230
zeromake/aiosqlite3
aiosqlite3/connection.py
connect
def connect( database: str, loop: asyncio.BaseEventLoop = None, executor: concurrent.futures.Executor = None, timeout: int = 5, echo: bool = False, isolation_level: str = '', check_same_thread: bool = False, **kwargs: dict ): """ 把async方法执行后的对象创建为async上下文模式 """ coro = _connect( database, loop=loop, executor=executor, timeout=timeout, echo=echo, isolation_level=isolation_level, check_same_thread=check_same_thread, **kwargs ) return _ContextManager(coro)
python
def connect( database: str, loop: asyncio.BaseEventLoop = None, executor: concurrent.futures.Executor = None, timeout: int = 5, echo: bool = False, isolation_level: str = '', check_same_thread: bool = False, **kwargs: dict ): """ 把async方法执行后的对象创建为async上下文模式 """ coro = _connect( database, loop=loop, executor=executor, timeout=timeout, echo=echo, isolation_level=isolation_level, check_same_thread=check_same_thread, **kwargs ) return _ContextManager(coro)
[ "def", "connect", "(", "database", ":", "str", ",", "loop", ":", "asyncio", ".", "BaseEventLoop", "=", "None", ",", "executor", ":", "concurrent", ".", "futures", ".", "Executor", "=", "None", ",", "timeout", ":", "int", "=", "5", ",", "echo", ":", "bool", "=", "False", ",", "isolation_level", ":", "str", "=", "''", ",", "check_same_thread", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ":", "dict", ")", ":", "coro", "=", "_connect", "(", "database", ",", "loop", "=", "loop", ",", "executor", "=", "executor", ",", "timeout", "=", "timeout", ",", "echo", "=", "echo", ",", "isolation_level", "=", "isolation_level", ",", "check_same_thread", "=", "check_same_thread", ",", "*", "*", "kwargs", ")", "return", "_ContextManager", "(", "coro", ")" ]
把async方法执行后的对象创建为async上下文模式
[ "把async方法执行后的对象创建为async上下文模式" ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/connection.py#L443-L466
opennode/waldur-core
waldur_core/cost_tracking/tasks.py
recalculate_estimate
def recalculate_estimate(recalculate_total=False): """ Recalculate price of consumables that were used by resource until now. Regular task. It is too expensive to calculate consumed price on each request, so we store cached price each hour. If recalculate_total is True - task also recalculates total estimate for current month. """ # Celery does not import server.urls and does not discover cost tracking modules. # So they should be discovered implicitly. CostTrackingRegister.autodiscover() # Step 1. Recalculate resources estimates. for resource_model in CostTrackingRegister.registered_resources: for resource in resource_model.objects.all(): _update_resource_consumed(resource, recalculate_total=recalculate_total) # Step 2. Move from down to top and recalculate consumed estimate for each # object based on its children. ancestors_models = [m for m in models.PriceEstimate.get_estimated_models() if not issubclass(m, structure_models.ResourceMixin)] for model in ancestors_models: for ancestor in model.objects.all(): _update_ancestor_consumed(ancestor)
python
def recalculate_estimate(recalculate_total=False): """ Recalculate price of consumables that were used by resource until now. Regular task. It is too expensive to calculate consumed price on each request, so we store cached price each hour. If recalculate_total is True - task also recalculates total estimate for current month. """ # Celery does not import server.urls and does not discover cost tracking modules. # So they should be discovered implicitly. CostTrackingRegister.autodiscover() # Step 1. Recalculate resources estimates. for resource_model in CostTrackingRegister.registered_resources: for resource in resource_model.objects.all(): _update_resource_consumed(resource, recalculate_total=recalculate_total) # Step 2. Move from down to top and recalculate consumed estimate for each # object based on its children. ancestors_models = [m for m in models.PriceEstimate.get_estimated_models() if not issubclass(m, structure_models.ResourceMixin)] for model in ancestors_models: for ancestor in model.objects.all(): _update_ancestor_consumed(ancestor)
[ "def", "recalculate_estimate", "(", "recalculate_total", "=", "False", ")", ":", "# Celery does not import server.urls and does not discover cost tracking modules.", "# So they should be discovered implicitly.", "CostTrackingRegister", ".", "autodiscover", "(", ")", "# Step 1. Recalculate resources estimates.", "for", "resource_model", "in", "CostTrackingRegister", ".", "registered_resources", ":", "for", "resource", "in", "resource_model", ".", "objects", ".", "all", "(", ")", ":", "_update_resource_consumed", "(", "resource", ",", "recalculate_total", "=", "recalculate_total", ")", "# Step 2. Move from down to top and recalculate consumed estimate for each", "# object based on its children.", "ancestors_models", "=", "[", "m", "for", "m", "in", "models", ".", "PriceEstimate", ".", "get_estimated_models", "(", ")", "if", "not", "issubclass", "(", "m", ",", "structure_models", ".", "ResourceMixin", ")", "]", "for", "model", "in", "ancestors_models", ":", "for", "ancestor", "in", "model", ".", "objects", ".", "all", "(", ")", ":", "_update_ancestor_consumed", "(", "ancestor", ")" ]
Recalculate price of consumables that were used by resource until now. Regular task. It is too expensive to calculate consumed price on each request, so we store cached price each hour. If recalculate_total is True - task also recalculates total estimate for current month.
[ "Recalculate", "price", "of", "consumables", "that", "were", "used", "by", "resource", "until", "now", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/cost_tracking/tasks.py#L8-L29
deathbeds/importnb
src/importnb/loader.py
ImportLibMixin.get_data
def get_data(self, path): """Needs to return the string source for the module.""" return LineCacheNotebookDecoder( code=self.code, raw=self.raw, markdown=self.markdown ).decode(self.decode(), self.path)
python
def get_data(self, path): """Needs to return the string source for the module.""" return LineCacheNotebookDecoder( code=self.code, raw=self.raw, markdown=self.markdown ).decode(self.decode(), self.path)
[ "def", "get_data", "(", "self", ",", "path", ")", ":", "return", "LineCacheNotebookDecoder", "(", "code", "=", "self", ".", "code", ",", "raw", "=", "self", ".", "raw", ",", "markdown", "=", "self", ".", "markdown", ")", ".", "decode", "(", "self", ".", "decode", "(", ")", ",", "self", ".", "path", ")" ]
Needs to return the string source for the module.
[ "Needs", "to", "return", "the", "string", "source", "for", "the", "module", "." ]
train
https://github.com/deathbeds/importnb/blob/ec870d1f8ab99fd5b363267f89787a3e442a779f/src/importnb/loader.py#L138-L142
deathbeds/importnb
src/importnb/loader.py
NotebookBaseLoader.loader
def loader(self): """Create a lazy loader source file loader.""" loader = super().loader if self._lazy and (sys.version_info.major, sys.version_info.minor) != (3, 4): loader = LazyLoader.factory(loader) # Strip the leading underscore from slots return partial( loader, **{object.lstrip("_"): getattr(self, object) for object in self.__slots__} )
python
def loader(self): """Create a lazy loader source file loader.""" loader = super().loader if self._lazy and (sys.version_info.major, sys.version_info.minor) != (3, 4): loader = LazyLoader.factory(loader) # Strip the leading underscore from slots return partial( loader, **{object.lstrip("_"): getattr(self, object) for object in self.__slots__} )
[ "def", "loader", "(", "self", ")", ":", "loader", "=", "super", "(", ")", ".", "loader", "if", "self", ".", "_lazy", "and", "(", "sys", ".", "version_info", ".", "major", ",", "sys", ".", "version_info", ".", "minor", ")", "!=", "(", "3", ",", "4", ")", ":", "loader", "=", "LazyLoader", ".", "factory", "(", "loader", ")", "# Strip the leading underscore from slots", "return", "partial", "(", "loader", ",", "*", "*", "{", "object", ".", "lstrip", "(", "\"_\"", ")", ":", "getattr", "(", "self", ",", "object", ")", "for", "object", "in", "self", ".", "__slots__", "}", ")" ]
Create a lazy loader source file loader.
[ "Create", "a", "lazy", "loader", "source", "file", "loader", "." ]
train
https://github.com/deathbeds/importnb/blob/ec870d1f8ab99fd5b363267f89787a3e442a779f/src/importnb/loader.py#L182-L190
deathbeds/importnb
src/importnb/loader.py
FromFileMixin.load
def load(cls, filename, dir=None, main=False, **kwargs): """Import a notebook as a module from a filename. dir: The directory to load the file from. main: Load the module in the __main__ context. > assert Notebook.load('loader.ipynb') """ name = main and "__main__" or Path(filename).stem loader = cls(name, str(filename), **kwargs) module = module_from_spec(FileModuleSpec(name, loader, origin=loader.path)) cwd = str(Path(loader.path).parent) try: with ExitStack() as stack: sys.path.append(cwd) loader.name != "__main__" and stack.enter_context(_installed_safely(module)) loader.exec_module(module) finally: sys.path.pop() return module
python
def load(cls, filename, dir=None, main=False, **kwargs): """Import a notebook as a module from a filename. dir: The directory to load the file from. main: Load the module in the __main__ context. > assert Notebook.load('loader.ipynb') """ name = main and "__main__" or Path(filename).stem loader = cls(name, str(filename), **kwargs) module = module_from_spec(FileModuleSpec(name, loader, origin=loader.path)) cwd = str(Path(loader.path).parent) try: with ExitStack() as stack: sys.path.append(cwd) loader.name != "__main__" and stack.enter_context(_installed_safely(module)) loader.exec_module(module) finally: sys.path.pop() return module
[ "def", "load", "(", "cls", ",", "filename", ",", "dir", "=", "None", ",", "main", "=", "False", ",", "*", "*", "kwargs", ")", ":", "name", "=", "main", "and", "\"__main__\"", "or", "Path", "(", "filename", ")", ".", "stem", "loader", "=", "cls", "(", "name", ",", "str", "(", "filename", ")", ",", "*", "*", "kwargs", ")", "module", "=", "module_from_spec", "(", "FileModuleSpec", "(", "name", ",", "loader", ",", "origin", "=", "loader", ".", "path", ")", ")", "cwd", "=", "str", "(", "Path", "(", "loader", ".", "path", ")", ".", "parent", ")", "try", ":", "with", "ExitStack", "(", ")", "as", "stack", ":", "sys", ".", "path", ".", "append", "(", "cwd", ")", "loader", ".", "name", "!=", "\"__main__\"", "and", "stack", ".", "enter_context", "(", "_installed_safely", "(", "module", ")", ")", "loader", ".", "exec_module", "(", "module", ")", "finally", ":", "sys", ".", "path", ".", "pop", "(", ")", "return", "module" ]
Import a notebook as a module from a filename. dir: The directory to load the file from. main: Load the module in the __main__ context. > assert Notebook.load('loader.ipynb')
[ "Import", "a", "notebook", "as", "a", "module", "from", "a", "filename", ".", "dir", ":", "The", "directory", "to", "load", "the", "file", "from", ".", "main", ":", "Load", "the", "module", "in", "the", "__main__", "context", ".", ">", "assert", "Notebook", ".", "load", "(", "loader", ".", "ipynb", ")" ]
train
https://github.com/deathbeds/importnb/blob/ec870d1f8ab99fd5b363267f89787a3e442a779f/src/importnb/loader.py#L213-L233
deathbeds/importnb
src/importnb/loader.py
Notebook.source_to_code
def source_to_code(self, nodes, path, *, _optimize=-1): """* Convert the current source to ast * Apply ast transformers. * Compile the code.""" if not isinstance(nodes, ast.Module): nodes = ast.parse(nodes, self.path) if self._markdown_docstring: nodes = update_docstring(nodes) return super().source_to_code( ast.fix_missing_locations(self.visit(nodes)), path, _optimize=_optimize )
python
def source_to_code(self, nodes, path, *, _optimize=-1): """* Convert the current source to ast * Apply ast transformers. * Compile the code.""" if not isinstance(nodes, ast.Module): nodes = ast.parse(nodes, self.path) if self._markdown_docstring: nodes = update_docstring(nodes) return super().source_to_code( ast.fix_missing_locations(self.visit(nodes)), path, _optimize=_optimize )
[ "def", "source_to_code", "(", "self", ",", "nodes", ",", "path", ",", "*", ",", "_optimize", "=", "-", "1", ")", ":", "if", "not", "isinstance", "(", "nodes", ",", "ast", ".", "Module", ")", ":", "nodes", "=", "ast", ".", "parse", "(", "nodes", ",", "self", ".", "path", ")", "if", "self", ".", "_markdown_docstring", ":", "nodes", "=", "update_docstring", "(", "nodes", ")", "return", "super", "(", ")", ".", "source_to_code", "(", "ast", ".", "fix_missing_locations", "(", "self", ".", "visit", "(", "nodes", ")", ")", ",", "path", ",", "_optimize", "=", "_optimize", ")" ]
* Convert the current source to ast * Apply ast transformers. * Compile the code.
[ "*", "Convert", "the", "current", "source", "to", "ast", "*", "Apply", "ast", "transformers", ".", "*", "Compile", "the", "code", "." ]
train
https://github.com/deathbeds/importnb/blob/ec870d1f8ab99fd5b363267f89787a3e442a779f/src/importnb/loader.py#L293-L303
opennode/waldur-core
waldur_core/core/admin.py
ExtraActionsMixin.get_urls
def get_urls(self): """ Inject extra action URLs. """ urls = [] for action in self.get_extra_actions(): regex = r'^{}/$'.format(self._get_action_href(action)) view = self.admin_site.admin_view(action) urls.append(url(regex, view)) return urls + super(ExtraActionsMixin, self).get_urls()
python
def get_urls(self): """ Inject extra action URLs. """ urls = [] for action in self.get_extra_actions(): regex = r'^{}/$'.format(self._get_action_href(action)) view = self.admin_site.admin_view(action) urls.append(url(regex, view)) return urls + super(ExtraActionsMixin, self).get_urls()
[ "def", "get_urls", "(", "self", ")", ":", "urls", "=", "[", "]", "for", "action", "in", "self", ".", "get_extra_actions", "(", ")", ":", "regex", "=", "r'^{}/$'", ".", "format", "(", "self", ".", "_get_action_href", "(", "action", ")", ")", "view", "=", "self", ".", "admin_site", ".", "admin_view", "(", "action", ")", "urls", ".", "append", "(", "url", "(", "regex", ",", "view", ")", ")", "return", "urls", "+", "super", "(", "ExtraActionsMixin", ",", "self", ")", ".", "get_urls", "(", ")" ]
Inject extra action URLs.
[ "Inject", "extra", "action", "URLs", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/admin.py#L312-L323
opennode/waldur-core
waldur_core/core/admin.py
ExtraActionsMixin.changelist_view
def changelist_view(self, request, extra_context=None): """ Inject extra links into template context. """ links = [] for action in self.get_extra_actions(): links.append({ 'label': self._get_action_label(action), 'href': self._get_action_href(action) }) extra_context = extra_context or {} extra_context['extra_links'] = links return super(ExtraActionsMixin, self).changelist_view( request, extra_context=extra_context, )
python
def changelist_view(self, request, extra_context=None): """ Inject extra links into template context. """ links = [] for action in self.get_extra_actions(): links.append({ 'label': self._get_action_label(action), 'href': self._get_action_href(action) }) extra_context = extra_context or {} extra_context['extra_links'] = links return super(ExtraActionsMixin, self).changelist_view( request, extra_context=extra_context, )
[ "def", "changelist_view", "(", "self", ",", "request", ",", "extra_context", "=", "None", ")", ":", "links", "=", "[", "]", "for", "action", "in", "self", ".", "get_extra_actions", "(", ")", ":", "links", ".", "append", "(", "{", "'label'", ":", "self", ".", "_get_action_label", "(", "action", ")", ",", "'href'", ":", "self", ".", "_get_action_href", "(", "action", ")", "}", ")", "extra_context", "=", "extra_context", "or", "{", "}", "extra_context", "[", "'extra_links'", "]", "=", "links", "return", "super", "(", "ExtraActionsMixin", ",", "self", ")", ".", "changelist_view", "(", "request", ",", "extra_context", "=", "extra_context", ",", ")" ]
Inject extra links into template context.
[ "Inject", "extra", "links", "into", "template", "context", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/admin.py#L325-L342
nuagenetworks/bambou
bambou/nurest_session.py
NURESTSession.start
def start(self): """ Starts the session. Starting the session will actually get the API key of the current user """ if NURESTSession.session_stack: bambou_logger.critical("Starting a session inside a with statement is not supported.") raise Exception("Starting a session inside a with statement is not supported.") NURESTSession.current_session = self self._authenticate() return self
python
def start(self): """ Starts the session. Starting the session will actually get the API key of the current user """ if NURESTSession.session_stack: bambou_logger.critical("Starting a session inside a with statement is not supported.") raise Exception("Starting a session inside a with statement is not supported.") NURESTSession.current_session = self self._authenticate() return self
[ "def", "start", "(", "self", ")", ":", "if", "NURESTSession", ".", "session_stack", ":", "bambou_logger", ".", "critical", "(", "\"Starting a session inside a with statement is not supported.\"", ")", "raise", "Exception", "(", "\"Starting a session inside a with statement is not supported.\"", ")", "NURESTSession", ".", "current_session", "=", "self", "self", ".", "_authenticate", "(", ")", "return", "self" ]
Starts the session. Starting the session will actually get the API key of the current user
[ "Starts", "the", "session", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_session.py#L155-L169
opennode/waldur-core
waldur_core/quotas/handlers.py
init_quotas
def init_quotas(sender, instance, created=False, **kwargs): """ Initialize new instances quotas """ if not created: return for field in sender.get_quotas_fields(): try: field.get_or_create_quota(scope=instance) except CreationConditionFailedQuotaError: pass
python
def init_quotas(sender, instance, created=False, **kwargs): """ Initialize new instances quotas """ if not created: return for field in sender.get_quotas_fields(): try: field.get_or_create_quota(scope=instance) except CreationConditionFailedQuotaError: pass
[ "def", "init_quotas", "(", "sender", ",", "instance", ",", "created", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "not", "created", ":", "return", "for", "field", "in", "sender", ".", "get_quotas_fields", "(", ")", ":", "try", ":", "field", ".", "get_or_create_quota", "(", "scope", "=", "instance", ")", "except", "CreationConditionFailedQuotaError", ":", "pass" ]
Initialize new instances quotas
[ "Initialize", "new", "instances", "quotas" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/quotas/handlers.py#L35-L43
opennode/waldur-core
waldur_core/quotas/handlers.py
count_quota_handler_factory
def count_quota_handler_factory(count_quota_field): """ Creates handler that will recalculate count_quota on creation/deletion """ def recalculate_count_quota(sender, instance, **kwargs): signal = kwargs['signal'] if signal == signals.post_save and kwargs.get('created'): count_quota_field.add_usage(instance, delta=1) elif signal == signals.post_delete: count_quota_field.add_usage(instance, delta=-1, fail_silently=True) return recalculate_count_quota
python
def count_quota_handler_factory(count_quota_field): """ Creates handler that will recalculate count_quota on creation/deletion """ def recalculate_count_quota(sender, instance, **kwargs): signal = kwargs['signal'] if signal == signals.post_save and kwargs.get('created'): count_quota_field.add_usage(instance, delta=1) elif signal == signals.post_delete: count_quota_field.add_usage(instance, delta=-1, fail_silently=True) return recalculate_count_quota
[ "def", "count_quota_handler_factory", "(", "count_quota_field", ")", ":", "def", "recalculate_count_quota", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "signal", "=", "kwargs", "[", "'signal'", "]", "if", "signal", "==", "signals", ".", "post_save", "and", "kwargs", ".", "get", "(", "'created'", ")", ":", "count_quota_field", ".", "add_usage", "(", "instance", ",", "delta", "=", "1", ")", "elif", "signal", "==", "signals", ".", "post_delete", ":", "count_quota_field", ".", "add_usage", "(", "instance", ",", "delta", "=", "-", "1", ",", "fail_silently", "=", "True", ")", "return", "recalculate_count_quota" ]
Creates handler that will recalculate count_quota on creation/deletion
[ "Creates", "handler", "that", "will", "recalculate", "count_quota", "on", "creation", "/", "deletion" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/quotas/handlers.py#L46-L56
opennode/waldur-core
waldur_core/quotas/handlers.py
handle_aggregated_quotas
def handle_aggregated_quotas(sender, instance, **kwargs): """ Call aggregated quotas fields update methods """ quota = instance # aggregation is not supported for global quotas. if quota.scope is None: return quota_field = quota.get_field() # usage aggregation should not count another usage aggregator field to avoid calls duplication. if isinstance(quota_field, fields.UsageAggregatorQuotaField) or quota_field is None: return signal = kwargs['signal'] for aggregator_quota in quota_field.get_aggregator_quotas(quota): field = aggregator_quota.get_field() if signal == signals.post_save: field.post_child_quota_save(aggregator_quota.scope, child_quota=quota, created=kwargs.get('created')) elif signal == signals.pre_delete: field.pre_child_quota_delete(aggregator_quota.scope, child_quota=quota)
python
def handle_aggregated_quotas(sender, instance, **kwargs): """ Call aggregated quotas fields update methods """ quota = instance # aggregation is not supported for global quotas. if quota.scope is None: return quota_field = quota.get_field() # usage aggregation should not count another usage aggregator field to avoid calls duplication. if isinstance(quota_field, fields.UsageAggregatorQuotaField) or quota_field is None: return signal = kwargs['signal'] for aggregator_quota in quota_field.get_aggregator_quotas(quota): field = aggregator_quota.get_field() if signal == signals.post_save: field.post_child_quota_save(aggregator_quota.scope, child_quota=quota, created=kwargs.get('created')) elif signal == signals.pre_delete: field.pre_child_quota_delete(aggregator_quota.scope, child_quota=quota)
[ "def", "handle_aggregated_quotas", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "quota", "=", "instance", "# aggregation is not supported for global quotas.", "if", "quota", ".", "scope", "is", "None", ":", "return", "quota_field", "=", "quota", ".", "get_field", "(", ")", "# usage aggregation should not count another usage aggregator field to avoid calls duplication.", "if", "isinstance", "(", "quota_field", ",", "fields", ".", "UsageAggregatorQuotaField", ")", "or", "quota_field", "is", "None", ":", "return", "signal", "=", "kwargs", "[", "'signal'", "]", "for", "aggregator_quota", "in", "quota_field", ".", "get_aggregator_quotas", "(", "quota", ")", ":", "field", "=", "aggregator_quota", ".", "get_field", "(", ")", "if", "signal", "==", "signals", ".", "post_save", ":", "field", ".", "post_child_quota_save", "(", "aggregator_quota", ".", "scope", ",", "child_quota", "=", "quota", ",", "created", "=", "kwargs", ".", "get", "(", "'created'", ")", ")", "elif", "signal", "==", "signals", ".", "pre_delete", ":", "field", ".", "pre_child_quota_delete", "(", "aggregator_quota", ".", "scope", ",", "child_quota", "=", "quota", ")" ]
Call aggregated quotas fields update methods
[ "Call", "aggregated", "quotas", "fields", "update", "methods" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/quotas/handlers.py#L59-L75
opennode/waldur-core
waldur_core/structure/serializers.py
NestedServiceProjectLinkSerializer.get_settings
def get_settings(self, link): """ URL of service settings """ return reverse( 'servicesettings-detail', kwargs={'uuid': link.service.settings.uuid}, request=self.context['request'])
python
def get_settings(self, link): """ URL of service settings """ return reverse( 'servicesettings-detail', kwargs={'uuid': link.service.settings.uuid}, request=self.context['request'])
[ "def", "get_settings", "(", "self", ",", "link", ")", ":", "return", "reverse", "(", "'servicesettings-detail'", ",", "kwargs", "=", "{", "'uuid'", ":", "link", ".", "service", ".", "settings", ".", "uuid", "}", ",", "request", "=", "self", ".", "context", "[", "'request'", "]", ")" ]
URL of service settings
[ "URL", "of", "service", "settings" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/serializers.py#L140-L145
opennode/waldur-core
waldur_core/structure/serializers.py
NestedServiceProjectLinkSerializer.get_url
def get_url(self, link): """ URL of service """ view_name = SupportedServices.get_detail_view_for_model(link.service) return reverse(view_name, kwargs={'uuid': link.service.uuid.hex}, request=self.context['request'])
python
def get_url(self, link): """ URL of service """ view_name = SupportedServices.get_detail_view_for_model(link.service) return reverse(view_name, kwargs={'uuid': link.service.uuid.hex}, request=self.context['request'])
[ "def", "get_url", "(", "self", ",", "link", ")", ":", "view_name", "=", "SupportedServices", ".", "get_detail_view_for_model", "(", "link", ".", "service", ")", "return", "reverse", "(", "view_name", ",", "kwargs", "=", "{", "'uuid'", ":", "link", ".", "service", ".", "uuid", ".", "hex", "}", ",", "request", "=", "self", ".", "context", "[", "'request'", "]", ")" ]
URL of service
[ "URL", "of", "service" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/serializers.py#L147-L152
opennode/waldur-core
waldur_core/structure/serializers.py
NestedServiceProjectLinkSerializer.get_resources_count
def get_resources_count(self, link): """ Count total number of all resources connected to link """ total = 0 for model in SupportedServices.get_service_resources(link.service): # Format query path from resource to service project link query = {model.Permissions.project_path.split('__')[0]: link} total += model.objects.filter(**query).count() return total
python
def get_resources_count(self, link): """ Count total number of all resources connected to link """ total = 0 for model in SupportedServices.get_service_resources(link.service): # Format query path from resource to service project link query = {model.Permissions.project_path.split('__')[0]: link} total += model.objects.filter(**query).count() return total
[ "def", "get_resources_count", "(", "self", ",", "link", ")", ":", "total", "=", "0", "for", "model", "in", "SupportedServices", ".", "get_service_resources", "(", "link", ".", "service", ")", ":", "# Format query path from resource to service project link", "query", "=", "{", "model", ".", "Permissions", ".", "project_path", ".", "split", "(", "'__'", ")", "[", "0", "]", ":", "link", "}", "total", "+=", "model", ".", "objects", ".", "filter", "(", "*", "*", "query", ")", ".", "count", "(", ")", "return", "total" ]
Count total number of all resources connected to link
[ "Count", "total", "number", "of", "all", "resources", "connected", "to", "link" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/serializers.py#L166-L175
OpenDataScienceLab/skdata
skdata/data.py
SkDataSet.drop_columns
def drop_columns( self, max_na_values: int = None, max_unique_values: int = None ): """ When max_na_values was informed, remove columns when the proportion of total NA values more than max_na_values threshold. When max_unique_values was informed, remove columns when the proportion of the total of unique values is more than the max_unique_values threshold, just for columns with type as object or category. :param max_na_values: proportion threshold of max na values :param max_unique_values: :return: """ step = {} if max_na_values is not None: step = { 'data-set': self.iid, 'operation': 'drop-na', 'expression': '{"max_na_values":%s, "axis": 1}' % max_na_values } if max_unique_values is not None: step = { 'data-set': self.iid, 'operation': 'drop-unique', 'expression': '{"max_unique_values":%s}' % max_unique_values } self.attr_update(attr='steps', value=[step])
python
def drop_columns( self, max_na_values: int = None, max_unique_values: int = None ): """ When max_na_values was informed, remove columns when the proportion of total NA values more than max_na_values threshold. When max_unique_values was informed, remove columns when the proportion of the total of unique values is more than the max_unique_values threshold, just for columns with type as object or category. :param max_na_values: proportion threshold of max na values :param max_unique_values: :return: """ step = {} if max_na_values is not None: step = { 'data-set': self.iid, 'operation': 'drop-na', 'expression': '{"max_na_values":%s, "axis": 1}' % max_na_values } if max_unique_values is not None: step = { 'data-set': self.iid, 'operation': 'drop-unique', 'expression': '{"max_unique_values":%s}' % max_unique_values } self.attr_update(attr='steps', value=[step])
[ "def", "drop_columns", "(", "self", ",", "max_na_values", ":", "int", "=", "None", ",", "max_unique_values", ":", "int", "=", "None", ")", ":", "step", "=", "{", "}", "if", "max_na_values", "is", "not", "None", ":", "step", "=", "{", "'data-set'", ":", "self", ".", "iid", ",", "'operation'", ":", "'drop-na'", ",", "'expression'", ":", "'{\"max_na_values\":%s, \"axis\": 1}'", "%", "max_na_values", "}", "if", "max_unique_values", "is", "not", "None", ":", "step", "=", "{", "'data-set'", ":", "self", ".", "iid", ",", "'operation'", ":", "'drop-unique'", ",", "'expression'", ":", "'{\"max_unique_values\":%s}'", "%", "max_unique_values", "}", "self", ".", "attr_update", "(", "attr", "=", "'steps'", ",", "value", "=", "[", "step", "]", ")" ]
When max_na_values was informed, remove columns when the proportion of total NA values more than max_na_values threshold. When max_unique_values was informed, remove columns when the proportion of the total of unique values is more than the max_unique_values threshold, just for columns with type as object or category. :param max_na_values: proportion threshold of max na values :param max_unique_values: :return:
[ "When", "max_na_values", "was", "informed", "remove", "columns", "when", "the", "proportion", "of", "total", "NA", "values", "more", "than", "max_na_values", "threshold", "." ]
train
https://github.com/OpenDataScienceLab/skdata/blob/34f06845a944ff4f048b55c7babdd8420f71a6b9/skdata/data.py#L175-L204
OpenDataScienceLab/skdata
skdata/data.py
SkDataSet.dropna
def dropna(self): """ :return: """ step = { 'data-set': self.iid, 'operation': 'drop-na', 'expression': '{"axis": 0}' } self.attr_update(attr='steps', value=[step])
python
def dropna(self): """ :return: """ step = { 'data-set': self.iid, 'operation': 'drop-na', 'expression': '{"axis": 0}' } self.attr_update(attr='steps', value=[step])
[ "def", "dropna", "(", "self", ")", ":", "step", "=", "{", "'data-set'", ":", "self", ".", "iid", ",", "'operation'", ":", "'drop-na'", ",", "'expression'", ":", "'{\"axis\": 0}'", "}", "self", ".", "attr_update", "(", "attr", "=", "'steps'", ",", "value", "=", "[", "step", "]", ")" ]
:return:
[ ":", "return", ":" ]
train
https://github.com/OpenDataScienceLab/skdata/blob/34f06845a944ff4f048b55c7babdd8420f71a6b9/skdata/data.py#L206-L217
OpenDataScienceLab/skdata
skdata/data.py
SkDataSet.log
def log(self, message: str): """ @deprecated :param message: :return: """ dset_log_id = '_%s_log' % self.iid if dset_log_id not in self.parent.data.keys(): dset = self.parent.data.create_dataset( dset_log_id, shape=(1,), dtype=np.dtype([ ('dt_log', '<i8'), ('message', 'S250') ]) ) else: dset = self.parent.data[dset_log_id] timestamp = np.array( datetime.now().strftime("%s") ).astype('<i8').view('<M8[s]') dset['dt_log'] = timestamp.view('<i8') dset['message'] = message self.parent.data.flush()
python
def log(self, message: str): """ @deprecated :param message: :return: """ dset_log_id = '_%s_log' % self.iid if dset_log_id not in self.parent.data.keys(): dset = self.parent.data.create_dataset( dset_log_id, shape=(1,), dtype=np.dtype([ ('dt_log', '<i8'), ('message', 'S250') ]) ) else: dset = self.parent.data[dset_log_id] timestamp = np.array( datetime.now().strftime("%s") ).astype('<i8').view('<M8[s]') dset['dt_log'] = timestamp.view('<i8') dset['message'] = message self.parent.data.flush()
[ "def", "log", "(", "self", ",", "message", ":", "str", ")", ":", "dset_log_id", "=", "'_%s_log'", "%", "self", ".", "iid", "if", "dset_log_id", "not", "in", "self", ".", "parent", ".", "data", ".", "keys", "(", ")", ":", "dset", "=", "self", ".", "parent", ".", "data", ".", "create_dataset", "(", "dset_log_id", ",", "shape", "=", "(", "1", ",", ")", ",", "dtype", "=", "np", ".", "dtype", "(", "[", "(", "'dt_log'", ",", "'<i8'", ")", ",", "(", "'message'", ",", "'S250'", ")", "]", ")", ")", "else", ":", "dset", "=", "self", ".", "parent", ".", "data", "[", "dset_log_id", "]", "timestamp", "=", "np", ".", "array", "(", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%s\"", ")", ")", ".", "astype", "(", "'<i8'", ")", ".", "view", "(", "'<M8[s]'", ")", "dset", "[", "'dt_log'", "]", "=", "timestamp", ".", "view", "(", "'<i8'", ")", "dset", "[", "'message'", "]", "=", "message", "self", ".", "parent", ".", "data", ".", "flush", "(", ")" ]
@deprecated :param message: :return:
[ "@deprecated" ]
train
https://github.com/OpenDataScienceLab/skdata/blob/34f06845a944ff4f048b55c7babdd8420f71a6b9/skdata/data.py#L219-L246
OpenDataScienceLab/skdata
skdata/data.py
SkDataSet.summary
def summary(self, compute=False) -> pd.DataFrame: """ :param compute: if should call compute method :return: """ if compute or self.result is None: self.compute() return summary(self.result)
python
def summary(self, compute=False) -> pd.DataFrame: """ :param compute: if should call compute method :return: """ if compute or self.result is None: self.compute() return summary(self.result)
[ "def", "summary", "(", "self", ",", "compute", "=", "False", ")", "->", "pd", ".", "DataFrame", ":", "if", "compute", "or", "self", ".", "result", "is", "None", ":", "self", ".", "compute", "(", ")", "return", "summary", "(", "self", ".", "result", ")" ]
:param compute: if should call compute method :return:
[ ":", "param", "compute", ":", "if", "should", "call", "compute", "method", ":", "return", ":" ]
train
https://github.com/OpenDataScienceLab/skdata/blob/34f06845a944ff4f048b55c7babdd8420f71a6b9/skdata/data.py#L248-L255
stepank/pyws
src/pyws/adapters/_twisted.py
serve
def serve(request, server): """ Twisted Web adapter. It has two arguments: #. ``request`` is a Twisted Web request object, #. ``server`` is a pyws server object. First one is the context of an application, function ``serve`` transforms it into a pyws request object. Then it feeds the request to the server, gets the response, sets header ``Content-Type`` and returns response text. """ request_ = Request('/'.join(request.postpath), request.content.read() if not request.method == 'GET' else '', request.args, request.args, {}) response = server.process_request(request_) request.setHeader('Content-Type', response.content_type) request.setResponseCode(get_http_response_code_num(response)) return response.text
python
def serve(request, server): """ Twisted Web adapter. It has two arguments: #. ``request`` is a Twisted Web request object, #. ``server`` is a pyws server object. First one is the context of an application, function ``serve`` transforms it into a pyws request object. Then it feeds the request to the server, gets the response, sets header ``Content-Type`` and returns response text. """ request_ = Request('/'.join(request.postpath), request.content.read() if not request.method == 'GET' else '', request.args, request.args, {}) response = server.process_request(request_) request.setHeader('Content-Type', response.content_type) request.setResponseCode(get_http_response_code_num(response)) return response.text
[ "def", "serve", "(", "request", ",", "server", ")", ":", "request_", "=", "Request", "(", "'/'", ".", "join", "(", "request", ".", "postpath", ")", ",", "request", ".", "content", ".", "read", "(", ")", "if", "not", "request", ".", "method", "==", "'GET'", "else", "''", ",", "request", ".", "args", ",", "request", ".", "args", ",", "{", "}", ")", "response", "=", "server", ".", "process_request", "(", "request_", ")", "request", ".", "setHeader", "(", "'Content-Type'", ",", "response", ".", "content_type", ")", "request", ".", "setResponseCode", "(", "get_http_response_code_num", "(", "response", ")", ")", "return", "response", ".", "text" ]
Twisted Web adapter. It has two arguments: #. ``request`` is a Twisted Web request object, #. ``server`` is a pyws server object. First one is the context of an application, function ``serve`` transforms it into a pyws request object. Then it feeds the request to the server, gets the response, sets header ``Content-Type`` and returns response text.
[ "Twisted", "Web", "adapter", ".", "It", "has", "two", "arguments", ":" ]
train
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/adapters/_twisted.py#L4-L25
opennode/waldur-core
waldur_core/structure/utils.py
get_sorted_dependencies
def get_sorted_dependencies(service_model): """ Returns list of application models in topological order. It is used in order to correctly delete dependent resources. """ app_models = list(service_model._meta.app_config.get_models()) dependencies = {model: set() for model in app_models} relations = ( relation for model in app_models for relation in model._meta.related_objects if relation.on_delete in (models.PROTECT, models.CASCADE) ) for rel in relations: dependencies[rel.model].add(rel.related_model) return stable_topological_sort(app_models, dependencies)
python
def get_sorted_dependencies(service_model): """ Returns list of application models in topological order. It is used in order to correctly delete dependent resources. """ app_models = list(service_model._meta.app_config.get_models()) dependencies = {model: set() for model in app_models} relations = ( relation for model in app_models for relation in model._meta.related_objects if relation.on_delete in (models.PROTECT, models.CASCADE) ) for rel in relations: dependencies[rel.model].add(rel.related_model) return stable_topological_sort(app_models, dependencies)
[ "def", "get_sorted_dependencies", "(", "service_model", ")", ":", "app_models", "=", "list", "(", "service_model", ".", "_meta", ".", "app_config", ".", "get_models", "(", ")", ")", "dependencies", "=", "{", "model", ":", "set", "(", ")", "for", "model", "in", "app_models", "}", "relations", "=", "(", "relation", "for", "model", "in", "app_models", "for", "relation", "in", "model", ".", "_meta", ".", "related_objects", "if", "relation", ".", "on_delete", "in", "(", "models", ".", "PROTECT", ",", "models", ".", "CASCADE", ")", ")", "for", "rel", "in", "relations", ":", "dependencies", "[", "rel", ".", "model", "]", ".", "add", "(", "rel", ".", "related_model", ")", "return", "stable_topological_sort", "(", "app_models", ",", "dependencies", ")" ]
Returns list of application models in topological order. It is used in order to correctly delete dependent resources.
[ "Returns", "list", "of", "application", "models", "in", "topological", "order", ".", "It", "is", "used", "in", "order", "to", "correctly", "delete", "dependent", "resources", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/utils.py#L38-L53
opennode/waldur-core
waldur_core/structure/utils.py
update_pulled_fields
def update_pulled_fields(instance, imported_instance, fields): """ Update instance fields based on imported from backend data. Save changes to DB only one or more fields were changed. """ modified = False for field in fields: pulled_value = getattr(imported_instance, field) current_value = getattr(instance, field) if current_value != pulled_value: setattr(instance, field, pulled_value) logger.info("%s's with PK %s %s field updated from value '%s' to value '%s'", instance.__class__.__name__, instance.pk, field, current_value, pulled_value) modified = True error_message = getattr(imported_instance, 'error_message', '') or getattr(instance, 'error_message', '') if error_message and instance.error_message != error_message: instance.error_message = imported_instance.error_message modified = True if modified: instance.save()
python
def update_pulled_fields(instance, imported_instance, fields): """ Update instance fields based on imported from backend data. Save changes to DB only one or more fields were changed. """ modified = False for field in fields: pulled_value = getattr(imported_instance, field) current_value = getattr(instance, field) if current_value != pulled_value: setattr(instance, field, pulled_value) logger.info("%s's with PK %s %s field updated from value '%s' to value '%s'", instance.__class__.__name__, instance.pk, field, current_value, pulled_value) modified = True error_message = getattr(imported_instance, 'error_message', '') or getattr(instance, 'error_message', '') if error_message and instance.error_message != error_message: instance.error_message = imported_instance.error_message modified = True if modified: instance.save()
[ "def", "update_pulled_fields", "(", "instance", ",", "imported_instance", ",", "fields", ")", ":", "modified", "=", "False", "for", "field", "in", "fields", ":", "pulled_value", "=", "getattr", "(", "imported_instance", ",", "field", ")", "current_value", "=", "getattr", "(", "instance", ",", "field", ")", "if", "current_value", "!=", "pulled_value", ":", "setattr", "(", "instance", ",", "field", ",", "pulled_value", ")", "logger", ".", "info", "(", "\"%s's with PK %s %s field updated from value '%s' to value '%s'\"", ",", "instance", ".", "__class__", ".", "__name__", ",", "instance", ".", "pk", ",", "field", ",", "current_value", ",", "pulled_value", ")", "modified", "=", "True", "error_message", "=", "getattr", "(", "imported_instance", ",", "'error_message'", ",", "''", ")", "or", "getattr", "(", "instance", ",", "'error_message'", ",", "''", ")", "if", "error_message", "and", "instance", ".", "error_message", "!=", "error_message", ":", "instance", ".", "error_message", "=", "imported_instance", ".", "error_message", "modified", "=", "True", "if", "modified", ":", "instance", ".", "save", "(", ")" ]
Update instance fields based on imported from backend data. Save changes to DB only one or more fields were changed.
[ "Update", "instance", "fields", "based", "on", "imported", "from", "backend", "data", ".", "Save", "changes", "to", "DB", "only", "one", "or", "more", "fields", "were", "changed", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/utils.py#L91-L110
opennode/waldur-core
waldur_core/structure/utils.py
handle_resource_not_found
def handle_resource_not_found(resource): """ Set resource state to ERRED and append/create "not found" error message. """ resource.set_erred() resource.runtime_state = '' message = 'Does not exist at backend.' if message not in resource.error_message: if not resource.error_message: resource.error_message = message else: resource.error_message += ' (%s)' % message resource.save() logger.warning('%s %s (PK: %s) does not exist at backend.' % ( resource.__class__.__name__, resource, resource.pk))
python
def handle_resource_not_found(resource): """ Set resource state to ERRED and append/create "not found" error message. """ resource.set_erred() resource.runtime_state = '' message = 'Does not exist at backend.' if message not in resource.error_message: if not resource.error_message: resource.error_message = message else: resource.error_message += ' (%s)' % message resource.save() logger.warning('%s %s (PK: %s) does not exist at backend.' % ( resource.__class__.__name__, resource, resource.pk))
[ "def", "handle_resource_not_found", "(", "resource", ")", ":", "resource", ".", "set_erred", "(", ")", "resource", ".", "runtime_state", "=", "''", "message", "=", "'Does not exist at backend.'", "if", "message", "not", "in", "resource", ".", "error_message", ":", "if", "not", "resource", ".", "error_message", ":", "resource", ".", "error_message", "=", "message", "else", ":", "resource", ".", "error_message", "+=", "' (%s)'", "%", "message", "resource", ".", "save", "(", ")", "logger", ".", "warning", "(", "'%s %s (PK: %s) does not exist at backend.'", "%", "(", "resource", ".", "__class__", ".", "__name__", ",", "resource", ",", "resource", ".", "pk", ")", ")" ]
Set resource state to ERRED and append/create "not found" error message.
[ "Set", "resource", "state", "to", "ERRED", "and", "append", "/", "create", "not", "found", "error", "message", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/utils.py#L113-L127
opennode/waldur-core
waldur_core/structure/utils.py
handle_resource_update_success
def handle_resource_update_success(resource): """ Recover resource if its state is ERRED and clear error message. """ update_fields = [] if resource.state == resource.States.ERRED: resource.recover() update_fields.append('state') if resource.state in (resource.States.UPDATING, resource.States.CREATING): resource.set_ok() update_fields.append('state') if resource.error_message: resource.error_message = '' update_fields.append('error_message') if update_fields: resource.save(update_fields=update_fields) logger.warning('%s %s (PK: %s) was successfully updated.' % ( resource.__class__.__name__, resource, resource.pk))
python
def handle_resource_update_success(resource): """ Recover resource if its state is ERRED and clear error message. """ update_fields = [] if resource.state == resource.States.ERRED: resource.recover() update_fields.append('state') if resource.state in (resource.States.UPDATING, resource.States.CREATING): resource.set_ok() update_fields.append('state') if resource.error_message: resource.error_message = '' update_fields.append('error_message') if update_fields: resource.save(update_fields=update_fields) logger.warning('%s %s (PK: %s) was successfully updated.' % ( resource.__class__.__name__, resource, resource.pk))
[ "def", "handle_resource_update_success", "(", "resource", ")", ":", "update_fields", "=", "[", "]", "if", "resource", ".", "state", "==", "resource", ".", "States", ".", "ERRED", ":", "resource", ".", "recover", "(", ")", "update_fields", ".", "append", "(", "'state'", ")", "if", "resource", ".", "state", "in", "(", "resource", ".", "States", ".", "UPDATING", ",", "resource", ".", "States", ".", "CREATING", ")", ":", "resource", ".", "set_ok", "(", ")", "update_fields", ".", "append", "(", "'state'", ")", "if", "resource", ".", "error_message", ":", "resource", ".", "error_message", "=", "''", "update_fields", ".", "append", "(", "'error_message'", ")", "if", "update_fields", ":", "resource", ".", "save", "(", "update_fields", "=", "update_fields", ")", "logger", ".", "warning", "(", "'%s %s (PK: %s) was successfully updated.'", "%", "(", "resource", ".", "__class__", ".", "__name__", ",", "resource", ",", "resource", ".", "pk", ")", ")" ]
Recover resource if its state is ERRED and clear error message.
[ "Recover", "resource", "if", "its", "state", "is", "ERRED", "and", "clear", "error", "message", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/utils.py#L130-L150
nuagenetworks/bambou
bambou/nurest_request.py
NURESTRequest.set_header
def set_header(self, header, value): """ Set header value """ # requests>=2.11 only accepts `str` or `bytes` header values # raising an exception here, instead of leaving it to `requests` makes # it easy to know where we passed a wrong header type in the code. if not isinstance(value, (str, bytes)): raise TypeError("header values must be str or bytes, but %s value has type %s" % (header, type(value))) self._headers[header] = value
python
def set_header(self, header, value): """ Set header value """ # requests>=2.11 only accepts `str` or `bytes` header values # raising an exception here, instead of leaving it to `requests` makes # it easy to know where we passed a wrong header type in the code. if not isinstance(value, (str, bytes)): raise TypeError("header values must be str or bytes, but %s value has type %s" % (header, type(value))) self._headers[header] = value
[ "def", "set_header", "(", "self", ",", "header", ",", "value", ")", ":", "# requests>=2.11 only accepts `str` or `bytes` header values", "# raising an exception here, instead of leaving it to `requests` makes", "# it easy to know where we passed a wrong header type in the code.", "if", "not", "isinstance", "(", "value", ",", "(", "str", ",", "bytes", ")", ")", ":", "raise", "TypeError", "(", "\"header values must be str or bytes, but %s value has type %s\"", "%", "(", "header", ",", "type", "(", "value", ")", ")", ")", "self", ".", "_headers", "[", "header", "]", "=", "value" ]
Set header value
[ "Set", "header", "value" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_request.py#L120-L127
opennode/waldur-core
waldur_core/structure/tasks.py
BackgroundPullTask.set_instance_erred
def set_instance_erred(self, instance, error_message): """ Mark instance as erred and save error message """ instance.set_erred() instance.error_message = error_message instance.save(update_fields=['state', 'error_message'])
python
def set_instance_erred(self, instance, error_message): """ Mark instance as erred and save error message """ instance.set_erred() instance.error_message = error_message instance.save(update_fields=['state', 'error_message'])
[ "def", "set_instance_erred", "(", "self", ",", "instance", ",", "error_message", ")", ":", "instance", ".", "set_erred", "(", ")", "instance", ".", "error_message", "=", "error_message", "instance", ".", "save", "(", "update_fields", "=", "[", "'state'", ",", "'error_message'", "]", ")" ]
Mark instance as erred and save error message
[ "Mark", "instance", "as", "erred", "and", "save", "error", "message" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/tasks.py#L118-L122
noumar/iso639
examples/logic.py
map_language
def map_language(language, dash3=True): """ Use ISO 639-3 ?? """ if dash3: from iso639 import languages else: from pycountry import languages if '_' in language: language = language.split('_')[0] if len(language) == 2: try: return languages.get(alpha2=language.lower()) except KeyError: pass elif len(language) == 3: if dash3: try: return languages.get(part3=language.lower()) except KeyError: pass try: return languages.get(terminology=language.lower()) except KeyError: pass try: return languages.get(bibliographic=language.lower()) except KeyError: pass else: try: return languages.get(name=language.title()) except KeyError: pass if dash3: try: return languages.get(inverted=language.title()) except KeyError: pass for l in re.split('[,.;: ]+', language): try: return languages.get(name=l.title()) except KeyError: pass
python
def map_language(language, dash3=True): """ Use ISO 639-3 ?? """ if dash3: from iso639 import languages else: from pycountry import languages if '_' in language: language = language.split('_')[0] if len(language) == 2: try: return languages.get(alpha2=language.lower()) except KeyError: pass elif len(language) == 3: if dash3: try: return languages.get(part3=language.lower()) except KeyError: pass try: return languages.get(terminology=language.lower()) except KeyError: pass try: return languages.get(bibliographic=language.lower()) except KeyError: pass else: try: return languages.get(name=language.title()) except KeyError: pass if dash3: try: return languages.get(inverted=language.title()) except KeyError: pass for l in re.split('[,.;: ]+', language): try: return languages.get(name=l.title()) except KeyError: pass
[ "def", "map_language", "(", "language", ",", "dash3", "=", "True", ")", ":", "if", "dash3", ":", "from", "iso639", "import", "languages", "else", ":", "from", "pycountry", "import", "languages", "if", "'_'", "in", "language", ":", "language", "=", "language", ".", "split", "(", "'_'", ")", "[", "0", "]", "if", "len", "(", "language", ")", "==", "2", ":", "try", ":", "return", "languages", ".", "get", "(", "alpha2", "=", "language", ".", "lower", "(", ")", ")", "except", "KeyError", ":", "pass", "elif", "len", "(", "language", ")", "==", "3", ":", "if", "dash3", ":", "try", ":", "return", "languages", ".", "get", "(", "part3", "=", "language", ".", "lower", "(", ")", ")", "except", "KeyError", ":", "pass", "try", ":", "return", "languages", ".", "get", "(", "terminology", "=", "language", ".", "lower", "(", ")", ")", "except", "KeyError", ":", "pass", "try", ":", "return", "languages", ".", "get", "(", "bibliographic", "=", "language", ".", "lower", "(", ")", ")", "except", "KeyError", ":", "pass", "else", ":", "try", ":", "return", "languages", ".", "get", "(", "name", "=", "language", ".", "title", "(", ")", ")", "except", "KeyError", ":", "pass", "if", "dash3", ":", "try", ":", "return", "languages", ".", "get", "(", "inverted", "=", "language", ".", "title", "(", ")", ")", "except", "KeyError", ":", "pass", "for", "l", "in", "re", ".", "split", "(", "'[,.;: ]+'", ",", "language", ")", ":", "try", ":", "return", "languages", ".", "get", "(", "name", "=", "l", ".", "title", "(", ")", ")", "except", "KeyError", ":", "pass" ]
Use ISO 639-3 ??
[ "Use", "ISO", "639", "-", "3", "??" ]
train
https://github.com/noumar/iso639/blob/2175cf04b8b8cec79d99a6c4ad31295d67c22cd6/examples/logic.py#L11-L39
opennode/waldur-core
waldur_core/structure/executors.py
BaseCleanupExecutor.get_task_signature
def get_task_signature(cls, instance, serialized_instance, **kwargs): """ Delete each resource using specific executor. Convert executors to task and combine all deletion task into single sequential task. """ cleanup_tasks = [ ProjectResourceCleanupTask().si( core_utils.serialize_class(executor_cls), core_utils.serialize_class(model_cls), serialized_instance, ) for (model_cls, executor_cls) in cls.executors ] if not cleanup_tasks: return core_tasks.EmptyTask() return chain(cleanup_tasks)
python
def get_task_signature(cls, instance, serialized_instance, **kwargs): """ Delete each resource using specific executor. Convert executors to task and combine all deletion task into single sequential task. """ cleanup_tasks = [ ProjectResourceCleanupTask().si( core_utils.serialize_class(executor_cls), core_utils.serialize_class(model_cls), serialized_instance, ) for (model_cls, executor_cls) in cls.executors ] if not cleanup_tasks: return core_tasks.EmptyTask() return chain(cleanup_tasks)
[ "def", "get_task_signature", "(", "cls", ",", "instance", ",", "serialized_instance", ",", "*", "*", "kwargs", ")", ":", "cleanup_tasks", "=", "[", "ProjectResourceCleanupTask", "(", ")", ".", "si", "(", "core_utils", ".", "serialize_class", "(", "executor_cls", ")", ",", "core_utils", ".", "serialize_class", "(", "model_cls", ")", ",", "serialized_instance", ",", ")", "for", "(", "model_cls", ",", "executor_cls", ")", "in", "cls", ".", "executors", "]", "if", "not", "cleanup_tasks", ":", "return", "core_tasks", ".", "EmptyTask", "(", ")", "return", "chain", "(", "cleanup_tasks", ")" ]
Delete each resource using specific executor. Convert executors to task and combine all deletion task into single sequential task.
[ "Delete", "each", "resource", "using", "specific", "executor", ".", "Convert", "executors", "to", "task", "and", "combine", "all", "deletion", "task", "into", "single", "sequential", "task", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/structure/executors.py#L99-L116
opennode/waldur-core
waldur_core/core/utils.py
sort_dict
def sort_dict(unsorted_dict): """ Return a OrderedDict ordered by key names from the :unsorted_dict: """ sorted_dict = OrderedDict() # sort items before inserting them into a dict for key, value in sorted(unsorted_dict.items(), key=itemgetter(0)): sorted_dict[key] = value return sorted_dict
python
def sort_dict(unsorted_dict): """ Return a OrderedDict ordered by key names from the :unsorted_dict: """ sorted_dict = OrderedDict() # sort items before inserting them into a dict for key, value in sorted(unsorted_dict.items(), key=itemgetter(0)): sorted_dict[key] = value return sorted_dict
[ "def", "sort_dict", "(", "unsorted_dict", ")", ":", "sorted_dict", "=", "OrderedDict", "(", ")", "# sort items before inserting them into a dict", "for", "key", ",", "value", "in", "sorted", "(", "unsorted_dict", ".", "items", "(", ")", ",", "key", "=", "itemgetter", "(", "0", ")", ")", ":", "sorted_dict", "[", "key", "]", "=", "value", "return", "sorted_dict" ]
Return a OrderedDict ordered by key names from the :unsorted_dict:
[ "Return", "a", "OrderedDict", "ordered", "by", "key", "names", "from", "the", ":", "unsorted_dict", ":" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L25-L33
opennode/waldur-core
waldur_core/core/utils.py
format_time_and_value_to_segment_list
def format_time_and_value_to_segment_list(time_and_value_list, segments_count, start_timestamp, end_timestamp, average=False): """ Format time_and_value_list to time segments Parameters ^^^^^^^^^^ time_and_value_list: list of tuples Have to be sorted by time Example: [(time, value), (time, value) ...] segments_count: integer How many segments will be in result Returns ^^^^^^^ List of dictionaries Example: [{'from': time1, 'to': time2, 'value': sum_of_values_from_time1_to_time2}, ...] """ segment_list = [] time_step = (end_timestamp - start_timestamp) / segments_count for i in range(segments_count): segment_start_timestamp = start_timestamp + time_step * i segment_end_timestamp = segment_start_timestamp + time_step value_list = [ value for time, value in time_and_value_list if time >= segment_start_timestamp and time < segment_end_timestamp] segment_value = sum(value_list) if average and len(value_list) != 0: segment_value /= len(value_list) segment_list.append({ 'from': segment_start_timestamp, 'to': segment_end_timestamp, 'value': segment_value, }) return segment_list
python
def format_time_and_value_to_segment_list(time_and_value_list, segments_count, start_timestamp, end_timestamp, average=False): """ Format time_and_value_list to time segments Parameters ^^^^^^^^^^ time_and_value_list: list of tuples Have to be sorted by time Example: [(time, value), (time, value) ...] segments_count: integer How many segments will be in result Returns ^^^^^^^ List of dictionaries Example: [{'from': time1, 'to': time2, 'value': sum_of_values_from_time1_to_time2}, ...] """ segment_list = [] time_step = (end_timestamp - start_timestamp) / segments_count for i in range(segments_count): segment_start_timestamp = start_timestamp + time_step * i segment_end_timestamp = segment_start_timestamp + time_step value_list = [ value for time, value in time_and_value_list if time >= segment_start_timestamp and time < segment_end_timestamp] segment_value = sum(value_list) if average and len(value_list) != 0: segment_value /= len(value_list) segment_list.append({ 'from': segment_start_timestamp, 'to': segment_end_timestamp, 'value': segment_value, }) return segment_list
[ "def", "format_time_and_value_to_segment_list", "(", "time_and_value_list", ",", "segments_count", ",", "start_timestamp", ",", "end_timestamp", ",", "average", "=", "False", ")", ":", "segment_list", "=", "[", "]", "time_step", "=", "(", "end_timestamp", "-", "start_timestamp", ")", "/", "segments_count", "for", "i", "in", "range", "(", "segments_count", ")", ":", "segment_start_timestamp", "=", "start_timestamp", "+", "time_step", "*", "i", "segment_end_timestamp", "=", "segment_start_timestamp", "+", "time_step", "value_list", "=", "[", "value", "for", "time", ",", "value", "in", "time_and_value_list", "if", "time", ">=", "segment_start_timestamp", "and", "time", "<", "segment_end_timestamp", "]", "segment_value", "=", "sum", "(", "value_list", ")", "if", "average", "and", "len", "(", "value_list", ")", "!=", "0", ":", "segment_value", "/=", "len", "(", "value_list", ")", "segment_list", ".", "append", "(", "{", "'from'", ":", "segment_start_timestamp", ",", "'to'", ":", "segment_end_timestamp", ",", "'value'", ":", "segment_value", ",", "}", ")", "return", "segment_list" ]
Format time_and_value_list to time segments Parameters ^^^^^^^^^^ time_and_value_list: list of tuples Have to be sorted by time Example: [(time, value), (time, value) ...] segments_count: integer How many segments will be in result Returns ^^^^^^^ List of dictionaries Example: [{'from': time1, 'to': time2, 'value': sum_of_values_from_time1_to_time2}, ...]
[ "Format", "time_and_value_list", "to", "time", "segments" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L36-L71
opennode/waldur-core
waldur_core/core/utils.py
serialize_instance
def serialize_instance(instance): """ Serialize Django model instance """ model_name = force_text(instance._meta) return '{}:{}'.format(model_name, instance.pk)
python
def serialize_instance(instance): """ Serialize Django model instance """ model_name = force_text(instance._meta) return '{}:{}'.format(model_name, instance.pk)
[ "def", "serialize_instance", "(", "instance", ")", ":", "model_name", "=", "force_text", "(", "instance", ".", "_meta", ")", "return", "'{}:{}'", ".", "format", "(", "model_name", ",", "instance", ".", "pk", ")" ]
Serialize Django model instance
[ "Serialize", "Django", "model", "instance" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L121-L124
opennode/waldur-core
waldur_core/core/utils.py
deserialize_instance
def deserialize_instance(serialized_instance): """ Deserialize Django model instance """ model_name, pk = serialized_instance.split(':') model = apps.get_model(model_name) return model._default_manager.get(pk=pk)
python
def deserialize_instance(serialized_instance): """ Deserialize Django model instance """ model_name, pk = serialized_instance.split(':') model = apps.get_model(model_name) return model._default_manager.get(pk=pk)
[ "def", "deserialize_instance", "(", "serialized_instance", ")", ":", "model_name", ",", "pk", "=", "serialized_instance", ".", "split", "(", "':'", ")", "model", "=", "apps", ".", "get_model", "(", "model_name", ")", "return", "model", ".", "_default_manager", ".", "get", "(", "pk", "=", "pk", ")" ]
Deserialize Django model instance
[ "Deserialize", "Django", "model", "instance" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L127-L131
opennode/waldur-core
waldur_core/core/utils.py
deserialize_class
def deserialize_class(serilalized_cls): """ Deserialize Python class """ module_name, cls_name = serilalized_cls.split(':') module = importlib.import_module(module_name) return getattr(module, cls_name)
python
def deserialize_class(serilalized_cls): """ Deserialize Python class """ module_name, cls_name = serilalized_cls.split(':') module = importlib.import_module(module_name) return getattr(module, cls_name)
[ "def", "deserialize_class", "(", "serilalized_cls", ")", ":", "module_name", ",", "cls_name", "=", "serilalized_cls", ".", "split", "(", "':'", ")", "module", "=", "importlib", ".", "import_module", "(", "module_name", ")", "return", "getattr", "(", "module", ",", "cls_name", ")" ]
Deserialize Python class
[ "Deserialize", "Python", "class" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L139-L143
opennode/waldur-core
waldur_core/core/utils.py
instance_from_url
def instance_from_url(url, user=None): """ Restore instance from URL """ # XXX: This circular dependency will be removed then filter_queryset_for_user # will be moved to model manager method from waldur_core.structure.managers import filter_queryset_for_user url = clear_url(url) match = resolve(url) model = get_model_from_resolve_match(match) queryset = model.objects.all() if user is not None: queryset = filter_queryset_for_user(model.objects.all(), user) return queryset.get(**match.kwargs)
python
def instance_from_url(url, user=None): """ Restore instance from URL """ # XXX: This circular dependency will be removed then filter_queryset_for_user # will be moved to model manager method from waldur_core.structure.managers import filter_queryset_for_user url = clear_url(url) match = resolve(url) model = get_model_from_resolve_match(match) queryset = model.objects.all() if user is not None: queryset = filter_queryset_for_user(model.objects.all(), user) return queryset.get(**match.kwargs)
[ "def", "instance_from_url", "(", "url", ",", "user", "=", "None", ")", ":", "# XXX: This circular dependency will be removed then filter_queryset_for_user", "# will be moved to model manager method", "from", "waldur_core", ".", "structure", ".", "managers", "import", "filter_queryset_for_user", "url", "=", "clear_url", "(", "url", ")", "match", "=", "resolve", "(", "url", ")", "model", "=", "get_model_from_resolve_match", "(", "match", ")", "queryset", "=", "model", ".", "objects", ".", "all", "(", ")", "if", "user", "is", "not", "None", ":", "queryset", "=", "filter_queryset_for_user", "(", "model", ".", "objects", ".", "all", "(", ")", ",", "user", ")", "return", "queryset", ".", "get", "(", "*", "*", "match", ".", "kwargs", ")" ]
Restore instance from URL
[ "Restore", "instance", "from", "URL" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/core/utils.py#L161-L173
zeromake/aiosqlite3
aiosqlite3/sa/transaction.py
Transaction.close
def close(self): """ Close this transaction. If this transaction is the base transaction in a begin/commit nesting, the transaction will rollback(). Otherwise, the method returns. This is used to cancel a Transaction without affecting the scope of an enclosing transaction. """ if not self._connection or not self._parent: return if not self._parent._is_active: # pragma: no cover self._connection = None # self._parent = None return if self._parent is self: yield from self.rollback() else: self._is_active = False self._connection = None self._parent = None
python
def close(self): """ Close this transaction. If this transaction is the base transaction in a begin/commit nesting, the transaction will rollback(). Otherwise, the method returns. This is used to cancel a Transaction without affecting the scope of an enclosing transaction. """ if not self._connection or not self._parent: return if not self._parent._is_active: # pragma: no cover self._connection = None # self._parent = None return if self._parent is self: yield from self.rollback() else: self._is_active = False self._connection = None self._parent = None
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_connection", "or", "not", "self", ".", "_parent", ":", "return", "if", "not", "self", ".", "_parent", ".", "_is_active", ":", "# pragma: no cover", "self", ".", "_connection", "=", "None", "# self._parent = None", "return", "if", "self", ".", "_parent", "is", "self", ":", "yield", "from", "self", ".", "rollback", "(", ")", "else", ":", "self", ".", "_is_active", "=", "False", "self", ".", "_connection", "=", "None", "self", ".", "_parent", "=", "None" ]
Close this transaction. If this transaction is the base transaction in a begin/commit nesting, the transaction will rollback(). Otherwise, the method returns. This is used to cancel a Transaction without affecting the scope of an enclosing transaction.
[ "Close", "this", "transaction", "." ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/sa/transaction.py#L50-L73
zeromake/aiosqlite3
aiosqlite3/sa/transaction.py
Transaction.commit
def commit(self): """ Commit this transaction. """ if not self._parent._is_active: raise exc.InvalidRequestError("This transaction is inactive") yield from self._do_commit() self._is_active = False
python
def commit(self): """ Commit this transaction. """ if not self._parent._is_active: raise exc.InvalidRequestError("This transaction is inactive") yield from self._do_commit() self._is_active = False
[ "def", "commit", "(", "self", ")", ":", "if", "not", "self", ".", "_parent", ".", "_is_active", ":", "raise", "exc", ".", "InvalidRequestError", "(", "\"This transaction is inactive\"", ")", "yield", "from", "self", ".", "_do_commit", "(", ")", "self", ".", "_is_active", "=", "False" ]
Commit this transaction.
[ "Commit", "this", "transaction", "." ]
train
https://github.com/zeromake/aiosqlite3/blob/1a74a062507e2df8f833a70885e69dca0ab3e7e7/aiosqlite3/sa/transaction.py#L90-L98
opennode/waldur-core
waldur_core/server/admin/dashboard.py
CustomIndexDashboard._get_app_config
def _get_app_config(self, app_name): """ Returns an app config for the given name, not by label. """ matches = [app_config for app_config in apps.get_app_configs() if app_config.name == app_name] if not matches: return return matches[0]
python
def _get_app_config(self, app_name): """ Returns an app config for the given name, not by label. """ matches = [app_config for app_config in apps.get_app_configs() if app_config.name == app_name] if not matches: return return matches[0]
[ "def", "_get_app_config", "(", "self", ",", "app_name", ")", ":", "matches", "=", "[", "app_config", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", "if", "app_config", ".", "name", "==", "app_name", "]", "if", "not", "matches", ":", "return", "return", "matches", "[", "0", "]" ]
Returns an app config for the given name, not by label.
[ "Returns", "an", "app", "config", "for", "the", "given", "name", "not", "by", "label", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/server/admin/dashboard.py#L56-L65
opennode/waldur-core
waldur_core/server/admin/dashboard.py
CustomIndexDashboard._get_app_version
def _get_app_version(self, app_config): """ Some plugins ship multiple applications and extensions. However all of them have the same version, because they are released together. That's why only-top level module is used to fetch version information. """ base_name = app_config.__module__.split('.')[0] module = __import__(base_name) return getattr(module, '__version__', 'N/A')
python
def _get_app_version(self, app_config): """ Some plugins ship multiple applications and extensions. However all of them have the same version, because they are released together. That's why only-top level module is used to fetch version information. """ base_name = app_config.__module__.split('.')[0] module = __import__(base_name) return getattr(module, '__version__', 'N/A')
[ "def", "_get_app_version", "(", "self", ",", "app_config", ")", ":", "base_name", "=", "app_config", ".", "__module__", ".", "split", "(", "'.'", ")", "[", "0", "]", "module", "=", "__import__", "(", "base_name", ")", "return", "getattr", "(", "module", ",", "'__version__'", ",", "'N/A'", ")" ]
Some plugins ship multiple applications and extensions. However all of them have the same version, because they are released together. That's why only-top level module is used to fetch version information.
[ "Some", "plugins", "ship", "multiple", "applications", "and", "extensions", ".", "However", "all", "of", "them", "have", "the", "same", "version", "because", "they", "are", "released", "together", ".", "That", "s", "why", "only", "-", "top", "level", "module", "is", "used", "to", "fetch", "version", "information", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/server/admin/dashboard.py#L77-L86
opennode/waldur-core
waldur_core/server/admin/dashboard.py
CustomIndexDashboard._get_quick_access_info
def _get_quick_access_info(self): """ Returns a list of ListLink items to be added to Quick Access tab. Contains: - links to Organizations, Projects and Users; - a link to shared service settings; - custom configured links in admin/settings FLUENT_DASHBOARD_QUICK_ACCESS_LINKS attribute; """ quick_access_links = [] # add custom links quick_access_links.extend(settings.FLUENT_DASHBOARD_QUICK_ACCESS_LINKS) for model in (structure_models.Project, structure_models.Customer, core_models.User, structure_models.SharedServiceSettings): quick_access_links.append(self._get_link_to_model(model)) return quick_access_links
python
def _get_quick_access_info(self): """ Returns a list of ListLink items to be added to Quick Access tab. Contains: - links to Organizations, Projects and Users; - a link to shared service settings; - custom configured links in admin/settings FLUENT_DASHBOARD_QUICK_ACCESS_LINKS attribute; """ quick_access_links = [] # add custom links quick_access_links.extend(settings.FLUENT_DASHBOARD_QUICK_ACCESS_LINKS) for model in (structure_models.Project, structure_models.Customer, core_models.User, structure_models.SharedServiceSettings): quick_access_links.append(self._get_link_to_model(model)) return quick_access_links
[ "def", "_get_quick_access_info", "(", "self", ")", ":", "quick_access_links", "=", "[", "]", "# add custom links", "quick_access_links", ".", "extend", "(", "settings", ".", "FLUENT_DASHBOARD_QUICK_ACCESS_LINKS", ")", "for", "model", "in", "(", "structure_models", ".", "Project", ",", "structure_models", ".", "Customer", ",", "core_models", ".", "User", ",", "structure_models", ".", "SharedServiceSettings", ")", ":", "quick_access_links", ".", "append", "(", "self", ".", "_get_link_to_model", "(", "model", ")", ")", "return", "quick_access_links" ]
Returns a list of ListLink items to be added to Quick Access tab. Contains: - links to Organizations, Projects and Users; - a link to shared service settings; - custom configured links in admin/settings FLUENT_DASHBOARD_QUICK_ACCESS_LINKS attribute;
[ "Returns", "a", "list", "of", "ListLink", "items", "to", "be", "added", "to", "Quick", "Access", "tab", ".", "Contains", ":", "-", "links", "to", "Organizations", "Projects", "and", "Users", ";", "-", "a", "link", "to", "shared", "service", "settings", ";", "-", "custom", "configured", "links", "in", "admin", "/", "settings", "FLUENT_DASHBOARD_QUICK_ACCESS_LINKS", "attribute", ";" ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/server/admin/dashboard.py#L88-L107
opennode/waldur-core
waldur_core/server/admin/dashboard.py
CustomIndexDashboard._get_erred_shared_settings_module
def _get_erred_shared_settings_module(self): """ Returns a LinkList based module which contains link to shared service setting instances in ERRED state. """ result_module = modules.LinkList(title=_('Shared provider settings in erred state')) result_module.template = 'admin/dashboard/erred_link_list.html' erred_state = structure_models.SharedServiceSettings.States.ERRED queryset = structure_models.SharedServiceSettings.objects settings_in_erred_state = queryset.filter(state=erred_state).count() if settings_in_erred_state: result_module.title = '%s (%s)' % (result_module.title, settings_in_erred_state) for service_settings in queryset.filter(state=erred_state).iterator(): module_child = self._get_link_to_instance(service_settings) module_child['error'] = service_settings.error_message result_module.children.append(module_child) else: result_module.pre_content = _('Nothing found.') return result_module
python
def _get_erred_shared_settings_module(self): """ Returns a LinkList based module which contains link to shared service setting instances in ERRED state. """ result_module = modules.LinkList(title=_('Shared provider settings in erred state')) result_module.template = 'admin/dashboard/erred_link_list.html' erred_state = structure_models.SharedServiceSettings.States.ERRED queryset = structure_models.SharedServiceSettings.objects settings_in_erred_state = queryset.filter(state=erred_state).count() if settings_in_erred_state: result_module.title = '%s (%s)' % (result_module.title, settings_in_erred_state) for service_settings in queryset.filter(state=erred_state).iterator(): module_child = self._get_link_to_instance(service_settings) module_child['error'] = service_settings.error_message result_module.children.append(module_child) else: result_module.pre_content = _('Nothing found.') return result_module
[ "def", "_get_erred_shared_settings_module", "(", "self", ")", ":", "result_module", "=", "modules", ".", "LinkList", "(", "title", "=", "_", "(", "'Shared provider settings in erred state'", ")", ")", "result_module", ".", "template", "=", "'admin/dashboard/erred_link_list.html'", "erred_state", "=", "structure_models", ".", "SharedServiceSettings", ".", "States", ".", "ERRED", "queryset", "=", "structure_models", ".", "SharedServiceSettings", ".", "objects", "settings_in_erred_state", "=", "queryset", ".", "filter", "(", "state", "=", "erred_state", ")", ".", "count", "(", ")", "if", "settings_in_erred_state", ":", "result_module", ".", "title", "=", "'%s (%s)'", "%", "(", "result_module", ".", "title", ",", "settings_in_erred_state", ")", "for", "service_settings", "in", "queryset", ".", "filter", "(", "state", "=", "erred_state", ")", ".", "iterator", "(", ")", ":", "module_child", "=", "self", ".", "_get_link_to_instance", "(", "service_settings", ")", "module_child", "[", "'error'", "]", "=", "service_settings", ".", "error_message", "result_module", ".", "children", ".", "append", "(", "module_child", ")", "else", ":", "result_module", ".", "pre_content", "=", "_", "(", "'Nothing found.'", ")", "return", "result_module" ]
Returns a LinkList based module which contains link to shared service setting instances in ERRED state.
[ "Returns", "a", "LinkList", "based", "module", "which", "contains", "link", "to", "shared", "service", "setting", "instances", "in", "ERRED", "state", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/server/admin/dashboard.py#L135-L155
opennode/waldur-core
waldur_core/server/admin/dashboard.py
CustomIndexDashboard._get_erred_resources_module
def _get_erred_resources_module(self): """ Returns a list of links to resources which are in ERRED state and linked to a shared service settings. """ result_module = modules.LinkList(title=_('Resources in erred state')) erred_state = structure_models.NewResource.States.ERRED children = [] resource_models = SupportedServices.get_resource_models() resources_in_erred_state_overall = 0 for resource_type, resource_model in resource_models.items(): queryset = resource_model.objects.filter(service_project_link__service__settings__shared=True) erred_amount = queryset.filter(state=erred_state).count() if erred_amount: resources_in_erred_state_overall = resources_in_erred_state_overall + erred_amount link = self._get_erred_resource_link(resource_model, erred_amount, erred_state) children.append(link) if resources_in_erred_state_overall: result_module.title = '%s (%s)' % (result_module.title, resources_in_erred_state_overall) result_module.children = children else: result_module.pre_content = _('Nothing found.') return result_module
python
def _get_erred_resources_module(self): """ Returns a list of links to resources which are in ERRED state and linked to a shared service settings. """ result_module = modules.LinkList(title=_('Resources in erred state')) erred_state = structure_models.NewResource.States.ERRED children = [] resource_models = SupportedServices.get_resource_models() resources_in_erred_state_overall = 0 for resource_type, resource_model in resource_models.items(): queryset = resource_model.objects.filter(service_project_link__service__settings__shared=True) erred_amount = queryset.filter(state=erred_state).count() if erred_amount: resources_in_erred_state_overall = resources_in_erred_state_overall + erred_amount link = self._get_erred_resource_link(resource_model, erred_amount, erred_state) children.append(link) if resources_in_erred_state_overall: result_module.title = '%s (%s)' % (result_module.title, resources_in_erred_state_overall) result_module.children = children else: result_module.pre_content = _('Nothing found.') return result_module
[ "def", "_get_erred_resources_module", "(", "self", ")", ":", "result_module", "=", "modules", ".", "LinkList", "(", "title", "=", "_", "(", "'Resources in erred state'", ")", ")", "erred_state", "=", "structure_models", ".", "NewResource", ".", "States", ".", "ERRED", "children", "=", "[", "]", "resource_models", "=", "SupportedServices", ".", "get_resource_models", "(", ")", "resources_in_erred_state_overall", "=", "0", "for", "resource_type", ",", "resource_model", "in", "resource_models", ".", "items", "(", ")", ":", "queryset", "=", "resource_model", ".", "objects", ".", "filter", "(", "service_project_link__service__settings__shared", "=", "True", ")", "erred_amount", "=", "queryset", ".", "filter", "(", "state", "=", "erred_state", ")", ".", "count", "(", ")", "if", "erred_amount", ":", "resources_in_erred_state_overall", "=", "resources_in_erred_state_overall", "+", "erred_amount", "link", "=", "self", ".", "_get_erred_resource_link", "(", "resource_model", ",", "erred_amount", ",", "erred_state", ")", "children", ".", "append", "(", "link", ")", "if", "resources_in_erred_state_overall", ":", "result_module", ".", "title", "=", "'%s (%s)'", "%", "(", "result_module", ".", "title", ",", "resources_in_erred_state_overall", ")", "result_module", ".", "children", "=", "children", "else", ":", "result_module", ".", "pre_content", "=", "_", "(", "'Nothing found.'", ")", "return", "result_module" ]
Returns a list of links to resources which are in ERRED state and linked to a shared service settings.
[ "Returns", "a", "list", "of", "links", "to", "resources", "which", "are", "in", "ERRED", "state", "and", "linked", "to", "a", "shared", "service", "settings", "." ]
train
https://github.com/opennode/waldur-core/blob/d6c17a9592bb6c49c33567542eef8d099605a46a/waldur_core/server/admin/dashboard.py#L157-L181
stepank/pyws
src/pyws/adapters/_django.py
serve
def serve(request, tail, server): """ Django adapter. It has three arguments: #. ``request`` is a Django request object, #. ``tail`` is everything that's left from an URL, which adapter is attached to, #. ``server`` is a pyws server object. First two are the context of an application, function ``serve`` transforms them into a pyws request object. Then it feeds the request to the server, gets the response and transforms it into a Django response object. """ if request.GET: body = '' else: try: body = request.body except AttributeError: body = request.raw_post_data request = Request( tail, body, parse_qs(request.META['QUERY_STRING']), parse_qs(body), request.COOKIES, ) response = server.process_request(request) return HttpResponse( response.text, content_type=response.content_type, status=get_http_response_code_num(response))
python
def serve(request, tail, server): """ Django adapter. It has three arguments: #. ``request`` is a Django request object, #. ``tail`` is everything that's left from an URL, which adapter is attached to, #. ``server`` is a pyws server object. First two are the context of an application, function ``serve`` transforms them into a pyws request object. Then it feeds the request to the server, gets the response and transforms it into a Django response object. """ if request.GET: body = '' else: try: body = request.body except AttributeError: body = request.raw_post_data request = Request( tail, body, parse_qs(request.META['QUERY_STRING']), parse_qs(body), request.COOKIES, ) response = server.process_request(request) return HttpResponse( response.text, content_type=response.content_type, status=get_http_response_code_num(response))
[ "def", "serve", "(", "request", ",", "tail", ",", "server", ")", ":", "if", "request", ".", "GET", ":", "body", "=", "''", "else", ":", "try", ":", "body", "=", "request", ".", "body", "except", "AttributeError", ":", "body", "=", "request", ".", "raw_post_data", "request", "=", "Request", "(", "tail", ",", "body", ",", "parse_qs", "(", "request", ".", "META", "[", "'QUERY_STRING'", "]", ")", ",", "parse_qs", "(", "body", ")", ",", "request", ".", "COOKIES", ",", ")", "response", "=", "server", ".", "process_request", "(", "request", ")", "return", "HttpResponse", "(", "response", ".", "text", ",", "content_type", "=", "response", ".", "content_type", ",", "status", "=", "get_http_response_code_num", "(", "response", ")", ")" ]
Django adapter. It has three arguments: #. ``request`` is a Django request object, #. ``tail`` is everything that's left from an URL, which adapter is attached to, #. ``server`` is a pyws server object. First two are the context of an application, function ``serve`` transforms them into a pyws request object. Then it feeds the request to the server, gets the response and transforms it into a Django response object.
[ "Django", "adapter", ".", "It", "has", "three", "arguments", ":" ]
train
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/adapters/_django.py#L10-L44
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.index
def index(self, nurest_object): """ Get index of the given item Args: nurest_object (bambou.NURESTObject): the NURESTObject object to verify Returns: Returns the position of the object. Raises: Raise a ValueError exception if object is not present """ for index, obj in enumerate(self): if obj.equals(nurest_object): return index raise ValueError("%s is not in %s" % (nurest_object, self))
python
def index(self, nurest_object): """ Get index of the given item Args: nurest_object (bambou.NURESTObject): the NURESTObject object to verify Returns: Returns the position of the object. Raises: Raise a ValueError exception if object is not present """ for index, obj in enumerate(self): if obj.equals(nurest_object): return index raise ValueError("%s is not in %s" % (nurest_object, self))
[ "def", "index", "(", "self", ",", "nurest_object", ")", ":", "for", "index", ",", "obj", "in", "enumerate", "(", "self", ")", ":", "if", "obj", ".", "equals", "(", "nurest_object", ")", ":", "return", "index", "raise", "ValueError", "(", "\"%s is not in %s\"", "%", "(", "nurest_object", ",", "self", ")", ")" ]
Get index of the given item Args: nurest_object (bambou.NURESTObject): the NURESTObject object to verify Returns: Returns the position of the object. Raises: Raise a ValueError exception if object is not present
[ "Get", "index", "of", "the", "given", "item", "Args", ":", "nurest_object", "(", "bambou", ".", "NURESTObject", ")", ":", "the", "NURESTObject", "object", "to", "verify" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L78-L93
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.fetcher_with_object
def fetcher_with_object(cls, parent_object, relationship="child"): """ Register the fetcher for a served object. This method will fill the fetcher with `managed_class` instances Args: parent_object: the instance of the parent object to serve Returns: It returns the fetcher instance. """ fetcher = cls() fetcher.parent_object = parent_object fetcher.relationship = relationship rest_name = cls.managed_object_rest_name() parent_object.register_fetcher(fetcher, rest_name) return fetcher
python
def fetcher_with_object(cls, parent_object, relationship="child"): """ Register the fetcher for a served object. This method will fill the fetcher with `managed_class` instances Args: parent_object: the instance of the parent object to serve Returns: It returns the fetcher instance. """ fetcher = cls() fetcher.parent_object = parent_object fetcher.relationship = relationship rest_name = cls.managed_object_rest_name() parent_object.register_fetcher(fetcher, rest_name) return fetcher
[ "def", "fetcher_with_object", "(", "cls", ",", "parent_object", ",", "relationship", "=", "\"child\"", ")", ":", "fetcher", "=", "cls", "(", ")", "fetcher", ".", "parent_object", "=", "parent_object", "fetcher", ".", "relationship", "=", "relationship", "rest_name", "=", "cls", ".", "managed_object_rest_name", "(", ")", "parent_object", ".", "register_fetcher", "(", "fetcher", ",", "rest_name", ")", "return", "fetcher" ]
Register the fetcher for a served object. This method will fill the fetcher with `managed_class` instances Args: parent_object: the instance of the parent object to serve Returns: It returns the fetcher instance.
[ "Register", "the", "fetcher", "for", "a", "served", "object", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L183-L202
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher._prepare_headers
def _prepare_headers(self, request, filter=None, order_by=None, group_by=[], page=None, page_size=None): """ Prepare headers for the given request Args: request: the NURESTRequest to send filter: string order_by: string group_by: list of names page: int page_size: int """ if filter: request.set_header('X-Nuage-Filter', filter) if order_by: request.set_header('X-Nuage-OrderBy', order_by) if page is not None: request.set_header('X-Nuage-Page', str(page)) if page_size: request.set_header('X-Nuage-PageSize', str(page_size)) if len(group_by) > 0: header = ", ".join(group_by) request.set_header('X-Nuage-GroupBy', 'true') request.set_header('X-Nuage-Attributes', header)
python
def _prepare_headers(self, request, filter=None, order_by=None, group_by=[], page=None, page_size=None): """ Prepare headers for the given request Args: request: the NURESTRequest to send filter: string order_by: string group_by: list of names page: int page_size: int """ if filter: request.set_header('X-Nuage-Filter', filter) if order_by: request.set_header('X-Nuage-OrderBy', order_by) if page is not None: request.set_header('X-Nuage-Page', str(page)) if page_size: request.set_header('X-Nuage-PageSize', str(page_size)) if len(group_by) > 0: header = ", ".join(group_by) request.set_header('X-Nuage-GroupBy', 'true') request.set_header('X-Nuage-Attributes', header)
[ "def", "_prepare_headers", "(", "self", ",", "request", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "page", "=", "None", ",", "page_size", "=", "None", ")", ":", "if", "filter", ":", "request", ".", "set_header", "(", "'X-Nuage-Filter'", ",", "filter", ")", "if", "order_by", ":", "request", ".", "set_header", "(", "'X-Nuage-OrderBy'", ",", "order_by", ")", "if", "page", "is", "not", "None", ":", "request", ".", "set_header", "(", "'X-Nuage-Page'", ",", "str", "(", "page", ")", ")", "if", "page_size", ":", "request", ".", "set_header", "(", "'X-Nuage-PageSize'", ",", "str", "(", "page_size", ")", ")", "if", "len", "(", "group_by", ")", ">", "0", ":", "header", "=", "\", \"", ".", "join", "(", "group_by", ")", "request", ".", "set_header", "(", "'X-Nuage-GroupBy'", ",", "'true'", ")", "request", ".", "set_header", "(", "'X-Nuage-Attributes'", ",", "header", ")" ]
Prepare headers for the given request Args: request: the NURESTRequest to send filter: string order_by: string group_by: list of names page: int page_size: int
[ "Prepare", "headers", "for", "the", "given", "request" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L222-L249
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.fetch
def fetch(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, commit=True, async=False, callback=None): """ Fetch objects according to given filter and page. Note: This method fetches all managed class objects and store them in local_name of the served object. which means that the parent object will hold them in a list. You can prevent this behavior by setting commit to False. In that case, the fetched children won't be added in the parent object cache. Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: tuple: Returns a tuple of information (fetcher, served object, fetched objects, connection) Example: >>> entity.children.fetch() (<NUChildrenFetcher at aaaa>, <NUEntity at bbbb>, [<NUChildren at ccc>, <NUChildren at ddd>], <NURESTConnection at zzz>) """ request = NURESTRequest(method=HTTP_METHOD_GET, url=self._prepare_url(), params=query_parameters) self._prepare_headers(request=request, filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size) if async: return self.parent_object.send_request(request=request, async=async, local_callback=self._did_fetch, remote_callback=callback, user_info={'commit': commit}) connection = self.parent_object.send_request(request=request, user_info={'commit': commit}) return self._did_fetch(connection=connection)
python
def fetch(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, commit=True, async=False, callback=None): """ Fetch objects according to given filter and page. Note: This method fetches all managed class objects and store them in local_name of the served object. which means that the parent object will hold them in a list. You can prevent this behavior by setting commit to False. In that case, the fetched children won't be added in the parent object cache. Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: tuple: Returns a tuple of information (fetcher, served object, fetched objects, connection) Example: >>> entity.children.fetch() (<NUChildrenFetcher at aaaa>, <NUEntity at bbbb>, [<NUChildren at ccc>, <NUChildren at ddd>], <NURESTConnection at zzz>) """ request = NURESTRequest(method=HTTP_METHOD_GET, url=self._prepare_url(), params=query_parameters) self._prepare_headers(request=request, filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size) if async: return self.parent_object.send_request(request=request, async=async, local_callback=self._did_fetch, remote_callback=callback, user_info={'commit': commit}) connection = self.parent_object.send_request(request=request, user_info={'commit': commit}) return self._did_fetch(connection=connection)
[ "def", "fetch", "(", "self", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "page", "=", "None", ",", "page_size", "=", "None", ",", "query_parameters", "=", "None", ",", "commit", "=", "True", ",", "async", "=", "False", ",", "callback", "=", "None", ")", ":", "request", "=", "NURESTRequest", "(", "method", "=", "HTTP_METHOD_GET", ",", "url", "=", "self", ".", "_prepare_url", "(", ")", ",", "params", "=", "query_parameters", ")", "self", ".", "_prepare_headers", "(", "request", "=", "request", ",", "filter", "=", "filter", ",", "order_by", "=", "order_by", ",", "group_by", "=", "group_by", ",", "page", "=", "page", ",", "page_size", "=", "page_size", ")", "if", "async", ":", "return", "self", ".", "parent_object", ".", "send_request", "(", "request", "=", "request", ",", "async", "=", "async", ",", "local_callback", "=", "self", ".", "_did_fetch", ",", "remote_callback", "=", "callback", ",", "user_info", "=", "{", "'commit'", ":", "commit", "}", ")", "connection", "=", "self", ".", "parent_object", ".", "send_request", "(", "request", "=", "request", ",", "user_info", "=", "{", "'commit'", ":", "commit", "}", ")", "return", "self", ".", "_did_fetch", "(", "connection", "=", "connection", ")" ]
Fetch objects according to given filter and page. Note: This method fetches all managed class objects and store them in local_name of the served object. which means that the parent object will hold them in a list. You can prevent this behavior by setting commit to False. In that case, the fetched children won't be added in the parent object cache. Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: tuple: Returns a tuple of information (fetcher, served object, fetched objects, connection) Example: >>> entity.children.fetch() (<NUChildrenFetcher at aaaa>, <NUEntity at bbbb>, [<NUChildren at ccc>, <NUChildren at ddd>], <NURESTConnection at zzz>)
[ "Fetch", "objects", "according", "to", "given", "filter", "and", "page", "." ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L256-L291
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher._did_fetch
def _did_fetch(self, connection): """ Fetching objects has been done """ self.current_connection = connection response = connection.response should_commit = 'commit' not in connection.user_info or connection.user_info['commit'] if connection.response.status_code >= 400 and BambouConfig._should_raise_bambou_http_error: raise BambouHTTPError(connection=connection) if response.status_code != 200: if should_commit: self.current_total_count = 0 self.current_page = 0 self.current_ordered_by = '' return self._send_content(content=None, connection=connection) results = response.data fetched_objects = list() current_ids = list() if should_commit: if 'X-Nuage-Count' in response.headers and response.headers['X-Nuage-Count']: self.current_total_count = int(response.headers['X-Nuage-Count']) if 'X-Nuage-Page' in response.headers and response.headers['X-Nuage-Page']: self.current_page = int(response.headers['X-Nuage-Page']) if 'X-Nuage-OrderBy' in response.headers and response.headers['X-Nuage-OrderBy']: self.current_ordered_by = response.headers['X-Nuage-OrderBy'] if results: for result in results: nurest_object = self.new() nurest_object.from_dict(result) nurest_object.parent = self.parent_object fetched_objects.append(nurest_object) if not should_commit: continue current_ids.append(nurest_object.id) if nurest_object in self: idx = self.index(nurest_object) current_object = self[idx] current_object.from_dict(nurest_object.to_dict()) else: self.append(nurest_object) if should_commit: for obj in self: if obj.id not in current_ids: self.remove(obj) return self._send_content(content=fetched_objects, connection=connection)
python
def _did_fetch(self, connection): """ Fetching objects has been done """ self.current_connection = connection response = connection.response should_commit = 'commit' not in connection.user_info or connection.user_info['commit'] if connection.response.status_code >= 400 and BambouConfig._should_raise_bambou_http_error: raise BambouHTTPError(connection=connection) if response.status_code != 200: if should_commit: self.current_total_count = 0 self.current_page = 0 self.current_ordered_by = '' return self._send_content(content=None, connection=connection) results = response.data fetched_objects = list() current_ids = list() if should_commit: if 'X-Nuage-Count' in response.headers and response.headers['X-Nuage-Count']: self.current_total_count = int(response.headers['X-Nuage-Count']) if 'X-Nuage-Page' in response.headers and response.headers['X-Nuage-Page']: self.current_page = int(response.headers['X-Nuage-Page']) if 'X-Nuage-OrderBy' in response.headers and response.headers['X-Nuage-OrderBy']: self.current_ordered_by = response.headers['X-Nuage-OrderBy'] if results: for result in results: nurest_object = self.new() nurest_object.from_dict(result) nurest_object.parent = self.parent_object fetched_objects.append(nurest_object) if not should_commit: continue current_ids.append(nurest_object.id) if nurest_object in self: idx = self.index(nurest_object) current_object = self[idx] current_object.from_dict(nurest_object.to_dict()) else: self.append(nurest_object) if should_commit: for obj in self: if obj.id not in current_ids: self.remove(obj) return self._send_content(content=fetched_objects, connection=connection)
[ "def", "_did_fetch", "(", "self", ",", "connection", ")", ":", "self", ".", "current_connection", "=", "connection", "response", "=", "connection", ".", "response", "should_commit", "=", "'commit'", "not", "in", "connection", ".", "user_info", "or", "connection", ".", "user_info", "[", "'commit'", "]", "if", "connection", ".", "response", ".", "status_code", ">=", "400", "and", "BambouConfig", ".", "_should_raise_bambou_http_error", ":", "raise", "BambouHTTPError", "(", "connection", "=", "connection", ")", "if", "response", ".", "status_code", "!=", "200", ":", "if", "should_commit", ":", "self", ".", "current_total_count", "=", "0", "self", ".", "current_page", "=", "0", "self", ".", "current_ordered_by", "=", "''", "return", "self", ".", "_send_content", "(", "content", "=", "None", ",", "connection", "=", "connection", ")", "results", "=", "response", ".", "data", "fetched_objects", "=", "list", "(", ")", "current_ids", "=", "list", "(", ")", "if", "should_commit", ":", "if", "'X-Nuage-Count'", "in", "response", ".", "headers", "and", "response", ".", "headers", "[", "'X-Nuage-Count'", "]", ":", "self", ".", "current_total_count", "=", "int", "(", "response", ".", "headers", "[", "'X-Nuage-Count'", "]", ")", "if", "'X-Nuage-Page'", "in", "response", ".", "headers", "and", "response", ".", "headers", "[", "'X-Nuage-Page'", "]", ":", "self", ".", "current_page", "=", "int", "(", "response", ".", "headers", "[", "'X-Nuage-Page'", "]", ")", "if", "'X-Nuage-OrderBy'", "in", "response", ".", "headers", "and", "response", ".", "headers", "[", "'X-Nuage-OrderBy'", "]", ":", "self", ".", "current_ordered_by", "=", "response", ".", "headers", "[", "'X-Nuage-OrderBy'", "]", "if", "results", ":", "for", "result", "in", "results", ":", "nurest_object", "=", "self", ".", "new", "(", ")", "nurest_object", ".", "from_dict", "(", "result", ")", "nurest_object", ".", "parent", "=", "self", ".", "parent_object", "fetched_objects", ".", "append", "(", "nurest_object", ")", "if", 
"not", "should_commit", ":", "continue", "current_ids", ".", "append", "(", "nurest_object", ".", "id", ")", "if", "nurest_object", "in", "self", ":", "idx", "=", "self", ".", "index", "(", "nurest_object", ")", "current_object", "=", "self", "[", "idx", "]", "current_object", ".", "from_dict", "(", "nurest_object", ".", "to_dict", "(", ")", ")", "else", ":", "self", ".", "append", "(", "nurest_object", ")", "if", "should_commit", ":", "for", "obj", "in", "self", ":", "if", "obj", ".", "id", "not", "in", "current_ids", ":", "self", ".", "remove", "(", "obj", ")", "return", "self", ".", "_send_content", "(", "content", "=", "fetched_objects", ",", "connection", "=", "connection", ")" ]
Fetching objects has been done
[ "Fetching", "objects", "has", "been", "done" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L293-L351
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.get
def get(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, commit=True, async=False, callback=None): """ Fetch object and directly return them Note: `get` won't put the fetched objects in the parent's children list. You cannot override this behavior. If you want to commit them in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add the list with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: list: list of vsdk.NURESTObject if any Example: >>> print entity.children.get() [<NUChildren at xxx>, <NUChildren at yyyy>, <NUChildren at zzz>] """ return self.fetch(filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size, query_parameters=query_parameters, commit=commit)[2]
python
def get(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, commit=True, async=False, callback=None): """ Fetch object and directly return them Note: `get` won't put the fetched objects in the parent's children list. You cannot override this behavior. If you want to commit them in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add the list with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: list: list of vsdk.NURESTObject if any Example: >>> print entity.children.get() [<NUChildren at xxx>, <NUChildren at yyyy>, <NUChildren at zzz>] """ return self.fetch(filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size, query_parameters=query_parameters, commit=commit)[2]
[ "def", "get", "(", "self", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "page", "=", "None", ",", "page_size", "=", "None", ",", "query_parameters", "=", "None", ",", "commit", "=", "True", ",", "async", "=", "False", ",", "callback", "=", "None", ")", ":", "return", "self", ".", "fetch", "(", "filter", "=", "filter", ",", "order_by", "=", "order_by", ",", "group_by", "=", "group_by", ",", "page", "=", "page", ",", "page_size", "=", "page_size", ",", "query_parameters", "=", "query_parameters", ",", "commit", "=", "commit", ")", "[", "2", "]" ]
Fetch object and directly return them Note: `get` won't put the fetched objects in the parent's children list. You cannot override this behavior. If you want to commit them in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add the list with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: list: list of vsdk.NURESTObject if any Example: >>> print entity.children.get() [<NUChildren at xxx>, <NUChildren at yyyy>, <NUChildren at zzz>]
[ "Fetch", "object", "and", "directly", "return", "them" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L353-L378
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.get_first
def get_first(self, filter=None, order_by=None, group_by=[], query_parameters=None, commit=False, async=False, callback=None): """ Fetch object and directly return the first one Note: `get_first` won't put the fetched object in the parent's children list. You cannot override this behavior. If you want to commit it in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add it with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: vsdk.NURESTObject: the first object if any, or None Example: >>> print entity.children.get_first(filter="name == 'My Entity'") <NUChildren at xxx> """ objects = self.get(filter=filter, order_by=order_by, group_by=group_by, page=0, page_size=1, query_parameters=query_parameters, commit=commit) return objects[0] if len(objects) else None
python
def get_first(self, filter=None, order_by=None, group_by=[], query_parameters=None, commit=False, async=False, callback=None): """ Fetch object and directly return the first one Note: `get_first` won't put the fetched object in the parent's children list. You cannot override this behavior. If you want to commit it in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add it with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: vsdk.NURESTObject: the first object if any, or None Example: >>> print entity.children.get_first(filter="name == 'My Entity'") <NUChildren at xxx> """ objects = self.get(filter=filter, order_by=order_by, group_by=group_by, page=0, page_size=1, query_parameters=query_parameters, commit=commit) return objects[0] if len(objects) else None
[ "def", "get_first", "(", "self", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "query_parameters", "=", "None", ",", "commit", "=", "False", ",", "async", "=", "False", ",", "callback", "=", "None", ")", ":", "objects", "=", "self", ".", "get", "(", "filter", "=", "filter", ",", "order_by", "=", "order_by", ",", "group_by", "=", "group_by", ",", "page", "=", "0", ",", "page_size", "=", "1", ",", "query_parameters", "=", "query_parameters", ",", "commit", "=", "commit", ")", "return", "objects", "[", "0", "]", "if", "len", "(", "objects", ")", "else", "None" ]
Fetch object and directly return the first one Note: `get_first` won't put the fetched object in the parent's children list. You cannot override this behavior. If you want to commit it in the parent you can use :method:vsdk.NURESTFetcher.fetch or manually add it with :method:vsdk.NURESTObject.add_child Args: filter (string): string that represents a predicate filter order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page commit (bool): boolean to update current object callback (function): Callback that should be called in case of a async request Returns: vsdk.NURESTObject: the first object if any, or None Example: >>> print entity.children.get_first(filter="name == 'My Entity'") <NUChildren at xxx>
[ "Fetch", "object", "and", "directly", "return", "the", "first", "one" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L380-L406
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.count
def count(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, async=False, callback=None): """ Get the total count of objects that can be fetched according to filter This method can be asynchronous and trigger the callback method when result is ready. Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page callback (function): Method that will be triggered asynchronously Returns: Returns a transaction ID when asynchronous call is made. Otherwise it will return a tuple of information containing (fetcher, served object, count of fetched objects) """ request = NURESTRequest(method=HTTP_METHOD_HEAD, url=self._prepare_url(), params=query_parameters) self._prepare_headers(request=request, filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size) if async: return self.parent_object.send_request(request=request, async=async, local_callback=self._did_count, remote_callback=callback) else: connection = self.parent_object.send_request(request=request) return self._did_count(connection)
python
def count(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, async=False, callback=None): """ Get the total count of objects that can be fetched according to filter This method can be asynchronous and trigger the callback method when result is ready. Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page callback (function): Method that will be triggered asynchronously Returns: Returns a transaction ID when asynchronous call is made. Otherwise it will return a tuple of information containing (fetcher, served object, count of fetched objects) """ request = NURESTRequest(method=HTTP_METHOD_HEAD, url=self._prepare_url(), params=query_parameters) self._prepare_headers(request=request, filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size) if async: return self.parent_object.send_request(request=request, async=async, local_callback=self._did_count, remote_callback=callback) else: connection = self.parent_object.send_request(request=request) return self._did_count(connection)
[ "def", "count", "(", "self", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "page", "=", "None", ",", "page_size", "=", "None", ",", "query_parameters", "=", "None", ",", "async", "=", "False", ",", "callback", "=", "None", ")", ":", "request", "=", "NURESTRequest", "(", "method", "=", "HTTP_METHOD_HEAD", ",", "url", "=", "self", ".", "_prepare_url", "(", ")", ",", "params", "=", "query_parameters", ")", "self", ".", "_prepare_headers", "(", "request", "=", "request", ",", "filter", "=", "filter", ",", "order_by", "=", "order_by", ",", "group_by", "=", "group_by", ",", "page", "=", "page", ",", "page_size", "=", "page_size", ")", "if", "async", ":", "return", "self", ".", "parent_object", ".", "send_request", "(", "request", "=", "request", ",", "async", "=", "async", ",", "local_callback", "=", "self", ".", "_did_count", ",", "remote_callback", "=", "callback", ")", "else", ":", "connection", "=", "self", ".", "parent_object", ".", "send_request", "(", "request", "=", "request", ")", "return", "self", ".", "_did_count", "(", "connection", ")" ]
Get the total count of objects that can be fetched according to filter This method can be asynchronous and trigger the callback method when result is ready. Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page callback (function): Method that will be triggered asynchronously Returns: Returns a transaction ID when asynchronous call is made. Otherwise it will return a tuple of information containing (fetcher, served object, count of fetched objects)
[ "Get", "the", "total", "count", "of", "objects", "that", "can", "be", "fetched", "according", "to", "filter" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L408-L436
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher.get_count
def get_count(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None): """ Get the total count of objects that can be fetched according to filter Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page Returns: Returns the number of objects found """ return self.count(filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size, query_parameters=query_parameters, async=False)[2]
python
def get_count(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None): """ Get the total count of objects that can be fetched according to filter Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page Returns: Returns the number of objects found """ return self.count(filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size, query_parameters=query_parameters, async=False)[2]
[ "def", "get_count", "(", "self", ",", "filter", "=", "None", ",", "order_by", "=", "None", ",", "group_by", "=", "[", "]", ",", "page", "=", "None", ",", "page_size", "=", "None", ",", "query_parameters", "=", "None", ")", ":", "return", "self", ".", "count", "(", "filter", "=", "filter", ",", "order_by", "=", "order_by", ",", "group_by", "=", "group_by", ",", "page", "=", "page", ",", "page_size", "=", "page_size", ",", "query_parameters", "=", "query_parameters", ",", "async", "=", "False", ")", "[", "2", "]" ]
Get the total count of objects that can be fetched according to filter Args: filter (string): string that represents a predicate fitler (eg. name == 'x') order_by (string): string that represents an order by clause group_by (string): list of names for grouping page (int): number of the page to load page_size (int): number of results per page Returns: Returns the number of objects found
[ "Get", "the", "total", "count", "of", "objects", "that", "can", "be", "fetched", "according", "to", "filter" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L438-L452
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher._did_count
def _did_count(self, connection): """ Called when count if finished """ self.current_connection = connection response = connection.response count = 0 callback = None if 'X-Nuage-Count' in response.headers: count = int(response.headers['X-Nuage-Count']) if 'remote' in connection.callbacks: callback = connection.callbacks['remote'] if connection.async: if callback: callback(self, self.parent_object, count) self.current_connection.reset() self.current_connection = None else: if connection.response.status_code >= 400 and BambouConfig._should_raise_bambou_http_error: raise BambouHTTPError(connection=connection) return (self, self.parent_object, count)
python
def _did_count(self, connection): """ Called when count if finished """ self.current_connection = connection response = connection.response count = 0 callback = None if 'X-Nuage-Count' in response.headers: count = int(response.headers['X-Nuage-Count']) if 'remote' in connection.callbacks: callback = connection.callbacks['remote'] if connection.async: if callback: callback(self, self.parent_object, count) self.current_connection.reset() self.current_connection = None else: if connection.response.status_code >= 400 and BambouConfig._should_raise_bambou_http_error: raise BambouHTTPError(connection=connection) return (self, self.parent_object, count)
[ "def", "_did_count", "(", "self", ",", "connection", ")", ":", "self", ".", "current_connection", "=", "connection", "response", "=", "connection", ".", "response", "count", "=", "0", "callback", "=", "None", "if", "'X-Nuage-Count'", "in", "response", ".", "headers", ":", "count", "=", "int", "(", "response", ".", "headers", "[", "'X-Nuage-Count'", "]", ")", "if", "'remote'", "in", "connection", ".", "callbacks", ":", "callback", "=", "connection", ".", "callbacks", "[", "'remote'", "]", "if", "connection", ".", "async", ":", "if", "callback", ":", "callback", "(", "self", ",", "self", ".", "parent_object", ",", "count", ")", "self", ".", "current_connection", ".", "reset", "(", ")", "self", ".", "current_connection", "=", "None", "else", ":", "if", "connection", ".", "response", ".", "status_code", ">=", "400", "and", "BambouConfig", ".", "_should_raise_bambou_http_error", ":", "raise", "BambouHTTPError", "(", "connection", "=", "connection", ")", "return", "(", "self", ",", "self", ".", "parent_object", ",", "count", ")" ]
Called when count if finished
[ "Called", "when", "count", "if", "finished" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L454-L479
nuagenetworks/bambou
bambou/nurest_fetcher.py
NURESTFetcher._send_content
def _send_content(self, content, connection): """ Send a content array from the connection """ if connection: if connection.async: callback = connection.callbacks['remote'] if callback: callback(self, self.parent_object, content) self.current_connection.reset() self.current_connection = None else: return (self, self.parent_object, content)
python
def _send_content(self, content, connection): """ Send a content array from the connection """ if connection: if connection.async: callback = connection.callbacks['remote'] if callback: callback(self, self.parent_object, content) self.current_connection.reset() self.current_connection = None else: return (self, self.parent_object, content)
[ "def", "_send_content", "(", "self", ",", "content", ",", "connection", ")", ":", "if", "connection", ":", "if", "connection", ".", "async", ":", "callback", "=", "connection", ".", "callbacks", "[", "'remote'", "]", "if", "callback", ":", "callback", "(", "self", ",", "self", ".", "parent_object", ",", "content", ")", "self", ".", "current_connection", ".", "reset", "(", ")", "self", ".", "current_connection", "=", "None", "else", ":", "return", "(", "self", ",", "self", ".", "parent_object", ",", "content", ")" ]
Send a content array from the connection
[ "Send", "a", "content", "array", "from", "the", "connection" ]
train
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L481-L495