column             dtype            lengths / values
partition          stringclasses    3 values
func_name          stringlengths    1 - 134
docstring          stringlengths    1 - 46.9k
path               stringlengths    4 - 223
original_string    stringlengths    75 - 104k
code               stringlengths    75 - 104k
docstring_tokens   listlengths      1 - 1.97k
repo               stringlengths    7 - 55
language           stringclasses    1 value
url                stringlengths    87 - 315
code_tokens        listlengths      19 - 28.4k
sha                stringlengths    40 - 40
test
cli_auth
Authenticates and then outputs the resulting information. See :py:mod:`swiftly.cli.auth` for context usage information. See :py:class:`CLIAuth` for more information.
swiftly/cli/auth.py
def cli_auth(context): """ Authenticates and then outputs the resulting information. See :py:mod:`swiftly.cli.auth` for context usage information. See :py:class:`CLIAuth` for more information. """ with context.io_manager.with_stdout() as fp: with context.client_manager.with_client() as client: info = [] client.auth() if getattr(client, 'auth_cache_path', None): info.append(('Auth Cache', client.auth_cache_path)) if getattr(client, 'auth_url', None): info.append(('Auth URL', client.auth_url)) if getattr(client, 'auth_user', None): info.append(('Auth User', client.auth_user)) if getattr(client, 'auth_key', None): info.append(('Auth Key', client.auth_key)) if getattr(client, 'auth_tenant', None): info.append(('Auth Tenant', client.auth_tenant)) if getattr(client, 'auth_methods', None): info.append(('Auth Methods', client.auth_methods)) if getattr(client, 'storage_path', None): info.append(('Direct Storage Path', client.storage_path)) if getattr(client, 'cdn_path', None): info.append(('Direct CDN Path', client.cdn_path)) if getattr(client, 'local_path', None): info.append(('Local Path', client.local_path)) if getattr(client, 'regions', None): info.append(('Regions', ' '.join(client.regions))) if getattr(client, 'default_region', None): info.append(('Default Region', client.default_region)) if getattr(client, 'region', None): info.append(('Selected Region', client.region)) if getattr(client, 'snet', None): info.append(('SNet', client.snet)) if getattr(client, 'storage_url', None): info.append(('Storage URL', client.storage_url)) if getattr(client, 'cdn_url', None): info.append(('CDN URL', client.cdn_url)) if getattr(client, 'auth_token', None): info.append(('Auth Token', client.auth_token)) if not info: info.append(( 'No auth information available', 'Maybe no credentials were provided?')) fmt = '%%-%ds %%s\n' % (max(len(t) for t, v in info) + 1) for t, v in info: fp.write(fmt % (t + ':', v)) fp.flush()
def cli_auth(context): """ Authenticates and then outputs the resulting information. See :py:mod:`swiftly.cli.auth` for context usage information. See :py:class:`CLIAuth` for more information. """ with context.io_manager.with_stdout() as fp: with context.client_manager.with_client() as client: info = [] client.auth() if getattr(client, 'auth_cache_path', None): info.append(('Auth Cache', client.auth_cache_path)) if getattr(client, 'auth_url', None): info.append(('Auth URL', client.auth_url)) if getattr(client, 'auth_user', None): info.append(('Auth User', client.auth_user)) if getattr(client, 'auth_key', None): info.append(('Auth Key', client.auth_key)) if getattr(client, 'auth_tenant', None): info.append(('Auth Tenant', client.auth_tenant)) if getattr(client, 'auth_methods', None): info.append(('Auth Methods', client.auth_methods)) if getattr(client, 'storage_path', None): info.append(('Direct Storage Path', client.storage_path)) if getattr(client, 'cdn_path', None): info.append(('Direct CDN Path', client.cdn_path)) if getattr(client, 'local_path', None): info.append(('Local Path', client.local_path)) if getattr(client, 'regions', None): info.append(('Regions', ' '.join(client.regions))) if getattr(client, 'default_region', None): info.append(('Default Region', client.default_region)) if getattr(client, 'region', None): info.append(('Selected Region', client.region)) if getattr(client, 'snet', None): info.append(('SNet', client.snet)) if getattr(client, 'storage_url', None): info.append(('Storage URL', client.storage_url)) if getattr(client, 'cdn_url', None): info.append(('CDN URL', client.cdn_url)) if getattr(client, 'auth_token', None): info.append(('Auth Token', client.auth_token)) if not info: info.append(( 'No auth information available', 'Maybe no credentials were provided?')) fmt = '%%-%ds %%s\n' % (max(len(t) for t, v in info) + 1) for t, v in info: fp.write(fmt % (t + ':', v)) fp.flush()
[ "Authenticates", "and", "then", "outputs", "the", "resulting", "information", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/auth.py#L32-L83
[ "def", "cli_auth", "(", "context", ")", ":", "with", "context", ".", "io_manager", ".", "with_stdout", "(", ")", "as", "fp", ":", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "info", "=", "[", "]", "client", ".", "auth", "(", ")", "if", "getattr", "(", "client", ",", "'auth_cache_path'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth Cache'", ",", "client", ".", "auth_cache_path", ")", ")", "if", "getattr", "(", "client", ",", "'auth_url'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth URL'", ",", "client", ".", "auth_url", ")", ")", "if", "getattr", "(", "client", ",", "'auth_user'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth User'", ",", "client", ".", "auth_user", ")", ")", "if", "getattr", "(", "client", ",", "'auth_key'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth Key'", ",", "client", ".", "auth_key", ")", ")", "if", "getattr", "(", "client", ",", "'auth_tenant'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth Tenant'", ",", "client", ".", "auth_tenant", ")", ")", "if", "getattr", "(", "client", ",", "'auth_methods'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth Methods'", ",", "client", ".", "auth_methods", ")", ")", "if", "getattr", "(", "client", ",", "'storage_path'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Direct Storage Path'", ",", "client", ".", "storage_path", ")", ")", "if", "getattr", "(", "client", ",", "'cdn_path'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Direct CDN Path'", ",", "client", ".", "cdn_path", ")", ")", "if", "getattr", "(", "client", ",", "'local_path'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Local Path'", ",", "client", ".", "local_path", ")", ")", "if", "getattr", "(", "client", ",", "'regions'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Regions'", ",", "' '", ".", "join", "(", "client", ".", "regions", ")", ")", ")", "if", "getattr", "(", "client", ",", "'default_region'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Default Region'", ",", "client", ".", "default_region", ")", ")", "if", "getattr", "(", "client", ",", "'region'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Selected Region'", ",", "client", ".", "region", ")", ")", "if", "getattr", "(", "client", ",", "'snet'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'SNet'", ",", "client", ".", "snet", ")", ")", "if", "getattr", "(", "client", ",", "'storage_url'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Storage URL'", ",", "client", ".", "storage_url", ")", ")", "if", "getattr", "(", "client", ",", "'cdn_url'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'CDN URL'", ",", "client", ".", "cdn_url", ")", ")", "if", "getattr", "(", "client", ",", "'auth_token'", ",", "None", ")", ":", "info", ".", "append", "(", "(", "'Auth Token'", ",", "client", ".", "auth_token", ")", ")", "if", "not", "info", ":", "info", ".", "append", "(", "(", "'No auth information available'", ",", "'Maybe no credentials were provided?'", ")", ")", "fmt", "=", "'%%-%ds %%s\\n'", "%", "(", "max", "(", "len", "(", "t", ")", "for", "t", ",", "v", "in", "info", ")", "+", "1", ")", "for", "t", ",", "v", "in", "info", ":", "fp", ".", "write", "(", "fmt", "%", "(", "t", "+", "':'", ",", "v", ")", ")", "fp", ".", "flush", "(", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
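Note on the cli_auth row above: the only formatting subtlety is the dynamically built format string, where '%%-%ds %%s\n' expands to '%-<width>s %s\n' and the width is the longest label plus one for its trailing colon. A minimal standalone sketch of that trick, using made-up labels rather than real client attributes:

import sys

# Hypothetical (label, value) pairs standing in for the attributes cli_auth
# collects; only the column-alignment trick is demonstrated here.
info = [
    ('Auth URL', 'https://auth.example.com/v1.0'),
    ('Auth User', 'demo:demo'),
    ('Storage URL', 'https://storage.example.com/v1/AUTH_demo'),
]

# '%%-%ds %%s\n' % 12 -> '%-12s %s\n': a left-justified label column sized
# to the longest label plus its trailing colon.
fmt = '%%-%ds %%s\n' % (max(len(t) for t, v in info) + 1)
for t, v in info:
    sys.stdout.write(fmt % (t + ':', v))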
test
generate_temp_url
Returns a TempURL good for the given request method, url, and number of seconds from now, signed by the given key.
swiftly/client/utils.py
def generate_temp_url(method, url, seconds, key): """ Returns a TempURL good for the given request method, url, and number of seconds from now, signed by the given key. """ method = method.upper() base_url, object_path = url.split('/v1/') object_path = '/v1/' + object_path expires = int(time.time() + seconds) hmac_body = '%s\n%s\n%s' % (method, expires, object_path) sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest() return '%s%s?temp_url_sig=%s&temp_url_expires=%s' % ( base_url, object_path, sig, expires)
def generate_temp_url(method, url, seconds, key): """ Returns a TempURL good for the given request method, url, and number of seconds from now, signed by the given key. """ method = method.upper() base_url, object_path = url.split('/v1/') object_path = '/v1/' + object_path expires = int(time.time() + seconds) hmac_body = '%s\n%s\n%s' % (method, expires, object_path) sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest() return '%s%s?temp_url_sig=%s&temp_url_expires=%s' % ( base_url, object_path, sig, expires)
[ "Returns", "a", "TempURL", "good", "for", "the", "given", "request", "method", "url", "and", "number", "of", "seconds", "from", "now", "signed", "by", "the", "given", "key", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/utils.py#L26-L38
[ "def", "generate_temp_url", "(", "method", ",", "url", ",", "seconds", ",", "key", ")", ":", "method", "=", "method", ".", "upper", "(", ")", "base_url", ",", "object_path", "=", "url", ".", "split", "(", "'/v1/'", ")", "object_path", "=", "'/v1/'", "+", "object_path", "expires", "=", "int", "(", "time", ".", "time", "(", ")", "+", "seconds", ")", "hmac_body", "=", "'%s\\n%s\\n%s'", "%", "(", "method", ",", "expires", ",", "object_path", ")", "sig", "=", "hmac", ".", "new", "(", "key", ",", "hmac_body", ",", "hashlib", ".", "sha1", ")", ".", "hexdigest", "(", ")", "return", "'%s%s?temp_url_sig=%s&temp_url_expires=%s'", "%", "(", "base_url", ",", "object_path", ",", "sig", ",", "expires", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
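Note on the generate_temp_url row above: it follows the classic Swift TempURL scheme, an HMAC-SHA1 over "METHOD\nexpires\n/v1/...path". A standalone sketch of the same computation; the storage URL and key are placeholders, and the HMAC inputs are encoded to bytes so the snippet also runs on Python 3 (the swiftly code targets Python 2 str):

import hashlib
import hmac
import time

key = 'my-temp-url-key'                      # placeholder account key
url = 'https://storage.example.com/v1/AUTH_demo/container/object'
seconds = 3600

# Same split-and-sign steps as the row above, for a GET request.
base_url, object_path = url.split('/v1/')
object_path = '/v1/' + object_path
expires = int(time.time() + seconds)
hmac_body = 'GET\n%s\n%s' % (expires, object_path)
sig = hmac.new(key.encode('utf8'), hmac_body.encode('utf8'),
               hashlib.sha1).hexdigest()
print('%s%s?temp_url_sig=%s&temp_url_expires=%s'
      % (base_url, object_path, sig, expires))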
test
quote
Much like parse.quote in that it returns a URL encoded string for the given value, protecting the safe characters; but this version also ensures the value is UTF-8 encoded.
swiftly/client/utils.py
def quote(value, safe='/:'): """ Much like parse.quote in that it returns a URL encoded string for the given value, protecting the safe characters; but this version also ensures the value is UTF-8 encoded. """ if isinstance(value, six.text_type): value = value.encode('utf8') elif not isinstance(value, six.string_types): value = str(value) return parse.quote(value, safe)
def quote(value, safe='/:'): """ Much like parse.quote in that it returns a URL encoded string for the given value, protecting the safe characters; but this version also ensures the value is UTF-8 encoded. """ if isinstance(value, six.text_type): value = value.encode('utf8') elif not isinstance(value, six.string_types): value = str(value) return parse.quote(value, safe)
[ "Much", "like", "parse", ".", "quote", "in", "that", "it", "returns", "a", "URL", "encoded", "string", "for", "the", "given", "value", "protecting", "the", "safe", "characters", ";", "but", "this", "version", "also", "ensures", "the", "value", "is", "UTF", "-", "8", "encoded", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/utils.py#L57-L67
[ "def", "quote", "(", "value", ",", "safe", "=", "'/:'", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "text_type", ")", ":", "value", "=", "value", ".", "encode", "(", "'utf8'", ")", "elif", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "value", "=", "str", "(", "value", ")", "return", "parse", ".", "quote", "(", "value", ",", "safe", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
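Note on the quote row above: a rough Python 3 rendering of the helper, assuming six.text_type maps to str and parse is urllib.parse; the sample value is made up:

from urllib import parse

def quote(value, safe='/:'):
    # Text becomes UTF-8 bytes, non-strings are stringified, then the value
    # is URL-encoded while leaving '/' and ':' alone.
    if isinstance(value, str):
        value = value.encode('utf8')
    elif not isinstance(value, (str, bytes)):
        value = str(value)
    return parse.quote(value, safe)

print(quote(u'container/caf\u00e9 menu.txt'))
# -> container/caf%C3%A9%20menu.txt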
test
headers_to_dict
Converts a sequence of (name, value) tuples into a dict where if a given name occurs more than once its value in the dict will be a list of values.
swiftly/client/utils.py
def headers_to_dict(headers): """ Converts a sequence of (name, value) tuples into a dict where if a given name occurs more than once its value in the dict will be a list of values. """ hdrs = {} for h, v in headers: h = h.lower() if h in hdrs: if isinstance(hdrs[h], list): hdrs[h].append(v) else: hdrs[h] = [hdrs[h], v] else: hdrs[h] = v return hdrs
def headers_to_dict(headers): """ Converts a sequence of (name, value) tuples into a dict where if a given name occurs more than once its value in the dict will be a list of values. """ hdrs = {} for h, v in headers: h = h.lower() if h in hdrs: if isinstance(hdrs[h], list): hdrs[h].append(v) else: hdrs[h] = [hdrs[h], v] else: hdrs[h] = v return hdrs
[ "Converts", "a", "sequence", "of", "(", "name", "value", ")", "tuples", "into", "a", "dict", "where", "if", "a", "given", "name", "occurs", "more", "than", "once", "its", "value", "in", "the", "dict", "will", "be", "a", "list", "of", "values", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/utils.py#L70-L86
[ "def", "headers_to_dict", "(", "headers", ")", ":", "hdrs", "=", "{", "}", "for", "h", ",", "v", "in", "headers", ":", "h", "=", "h", ".", "lower", "(", ")", "if", "h", "in", "hdrs", ":", "if", "isinstance", "(", "hdrs", "[", "h", "]", ",", "list", ")", ":", "hdrs", "[", "h", "]", ".", "append", "(", "v", ")", "else", ":", "hdrs", "[", "h", "]", "=", "[", "hdrs", "[", "h", "]", ",", "v", "]", "else", ":", "hdrs", "[", "h", "]", "=", "v", "return", "hdrs" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
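Note on the headers_to_dict row above: a quick usage sketch. The import path comes from the row's path field (swiftly/client/utils.py) and assumes swiftly is installed; the header values themselves are invented:

from swiftly.client.utils import headers_to_dict

# Repeated header names collapse into a list; unique names stay scalar, and
# all names are lower-cased.
print(headers_to_dict([
    ('Content-Length', '12'),
    ('X-Object-Meta-Color', 'red'),
    ('X-Object-Meta-Color', 'blue'),
]))
# -> {'content-length': '12', 'x-object-meta-color': ['red', 'blue']}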
test
cli_fordo
Issues commands for each item in an account or container listing. See :py:mod:`swiftly.cli.fordo` for context usage information. See :py:class:`CLIForDo` for more information.
swiftly/cli/fordo.py
def cli_fordo(context, path=None): """ Issues commands for each item in an account or container listing. See :py:mod:`swiftly.cli.fordo` for context usage information. See :py:class:`CLIForDo` for more information. """ path = path.lstrip('/') if path else None if path and '/' in path: raise ReturnCode( 'path must be an empty string or a container name; was %r' % path) limit = context.query.get('limit') delimiter = context.query.get('delimiter') prefix = context.query.get('prefix') marker = context.query.get('marker') end_marker = context.query.get('end_marker') conc = Concurrency(context.concurrency) while True: with context.client_manager.with_client() as client: if not path: status, reason, headers, contents = client.get_account( headers=context.headers, prefix=prefix, delimiter=delimiter, marker=marker, end_marker=end_marker, limit=limit, query=context.query, cdn=context.cdn) else: status, reason, headers, contents = client.get_container( path, headers=context.headers, prefix=prefix, delimiter=delimiter, marker=marker, end_marker=end_marker, limit=limit, query=context.query, cdn=context.cdn) if status // 100 != 2: if status == 404 and context.ignore_404: return if hasattr(contents, 'read'): contents.read() if not path: raise ReturnCode( 'listing account: %s %s' % (status, reason)) else: raise ReturnCode( 'listing container %r: %s %s' % (path, status, reason)) if not contents: break for item in contents: name = (path + '/' if path else '') + item.get( 'name', item.get('subdir')) args = list(context.remaining_args) try: index = args.index('<item>') except ValueError: raise ReturnCode( 'No "<item>" designation found in the "do" clause.') args[index] = name for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(name, _cli_call, context, name, args) marker = contents[-1]['name'] if limit: break conc.join() for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value
def cli_fordo(context, path=None): """ Issues commands for each item in an account or container listing. See :py:mod:`swiftly.cli.fordo` for context usage information. See :py:class:`CLIForDo` for more information. """ path = path.lstrip('/') if path else None if path and '/' in path: raise ReturnCode( 'path must be an empty string or a container name; was %r' % path) limit = context.query.get('limit') delimiter = context.query.get('delimiter') prefix = context.query.get('prefix') marker = context.query.get('marker') end_marker = context.query.get('end_marker') conc = Concurrency(context.concurrency) while True: with context.client_manager.with_client() as client: if not path: status, reason, headers, contents = client.get_account( headers=context.headers, prefix=prefix, delimiter=delimiter, marker=marker, end_marker=end_marker, limit=limit, query=context.query, cdn=context.cdn) else: status, reason, headers, contents = client.get_container( path, headers=context.headers, prefix=prefix, delimiter=delimiter, marker=marker, end_marker=end_marker, limit=limit, query=context.query, cdn=context.cdn) if status // 100 != 2: if status == 404 and context.ignore_404: return if hasattr(contents, 'read'): contents.read() if not path: raise ReturnCode( 'listing account: %s %s' % (status, reason)) else: raise ReturnCode( 'listing container %r: %s %s' % (path, status, reason)) if not contents: break for item in contents: name = (path + '/' if path else '') + item.get( 'name', item.get('subdir')) args = list(context.remaining_args) try: index = args.index('<item>') except ValueError: raise ReturnCode( 'No "<item>" designation found in the "do" clause.') args[index] = name for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(name, _cli_call, context, name, args) marker = contents[-1]['name'] if limit: break conc.join() for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value
[ "Issues", "commands", "for", "each", "item", "in", "an", "account", "or", "container", "listing", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/fordo.py#L71-L138
[ "def", "cli_fordo", "(", "context", ",", "path", "=", "None", ")", ":", "path", "=", "path", ".", "lstrip", "(", "'/'", ")", "if", "path", "else", "None", "if", "path", "and", "'/'", "in", "path", ":", "raise", "ReturnCode", "(", "'path must be an empty string or a container name; was %r'", "%", "path", ")", "limit", "=", "context", ".", "query", ".", "get", "(", "'limit'", ")", "delimiter", "=", "context", ".", "query", ".", "get", "(", "'delimiter'", ")", "prefix", "=", "context", ".", "query", ".", "get", "(", "'prefix'", ")", "marker", "=", "context", ".", "query", ".", "get", "(", "'marker'", ")", "end_marker", "=", "context", ".", "query", ".", "get", "(", "'end_marker'", ")", "conc", "=", "Concurrency", "(", "context", ".", "concurrency", ")", "while", "True", ":", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "if", "not", "path", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "get_account", "(", "headers", "=", "context", ".", "headers", ",", "prefix", "=", "prefix", ",", "delimiter", "=", "delimiter", ",", "marker", "=", "marker", ",", "end_marker", "=", "end_marker", ",", "limit", "=", "limit", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "else", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "get_container", "(", "path", ",", "headers", "=", "context", ".", "headers", ",", "prefix", "=", "prefix", ",", "delimiter", "=", "delimiter", ",", "marker", "=", "marker", ",", "end_marker", "=", "end_marker", ",", "limit", "=", "limit", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "if", "status", "//", "100", "!=", "2", ":", "if", "status", "==", "404", "and", "context", ".", "ignore_404", ":", "return", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", ".", "read", "(", ")", "if", "not", "path", ":", "raise", "ReturnCode", "(", "'listing account: %s %s'", "%", "(", "status", ",", "reason", ")", ")", "else", ":", "raise", "ReturnCode", "(", "'listing container %r: %s %s'", "%", "(", "path", ",", "status", ",", "reason", ")", ")", "if", "not", "contents", ":", "break", "for", "item", "in", "contents", ":", "name", "=", "(", "path", "+", "'/'", "if", "path", "else", "''", ")", "+", "item", ".", "get", "(", "'name'", ",", "item", ".", "get", "(", "'subdir'", ")", ")", "args", "=", "list", "(", "context", ".", "remaining_args", ")", "try", ":", "index", "=", "args", ".", "index", "(", "'<item>'", ")", "except", "ValueError", ":", "raise", "ReturnCode", "(", "'No \"<item>\" designation found in the \"do\" clause.'", ")", "args", "[", "index", "]", "=", "name", "for", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", "in", "six", ".", "itervalues", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "conc", ".", "join", "(", ")", "raise", "exc_value", "conc", ".", "spawn", "(", "name", ",", "_cli_call", ",", "context", ",", "name", ",", "args", ")", "marker", "=", "contents", "[", "-", "1", "]", "[", "'name'", "]", "if", "limit", ":", "break", "conc", ".", "join", "(", ")", "for", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", "in", "six", ".", "itervalues", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "conc", ".", "join", "(", ")", "raise", "exc_value" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
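Note on the cli_fordo row above: the outer loop pages through a listing by feeding the last name returned back in as the next marker and stopping on an empty page. A self-contained sketch of that pattern, with a fake listing function standing in for client.get_account/get_container:

ITEMS = ['img001', 'img002', 'img003', 'img004', 'img005']   # fake listing

def list_page(marker=None, page_size=2):
    # Stand-in for a Swift listing request: everything after `marker`,
    # capped at `page_size` entries.
    start = ITEMS.index(marker) + 1 if marker else 0
    return ITEMS[start:start + page_size]

marker = None
while True:
    contents = list_page(marker)
    if not contents:
        break                       # empty page: the listing is exhausted
    for name in contents:
        print('would spawn the "do" clause for', name)
    marker = contents[-1]           # resume after the last item seen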
test
ClientManager.get_client
Obtains a client for use, whether an existing unused client or a brand new one if none are available.
swiftly/client/manager.py
def get_client(self): """ Obtains a client for use, whether an existing unused client or a brand new one if none are available. """ client = None try: client = self.clients.get(block=False) except queue.Empty: pass if not client: self.client_id += 1 kwargs = dict(self.kwargs) kwargs['verbose_id'] = kwargs.get( 'verbose_id', '') + str(self.client_id) client = self.client_class(*self.args, **kwargs) return client
def get_client(self): """ Obtains a client for use, whether an existing unused client or a brand new one if none are available. """ client = None try: client = self.clients.get(block=False) except queue.Empty: pass if not client: self.client_id += 1 kwargs = dict(self.kwargs) kwargs['verbose_id'] = kwargs.get( 'verbose_id', '') + str(self.client_id) client = self.client_class(*self.args, **kwargs) return client
[ "Obtains", "a", "client", "for", "use", "whether", "an", "existing", "unused", "client", "or", "a", "brand", "new", "one", "if", "none", "are", "available", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/manager.py#L41-L57
[ "def", "get_client", "(", "self", ")", ":", "client", "=", "None", "try", ":", "client", "=", "self", ".", "clients", ".", "get", "(", "block", "=", "False", ")", "except", "queue", ".", "Empty", ":", "pass", "if", "not", "client", ":", "self", ".", "client_id", "+=", "1", "kwargs", "=", "dict", "(", "self", ".", "kwargs", ")", "kwargs", "[", "'verbose_id'", "]", "=", "kwargs", ".", "get", "(", "'verbose_id'", ",", "''", ")", "+", "str", "(", "self", ".", "client_id", ")", "client", "=", "self", ".", "client_class", "(", "*", "self", ".", "args", ",", "*", "*", "kwargs", ")", "return", "client" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
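Note on the ClientManager.get_client row above: it is the take-from-pool-or-construct half of a simple client pool. A condensed, self-contained sketch of the pattern with a placeholder client class; the put_client method here is added purely for illustration and is not taken from the row:

import queue   # stand-in for whatever queue module the real manager imports


class FakeClient(object):
    """Placeholder for the real Swift client class."""

    def __init__(self, verbose_id=''):
        self.verbose_id = verbose_id


class PoolSketch(object):
    def __init__(self, client_class, **kwargs):
        self.client_class = client_class
        self.kwargs = kwargs
        self.clients = queue.Queue()
        self.client_id = 0

    def get_client(self):
        try:
            return self.clients.get(block=False)    # reuse an idle client
        except queue.Empty:
            pass
        # None idle: build a new one with a unique verbose_id suffix, as in
        # the row above.
        self.client_id += 1
        kwargs = dict(self.kwargs)
        kwargs['verbose_id'] = kwargs.get('verbose_id', '') + str(self.client_id)
        return self.client_class(**kwargs)

    def put_client(self, client):
        self.clients.put(client)                    # hand it back for reuse


pool = PoolSketch(FakeClient, verbose_id='cli-')
c = pool.get_client()
print(c.verbose_id)                 # cli-1
pool.put_client(c)
print(pool.get_client() is c)       # True: the idle client is reused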
test
cli_head
Performs a HEAD on the item (account, container, or object). See :py:mod:`swiftly.cli.head` for context usage information. See :py:class:`CLIHead` for more information.
swiftly/cli/head.py
def cli_head(context, path=None): """ Performs a HEAD on the item (account, container, or object). See :py:mod:`swiftly.cli.head` for context usage information. See :py:class:`CLIHead` for more information. """ path = path.lstrip('/') if path else None with context.client_manager.with_client() as client: if not path: status, reason, headers, contents = client.head_account( headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_account_headers elif '/' not in path.rstrip('/'): path = path.rstrip('/') status, reason, headers, contents = client.head_container( path, headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_container_headers else: status, reason, headers, contents = client.head_object( *path.split('/', 1), headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_object_headers if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: if status == 404 and context.ignore_404: return if not path: raise ReturnCode('heading account: %s %s' % (status, reason)) elif '/' not in path: raise ReturnCode( 'heading container %r: %s %s' % (path, status, reason)) else: raise ReturnCode( 'heading object %r: %s %s' % (path, status, reason)) else: with context.io_manager.with_stdout() as fp: context.write_headers(fp, headers, mute)
def cli_head(context, path=None): """ Performs a HEAD on the item (account, container, or object). See :py:mod:`swiftly.cli.head` for context usage information. See :py:class:`CLIHead` for more information. """ path = path.lstrip('/') if path else None with context.client_manager.with_client() as client: if not path: status, reason, headers, contents = client.head_account( headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_account_headers elif '/' not in path.rstrip('/'): path = path.rstrip('/') status, reason, headers, contents = client.head_container( path, headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_container_headers else: status, reason, headers, contents = client.head_object( *path.split('/', 1), headers=context.headers, query=context.query, cdn=context.cdn) mute = context.muted_object_headers if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: if status == 404 and context.ignore_404: return if not path: raise ReturnCode('heading account: %s %s' % (status, reason)) elif '/' not in path: raise ReturnCode( 'heading container %r: %s %s' % (path, status, reason)) else: raise ReturnCode( 'heading object %r: %s %s' % (path, status, reason)) else: with context.io_manager.with_stdout() as fp: context.write_headers(fp, headers, mute)
[ "Performs", "a", "HEAD", "on", "the", "item", "(", "account", "container", "or", "object", ")", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/head.py#L42-L82
[ "def", "cli_head", "(", "context", ",", "path", "=", "None", ")", ":", "path", "=", "path", ".", "lstrip", "(", "'/'", ")", "if", "path", "else", "None", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "if", "not", "path", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_account", "(", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "mute", "=", "context", ".", "muted_account_headers", "elif", "'/'", "not", "in", "path", ".", "rstrip", "(", "'/'", ")", ":", "path", "=", "path", ".", "rstrip", "(", "'/'", ")", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_container", "(", "path", ",", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "mute", "=", "context", ".", "muted_container_headers", "else", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_object", "(", "*", "path", ".", "split", "(", "'/'", ",", "1", ")", ",", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "mute", "=", "context", ".", "muted_object_headers", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", "=", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "if", "status", "==", "404", "and", "context", ".", "ignore_404", ":", "return", "if", "not", "path", ":", "raise", "ReturnCode", "(", "'heading account: %s %s'", "%", "(", "status", ",", "reason", ")", ")", "elif", "'/'", "not", "in", "path", ":", "raise", "ReturnCode", "(", "'heading container %r: %s %s'", "%", "(", "path", ",", "status", ",", "reason", ")", ")", "else", ":", "raise", "ReturnCode", "(", "'heading object %r: %s %s'", "%", "(", "path", ",", "status", ",", "reason", ")", ")", "else", ":", "with", "context", ".", "io_manager", ".", "with_stdout", "(", ")", "as", "fp", ":", "context", ".", "write_headers", "(", "fp", ",", "headers", ",", "mute", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
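Note on the cli_head row above: it decides what to HEAD purely from the shape of the path. An empty path means the account, a single segment means a container, anything deeper means an object. A tiny sketch of just that dispatch, with made-up paths:

def head_target(path):
    # Mirrors the path handling in the cli_head row above.
    path = path.lstrip('/') if path else None
    if not path:
        return 'account'
    if '/' not in path.rstrip('/'):
        return 'container %r' % path.rstrip('/')
    container, obj = path.split('/', 1)
    return 'object %r in container %r' % (obj, container)

for p in (None, '', 'photos', 'photos/', 'photos/2024/beach.jpg'):
    print(repr(p), '->', head_target(p))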
test
aes_encrypt
Generator that encrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the encryption key. :param stdin: Where to read the contents from. :param preamble: str to yield initially useful for providing a hint for future readers as to the algorithm in use. :param chunk_size: Largest amount to read at once. :param content_length: The number of bytes to read from stdin. None or < 0 indicates reading until EOF.
swiftly/dencrypt.py
def aes_encrypt(key, stdin, preamble=None, chunk_size=65536, content_length=None): """ Generator that encrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the encryption key. :param stdin: Where to read the contents from. :param preamble: str to yield initially useful for providing a hint for future readers as to the algorithm in use. :param chunk_size: Largest amount to read at once. :param content_length: The number of bytes to read from stdin. None or < 0 indicates reading until EOF. """ if not AES256CBC_Support: raise Exception( 'AES256CBC not supported; likely pycrypto is not installed') if preamble: yield preamble # Always use 256-bit key key = hashlib.sha256(key).digest() # At least 16 and a multiple of 16 chunk_size = max(16, chunk_size >> 4 << 4) iv = Crypto.Random.new().read(16) yield iv encryptor = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_CBC, iv) reading = True left = None if content_length is not None and content_length >= 0: left = content_length while reading: size = chunk_size if left is not None and size > left: size = left chunk = stdin.read(size) if not chunk: if left is not None and left > 0: raise IOError('Early EOF from input') # Indicates how many usable bytes in last block yield encryptor.encrypt('\x00' * 16) break if left is not None: left -= len(chunk) if left <= 0: reading = False block = chunk trailing = len(block) % 16 while trailing: size = 16 - trailing if left is not None and size > left: size = left chunk = stdin.read(size) if not chunk: if left is not None and left > 0: raise IOError('Early EOF from input') reading = False # Indicates how many usable bytes in last block chunk = chr(trailing) * (16 - trailing) elif left is not None: left -= len(chunk) if left <= 0: reading = False block += chunk trailing = len(block) % 16 yield encryptor.encrypt(block)
def aes_encrypt(key, stdin, preamble=None, chunk_size=65536, content_length=None): """ Generator that encrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the encryption key. :param stdin: Where to read the contents from. :param preamble: str to yield initially useful for providing a hint for future readers as to the algorithm in use. :param chunk_size: Largest amount to read at once. :param content_length: The number of bytes to read from stdin. None or < 0 indicates reading until EOF. """ if not AES256CBC_Support: raise Exception( 'AES256CBC not supported; likely pycrypto is not installed') if preamble: yield preamble # Always use 256-bit key key = hashlib.sha256(key).digest() # At least 16 and a multiple of 16 chunk_size = max(16, chunk_size >> 4 << 4) iv = Crypto.Random.new().read(16) yield iv encryptor = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_CBC, iv) reading = True left = None if content_length is not None and content_length >= 0: left = content_length while reading: size = chunk_size if left is not None and size > left: size = left chunk = stdin.read(size) if not chunk: if left is not None and left > 0: raise IOError('Early EOF from input') # Indicates how many usable bytes in last block yield encryptor.encrypt('\x00' * 16) break if left is not None: left -= len(chunk) if left <= 0: reading = False block = chunk trailing = len(block) % 16 while trailing: size = 16 - trailing if left is not None and size > left: size = left chunk = stdin.read(size) if not chunk: if left is not None and left > 0: raise IOError('Early EOF from input') reading = False # Indicates how many usable bytes in last block chunk = chr(trailing) * (16 - trailing) elif left is not None: left -= len(chunk) if left <= 0: reading = False block += chunk trailing = len(block) % 16 yield encryptor.encrypt(block)
[ "Generator", "that", "encrypts", "a", "content", "stream", "using", "AES", "256", "in", "CBC", "mode", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/dencrypt.py#L35-L99
[ "def", "aes_encrypt", "(", "key", ",", "stdin", ",", "preamble", "=", "None", ",", "chunk_size", "=", "65536", ",", "content_length", "=", "None", ")", ":", "if", "not", "AES256CBC_Support", ":", "raise", "Exception", "(", "'AES256CBC not supported; likely pycrypto is not installed'", ")", "if", "preamble", ":", "yield", "preamble", "# Always use 256-bit key", "key", "=", "hashlib", ".", "sha256", "(", "key", ")", ".", "digest", "(", ")", "# At least 16 and a multiple of 16", "chunk_size", "=", "max", "(", "16", ",", "chunk_size", ">>", "4", "<<", "4", ")", "iv", "=", "Crypto", ".", "Random", ".", "new", "(", ")", ".", "read", "(", "16", ")", "yield", "iv", "encryptor", "=", "Crypto", ".", "Cipher", ".", "AES", ".", "new", "(", "key", ",", "Crypto", ".", "Cipher", ".", "AES", ".", "MODE_CBC", ",", "iv", ")", "reading", "=", "True", "left", "=", "None", "if", "content_length", "is", "not", "None", "and", "content_length", ">=", "0", ":", "left", "=", "content_length", "while", "reading", ":", "size", "=", "chunk_size", "if", "left", "is", "not", "None", "and", "size", ">", "left", ":", "size", "=", "left", "chunk", "=", "stdin", ".", "read", "(", "size", ")", "if", "not", "chunk", ":", "if", "left", "is", "not", "None", "and", "left", ">", "0", ":", "raise", "IOError", "(", "'Early EOF from input'", ")", "# Indicates how many usable bytes in last block", "yield", "encryptor", ".", "encrypt", "(", "'\\x00'", "*", "16", ")", "break", "if", "left", "is", "not", "None", ":", "left", "-=", "len", "(", "chunk", ")", "if", "left", "<=", "0", ":", "reading", "=", "False", "block", "=", "chunk", "trailing", "=", "len", "(", "block", ")", "%", "16", "while", "trailing", ":", "size", "=", "16", "-", "trailing", "if", "left", "is", "not", "None", "and", "size", ">", "left", ":", "size", "=", "left", "chunk", "=", "stdin", ".", "read", "(", "size", ")", "if", "not", "chunk", ":", "if", "left", "is", "not", "None", "and", "left", ">", "0", ":", "raise", "IOError", "(", "'Early EOF from input'", ")", "reading", "=", "False", "# Indicates how many usable bytes in last block", "chunk", "=", "chr", "(", "trailing", ")", "*", "(", "16", "-", "trailing", ")", "elif", "left", "is", "not", "None", ":", "left", "-=", "len", "(", "chunk", ")", "if", "left", "<=", "0", ":", "reading", "=", "False", "block", "+=", "chunk", "trailing", "=", "len", "(", "block", ")", "%", "16", "yield", "encryptor", ".", "encrypt", "(", "block", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
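One small detail of the aes_encrypt row worth calling out: chunk_size = max(16, chunk_size >> 4 << 4) rounds the requested read size down to a multiple of the 16-byte AES block, never below 16. For example:

# Round down to a multiple of 16 with a floor of 16, as aes_encrypt and
# aes_decrypt both do before reading from the stream.
for requested in (1, 16, 100, 65536, 65540):
    print(requested, '->', max(16, requested >> 4 << 4))
# 1 -> 16, 16 -> 16, 100 -> 96, 65536 -> 65536, 65540 -> 65536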
test
aes_decrypt
Generator that decrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the decryption key. :param stdin: Where to read the encrypted data from. :param chunk_size: Largest amount to read at once.
swiftly/dencrypt.py
def aes_decrypt(key, stdin, chunk_size=65536): """ Generator that decrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the decryption key. :param stdin: Where to read the encrypted data from. :param chunk_size: Largest amount to read at once. """ if not AES256CBC_Support: raise Exception( 'AES256CBC not supported; likely pycrypto is not installed') # Always use 256-bit key key = hashlib.sha256(key).digest() # At least 16 and a multiple of 16 chunk_size = max(16, chunk_size >> 4 << 4) iv = stdin.read(16) while len(iv) < 16: chunk = stdin.read(16 - len(iv)) if not chunk: raise IOError('EOF reading IV') decryptor = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_CBC, iv) data = '' while True: chunk = stdin.read(chunk_size) if not chunk: if len(data) != 16: raise IOError('EOF reading encrypted stream') data = decryptor.decrypt(data) trailing = ord(data[-1]) if trailing > 15: raise IOError( 'EOF reading encrypted stream or trailing value corrupted ' '%s' % trailing) yield data[:trailing] break data += chunk if len(data) > 16: # Always leave at least one byte pending trailing = (len(data) % 16) or 16 yield decryptor.decrypt(data[:-trailing]) data = data[-trailing:]
def aes_decrypt(key, stdin, chunk_size=65536): """ Generator that decrypts a content stream using AES 256 in CBC mode. :param key: Any string to use as the decryption key. :param stdin: Where to read the encrypted data from. :param chunk_size: Largest amount to read at once. """ if not AES256CBC_Support: raise Exception( 'AES256CBC not supported; likely pycrypto is not installed') # Always use 256-bit key key = hashlib.sha256(key).digest() # At least 16 and a multiple of 16 chunk_size = max(16, chunk_size >> 4 << 4) iv = stdin.read(16) while len(iv) < 16: chunk = stdin.read(16 - len(iv)) if not chunk: raise IOError('EOF reading IV') decryptor = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_CBC, iv) data = '' while True: chunk = stdin.read(chunk_size) if not chunk: if len(data) != 16: raise IOError('EOF reading encrypted stream') data = decryptor.decrypt(data) trailing = ord(data[-1]) if trailing > 15: raise IOError( 'EOF reading encrypted stream or trailing value corrupted ' '%s' % trailing) yield data[:trailing] break data += chunk if len(data) > 16: # Always leave at least one byte pending trailing = (len(data) % 16) or 16 yield decryptor.decrypt(data[:-trailing]) data = data[-trailing:]
[ "Generator", "that", "decrypts", "a", "content", "stream", "using", "AES", "256", "in", "CBC", "mode", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/dencrypt.py#L102-L143
[ "def", "aes_decrypt", "(", "key", ",", "stdin", ",", "chunk_size", "=", "65536", ")", ":", "if", "not", "AES256CBC_Support", ":", "raise", "Exception", "(", "'AES256CBC not supported; likely pycrypto is not installed'", ")", "# Always use 256-bit key", "key", "=", "hashlib", ".", "sha256", "(", "key", ")", ".", "digest", "(", ")", "# At least 16 and a multiple of 16", "chunk_size", "=", "max", "(", "16", ",", "chunk_size", ">>", "4", "<<", "4", ")", "iv", "=", "stdin", ".", "read", "(", "16", ")", "while", "len", "(", "iv", ")", "<", "16", ":", "chunk", "=", "stdin", ".", "read", "(", "16", "-", "len", "(", "iv", ")", ")", "if", "not", "chunk", ":", "raise", "IOError", "(", "'EOF reading IV'", ")", "decryptor", "=", "Crypto", ".", "Cipher", ".", "AES", ".", "new", "(", "key", ",", "Crypto", ".", "Cipher", ".", "AES", ".", "MODE_CBC", ",", "iv", ")", "data", "=", "''", "while", "True", ":", "chunk", "=", "stdin", ".", "read", "(", "chunk_size", ")", "if", "not", "chunk", ":", "if", "len", "(", "data", ")", "!=", "16", ":", "raise", "IOError", "(", "'EOF reading encrypted stream'", ")", "data", "=", "decryptor", ".", "decrypt", "(", "data", ")", "trailing", "=", "ord", "(", "data", "[", "-", "1", "]", ")", "if", "trailing", ">", "15", ":", "raise", "IOError", "(", "'EOF reading encrypted stream or trailing value corrupted '", "'%s'", "%", "trailing", ")", "yield", "data", "[", ":", "trailing", "]", "break", "data", "+=", "chunk", "if", "len", "(", "data", ")", ">", "16", ":", "# Always leave at least one byte pending", "trailing", "=", "(", "len", "(", "data", ")", "%", "16", ")", "or", "16", "yield", "decryptor", ".", "decrypt", "(", "data", "[", ":", "-", "trailing", "]", ")", "data", "=", "data", "[", "-", "trailing", ":", "]" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
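Note on the aes_encrypt/aes_decrypt pair above: it does not use PKCS#7 padding. Instead, the last byte of the final 16-byte plaintext block records how many of that block's bytes are real data, and input that ends exactly on a block boundary gets one extra all-zero block. The following crypto-free sketch illustrates just that framing convention; the real functions additionally require pycrypto for the AES layer:

def pad(data, block=16):
    # Final-block convention used by aes_encrypt: the last byte of the last
    # block is the count of usable bytes in that block (0 when the data ends
    # exactly on a block boundary, which costs one extra block).
    trailing = len(data) % block
    if trailing == 0:
        return data + bytes(block)
    return data + bytes([trailing]) * (block - trailing)

def unpad(padded, block=16):
    # Mirrors aes_decrypt: read the count from the last byte and keep only
    # that many bytes of the final block.
    usable = padded[-1]
    return padded[:-block] + padded[-block:][:usable]

for msg in (b'short', b'exactly 16 bytes', b'a bit over sixteen'):
    assert unpad(pad(msg)) == msg
    print(len(msg), 'bytes ->', len(pad(msg)), 'padded bytes')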
test
cli_put_directory_structure
Performs PUTs rooted at the path using a directory structure pointed to by context.input\_. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information.
swiftly/cli/put.py
def cli_put_directory_structure(context, path): """ Performs PUTs rooted at the path using a directory structure pointed to by context.input\_. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ if not context.input_: raise ReturnCode( 'called cli_put_directory_structure without context.input_ set') if not os.path.isdir(context.input_): raise ReturnCode( '%r is not a directory' % context.input_) if not path: raise ReturnCode( 'uploading a directory structure requires at least a container ' 'name') new_context = context.copy() new_context.input_ = None container = path.split('/', 1)[0] cli_put_container(new_context, container) ilen = len(context.input_) if not context.input_.endswith(os.sep): ilen += 1 conc = Concurrency(context.concurrency) for (dirpath, dirnames, filenames) in os.walk(context.input_): if not dirnames and not filenames: new_context = context.copy() new_context.headers = dict(context.headers) new_context.headers['content-type'] = 'text/directory' new_context.headers['x-object-meta-mtime'] = \ '%f' % os.path.getmtime(context.input_) new_context.input_ = None new_context.empty = True new_path = path if path[-1] != '/': new_path += '/' new_path += dirpath[ilen:] for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(new_path, cli_put_object, new_context, new_path) else: for fname in filenames: new_context = context.copy() new_context.input_ = os.path.join(dirpath, fname) new_path = path if path[-1] != '/': new_path += '/' if dirpath[ilen:]: new_path += dirpath[ilen:] + '/' new_path += fname for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(new_path, cli_put_object, new_context, new_path) conc.join() for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: raise exc_value
def cli_put_directory_structure(context, path): """ Performs PUTs rooted at the path using a directory structure pointed to by context.input\_. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ if not context.input_: raise ReturnCode( 'called cli_put_directory_structure without context.input_ set') if not os.path.isdir(context.input_): raise ReturnCode( '%r is not a directory' % context.input_) if not path: raise ReturnCode( 'uploading a directory structure requires at least a container ' 'name') new_context = context.copy() new_context.input_ = None container = path.split('/', 1)[0] cli_put_container(new_context, container) ilen = len(context.input_) if not context.input_.endswith(os.sep): ilen += 1 conc = Concurrency(context.concurrency) for (dirpath, dirnames, filenames) in os.walk(context.input_): if not dirnames and not filenames: new_context = context.copy() new_context.headers = dict(context.headers) new_context.headers['content-type'] = 'text/directory' new_context.headers['x-object-meta-mtime'] = \ '%f' % os.path.getmtime(context.input_) new_context.input_ = None new_context.empty = True new_path = path if path[-1] != '/': new_path += '/' new_path += dirpath[ilen:] for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(new_path, cli_put_object, new_context, new_path) else: for fname in filenames: new_context = context.copy() new_context.input_ = os.path.join(dirpath, fname) new_path = path if path[-1] != '/': new_path += '/' if dirpath[ilen:]: new_path += dirpath[ilen:] + '/' new_path += fname for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: conc.join() raise exc_value conc.spawn(new_path, cli_put_object, new_context, new_path) conc.join() for (exc_type, exc_value, exc_tb, result) in \ six.itervalues(conc.get_results()): if exc_value: raise exc_value
[ "Performs", "PUTs", "rooted", "at", "the", "path", "using", "a", "directory", "structure", "pointed", "to", "by", "context", ".", "input", "\\", "_", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L64-L130
[ "def", "cli_put_directory_structure", "(", "context", ",", "path", ")", ":", "if", "not", "context", ".", "input_", ":", "raise", "ReturnCode", "(", "'called cli_put_directory_structure without context.input_ set'", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "context", ".", "input_", ")", ":", "raise", "ReturnCode", "(", "'%r is not a directory'", "%", "context", ".", "input_", ")", "if", "not", "path", ":", "raise", "ReturnCode", "(", "'uploading a directory structure requires at least a container '", "'name'", ")", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "input_", "=", "None", "container", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "[", "0", "]", "cli_put_container", "(", "new_context", ",", "container", ")", "ilen", "=", "len", "(", "context", ".", "input_", ")", "if", "not", "context", ".", "input_", ".", "endswith", "(", "os", ".", "sep", ")", ":", "ilen", "+=", "1", "conc", "=", "Concurrency", "(", "context", ".", "concurrency", ")", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "context", ".", "input_", ")", ":", "if", "not", "dirnames", "and", "not", "filenames", ":", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "headers", "=", "dict", "(", "context", ".", "headers", ")", "new_context", ".", "headers", "[", "'content-type'", "]", "=", "'text/directory'", "new_context", ".", "headers", "[", "'x-object-meta-mtime'", "]", "=", "'%f'", "%", "os", ".", "path", ".", "getmtime", "(", "context", ".", "input_", ")", "new_context", ".", "input_", "=", "None", "new_context", ".", "empty", "=", "True", "new_path", "=", "path", "if", "path", "[", "-", "1", "]", "!=", "'/'", ":", "new_path", "+=", "'/'", "new_path", "+=", "dirpath", "[", "ilen", ":", "]", "for", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", "in", "six", ".", "itervalues", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "conc", ".", "join", "(", ")", "raise", "exc_value", "conc", ".", "spawn", "(", "new_path", ",", "cli_put_object", ",", "new_context", ",", "new_path", ")", "else", ":", "for", "fname", "in", "filenames", ":", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "input_", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "fname", ")", "new_path", "=", "path", "if", "path", "[", "-", "1", "]", "!=", "'/'", ":", "new_path", "+=", "'/'", "if", "dirpath", "[", "ilen", ":", "]", ":", "new_path", "+=", "dirpath", "[", "ilen", ":", "]", "+", "'/'", "new_path", "+=", "fname", "for", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", "in", "six", ".", "itervalues", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "conc", ".", "join", "(", ")", "raise", "exc_value", "conc", ".", "spawn", "(", "new_path", ",", "cli_put_object", ",", "new_context", ",", "new_path", ")", "conc", ".", "join", "(", ")", "for", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", "in", "six", ".", "itervalues", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "raise", "exc_value" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
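Note on the cli_put_directory_structure row above: most of it is concurrency plumbing; the core is the mapping from local paths under context.input_ to object paths under the target container, plus a zero-byte text/directory marker object for empty directories. A simplified sketch of just that mapping; the directory name and target path are hypothetical, and the actual uploads are replaced by prints:

import os

def object_paths(input_dir, path):
    # Strip the input directory (and its trailing separator) from each walked
    # path, then append the remainder to the container/prefix in `path`.
    ilen = len(input_dir)
    if not input_dir.endswith(os.sep):
        ilen += 1
    base = path if path.endswith('/') else path + '/'
    for dirpath, dirnames, filenames in os.walk(input_dir):
        rel = dirpath[ilen:]
        if not dirnames and not filenames:
            # Empty directory: swiftly uploads an empty text/directory object.
            yield base + rel, None
            continue
        for fname in filenames:
            yield base + (rel + '/' if rel else '') + fname, \
                os.path.join(dirpath, fname)

for obj_path, local_path in object_paths('./website', 'mycontainer/site'):
    print(local_path, '->', obj_path)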
test
cli_put_account
Performs a PUT on the account. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information.
swiftly/cli/put.py
def cli_put_account(context): """ Performs a PUT on the account. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ body = None if context.input_: if context.input_ == '-': body = context.io_manager.get_stdin() else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: status, reason, headers, contents = client.put_account( headers=context.headers, query=context.query, cdn=context.cdn, body=body) if hasattr(contents, 'read'): contents.read() if status // 100 != 2: raise ReturnCode('putting account: %s %s' % (status, reason))
def cli_put_account(context): """ Performs a PUT on the account. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ body = None if context.input_: if context.input_ == '-': body = context.io_manager.get_stdin() else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: status, reason, headers, contents = client.put_account( headers=context.headers, query=context.query, cdn=context.cdn, body=body) if hasattr(contents, 'read'): contents.read() if status // 100 != 2: raise ReturnCode('putting account: %s %s' % (status, reason))
[ "Performs", "a", "PUT", "on", "the", "account", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L133-L154
[ "def", "cli_put_account", "(", "context", ")", ":", "body", "=", "None", "if", "context", ".", "input_", ":", "if", "context", ".", "input_", "==", "'-'", ":", "body", "=", "context", ".", "io_manager", ".", "get_stdin", "(", ")", "else", ":", "body", "=", "open", "(", "context", ".", "input_", ",", "'rb'", ")", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "put_account", "(", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ",", "body", "=", "body", ")", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "raise", "ReturnCode", "(", "'putting account: %s %s'", "%", "(", "status", ",", "reason", ")", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
cli_put_container
Performs a PUT on the container. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information.
swiftly/cli/put.py
def cli_put_container(context, path): """ Performs a PUT on the container. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ path = path.rstrip('/') if '/' in path: raise ReturnCode('called cli_put_container with object %r' % path) body = None if context.input_: if context.input_ == '-': body = context.io_manager.get_stdin() else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: status, reason, headers, contents = client.put_container( path, headers=context.headers, query=context.query, cdn=context.cdn, body=body) if hasattr(contents, 'read'): contents.read() if status // 100 != 2: raise ReturnCode( 'putting container %r: %s %s' % (path, status, reason))
def cli_put_container(context, path): """ Performs a PUT on the container. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ path = path.rstrip('/') if '/' in path: raise ReturnCode('called cli_put_container with object %r' % path) body = None if context.input_: if context.input_ == '-': body = context.io_manager.get_stdin() else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: status, reason, headers, contents = client.put_container( path, headers=context.headers, query=context.query, cdn=context.cdn, body=body) if hasattr(contents, 'read'): contents.read() if status // 100 != 2: raise ReturnCode( 'putting container %r: %s %s' % (path, status, reason))
[ "Performs", "a", "PUT", "on", "the", "container", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L157-L182
[ "def", "cli_put_container", "(", "context", ",", "path", ")", ":", "path", "=", "path", ".", "rstrip", "(", "'/'", ")", "if", "'/'", "in", "path", ":", "raise", "ReturnCode", "(", "'called cli_put_container with object %r'", "%", "path", ")", "body", "=", "None", "if", "context", ".", "input_", ":", "if", "context", ".", "input_", "==", "'-'", ":", "body", "=", "context", ".", "io_manager", ".", "get_stdin", "(", ")", "else", ":", "body", "=", "open", "(", "context", ".", "input_", ",", "'rb'", ")", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "put_container", "(", "path", ",", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ",", "body", "=", "body", ")", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "raise", "ReturnCode", "(", "'putting container %r: %s %s'", "%", "(", "path", ",", "status", ",", "reason", ")", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
cli_put_object
Performs a PUT on the object. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information.
swiftly/cli/put.py
def cli_put_object(context, path): """ Performs a PUT on the object. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ if context.different and context.encrypt: raise ReturnCode( 'context.different will not work properly with context.encrypt ' 'since encryption may change the object size') put_headers = dict(context.headers) if context.empty: body = '' put_headers['content-length'] = '0' elif not context.input_ or context.input_ == '-': stdin = context.io_manager.get_stdin() if context.stdin_segmentation: def reader(): while True: chunk = stdin.read(65536) if chunk: yield chunk else: return segment_body = FileLikeIter(reader(), context.segment_size) prefix = _create_container(context, path, time.time(), 0) new_context = context.copy() new_context.stdin_segmentation = False new_context.stdin = segment_body new_context.headers = dict(context.headers) segment_n = 0 path2info = {} while not segment_body.is_empty(): segment_path = _get_segment_path(prefix, segment_n) etag = cli_put_object(new_context, segment_path) size = segment_body.limit - segment_body.left path2info[segment_path] = (size, etag) segment_body.reset_limit() segment_n += 1 body = _get_manifest_body(context, prefix, path2info, put_headers) else: if hasattr(context, 'stdin'): body = context.stdin else: body = stdin elif context.seek is not None: if context.encrypt: raise ReturnCode( 'putting object %r: Cannot use encryption and context.seek' % path) body = open(context.input_, 'rb') body.seek(context.seek) else: l_mtime = os.path.getmtime(context.input_) l_size = os.path.getsize(context.input_) put_headers['content-length'] = str(l_size) if context.newer or context.different: r_mtime = None r_size = None with context.client_manager.with_client() as client: status, reason, headers, contents = client.head_object( *path.split('/', 1), headers=context.headers, query=context.query, cdn=context.cdn) if hasattr(contents, 'read'): contents.read() if status // 100 == 2: r_mtime = headers.get('x-object-meta-mtime') if r_mtime: try: r_mtime = float(r_mtime) except ValueError: r_mtime = None r_size = headers.get('content-length') if r_size: try: r_size = int(r_size) except ValueError: r_size = None elif status != 404: raise ReturnCode( 'could not head %r for conditional check; skipping put: ' '%s %s' % (path, status, reason)) if context.newer and r_mtime is not None or l_mtime <= r_mtime: return if context.different and r_mtime is not None and \ l_mtime == r_mtime and r_size is not None and \ l_size == r_size: return put_headers['x-object-meta-mtime'] = '%f' % l_mtime size = os.path.getsize(context.input_) if size > context.segment_size: if context.encrypt: raise ReturnCode( 'putting object %r: Cannot use encryption for objects ' 'greater than the segment size' % path) prefix = _create_container(context, path, l_mtime, size) conc = Concurrency(context.concurrency) start = 0 segment = 0 path2info = {} while start < size: new_context = context.copy() new_context.headers = dict(context.headers) new_context.headers['content-length'] = str(min( size - start, context.segment_size)) new_context.seek = start new_path = _get_segment_path(prefix, segment) for (ident, (exc_type, exc_value, exc_tb, result)) in \ six.iteritems(conc.get_results()): if exc_value: conc.join() raise exc_value path2info[ident] = result conc.spawn( new_path, cli_put_object, new_context, new_path) segment += 1 start += context.segment_size conc.join() for (ident, (exc_type, exc_value, exc_tb, result)) in \ 
six.iteritems(conc.get_results()): if exc_value: raise exc_value path2info[ident] = result body = _get_manifest_body(context, prefix, path2info, put_headers) else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: if context.encrypt: content_length = put_headers.get('content-length') if content_length: content_length = int(content_length) if hasattr(body, 'read'): body = FileLikeIter(aes_encrypt( context.encrypt, body, preamble=AES256CBC, chunk_size=getattr(client, 'chunk_size', 65536), content_length=content_length)) else: body = FileLikeIter(aes_encrypt( context.encrypt, FileLikeIter([body]), preamble=AES256CBC, chunk_size=getattr(client, 'chunk_size', 65536), content_length=content_length)) if 'content-length' in put_headers: del put_headers['content-length'] container, obj = path.split('/', 1) status, reason, headers, contents = client.put_object( container, obj, body, headers=put_headers, query=context.query, cdn=context.cdn) if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: raise ReturnCode( 'putting object %r: %s %s %r' % (path, status, reason, contents)) if context.seek is not None: content_length = put_headers.get('content-length') etag = headers.get('etag') if content_length and etag: content_length = int(content_length) else: with context.client_manager.with_client() as client: container, obj = path.split('/', 1) status, reason, headers, contents = client.head_object( container, obj, cdn=context.cdn) if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: raise ReturnCode( 'heading object %r: %s %s %r' % (path, status, reason, contents)) content_length = headers.get('content-length') etag = headers.get('etag') if content_length: content_length = int(content_length) return content_length, etag if context.stdin is not None: return headers.get('etag')
def cli_put_object(context, path): """ Performs a PUT on the object. See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information. """ if context.different and context.encrypt: raise ReturnCode( 'context.different will not work properly with context.encrypt ' 'since encryption may change the object size') put_headers = dict(context.headers) if context.empty: body = '' put_headers['content-length'] = '0' elif not context.input_ or context.input_ == '-': stdin = context.io_manager.get_stdin() if context.stdin_segmentation: def reader(): while True: chunk = stdin.read(65536) if chunk: yield chunk else: return segment_body = FileLikeIter(reader(), context.segment_size) prefix = _create_container(context, path, time.time(), 0) new_context = context.copy() new_context.stdin_segmentation = False new_context.stdin = segment_body new_context.headers = dict(context.headers) segment_n = 0 path2info = {} while not segment_body.is_empty(): segment_path = _get_segment_path(prefix, segment_n) etag = cli_put_object(new_context, segment_path) size = segment_body.limit - segment_body.left path2info[segment_path] = (size, etag) segment_body.reset_limit() segment_n += 1 body = _get_manifest_body(context, prefix, path2info, put_headers) else: if hasattr(context, 'stdin'): body = context.stdin else: body = stdin elif context.seek is not None: if context.encrypt: raise ReturnCode( 'putting object %r: Cannot use encryption and context.seek' % path) body = open(context.input_, 'rb') body.seek(context.seek) else: l_mtime = os.path.getmtime(context.input_) l_size = os.path.getsize(context.input_) put_headers['content-length'] = str(l_size) if context.newer or context.different: r_mtime = None r_size = None with context.client_manager.with_client() as client: status, reason, headers, contents = client.head_object( *path.split('/', 1), headers=context.headers, query=context.query, cdn=context.cdn) if hasattr(contents, 'read'): contents.read() if status // 100 == 2: r_mtime = headers.get('x-object-meta-mtime') if r_mtime: try: r_mtime = float(r_mtime) except ValueError: r_mtime = None r_size = headers.get('content-length') if r_size: try: r_size = int(r_size) except ValueError: r_size = None elif status != 404: raise ReturnCode( 'could not head %r for conditional check; skipping put: ' '%s %s' % (path, status, reason)) if context.newer and r_mtime is not None or l_mtime <= r_mtime: return if context.different and r_mtime is not None and \ l_mtime == r_mtime and r_size is not None and \ l_size == r_size: return put_headers['x-object-meta-mtime'] = '%f' % l_mtime size = os.path.getsize(context.input_) if size > context.segment_size: if context.encrypt: raise ReturnCode( 'putting object %r: Cannot use encryption for objects ' 'greater than the segment size' % path) prefix = _create_container(context, path, l_mtime, size) conc = Concurrency(context.concurrency) start = 0 segment = 0 path2info = {} while start < size: new_context = context.copy() new_context.headers = dict(context.headers) new_context.headers['content-length'] = str(min( size - start, context.segment_size)) new_context.seek = start new_path = _get_segment_path(prefix, segment) for (ident, (exc_type, exc_value, exc_tb, result)) in \ six.iteritems(conc.get_results()): if exc_value: conc.join() raise exc_value path2info[ident] = result conc.spawn( new_path, cli_put_object, new_context, new_path) segment += 1 start += context.segment_size conc.join() for (ident, (exc_type, exc_value, exc_tb, result)) in \ 
six.iteritems(conc.get_results()): if exc_value: raise exc_value path2info[ident] = result body = _get_manifest_body(context, prefix, path2info, put_headers) else: body = open(context.input_, 'rb') with context.client_manager.with_client() as client: if context.encrypt: content_length = put_headers.get('content-length') if content_length: content_length = int(content_length) if hasattr(body, 'read'): body = FileLikeIter(aes_encrypt( context.encrypt, body, preamble=AES256CBC, chunk_size=getattr(client, 'chunk_size', 65536), content_length=content_length)) else: body = FileLikeIter(aes_encrypt( context.encrypt, FileLikeIter([body]), preamble=AES256CBC, chunk_size=getattr(client, 'chunk_size', 65536), content_length=content_length)) if 'content-length' in put_headers: del put_headers['content-length'] container, obj = path.split('/', 1) status, reason, headers, contents = client.put_object( container, obj, body, headers=put_headers, query=context.query, cdn=context.cdn) if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: raise ReturnCode( 'putting object %r: %s %s %r' % (path, status, reason, contents)) if context.seek is not None: content_length = put_headers.get('content-length') etag = headers.get('etag') if content_length and etag: content_length = int(content_length) else: with context.client_manager.with_client() as client: container, obj = path.split('/', 1) status, reason, headers, contents = client.head_object( container, obj, cdn=context.cdn) if hasattr(contents, 'read'): contents = contents.read() if status // 100 != 2: raise ReturnCode( 'heading object %r: %s %s %r' % (path, status, reason, contents)) content_length = headers.get('content-length') etag = headers.get('etag') if content_length: content_length = int(content_length) return content_length, etag if context.stdin is not None: return headers.get('etag')
[ "Performs", "a", "PUT", "on", "the", "object", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L185-L365
[ "def", "cli_put_object", "(", "context", ",", "path", ")", ":", "if", "context", ".", "different", "and", "context", ".", "encrypt", ":", "raise", "ReturnCode", "(", "'context.different will not work properly with context.encrypt '", "'since encryption may change the object size'", ")", "put_headers", "=", "dict", "(", "context", ".", "headers", ")", "if", "context", ".", "empty", ":", "body", "=", "''", "put_headers", "[", "'content-length'", "]", "=", "'0'", "elif", "not", "context", ".", "input_", "or", "context", ".", "input_", "==", "'-'", ":", "stdin", "=", "context", ".", "io_manager", ".", "get_stdin", "(", ")", "if", "context", ".", "stdin_segmentation", ":", "def", "reader", "(", ")", ":", "while", "True", ":", "chunk", "=", "stdin", ".", "read", "(", "65536", ")", "if", "chunk", ":", "yield", "chunk", "else", ":", "return", "segment_body", "=", "FileLikeIter", "(", "reader", "(", ")", ",", "context", ".", "segment_size", ")", "prefix", "=", "_create_container", "(", "context", ",", "path", ",", "time", ".", "time", "(", ")", ",", "0", ")", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "stdin_segmentation", "=", "False", "new_context", ".", "stdin", "=", "segment_body", "new_context", ".", "headers", "=", "dict", "(", "context", ".", "headers", ")", "segment_n", "=", "0", "path2info", "=", "{", "}", "while", "not", "segment_body", ".", "is_empty", "(", ")", ":", "segment_path", "=", "_get_segment_path", "(", "prefix", ",", "segment_n", ")", "etag", "=", "cli_put_object", "(", "new_context", ",", "segment_path", ")", "size", "=", "segment_body", ".", "limit", "-", "segment_body", ".", "left", "path2info", "[", "segment_path", "]", "=", "(", "size", ",", "etag", ")", "segment_body", ".", "reset_limit", "(", ")", "segment_n", "+=", "1", "body", "=", "_get_manifest_body", "(", "context", ",", "prefix", ",", "path2info", ",", "put_headers", ")", "else", ":", "if", "hasattr", "(", "context", ",", "'stdin'", ")", ":", "body", "=", "context", ".", "stdin", "else", ":", "body", "=", "stdin", "elif", "context", ".", "seek", "is", "not", "None", ":", "if", "context", ".", "encrypt", ":", "raise", "ReturnCode", "(", "'putting object %r: Cannot use encryption and context.seek'", "%", "path", ")", "body", "=", "open", "(", "context", ".", "input_", ",", "'rb'", ")", "body", ".", "seek", "(", "context", ".", "seek", ")", "else", ":", "l_mtime", "=", "os", ".", "path", ".", "getmtime", "(", "context", ".", "input_", ")", "l_size", "=", "os", ".", "path", ".", "getsize", "(", "context", ".", "input_", ")", "put_headers", "[", "'content-length'", "]", "=", "str", "(", "l_size", ")", "if", "context", ".", "newer", "or", "context", ".", "different", ":", "r_mtime", "=", "None", "r_size", "=", "None", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_object", "(", "*", "path", ".", "split", "(", "'/'", ",", "1", ")", ",", "headers", "=", "context", ".", "headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "==", "2", ":", "r_mtime", "=", "headers", ".", "get", "(", "'x-object-meta-mtime'", ")", "if", "r_mtime", ":", "try", ":", "r_mtime", "=", "float", "(", "r_mtime", ")", "except", "ValueError", ":", "r_mtime", "=", "None", "r_size", "=", "headers", ".", "get", "(", 
"'content-length'", ")", "if", "r_size", ":", "try", ":", "r_size", "=", "int", "(", "r_size", ")", "except", "ValueError", ":", "r_size", "=", "None", "elif", "status", "!=", "404", ":", "raise", "ReturnCode", "(", "'could not head %r for conditional check; skipping put: '", "'%s %s'", "%", "(", "path", ",", "status", ",", "reason", ")", ")", "if", "context", ".", "newer", "and", "r_mtime", "is", "not", "None", "or", "l_mtime", "<=", "r_mtime", ":", "return", "if", "context", ".", "different", "and", "r_mtime", "is", "not", "None", "and", "l_mtime", "==", "r_mtime", "and", "r_size", "is", "not", "None", "and", "l_size", "==", "r_size", ":", "return", "put_headers", "[", "'x-object-meta-mtime'", "]", "=", "'%f'", "%", "l_mtime", "size", "=", "os", ".", "path", ".", "getsize", "(", "context", ".", "input_", ")", "if", "size", ">", "context", ".", "segment_size", ":", "if", "context", ".", "encrypt", ":", "raise", "ReturnCode", "(", "'putting object %r: Cannot use encryption for objects '", "'greater than the segment size'", "%", "path", ")", "prefix", "=", "_create_container", "(", "context", ",", "path", ",", "l_mtime", ",", "size", ")", "conc", "=", "Concurrency", "(", "context", ".", "concurrency", ")", "start", "=", "0", "segment", "=", "0", "path2info", "=", "{", "}", "while", "start", "<", "size", ":", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "headers", "=", "dict", "(", "context", ".", "headers", ")", "new_context", ".", "headers", "[", "'content-length'", "]", "=", "str", "(", "min", "(", "size", "-", "start", ",", "context", ".", "segment_size", ")", ")", "new_context", ".", "seek", "=", "start", "new_path", "=", "_get_segment_path", "(", "prefix", ",", "segment", ")", "for", "(", "ident", ",", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", ")", "in", "six", ".", "iteritems", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "conc", ".", "join", "(", ")", "raise", "exc_value", "path2info", "[", "ident", "]", "=", "result", "conc", ".", "spawn", "(", "new_path", ",", "cli_put_object", ",", "new_context", ",", "new_path", ")", "segment", "+=", "1", "start", "+=", "context", ".", "segment_size", "conc", ".", "join", "(", ")", "for", "(", "ident", ",", "(", "exc_type", ",", "exc_value", ",", "exc_tb", ",", "result", ")", ")", "in", "six", ".", "iteritems", "(", "conc", ".", "get_results", "(", ")", ")", ":", "if", "exc_value", ":", "raise", "exc_value", "path2info", "[", "ident", "]", "=", "result", "body", "=", "_get_manifest_body", "(", "context", ",", "prefix", ",", "path2info", ",", "put_headers", ")", "else", ":", "body", "=", "open", "(", "context", ".", "input_", ",", "'rb'", ")", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "if", "context", ".", "encrypt", ":", "content_length", "=", "put_headers", ".", "get", "(", "'content-length'", ")", "if", "content_length", ":", "content_length", "=", "int", "(", "content_length", ")", "if", "hasattr", "(", "body", ",", "'read'", ")", ":", "body", "=", "FileLikeIter", "(", "aes_encrypt", "(", "context", ".", "encrypt", ",", "body", ",", "preamble", "=", "AES256CBC", ",", "chunk_size", "=", "getattr", "(", "client", ",", "'chunk_size'", ",", "65536", ")", ",", "content_length", "=", "content_length", ")", ")", "else", ":", "body", "=", "FileLikeIter", "(", "aes_encrypt", "(", "context", ".", "encrypt", ",", "FileLikeIter", "(", "[", "body", "]", ")", ",", "preamble", "=", "AES256CBC", ",", "chunk_size", "=", 
"getattr", "(", "client", ",", "'chunk_size'", ",", "65536", ")", ",", "content_length", "=", "content_length", ")", ")", "if", "'content-length'", "in", "put_headers", ":", "del", "put_headers", "[", "'content-length'", "]", "container", ",", "obj", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "put_object", "(", "container", ",", "obj", ",", "body", ",", "headers", "=", "put_headers", ",", "query", "=", "context", ".", "query", ",", "cdn", "=", "context", ".", "cdn", ")", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", "=", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "raise", "ReturnCode", "(", "'putting object %r: %s %s %r'", "%", "(", "path", ",", "status", ",", "reason", ",", "contents", ")", ")", "if", "context", ".", "seek", "is", "not", "None", ":", "content_length", "=", "put_headers", ".", "get", "(", "'content-length'", ")", "etag", "=", "headers", ".", "get", "(", "'etag'", ")", "if", "content_length", "and", "etag", ":", "content_length", "=", "int", "(", "content_length", ")", "else", ":", "with", "context", ".", "client_manager", ".", "with_client", "(", ")", "as", "client", ":", "container", ",", "obj", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_object", "(", "container", ",", "obj", ",", "cdn", "=", "context", ".", "cdn", ")", "if", "hasattr", "(", "contents", ",", "'read'", ")", ":", "contents", "=", "contents", ".", "read", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "raise", "ReturnCode", "(", "'heading object %r: %s %s %r'", "%", "(", "path", ",", "status", ",", "reason", ",", "contents", ")", ")", "content_length", "=", "headers", ".", "get", "(", "'content-length'", ")", "etag", "=", "headers", ".", "get", "(", "'etag'", ")", "if", "content_length", ":", "content_length", "=", "int", "(", "content_length", ")", "return", "content_length", ",", "etag", "if", "context", ".", "stdin", "is", "not", "None", ":", "return", "headers", ".", "get", "(", "'etag'", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
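The segmented path of cli_put_object splits the incoming data into segment_size pieces, uploads each piece as its own object, records (size, etag) per segment in path2info, and finishes with a manifest object. A minimal standalone sketch of that bookkeeping, assuming md5 etags and an illustrative segment-naming scheme (neither is taken from swiftly's helpers):

import hashlib
import io

def iter_segments(fp, segment_size):
    # Yield successive fixed-size chunks read from a file-like object.
    while True:
        chunk = fp.read(segment_size)
        if not chunk:
            return
        yield chunk

source = io.BytesIO(b'hello world, this will be split up')
path2info = {}
for n, chunk in enumerate(iter_segments(source, 8)):
    segment_path = 'container_segments/obj/%08d' % n  # naming is illustrative
    path2info[segment_path] = (len(chunk), hashlib.md5(chunk).hexdigest())
print(sorted(path2info.items()))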
test
cli_put
Performs a PUT on the item (account, container, or object). See :py:mod:`swiftly.cli.put` for context usage information. See :py:class:`CLIPut` for more information.
swiftly/cli/put.py
def cli_put(context, path):
    """
    Performs a PUT on the item (account, container, or object).

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    path = path.lstrip('/') if path else ''
    if context.input_ and os.path.isdir(context.input_):
        return cli_put_directory_structure(context, path)
    if not path:
        return cli_put_account(context)
    elif '/' not in path.rstrip('/'):
        return cli_put_container(context, path)
    else:
        return cli_put_object(context, path)
def cli_put(context, path):
    """
    Performs a PUT on the item (account, container, or object).

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    path = path.lstrip('/') if path else ''
    if context.input_ and os.path.isdir(context.input_):
        return cli_put_directory_structure(context, path)
    if not path:
        return cli_put_account(context)
    elif '/' not in path.rstrip('/'):
        return cli_put_container(context, path)
    else:
        return cli_put_object(context, path)
[ "Performs", "a", "PUT", "on", "the", "item", "(", "account", "container", "or", "object", ")", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L368-L384
[ "def", "cli_put", "(", "context", ",", "path", ")", ":", "path", "=", "path", ".", "lstrip", "(", "'/'", ")", "if", "path", "else", "''", "if", "context", ".", "input_", "and", "os", ".", "path", ".", "isdir", "(", "context", ".", "input_", ")", ":", "return", "cli_put_directory_structure", "(", "context", ",", "path", ")", "if", "not", "path", ":", "return", "cli_put_account", "(", "context", ")", "elif", "'/'", "not", "in", "path", ".", "rstrip", "(", "'/'", ")", ":", "return", "cli_put_container", "(", "context", ",", "path", ")", "else", ":", "return", "cli_put_object", "(", "context", ",", "path", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
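cli_put routes purely on the shape of the path: an empty path means the account, a lone name means a container, and anything containing a slash means an object. A rough standalone sketch of that routing rule (the classify name is a placeholder, not part of swiftly):

def classify(path):
    # Mirror cli_put's routing decision for a Swift path.
    path = path.lstrip('/') if path else ''
    if not path:
        return 'account'
    if '/' not in path.rstrip('/'):
        return 'container'
    return 'object'

for p in ('', 'images', 'images/', 'images/cats/1.jpg'):
    print(repr(p), '->', classify(p))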
test
_get_manifest_body
Returns body for manifest file and modifies put_headers. path2info is a dict like {"path": (size, etag)}
swiftly/cli/put.py
def _get_manifest_body(context, prefix, path2info, put_headers):
    """
    Returns body for manifest file and modifies put_headers.

    path2info is a dict like {"path": (size, etag)}
    """
    if context.static_segments:
        body = json.dumps([
            {'path': '/' + p, 'size_bytes': s, 'etag': e}
            for p, (s, e) in sorted(six.iteritems(path2info))
        ])
        put_headers['content-length'] = str(len(body))
        context.query['multipart-manifest'] = 'put'
    else:
        body = ''
        put_headers['content-length'] = '0'
        put_headers['x-object-manifest'] = prefix
    return body
def _get_manifest_body(context, prefix, path2info, put_headers):
    """
    Returns body for manifest file and modifies put_headers.

    path2info is a dict like {"path": (size, etag)}
    """
    if context.static_segments:
        body = json.dumps([
            {'path': '/' + p, 'size_bytes': s, 'etag': e}
            for p, (s, e) in sorted(six.iteritems(path2info))
        ])
        put_headers['content-length'] = str(len(body))
        context.query['multipart-manifest'] = 'put'
    else:
        body = ''
        put_headers['content-length'] = '0'
        put_headers['x-object-manifest'] = prefix
    return body
[ "Returns", "body", "for", "manifest", "file", "and", "modifies", "put_headers", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L394-L412
[ "def", "_get_manifest_body", "(", "context", ",", "prefix", ",", "path2info", ",", "put_headers", ")", ":", "if", "context", ".", "static_segments", ":", "body", "=", "json", ".", "dumps", "(", "[", "{", "'path'", ":", "'/'", "+", "p", ",", "'size_bytes'", ":", "s", ",", "'etag'", ":", "e", "}", "for", "p", ",", "(", "s", ",", "e", ")", "in", "sorted", "(", "six", ".", "iteritems", "(", "path2info", ")", ")", "]", ")", "put_headers", "[", "'content-length'", "]", "=", "str", "(", "len", "(", "body", ")", ")", "context", ".", "query", "[", "'multipart-manifest'", "]", "=", "'put'", "else", ":", "body", "=", "''", "put_headers", "[", "'content-length'", "]", "=", "'0'", "put_headers", "[", "'x-object-manifest'", "]", "=", "prefix", "return", "body" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
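The two branches correspond to Swift's two large-object styles: a static large object, whose body lists every segment and is submitted with multipart-manifest=put, and a dynamic large object, whose body is empty and whose segments are found at read time via the X-Object-Manifest prefix. A small sketch of the difference, assuming path2info maps segment paths to (size, etag) with made-up values:

import json

path2info = {
    'c_segments/obj/1455208417.0/12/00000000': (8, 'etag-a'),  # etags shortened
    'c_segments/obj/1455208417.0/12/00000001': (4, 'etag-b'),
}

# Static large object: the manifest body lists every segment explicitly.
slo_body = json.dumps([
    {'path': '/' + p, 'size_bytes': s, 'etag': e}
    for p, (s, e) in sorted(path2info.items())])
slo_headers = {'content-length': str(len(slo_body))}
slo_query = {'multipart-manifest': 'put'}

# Dynamic large object: empty body, segments located by common prefix.
dlo_body = ''
dlo_headers = {'content-length': '0',
               'x-object-manifest': 'c_segments/obj/1455208417.0/12/'}

print(slo_body)
print(dlo_headers)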
test
_create_container
Creates container for segments of file with `path`
swiftly/cli/put.py
def _create_container(context, path, l_mtime, size):
    """
    Creates container for segments of file with `path`
    """
    new_context = context.copy()
    new_context.input_ = None
    new_context.headers = None
    new_context.query = None
    container = path.split('/', 1)[0] + '_segments'
    cli_put_container(new_context, container)
    prefix = container + '/' + path.split('/', 1)[1]
    prefix = '%s/%s/%s/' % (prefix, l_mtime, size)
    return prefix
def _create_container(context, path, l_mtime, size):
    """
    Creates container for segments of file with `path`
    """
    new_context = context.copy()
    new_context.input_ = None
    new_context.headers = None
    new_context.query = None
    container = path.split('/', 1)[0] + '_segments'
    cli_put_container(new_context, container)
    prefix = container + '/' + path.split('/', 1)[1]
    prefix = '%s/%s/%s/' % (prefix, l_mtime, size)
    return prefix
[ "Creates", "container", "for", "segments", "of", "file", "with", "path" ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/put.py#L415-L428
[ "def", "_create_container", "(", "context", ",", "path", ",", "l_mtime", ",", "size", ")", ":", "new_context", "=", "context", ".", "copy", "(", ")", "new_context", ".", "input_", "=", "None", "new_context", ".", "headers", "=", "None", "new_context", ".", "query", "=", "None", "container", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "[", "0", "]", "+", "'_segments'", "cli_put_container", "(", "new_context", ",", "container", ")", "prefix", "=", "container", "+", "'/'", "+", "path", ".", "split", "(", "'/'", ",", "1", ")", "[", "1", "]", "prefix", "=", "'%s/%s/%s/'", "%", "(", "prefix", ",", "l_mtime", ",", "size", ")", "return", "prefix" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
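The prefix returned by _create_container encodes the segment container, the original object name, the local mtime, and the size, so each upload's segments sort together. A quick illustration of just the string handling, with made-up values:

path = 'photos/2016/cat.jpg'
l_mtime = 1455208417.0
size = 1048576

container = path.split('/', 1)[0] + '_segments'    # 'photos_segments'
prefix = container + '/' + path.split('/', 1)[1]   # 'photos_segments/2016/cat.jpg'
prefix = '%s/%s/%s/' % (prefix, l_mtime, size)
print(prefix)  # photos_segments/2016/cat.jpg/1455208417.0/1048576/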
test
cli_tempurl
Generates a TempURL and sends that to the context.io_manager's stdout. See :py:mod:`swiftly.cli.tempurl` for context usage information. See :py:class:`CLITempURL` for more information. :param context: The :py:class:`swiftly.cli.context.CLIContext` to use. :param method: The method for the TempURL (GET, PUT, etc.) :param path: The path the TempURL should direct to. :param seconds: The number of seconds the TempURL should be good for. Default: 3600 :param use_container: If True, will create a container level TempURL using X-Container-Meta-Temp-Url-Key instead of X-Account-Meta-Temp-Url-Key.
swiftly/cli/tempurl.py
def cli_tempurl(context, method, path, seconds=None, use_container=False):
    """
    Generates a TempURL and sends that to the context.io_manager's
    stdout.

    See :py:mod:`swiftly.cli.tempurl` for context usage information.

    See :py:class:`CLITempURL` for more information.

    :param context: The :py:class:`swiftly.cli.context.CLIContext` to
        use.
    :param method: The method for the TempURL (GET, PUT, etc.)
    :param path: The path the TempURL should direct to.
    :param seconds: The number of seconds the TempURL should be good
        for. Default: 3600
    :param use_container: If True, will create a container level
        TempURL using X-Container-Meta-Temp-Url-Key instead of
        X-Account-Meta-Temp-Url-Key.
    """
    with contextlib.nested(
            context.io_manager.with_stdout(),
            context.client_manager.with_client()) as (fp, client):
        method = method.upper()
        path = path.lstrip('/')
        seconds = seconds if seconds is not None else 3600
        if '/' not in path:
            raise ReturnCode(
                'invalid tempurl path %r; should have a / within it' % path)
        if use_container:
            key_type = 'container'
            container = path.split('/', 1)[0]
            status, reason, headers, contents = \
                client.head_container(container)
        else:
            key_type = 'account'
            status, reason, headers, contents = \
                client.head_account()
        if status // 100 != 2:
            raise ReturnCode(
                'obtaining X-%s-Meta-Temp-Url-Key: %s %s' %
                (key_type.title(), status, reason))
        key = headers.get('x-%s-meta-temp-url-key' % key_type)
        if not key:
            raise ReturnCode(
                'there is no X-%s-Meta-Temp-Url-Key set for this %s' %
                (key_type.title(), key_type))
        url = client.storage_url + '/' + path
        fp.write(generate_temp_url(method, url, seconds, key))
        fp.write('\n')
        fp.flush()
def cli_tempurl(context, method, path, seconds=None, use_container=False):
    """
    Generates a TempURL and sends that to the context.io_manager's
    stdout.

    See :py:mod:`swiftly.cli.tempurl` for context usage information.

    See :py:class:`CLITempURL` for more information.

    :param context: The :py:class:`swiftly.cli.context.CLIContext` to
        use.
    :param method: The method for the TempURL (GET, PUT, etc.)
    :param path: The path the TempURL should direct to.
    :param seconds: The number of seconds the TempURL should be good
        for. Default: 3600
    :param use_container: If True, will create a container level
        TempURL using X-Container-Meta-Temp-Url-Key instead of
        X-Account-Meta-Temp-Url-Key.
    """
    with contextlib.nested(
            context.io_manager.with_stdout(),
            context.client_manager.with_client()) as (fp, client):
        method = method.upper()
        path = path.lstrip('/')
        seconds = seconds if seconds is not None else 3600
        if '/' not in path:
            raise ReturnCode(
                'invalid tempurl path %r; should have a / within it' % path)
        if use_container:
            key_type = 'container'
            container = path.split('/', 1)[0]
            status, reason, headers, contents = \
                client.head_container(container)
        else:
            key_type = 'account'
            status, reason, headers, contents = \
                client.head_account()
        if status // 100 != 2:
            raise ReturnCode(
                'obtaining X-%s-Meta-Temp-Url-Key: %s %s' %
                (key_type.title(), status, reason))
        key = headers.get('x-%s-meta-temp-url-key' % key_type)
        if not key:
            raise ReturnCode(
                'there is no X-%s-Meta-Temp-Url-Key set for this %s' %
                (key_type.title(), key_type))
        url = client.storage_url + '/' + path
        fp.write(generate_temp_url(method, url, seconds, key))
        fp.write('\n')
        fp.flush()
[ "Generates", "a", "TempURL", "and", "sends", "that", "to", "the", "context", ".", "io_manager", "s", "stdout", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/tempurl.py#L32-L81
[ "def", "cli_tempurl", "(", "context", ",", "method", ",", "path", ",", "seconds", "=", "None", ",", "use_container", "=", "False", ")", ":", "with", "contextlib", ".", "nested", "(", "context", ".", "io_manager", ".", "with_stdout", "(", ")", ",", "context", ".", "client_manager", ".", "with_client", "(", ")", ")", "as", "(", "fp", ",", "client", ")", ":", "method", "=", "method", ".", "upper", "(", ")", "path", "=", "path", ".", "lstrip", "(", "'/'", ")", "seconds", "=", "seconds", "if", "seconds", "is", "not", "None", "else", "3600", "if", "'/'", "not", "in", "path", ":", "raise", "ReturnCode", "(", "'invalid tempurl path %r; should have a / within it'", "%", "path", ")", "if", "use_container", ":", "key_type", "=", "'container'", "container", "=", "path", ".", "split", "(", "'/'", ",", "1", ")", "[", "0", "]", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_container", "(", "container", ")", "else", ":", "key_type", "=", "'account'", "status", ",", "reason", ",", "headers", ",", "contents", "=", "client", ".", "head_account", "(", ")", "if", "status", "//", "100", "!=", "2", ":", "raise", "ReturnCode", "(", "'obtaining X-%s-Meta-Temp-Url-Key: %s %s'", "%", "(", "key_type", ".", "title", "(", ")", ",", "status", ",", "reason", ")", ")", "key", "=", "headers", ".", "get", "(", "'x-%s-meta-temp-url-key'", "%", "key_type", ")", "if", "not", "key", ":", "raise", "ReturnCode", "(", "'there is no X-%s-Meta-Temp-Url-Key set for this %s'", "%", "(", "key_type", ".", "title", "(", ")", ",", "key_type", ")", ")", "url", "=", "client", ".", "storage_url", "+", "'/'", "+", "path", "fp", ".", "write", "(", "generate_temp_url", "(", "method", ",", "url", ",", "seconds", ",", "key", ")", ")", "fp", ".", "write", "(", "'\\n'", ")", "fp", ".", "flush", "(", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
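generate_temp_url itself is not shown above; the usual OpenStack Swift formulation signs "METHOD\nexpires\npath" with the account or container key using HMAC-SHA1 and appends the signature and expiry as query parameters. A hedged sketch under that assumption (the key and storage URL below are placeholders):

import hmac
import time
from hashlib import sha1

def make_temp_url(method, storage_url, key, seconds):
    # Split https://host/v1/AUTH_acct/container/obj into host part and path.
    base, rest = storage_url.split('/v1/', 1)
    path = '/v1/' + rest
    expires = int(time.time() + seconds)
    body = '%s\n%s\n%s' % (method.upper(), expires, path)
    sig = hmac.new(key.encode('utf-8'), body.encode('utf-8'), sha1).hexdigest()
    return '%s%s?temp_url_sig=%s&temp_url_expires=%s' % (base, path, sig, expires)

print(make_temp_url(
    'GET', 'https://swift.example.com/v1/AUTH_test/container/object',
    'mysecretkey', 3600))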
test
StandardClient.auth
See :py:func:`swiftly.client.client.Client.auth`
swiftly/client/standardclient.py
def auth(self):
    """
    See :py:func:`swiftly.client.client.Client.auth`
    """
    self.reset()
    if not self.auth_url:
        raise ValueError('No Auth URL has been provided.')
    funcs = []
    if self.auth_methods:
        for method in self.auth_methods.split(','):
            funcs.append(getattr(self, '_' + method))
    if not funcs:
        if '1.0' in self.auth_url:
            funcs = [self._auth1, self._auth2key, self._auth2password]
            if not self.auth_tenant:
                funcs.append(self._auth2password_force_tenant)
        else:
            funcs = [self._auth2key, self._auth2password]
            if not self.auth_tenant:
                funcs.append(self._auth2password_force_tenant)
            funcs.append(self._auth1)
    info = []
    for func in funcs:
        status, reason = func()
        info.append('%s %s' % (status, reason))
        if status // 100 == 2:
            break
    else:
        raise self.HTTPException('Auth failure %r.' % info)
def auth(self):
    """
    See :py:func:`swiftly.client.client.Client.auth`
    """
    self.reset()
    if not self.auth_url:
        raise ValueError('No Auth URL has been provided.')
    funcs = []
    if self.auth_methods:
        for method in self.auth_methods.split(','):
            funcs.append(getattr(self, '_' + method))
    if not funcs:
        if '1.0' in self.auth_url:
            funcs = [self._auth1, self._auth2key, self._auth2password]
            if not self.auth_tenant:
                funcs.append(self._auth2password_force_tenant)
        else:
            funcs = [self._auth2key, self._auth2password]
            if not self.auth_tenant:
                funcs.append(self._auth2password_force_tenant)
            funcs.append(self._auth1)
    info = []
    for func in funcs:
        status, reason = func()
        info.append('%s %s' % (status, reason))
        if status // 100 == 2:
            break
    else:
        raise self.HTTPException('Auth failure %r.' % info)
[ "See", ":", "py", ":", "func", ":", "swiftly", ".", "client", ".", "client", ".", "Client", ".", "auth" ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/standardclient.py#L222-L250
[ "def", "auth", "(", "self", ")", ":", "self", ".", "reset", "(", ")", "if", "not", "self", ".", "auth_url", ":", "raise", "ValueError", "(", "'No Auth URL has been provided.'", ")", "funcs", "=", "[", "]", "if", "self", ".", "auth_methods", ":", "for", "method", "in", "self", ".", "auth_methods", ".", "split", "(", "','", ")", ":", "funcs", ".", "append", "(", "getattr", "(", "self", ",", "'_'", "+", "method", ")", ")", "if", "not", "funcs", ":", "if", "'1.0'", "in", "self", ".", "auth_url", ":", "funcs", "=", "[", "self", ".", "_auth1", ",", "self", ".", "_auth2key", ",", "self", ".", "_auth2password", "]", "if", "not", "self", ".", "auth_tenant", ":", "funcs", ".", "append", "(", "self", ".", "_auth2password_force_tenant", ")", "else", ":", "funcs", "=", "[", "self", ".", "_auth2key", ",", "self", ".", "_auth2password", "]", "if", "not", "self", ".", "auth_tenant", ":", "funcs", ".", "append", "(", "self", ".", "_auth2password_force_tenant", ")", "funcs", ".", "append", "(", "self", ".", "_auth1", ")", "info", "=", "[", "]", "for", "func", "in", "funcs", ":", "status", ",", "reason", "=", "func", "(", ")", "info", ".", "append", "(", "'%s %s'", "%", "(", "status", ",", "reason", ")", ")", "if", "status", "//", "100", "==", "2", ":", "break", "else", ":", "raise", "self", ".", "HTTPException", "(", "'Auth failure %r.'", "%", "info", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
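StandardClient.auth builds an ordered list of candidate auth callables (explicit auth_methods first, otherwise a guess based on the auth URL) and keeps the first that answers 2xx, raising only after every candidate has failed. The fallback pattern in isolation, with stand-in functions:

def try_v2_key():
    return 401, 'Unauthorized'

def try_v2_password():
    return 200, 'OK'

def authenticate(candidates):
    attempts = []
    for func in candidates:
        status, reason = func()
        attempts.append('%s %s' % (status, reason))
        if status // 100 == 2:
            return func.__name__
    raise RuntimeError('Auth failure %r.' % attempts)

print(authenticate([try_v2_key, try_v2_password]))  # falls through to the second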
test
StandardClient.request
See :py:func:`swiftly.client.client.Client.request`
swiftly/client/standardclient.py
def request(self, method, path, contents, headers, decode_json=False, stream=False, query=None, cdn=False): """ See :py:func:`swiftly.client.client.Client.request` """ if query: path += '?' + '&'.join( ('%s=%s' % (quote(k), quote(v)) if v else quote(k)) for k, v in sorted(six.iteritems(query))) reset_func = self._default_reset_func if isinstance(contents, six.string_types): contents = StringIO(contents) tell = getattr(contents, 'tell', None) seek = getattr(contents, 'seek', None) if tell and seek: try: orig_pos = tell() reset_func = lambda: seek(orig_pos) except Exception: tell = seek = None elif not contents: reset_func = lambda: None status = 0 reason = 'Unknown' attempt = 0 while attempt < self.attempts: attempt += 1 if time() >= self.conn_discard: self.storage_conn = None self.cdn_conn = None if cdn: conn = self.cdn_conn conn_path = self.cdn_path else: conn = self.storage_conn conn_path = self.storage_path if not conn: parsed, conn = self._connect(cdn=cdn) if conn: if cdn: self.cdn_conn = conn self.cdn_path = conn_path = parsed.path else: self.storage_conn = conn self.storage_path = conn_path = parsed.path else: raise self.HTTPException( '%s %s failed: No connection' % (method, path)) self.conn_discard = time() + 4 titled_headers = dict((k.title(), v) for k, v in six.iteritems({ 'User-Agent': self.user_agent, 'X-Auth-Token': self.auth_token})) if headers: titled_headers.update( (k.title(), v) for k, v in six.iteritems(headers)) try: if not hasattr(contents, 'read'): if method not in self.no_content_methods and contents and \ 'Content-Length' not in titled_headers and \ 'Transfer-Encoding' not in titled_headers: titled_headers['Content-Length'] = str( len(contents or '')) verbose_headers = ' '.join( '%s: %s' % (k, v) for k, v in sorted(six.iteritems(titled_headers))) self.verbose( '> %s %s %s', method, conn_path + path, verbose_headers) conn.request( method, conn_path + path, contents, titled_headers) else: conn.putrequest(method, conn_path + path) content_length = None for h, v in sorted(six.iteritems(titled_headers)): if h == 'Content-Length': content_length = int(v) conn.putheader(h, v) if method not in self.no_content_methods and \ content_length is None: titled_headers['Transfer-Encoding'] = 'chunked' conn.putheader('Transfer-Encoding', 'chunked') conn.endheaders() verbose_headers = ' '.join( '%s: %s' % (k, v) for k, v in sorted(six.iteritems(titled_headers))) self.verbose( '> %s %s %s', method, conn_path + path, verbose_headers) if method not in self.no_content_methods and \ content_length is None: chunk = contents.read(self.chunk_size) while chunk: conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) chunk = contents.read(self.chunk_size) conn.send('0\r\n\r\n') else: left = content_length or 0 while left > 0: size = self.chunk_size if size > left: size = left chunk = contents.read(size) if not chunk: raise IOError('Early EOF from input') conn.send(chunk) left -= len(chunk) resp = conn.getresponse() status = resp.status reason = resp.reason hdrs = headers_to_dict(resp.getheaders()) if stream: value = resp else: value = resp.read() resp.close() except Exception as err: status = 0 reason = '%s %s' % (type(err), str(err)) hdrs = {} value = None self.verbose('< %s %s', status or '-', reason) self.verbose('< %s', hdrs) if status == 401: if stream: value.close() conn.close() self.auth() attempt -= 1 elif status and status // 100 != 5: if not stream and decode_json and status // 100 == 2: if value: value = json.loads(value.decode('utf-8')) else: value = None self.conn_discard = time() + 4 
return (status, reason, hdrs, value) else: if stream and value: value.close() conn.close() if reset_func: reset_func() self.sleep(2 ** attempt) raise self.HTTPException( '%s %s failed: %s %s' % (method, path, status, reason))
def request(self, method, path, contents, headers, decode_json=False, stream=False, query=None, cdn=False): """ See :py:func:`swiftly.client.client.Client.request` """ if query: path += '?' + '&'.join( ('%s=%s' % (quote(k), quote(v)) if v else quote(k)) for k, v in sorted(six.iteritems(query))) reset_func = self._default_reset_func if isinstance(contents, six.string_types): contents = StringIO(contents) tell = getattr(contents, 'tell', None) seek = getattr(contents, 'seek', None) if tell and seek: try: orig_pos = tell() reset_func = lambda: seek(orig_pos) except Exception: tell = seek = None elif not contents: reset_func = lambda: None status = 0 reason = 'Unknown' attempt = 0 while attempt < self.attempts: attempt += 1 if time() >= self.conn_discard: self.storage_conn = None self.cdn_conn = None if cdn: conn = self.cdn_conn conn_path = self.cdn_path else: conn = self.storage_conn conn_path = self.storage_path if not conn: parsed, conn = self._connect(cdn=cdn) if conn: if cdn: self.cdn_conn = conn self.cdn_path = conn_path = parsed.path else: self.storage_conn = conn self.storage_path = conn_path = parsed.path else: raise self.HTTPException( '%s %s failed: No connection' % (method, path)) self.conn_discard = time() + 4 titled_headers = dict((k.title(), v) for k, v in six.iteritems({ 'User-Agent': self.user_agent, 'X-Auth-Token': self.auth_token})) if headers: titled_headers.update( (k.title(), v) for k, v in six.iteritems(headers)) try: if not hasattr(contents, 'read'): if method not in self.no_content_methods and contents and \ 'Content-Length' not in titled_headers and \ 'Transfer-Encoding' not in titled_headers: titled_headers['Content-Length'] = str( len(contents or '')) verbose_headers = ' '.join( '%s: %s' % (k, v) for k, v in sorted(six.iteritems(titled_headers))) self.verbose( '> %s %s %s', method, conn_path + path, verbose_headers) conn.request( method, conn_path + path, contents, titled_headers) else: conn.putrequest(method, conn_path + path) content_length = None for h, v in sorted(six.iteritems(titled_headers)): if h == 'Content-Length': content_length = int(v) conn.putheader(h, v) if method not in self.no_content_methods and \ content_length is None: titled_headers['Transfer-Encoding'] = 'chunked' conn.putheader('Transfer-Encoding', 'chunked') conn.endheaders() verbose_headers = ' '.join( '%s: %s' % (k, v) for k, v in sorted(six.iteritems(titled_headers))) self.verbose( '> %s %s %s', method, conn_path + path, verbose_headers) if method not in self.no_content_methods and \ content_length is None: chunk = contents.read(self.chunk_size) while chunk: conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) chunk = contents.read(self.chunk_size) conn.send('0\r\n\r\n') else: left = content_length or 0 while left > 0: size = self.chunk_size if size > left: size = left chunk = contents.read(size) if not chunk: raise IOError('Early EOF from input') conn.send(chunk) left -= len(chunk) resp = conn.getresponse() status = resp.status reason = resp.reason hdrs = headers_to_dict(resp.getheaders()) if stream: value = resp else: value = resp.read() resp.close() except Exception as err: status = 0 reason = '%s %s' % (type(err), str(err)) hdrs = {} value = None self.verbose('< %s %s', status or '-', reason) self.verbose('< %s', hdrs) if status == 401: if stream: value.close() conn.close() self.auth() attempt -= 1 elif status and status // 100 != 5: if not stream and decode_json and status // 100 == 2: if value: value = json.loads(value.decode('utf-8')) else: value = None self.conn_discard = time() + 4 
return (status, reason, hdrs, value) else: if stream and value: value.close() conn.close() if reset_func: reset_func() self.sleep(2 ** attempt) raise self.HTTPException( '%s %s failed: %s %s' % (method, path, status, reason))
[ "See", ":", "py", ":", "func", ":", "swiftly", ".", "client", ".", "client", ".", "Client", ".", "request" ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/standardclient.py#L464-L608
[ "def", "request", "(", "self", ",", "method", ",", "path", ",", "contents", ",", "headers", ",", "decode_json", "=", "False", ",", "stream", "=", "False", ",", "query", "=", "None", ",", "cdn", "=", "False", ")", ":", "if", "query", ":", "path", "+=", "'?'", "+", "'&'", ".", "join", "(", "(", "'%s=%s'", "%", "(", "quote", "(", "k", ")", ",", "quote", "(", "v", ")", ")", "if", "v", "else", "quote", "(", "k", ")", ")", "for", "k", ",", "v", "in", "sorted", "(", "six", ".", "iteritems", "(", "query", ")", ")", ")", "reset_func", "=", "self", ".", "_default_reset_func", "if", "isinstance", "(", "contents", ",", "six", ".", "string_types", ")", ":", "contents", "=", "StringIO", "(", "contents", ")", "tell", "=", "getattr", "(", "contents", ",", "'tell'", ",", "None", ")", "seek", "=", "getattr", "(", "contents", ",", "'seek'", ",", "None", ")", "if", "tell", "and", "seek", ":", "try", ":", "orig_pos", "=", "tell", "(", ")", "reset_func", "=", "lambda", ":", "seek", "(", "orig_pos", ")", "except", "Exception", ":", "tell", "=", "seek", "=", "None", "elif", "not", "contents", ":", "reset_func", "=", "lambda", ":", "None", "status", "=", "0", "reason", "=", "'Unknown'", "attempt", "=", "0", "while", "attempt", "<", "self", ".", "attempts", ":", "attempt", "+=", "1", "if", "time", "(", ")", ">=", "self", ".", "conn_discard", ":", "self", ".", "storage_conn", "=", "None", "self", ".", "cdn_conn", "=", "None", "if", "cdn", ":", "conn", "=", "self", ".", "cdn_conn", "conn_path", "=", "self", ".", "cdn_path", "else", ":", "conn", "=", "self", ".", "storage_conn", "conn_path", "=", "self", ".", "storage_path", "if", "not", "conn", ":", "parsed", ",", "conn", "=", "self", ".", "_connect", "(", "cdn", "=", "cdn", ")", "if", "conn", ":", "if", "cdn", ":", "self", ".", "cdn_conn", "=", "conn", "self", ".", "cdn_path", "=", "conn_path", "=", "parsed", ".", "path", "else", ":", "self", ".", "storage_conn", "=", "conn", "self", ".", "storage_path", "=", "conn_path", "=", "parsed", ".", "path", "else", ":", "raise", "self", ".", "HTTPException", "(", "'%s %s failed: No connection'", "%", "(", "method", ",", "path", ")", ")", "self", ".", "conn_discard", "=", "time", "(", ")", "+", "4", "titled_headers", "=", "dict", "(", "(", "k", ".", "title", "(", ")", ",", "v", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "{", "'User-Agent'", ":", "self", ".", "user_agent", ",", "'X-Auth-Token'", ":", "self", ".", "auth_token", "}", ")", ")", "if", "headers", ":", "titled_headers", ".", "update", "(", "(", "k", ".", "title", "(", ")", ",", "v", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "headers", ")", ")", "try", ":", "if", "not", "hasattr", "(", "contents", ",", "'read'", ")", ":", "if", "method", "not", "in", "self", ".", "no_content_methods", "and", "contents", "and", "'Content-Length'", "not", "in", "titled_headers", "and", "'Transfer-Encoding'", "not", "in", "titled_headers", ":", "titled_headers", "[", "'Content-Length'", "]", "=", "str", "(", "len", "(", "contents", "or", "''", ")", ")", "verbose_headers", "=", "' '", ".", "join", "(", "'%s: %s'", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "sorted", "(", "six", ".", "iteritems", "(", "titled_headers", ")", ")", ")", "self", ".", "verbose", "(", "'> %s %s %s'", ",", "method", ",", "conn_path", "+", "path", ",", "verbose_headers", ")", "conn", ".", "request", "(", "method", ",", "conn_path", "+", "path", ",", "contents", ",", "titled_headers", ")", "else", ":", "conn", ".", "putrequest", "(", "method", ",", 
"conn_path", "+", "path", ")", "content_length", "=", "None", "for", "h", ",", "v", "in", "sorted", "(", "six", ".", "iteritems", "(", "titled_headers", ")", ")", ":", "if", "h", "==", "'Content-Length'", ":", "content_length", "=", "int", "(", "v", ")", "conn", ".", "putheader", "(", "h", ",", "v", ")", "if", "method", "not", "in", "self", ".", "no_content_methods", "and", "content_length", "is", "None", ":", "titled_headers", "[", "'Transfer-Encoding'", "]", "=", "'chunked'", "conn", ".", "putheader", "(", "'Transfer-Encoding'", ",", "'chunked'", ")", "conn", ".", "endheaders", "(", ")", "verbose_headers", "=", "' '", ".", "join", "(", "'%s: %s'", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "sorted", "(", "six", ".", "iteritems", "(", "titled_headers", ")", ")", ")", "self", ".", "verbose", "(", "'> %s %s %s'", ",", "method", ",", "conn_path", "+", "path", ",", "verbose_headers", ")", "if", "method", "not", "in", "self", ".", "no_content_methods", "and", "content_length", "is", "None", ":", "chunk", "=", "contents", ".", "read", "(", "self", ".", "chunk_size", ")", "while", "chunk", ":", "conn", ".", "send", "(", "'%x\\r\\n%s\\r\\n'", "%", "(", "len", "(", "chunk", ")", ",", "chunk", ")", ")", "chunk", "=", "contents", ".", "read", "(", "self", ".", "chunk_size", ")", "conn", ".", "send", "(", "'0\\r\\n\\r\\n'", ")", "else", ":", "left", "=", "content_length", "or", "0", "while", "left", ">", "0", ":", "size", "=", "self", ".", "chunk_size", "if", "size", ">", "left", ":", "size", "=", "left", "chunk", "=", "contents", ".", "read", "(", "size", ")", "if", "not", "chunk", ":", "raise", "IOError", "(", "'Early EOF from input'", ")", "conn", ".", "send", "(", "chunk", ")", "left", "-=", "len", "(", "chunk", ")", "resp", "=", "conn", ".", "getresponse", "(", ")", "status", "=", "resp", ".", "status", "reason", "=", "resp", ".", "reason", "hdrs", "=", "headers_to_dict", "(", "resp", ".", "getheaders", "(", ")", ")", "if", "stream", ":", "value", "=", "resp", "else", ":", "value", "=", "resp", ".", "read", "(", ")", "resp", ".", "close", "(", ")", "except", "Exception", "as", "err", ":", "status", "=", "0", "reason", "=", "'%s %s'", "%", "(", "type", "(", "err", ")", ",", "str", "(", "err", ")", ")", "hdrs", "=", "{", "}", "value", "=", "None", "self", ".", "verbose", "(", "'< %s %s'", ",", "status", "or", "'-'", ",", "reason", ")", "self", ".", "verbose", "(", "'< %s'", ",", "hdrs", ")", "if", "status", "==", "401", ":", "if", "stream", ":", "value", ".", "close", "(", ")", "conn", ".", "close", "(", ")", "self", ".", "auth", "(", ")", "attempt", "-=", "1", "elif", "status", "and", "status", "//", "100", "!=", "5", ":", "if", "not", "stream", "and", "decode_json", "and", "status", "//", "100", "==", "2", ":", "if", "value", ":", "value", "=", "json", ".", "loads", "(", "value", ".", "decode", "(", "'utf-8'", ")", ")", "else", ":", "value", "=", "None", "self", ".", "conn_discard", "=", "time", "(", ")", "+", "4", "return", "(", "status", ",", "reason", ",", "hdrs", ",", "value", ")", "else", ":", "if", "stream", "and", "value", ":", "value", ".", "close", "(", ")", "conn", ".", "close", "(", ")", "if", "reset_func", ":", "reset_func", "(", ")", "self", ".", "sleep", "(", "2", "**", "attempt", ")", "raise", "self", ".", "HTTPException", "(", "'%s %s failed: %s %s'", "%", "(", "method", ",", "path", ",", "status", ",", "reason", ")", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
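When StandardClient.request has no Content-Length to send, it frames the body itself as HTTP chunked transfer encoding: each chunk goes out as a hex length, CRLF, the data, CRLF, and a zero-length chunk ends the stream; it also retries with exponential backoff and re-authenticates on 401. A standalone sketch of just the chunked framing (no sockets involved):

def chunked_frames(chunks):
    # Yield wire-format frames for Transfer-Encoding: chunked.
    for chunk in chunks:
        if chunk:
            yield ('%x\r\n' % len(chunk)).encode('ascii') + chunk + b'\r\n'
    yield b'0\r\n\r\n'

for frame in chunked_frames([b'hello ', b'world']):
    print(frame)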
test
StandardClient.reset
See :py:func:`swiftly.client.client.Client.reset`
swiftly/client/standardclient.py
def reset(self):
    """
    See :py:func:`swiftly.client.client.Client.reset`
    """
    for conn in (self.storage_conn, self.cdn_conn):
        if conn:
            try:
                conn.close()
            except Exception:
                pass
    self.storage_conn = None
    self.cdn_conn = None
def reset(self):
    """
    See :py:func:`swiftly.client.client.Client.reset`
    """
    for conn in (self.storage_conn, self.cdn_conn):
        if conn:
            try:
                conn.close()
            except Exception:
                pass
    self.storage_conn = None
    self.cdn_conn = None
[ "See", ":", "py", ":", "func", ":", "swiftly", ".", "client", ".", "client", ".", "Client", ".", "reset" ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/standardclient.py#L610-L621
[ "def", "reset", "(", "self", ")", ":", "for", "conn", "in", "(", "self", ".", "storage_conn", ",", "self", ".", "cdn_conn", ")", ":", "if", "conn", ":", "try", ":", "conn", ".", "close", "(", ")", "except", "Exception", ":", "pass", "self", ".", "storage_conn", "=", "None", "self", ".", "cdn_conn", "=", "None" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
StandardClient.get_account_hash
See :py:func:`swiftly.client.client.Client.get_account_hash`
swiftly/client/standardclient.py
def get_account_hash(self):
    """
    See :py:func:`swiftly.client.client.Client.get_account_hash`
    """
    if not (self.storage_url or self.storage_path):
        self.auth()
    return (self.storage_url or self.storage_path).rsplit('/', 1)[1]
def get_account_hash(self):
    """
    See :py:func:`swiftly.client.client.Client.get_account_hash`
    """
    if not (self.storage_url or self.storage_path):
        self.auth()
    return (self.storage_url or self.storage_path).rsplit('/', 1)[1]
[ "See", ":", "py", ":", "func", ":", "swiftly", ".", "client", ".", "client", ".", "Client", ".", "get_account_hash" ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/client/standardclient.py#L623-L629
[ "def", "get_account_hash", "(", "self", ")", ":", "if", "not", "(", "self", ".", "storage_url", "or", "self", ".", "storage_path", ")", ":", "self", ".", "auth", "(", ")", "return", "(", "self", ".", "storage_url", "or", "self", ".", "storage_path", ")", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "1", "]" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
cli_trans
Translates any information that can be determined from the x_trans_id and sends that to the context.io_manager's stdout. See :py:mod:`swiftly.cli.trans` for context usage information. See :py:class:`CLITrans` for more information.
swiftly/cli/trans.py
def cli_trans(context, x_trans_id):
    """
    Translates any information that can be determined from the
    x_trans_id and sends that to the context.io_manager's stdout.

    See :py:mod:`swiftly.cli.trans` for context usage information.

    See :py:class:`CLITrans` for more information.
    """
    with context.io_manager.with_stdout() as fp:
        trans_time = get_trans_id_time(x_trans_id)
        trans_info = x_trans_id[34:]
        msg = 'X-Trans-Id: ' + x_trans_id + '\n'
        if not trans_time:
            msg += 'Time Stamp: None, old style id with no time ' \
                'embedded\nUTC Time: None, old style id with no time ' \
                'embedded\n'
        else:
            msg += 'Time Stamp: %s\nUTC Time: %s\n' % (
                trans_time,
                time.strftime(
                    '%a %Y-%m-%d %H:%M:%S UTC', time.gmtime(trans_time)))
        msg += 'Additional Info: ' + trans_info + '\n'
        fp.write(msg)
        fp.flush()
def cli_trans(context, x_trans_id):
    """
    Translates any information that can be determined from the
    x_trans_id and sends that to the context.io_manager's stdout.

    See :py:mod:`swiftly.cli.trans` for context usage information.

    See :py:class:`CLITrans` for more information.
    """
    with context.io_manager.with_stdout() as fp:
        trans_time = get_trans_id_time(x_trans_id)
        trans_info = x_trans_id[34:]
        msg = 'X-Trans-Id: ' + x_trans_id + '\n'
        if not trans_time:
            msg += 'Time Stamp: None, old style id with no time ' \
                'embedded\nUTC Time: None, old style id with no time ' \
                'embedded\n'
        else:
            msg += 'Time Stamp: %s\nUTC Time: %s\n' % (
                trans_time,
                time.strftime(
                    '%a %Y-%m-%d %H:%M:%S UTC', time.gmtime(trans_time)))
        msg += 'Additional Info: ' + trans_info + '\n'
        fp.write(msg)
        fp.flush()
[ "Translates", "any", "information", "that", "can", "be", "determined", "from", "the", "x_trans_id", "and", "sends", "that", "to", "the", "context", ".", "io_manager", "s", "stdout", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/trans.py#L31-L55
[ "def", "cli_trans", "(", "context", ",", "x_trans_id", ")", ":", "with", "context", ".", "io_manager", ".", "with_stdout", "(", ")", "as", "fp", ":", "trans_time", "=", "get_trans_id_time", "(", "x_trans_id", ")", "trans_info", "=", "x_trans_id", "[", "34", ":", "]", "msg", "=", "'X-Trans-Id: '", "+", "x_trans_id", "+", "'\\n'", "if", "not", "trans_time", ":", "msg", "+=", "'Time Stamp: None, old style id with no time '", "'embedded\\nUTC Time: None, old style id with no time '", "'embedded\\n'", "else", ":", "msg", "+=", "'Time Stamp: %s\\nUTC Time: %s\\n'", "%", "(", "trans_time", ",", "time", ".", "strftime", "(", "'%a %Y-%m-%d %H:%M:%S UTC'", ",", "time", ".", "gmtime", "(", "trans_time", ")", ")", ")", "msg", "+=", "'Additional Info: '", "+", "trans_info", "+", "'\\n'", "fp", ".", "write", "(", "msg", ")", "fp", ".", "flush", "(", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
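Once get_trans_id_time has recovered an epoch timestamp from the transaction id (newer ids embed one, older ids yield None), the rest is plain time-module formatting. A small sketch assuming the timestamp has already been extracted:

import time

def describe(trans_time):
    if not trans_time:
        return 'UTC Time: None, old style id with no time embedded'
    return 'UTC Time: ' + time.strftime(
        '%a %Y-%m-%d %H:%M:%S UTC', time.gmtime(trans_time))

print(describe(None))
print(describe(1455208417))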
test
cli_help
Outputs help information. See :py:mod:`swiftly.cli.help` for context usage information. See :py:class:`CLIHelp` for more information. :param context: The :py:class:`swiftly.cli.context.CLIContext` to use. :param command_name: The command_name to output help information for, or set to None or an empty string to output the general help information. :param general_parser: The :py:class:`swiftly.cli.optionparser.OptionParser` for general usage. :param command_parsers: A dict of (name, :py:class:`CLICommand`) for specific command usage.
swiftly/cli/help.py
def cli_help(context, command_name, general_parser, command_parsers):
    """
    Outputs help information.

    See :py:mod:`swiftly.cli.help` for context usage information.

    See :py:class:`CLIHelp` for more information.

    :param context: The :py:class:`swiftly.cli.context.CLIContext` to
        use.
    :param command_name: The command_name to output help information
        for, or set to None or an empty string to output the general
        help information.
    :param general_parser: The
        :py:class:`swiftly.cli.optionparser.OptionParser` for general
        usage.
    :param command_parsers: A dict of (name, :py:class:`CLICommand`)
        for specific command usage.
    """
    if command_name == 'for':
        command_name = 'fordo'
    with context.io_manager.with_stdout() as stdout:
        if not command_name:
            general_parser.print_help(stdout)
        elif command_name in command_parsers:
            command_parsers[command_name].option_parser.print_help(stdout)
        else:
            raise ReturnCode('unknown command %r' % command_name)
def cli_help(context, command_name, general_parser, command_parsers):
    """
    Outputs help information.

    See :py:mod:`swiftly.cli.help` for context usage information.

    See :py:class:`CLIHelp` for more information.

    :param context: The :py:class:`swiftly.cli.context.CLIContext` to
        use.
    :param command_name: The command_name to output help information
        for, or set to None or an empty string to output the general
        help information.
    :param general_parser: The
        :py:class:`swiftly.cli.optionparser.OptionParser` for general
        usage.
    :param command_parsers: A dict of (name, :py:class:`CLICommand`)
        for specific command usage.
    """
    if command_name == 'for':
        command_name = 'fordo'
    with context.io_manager.with_stdout() as stdout:
        if not command_name:
            general_parser.print_help(stdout)
        elif command_name in command_parsers:
            command_parsers[command_name].option_parser.print_help(stdout)
        else:
            raise ReturnCode('unknown command %r' % command_name)
[ "Outputs", "help", "information", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/help.py#L28-L55
[ "def", "cli_help", "(", "context", ",", "command_name", ",", "general_parser", ",", "command_parsers", ")", ":", "if", "command_name", "==", "'for'", ":", "command_name", "=", "'fordo'", "with", "context", ".", "io_manager", ".", "with_stdout", "(", ")", "as", "stdout", ":", "if", "not", "command_name", ":", "general_parser", ".", "print_help", "(", "stdout", ")", "elif", "command_name", "in", "command_parsers", ":", "command_parsers", "[", "command_name", "]", ".", "option_parser", ".", "print_help", "(", "stdout", ")", "else", ":", "raise", "ReturnCode", "(", "'unknown command %r'", "%", "command_name", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
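Help output is dictionary dispatch: no name prints general usage, a known name prints that command's parser help, anything else is an error, and the single alias maps "for" to "fordo". A stripped-down sketch with argparse standing in for swiftly's own OptionParser (an assumption made purely to keep the example runnable):

import argparse
import sys

general = argparse.ArgumentParser(prog='swiftly')
commands = {'put': argparse.ArgumentParser(prog='swiftly put'),
            'fordo': argparse.ArgumentParser(prog='swiftly for')}

def show_help(name):
    if name == 'for':
        name = 'fordo'
    if not name:
        general.print_help(sys.stdout)
    elif name in commands:
        commands[name].print_help(sys.stdout)
    else:
        raise SystemExit('unknown command %r' % name)

show_help('for')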
test
FileLikeIter.read
read([size]) -> read at most size bytes, returned as a string. If the size argument is negative or omitted, read until EOF is reached. Notice that when in non-blocking mode, less data than what was requested may be returned, even if no size parameter was given.
swiftly/filelikeiter.py
def read(self, size=-1):
    """
    read([size]) -> read at most size bytes, returned as a string.

    If the size argument is negative or omitted, read until EOF is
    reached. Notice that when in non-blocking mode, less data than
    what was requested may be returned, even if no size parameter was
    given.
    """
    if self.left is not None:
        size = min(size, self.left)
    if self.closed:
        raise ValueError('I/O operation on closed file')
    if size < 0:
        return ''.join(self)
    elif not size:
        chunk = ''
    elif self.buf:
        chunk = self.buf
        self.buf = None
    else:
        try:
            chunk = next(self.iterator)
        except StopIteration:
            return ''
    if len(chunk) > size:
        self.buf = chunk[size:]
        chunk = chunk[:size]
    if self.left is not None:
        self.left -= len(chunk)
    return chunk
def read(self, size=-1):
    """
    read([size]) -> read at most size bytes, returned as a string.

    If the size argument is negative or omitted, read until EOF is
    reached. Notice that when in non-blocking mode, less data than
    what was requested may be returned, even if no size parameter was
    given.
    """
    if self.left is not None:
        size = min(size, self.left)
    if self.closed:
        raise ValueError('I/O operation on closed file')
    if size < 0:
        return ''.join(self)
    elif not size:
        chunk = ''
    elif self.buf:
        chunk = self.buf
        self.buf = None
    else:
        try:
            chunk = next(self.iterator)
        except StopIteration:
            return ''
    if len(chunk) > size:
        self.buf = chunk[size:]
        chunk = chunk[:size]
    if self.left is not None:
        self.left -= len(chunk)
    return chunk
[ "read", "(", "[", "size", "]", ")", "-", ">", "read", "at", "most", "size", "bytes", "returned", "as", "a", "string", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/filelikeiter.py#L58-L87
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "if", "self", ".", "left", "is", "not", "None", ":", "size", "=", "min", "(", "size", ",", "self", ".", "left", ")", "if", "self", ".", "closed", ":", "raise", "ValueError", "(", "'I/O operation on closed file'", ")", "if", "size", "<", "0", ":", "return", "''", ".", "join", "(", "self", ")", "elif", "not", "size", ":", "chunk", "=", "''", "elif", "self", ".", "buf", ":", "chunk", "=", "self", ".", "buf", "self", ".", "buf", "=", "None", "else", ":", "try", ":", "chunk", "=", "next", "(", "self", ".", "iterator", ")", "except", "StopIteration", ":", "return", "''", "if", "len", "(", "chunk", ")", ">", "size", ":", "self", ".", "buf", "=", "chunk", "[", "size", ":", "]", "chunk", "=", "chunk", "[", ":", "size", "]", "if", "self", ".", "left", "is", "not", "None", ":", "self", ".", "left", "-=", "len", "(", "chunk", ")", "return", "chunk" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
FileLikeIter.readline
readline([size]) -> next line from the file, as a string. Retain newline. A non-negative size argument limits the maximum number of bytes to return (an incomplete line may be returned then). Return an empty string at EOF.
swiftly/filelikeiter.py
def readline(self, size=-1): """ readline([size]) -> next line from the file, as a string. Retain newline. A non-negative size argument limits the maximum number of bytes to return (an incomplete line may be returned then). Return an empty string at EOF. """ if self.closed: raise ValueError('I/O operation on closed file') data = '' while '\n' not in data and (size < 0 or len(data) < size): if size < 0: chunk = self.read(1024) else: chunk = self.read(size - len(data)) if not chunk: break data += chunk if '\n' in data: data, sep, rest = data.partition('\n') data += sep if self.buf: self.buf = rest + self.buf else: self.buf = rest return data
def readline(self, size=-1): """ readline([size]) -> next line from the file, as a string. Retain newline. A non-negative size argument limits the maximum number of bytes to return (an incomplete line may be returned then). Return an empty string at EOF. """ if self.closed: raise ValueError('I/O operation on closed file') data = '' while '\n' not in data and (size < 0 or len(data) < size): if size < 0: chunk = self.read(1024) else: chunk = self.read(size - len(data)) if not chunk: break data += chunk if '\n' in data: data, sep, rest = data.partition('\n') data += sep if self.buf: self.buf = rest + self.buf else: self.buf = rest return data
[ "readline", "(", "[", "size", "]", ")", "-", ">", "next", "line", "from", "the", "file", "as", "a", "string", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/filelikeiter.py#L89-L115
[ "def", "readline", "(", "self", ",", "size", "=", "-", "1", ")", ":", "if", "self", ".", "closed", ":", "raise", "ValueError", "(", "'I/O operation on closed file'", ")", "data", "=", "''", "while", "'\\n'", "not", "in", "data", "and", "(", "size", "<", "0", "or", "len", "(", "data", ")", "<", "size", ")", ":", "if", "size", "<", "0", ":", "chunk", "=", "self", ".", "read", "(", "1024", ")", "else", ":", "chunk", "=", "self", ".", "read", "(", "size", "-", "len", "(", "data", ")", ")", "if", "not", "chunk", ":", "break", "data", "+=", "chunk", "if", "'\\n'", "in", "data", ":", "data", ",", "sep", ",", "rest", "=", "data", ".", "partition", "(", "'\\n'", ")", "data", "+=", "sep", "if", "self", ".", "buf", ":", "self", ".", "buf", "=", "rest", "+", "self", ".", "buf", "else", ":", "self", ".", "buf", "=", "rest", "return", "data" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
FileLikeIter.readlines
readlines([size]) -> list of strings, each a line from the file. Call readline() repeatedly and return a list of the lines so read. The optional size argument, if given, is an approximate bound on the total number of bytes in the lines returned.
swiftly/filelikeiter.py
def readlines(self, sizehint=-1): """ readlines([size]) -> list of strings, each a line from the file. Call readline() repeatedly and return a list of the lines so read. The optional size argument, if given, is an approximate bound on the total number of bytes in the lines returned. """ if self.closed: raise ValueError('I/O operation on closed file') lines = [] while True: line = self.readline(sizehint) if not line: break lines.append(line) if sizehint >= 0: sizehint -= len(line) if sizehint <= 0: break return lines
def readlines(self, sizehint=-1): """ readlines([size]) -> list of strings, each a line from the file. Call readline() repeatedly and return a list of the lines so read. The optional size argument, if given, is an approximate bound on the total number of bytes in the lines returned. """ if self.closed: raise ValueError('I/O operation on closed file') lines = [] while True: line = self.readline(sizehint) if not line: break lines.append(line) if sizehint >= 0: sizehint -= len(line) if sizehint <= 0: break return lines
[ "readlines", "(", "[", "size", "]", ")", "-", ">", "list", "of", "strings", "each", "a", "line", "from", "the", "file", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/filelikeiter.py#L117-L137
[ "def", "readlines", "(", "self", ",", "sizehint", "=", "-", "1", ")", ":", "if", "self", ".", "closed", ":", "raise", "ValueError", "(", "'I/O operation on closed file'", ")", "lines", "=", "[", "]", "while", "True", ":", "line", "=", "self", ".", "readline", "(", "sizehint", ")", "if", "not", "line", ":", "break", "lines", ".", "append", "(", "line", ")", "if", "sizehint", ">=", "0", ":", "sizehint", "-=", "len", "(", "line", ")", "if", "sizehint", "<=", "0", ":", "break", "return", "lines" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
FileLikeIter.is_empty
Check whether the "file" is empty reading the single byte.
swiftly/filelikeiter.py
def is_empty(self): """ Check whether the "file" is empty reading the single byte. """ something = self.read(1) if something: if self.buf: self.buf = something + self.buf else: self.buf = something return False else: return True
def is_empty(self): """ Check whether the "file" is empty reading the single byte. """ something = self.read(1) if something: if self.buf: self.buf = something + self.buf else: self.buf = something return False else: return True
[ "Check", "whether", "the", "file", "is", "empty", "reading", "the", "single", "byte", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/filelikeiter.py#L139-L151
[ "def", "is_empty", "(", "self", ")", ":", "something", "=", "self", ".", "read", "(", "1", ")", "if", "something", ":", "if", "self", ".", "buf", ":", "self", ".", "buf", "=", "something", "+", "self", ".", "buf", "else", ":", "self", ".", "buf", "=", "something", "return", "False", "else", ":", "return", "True" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
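The four FileLikeIter methods above (read, readline, readlines, is_empty) give a file-like view over an iterator of string chunks, buffering any leftover data in self.buf. A minimal usage sketch follows; it assumes the swiftly package is importable and that the FileLikeIter constructor (not shown in these records) accepts an iterable of chunks:

from swiftly.filelikeiter import FileLikeIter

# Wrap an iterator whose chunk boundaries do not line up with line breaks.
flo = FileLikeIter(iter(['first line\nsec', 'ond line\n', 'tail']))

print(flo.readline())   # 'first line\n'  (leftover 'sec' stays in flo.buf)
print(flo.readline())   # 'second line\n' (stitched together from two chunks)
print(flo.read(4))      # 'tail'
print(flo.is_empty())   # True once the underlying iterator is exhausted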
test
cli_encrypt
Encrypts context.io_manager's stdin and sends that to context.io_manager's stdout. This can be useful to encrypt to disk before attempting to upload, allowing upload retries and segmented encrypted objects. See :py:mod:`swiftly.cli.encrypt` for context usage information. See :py:class:`CLIEncrypt` for more information.
swiftly/cli/encrypt.py
def cli_encrypt(context, key): """ Encrypts context.io_manager's stdin and sends that to context.io_manager's stdout. This can be useful to encrypt to disk before attempting to upload, allowing uploads retries and segmented encrypted objects. See :py:mod:`swiftly.cli.encrypt` for context usage information. See :py:class:`CLIEncrypt` for more information. """ with context.io_manager.with_stdout() as stdout: with context.io_manager.with_stdin() as stdin: for chunk in aes_encrypt(key, stdin, preamble=AES256CBC): stdout.write(chunk) stdout.flush()
def cli_encrypt(context, key): """ Encrypts context.io_manager's stdin and sends that to context.io_manager's stdout. This can be useful to encrypt to disk before attempting to upload, allowing uploads retries and segmented encrypted objects. See :py:mod:`swiftly.cli.encrypt` for context usage information. See :py:class:`CLIEncrypt` for more information. """ with context.io_manager.with_stdout() as stdout: with context.io_manager.with_stdin() as stdin: for chunk in aes_encrypt(key, stdin, preamble=AES256CBC): stdout.write(chunk) stdout.flush()
[ "Encrypts", "context", ".", "io_manager", "s", "stdin", "and", "sends", "that", "to", "context", ".", "io_manager", "s", "stdout", "." ]
gholt/swiftly
python
https://github.com/gholt/swiftly/blob/5bcc1c65323b1caf1f85adbefd9fc4988c072149/swiftly/cli/encrypt.py#L31-L47
[ "def", "cli_encrypt", "(", "context", ",", "key", ")", ":", "with", "context", ".", "io_manager", ".", "with_stdout", "(", ")", "as", "stdout", ":", "with", "context", ".", "io_manager", ".", "with_stdin", "(", ")", "as", "stdin", ":", "for", "chunk", "in", "aes_encrypt", "(", "key", ",", "stdin", ",", "preamble", "=", "AES256CBC", ")", ":", "stdout", ".", "write", "(", "chunk", ")", "stdout", ".", "flush", "(", ")" ]
5bcc1c65323b1caf1f85adbefd9fc4988c072149
test
get_build_commits
Determine the value for BUILD_COMMITS from the app and repository config. Resolves the previous BUILD_ALL_COMMITS = True/False option to BUILD_COMMITS = 'ALL'/'LAST' respectively.
leeroy/github.py
def get_build_commits(app, repo_config): """ Determine the value for BUILD_COMMITS from the app and repository config. Resolves the previous BUILD_ALL_COMMITS = True/False option to BUILD_COMMITS = 'ALL'/'LAST' respectively. """ build_commits = repo_config.get("build_commits") build_all_commits = repo_config.get("build_all_commits", app.config.get("BUILD_ALL_COMMITS")) if not build_commits and build_all_commits is not None: # Determine BUILD_COMMITS from legacy BUILD_ALL_COMMITS if build_all_commits: build_commits = BUILD_COMMITS_ALL else: build_commits = BUILD_COMMITS_LAST warnings.warn("BUILD_ALL_COMMITS is deprecated. Use the BUILD_COMMITS " "setting instead.", DeprecationWarning) elif not build_commits: # Determine BUILD_COMMITS from global app config. build_commits = app.config["BUILD_COMMITS"] return build_commits
def get_build_commits(app, repo_config): """ Determine the value for BUILD_COMMITS from the app and repository config. Resolves the previous BUILD_ALL_COMMITS = True/False option to BUILD_COMMITS = 'ALL'/'LAST' respectively. """ build_commits = repo_config.get("build_commits") build_all_commits = repo_config.get("build_all_commits", app.config.get("BUILD_ALL_COMMITS")) if not build_commits and build_all_commits is not None: # Determine BUILD_COMMITS from legacy BUILD_ALL_COMMITS if build_all_commits: build_commits = BUILD_COMMITS_ALL else: build_commits = BUILD_COMMITS_LAST warnings.warn("BUILD_ALL_COMMITS is deprecated. Use the BUILD_COMMITS " "setting instead.", DeprecationWarning) elif not build_commits: # Determine BUILD_COMMITS from global app config. build_commits = app.config["BUILD_COMMITS"] return build_commits
[ "Determine", "the", "value", "for", "BUILD_COMMITS", "from", "the", "app", "and", "repository", "config", ".", "Resolves", "the", "previous", "BUILD_ALL_COMMITS", "=", "True", "/", "False", "option", "to", "BUILD_COMMITS", "=", "ALL", "/", "LAST", "respectively", "." ]
litl/leeroy
python
https://github.com/litl/leeroy/blob/ab6565a3b63e9103d8c011d9c62a9c0ad589b051/leeroy/github.py#L71-L92
[ "def", "get_build_commits", "(", "app", ",", "repo_config", ")", ":", "build_commits", "=", "repo_config", ".", "get", "(", "\"build_commits\"", ")", "build_all_commits", "=", "repo_config", ".", "get", "(", "\"build_all_commits\"", ",", "app", ".", "config", ".", "get", "(", "\"BUILD_ALL_COMMITS\"", ")", ")", "if", "not", "build_commits", "and", "build_all_commits", "is", "not", "None", ":", "# Determine BUILD_COMMITS from legacy BUILD_ALL_COMMITS", "if", "build_all_commits", ":", "build_commits", "=", "BUILD_COMMITS_ALL", "else", ":", "build_commits", "=", "BUILD_COMMITS_LAST", "warnings", ".", "warn", "(", "\"BUILD_ALL_COMMITS is deprecated. Use the BUILD_COMMITS \"", "\"setting instead.\"", ",", "DeprecationWarning", ")", "elif", "not", "build_commits", ":", "# Determine BUILD_COMMITS from global app config.", "build_commits", "=", "app", ".", "config", "[", "\"BUILD_COMMITS\"", "]", "return", "build_commits" ]
ab6565a3b63e9103d8c011d9c62a9c0ad589b051
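For get_build_commits, the precedence is: the repository's own build_commits, then the legacy build_all_commits flag (repo-level or app-wide), then the application's BUILD_COMMITS default. A small sketch of that resolution, assuming leeroy is installed; the fake app below only needs a dict-like config attribute:

from types import SimpleNamespace

from leeroy.github import get_build_commits

app = SimpleNamespace(config={"BUILD_COMMITS": "LAST"})

# The legacy flag wins over the app default and is mapped to 'ALL'/'LAST'
# (a DeprecationWarning is emitted when it is used).
print(get_build_commits(app, {"build_all_commits": True}))  # 'ALL'

# An explicit per-repo setting is returned as-is.
print(get_build_commits(app, {"build_commits": "NEW"}))     # 'NEW'

# With neither set, the global BUILD_COMMITS default applies.
print(get_build_commits(app, {}))                           # 'LAST'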
test
get_status
Gets the status of a commit. .. note:: ``repo_name`` might not ever be anything other than ``repo_config['github_repo']``. :param app: Flask app for leeroy :param repo_config: configuration for the repo :param repo_name: The name of the owner/repo :param sha: SHA for the status we are looking for :return: returns json response of status
leeroy/github.py
def get_status(app, repo_config, repo_name, sha): """Gets the status of a commit. .. note:: ``repo_name`` might not ever be anything other than ``repo_config['github_repo']``. :param app: Flask app for leeroy :param repo_config: configuration for the repo :param repo_name: The name of the owner/repo :param sha: SHA for the status we are looking for :return: returns json response of status """ url = get_api_url(app, repo_config, github_status_url).format( repo_name=repo_name, sha=sha) logging.debug("Getting status for %s %s", repo_name, sha) s = get_session_for_repo(app, repo_config) response = s.get(url) if not response.ok: raise Exception("Unable to get status: {}".format(response.status_code)) return response
def get_status(app, repo_config, repo_name, sha): """Gets the status of a commit. .. note:: ``repo_name`` might not ever be anything other than ``repo_config['github_repo']``. :param app: Flask app for leeroy :param repo_config: configuration for the repo :param repo_name: The name of the owner/repo :param sha: SHA for the status we are looking for :return: returns json response of status """ url = get_api_url(app, repo_config, github_status_url).format( repo_name=repo_name, sha=sha) logging.debug("Getting status for %s %s", repo_name, sha) s = get_session_for_repo(app, repo_config) response = s.get(url) if not response.ok: raise Exception("Unable to get status: {}".format(response.status_code)) return response
[ "Gets", "the", "status", "of", "a", "commit", "." ]
litl/leeroy
python
https://github.com/litl/leeroy/blob/ab6565a3b63e9103d8c011d9c62a9c0ad589b051/leeroy/github.py#L155-L175
[ "def", "get_status", "(", "app", ",", "repo_config", ",", "repo_name", ",", "sha", ")", ":", "url", "=", "get_api_url", "(", "app", ",", "repo_config", ",", "github_status_url", ")", ".", "format", "(", "repo_name", "=", "repo_name", ",", "sha", "=", "sha", ")", "logging", ".", "debug", "(", "\"Getting status for %s %s\"", ",", "repo_name", ",", "sha", ")", "s", "=", "get_session_for_repo", "(", "app", ",", "repo_config", ")", "response", "=", "s", ".", "get", "(", "url", ")", "if", "not", "response", ".", "ok", ":", "raise", "Exception", "(", "\"Unable to get status: {}\"", ".", "format", "(", "response", ".", "status_code", ")", ")", "return", "response" ]
ab6565a3b63e9103d8c011d9c62a9c0ad589b051
test
get_pull_request
Data for a given pull request. :param app: Flask app :param repo_config: dict with ``github_repo`` key :param pull_request: the pull request number
leeroy/github.py
def get_pull_request(app, repo_config, pull_request): """Data for a given pull request. :param app: Flask app :param repo_config: dict with ``github_repo`` key :param pull_request: the pull request number """ response = get_api_response( app, repo_config, "/repos/{{repo_name}}/pulls/{0}".format(pull_request)) if not response.ok: raise Exception("Unable to get pull request: status code {}".format(response.status_code)) return response.json
def get_pull_request(app, repo_config, pull_request): """Data for a given pull request. :param app: Flask app :param repo_config: dict with ``github_repo`` key :param pull_request: the pull request number """ response = get_api_response( app, repo_config, "/repos/{{repo_name}}/pulls/{0}".format(pull_request)) if not response.ok: raise Exception("Unable to get pull request: status code {}".format(response.status_code)) return response.json
[ "Data", "for", "a", "given", "pull", "request", "." ]
litl/leeroy
python
https://github.com/litl/leeroy/blob/ab6565a3b63e9103d8c011d9c62a9c0ad589b051/leeroy/github.py#L236-L248
[ "def", "get_pull_request", "(", "app", ",", "repo_config", ",", "pull_request", ")", ":", "response", "=", "get_api_response", "(", "app", ",", "repo_config", ",", "\"/repos/{{repo_name}}/pulls/{0}\"", ".", "format", "(", "pull_request", ")", ")", "if", "not", "response", ".", "ok", ":", "raise", "Exception", "(", "\"Unable to get pull request: status code {}\"", ".", "format", "(", "response", ".", "status_code", ")", ")", "return", "response", ".", "json" ]
ab6565a3b63e9103d8c011d9c62a9c0ad589b051
test
get_pull_requests
Last 30 pull requests from a repository. :param app: Flask app :param repo_config: dict with ``github_repo`` key :returns: generator over the pull request data
leeroy/github.py
def get_pull_requests(app, repo_config): """Last 30 pull requests from a repository. :param app: Flask app :param repo_config: dict with ``github_repo`` key :returns: id for a pull request """ response = get_api_response(app, repo_config, "/repos/{repo_name}/pulls") if not response.ok: raise Exception("Unable to get pull requests: status code {}".format(response.status_code)) return (item for item in response.json)
def get_pull_requests(app, repo_config): """Last 30 pull requests from a repository. :param app: Flask app :param repo_config: dict with ``github_repo`` key :returns: id for a pull request """ response = get_api_response(app, repo_config, "/repos/{repo_name}/pulls") if not response.ok: raise Exception("Unable to get pull requests: status code {}".format(response.status_code)) return (item for item in response.json)
[ "Last", "30", "pull", "requests", "from", "a", "repository", "." ]
litl/leeroy
python
https://github.com/litl/leeroy/blob/ab6565a3b63e9103d8c011d9c62a9c0ad589b051/leeroy/github.py#L251-L262
[ "def", "get_pull_requests", "(", "app", ",", "repo_config", ")", ":", "response", "=", "get_api_response", "(", "app", ",", "repo_config", ",", "\"/repos/{repo_name}/pulls\"", ")", "if", "not", "response", ".", "ok", ":", "raise", "Exception", "(", "\"Unable to get pull requests: status code {}\"", ".", "format", "(", "response", ".", "status_code", ")", ")", "return", "(", "item", "for", "item", "in", "response", ".", "json", ")" ]
ab6565a3b63e9103d8c011d9c62a9c0ad589b051
test
Plugin.write
Write obj in elasticsearch. :param obj: value to be written in elasticsearch. :param resource_id: id for the resource. :return: id of the transaction.
oceandb_elasticsearch_driver/plugin.py
def write(self, obj, resource_id=None): """Write obj in elasticsearch. :param obj: value to be written in elasticsearch. :param resource_id: id for the resource. :return: id of the transaction. """ self.logger.debug('elasticsearch::write::{}'.format(resource_id)) if resource_id is not None: if self.driver._es.exists( index=self.driver._index, id=resource_id, doc_type='_doc' ): raise ValueError( "Resource \"{}\" already exists, use update instead".format(resource_id)) return self.driver._es.index( index=self.driver._index, id=resource_id, body=obj, doc_type='_doc', refresh='wait_for' )['_id']
def write(self, obj, resource_id=None): """Write obj in elasticsearch. :param obj: value to be written in elasticsearch. :param resource_id: id for the resource. :return: id of the transaction. """ self.logger.debug('elasticsearch::write::{}'.format(resource_id)) if resource_id is not None: if self.driver._es.exists( index=self.driver._index, id=resource_id, doc_type='_doc' ): raise ValueError( "Resource \"{}\" already exists, use update instead".format(resource_id)) return self.driver._es.index( index=self.driver._index, id=resource_id, body=obj, doc_type='_doc', refresh='wait_for' )['_id']
[ "Write", "obj", "in", "elasticsearch", ".", ":", "param", "obj", ":", "value", "to", "be", "written", "in", "elasticsearch", ".", ":", "param", "resource_id", ":", "id", "for", "the", "resource", ".", ":", "return", ":", "id", "of", "the", "transaction", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L33-L54
[ "def", "write", "(", "self", ",", "obj", ",", "resource_id", "=", "None", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::write::{}'", ".", "format", "(", "resource_id", ")", ")", "if", "resource_id", "is", "not", "None", ":", "if", "self", ".", "driver", ".", "_es", ".", "exists", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "doc_type", "=", "'_doc'", ")", ":", "raise", "ValueError", "(", "\"Resource \\\"{}\\\" already exists, use update instead\"", ".", "format", "(", "resource_id", ")", ")", "return", "self", ".", "driver", ".", "_es", ".", "index", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "body", "=", "obj", ",", "doc_type", "=", "'_doc'", ",", "refresh", "=", "'wait_for'", ")", "[", "'_id'", "]" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Plugin.read
Read object in elasticsearch using the resource_id. :param resource_id: id of the object to be read. :return: object value from elasticsearch.
oceandb_elasticsearch_driver/plugin.py
def read(self, resource_id): """Read object in elasticsearch using the resource_id. :param resource_id: id of the object to be read. :return: object value from elasticsearch. """ self.logger.debug('elasticsearch::read::{}'.format(resource_id)) return self.driver._es.get( index=self.driver._index, id=resource_id, doc_type='_doc' )['_source']
def read(self, resource_id): """Read object in elasticsearch using the resource_id. :param resource_id: id of the object to be read. :return: object value from elasticsearch. """ self.logger.debug('elasticsearch::read::{}'.format(resource_id)) return self.driver._es.get( index=self.driver._index, id=resource_id, doc_type='_doc' )['_source']
[ "Read", "object", "in", "elasticsearch", "using", "the", "resource_id", ".", ":", "param", "resource_id", ":", "id", "of", "the", "object", "to", "be", "read", ".", ":", "return", ":", "object", "value", "from", "elasticsearch", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L56-L66
[ "def", "read", "(", "self", ",", "resource_id", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::read::{}'", ".", "format", "(", "resource_id", ")", ")", "return", "self", ".", "driver", ".", "_es", ".", "get", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "doc_type", "=", "'_doc'", ")", "[", "'_source'", "]" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Plugin.update
Update an object in elasticsearch using the resource_id. :param obj: new metadata for the transaction. :param resource_id: id of the object to be updated. :return: id of the object.
oceandb_elasticsearch_driver/plugin.py
def update(self, obj, resource_id): """Update object in elasticsearch using the resource_id. :param metadata: new metadata for the transaction. :param resource_id: id of the object to be updated. :return: id of the object. """ self.logger.debug('elasticsearch::update::{}'.format(resource_id)) return self.driver._es.index( index=self.driver._index, id=resource_id, body=obj, doc_type='_doc', refresh='wait_for' )['_id']
def update(self, obj, resource_id): """Update object in elasticsearch using the resource_id. :param metadata: new metadata for the transaction. :param resource_id: id of the object to be updated. :return: id of the object. """ self.logger.debug('elasticsearch::update::{}'.format(resource_id)) return self.driver._es.index( index=self.driver._index, id=resource_id, body=obj, doc_type='_doc', refresh='wait_for' )['_id']
[ "Update", "object", "in", "elasticsearch", "using", "the", "resource_id", ".", ":", "param", "metadata", ":", "new", "metadata", "for", "the", "transaction", ".", ":", "param", "resource_id", ":", "id", "of", "the", "object", "to", "be", "updated", ".", ":", "return", ":", "id", "of", "the", "object", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L68-L81
[ "def", "update", "(", "self", ",", "obj", ",", "resource_id", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::update::{}'", ".", "format", "(", "resource_id", ")", ")", "return", "self", ".", "driver", ".", "_es", ".", "index", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "body", "=", "obj", ",", "doc_type", "=", "'_doc'", ",", "refresh", "=", "'wait_for'", ")", "[", "'_id'", "]" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Plugin.delete
Delete an object from elasticsearch. :param resource_id: id of the object to be deleted. :return:
oceandb_elasticsearch_driver/plugin.py
def delete(self, resource_id): """Delete an object from elasticsearch. :param resource_id: id of the object to be deleted. :return: """ self.logger.debug('elasticsearch::delete::{}'.format(resource_id)) if self.driver._es.exists( index=self.driver._index, id=resource_id, doc_type='_doc' ) == False: raise ValueError("Resource \"{}\" does not exists".format(resource_id)) return self.driver._es.delete( index=self.driver._index, id=resource_id, doc_type='_doc' )
def delete(self, resource_id): """Delete an object from elasticsearch. :param resource_id: id of the object to be deleted. :return: """ self.logger.debug('elasticsearch::delete::{}'.format(resource_id)) if self.driver._es.exists( index=self.driver._index, id=resource_id, doc_type='_doc' ) == False: raise ValueError("Resource \"{}\" does not exists".format(resource_id)) return self.driver._es.delete( index=self.driver._index, id=resource_id, doc_type='_doc' )
[ "Delete", "an", "object", "from", "elasticsearch", ".", ":", "param", "resource_id", ":", "id", "of", "the", "object", "to", "be", "deleted", ".", ":", "return", ":" ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L83-L99
[ "def", "delete", "(", "self", ",", "resource_id", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::delete::{}'", ".", "format", "(", "resource_id", ")", ")", "if", "self", ".", "driver", ".", "_es", ".", "exists", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "doc_type", "=", "'_doc'", ")", "==", "False", ":", "raise", "ValueError", "(", "\"Resource \\\"{}\\\" does not exists\"", ".", "format", "(", "resource_id", ")", ")", "return", "self", ".", "driver", ".", "_es", ".", "delete", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "id", "=", "resource_id", ",", "doc_type", "=", "'_doc'", ")" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
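Together, Plugin.write, read, update and delete form the CRUD surface of this driver. A hedged round-trip sketch; plugin is assumed to be an already initialised Plugin whose driver points at a reachable Elasticsearch index (the constructor and driver wiring are not shown in these records):

def crud_round_trip(plugin):
    # plugin: an initialised oceandb_elasticsearch_driver Plugin (assumed).
    doc_id = plugin.write({'name': 'example asset'}, resource_id='did:op:1234')

    stored = plugin.read(doc_id)                 # -> {'name': 'example asset'}

    plugin.update({'name': 'renamed asset'}, doc_id)
    plugin.delete(doc_id)                        # ValueError if the id is unknown
    return stored

Writing the same resource_id twice raises ValueError ("already exists, use update instead"), so changes to an existing document must go through update rather than write.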
test
Plugin.list
List all the objects saved in elasticsearch. :param search_from: start offset of objects to return. :param search_to: last offset of objects to return. :param limit: max number of values to be returned. :return: list with transactions.
oceandb_elasticsearch_driver/plugin.py
def list(self, search_from=None, search_to=None, limit=None): """List all the objects saved elasticsearch. :param search_from: start offset of objects to return. :param search_to: last offset of objects to return. :param limit: max number of values to be returned. :return: list with transactions. """ self.logger.debug('elasticsearch::list') body = { 'sort': [ {"_id": "asc"}, ], 'query': { 'match_all': {} } } if search_from: body['from'] = search_from if search_to: body['size'] = search_to - search_from if limit: body['size'] = limit page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
def list(self, search_from=None, search_to=None, limit=None): """List all the objects saved elasticsearch. :param search_from: start offset of objects to return. :param search_to: last offset of objects to return. :param limit: max number of values to be returned. :return: list with transactions. """ self.logger.debug('elasticsearch::list') body = { 'sort': [ {"_id": "asc"}, ], 'query': { 'match_all': {} } } if search_from: body['from'] = search_from if search_to: body['size'] = search_to - search_from if limit: body['size'] = limit page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
[ "List", "all", "the", "objects", "saved", "elasticsearch", ".", ":", "param", "search_from", ":", "start", "offset", "of", "objects", "to", "return", ".", ":", "param", "search_to", ":", "last", "offset", "of", "objects", "to", "return", ".", ":", "param", "limit", ":", "max", "number", "of", "values", "to", "be", "returned", ".", ":", "return", ":", "list", "with", "transactions", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L101-L134
[ "def", "list", "(", "self", ",", "search_from", "=", "None", ",", "search_to", "=", "None", ",", "limit", "=", "None", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::list'", ")", "body", "=", "{", "'sort'", ":", "[", "{", "\"_id\"", ":", "\"asc\"", "}", ",", "]", ",", "'query'", ":", "{", "'match_all'", ":", "{", "}", "}", "}", "if", "search_from", ":", "body", "[", "'from'", "]", "=", "search_from", "if", "search_to", ":", "body", "[", "'size'", "]", "=", "search_to", "-", "search_from", "if", "limit", ":", "body", "[", "'size'", "]", "=", "limit", "page", "=", "self", ".", "driver", ".", "_es", ".", "search", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "doc_type", "=", "'_doc'", ",", "body", "=", "body", ")", "object_list", "=", "[", "]", "for", "x", "in", "page", "[", "'hits'", "]", "[", "'hits'", "]", ":", "object_list", ".", "append", "(", "x", "[", "'_source'", "]", ")", "return", "object_list" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Plugin.query
Query elasticsearch for objects. :param search_model: object of QueryModel. :return: list of objects that match the query.
oceandb_elasticsearch_driver/plugin.py
def query(self, search_model: QueryModel): """Query elasticsearch for objects. :param search_model: object of QueryModel. :return: list of objects that match the query. """ query_parsed = query_parser(search_model.query) self.logger.debug(f'elasticsearch::query::{query_parsed[0]}') if search_model.sort is not None: self._mapping_to_sort(search_model.sort.keys()) sort = self._sort_object(search_model.sort) else: sort = [{"_id": "asc"}] if search_model.query == {}: query = {'match_all': {}} else: query = query_parsed[0] body = { 'query': query, 'sort': sort, 'from': (search_model.page - 1) * search_model.offset, 'size': search_model.offset, } page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body, q=query_parsed[1] ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
def query(self, search_model: QueryModel): """Query elasticsearch for objects. :param search_model: object of QueryModel. :return: list of objects that match the query. """ query_parsed = query_parser(search_model.query) self.logger.debug(f'elasticsearch::query::{query_parsed[0]}') if search_model.sort is not None: self._mapping_to_sort(search_model.sort.keys()) sort = self._sort_object(search_model.sort) else: sort = [{"_id": "asc"}] if search_model.query == {}: query = {'match_all': {}} else: query = query_parsed[0] body = { 'query': query, 'sort': sort, 'from': (search_model.page - 1) * search_model.offset, 'size': search_model.offset, } page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body, q=query_parsed[1] ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
[ "Query", "elasticsearch", "for", "objects", ".", ":", "param", "search_model", ":", "object", "of", "QueryModel", ".", ":", "return", ":", "list", "of", "objects", "that", "match", "the", "query", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L136-L170
[ "def", "query", "(", "self", ",", "search_model", ":", "QueryModel", ")", ":", "query_parsed", "=", "query_parser", "(", "search_model", ".", "query", ")", "self", ".", "logger", ".", "debug", "(", "f'elasticsearch::query::{query_parsed[0]}'", ")", "if", "search_model", ".", "sort", "is", "not", "None", ":", "self", ".", "_mapping_to_sort", "(", "search_model", ".", "sort", ".", "keys", "(", ")", ")", "sort", "=", "self", ".", "_sort_object", "(", "search_model", ".", "sort", ")", "else", ":", "sort", "=", "[", "{", "\"_id\"", ":", "\"asc\"", "}", "]", "if", "search_model", ".", "query", "==", "{", "}", ":", "query", "=", "{", "'match_all'", ":", "{", "}", "}", "else", ":", "query", "=", "query_parsed", "[", "0", "]", "body", "=", "{", "'query'", ":", "query", ",", "'sort'", ":", "sort", ",", "'from'", ":", "(", "search_model", ".", "page", "-", "1", ")", "*", "search_model", ".", "offset", ",", "'size'", ":", "search_model", ".", "offset", ",", "}", "page", "=", "self", ".", "driver", ".", "_es", ".", "search", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "doc_type", "=", "'_doc'", ",", "body", "=", "body", ",", "q", "=", "query_parsed", "[", "1", "]", ")", "object_list", "=", "[", "]", "for", "x", "in", "page", "[", "'hits'", "]", "[", "'hits'", "]", ":", "object_list", ".", "append", "(", "x", "[", "'_source'", "]", ")", "return", "object_list" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Plugin.text_query
Query elasticsearch for objects. :param search_model: object of FullTextModel :return: list of objects that match the query.
oceandb_elasticsearch_driver/plugin.py
def text_query(self, search_model: FullTextModel): """Query elasticsearch for objects. :param search_model: object of FullTextModel :return: list of objects that match the query. """ self.logger.debug('elasticsearch::text_query::{}'.format(search_model.text)) if search_model.sort is not None: self._mapping_to_sort(search_model.sort.keys()) sort = self._sort_object(search_model.sort) else: sort = [{"service.metadata.curation.rating": "asc"}] body = { 'sort': sort, 'from': (search_model.page - 1) * search_model.offset, 'size': search_model.offset, } page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body, q=search_model.text ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
def text_query(self, search_model: FullTextModel): """Query elasticsearch for objects. :param search_model: object of FullTextModel :return: list of objects that match the query. """ self.logger.debug('elasticsearch::text_query::{}'.format(search_model.text)) if search_model.sort is not None: self._mapping_to_sort(search_model.sort.keys()) sort = self._sort_object(search_model.sort) else: sort = [{"service.metadata.curation.rating": "asc"}] body = { 'sort': sort, 'from': (search_model.page - 1) * search_model.offset, 'size': search_model.offset, } page = self.driver._es.search( index=self.driver._index, doc_type='_doc', body=body, q=search_model.text ) object_list = [] for x in page['hits']['hits']: object_list.append(x['_source']) return object_list
[ "Query", "elasticsearch", "for", "objects", ".", ":", "param", "search_model", ":", "object", "of", "FullTextModel", ":", "return", ":", "list", "of", "objects", "that", "match", "the", "query", "." ]
oceanprotocol/oceandb-elasticsearch-driver
python
https://github.com/oceanprotocol/oceandb-elasticsearch-driver/blob/11901e8396252b9dbb70fd48debcfa82f1dd1ff2/oceandb_elasticsearch_driver/plugin.py#L172-L199
[ "def", "text_query", "(", "self", ",", "search_model", ":", "FullTextModel", ")", ":", "self", ".", "logger", ".", "debug", "(", "'elasticsearch::text_query::{}'", ".", "format", "(", "search_model", ".", "text", ")", ")", "if", "search_model", ".", "sort", "is", "not", "None", ":", "self", ".", "_mapping_to_sort", "(", "search_model", ".", "sort", ".", "keys", "(", ")", ")", "sort", "=", "self", ".", "_sort_object", "(", "search_model", ".", "sort", ")", "else", ":", "sort", "=", "[", "{", "\"service.metadata.curation.rating\"", ":", "\"asc\"", "}", "]", "body", "=", "{", "'sort'", ":", "sort", ",", "'from'", ":", "(", "search_model", ".", "page", "-", "1", ")", "*", "search_model", ".", "offset", ",", "'size'", ":", "search_model", ".", "offset", ",", "}", "page", "=", "self", ".", "driver", ".", "_es", ".", "search", "(", "index", "=", "self", ".", "driver", ".", "_index", ",", "doc_type", "=", "'_doc'", ",", "body", "=", "body", ",", "q", "=", "search_model", ".", "text", ")", "object_list", "=", "[", "]", "for", "x", "in", "page", "[", "'hits'", "]", "[", "'hits'", "]", ":", "object_list", ".", "append", "(", "x", "[", "'_source'", "]", ")", "return", "object_list" ]
11901e8396252b9dbb70fd48debcfa82f1dd1ff2
test
Migration.forwards
Write your forwards methods here.
hero_slider/south_migrations/0003_set_existing_slideritems_to_be_published.py
def forwards(self, orm): "Write your forwards methods here." # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." for title in orm['hero_slider.SliderItemTitle'].objects.all(): title.is_published = True title.save()
def forwards(self, orm): "Write your forwards methods here." # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." for title in orm['hero_slider.SliderItemTitle'].objects.all(): title.is_published = True title.save()
[ "Write", "your", "forwards", "methods", "here", "." ]
bitlabstudio/django-hero-slider
python
https://github.com/bitlabstudio/django-hero-slider/blob/8153b3eece76c47210a266c2edb660725c34a56e/hero_slider/south_migrations/0003_set_existing_slideritems_to_be_published.py#L10-L15
[ "def", "forwards", "(", "self", ",", "orm", ")", ":", "# Note: Remember to use orm['appname.ModelName'] rather than \"from appname.models...\"", "for", "title", "in", "orm", "[", "'hero_slider.SliderItemTitle'", "]", ".", "objects", ".", "all", "(", ")", ":", "title", ".", "is_published", "=", "True", "title", ".", "save", "(", ")" ]
8153b3eece76c47210a266c2edb660725c34a56e
test
get_slider_items
Returns the published slider items.
hero_slider/templatetags/hero_slider_tags.py
def get_slider_items(context, amount=None): """Returns the published slider items.""" req = context.get('request') qs = SliderItem.objects.published(req).order_by('position') if amount: qs = qs[:amount] return qs
def get_slider_items(context, amount=None): """Returns the published slider items.""" req = context.get('request') qs = SliderItem.objects.published(req).order_by('position') if amount: qs = qs[:amount] return qs
[ "Returns", "the", "published", "slider", "items", "." ]
bitlabstudio/django-hero-slider
python
https://github.com/bitlabstudio/django-hero-slider/blob/8153b3eece76c47210a266c2edb660725c34a56e/hero_slider/templatetags/hero_slider_tags.py#L11-L17
[ "def", "get_slider_items", "(", "context", ",", "amount", "=", "None", ")", ":", "req", "=", "context", ".", "get", "(", "'request'", ")", "qs", "=", "SliderItem", ".", "objects", ".", "published", "(", "req", ")", ".", "order_by", "(", "'position'", ")", "if", "amount", ":", "qs", "=", "qs", "[", ":", "amount", "]", "return", "qs" ]
8153b3eece76c47210a266c2edb660725c34a56e
test
render_hero_slider
Renders the hero slider.
hero_slider/templatetags/hero_slider_tags.py
def render_hero_slider(context): """ Renders the hero slider. """ req = context.get('request') qs = SliderItem.objects.published(req).order_by('position') return { 'slider_items': qs, }
def render_hero_slider(context): """ Renders the hero slider. """ req = context.get('request') qs = SliderItem.objects.published(req).order_by('position') return { 'slider_items': qs, }
[ "Renders", "the", "hero", "slider", "." ]
bitlabstudio/django-hero-slider
python
https://github.com/bitlabstudio/django-hero-slider/blob/8153b3eece76c47210a266c2edb660725c34a56e/hero_slider/templatetags/hero_slider_tags.py#L21-L30
[ "def", "render_hero_slider", "(", "context", ")", ":", "req", "=", "context", ".", "get", "(", "'request'", ")", "qs", "=", "SliderItem", ".", "objects", ".", "published", "(", "req", ")", ".", "order_by", "(", "'position'", ")", "return", "{", "'slider_items'", ":", "qs", ",", "}" ]
8153b3eece76c47210a266c2edb660725c34a56e
test
RWLock.reader_acquire
Acquire the lock to read
arthur/utils.py
def reader_acquire(self): """Acquire the lock to read""" self._order_mutex.acquire() self._readers_mutex.acquire() if self._readers == 0: self._access_mutex.acquire() self._readers += 1 self._order_mutex.release() self._readers_mutex.release()
def reader_acquire(self): """Acquire the lock to read""" self._order_mutex.acquire() self._readers_mutex.acquire() if self._readers == 0: self._access_mutex.acquire() self._readers += 1 self._order_mutex.release() self._readers_mutex.release()
[ "Acquire", "the", "lock", "to", "read" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/utils.py#L45-L56
[ "def", "reader_acquire", "(", "self", ")", ":", "self", ".", "_order_mutex", ".", "acquire", "(", ")", "self", ".", "_readers_mutex", ".", "acquire", "(", ")", "if", "self", ".", "_readers", "==", "0", ":", "self", ".", "_access_mutex", ".", "acquire", "(", ")", "self", ".", "_readers", "+=", "1", "self", ".", "_order_mutex", ".", "release", "(", ")", "self", ".", "_readers_mutex", ".", "release", "(", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
RWLock.reader_release
Release the lock after reading
arthur/utils.py
def reader_release(self): """Release the lock after reading""" self._readers_mutex.acquire() self._readers -= 1 if self._readers == 0: self._access_mutex.release() self._readers_mutex.release()
def reader_release(self): """Release the lock after reading""" self._readers_mutex.acquire() self._readers -= 1 if self._readers == 0: self._access_mutex.release() self._readers_mutex.release()
[ "Release", "the", "lock", "after", "reading" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/utils.py#L58-L67
[ "def", "reader_release", "(", "self", ")", ":", "self", ".", "_readers_mutex", ".", "acquire", "(", ")", "self", ".", "_readers", "-=", "1", "if", "self", ".", "_readers", "==", "0", ":", "self", ".", "_access_mutex", ".", "release", "(", ")", "self", ".", "_readers_mutex", ".", "release", "(", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
RWLock.writer_acquire
Acquire the lock to write
arthur/utils.py
def writer_acquire(self): """Acquire the lock to write""" self._order_mutex.acquire() self._access_mutex.acquire() self._order_mutex.release()
def writer_acquire(self): """Acquire the lock to write""" self._order_mutex.acquire() self._access_mutex.acquire() self._order_mutex.release()
[ "Acquire", "the", "lock", "to", "write" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/utils.py#L69-L74
[ "def", "writer_acquire", "(", "self", ")", ":", "self", ".", "_order_mutex", ".", "acquire", "(", ")", "self", ".", "_access_mutex", ".", "acquire", "(", ")", "self", ".", "_order_mutex", ".", "release", "(", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
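The three RWLock methods above implement a fair readers-writer lock: _order_mutex preserves arrival order, _readers_mutex guards the reader count, and _access_mutex is held by a writer or by the first active reader. A minimal sketch of how callers are expected to pair the calls, assuming arthur is installed and that the class also exposes the matching writer_release() (not shown in these records):

from arthur.utils import RWLock

lock = RWLock()
shared = {}

def read_value(key):
    lock.reader_acquire()
    try:
        return shared.get(key)
    finally:
        lock.reader_release()

def write_value(key, value):
    lock.writer_acquire()
    try:
        shared[key] = value
    finally:
        lock.writer_release()   # assumed counterpart of writer_acquire()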
test
TaskRegistry.add
Add a task to the registry. This method adds a task using `task_id` as its identifier. If a task with the same identifier already exists on the registry, an `AlreadyExistsError` exception will be raised. :param task_id: identifier of the task to add :param backend: backend used to fetch data from the repository :param category: category of the items to fetch :param backend_args: dictionary of arguments required to run the backend :param archiving_cfg: archiving config for the task, if needed :param scheduling_cfg: scheduling config for the task, if needed :returns: the new task added to the registry :raises AlreadyExistsError: raised when the given task identifier exists on the registry
arthur/tasks.py
def add(self, task_id, backend, category, backend_args, archiving_cfg=None, scheduling_cfg=None): """Add a task to the registry. This method adds task using `task_id` as identifier. If a task with the same identifier already exists on the registry, a `AlreadyExistsError` exception will be raised. :param task_id: identifier of the task to add :param backend: backend used to fetch data from the repository :param category: category of the items to fetch :param backend_args: dictionary of arguments required to run the backend :param archiving_cfg: archiving config for the task, if needed :param scheduling_cfg: scheduling config for the task, if needed :returns: the new task added to the registry :raises AlreadyExistsError: raised when the given task identifier exists on the registry """ self._rwlock.writer_acquire() if task_id in self._tasks: self._rwlock.writer_release() raise AlreadyExistsError(element=str(task_id)) task = Task(task_id, backend, category, backend_args, archiving_cfg=archiving_cfg, scheduling_cfg=scheduling_cfg) self._tasks[task_id] = task self._rwlock.writer_release() logger.debug("Task %s added to the registry", str(task_id)) return task
def add(self, task_id, backend, category, backend_args, archiving_cfg=None, scheduling_cfg=None): """Add a task to the registry. This method adds task using `task_id` as identifier. If a task with the same identifier already exists on the registry, a `AlreadyExistsError` exception will be raised. :param task_id: identifier of the task to add :param backend: backend used to fetch data from the repository :param category: category of the items to fetch :param backend_args: dictionary of arguments required to run the backend :param archiving_cfg: archiving config for the task, if needed :param scheduling_cfg: scheduling config for the task, if needed :returns: the new task added to the registry :raises AlreadyExistsError: raised when the given task identifier exists on the registry """ self._rwlock.writer_acquire() if task_id in self._tasks: self._rwlock.writer_release() raise AlreadyExistsError(element=str(task_id)) task = Task(task_id, backend, category, backend_args, archiving_cfg=archiving_cfg, scheduling_cfg=scheduling_cfg) self._tasks[task_id] = task self._rwlock.writer_release() logger.debug("Task %s added to the registry", str(task_id)) return task
[ "Add", "a", "task", "to", "the", "registry", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L102-L137
[ "def", "add", "(", "self", ",", "task_id", ",", "backend", ",", "category", ",", "backend_args", ",", "archiving_cfg", "=", "None", ",", "scheduling_cfg", "=", "None", ")", ":", "self", ".", "_rwlock", ".", "writer_acquire", "(", ")", "if", "task_id", "in", "self", ".", "_tasks", ":", "self", ".", "_rwlock", ".", "writer_release", "(", ")", "raise", "AlreadyExistsError", "(", "element", "=", "str", "(", "task_id", ")", ")", "task", "=", "Task", "(", "task_id", ",", "backend", ",", "category", ",", "backend_args", ",", "archiving_cfg", "=", "archiving_cfg", ",", "scheduling_cfg", "=", "scheduling_cfg", ")", "self", ".", "_tasks", "[", "task_id", "]", "=", "task", "self", ".", "_rwlock", ".", "writer_release", "(", ")", "logger", ".", "debug", "(", "\"Task %s added to the registry\"", ",", "str", "(", "task_id", ")", ")", "return", "task" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
TaskRegistry.remove
Remove a task from the registry. To remove it, pass its identifier with the `task_id` parameter. When the identifier is not found, a `NotFoundError` exception is raised. :param task_id: identifier of the task to remove :raises NotFoundError: raised when the given task identifier is not found on the registry
arthur/tasks.py
def remove(self, task_id): """Remove a task from the registry. To remove it, pass its identifier with `taks_id` parameter. When the identifier is not found, a `NotFoundError` exception is raised. :param task_id: identifier of the task to remove :raises NotFoundError: raised when the given task identifier is not found on the registry """ try: self._rwlock.writer_acquire() del self._tasks[task_id] except KeyError: raise NotFoundError(element=str(task_id)) finally: self._rwlock.writer_release() logger.debug("Task %s removed from the registry", str(task_id))
def remove(self, task_id): """Remove a task from the registry. To remove it, pass its identifier with `taks_id` parameter. When the identifier is not found, a `NotFoundError` exception is raised. :param task_id: identifier of the task to remove :raises NotFoundError: raised when the given task identifier is not found on the registry """ try: self._rwlock.writer_acquire() del self._tasks[task_id] except KeyError: raise NotFoundError(element=str(task_id)) finally: self._rwlock.writer_release() logger.debug("Task %s removed from the registry", str(task_id))
[ "Remove", "a", "task", "from", "the", "registry", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L139-L159
[ "def", "remove", "(", "self", ",", "task_id", ")", ":", "try", ":", "self", ".", "_rwlock", ".", "writer_acquire", "(", ")", "del", "self", ".", "_tasks", "[", "task_id", "]", "except", "KeyError", ":", "raise", "NotFoundError", "(", "element", "=", "str", "(", "task_id", ")", ")", "finally", ":", "self", ".", "_rwlock", ".", "writer_release", "(", ")", "logger", ".", "debug", "(", "\"Task %s removed from the registry\"", ",", "str", "(", "task_id", ")", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
TaskRegistry.get
Get a task from the registry. Retrieve a task from the registry using its task identifier. When the task does not exist, a `NotFoundError` exception will be raised. :param task_id: task identifier :returns: a task object :raises NotFoundError: raised when the requested task is not found on the registry
arthur/tasks.py
def get(self, task_id): """Get a task from the registry. Retrieve a task from the registry using its task identifier. When the task does not exist, a `NotFoundError` exception will be raised. :param task_id: task identifier :returns: a task object :raises NotFoundError: raised when the requested task is not found on the registry """ try: self._rwlock.reader_acquire() task = self._tasks[task_id] except KeyError: raise NotFoundError(element=str(task_id)) finally: self._rwlock.reader_release() return task
def get(self, task_id): """Get a task from the registry. Retrieve a task from the registry using its task identifier. When the task does not exist, a `NotFoundError` exception will be raised. :param task_id: task identifier :returns: a task object :raises NotFoundError: raised when the requested task is not found on the registry """ try: self._rwlock.reader_acquire() task = self._tasks[task_id] except KeyError: raise NotFoundError(element=str(task_id)) finally: self._rwlock.reader_release() return task
[ "Get", "a", "task", "from", "the", "registry", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L161-L183
[ "def", "get", "(", "self", ",", "task_id", ")", ":", "try", ":", "self", ".", "_rwlock", ".", "reader_acquire", "(", ")", "task", "=", "self", ".", "_tasks", "[", "task_id", "]", "except", "KeyError", ":", "raise", "NotFoundError", "(", "element", "=", "str", "(", "task_id", ")", ")", "finally", ":", "self", ".", "_rwlock", ".", "reader_release", "(", ")", "return", "task" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
TaskRegistry.tasks
Get the list of tasks
arthur/tasks.py
def tasks(self): """Get the list of tasks""" self._rwlock.reader_acquire() tl = [v for v in self._tasks.values()] tl.sort(key=lambda x: x.task_id) self._rwlock.reader_release() return tl
def tasks(self): """Get the list of tasks""" self._rwlock.reader_acquire() tl = [v for v in self._tasks.values()] tl.sort(key=lambda x: x.task_id) self._rwlock.reader_release() return tl
[ "Get", "the", "list", "of", "tasks" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L186-L194
[ "def", "tasks", "(", "self", ")", ":", "self", ".", "_rwlock", ".", "reader_acquire", "(", ")", "tl", "=", "[", "v", "for", "v", "in", "self", ".", "_tasks", ".", "values", "(", ")", "]", "tl", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", ".", "task_id", ")", "self", ".", "_rwlock", ".", "reader_release", "(", ")", "return", "tl" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
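A short sketch of the TaskRegistry round trip (add, get, tasks, remove), assuming arthur is installed, that TaskRegistry() takes no constructor arguments, and that tasks is exposed as a property (if it is a plain method in your version, call it instead):

from arthur.tasks import TaskRegistry

registry = TaskRegistry()

task = registry.add('arthur.git', 'git', 'commit',
                    {'uri': 'https://github.com/chaoss/grimoirelab-kingarthur.git'})

print(registry.get('arthur.git') is task)        # True
print([t.task_id for t in registry.tasks])       # ['arthur.git'], sorted by task_id

registry.remove('arthur.git')
# registry.get('arthur.git') would now raise NotFoundError, and adding the
# same task_id twice raises AlreadyExistsError.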
test
_TaskConfig.to_dict
Returns a dict with the representation of this task configuration object.
arthur/tasks.py
def to_dict(self): """Returns a dict with the representation of this task configuration object.""" properties = find_class_properties(self.__class__) config = { name: self.__getattribute__(name) for name, _ in properties } return config
def to_dict(self): """Returns a dict with the representation of this task configuration object.""" properties = find_class_properties(self.__class__) config = { name: self.__getattribute__(name) for name, _ in properties } return config
[ "Returns", "a", "dict", "with", "the", "representation", "of", "this", "task", "configuration", "object", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L214-L221
[ "def", "to_dict", "(", "self", ")", ":", "properties", "=", "find_class_properties", "(", "self", ".", "__class__", ")", "config", "=", "{", "name", ":", "self", ".", "__getattribute__", "(", "name", ")", "for", "name", ",", "_", "in", "properties", "}", "return", "config" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
_TaskConfig.from_dict
Create a configuration object from a dictionary. Key, value pairs will be used to initialize a task configuration object. If 'config' contains invalid configuration parameters, a `ValueError` exception will be raised. :param config: dictionary used to create an instance of this object :returns: a task config instance :raises ValueError: when an invalid configuration parameter is found
arthur/tasks.py
def from_dict(cls, config): """Create an configuration object from a dictionary. Key,value pairs will be used to initialize a task configuration object. If 'config' contains invalid configuration parameters a `ValueError` exception will be raised. :param config: dictionary used to create an instance of this object :returns: a task config instance :raises ValueError: when an invalid configuration parameter is found """ try: obj = cls(**config) except TypeError as e: m = cls.KW_ARGS_ERROR_REGEX.match(str(e)) if m: raise ValueError("unknown '%s' task config parameter" % m.group(1)) else: raise e else: return obj
def from_dict(cls, config): """Create an configuration object from a dictionary. Key,value pairs will be used to initialize a task configuration object. If 'config' contains invalid configuration parameters a `ValueError` exception will be raised. :param config: dictionary used to create an instance of this object :returns: a task config instance :raises ValueError: when an invalid configuration parameter is found """ try: obj = cls(**config) except TypeError as e: m = cls.KW_ARGS_ERROR_REGEX.match(str(e)) if m: raise ValueError("unknown '%s' task config parameter" % m.group(1)) else: raise e else: return obj
[ "Create", "an", "configuration", "object", "from", "a", "dictionary", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/tasks.py#L224-L246
[ "def", "from_dict", "(", "cls", ",", "config", ")", ":", "try", ":", "obj", "=", "cls", "(", "*", "*", "config", ")", "except", "TypeError", "as", "e", ":", "m", "=", "cls", ".", "KW_ARGS_ERROR_REGEX", ".", "match", "(", "str", "(", "e", ")", ")", "if", "m", ":", "raise", "ValueError", "(", "\"unknown '%s' task config parameter\"", "%", "m", ".", "group", "(", "1", ")", ")", "else", ":", "raise", "e", "else", ":", "return", "obj" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
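A minimal, self-contained sketch of the from_dict pattern shown above, turning the TypeError raised by an unexpected keyword argument into a friendlier ValueError. The SampleTaskConfig class, its fields and its error regex are hypothetical stand-ins, not kingarthur code.

import re

class SampleTaskConfig:
    # Hypothetical config class mirroring the _TaskConfig.from_dict behaviour
    KW_ARGS_ERROR_REGEX = re.compile(r".+ unexpected keyword argument '(.+)'")

    def __init__(self, max_items=100, from_date=None):
        self.max_items = max_items
        self.from_date = from_date

    @classmethod
    def from_dict(cls, config):
        try:
            return cls(**config)
        except TypeError as e:
            m = cls.KW_ARGS_ERROR_REGEX.match(str(e))
            if m:
                raise ValueError("unknown '%s' task config parameter" % m.group(1))
            raise

print(SampleTaskConfig.from_dict({'max_items': 10}).max_items)   # 10
try:
    SampleTaskConfig.from_dict({'bogus': 1})
except ValueError as e:
    print(e)   # unknown 'bogus' task config parameter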
metadata
Add metadata to an item. Decorator that adds metadata to Perceval items, such as the identifier of the job that generated them or the version of the system. The contents of the original item will be stored under the 'data' keyword. Take into account that this function can only be called from a `PercevalJob` class, because it needs access to some attributes and methods of that class.
arthur/jobs.py
def metadata(func): """Add metadata to an item. Decorator that adds metadata to Perceval items such as the identifier of the job that generated it or the version of the system. The contents from the original item will be stored under the 'data' keyword. Take into account that this function only can be called from a `PercevalJob` class due it needs access to some attributes and methods of this class. """ @functools.wraps(func) def decorator(self, *args, **kwargs): for item in func(self, *args, **kwargs): item['arthur_version'] = __version__ item['job_id'] = self.job_id yield item return decorator
def metadata(func): """Add metadata to an item. Decorator that adds metadata to Perceval items such as the identifier of the job that generated it or the version of the system. The contents from the original item will be stored under the 'data' keyword. Take into account that this function only can be called from a `PercevalJob` class due it needs access to some attributes and methods of this class. """ @functools.wraps(func) def decorator(self, *args, **kwargs): for item in func(self, *args, **kwargs): item['arthur_version'] = __version__ item['job_id'] = self.job_id yield item return decorator
[ "Add", "metadata", "to", "an", "item", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/jobs.py#L46-L64
[ "def", "metadata", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "decorator", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "item", "in", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "item", "[", "'arthur_version'", "]", "=", "__version__", "item", "[", "'job_id'", "]", "=", "self", ".", "job_id", "yield", "item", "return", "decorator" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
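A runnable sketch of how a generator-wrapping decorator like metadata behaves; the FakeJob class and the version string are hypothetical stand-ins used only to show how the yielded items get tagged.

import functools

ARTHUR_VERSION = '0.1.0'   # hypothetical stand-in for arthur.__version__

def metadata(func):
    # Tag every item yielded by the wrapped method with job metadata
    @functools.wraps(func)
    def decorator(self, *args, **kwargs):
        for item in func(self, *args, **kwargs):
            item['arthur_version'] = ARTHUR_VERSION
            item['job_id'] = self.job_id
            yield item
    return decorator

class FakeJob:
    job_id = 'job-1'

    @metadata
    def fetch(self):
        yield {'data': {'message': 'first commit'}}

print(next(FakeJob().fetch()))
# {'data': {'message': 'first commit'}, 'arthur_version': '0.1.0', 'job_id': 'job-1'}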
execute_perceval_job
Execute a Perceval job on RQ. The items fetched during the process will be stored in a Redis queue named `queue`. When the parameter `archive_path` is set, raw data will be stored with the archive manager. The contents from the archive can also be retrieved by setting the parameter `fetch_from_archive` to `True`. Take into account that this behaviour is only available when the backend supports the use of the archive. If archiving is not supported, an `AttributeError` exception will be raised. :param backend: backend to execute :param backend_args: dict of arguments for running the backend :param qitems: name of the RQ queue used to store the items :param task_id: identifier of the task linked to this job :param category: category of the items to retrieve :param archive_args: archive arguments :param max_retries: maximum number of attempts this job can execute before failing :returns: a `JobResult` instance :raises NotFoundError: raised when the backend is not found :raises AttributeError: raised when archiving is not supported but any of the archive parameters were set
arthur/jobs.py
def execute_perceval_job(backend, backend_args, qitems, task_id, category, archive_args=None, max_retries=MAX_JOB_RETRIES): """Execute a Perceval job on RQ. The items fetched during the process will be stored in a Redis queue named `queue`. Setting the parameter `archive_path`, raw data will be stored with the archive manager. The contents from the archive can be retrieved setting the pameter `fetch_from_archive` to `True`, too. Take into account this behaviour will be only available when the backend supports the use of the archive. If archiving is not supported, an `AttributeError` exception will be raised. :param backend: backend to execute :param bakend_args: dict of arguments for running the backend :param qitems: name of the RQ queue used to store the items :param task_id: identifier of the task linked to this job :param category: category of the items to retrieve :param archive_args: archive arguments :param max_retries: maximum number of attempts this job can execute before failing :returns: a `JobResult` instance :raises NotFoundError: raised when the backend is not found :raises AttributeError: raised when archiving is not supported but any of the archive parameters were set """ rq_job = rq.get_current_job() job = PercevalJob(rq_job.id, task_id, backend, category, rq_job.connection, qitems) logger.debug("Running job #%s (task: %s) (%s) (cat:%s)", job.job_id, task_id, backend, category) if not job.has_archiving() and archive_args: raise AttributeError("archive attributes set but archive is not supported") run_job = True resume = False failures = 0 while run_job: try: job.run(backend_args, archive_args=archive_args, resume=resume) except AttributeError as e: raise e except Exception as e: logger.debug("Error running job %s (%s) - %s", job.job_id, backend, str(e)) failures += 1 if not job.has_resuming() or failures >= max_retries: logger.error("Cancelling job #%s (task: %s) (%s)", job.job_id, task_id, backend) raise e logger.warning("Resuming job #%s (task: %s) (%s) due to a failure (n %s, max %s)", job.job_id, task_id, backend, failures, max_retries) resume = True else: # No failure, do not retry run_job = False result = job.result logger.debug("Job #%s (task: %s) completed (%s) - %s items (%s) fetched", result.job_id, task_id, result.backend, str(result.nitems), result.category) return result
def execute_perceval_job(backend, backend_args, qitems, task_id, category, archive_args=None, max_retries=MAX_JOB_RETRIES): """Execute a Perceval job on RQ. The items fetched during the process will be stored in a Redis queue named `queue`. Setting the parameter `archive_path`, raw data will be stored with the archive manager. The contents from the archive can be retrieved setting the pameter `fetch_from_archive` to `True`, too. Take into account this behaviour will be only available when the backend supports the use of the archive. If archiving is not supported, an `AttributeError` exception will be raised. :param backend: backend to execute :param bakend_args: dict of arguments for running the backend :param qitems: name of the RQ queue used to store the items :param task_id: identifier of the task linked to this job :param category: category of the items to retrieve :param archive_args: archive arguments :param max_retries: maximum number of attempts this job can execute before failing :returns: a `JobResult` instance :raises NotFoundError: raised when the backend is not found :raises AttributeError: raised when archiving is not supported but any of the archive parameters were set """ rq_job = rq.get_current_job() job = PercevalJob(rq_job.id, task_id, backend, category, rq_job.connection, qitems) logger.debug("Running job #%s (task: %s) (%s) (cat:%s)", job.job_id, task_id, backend, category) if not job.has_archiving() and archive_args: raise AttributeError("archive attributes set but archive is not supported") run_job = True resume = False failures = 0 while run_job: try: job.run(backend_args, archive_args=archive_args, resume=resume) except AttributeError as e: raise e except Exception as e: logger.debug("Error running job %s (%s) - %s", job.job_id, backend, str(e)) failures += 1 if not job.has_resuming() or failures >= max_retries: logger.error("Cancelling job #%s (task: %s) (%s)", job.job_id, task_id, backend) raise e logger.warning("Resuming job #%s (task: %s) (%s) due to a failure (n %s, max %s)", job.job_id, task_id, backend, failures, max_retries) resume = True else: # No failure, do not retry run_job = False result = job.result logger.debug("Job #%s (task: %s) completed (%s) - %s items (%s) fetched", result.job_id, task_id, result.backend, str(result.nitems), result.category) return result
[ "Execute", "a", "Perceval", "job", "on", "RQ", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/jobs.py#L244-L315
[ "def", "execute_perceval_job", "(", "backend", ",", "backend_args", ",", "qitems", ",", "task_id", ",", "category", ",", "archive_args", "=", "None", ",", "max_retries", "=", "MAX_JOB_RETRIES", ")", ":", "rq_job", "=", "rq", ".", "get_current_job", "(", ")", "job", "=", "PercevalJob", "(", "rq_job", ".", "id", ",", "task_id", ",", "backend", ",", "category", ",", "rq_job", ".", "connection", ",", "qitems", ")", "logger", ".", "debug", "(", "\"Running job #%s (task: %s) (%s) (cat:%s)\"", ",", "job", ".", "job_id", ",", "task_id", ",", "backend", ",", "category", ")", "if", "not", "job", ".", "has_archiving", "(", ")", "and", "archive_args", ":", "raise", "AttributeError", "(", "\"archive attributes set but archive is not supported\"", ")", "run_job", "=", "True", "resume", "=", "False", "failures", "=", "0", "while", "run_job", ":", "try", ":", "job", ".", "run", "(", "backend_args", ",", "archive_args", "=", "archive_args", ",", "resume", "=", "resume", ")", "except", "AttributeError", "as", "e", ":", "raise", "e", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "\"Error running job %s (%s) - %s\"", ",", "job", ".", "job_id", ",", "backend", ",", "str", "(", "e", ")", ")", "failures", "+=", "1", "if", "not", "job", ".", "has_resuming", "(", ")", "or", "failures", ">=", "max_retries", ":", "logger", ".", "error", "(", "\"Cancelling job #%s (task: %s) (%s)\"", ",", "job", ".", "job_id", ",", "task_id", ",", "backend", ")", "raise", "e", "logger", ".", "warning", "(", "\"Resuming job #%s (task: %s) (%s) due to a failure (n %s, max %s)\"", ",", "job", ".", "job_id", ",", "task_id", ",", "backend", ",", "failures", ",", "max_retries", ")", "resume", "=", "True", "else", ":", "# No failure, do not retry", "run_job", "=", "False", "result", "=", "job", ".", "result", "logger", ".", "debug", "(", "\"Job #%s (task: %s) completed (%s) - %s items (%s) fetched\"", ",", "result", ".", "job_id", ",", "task_id", ",", "result", ".", "backend", ",", "str", "(", "result", ".", "nitems", ")", ",", "result", ".", "category", ")", "return", "result" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
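execute_perceval_job relies on rq.get_current_job(), so it only runs inside an RQ worker. A sketch of how such a job could be enqueued, assuming a local Redis instance; the queue name, task id and backend arguments are illustrative, not values mandated by kingarthur.

import redis
import rq

conn = redis.Redis(host='localhost', port=6379)
queue = rq.Queue('create', connection=conn)

job = queue.enqueue('arthur.jobs.execute_perceval_job',
                    backend='git',
                    backend_args={'uri': 'https://github.com/chaoss/grimoirelab-kingarthur.git',
                                  'gitpath': '/tmp/kingarthur.git'},
                    qitems='items',
                    task_id='kingarthur-git',
                    category='commit')
print(job.id)   # the fetched items land in the 'items' Redis list once a worker runs it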
PercevalJob.initialize_archive_manager
Initialize the archive manager. :param archive_path: path where the archive manager is located
arthur/jobs.py
def initialize_archive_manager(self, archive_path): """Initialize the archive manager. :param archive_path: path where the archive manager is located """ if archive_path == "": raise ValueError("Archive manager path cannot be empty") if archive_path: self.archive_manager = perceval.archive.ArchiveManager(archive_path)
def initialize_archive_manager(self, archive_path): """Initialize the archive manager. :param archive_path: path where the archive manager is located """ if archive_path == "": raise ValueError("Archive manager path cannot be empty") if archive_path: self.archive_manager = perceval.archive.ArchiveManager(archive_path)
[ "Initialize", "the", "archive", "manager", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/jobs.py#L135-L144
[ "def", "initialize_archive_manager", "(", "self", ",", "archive_path", ")", ":", "if", "archive_path", "==", "\"\"", ":", "raise", "ValueError", "(", "\"Archive manager path cannot be empty\"", ")", "if", "archive_path", ":", "self", ".", "archive_manager", "=", "perceval", ".", "archive", ".", "ArchiveManager", "(", "archive_path", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
PercevalJob.run
Run the backend with the given parameters. The method will run the backend assigned to this job, storing the fetched items in a Redis queue. The ongoing status of the job can be accessed through the property `result`. When `resume` is set, the job will start from the last execution, overwriting the 'from_date' and 'offset' parameters, if needed. Setting the parameter `fetch_from_archive` to `True`, items can be fetched from the archive assigned to this job. Any exception during the execution of the process will be raised. :param backend_args: parameters used to run the backend :param archive_args: archive arguments :param resume: fetch items starting where the last execution stopped
arthur/jobs.py
def run(self, backend_args, archive_args=None, resume=False): """Run the backend with the given parameters. The method will run the backend assigned to this job, storing the fetched items in a Redis queue. The ongoing status of the job, can be accessed through the property `result`. When `resume` is set, the job will start from the last execution, overwriting 'from_date' and 'offset' parameters, if needed. Setting to `True` the parameter `fetch_from_archive`, items can be fetched from the archive assigned to this job. Any exception during the execution of the process will be raised. :param backend_args: parameters used to un the backend :param archive_args: archive arguments :param resume: fetch items starting where the last execution stopped """ args = backend_args.copy() if archive_args: self.initialize_archive_manager(archive_args['archive_path']) if not resume: max_date = backend_args.get('from_date', None) offset = backend_args.get('offset', None) if max_date: max_date = datetime_to_utc(max_date).timestamp() self._result = JobResult(self.job_id, self.task_id, self.backend, self.category, None, max_date, 0, offset=offset, nresumed=0) else: if self.result.max_date: args['from_date'] = unixtime_to_datetime(self.result.max_date) if self.result.offset: args['offset'] = self.result.offset self._result.nresumed += 1 for item in self._execute(args, archive_args): self.conn.rpush(self.qitems, pickle.dumps(item)) self._result.nitems += 1 self._result.last_uuid = item['uuid'] if not self.result.max_date or self.result.max_date < item['updated_on']: self._result.max_date = item['updated_on'] if 'offset' in item: self._result.offset = item['offset']
def run(self, backend_args, archive_args=None, resume=False): """Run the backend with the given parameters. The method will run the backend assigned to this job, storing the fetched items in a Redis queue. The ongoing status of the job, can be accessed through the property `result`. When `resume` is set, the job will start from the last execution, overwriting 'from_date' and 'offset' parameters, if needed. Setting to `True` the parameter `fetch_from_archive`, items can be fetched from the archive assigned to this job. Any exception during the execution of the process will be raised. :param backend_args: parameters used to un the backend :param archive_args: archive arguments :param resume: fetch items starting where the last execution stopped """ args = backend_args.copy() if archive_args: self.initialize_archive_manager(archive_args['archive_path']) if not resume: max_date = backend_args.get('from_date', None) offset = backend_args.get('offset', None) if max_date: max_date = datetime_to_utc(max_date).timestamp() self._result = JobResult(self.job_id, self.task_id, self.backend, self.category, None, max_date, 0, offset=offset, nresumed=0) else: if self.result.max_date: args['from_date'] = unixtime_to_datetime(self.result.max_date) if self.result.offset: args['offset'] = self.result.offset self._result.nresumed += 1 for item in self._execute(args, archive_args): self.conn.rpush(self.qitems, pickle.dumps(item)) self._result.nitems += 1 self._result.last_uuid = item['uuid'] if not self.result.max_date or self.result.max_date < item['updated_on']: self._result.max_date = item['updated_on'] if 'offset' in item: self._result.offset = item['offset']
[ "Run", "the", "backend", "with", "the", "given", "parameters", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/jobs.py#L146-L198
[ "def", "run", "(", "self", ",", "backend_args", ",", "archive_args", "=", "None", ",", "resume", "=", "False", ")", ":", "args", "=", "backend_args", ".", "copy", "(", ")", "if", "archive_args", ":", "self", ".", "initialize_archive_manager", "(", "archive_args", "[", "'archive_path'", "]", ")", "if", "not", "resume", ":", "max_date", "=", "backend_args", ".", "get", "(", "'from_date'", ",", "None", ")", "offset", "=", "backend_args", ".", "get", "(", "'offset'", ",", "None", ")", "if", "max_date", ":", "max_date", "=", "datetime_to_utc", "(", "max_date", ")", ".", "timestamp", "(", ")", "self", ".", "_result", "=", "JobResult", "(", "self", ".", "job_id", ",", "self", ".", "task_id", ",", "self", ".", "backend", ",", "self", ".", "category", ",", "None", ",", "max_date", ",", "0", ",", "offset", "=", "offset", ",", "nresumed", "=", "0", ")", "else", ":", "if", "self", ".", "result", ".", "max_date", ":", "args", "[", "'from_date'", "]", "=", "unixtime_to_datetime", "(", "self", ".", "result", ".", "max_date", ")", "if", "self", ".", "result", ".", "offset", ":", "args", "[", "'offset'", "]", "=", "self", ".", "result", ".", "offset", "self", ".", "_result", ".", "nresumed", "+=", "1", "for", "item", "in", "self", ".", "_execute", "(", "args", ",", "archive_args", ")", ":", "self", ".", "conn", ".", "rpush", "(", "self", ".", "qitems", ",", "pickle", ".", "dumps", "(", "item", ")", ")", "self", ".", "_result", ".", "nitems", "+=", "1", "self", ".", "_result", ".", "last_uuid", "=", "item", "[", "'uuid'", "]", "if", "not", "self", ".", "result", ".", "max_date", "or", "self", ".", "result", ".", "max_date", "<", "item", "[", "'updated_on'", "]", ":", "self", ".", "_result", ".", "max_date", "=", "item", "[", "'updated_on'", "]", "if", "'offset'", "in", "item", ":", "self", ".", "_result", ".", "offset", "=", "item", "[", "'offset'", "]" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
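A small sketch of the date bookkeeping run() relies on when resuming: 'from_date' is stored in JobResult.max_date as a UTC timestamp and converted back with unixtime_to_datetime on the next attempt. It assumes grimoirelab-toolkit is installed; the date value is illustrative.

import datetime
from grimoirelab_toolkit.datetime import datetime_to_utc, unixtime_to_datetime

from_date = datetime.datetime(2018, 1, 1, tzinfo=datetime.timezone.utc)

# What run() stores in the job result on the first attempt
max_date = datetime_to_utc(from_date).timestamp()

# What a resumed attempt passes back to the backend as 'from_date'
print(max_date, unixtime_to_datetime(max_date))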
PercevalJob._execute
Execute a backend of Perceval. Run the backend of Perceval assigned to this job using the given arguments. It will raise an `AttributeError` when any of the required parameters to run the backend are not found. Other exceptions related to the execution of the backend will be raised too. This method will return an iterator of the items fetched by the backend. These items will include some metadata related to this job. It is also possible to retrieve the items from the archive by setting the parameter `fetch_from_archive` to `True`. :param backend_args: arguments to execute the backend :param archive_args: archive arguments :returns: iterator of items fetched by the backend :raises AttributeError: raised when any of the required parameters is not found
arthur/jobs.py
def _execute(self, backend_args, archive_args): """Execute a backend of Perceval. Run the backend of Perceval assigned to this job using the given arguments. It will raise an `AttributeError` when any of the required parameters to run the backend are not found. Other exceptions related to the execution of the backend will be raised too. This method will return an iterator of the items fetched by the backend. These items will include some metadata related to this job. It will also be possible to retrieve the items from the archive setting to `True` the parameter `fetch_from_archive`. :param backend_args: arguments to execute the backend :param archive_args: archive arguments :returns: iterator of items fetched by the backend :raises AttributeError: raised when any of the required parameters is not found """ if not archive_args or not archive_args['fetch_from_archive']: return perceval.backend.fetch(self._bklass, backend_args, self.category, manager=self.archive_manager) else: return perceval.backend.fetch_from_archive(self._bklass, backend_args, self.archive_manager, self.category, archive_args['archived_after'])
def _execute(self, backend_args, archive_args): """Execute a backend of Perceval. Run the backend of Perceval assigned to this job using the given arguments. It will raise an `AttributeError` when any of the required parameters to run the backend are not found. Other exceptions related to the execution of the backend will be raised too. This method will return an iterator of the items fetched by the backend. These items will include some metadata related to this job. It will also be possible to retrieve the items from the archive setting to `True` the parameter `fetch_from_archive`. :param backend_args: arguments to execute the backend :param archive_args: archive arguments :returns: iterator of items fetched by the backend :raises AttributeError: raised when any of the required parameters is not found """ if not archive_args or not archive_args['fetch_from_archive']: return perceval.backend.fetch(self._bklass, backend_args, self.category, manager=self.archive_manager) else: return perceval.backend.fetch_from_archive(self._bklass, backend_args, self.archive_manager, self.category, archive_args['archived_after'])
[ "Execute", "a", "backend", "of", "Perceval", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/jobs.py#L211-L241
[ "def", "_execute", "(", "self", ",", "backend_args", ",", "archive_args", ")", ":", "if", "not", "archive_args", "or", "not", "archive_args", "[", "'fetch_from_archive'", "]", ":", "return", "perceval", ".", "backend", ".", "fetch", "(", "self", ".", "_bklass", ",", "backend_args", ",", "self", ".", "category", ",", "manager", "=", "self", ".", "archive_manager", ")", "else", ":", "return", "perceval", ".", "backend", ".", "fetch_from_archive", "(", "self", ".", "_bklass", ",", "backend_args", ",", "self", ".", "archive_manager", ",", "self", ".", "category", ",", "archive_args", "[", "'archived_after'", "]", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
ElasticItemsWriter.create_index
Configure the index to work with
arthur/writers.py
def create_index(idx_url, clean=False): """Configure the index to work with""" try: r = requests.get(idx_url) except requests.exceptions.ConnectionError: cause = "Error connecting to Elastic Search (index: %s)" % idx_url raise ElasticSearchError(cause=cause) if r.status_code != 200: # The index does not exist r = requests.put(idx_url) if r.status_code != 200: logger.info("Can't create index %s (%s)", idx_url, r.status_code) cause = "Error creating Elastic Search index %s" % idx_url raise ElasticSearchError(cause=cause) logger.info("Index %s created", idx_url) return True elif r.status_code == 200 and clean: requests.delete(idx_url) requests.put(idx_url) logger.info("Index deleted and created (index: %s)", idx_url) return True return False
def create_index(idx_url, clean=False): """Configure the index to work with""" try: r = requests.get(idx_url) except requests.exceptions.ConnectionError: cause = "Error connecting to Elastic Search (index: %s)" % idx_url raise ElasticSearchError(cause=cause) if r.status_code != 200: # The index does not exist r = requests.put(idx_url) if r.status_code != 200: logger.info("Can't create index %s (%s)", idx_url, r.status_code) cause = "Error creating Elastic Search index %s" % idx_url raise ElasticSearchError(cause=cause) logger.info("Index %s created", idx_url) return True elif r.status_code == 200 and clean: requests.delete(idx_url) requests.put(idx_url) logger.info("Index deleted and created (index: %s)", idx_url) return True return False
[ "Configure", "the", "index", "to", "work", "with" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/writers.py#L124-L150
[ "def", "create_index", "(", "idx_url", ",", "clean", "=", "False", ")", ":", "try", ":", "r", "=", "requests", ".", "get", "(", "idx_url", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "cause", "=", "\"Error connecting to Elastic Search (index: %s)\"", "%", "idx_url", "raise", "ElasticSearchError", "(", "cause", "=", "cause", ")", "if", "r", ".", "status_code", "!=", "200", ":", "# The index does not exist", "r", "=", "requests", ".", "put", "(", "idx_url", ")", "if", "r", ".", "status_code", "!=", "200", ":", "logger", ".", "info", "(", "\"Can't create index %s (%s)\"", ",", "idx_url", ",", "r", ".", "status_code", ")", "cause", "=", "\"Error creating Elastic Search index %s\"", "%", "idx_url", "raise", "ElasticSearchError", "(", "cause", "=", "cause", ")", "logger", ".", "info", "(", "\"Index %s created\"", ",", "idx_url", ")", "return", "True", "elif", "r", ".", "status_code", "==", "200", "and", "clean", ":", "requests", ".", "delete", "(", "idx_url", ")", "requests", ".", "put", "(", "idx_url", ")", "logger", ".", "info", "(", "\"Index deleted and created (index: %s)\"", ",", "idx_url", ")", "return", "True", "return", "False" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
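A self-contained sketch of the same create-if-missing pattern using requests directly; the index URL is illustrative and an Elasticsearch instance is assumed to be reachable there.

import requests

idx_url = 'http://localhost:9200/items'   # hypothetical index URL

r = requests.get(idx_url)
if r.status_code != 200:
    # The index does not exist yet, so create it
    requests.put(idx_url).raise_for_status()
    print('index created')
else:
    print('index already exists')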
ElasticItemsWriter.create_mapping
Create a mapping
arthur/writers.py
def create_mapping(idx_url, mapping): """Create a mapping""" mapping_url = idx_url + '/items/_mapping' mapping = json.dumps(mapping) try: r = requests.put(mapping_url, data=mapping, headers={'Content-Type': 'application/json'}) except requests.exceptions.ConnectionError: cause = "Error connecting to Elastic Search (index: %s, url: %s)" \ % (idx_url, mapping_url) raise ElasticSearchError(cause=cause) if r.status_code != 200: reason = r.json()['error'] logger.info("Can't create mapping in %s. %s", mapping_url, reason) cause = "Error creating Elastic Search mapping %s. %s" % \ (mapping_url, reason) raise ElasticSearchError(cause=cause) else: logger.info("Mapping created in %s", mapping_url)
def create_mapping(idx_url, mapping): """Create a mapping""" mapping_url = idx_url + '/items/_mapping' mapping = json.dumps(mapping) try: r = requests.put(mapping_url, data=mapping, headers={'Content-Type': 'application/json'}) except requests.exceptions.ConnectionError: cause = "Error connecting to Elastic Search (index: %s, url: %s)" \ % (idx_url, mapping_url) raise ElasticSearchError(cause=cause) if r.status_code != 200: reason = r.json()['error'] logger.info("Can't create mapping in %s. %s", mapping_url, reason) cause = "Error creating Elastic Search mapping %s. %s" % \ (mapping_url, reason) raise ElasticSearchError(cause=cause) else: logger.info("Mapping created in %s", mapping_url)
[ "Create", "a", "mapping" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/writers.py#L153-L175
[ "def", "create_mapping", "(", "idx_url", ",", "mapping", ")", ":", "mapping_url", "=", "idx_url", "+", "'/items/_mapping'", "mapping", "=", "json", ".", "dumps", "(", "mapping", ")", "try", ":", "r", "=", "requests", ".", "put", "(", "mapping_url", ",", "data", "=", "mapping", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "cause", "=", "\"Error connecting to Elastic Search (index: %s, url: %s)\"", "%", "(", "idx_url", ",", "mapping_url", ")", "raise", "ElasticSearchError", "(", "cause", "=", "cause", ")", "if", "r", ".", "status_code", "!=", "200", ":", "reason", "=", "r", ".", "json", "(", ")", "[", "'error'", "]", "logger", ".", "info", "(", "\"Can't create mapping in %s. %s\"", ",", "mapping_url", ",", "reason", ")", "cause", "=", "\"Error creating Elastic Search mapping %s. %s\"", "%", "(", "mapping_url", ",", "reason", ")", "raise", "ElasticSearchError", "(", "cause", "=", "cause", ")", "else", ":", "logger", ".", "info", "(", "\"Mapping created in %s\"", ",", "mapping_url", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
json_encoder
Custom JSON encoder handler
arthur/server.py
def json_encoder(*args, **kwargs): """Custom JSON encoder handler""" obj = cherrypy.serving.request._json_inner_handler(*args, **kwargs) for chunk in JSONEncoder().iterencode(obj): yield chunk.encode('utf-8')
def json_encoder(*args, **kwargs): """Custom JSON encoder handler""" obj = cherrypy.serving.request._json_inner_handler(*args, **kwargs) for chunk in JSONEncoder().iterencode(obj): yield chunk.encode('utf-8')
[ "Custom", "JSON", "encoder", "handler" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/server.py#L39-L45
[ "def", "json_encoder", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "obj", "=", "cherrypy", ".", "serving", ".", "request", ".", "_json_inner_handler", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "chunk", "in", "JSONEncoder", "(", ")", ".", "iterencode", "(", "obj", ")", ":", "yield", "chunk", ".", "encode", "(", "'utf-8'", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
ArthurServer.write_items
Write items to the queue :param writer: the writer object :param items_generator: items to be written in the queue
arthur/server.py
def write_items(cls, writer, items_generator): """Write items to the queue :param writer: the writer object :param items_generator: items to be written in the queue """ while True: items = items_generator() writer.write(items) time.sleep(1)
def write_items(cls, writer, items_generator): """Write items to the queue :param writer: the writer object :param items_generator: items to be written in the queue """ while True: items = items_generator() writer.write(items) time.sleep(1)
[ "Write", "items", "to", "the", "queue" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/server.py#L73-L82
[ "def", "write_items", "(", "cls", ",", "writer", ",", "items_generator", ")", ":", "while", "True", ":", "items", "=", "items_generator", "(", ")", "writer", ".", "write", "(", "items", ")", "time", ".", "sleep", "(", "1", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
ArthurServer.add
Add tasks
arthur/server.py
def add(self): """Add tasks""" payload = cherrypy.request.json logger.debug("Reading tasks...") for task_data in payload['tasks']: try: category = task_data['category'] backend_args = task_data['backend_args'] archive_args = task_data.get('archive', None) sched_args = task_data.get('scheduler', None) except KeyError as ex: logger.error("Task badly formed") raise ex from_date = backend_args.get('from_date', None) if from_date: backend_args['from_date'] = str_to_datetime(from_date) super().add_task(task_data['task_id'], task_data['backend'], category, backend_args, archive_args=archive_args, sched_args=sched_args) logger.debug("Done. Ready to work!") return "Tasks added"
def add(self): """Add tasks""" payload = cherrypy.request.json logger.debug("Reading tasks...") for task_data in payload['tasks']: try: category = task_data['category'] backend_args = task_data['backend_args'] archive_args = task_data.get('archive', None) sched_args = task_data.get('scheduler', None) except KeyError as ex: logger.error("Task badly formed") raise ex from_date = backend_args.get('from_date', None) if from_date: backend_args['from_date'] = str_to_datetime(from_date) super().add_task(task_data['task_id'], task_data['backend'], category, backend_args, archive_args=archive_args, sched_args=sched_args) logger.debug("Done. Ready to work!") return "Tasks added"
[ "Add", "tasks" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/server.py#L86-L115
[ "def", "add", "(", "self", ")", ":", "payload", "=", "cherrypy", ".", "request", ".", "json", "logger", ".", "debug", "(", "\"Reading tasks...\"", ")", "for", "task_data", "in", "payload", "[", "'tasks'", "]", ":", "try", ":", "category", "=", "task_data", "[", "'category'", "]", "backend_args", "=", "task_data", "[", "'backend_args'", "]", "archive_args", "=", "task_data", ".", "get", "(", "'archive'", ",", "None", ")", "sched_args", "=", "task_data", ".", "get", "(", "'scheduler'", ",", "None", ")", "except", "KeyError", "as", "ex", ":", "logger", ".", "error", "(", "\"Task badly formed\"", ")", "raise", "ex", "from_date", "=", "backend_args", ".", "get", "(", "'from_date'", ",", "None", ")", "if", "from_date", ":", "backend_args", "[", "'from_date'", "]", "=", "str_to_datetime", "(", "from_date", ")", "super", "(", ")", ".", "add_task", "(", "task_data", "[", "'task_id'", "]", ",", "task_data", "[", "'backend'", "]", ",", "category", ",", "backend_args", ",", "archive_args", "=", "archive_args", ",", "sched_args", "=", "sched_args", ")", "logger", ".", "debug", "(", "\"Done. Ready to work!\"", ")", "return", "\"Tasks added\"" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
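A sketch of the JSON payload the add endpoint expects, posted with requests; the server URL, task id and argument values are illustrative, and the 'archive' and 'scheduler' blocks are optional, as the code above shows.

import requests

payload = {
    'tasks': [
        {
            'task_id': 'kingarthur-git',
            'backend': 'git',
            'category': 'commit',
            'backend_args': {
                'uri': 'https://github.com/chaoss/grimoirelab-kingarthur.git',
                'gitpath': '/tmp/kingarthur.git',
                'from_date': '2018-01-01'
            }
        }
    ]
}

r = requests.post('http://127.0.0.1:8080/add', json=payload)
print(r.status_code, r.text)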
ArthurServer.remove
Remove tasks
arthur/server.py
def remove(self): """Remove tasks""" payload = cherrypy.request.json logger.debug("Reading tasks to remove...") task_ids = {} for task_data in payload['tasks']: task_id = task_data['task_id'] removed = super().remove_task(task_id) task_ids[task_id] = removed result = {'tasks': task_ids} return result
def remove(self): """Remove tasks""" payload = cherrypy.request.json logger.debug("Reading tasks to remove...") task_ids = {} for task_data in payload['tasks']: task_id = task_data['task_id'] removed = super().remove_task(task_id) task_ids[task_id] = removed result = {'tasks': task_ids} return result
[ "Remove", "tasks" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/server.py#L120-L135
[ "def", "remove", "(", "self", ")", ":", "payload", "=", "cherrypy", ".", "request", ".", "json", "logger", ".", "debug", "(", "\"Reading tasks to remove...\"", ")", "task_ids", "=", "{", "}", "for", "task_data", "in", "payload", "[", "'tasks'", "]", ":", "task_id", "=", "task_data", "[", "'task_id'", "]", "removed", "=", "super", "(", ")", ".", "remove_task", "(", "task_id", ")", "task_ids", "[", "task_id", "]", "=", "removed", "result", "=", "{", "'tasks'", ":", "task_ids", "}", "return", "result" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
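The matching payload for the remove endpoint, which only needs the task ids; again the server URL is illustrative.

import requests

payload = {'tasks': [{'task_id': 'kingarthur-git'}]}
r = requests.post('http://127.0.0.1:8080/remove', json=payload)
print(r.json())   # e.g. {'tasks': {'kingarthur-git': True}}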
ArthurServer.tasks
List tasks
arthur/server.py
def tasks(self): """List tasks""" logger.debug("API 'tasks' method called") result = [task.to_dict() for task in self._tasks.tasks] result = {'tasks': result} logger.debug("Tasks registry read") return result
def tasks(self): """List tasks""" logger.debug("API 'tasks' method called") result = [task.to_dict() for task in self._tasks.tasks] result = {'tasks': result} logger.debug("Tasks registry read") return result
[ "List", "tasks" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/server.py#L139-L149
[ "def", "tasks", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"API 'tasks' method called\"", ")", "result", "=", "[", "task", ".", "to_dict", "(", ")", "for", "task", "in", "self", ".", "_tasks", ".", "tasks", "]", "result", "=", "{", "'tasks'", ":", "result", "}", "logger", ".", "debug", "(", "\"Tasks registry read\"", ")", "return", "result" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Arthur.add_task
Add and schedule a task. :param task_id: id of the task :param backend: name of the backend :param category: category of the items to fetch :param backend_args: args needed to initialize the backend :param archive_args: args needed to initialize the archive :param sched_args: scheduling args for this task :returns: the task created
arthur/arthur.py
def add_task(self, task_id, backend, category, backend_args, archive_args=None, sched_args=None): """Add and schedule a task. :param task_id: id of the task :param backend: name of the backend :param category: category of the items to fecth :param backend_args: args needed to initialize the backend :param archive_args: args needed to initialize the archive :param sched_args: scheduling args for this task :returns: the task created """ try: archiving_cfg = self.__parse_archive_args(archive_args) scheduling_cfg = self.__parse_schedule_args(sched_args) self.__validate_args(task_id, backend, category, backend_args) except ValueError as e: raise e try: task = self._tasks.add(task_id, backend, category, backend_args, archiving_cfg=archiving_cfg, scheduling_cfg=scheduling_cfg) except AlreadyExistsError as e: raise e self._scheduler.schedule_task(task.task_id) return task
def add_task(self, task_id, backend, category, backend_args, archive_args=None, sched_args=None): """Add and schedule a task. :param task_id: id of the task :param backend: name of the backend :param category: category of the items to fecth :param backend_args: args needed to initialize the backend :param archive_args: args needed to initialize the archive :param sched_args: scheduling args for this task :returns: the task created """ try: archiving_cfg = self.__parse_archive_args(archive_args) scheduling_cfg = self.__parse_schedule_args(sched_args) self.__validate_args(task_id, backend, category, backend_args) except ValueError as e: raise e try: task = self._tasks.add(task_id, backend, category, backend_args, archiving_cfg=archiving_cfg, scheduling_cfg=scheduling_cfg) except AlreadyExistsError as e: raise e self._scheduler.schedule_task(task.task_id) return task
[ "Add", "and", "schedule", "a", "task", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/arthur.py#L61-L90
[ "def", "add_task", "(", "self", ",", "task_id", ",", "backend", ",", "category", ",", "backend_args", ",", "archive_args", "=", "None", ",", "sched_args", "=", "None", ")", ":", "try", ":", "archiving_cfg", "=", "self", ".", "__parse_archive_args", "(", "archive_args", ")", "scheduling_cfg", "=", "self", ".", "__parse_schedule_args", "(", "sched_args", ")", "self", ".", "__validate_args", "(", "task_id", ",", "backend", ",", "category", ",", "backend_args", ")", "except", "ValueError", "as", "e", ":", "raise", "e", "try", ":", "task", "=", "self", ".", "_tasks", ".", "add", "(", "task_id", ",", "backend", ",", "category", ",", "backend_args", ",", "archiving_cfg", "=", "archiving_cfg", ",", "scheduling_cfg", "=", "scheduling_cfg", ")", "except", "AlreadyExistsError", "as", "e", ":", "raise", "e", "self", ".", "_scheduler", ".", "schedule_task", "(", "task", ".", "task_id", ")", "return", "task" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
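A sketch of calling add_task programmatically, assuming an arthur.arthur.Arthur instance named server has already been created with a Redis connection; the task id and backend arguments are illustrative.

task = server.add_task(
    task_id='kingarthur-git',
    backend='git',
    category='commit',
    backend_args={
        'uri': 'https://github.com/chaoss/grimoirelab-kingarthur.git',
        'gitpath': '/tmp/kingarthur.git'
    }
)
print(task.task_id)   # 'kingarthur-git'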
Arthur.remove_task
Remove and cancel a task. :param task_id: id of the task to be removed
arthur/arthur.py
def remove_task(self, task_id): """Remove and cancel a task. :param task_id: id of the task to be removed """ try: self._scheduler.cancel_task(task_id) except NotFoundError as e: logger.info("Cannot cancel %s task because it does not exist.", task_id) return False else: return True
def remove_task(self, task_id): """Remove and cancel a task. :param task_id: id of the task to be removed """ try: self._scheduler.cancel_task(task_id) except NotFoundError as e: logger.info("Cannot cancel %s task because it does not exist.", task_id) return False else: return True
[ "Remove", "and", "cancel", "a", "task", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/arthur.py#L92-L104
[ "def", "remove_task", "(", "self", ",", "task_id", ")", ":", "try", ":", "self", ".", "_scheduler", ".", "cancel_task", "(", "task_id", ")", "except", "NotFoundError", "as", "e", ":", "logger", ".", "info", "(", "\"Cannot cancel %s task because it does not exist.\"", ",", "task_id", ")", "return", "False", "else", ":", "return", "True" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Arthur.items
Get the items fetched by the jobs.
arthur/arthur.py
def items(self): """Get the items fetched by the jobs.""" # Get and remove queued items in an atomic transaction pipe = self.conn.pipeline() pipe.lrange(Q_STORAGE_ITEMS, 0, -1) pipe.ltrim(Q_STORAGE_ITEMS, 1, 0) items = pipe.execute()[0] for item in items: item = pickle.loads(item) yield item
def items(self): """Get the items fetched by the jobs.""" # Get and remove queued items in an atomic transaction pipe = self.conn.pipeline() pipe.lrange(Q_STORAGE_ITEMS, 0, -1) pipe.ltrim(Q_STORAGE_ITEMS, 1, 0) items = pipe.execute()[0] for item in items: item = pickle.loads(item) yield item
[ "Get", "the", "items", "fetched", "by", "the", "jobs", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/arthur.py#L106-L117
[ "def", "items", "(", "self", ")", ":", "# Get and remove queued items in an atomic transaction", "pipe", "=", "self", ".", "conn", ".", "pipeline", "(", ")", "pipe", ".", "lrange", "(", "Q_STORAGE_ITEMS", ",", "0", ",", "-", "1", ")", "pipe", ".", "ltrim", "(", "Q_STORAGE_ITEMS", ",", "1", ",", "0", ")", "items", "=", "pipe", ".", "execute", "(", ")", "[", "0", "]", "for", "item", "in", "items", ":", "item", "=", "pickle", ".", "loads", "(", "item", ")", "yield", "item" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
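A sketch of draining the fetched items, again assuming an existing Arthur instance named server; items() empties the shared Redis list and yields plain dicts, here dumped to a JSON-lines file.

import json

with open('/tmp/items.jsonl', 'w') as fd:
    for item in server.items():
        fd.write(json.dumps(item, default=str) + '\n')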
Arthur.__validate_args
Check that the task arguments received are valid
arthur/arthur.py
def __validate_args(task_id, backend, category, backend_args): """Check that the task arguments received are valid""" if not task_id or task_id.strip() == "": msg = "Missing task_id for task" raise ValueError(msg) if not backend or backend.strip() == "": msg = "Missing backend for task '%s'" % task_id raise ValueError(msg) if backend_args and not isinstance(backend_args, dict): msg = "Backend_args is not a dict, task '%s'" % task_id raise ValueError(msg) if not category or category.strip() == "": msg = "Missing category for task '%s'" % task_id raise ValueError(msg)
def __validate_args(task_id, backend, category, backend_args): """Check that the task arguments received are valid""" if not task_id or task_id.strip() == "": msg = "Missing task_id for task" raise ValueError(msg) if not backend or backend.strip() == "": msg = "Missing backend for task '%s'" % task_id raise ValueError(msg) if backend_args and not isinstance(backend_args, dict): msg = "Backend_args is not a dict, task '%s'" % task_id raise ValueError(msg) if not category or category.strip() == "": msg = "Missing category for task '%s'" % task_id raise ValueError(msg)
[ "Check", "that", "the", "task", "arguments", "received", "are", "valid" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/arthur.py#L120-L137
[ "def", "__validate_args", "(", "task_id", ",", "backend", ",", "category", ",", "backend_args", ")", ":", "if", "not", "task_id", "or", "task_id", ".", "strip", "(", ")", "==", "\"\"", ":", "msg", "=", "\"Missing task_id for task\"", "raise", "ValueError", "(", "msg", ")", "if", "not", "backend", "or", "backend", ".", "strip", "(", ")", "==", "\"\"", ":", "msg", "=", "\"Missing backend for task '%s'\"", "%", "task_id", "raise", "ValueError", "(", "msg", ")", "if", "backend_args", "and", "not", "isinstance", "(", "backend_args", ",", "dict", ")", ":", "msg", "=", "\"Backend_args is not a dict, task '%s'\"", "%", "task_id", "raise", "ValueError", "(", "msg", ")", "if", "not", "category", "or", "category", ".", "strip", "(", ")", "==", "\"\"", ":", "msg", "=", "\"Missing category for task '%s'\"", "%", "task_id", "raise", "ValueError", "(", "msg", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Arthur.__parse_archive_args
Parse the archive arguments of a task
arthur/arthur.py
def __parse_archive_args(self, archive_args): """Parse the archive arguments of a task""" if not archive_args: return None archiving_args = copy.deepcopy(archive_args) if self.archive_path: archiving_args['archive_path'] = self.archive_path else: archiving_args['archive_path'] = os.path.expanduser(ARCHIVES_DEFAULT_PATH) return ArchivingTaskConfig.from_dict(archiving_args)
def __parse_archive_args(self, archive_args): """Parse the archive arguments of a task""" if not archive_args: return None archiving_args = copy.deepcopy(archive_args) if self.archive_path: archiving_args['archive_path'] = self.archive_path else: archiving_args['archive_path'] = os.path.expanduser(ARCHIVES_DEFAULT_PATH) return ArchivingTaskConfig.from_dict(archiving_args)
[ "Parse", "the", "archive", "arguments", "of", "a", "task" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/arthur.py#L139-L152
[ "def", "__parse_archive_args", "(", "self", ",", "archive_args", ")", ":", "if", "not", "archive_args", ":", "return", "None", "archiving_args", "=", "copy", ".", "deepcopy", "(", "archive_args", ")", "if", "self", ".", "archive_path", ":", "archiving_args", "[", "'archive_path'", "]", "=", "self", ".", "archive_path", "else", ":", "archiving_args", "[", "'archive_path'", "]", "=", "os", ".", "path", ".", "expanduser", "(", "ARCHIVES_DEFAULT_PATH", ")", "return", "ArchivingTaskConfig", ".", "from_dict", "(", "archiving_args", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
ArthurWorker.perform_job
Custom method to execute a job and notify of its result :param job: Job object :param queue: the queue containing the object
arthur/worker.py
def perform_job(self, job, queue): """Custom method to execute a job and notify of its result :param job: Job object :param queue: the queue containing the object """ result = super().perform_job(job, queue) job_status = job.get_status() job_result = job.return_value if job_status == 'finished' else None data = { 'job_id': job.id, 'status': job_status, 'result': job_result } msg = pickle.dumps(data) self.connection.publish(self.pubsub_channel, msg) return result
def perform_job(self, job, queue): """Custom method to execute a job and notify of its result :param job: Job object :param queue: the queue containing the object """ result = super().perform_job(job, queue) job_status = job.get_status() job_result = job.return_value if job_status == 'finished' else None data = { 'job_id': job.id, 'status': job_status, 'result': job_result } msg = pickle.dumps(data) self.connection.publish(self.pubsub_channel, msg) return result
[ "Custom", "method", "to", "execute", "a", "job", "and", "notify", "of", "its", "result" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/worker.py#L50-L71
[ "def", "perform_job", "(", "self", ",", "job", ",", "queue", ")", ":", "result", "=", "super", "(", ")", ".", "perform_job", "(", "job", ",", "queue", ")", "job_status", "=", "job", ".", "get_status", "(", ")", "job_result", "=", "job", ".", "return_value", "if", "job_status", "==", "'finished'", "else", "None", "data", "=", "{", "'job_id'", ":", "job", ".", "id", ",", "'status'", ":", "job_status", ",", "'result'", ":", "job_result", "}", "msg", "=", "pickle", ".", "dumps", "(", "data", ")", "self", ".", "connection", ".", "publish", "(", "self", ".", "pubsub_channel", ",", "msg", ")", "return", "result" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
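A minimal sketch of a subscriber on the receiving end of the worker's pub/sub notification, unpickling the message exactly as published above; the channel name is hypothetical and must match the pubsub_channel the worker was configured with.

import pickle
import redis

conn = redis.Redis()
pubsub = conn.pubsub()
pubsub.subscribe('ch_pubsub')   # hypothetical channel name

for msg in pubsub.listen():
    if msg['type'] != 'message':
        continue
    data = pickle.loads(msg['data'])
    print(data['job_id'], data['status'])
    break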
_JobScheduler.schedule_job_task
Schedule a job in the given queue.
arthur/scheduler.py
def schedule_job_task(self, queue_id, task_id, job_args, delay=0): """Schedule a job in the given queue.""" self._rwlock.writer_acquire() job_id = self._generate_job_id(task_id) event = self._scheduler.enter(delay, 1, self._enqueue_job, argument=(queue_id, job_id, job_args,)) self._jobs[job_id] = event self._tasks[task_id] = job_id self._rwlock.writer_release() logging.debug("Job #%s (task: %s) scheduled on %s (wait: %s)", job_id, task_id, queue_id, delay) return job_id
def schedule_job_task(self, queue_id, task_id, job_args, delay=0): """Schedule a job in the given queue.""" self._rwlock.writer_acquire() job_id = self._generate_job_id(task_id) event = self._scheduler.enter(delay, 1, self._enqueue_job, argument=(queue_id, job_id, job_args,)) self._jobs[job_id] = event self._tasks[task_id] = job_id self._rwlock.writer_release() logging.debug("Job #%s (task: %s) scheduled on %s (wait: %s)", job_id, task_id, queue_id, delay) return job_id
[ "Schedule", "a", "job", "in", "the", "given", "queue", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L117-L134
[ "def", "schedule_job_task", "(", "self", ",", "queue_id", ",", "task_id", ",", "job_args", ",", "delay", "=", "0", ")", ":", "self", ".", "_rwlock", ".", "writer_acquire", "(", ")", "job_id", "=", "self", ".", "_generate_job_id", "(", "task_id", ")", "event", "=", "self", ".", "_scheduler", ".", "enter", "(", "delay", ",", "1", ",", "self", ".", "_enqueue_job", ",", "argument", "=", "(", "queue_id", ",", "job_id", ",", "job_args", ",", ")", ")", "self", ".", "_jobs", "[", "job_id", "]", "=", "event", "self", ".", "_tasks", "[", "task_id", "]", "=", "job_id", "self", ".", "_rwlock", ".", "writer_release", "(", ")", "logging", ".", "debug", "(", "\"Job #%s (task: %s) scheduled on %s (wait: %s)\"", ",", "job_id", ",", "task_id", ",", "queue_id", ",", "delay", ")", "return", "job_id" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
_JobScheduler.cancel_job_task
Cancel the job related to the given task.
arthur/scheduler.py
def cancel_job_task(self, task_id): """Cancel the job related to the given task.""" try: self._rwlock.writer_acquire() job_id = self._tasks.get(task_id, None) if job_id: self._cancel_job(job_id) else: logger.warning("Task %s set to be removed was not found", task_id) finally: self._rwlock.writer_release()
def cancel_job_task(self, task_id): """Cancel the job related to the given task.""" try: self._rwlock.writer_acquire() job_id = self._tasks.get(task_id, None) if job_id: self._cancel_job(job_id) else: logger.warning("Task %s set to be removed was not found", task_id) finally: self._rwlock.writer_release()
[ "Cancel", "the", "job", "related", "to", "the", "given", "task", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L136-L150
[ "def", "cancel_job_task", "(", "self", ",", "task_id", ")", ":", "try", ":", "self", ".", "_rwlock", ".", "writer_acquire", "(", ")", "job_id", "=", "self", ".", "_tasks", ".", "get", "(", "task_id", ",", "None", ")", "if", "job_id", ":", "self", ".", "_cancel_job", "(", "job_id", ")", "else", ":", "logger", ".", "warning", "(", "\"Task %s set to be removed was not found\"", ",", "task_id", ")", "finally", ":", "self", ".", "_rwlock", ".", "writer_release", "(", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
_JobListener.run
Run thread to listen for jobs and reschedule successful ones.
arthur/scheduler.py
def run(self): """Run thread to listen for jobs and reschedule successful ones.""" try: self.listen() except Exception as e: logger.critical("JobListener instence crashed. Error: %s", str(e)) logger.critical(traceback.format_exc())
def run(self): """Run thread to listen for jobs and reschedule successful ones.""" try: self.listen() except Exception as e: logger.critical("JobListener instence crashed. Error: %s", str(e)) logger.critical(traceback.format_exc())
[ "Run", "thread", "to", "listen", "for", "jobs", "and", "reschedule", "successful", "ones", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L211-L218
[ "def", "run", "(", "self", ")", ":", "try", ":", "self", ".", "listen", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "critical", "(", "\"JobListener instence crashed. Error: %s\"", ",", "str", "(", "e", ")", ")", "logger", ".", "critical", "(", "traceback", ".", "format_exc", "(", ")", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
_JobListener.listen
Listen for completed jobs and reschedule successful ones.
arthur/scheduler.py
def listen(self): """Listen for completed jobs and reschedule successful ones.""" pubsub = self.conn.pubsub() pubsub.subscribe(self.pubsub_channel) logger.debug("Listening on channel %s", self.pubsub_channel) for msg in pubsub.listen(): logger.debug("New message received of type %s", str(msg['type'])) if msg['type'] != 'message': logger.debug("Ignoring job message") continue data = pickle.loads(msg['data']) job_id = data['job_id'] job = rq.job.Job.fetch(job_id, connection=self.conn) if data['status'] == 'finished': logging.debug("Job #%s completed", job_id) handler = self.result_handler elif data['status'] == 'failed': logging.debug("Job #%s failed", job_id) handler = self.result_handler_err else: continue if handler: logging.debug("Calling handler for job #%s", job_id) handler(job)
def listen(self): """Listen for completed jobs and reschedule successful ones.""" pubsub = self.conn.pubsub() pubsub.subscribe(self.pubsub_channel) logger.debug("Listening on channel %s", self.pubsub_channel) for msg in pubsub.listen(): logger.debug("New message received of type %s", str(msg['type'])) if msg['type'] != 'message': logger.debug("Ignoring job message") continue data = pickle.loads(msg['data']) job_id = data['job_id'] job = rq.job.Job.fetch(job_id, connection=self.conn) if data['status'] == 'finished': logging.debug("Job #%s completed", job_id) handler = self.result_handler elif data['status'] == 'failed': logging.debug("Job #%s failed", job_id) handler = self.result_handler_err else: continue if handler: logging.debug("Calling handler for job #%s", job_id) handler(job)
[ "Listen", "for", "completed", "jobs", "and", "reschedule", "successful", "ones", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L220-L251
[ "def", "listen", "(", "self", ")", ":", "pubsub", "=", "self", ".", "conn", ".", "pubsub", "(", ")", "pubsub", ".", "subscribe", "(", "self", ".", "pubsub_channel", ")", "logger", ".", "debug", "(", "\"Listening on channel %s\"", ",", "self", ".", "pubsub_channel", ")", "for", "msg", "in", "pubsub", ".", "listen", "(", ")", ":", "logger", ".", "debug", "(", "\"New message received of type %s\"", ",", "str", "(", "msg", "[", "'type'", "]", ")", ")", "if", "msg", "[", "'type'", "]", "!=", "'message'", ":", "logger", ".", "debug", "(", "\"Ignoring job message\"", ")", "continue", "data", "=", "pickle", ".", "loads", "(", "msg", "[", "'data'", "]", ")", "job_id", "=", "data", "[", "'job_id'", "]", "job", "=", "rq", ".", "job", ".", "Job", ".", "fetch", "(", "job_id", ",", "connection", "=", "self", ".", "conn", ")", "if", "data", "[", "'status'", "]", "==", "'finished'", ":", "logging", ".", "debug", "(", "\"Job #%s completed\"", ",", "job_id", ")", "handler", "=", "self", ".", "result_handler", "elif", "data", "[", "'status'", "]", "==", "'failed'", ":", "logging", ".", "debug", "(", "\"Job #%s failed\"", ",", "job_id", ")", "handler", "=", "self", ".", "result_handler_err", "else", ":", "continue", "if", "handler", ":", "logging", ".", "debug", "(", "\"Calling handler for job #%s\"", ",", "job_id", ")", "handler", "(", "job", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler.schedule
Start scheduling jobs.
arthur/scheduler.py
def schedule(self): """Start scheduling jobs.""" if self.async_mode: self._scheduler.start() self._listener.start() else: self._scheduler.schedule()
def schedule(self): """Start scheduling jobs.""" if self.async_mode: self._scheduler.start() self._listener.start() else: self._scheduler.schedule()
[ "Start", "scheduling", "jobs", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L280-L287
[ "def", "schedule", "(", "self", ")", ":", "if", "self", ".", "async_mode", ":", "self", ".", "_scheduler", ".", "start", "(", ")", "self", ".", "_listener", ".", "start", "(", ")", "else", ":", "self", ".", "_scheduler", ".", "schedule", "(", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler.schedule_task
Schedule a task. :param task_id: identifier of the task to schedule :raises NotFoundError: raised when the requested task is not found in the registry
arthur/scheduler.py
def schedule_task(self, task_id): """Schedule a task. :param task_id: identifier of the task to schedule :raises NotFoundError: raised when the requested task is not found in the registry """ task = self.registry.get(task_id) job_args = self._build_job_arguments(task) archiving_cfg = task.archiving_cfg fetch_from_archive = False if not archiving_cfg else archiving_cfg.fetch_from_archive # Schedule the job as soon as possible queue = Q_ARCHIVE_JOBS if fetch_from_archive else Q_CREATION_JOBS job_id = self._scheduler.schedule_job_task(queue, task.task_id, job_args, delay=0) logger.info("Job #%s (task: %s) scheduled", job_id, task.task_id) return job_id
def schedule_task(self, task_id): """Schedule a task. :param task_id: identifier of the task to schedule :raises NotFoundError: raised when the requested task is not found in the registry """ task = self.registry.get(task_id) job_args = self._build_job_arguments(task) archiving_cfg = task.archiving_cfg fetch_from_archive = False if not archiving_cfg else archiving_cfg.fetch_from_archive # Schedule the job as soon as possible queue = Q_ARCHIVE_JOBS if fetch_from_archive else Q_CREATION_JOBS job_id = self._scheduler.schedule_job_task(queue, task.task_id, job_args, delay=0) logger.info("Job #%s (task: %s) scheduled", job_id, task.task_id) return job_id
[ "Schedule", "a", "task", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L289-L312
[ "def", "schedule_task", "(", "self", ",", "task_id", ")", ":", "task", "=", "self", ".", "registry", ".", "get", "(", "task_id", ")", "job_args", "=", "self", ".", "_build_job_arguments", "(", "task", ")", "archiving_cfg", "=", "task", ".", "archiving_cfg", "fetch_from_archive", "=", "False", "if", "not", "archiving_cfg", "else", "archiving_cfg", ".", "fetch_from_archive", "# Schedule the job as soon as possible", "queue", "=", "Q_ARCHIVE_JOBS", "if", "fetch_from_archive", "else", "Q_CREATION_JOBS", "job_id", "=", "self", ".", "_scheduler", ".", "schedule_job_task", "(", "queue", ",", "task", ".", "task_id", ",", "job_args", ",", "delay", "=", "0", ")", "logger", ".", "info", "(", "\"Job #%s (task: %s) scheduled\"", ",", "job_id", ",", "task", ".", "task_id", ")", "return", "job_id" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler.cancel_task
Cancel or 'un-schedule' a task. :param task_id: identifier of the task to cancel :raises NotFoundError: raised when the requested task is not found in the registry
arthur/scheduler.py
def cancel_task(self, task_id): """Cancel or 'un-schedule' a task. :param task_id: identifier of the task to cancel :raises NotFoundError: raised when the requested task is not found in the registry """ self.registry.remove(task_id) self._scheduler.cancel_job_task(task_id) logger.info("Task %s canceled", task_id)
def cancel_task(self, task_id): """Cancel or 'un-schedule' a task. :param task_id: identifier of the task to cancel :raises NotFoundError: raised when the requested task is not found in the registry """ self.registry.remove(task_id) self._scheduler.cancel_job_task(task_id) logger.info("Task %s canceled", task_id)
[ "Cancel", "or", "un", "-", "schedule", "a", "task", "." ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L314-L325
[ "def", "cancel_task", "(", "self", ",", "task_id", ")", ":", "self", ".", "registry", ".", "remove", "(", "task_id", ")", "self", ".", "_scheduler", ".", "cancel_job_task", "(", "task_id", ")", "logger", ".", "info", "(", "\"Task %s canceled\"", ",", "task_id", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler._handle_successful_job
Handle successful jobs
arthur/scheduler.py
def _handle_successful_job(self, job): """Handle successful jobs""" result = job.result task_id = job.kwargs['task_id'] try: task = self.registry.get(task_id) except NotFoundError: logger.warning("Task %s not found; related job #%s will not be rescheduled", task_id, job.id) return if task.archiving_cfg and task.archiving_cfg.fetch_from_archive: logger.info("Job #%s (task: %s) successfully finished", job.id, task_id) return if result.nitems > 0: task.backend_args['next_from_date'] = unixtime_to_datetime(result.max_date) if result.offset: task.backend_args['next_offset'] = result.offset job_args = self._build_job_arguments(task) delay = task.scheduling_cfg.delay if task.scheduling_cfg else WAIT_FOR_QUEUING job_id = self._scheduler.schedule_job_task(Q_UPDATING_JOBS, task_id, job_args, delay=delay) logger.info("Job #%s (task: %s, old job: %s) re-scheduled", job_id, task_id, job.id)
def _handle_successful_job(self, job): """Handle successful jobs""" result = job.result task_id = job.kwargs['task_id'] try: task = self.registry.get(task_id) except NotFoundError: logger.warning("Task %s not found; related job #%s will not be rescheduled", task_id, job.id) return if task.archiving_cfg and task.archiving_cfg.fetch_from_archive: logger.info("Job #%s (task: %s) successfully finished", job.id, task_id) return if result.nitems > 0: task.backend_args['next_from_date'] = unixtime_to_datetime(result.max_date) if result.offset: task.backend_args['next_offset'] = result.offset job_args = self._build_job_arguments(task) delay = task.scheduling_cfg.delay if task.scheduling_cfg else WAIT_FOR_QUEUING job_id = self._scheduler.schedule_job_task(Q_UPDATING_JOBS, task_id, job_args, delay=delay) logger.info("Job #%s (task: %s, old job: %s) re-scheduled", job_id, task_id, job.id)
[ "Handle", "successufl", "jobs" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L327-L359
[ "def", "_handle_successful_job", "(", "self", ",", "job", ")", ":", "result", "=", "job", ".", "result", "task_id", "=", "job", ".", "kwargs", "[", "'task_id'", "]", "try", ":", "task", "=", "self", ".", "registry", ".", "get", "(", "task_id", ")", "except", "NotFoundError", ":", "logger", ".", "warning", "(", "\"Task %s not found; related job #%s will not be rescheduled\"", ",", "task_id", ",", "job", ".", "id", ")", "return", "if", "task", ".", "archiving_cfg", "and", "task", ".", "archiving_cfg", ".", "fetch_from_archive", ":", "logger", ".", "info", "(", "\"Job #%s (task: %s) successfully finished\"", ",", "job", ".", "id", ",", "task_id", ")", "return", "if", "result", ".", "nitems", ">", "0", ":", "task", ".", "backend_args", "[", "'next_from_date'", "]", "=", "unixtime_to_datetime", "(", "result", ".", "max_date", ")", "if", "result", ".", "offset", ":", "task", ".", "backend_args", "[", "'next_offset'", "]", "=", "result", ".", "offset", "job_args", "=", "self", ".", "_build_job_arguments", "(", "task", ")", "delay", "=", "task", ".", "scheduling_cfg", ".", "delay", "if", "task", ".", "scheduling_cfg", "else", "WAIT_FOR_QUEUING", "job_id", "=", "self", ".", "_scheduler", ".", "schedule_job_task", "(", "Q_UPDATING_JOBS", ",", "task_id", ",", "job_args", ",", "delay", "=", "delay", ")", "logger", ".", "info", "(", "\"Job #%s (task: %s, old job: %s) re-scheduled\"", ",", "job_id", ",", "task_id", ",", "job", ".", "id", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler._handle_failed_job
Handle failed jobs
arthur/scheduler.py
def _handle_failed_job(self, job): """Handle failed jobs""" task_id = job.kwargs['task_id'] logger.error("Job #%s (task: %s) failed; cancelled", job.id, task_id)
def _handle_failed_job(self, job): """Handle failed jobs""" task_id = job.kwargs['task_id'] logger.error("Job #%s (task: %s) failed; cancelled", job.id, task_id)
[ "Handle", "failed", "jobs" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L361-L366
[ "def", "_handle_failed_job", "(", "self", ",", "job", ")", ":", "task_id", "=", "job", ".", "kwargs", "[", "'task_id'", "]", "logger", ".", "error", "(", "\"Job #%s (task: %s) failed; cancelled\"", ",", "job", ".", "id", ",", "task_id", ")" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
Scheduler._build_job_arguments
Build the set of arguments required for running a job
arthur/scheduler.py
def _build_job_arguments(task): """Build the set of arguments required for running a job""" job_args = {} job_args['qitems'] = Q_STORAGE_ITEMS job_args['task_id'] = task.task_id # Backend parameters job_args['backend'] = task.backend backend_args = copy.deepcopy(task.backend_args) if 'next_from_date' in backend_args: backend_args['from_date'] = backend_args.pop('next_from_date') if 'next_offset' in backend_args: backend_args['offset'] = backend_args.pop('next_offset') job_args['backend_args'] = backend_args # Category job_args['category'] = task.category # Archiving parameters archiving_cfg = task.archiving_cfg job_args['archive_args'] = archiving_cfg.to_dict() if archiving_cfg else None # Scheduler parameters sched_cfg = task.scheduling_cfg job_args['max_retries'] = sched_cfg.max_retries if sched_cfg else MAX_JOB_RETRIES return job_args
def _build_job_arguments(task): """Build the set of arguments required for running a job""" job_args = {} job_args['qitems'] = Q_STORAGE_ITEMS job_args['task_id'] = task.task_id # Backend parameters job_args['backend'] = task.backend backend_args = copy.deepcopy(task.backend_args) if 'next_from_date' in backend_args: backend_args['from_date'] = backend_args.pop('next_from_date') if 'next_offset' in backend_args: backend_args['offset'] = backend_args.pop('next_offset') job_args['backend_args'] = backend_args # Category job_args['category'] = task.category # Archiving parameters archiving_cfg = task.archiving_cfg job_args['archive_args'] = archiving_cfg.to_dict() if archiving_cfg else None # Scheduler parameters sched_cfg = task.scheduling_cfg job_args['max_retries'] = sched_cfg.max_retries if sched_cfg else MAX_JOB_RETRIES return job_args
[ "Build", "the", "set", "of", "arguments", "required", "for", "running", "a", "job" ]
chaoss/grimoirelab-kingarthur
python
https://github.com/chaoss/grimoirelab-kingarthur/blob/9d6a638bee68d5e5c511f045eeebf06340fd3252/arthur/scheduler.py#L369-L399
[ "def", "_build_job_arguments", "(", "task", ")", ":", "job_args", "=", "{", "}", "job_args", "[", "'qitems'", "]", "=", "Q_STORAGE_ITEMS", "job_args", "[", "'task_id'", "]", "=", "task", ".", "task_id", "# Backend parameters", "job_args", "[", "'backend'", "]", "=", "task", ".", "backend", "backend_args", "=", "copy", ".", "deepcopy", "(", "task", ".", "backend_args", ")", "if", "'next_from_date'", "in", "backend_args", ":", "backend_args", "[", "'from_date'", "]", "=", "backend_args", ".", "pop", "(", "'next_from_date'", ")", "if", "'next_offset'", "in", "backend_args", ":", "backend_args", "[", "'offset'", "]", "=", "backend_args", ".", "pop", "(", "'next_offset'", ")", "job_args", "[", "'backend_args'", "]", "=", "backend_args", "# Category", "job_args", "[", "'category'", "]", "=", "task", ".", "category", "# Archiving parameters", "archiving_cfg", "=", "task", ".", "archiving_cfg", "job_args", "[", "'archive_args'", "]", "=", "archiving_cfg", ".", "to_dict", "(", ")", "if", "archiving_cfg", "else", "None", "# Scheduler parameters", "sched_cfg", "=", "task", ".", "scheduling_cfg", "job_args", "[", "'max_retries'", "]", "=", "sched_cfg", ".", "max_retries", "if", "sched_cfg", "else", "MAX_JOB_RETRIES", "return", "job_args" ]
9d6a638bee68d5e5c511f045eeebf06340fd3252
test
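A minimal, self-contained sketch of the `next_from_date`/`next_offset` renaming performed by `_build_job_arguments` above; the dictionary contents below are illustrative assumptions, not values taken from the kingarthur code.

import copy

# Hypothetical stand-in for the backend_args stored on a Task.
task_backend_args = {
    'uri': 'https://example.com/repo',
    'next_from_date': '2019-01-01T00:00:00+00:00',
    'next_offset': 1500,
}

# Mirror the renaming step: the "next_*" markers saved after a finished job
# become the "from_date"/"offset" arguments of the next job.
backend_args = copy.deepcopy(task_backend_args)
if 'next_from_date' in backend_args:
    backend_args['from_date'] = backend_args.pop('next_from_date')
if 'next_offset' in backend_args:
    backend_args['offset'] = backend_args.pop('next_offset')

print(backend_args)
# {'uri': 'https://example.com/repo', 'from_date': '2019-01-01T00:00:00+00:00', 'offset': 1500}
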
get_secret
Gets contents of secret file :param secret_name: The name of the secret present in BANANAS_SECRETS_DIR :param default: Default value to return if no secret was found :return: The secret or default if not found
bananas/secrets.py
def get_secret(secret_name, default=None): """ Gets contents of secret file :param secret_name: The name of the secret present in BANANAS_SECRETS_DIR :param default: Default value to return if no secret was found :return: The secret or default if not found """ secrets_dir = get_secrets_dir() secret_path = os.path.join(secrets_dir, secret_name) try: with open(secret_path, "r") as secret_file: return secret_file.read() except OSError: return default
def get_secret(secret_name, default=None): """ Gets contents of secret file :param secret_name: The name of the secret present in BANANAS_SECRETS_DIR :param default: Default value to return if no secret was found :return: The secret or default if not found """ secrets_dir = get_secrets_dir() secret_path = os.path.join(secrets_dir, secret_name) try: with open(secret_path, "r") as secret_file: return secret_file.read() except OSError: return default
[ "Gets", "contents", "of", "secret", "file" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/secrets.py#L8-L22
[ "def", "get_secret", "(", "secret_name", ",", "default", "=", "None", ")", ":", "secrets_dir", "=", "get_secrets_dir", "(", ")", "secret_path", "=", "os", ".", "path", ".", "join", "(", "secrets_dir", ",", "secret_name", ")", "try", ":", "with", "open", "(", "secret_path", ",", "\"r\"", ")", "as", "secret_file", ":", "return", "secret_file", ".", "read", "(", ")", "except", "OSError", ":", "return", "default" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
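A hedged usage sketch for the secret-file lookup shown in `get_secret` above. The helper below takes the secrets directory explicitly; in the real module the directory comes from `get_secrets_dir()` and the BANANAS_SECRETS_DIR setting, so treat the signature and file names here as assumptions for illustration.

import os
import tempfile

def read_secret(secrets_dir, secret_name, default=None):
    # Same shape as the recorded get_secret(), with the directory passed in
    # explicitly instead of being resolved from the environment.
    secret_path = os.path.join(secrets_dir, secret_name)
    try:
        with open(secret_path, "r") as secret_file:
            return secret_file.read()
    except OSError:
        return default

with tempfile.TemporaryDirectory() as secrets_dir:
    with open(os.path.join(secrets_dir, "db_password"), "w") as fh:
        fh.write("hunter2")
    print(read_secret(secrets_dir, "db_password"))          # 'hunter2'
    print(read_secret(secrets_dir, "missing", "fallback"))  # 'fallback'
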
register
Register the API view class in the bananas router. :param BananasAPI view:
bananas/admin/api/router.py
def register(view): # Type[BananasAPI] """ Register the API view class in the bananas router. :param BananasAPI view: """ meta = view.get_admin_meta() prefix = meta.basename.replace(".", "/") router.register(prefix, view, meta.basename)
def register(view): # Type[BananasAPI] """ Register the API view class in the bananas router. :param BananasAPI view: """ meta = view.get_admin_meta() prefix = meta.basename.replace(".", "/") router.register(prefix, view, meta.basename)
[ "Register", "the", "API", "view", "class", "in", "the", "bananas", "router", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/router.py#L6-L14
[ "def", "register", "(", "view", ")", ":", "# Type[BananasAPI]", "meta", "=", "view", ".", "get_admin_meta", "(", ")", "prefix", "=", "meta", ".", "basename", ".", "replace", "(", "\".\"", ",", "\"/\"", ")", "router", ".", "register", "(", "prefix", ",", "view", ",", "meta", ".", "basename", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
register
Register a generic class based view wrapped with ModelAdmin and fake model :param view: The AdminView to register. :param admin_site: The AdminSite to register the view on. Defaults to bananas.admin.ExtendedAdminSite. :param admin_class: The ModelAdmin class to use for e.g. permissions. Defaults to bananas.admin.ModelAdminView. Example: @register # Or with args @register(admin_class=MyModelAdminSubclass) class MyAdminView(bananas.admin.AdminView): def get(self, request): return self.render('template.html', {}) # Also possible: register(MyAdminView, admin_class=MyModelAdminSubclass)
bananas/admin/extension.py
def register(view=None, *, admin_site=None, admin_class=ModelAdminView): """ Register a generic class based view wrapped with ModelAdmin and fake model :param view: The AdminView to register. :param admin_site: The AdminSite to register the view on. Defaults to bananas.admin.ExtendedAdminSite. :param admin_class: The ModelAdmin class to use for eg. permissions. Defaults to bananas.admin.ModelAdminView. Example: @register # Or with args @register(admin_class=MyModelAdminSubclass) class MyAdminView(bananas.admin.AdminView): def get(self, request): return self.render('template.html', {}) # Also possible: register(MyAdminView, admin_class=MyModelAdminSublass) """ if not admin_site: admin_site = site def wrapped(inner_view): module = inner_view.__module__ app_label = re.search(r"\.?(\w+)\.admin", module).group(1) app_config = apps.get_app_config(app_label) label = getattr(inner_view, "label", None) if not label: label = re.sub("(Admin)|(View)", "", inner_view.__name__).lower() inner_view.label = label model_name = label.capitalize() verbose_name = getattr(inner_view, "verbose_name", model_name) inner_view.verbose_name = verbose_name access_perm_codename = "can_access_" + model_name.lower() access_perm_name = _("Can access {verbose_name}").format( verbose_name=verbose_name ) # The first permission here is expected to be # the general access permission. permissions = tuple( [(access_perm_codename, access_perm_name)] + list(getattr(inner_view, "permissions", [])) ) model = type( model_name, (Model,), { "__module__": module + ".__models__", # Fake "View": inner_view, "app_config": app_config, "Meta": type( "Meta", (object,), dict( managed=False, abstract=True, app_label=app_config.label, verbose_name=verbose_name, verbose_name_plural=verbose_name, permissions=permissions, ), ), }, ) admin_site._registry[model] = admin_class(model, admin_site) return inner_view if view is None: # Used as a decorator return wrapped return wrapped(view)
def register(view=None, *, admin_site=None, admin_class=ModelAdminView): """ Register a generic class based view wrapped with ModelAdmin and fake model :param view: The AdminView to register. :param admin_site: The AdminSite to register the view on. Defaults to bananas.admin.ExtendedAdminSite. :param admin_class: The ModelAdmin class to use for eg. permissions. Defaults to bananas.admin.ModelAdminView. Example: @register # Or with args @register(admin_class=MyModelAdminSubclass) class MyAdminView(bananas.admin.AdminView): def get(self, request): return self.render('template.html', {}) # Also possible: register(MyAdminView, admin_class=MyModelAdminSublass) """ if not admin_site: admin_site = site def wrapped(inner_view): module = inner_view.__module__ app_label = re.search(r"\.?(\w+)\.admin", module).group(1) app_config = apps.get_app_config(app_label) label = getattr(inner_view, "label", None) if not label: label = re.sub("(Admin)|(View)", "", inner_view.__name__).lower() inner_view.label = label model_name = label.capitalize() verbose_name = getattr(inner_view, "verbose_name", model_name) inner_view.verbose_name = verbose_name access_perm_codename = "can_access_" + model_name.lower() access_perm_name = _("Can access {verbose_name}").format( verbose_name=verbose_name ) # The first permission here is expected to be # the general access permission. permissions = tuple( [(access_perm_codename, access_perm_name)] + list(getattr(inner_view, "permissions", [])) ) model = type( model_name, (Model,), { "__module__": module + ".__models__", # Fake "View": inner_view, "app_config": app_config, "Meta": type( "Meta", (object,), dict( managed=False, abstract=True, app_label=app_config.label, verbose_name=verbose_name, verbose_name_plural=verbose_name, permissions=permissions, ), ), }, ) admin_site._registry[model] = admin_class(model, admin_site) return inner_view if view is None: # Used as a decorator return wrapped return wrapped(view)
[ "Register", "a", "generic", "class", "based", "view", "wrapped", "with", "ModelAdmin", "and", "fake", "model" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/extension.py#L144-L221
[ "def", "register", "(", "view", "=", "None", ",", "*", ",", "admin_site", "=", "None", ",", "admin_class", "=", "ModelAdminView", ")", ":", "if", "not", "admin_site", ":", "admin_site", "=", "site", "def", "wrapped", "(", "inner_view", ")", ":", "module", "=", "inner_view", ".", "__module__", "app_label", "=", "re", ".", "search", "(", "r\"\\.?(\\w+)\\.admin\"", ",", "module", ")", ".", "group", "(", "1", ")", "app_config", "=", "apps", ".", "get_app_config", "(", "app_label", ")", "label", "=", "getattr", "(", "inner_view", ",", "\"label\"", ",", "None", ")", "if", "not", "label", ":", "label", "=", "re", ".", "sub", "(", "\"(Admin)|(View)\"", ",", "\"\"", ",", "inner_view", ".", "__name__", ")", ".", "lower", "(", ")", "inner_view", ".", "label", "=", "label", "model_name", "=", "label", ".", "capitalize", "(", ")", "verbose_name", "=", "getattr", "(", "inner_view", ",", "\"verbose_name\"", ",", "model_name", ")", "inner_view", ".", "verbose_name", "=", "verbose_name", "access_perm_codename", "=", "\"can_access_\"", "+", "model_name", ".", "lower", "(", ")", "access_perm_name", "=", "_", "(", "\"Can access {verbose_name}\"", ")", ".", "format", "(", "verbose_name", "=", "verbose_name", ")", "# The first permission here is expected to be", "# the general access permission.", "permissions", "=", "tuple", "(", "[", "(", "access_perm_codename", ",", "access_perm_name", ")", "]", "+", "list", "(", "getattr", "(", "inner_view", ",", "\"permissions\"", ",", "[", "]", ")", ")", ")", "model", "=", "type", "(", "model_name", ",", "(", "Model", ",", ")", ",", "{", "\"__module__\"", ":", "module", "+", "\".__models__\"", ",", "# Fake", "\"View\"", ":", "inner_view", ",", "\"app_config\"", ":", "app_config", ",", "\"Meta\"", ":", "type", "(", "\"Meta\"", ",", "(", "object", ",", ")", ",", "dict", "(", "managed", "=", "False", ",", "abstract", "=", "True", ",", "app_label", "=", "app_config", ".", "label", ",", "verbose_name", "=", "verbose_name", ",", "verbose_name_plural", "=", "verbose_name", ",", "permissions", "=", "permissions", ",", ")", ",", ")", ",", "}", ",", ")", "admin_site", ".", "_registry", "[", "model", "]", "=", "admin_class", "(", "model", ",", "admin_site", ")", "return", "inner_view", "if", "view", "is", "None", ":", "# Used as a decorator", "return", "wrapped", "return", "wrapped", "(", "view", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
BananasAPI.reverse_action
Extended DRF with fallback to requested namespace if request.version is missing
bananas/admin/api/mixins.py
def reverse_action(self, url_name, *args, **kwargs): """ Extended DRF with fallback to requested namespace if request.version is missing """ if self.request and not self.request.version: return reverse(self.get_url_name(url_name), *args, **kwargs) return super().reverse_action(url_name, *args, **kwargs)
def reverse_action(self, url_name, *args, **kwargs): """ Extended DRF with fallback to requested namespace if request.version is missing """ if self.request and not self.request.version: return reverse(self.get_url_name(url_name), *args, **kwargs) return super().reverse_action(url_name, *args, **kwargs)
[ "Extended", "DRF", "with", "fallback", "to", "requested", "namespace", "if", "request", ".", "version", "is", "missing" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/mixins.py#L69-L76
[ "def", "reverse_action", "(", "self", ",", "url_name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "request", "and", "not", "self", ".", "request", ".", "version", ":", "return", "reverse", "(", "self", ".", "get_url_name", "(", "url_name", ")", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "super", "(", ")", ".", "reverse_action", "(", "url_name", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
BananasAPI.get_url_name
Get full namespaced url name to use for reverse()
bananas/admin/api/mixins.py
def get_url_name(self, action_url_name="list"): """ Get full namespaced url name to use for reverse() """ url_name = "{}-{}".format(self.basename, action_url_name) namespace = self.request.resolver_match.namespace if namespace: url_name = "{}:{}".format(namespace, url_name) return url_name
def get_url_name(self, action_url_name="list"): """ Get full namespaced url name to use for reverse() """ url_name = "{}-{}".format(self.basename, action_url_name) namespace = self.request.resolver_match.namespace if namespace: url_name = "{}:{}".format(namespace, url_name) return url_name
[ "Get", "full", "namespaced", "url", "name", "to", "use", "for", "reverse", "()" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/mixins.py#L78-L88
[ "def", "get_url_name", "(", "self", ",", "action_url_name", "=", "\"list\"", ")", ":", "url_name", "=", "\"{}-{}\"", ".", "format", "(", "self", ".", "basename", ",", "action_url_name", ")", "namespace", "=", "self", ".", "request", ".", "resolver_match", ".", "namespace", "if", "namespace", ":", "url_name", "=", "\"{}:{}\"", ".", "format", "(", "namespace", ",", "url_name", ")", "return", "url_name" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
BananasAPI.get_view_name
Get or generate human readable view name. Extended version from DRF to support usage from both class and instance.
bananas/admin/api/mixins.py
def get_view_name(self, respect_name=True): """ Get or generate human readable view name. Extended version from DRF to support usage from both class and instance. """ if isinstance(self, type): view = self else: view = self.__class__ # Name may be set by some Views, such as a ViewSet. if respect_name: name = getattr(view, "name", None) if name is not None: return name name = view.__name__ for suffix in ("ViewSet", "View", "API", "Admin"): name = formatting.remove_trailing_string(name, suffix) name = formatting.camelcase_to_spaces(name) # Suffix may be set by some Views, such as a ViewSet. suffix = getattr(view, "suffix", None) if suffix: name += " " + suffix return name
def get_view_name(self, respect_name=True): """ Get or generate human readable view name. Extended version from DRF to support usage from both class and instance. """ if isinstance(self, type): view = self else: view = self.__class__ # Name may be set by some Views, such as a ViewSet. if respect_name: name = getattr(view, "name", None) if name is not None: return name name = view.__name__ for suffix in ("ViewSet", "View", "API", "Admin"): name = formatting.remove_trailing_string(name, suffix) name = formatting.camelcase_to_spaces(name) # Suffix may be set by some Views, such as a ViewSet. suffix = getattr(view, "suffix", None) if suffix: name += " " + suffix return name
[ "Get", "or", "generate", "human", "readable", "view", "name", ".", "Extended", "version", "from", "DRF", "to", "support", "usage", "from", "both", "class", "and", "instance", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/mixins.py#L90-L116
[ "def", "get_view_name", "(", "self", ",", "respect_name", "=", "True", ")", ":", "if", "isinstance", "(", "self", ",", "type", ")", ":", "view", "=", "self", "else", ":", "view", "=", "self", ".", "__class__", "# Name may be set by some Views, such as a ViewSet.", "if", "respect_name", ":", "name", "=", "getattr", "(", "view", ",", "\"name\"", ",", "None", ")", "if", "name", "is", "not", "None", ":", "return", "name", "name", "=", "view", ".", "__name__", "for", "suffix", "in", "(", "\"ViewSet\"", ",", "\"View\"", ",", "\"API\"", ",", "\"Admin\"", ")", ":", "name", "=", "formatting", ".", "remove_trailing_string", "(", "name", ",", "suffix", ")", "name", "=", "formatting", ".", "camelcase_to_spaces", "(", "name", ")", "# Suffix may be set by some Views, such as a ViewSet.", "suffix", "=", "getattr", "(", "view", ",", "\"suffix\"", ",", "None", ")", "if", "suffix", ":", "name", "+=", "\" \"", "+", "suffix", "return", "name" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
get_version
Derives a PEP386-compliant version number from VERSION.
bananas/__init__.py
def get_version(version=None): """Derives a PEP386-compliant version number from VERSION.""" if version is None: version = VERSION assert len(version) == 5 assert version[3] in ("alpha", "beta", "rc", "final") # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if version[2] == 0 else 3 main = ".".join(str(x) for x in version[:parts]) sub = "" if version[3] != "final": mapping = {"alpha": "a", "beta": "b", "rc": "c"} sub = mapping[version[3]] + str(version[4]) return main + sub
def get_version(version=None): """Derives a PEP386-compliant version number from VERSION.""" if version is None: version = VERSION assert len(version) == 5 assert version[3] in ("alpha", "beta", "rc", "final") # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if version[2] == 0 else 3 main = ".".join(str(x) for x in version[:parts]) sub = "" if version[3] != "final": mapping = {"alpha": "a", "beta": "b", "rc": "c"} sub = mapping[version[3]] + str(version[4]) return main + sub
[ "Derives", "a", "PEP386", "-", "compliant", "version", "number", "from", "VERSION", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/__init__.py#L4-L24
[ "def", "get_version", "(", "version", "=", "None", ")", ":", "if", "version", "is", "None", ":", "version", "=", "VERSION", "assert", "len", "(", "version", ")", "==", "5", "assert", "version", "[", "3", "]", "in", "(", "\"alpha\"", ",", "\"beta\"", ",", "\"rc\"", ",", "\"final\"", ")", "# Now build the two parts of the version number:", "# main = X.Y[.Z]", "# sub = .devN - for pre-alpha releases", "# | {a|b|c}N - for alpha, beta and rc releases", "parts", "=", "2", "if", "version", "[", "2", "]", "==", "0", "else", "3", "main", "=", "\".\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "version", "[", ":", "parts", "]", ")", "sub", "=", "\"\"", "if", "version", "[", "3", "]", "!=", "\"final\"", ":", "mapping", "=", "{", "\"alpha\"", ":", "\"a\"", ",", "\"beta\"", ":", "\"b\"", ",", "\"rc\"", ":", "\"c\"", "}", "sub", "=", "mapping", "[", "version", "[", "3", "]", "]", "+", "str", "(", "version", "[", "4", "]", ")", "return", "main", "+", "sub" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
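A standalone sketch of the VERSION-tuple formatting that `get_version` above performs; the sample tuples are invented for illustration and are not taken from the package.

def format_version(version):
    # Mirrors get_version(): X.Y when the patch level is zero, X.Y.Z otherwise,
    # plus an aN/bN/cN suffix for alpha/beta/rc releases.
    assert len(version) == 5
    assert version[3] in ("alpha", "beta", "rc", "final")
    parts = 2 if version[2] == 0 else 3
    main = ".".join(str(x) for x in version[:parts])
    sub = ""
    if version[3] != "final":
        mapping = {"alpha": "a", "beta": "b", "rc": "c"}
        sub = mapping[version[3]] + str(version[4])
    return main + sub

print(format_version((2, 0, 0, "final", 0)))   # '2.0'
print(format_version((2, 0, 3, "final", 0)))   # '2.0.3'
print(format_version((2, 1, 0, "alpha", 1)))   # '2.1a1'
print(format_version((2, 1, 0, "rc", 2)))      # '2.1c2'
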
BananasSwaggerSchema.get_summary_and_description
Compat: drf-yasg 1.12+
bananas/admin/api/schemas/yasg.py
def get_summary_and_description(self): """ Compat: drf-yasg 1.12+ """ summary = self.get_summary() _, description = super().get_summary_and_description() return summary, description
def get_summary_and_description(self): """ Compat: drf-yasg 1.12+ """ summary = self.get_summary() _, description = super().get_summary_and_description() return summary, description
[ "Compat", ":", "drf", "-", "yasg", "1", ".", "12", "+" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/schemas/yasg.py#L37-L43
[ "def", "get_summary_and_description", "(", "self", ")", ":", "summary", "=", "self", ".", "get_summary", "(", ")", "_", ",", "description", "=", "super", "(", ")", ".", "get_summary_and_description", "(", ")", "return", "summary", ",", "description" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
BananasSwaggerSchema.get_summary
Compat: drf-yasg 1.11
bananas/admin/api/schemas/yasg.py
def get_summary(self): """ Compat: drf-yasg 1.11 """ title = None method_name = getattr(self.view, "action", self.method.lower()) action = getattr(self.view, method_name, None) action_kwargs = getattr(action, "kwargs", None) if action_kwargs: title = action_kwargs.get("name") if not title and is_custom_action(self.view.action): title = _(self.view.action.replace("_", " ")).capitalize() if not title: meta = self.view.get_admin_meta() if self.view.action in ["retrieve", "update", "partial_update"]: title = str(meta.get("verbose_name") or meta.name) elif self.view.action == "create": title = meta.get("verbose_name") if title: title = str(_("Add")) + " " + str(title).lower() else: title = meta.name elif self.view.action == "list": title = str(meta.get("verbose_name_plural") or meta.name) else: title = str(meta.name) return title
def get_summary(self): """ Compat: drf-yasg 1.11 """ title = None method_name = getattr(self.view, "action", self.method.lower()) action = getattr(self.view, method_name, None) action_kwargs = getattr(action, "kwargs", None) if action_kwargs: title = action_kwargs.get("name") if not title and is_custom_action(self.view.action): title = _(self.view.action.replace("_", " ")).capitalize() if not title: meta = self.view.get_admin_meta() if self.view.action in ["retrieve", "update", "partial_update"]: title = str(meta.get("verbose_name") or meta.name) elif self.view.action == "create": title = meta.get("verbose_name") if title: title = str(_("Add")) + " " + str(title).lower() else: title = meta.name elif self.view.action == "list": title = str(meta.get("verbose_name_plural") or meta.name) else: title = str(meta.name) return title
[ "Compat", ":", "drf", "-", "yasg", "1", ".", "11" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/schemas/yasg.py#L45-L76
[ "def", "get_summary", "(", "self", ")", ":", "title", "=", "None", "method_name", "=", "getattr", "(", "self", ".", "view", ",", "\"action\"", ",", "self", ".", "method", ".", "lower", "(", ")", ")", "action", "=", "getattr", "(", "self", ".", "view", ",", "method_name", ",", "None", ")", "action_kwargs", "=", "getattr", "(", "action", ",", "\"kwargs\"", ",", "None", ")", "if", "action_kwargs", ":", "title", "=", "action_kwargs", ".", "get", "(", "\"name\"", ")", "if", "not", "title", "and", "is_custom_action", "(", "self", ".", "view", ".", "action", ")", ":", "title", "=", "_", "(", "self", ".", "view", ".", "action", ".", "replace", "(", "\"_\"", ",", "\" \"", ")", ")", ".", "capitalize", "(", ")", "if", "not", "title", ":", "meta", "=", "self", ".", "view", ".", "get_admin_meta", "(", ")", "if", "self", ".", "view", ".", "action", "in", "[", "\"retrieve\"", ",", "\"update\"", ",", "\"partial_update\"", "]", ":", "title", "=", "str", "(", "meta", ".", "get", "(", "\"verbose_name\"", ")", "or", "meta", ".", "name", ")", "elif", "self", ".", "view", ".", "action", "==", "\"create\"", ":", "title", "=", "meta", ".", "get", "(", "\"verbose_name\"", ")", "if", "title", ":", "title", "=", "str", "(", "_", "(", "\"Add\"", ")", ")", "+", "\" \"", "+", "str", "(", "title", ")", ".", "lower", "(", ")", "else", ":", "title", "=", "meta", ".", "name", "elif", "self", ".", "view", ".", "action", "==", "\"list\"", ":", "title", "=", "str", "(", "meta", ".", "get", "(", "\"verbose_name_plural\"", ")", "or", "meta", ".", "name", ")", "else", ":", "title", "=", "str", "(", "meta", ".", "name", ")", "return", "title" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
BananasVersioning.get_versioned_viewname
Prefix viewname with full namespace bananas:vX.Y:
bananas/admin/api/versioning.py
def get_versioned_viewname(self, viewname, request): """ Prefix viewname with full namespace bananas:vX.Y: """ namespace = request.resolver_match.namespace if namespace: viewname = "{}:{}".format(namespace, viewname) return viewname
def get_versioned_viewname(self, viewname, request): """ Prefix viewname with full namespace bananas:vX.Y: """ namespace = request.resolver_match.namespace if namespace: viewname = "{}:{}".format(namespace, viewname) return viewname
[ "Prefix", "viewname", "with", "full", "namespace", "bananas", ":", "vX", ".", "Y", ":" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/versioning.py#L14-L22
[ "def", "get_versioned_viewname", "(", "self", ",", "viewname", ",", "request", ")", ":", "namespace", "=", "request", ".", "resolver_match", ".", "namespace", "if", "namespace", ":", "viewname", "=", "\"{}:{}\"", ".", "format", "(", "namespace", ",", "viewname", ")", "return", "viewname" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
resolve
Get engine or raise exception, resolves Alias-instances to a sibling target. :param cursor: The object to search in :param key: The key to get :return: The object found
bananas/url.py
def resolve(cursor, key): """ Get engine or raise exception, resolves Alias-instances to a sibling target. :param cursor: The object to search in :param key: The key to get :return: The object found """ try: result = cursor[key] # Resolve alias if isinstance(result, Alias): result = cursor[result.target] return result except KeyError: raise KeyError("No matches for engine %s" % key)
def resolve(cursor, key): """ Get engine or raise exception, resolves Alias-instances to a sibling target. :param cursor: The object to search in :param key: The key to get :return: The object found """ try: result = cursor[key] # Resolve alias if isinstance(result, Alias): result = cursor[result.target] return result except KeyError: raise KeyError("No matches for engine %s" % key)
[ "Get", "engine", "or", "raise", "exception", "resolves", "Alias", "-", "instances", "to", "a", "sibling", "target", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/url.py#L72-L89
[ "def", "resolve", "(", "cursor", ",", "key", ")", ":", "try", ":", "result", "=", "cursor", "[", "key", "]", "# Resolve alias", "if", "isinstance", "(", "result", ",", "Alias", ")", ":", "result", "=", "cursor", "[", "result", ".", "target", "]", "return", "result", "except", "KeyError", ":", "raise", "KeyError", "(", "\"No matches for engine %s\"", "%", "key", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
get_engine
Perform a lookup in _ENGINE_MAPPING using engine_string. :param scheme: '+'-separated string. Maximum of 2 parts, i.e. "postgres+psycopg" is OK, "postgres+psycopg2+postgis" is NOT OK. :return: Engine string
bananas/url.py
def get_engine(scheme): """ Perform a lookup in _ENGINE_MAPPING using engine_string. :param scheme: '+'-separated string Maximum of 2 parts, i.e "postgres+psycopg" is OK, "postgres+psycopg2+postgis" is NOT OK. :return: Engine string """ path = scheme.split("+") first, rest = path[0], path[1:] second = rest[0] if rest else None engine = resolve(ENGINE_MAPPING, first) # If the selected engine does not have a second level. if not isinstance(engine, list): # If second level engine was expected if second: raise KeyError("%s has no sub-engines" % first) return engine try: engine, extra = engine except ValueError: # engine was not a list of length 2 raise ValueError( "django-bananas.url' engine " "configuration is invalid: %r" % ENGINE_MAPPING ) # Get second-level engine if second is not None: engine = resolve(extra, second) # Sanity-check the value before returning assert not isinstance( engine, (list, dict) ), "Only two levels of engines " "are allowed" assert engine, "The returned engine is not truthy" return engine
def get_engine(scheme): """ Perform a lookup in _ENGINE_MAPPING using engine_string. :param scheme: '+'-separated string Maximum of 2 parts, i.e "postgres+psycopg" is OK, "postgres+psycopg2+postgis" is NOT OK. :return: Engine string """ path = scheme.split("+") first, rest = path[0], path[1:] second = rest[0] if rest else None engine = resolve(ENGINE_MAPPING, first) # If the selected engine does not have a second level. if not isinstance(engine, list): # If second level engine was expected if second: raise KeyError("%s has no sub-engines" % first) return engine try: engine, extra = engine except ValueError: # engine was not a list of length 2 raise ValueError( "django-bananas.url' engine " "configuration is invalid: %r" % ENGINE_MAPPING ) # Get second-level engine if second is not None: engine = resolve(extra, second) # Sanity-check the value before returning assert not isinstance( engine, (list, dict) ), "Only two levels of engines " "are allowed" assert engine, "The returned engine is not truthy" return engine
[ "Perform", "a", "lookup", "in", "_ENGINE_MAPPING", "using", "engine_string", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/url.py#L92-L134
[ "def", "get_engine", "(", "scheme", ")", ":", "path", "=", "scheme", ".", "split", "(", "\"+\"", ")", "first", ",", "rest", "=", "path", "[", "0", "]", ",", "path", "[", "1", ":", "]", "second", "=", "rest", "[", "0", "]", "if", "rest", "else", "None", "engine", "=", "resolve", "(", "ENGINE_MAPPING", ",", "first", ")", "# If the selected engine does not have a second level.", "if", "not", "isinstance", "(", "engine", ",", "list", ")", ":", "# If second level engine was expected", "if", "second", ":", "raise", "KeyError", "(", "\"%s has no sub-engines\"", "%", "first", ")", "return", "engine", "try", ":", "engine", ",", "extra", "=", "engine", "except", "ValueError", ":", "# engine was not a list of length 2", "raise", "ValueError", "(", "\"django-bananas.url' engine \"", "\"configuration is invalid: %r\"", "%", "ENGINE_MAPPING", ")", "# Get second-level engine", "if", "second", "is", "not", "None", ":", "engine", "=", "resolve", "(", "extra", ",", "second", ")", "# Sanity-check the value before returning", "assert", "not", "isinstance", "(", "engine", ",", "(", "list", ",", "dict", ")", ")", ",", "\"Only two levels of engines \"", "\"are allowed\"", "assert", "engine", ",", "\"The returned engine is not truthy\"", "return", "engine" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
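A self-contained sketch of the two-level engine lookup implemented by `resolve` and `get_engine` above. The toy ENGINE_MAPPING and the simplified partition-based scheme splitting are assumptions for illustration; the real mapping in bananas.url is larger.

class Alias:
    # Stand-in for bananas.url.Alias: points at a sibling key in the mapping.
    def __init__(self, target):
        self.target = target

# Toy mapping (illustrative only): a plain string for single-level engines,
# and [default, {sub-engine: engine}] for two-level ones.
ENGINE_MAPPING = {
    "sqlite": "django.db.backends.sqlite3",
    "postgres": [
        "django.db.backends.postgresql_psycopg2",
        {"psycopg2": "django.db.backends.postgresql_psycopg2"},
    ],
    "pgsql": Alias("postgres"),
}

def resolve(cursor, key):
    try:
        result = cursor[key]
        if isinstance(result, Alias):
            result = cursor[result.target]
        return result
    except KeyError:
        raise KeyError("No matches for engine %s" % key)

def get_engine(scheme):
    first, _, second = scheme.partition("+")
    engine = resolve(ENGINE_MAPPING, first)
    if not isinstance(engine, list):
        if second:
            raise KeyError("%s has no sub-engines" % first)
        return engine
    engine, extra = engine
    return resolve(extra, second) if second else engine

print(get_engine("sqlite"))             # django.db.backends.sqlite3
print(get_engine("pgsql"))              # alias resolves to the postgres default
print(get_engine("postgres+psycopg2"))  # explicit second-level engine
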
parse_path
Get database name and database schema from path. :param path: "/"-delimited path, parsed as "/<database name>/<database schema>" :return: tuple with (database or None, schema or None)
bananas/url.py
def parse_path(path): """ Get database name and database schema from path. :param path: "/"-delimited path, parsed as "/<database name>/<database schema>" :return: tuple with (database or None, schema or None) """ if path is None: raise ValueError("path must be a string") parts = path.strip("/").split("/") database = unquote_plus(parts[0]) if len(parts) else None schema = parts[1] if len(parts) > 1 else None return database, schema
def parse_path(path): """ Get database name and database schema from path. :param path: "/"-delimited path, parsed as "/<database name>/<database schema>" :return: tuple with (database or None, schema or None) """ if path is None: raise ValueError("path must be a string") parts = path.strip("/").split("/") database = unquote_plus(parts[0]) if len(parts) else None schema = parts[1] if len(parts) > 1 else None return database, schema
[ "Get", "database", "name", "and", "database", "schema", "from", "path", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/url.py#L143-L159
[ "def", "parse_path", "(", "path", ")", ":", "if", "path", "is", "None", ":", "raise", "ValueError", "(", "\"path must be a string\"", ")", "parts", "=", "path", ".", "strip", "(", "\"/\"", ")", ".", "split", "(", "\"/\"", ")", "database", "=", "unquote_plus", "(", "parts", "[", "0", "]", ")", "if", "len", "(", "parts", ")", "else", "None", "schema", "=", "parts", "[", "1", "]", "if", "len", "(", "parts", ")", ">", "1", "else", "None", "return", "database", ",", "schema" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
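A quick sketch of the "/<database name>/<database schema>" path convention that `parse_path` above handles, re-implemented here for illustration.

from urllib.parse import unquote_plus

def split_db_path(path):
    # Same parsing as the recorded parse_path(): the first segment is the
    # database name (URL-unquoted), the optional second segment is the schema.
    if path is None:
        raise ValueError("path must be a string")
    parts = path.strip("/").split("/")
    database = unquote_plus(parts[0]) if len(parts) else None
    schema = parts[1] if len(parts) > 1 else None
    return database, schema

print(split_db_path("/tweets/tweetschema"))  # ('tweets', 'tweetschema')
print(split_db_path("/tweets"))              # ('tweets', None)
print(split_db_path("/my+database"))         # ('my database', None)
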
database_conf_from_url
Return a django-style database configuration based on ``url``. :param url: Database URL :return: Django-style database configuration dict Example: >>> conf = database_conf_from_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> sorted(conf.items()) # doctest: +NORMALIZE_WHITESPACE [('ENGINE', 'django.db.backends.postgresql_psycopg2'), ('HOST', '5monkeys.se'), ('NAME', 'tweets'), ('PARAMS', {'hello': 'world'}), ('PASSWORD', 'hunter2'), ('PORT', 4242), ('SCHEMA', 'tweetschema'), ('USER', 'joar')]
bananas/url.py
def database_conf_from_url(url): """ Return a django-style database configuration based on ``url``. :param url: Database URL :return: Django-style database configuration dict Example: >>> conf = database_conf_from_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> sorted(conf.items()) # doctest: +NORMALIZE_WHITESPACE [('ENGINE', 'django.db.backends.postgresql_psycopg2'), ('HOST', '5monkeys.se'), ('NAME', 'tweets'), ('PARAMS', {'hello': 'world'}), ('PASSWORD', 'hunter2'), ('PORT', 4242), ('SCHEMA', 'tweetschema'), ('USER', 'joar')] """ return {key.upper(): val for key, val in parse_database_url(url)._asdict().items()}
def database_conf_from_url(url): """ Return a django-style database configuration based on ``url``. :param url: Database URL :return: Django-style database configuration dict Example: >>> conf = database_conf_from_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> sorted(conf.items()) # doctest: +NORMALIZE_WHITESPACE [('ENGINE', 'django.db.backends.postgresql_psycopg2'), ('HOST', '5monkeys.se'), ('NAME', 'tweets'), ('PARAMS', {'hello': 'world'}), ('PASSWORD', 'hunter2'), ('PORT', 4242), ('SCHEMA', 'tweetschema'), ('USER', 'joar')] """ return {key.upper(): val for key, val in parse_database_url(url)._asdict().items()}
[ "Return", "a", "django", "-", "style", "database", "configuration", "based", "on", "url", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/url.py#L162-L183
[ "def", "database_conf_from_url", "(", "url", ")", ":", "return", "{", "key", ".", "upper", "(", ")", ":", "val", "for", "key", ",", "val", "in", "parse_database_url", "(", "url", ")", ".", "_asdict", "(", ")", ".", "items", "(", ")", "}" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
parse_database_url
Parse a database URL and return a DatabaseInfo named tuple. :param url: Database URL :return: DatabaseInfo instance Example: >>> conf = parse_database_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> conf # doctest: +NORMALIZE_WHITESPACE DatabaseInfo(engine='django.db.backends.postgresql_psycopg2', name='tweets', schema='tweetschema', user='joar', password='hunter2', host='5monkeys.se', port=4242, params={'hello': 'world'})
bananas/url.py
def parse_database_url(url): """ Parse a database URL and return a DatabaseInfo named tuple. :param url: Database URL :return: DatabaseInfo instance Example: >>> conf = parse_database_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> conf # doctest: +NORMALIZE_WHITESPACE DatabaseInfo(engine='django.db.backends.postgresql_psycopg2', name='tweets', schema='tweetschema', user='joar', password='hunter2', host='5monkeys.se', port=4242, params={'hello': 'world'}) """ if url == "sqlite://:memory:": raise Exception( 'Your url is "sqlite://:memory:", if you want ' 'an sqlite memory database, just use "sqlite://"' ) url_parts = urlsplit(url) engine = get_engine(url_parts.scheme) database, schema = parse_path(url_parts.path) port = url_parts.port host = url_parts.hostname user = url_parts.username password = url_parts.password # Take the last element of every parameter list params = {key: val.pop() for key, val in parse_qs(url_parts.query).items()} return DatabaseInfo( engine=engine, name=database, schema=schema, user=user, password=password, host=host, port=port, params=params, )
def parse_database_url(url): """ Parse a database URL and return a DatabaseInfo named tuple. :param url: Database URL :return: DatabaseInfo instance Example: >>> conf = parse_database_url( ... 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' ... '?hello=world') >>> conf # doctest: +NORMALIZE_WHITESPACE DatabaseInfo(engine='django.db.backends.postgresql_psycopg2', name='tweets', schema='tweetschema', user='joar', password='hunter2', host='5monkeys.se', port=4242, params={'hello': 'world'}) """ if url == "sqlite://:memory:": raise Exception( 'Your url is "sqlite://:memory:", if you want ' 'an sqlite memory database, just use "sqlite://"' ) url_parts = urlsplit(url) engine = get_engine(url_parts.scheme) database, schema = parse_path(url_parts.path) port = url_parts.port host = url_parts.hostname user = url_parts.username password = url_parts.password # Take the last element of every parameter list params = {key: val.pop() for key, val in parse_qs(url_parts.query).items()} return DatabaseInfo( engine=engine, name=database, schema=schema, user=user, password=password, host=host, port=port, params=params, )
[ "Parse", "a", "database", "URL", "and", "return", "a", "DatabaseInfo", "named", "tuple", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/url.py#L186-L233
[ "def", "parse_database_url", "(", "url", ")", ":", "if", "url", "==", "\"sqlite://:memory:\"", ":", "raise", "Exception", "(", "'Your url is \"sqlite://:memory:\", if you want '", "'an sqlite memory database, just use \"sqlite://\"'", ")", "url_parts", "=", "urlsplit", "(", "url", ")", "engine", "=", "get_engine", "(", "url_parts", ".", "scheme", ")", "database", ",", "schema", "=", "parse_path", "(", "url_parts", ".", "path", ")", "port", "=", "url_parts", ".", "port", "host", "=", "url_parts", ".", "hostname", "user", "=", "url_parts", ".", "username", "password", "=", "url_parts", ".", "password", "# Take the last element of every parameter list", "params", "=", "{", "key", ":", "val", ".", "pop", "(", ")", "for", "key", ",", "val", "in", "parse_qs", "(", "url_parts", ".", "query", ")", ".", "items", "(", ")", "}", "return", "DatabaseInfo", "(", "engine", "=", "engine", ",", "name", "=", "database", ",", "schema", "=", "schema", ",", "user", "=", "user", ",", "password", "=", "password", ",", "host", "=", "host", ",", "port", "=", "port", ",", "params", "=", "params", ",", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
LoginAPI.create
Log in django staff user
bananas/admin/api/views.py
def create(self, request): """ Log in django staff user """ # TODO: Decorate api with sensitive post parameters as Django admin do? # from django.utils.decorators import method_decorator # from django.views.decorators.debug import sensitive_post_parameters # sensitive_post_parameters_m = method_decorator(sensitive_post_parameters()) login_form = AuthenticationForm(request, data=request.data) if not login_form.is_valid(): raise serializers.ValidationError(login_form.errors) auth_login(request, login_form.get_user()) serializer = UserSerializer(request.user) return Response(serializer.data, status=status.HTTP_200_OK)
def create(self, request): """ Log in django staff user """ # TODO: Decorate api with sensitive post parameters as Django admin do? # from django.utils.decorators import method_decorator # from django.views.decorators.debug import sensitive_post_parameters # sensitive_post_parameters_m = method_decorator(sensitive_post_parameters()) login_form = AuthenticationForm(request, data=request.data) if not login_form.is_valid(): raise serializers.ValidationError(login_form.errors) auth_login(request, login_form.get_user()) serializer = UserSerializer(request.user) return Response(serializer.data, status=status.HTTP_200_OK)
[ "Log", "in", "django", "staff", "user" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/views.py#L40-L57
[ "def", "create", "(", "self", ",", "request", ")", ":", "# TODO: Decorate api with sensitive post parameters as Django admin do?", "# from django.utils.decorators import method_decorator", "# from django.views.decorators.debug import sensitive_post_parameters", "# sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())", "login_form", "=", "AuthenticationForm", "(", "request", ",", "data", "=", "request", ".", "data", ")", "if", "not", "login_form", ".", "is_valid", "(", ")", ":", "raise", "serializers", ".", "ValidationError", "(", "login_form", ".", "errors", ")", "auth_login", "(", "request", ",", "login_form", ".", "get_user", "(", ")", ")", "serializer", "=", "UserSerializer", "(", "request", ".", "user", ")", "return", "Response", "(", "serializer", ".", "data", ",", "status", "=", "status", ".", "HTTP_200_OK", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
MeAPI.list
Retrieve logged in user info
bananas/admin/api/views.py
def list(self, request): """ Retrieve logged in user info """ serializer = self.get_serializer(request.user) return Response(serializer.data, status=status.HTTP_200_OK)
def list(self, request): """ Retrieve logged in user info """ serializer = self.get_serializer(request.user) return Response(serializer.data, status=status.HTTP_200_OK)
[ "Retrieve", "logged", "in", "user", "info" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/views.py#L85-L90
[ "def", "list", "(", "self", ",", "request", ")", ":", "serializer", "=", "self", ".", "get_serializer", "(", "request", ".", "user", ")", "return", "Response", "(", "serializer", ".", "data", ",", "status", "=", "status", ".", "HTTP_200_OK", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
ChangePasswordAPI.create
Change password for logged in django staff user
bananas/admin/api/views.py
def create(self, request): """ Change password for logged in django staff user """ # TODO: Decorate api with sensitive post parameters as Django admin do? password_form = PasswordChangeForm(request.user, data=request.data) if not password_form.is_valid(): raise serializers.ValidationError(password_form.errors) password_form.save() update_session_auth_hash(request, password_form.user) return Response(status=status.HTTP_204_NO_CONTENT)
def create(self, request): """ Change password for logged in django staff user """ # TODO: Decorate api with sensitive post parameters as Django admin do? password_form = PasswordChangeForm(request.user, data=request.data) if not password_form.is_valid(): raise serializers.ValidationError(password_form.errors) password_form.save() update_session_auth_hash(request, password_form.user) return Response(status=status.HTTP_204_NO_CONTENT)
[ "Change", "password", "for", "logged", "in", "django", "staff", "user" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/admin/api/views.py#L103-L117
[ "def", "create", "(", "self", ",", "request", ")", ":", "# TODO: Decorate api with sensitive post parameters as Django admin do?", "password_form", "=", "PasswordChangeForm", "(", "request", ".", "user", ",", "data", "=", "request", ".", "data", ")", "if", "not", "password_form", ".", "is_valid", "(", ")", ":", "raise", "serializers", ".", "ValidationError", "(", "password_form", ".", "errors", ")", "password_form", ".", "save", "(", ")", "update_session_auth_hash", "(", "request", ",", "password_form", ".", "user", ")", "return", "Response", "(", "status", "=", "status", ".", "HTTP_204_NO_CONTENT", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
UserDetailsSerializer.build_url_field
This is needed because DRF's model serializer uses the queryset to build the url name # TODO: Move this to its own serializer mixin or fix the problem elsewhere?
example/example/api.py
def build_url_field(self, field_name, model_class): """ This is needed because DRF's model serializer uses the queryset to build the url name # TODO: Move this to its own serializer mixin or fix the problem elsewhere? """ field, kwargs = super().build_url_field(field_name, model_class) view = self.root.context["view"] kwargs["view_name"] = view.get_url_name("detail") return field, kwargs
def build_url_field(self, field_name, model_class): """ This is needed because DRF's model serializer uses the queryset to build the url name # TODO: Move this to its own serializer mixin or fix the problem elsewhere? """ field, kwargs = super().build_url_field(field_name, model_class) view = self.root.context["view"] kwargs["view_name"] = view.get_url_name("detail") return field, kwargs
[ "This", "is", "needed", "due", "to", "DRF", "s", "model", "serializer", "uses", "the", "queryset", "to", "build", "url", "name" ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/example/example/api.py#L25-L36
[ "def", "build_url_field", "(", "self", ",", "field_name", ",", "model_class", ")", ":", "field", ",", "kwargs", "=", "super", "(", ")", ".", "build_url_field", "(", "field_name", ",", "model_class", ")", "view", "=", "self", ".", "root", ".", "context", "[", "\"view\"", "]", "kwargs", "[", "\"view_name\"", "]", "=", "view", ".", "get_url_name", "(", "\"detail\"", ")", "return", "field", ",", "kwargs" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
parse_bool
Parse string to bool. :param str value: String value to parse as bool :return bool:
bananas/environment.py
def parse_bool(value): """ Parse string to bool. :param str value: String value to parse as bool :return bool: """ boolean = parse_str(value).capitalize() if boolean in ("True", "Yes", "On", "1"): return True elif boolean in ("False", "No", "Off", "0"): return False else: raise ValueError('Unable to parse boolean value "{}"'.format(value))
def parse_bool(value): """ Parse string to bool. :param str value: String value to parse as bool :return bool: """ boolean = parse_str(value).capitalize() if boolean in ("True", "Yes", "On", "1"): return True elif boolean in ("False", "No", "Off", "0"): return False else: raise ValueError('Unable to parse boolean value "{}"'.format(value))
[ "Parse", "string", "to", "bool", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/environment.py#L40-L54
[ "def", "parse_bool", "(", "value", ")", ":", "boolean", "=", "parse_str", "(", "value", ")", ".", "capitalize", "(", ")", "if", "boolean", "in", "(", "\"True\"", ",", "\"Yes\"", ",", "\"On\"", ",", "\"1\"", ")", ":", "return", "True", "elif", "boolean", "in", "(", "\"False\"", ",", "\"No\"", ",", "\"Off\"", ",", "\"0\"", ")", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "'Unable to parse boolean value \"{}\"'", ".", "format", "(", "value", ")", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf
test
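A usage sketch of the boolean parsing rules described above. It assumes `parse_str` is essentially a whitespace-stripping helper, which is an assumption about the rest of bananas.environment rather than something shown in this record.

def parse_bool_sketch(value):
    # Illustrative re-implementation: parse_str() is assumed to be a simple
    # whitespace-stripping helper in the real module.
    boolean = value.strip().capitalize()
    if boolean in ("True", "Yes", "On", "1"):
        return True
    elif boolean in ("False", "No", "Off", "0"):
        return False
    raise ValueError('Unable to parse boolean value "{}"'.format(value))

for raw in ("true", "YES", " on ", "0", "off"):
    print(repr(raw), "->", parse_bool_sketch(raw))
# 'true' -> True, 'YES' -> True, ' on ' -> True, '0' -> False, 'off' -> False
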
parse_int
Parse numeric string to int. Supports oct formatted string. :param str value: String value to parse as int :return int:
bananas/environment.py
def parse_int(value): """ Parse numeric string to int. Supports oct formatted string. :param str value: String value to parse as int :return int: """ value = parse_str(value=value) if value.startswith("0"): return int(value.lstrip("0o"), 8) else: return int(value)
def parse_int(value): """ Parse numeric string to int. Supports oct formatted string. :param str value: String value to parse as int :return int: """ value = parse_str(value=value) if value.startswith("0"): return int(value.lstrip("0o"), 8) else: return int(value)
[ "Parse", "numeric", "string", "to", "int", ".", "Supports", "oct", "formatted", "string", "." ]
5monkeys/django-bananas
python
https://github.com/5monkeys/django-bananas/blob/cfd318c737f6c4580036c13d2acf32bca96654bf/bananas/environment.py#L57-L68
[ "def", "parse_int", "(", "value", ")", ":", "value", "=", "parse_str", "(", "value", "=", "value", ")", "if", "value", ".", "startswith", "(", "\"0\"", ")", ":", "return", "int", "(", "value", ".", "lstrip", "(", "\"0o\"", ")", ",", "8", ")", "else", ":", "return", "int", "(", "value", ")" ]
cfd318c737f6c4580036c13d2acf32bca96654bf