Dataset schema (column name: dtype, observed value or length range):
id: int32 (0 to 252k)
repo: stringlengths (7 to 55)
path: stringlengths (4 to 127)
func_name: stringlengths (1 to 88)
original_string: stringlengths (75 to 19.8k)
language: stringclasses (1 value)
code: stringlengths (75 to 19.8k)
code_tokens: list
docstring: stringlengths (3 to 17.3k)
docstring_tokens: list
sha: stringlengths (40 to 40)
url: stringlengths (87 to 242)
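The rows that follow use this schema: one Python function per record, with its source, docstring, token lists, commit sha, and a GitHub URL pinned to the function's line range. As a rough sketch of how such records are typically consumed, assuming they are stored as JSON Lines with these exact field names (the file name below is an illustrative assumption, not taken from this page):

```python
# Minimal sketch, assuming records with the columns above are stored as JSON
# Lines; "python_functions.jsonl" is a made-up file name.
import json

with open("python_functions.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        # Each record pairs a function's source with its docstring, token
        # lists, the commit sha, and a GitHub URL pinned to its line range.
        print(row["repo"], row["func_name"], row["url"])
```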
243,300
yougov/vr.builder
vr/builder/build.py
OutputSaver.make_tarball
def make_tarball(self, app_folder, build_data): """ Following a successful build, create a tarball and build result. """ # slugignore clean_slug_dir(app_folder) # tar up the result with tarfile.open('build.tar.gz', 'w:gz') as tar: tar.add(app_folder, arcname='') build_data.build_md5 = file_md5('build.tar.gz') tardest = os.path.join(self.outfolder, 'build.tar.gz') shutil.move('build.tar.gz', tardest) build_data_path = os.path.join(self.outfolder, 'build_result.yaml') print("Writing", build_data_path) with open(build_data_path, 'w') as f: f.write(build_data.as_yaml())
python
def make_tarball(self, app_folder, build_data): """ Following a successful build, create a tarball and build result. """ # slugignore clean_slug_dir(app_folder) # tar up the result with tarfile.open('build.tar.gz', 'w:gz') as tar: tar.add(app_folder, arcname='') build_data.build_md5 = file_md5('build.tar.gz') tardest = os.path.join(self.outfolder, 'build.tar.gz') shutil.move('build.tar.gz', tardest) build_data_path = os.path.join(self.outfolder, 'build_result.yaml') print("Writing", build_data_path) with open(build_data_path, 'w') as f: f.write(build_data.as_yaml())
[ "def", "make_tarball", "(", "self", ",", "app_folder", ",", "build_data", ")", ":", "# slugignore", "clean_slug_dir", "(", "app_folder", ")", "# tar up the result", "with", "tarfile", ".", "open", "(", "'build.tar.gz'", ",", "'w:gz'", ")", "as", "tar", ":", "tar", ".", "add", "(", "app_folder", ",", "arcname", "=", "''", ")", "build_data", ".", "build_md5", "=", "file_md5", "(", "'build.tar.gz'", ")", "tardest", "=", "os", ".", "path", ".", "join", "(", "self", ".", "outfolder", ",", "'build.tar.gz'", ")", "shutil", ".", "move", "(", "'build.tar.gz'", ",", "tardest", ")", "build_data_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "outfolder", ",", "'build_result.yaml'", ")", "print", "(", "\"Writing\"", ",", "build_data_path", ")", "with", "open", "(", "build_data_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "build_data", ".", "as_yaml", "(", ")", ")" ]
Following a successful build, create a tarball and build result.
[ "Following", "a", "successful", "build", "create", "a", "tarball", "and", "build", "result", "." ]
666b28f997d0cff52e82eed4ace1c73fee4b2136
https://github.com/yougov/vr.builder/blob/666b28f997d0cff52e82eed4ace1c73fee4b2136/vr/builder/build.py#L63-L81
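The record above tars an application folder, records the archive's MD5 in the build data, moves the tarball into an output folder, and writes a YAML build result. A standalone sketch of the same tar-and-checksum pattern using only the standard library follows; `clean_slug_dir` and the YAML build result are specific to vr.builder and are left out, and the md5 helper here is an assumption, not the project's `file_md5`.

```python
# Standalone sketch of the tar-and-checksum step; paths and the md5 helper are
# illustrative and not taken from vr.builder itself.
import hashlib
import os
import shutil
import tarfile

def file_md5(path, chunk=1 << 20):
    md5 = hashlib.md5()
    with open(path, "rb") as fh:
        for block in iter(lambda: fh.read(chunk), b""):
            md5.update(block)
    return md5.hexdigest()

def make_tarball(app_folder, outfolder):
    with tarfile.open("build.tar.gz", "w:gz") as tar:
        tar.add(app_folder, arcname="")    # archive folder contents at the root
    digest = file_md5("build.tar.gz")      # checksum recorded alongside the build
    shutil.move("build.tar.gz", os.path.join(outfolder, "build.tar.gz"))
    return digest
```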
243,301
goldhand/django-nupages
nupages/middleware.py
MultiTenantMiddleware.process_request
def process_request(self, request): ''' checks if the host domain is one of the site objects and sets request.site_id ''' site_id = 0 domain = request.get_host().lower() if hasattr(settings, 'SITE_ID'): site_id = settings.SITE_ID try: site = Site.objects.get(domain__iexact=domain) site_id = site.id except Site.DoesNotExist: pass request.site_id = site_id
python
def process_request(self, request): ''' checks if the host domain is one of the site objects and sets request.site_id ''' site_id = 0 domain = request.get_host().lower() if hasattr(settings, 'SITE_ID'): site_id = settings.SITE_ID try: site = Site.objects.get(domain__iexact=domain) site_id = site.id except Site.DoesNotExist: pass request.site_id = site_id
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "site_id", "=", "0", "domain", "=", "request", ".", "get_host", "(", ")", ".", "lower", "(", ")", "if", "hasattr", "(", "settings", ",", "'SITE_ID'", ")", ":", "site_id", "=", "settings", ".", "SITE_ID", "try", ":", "site", "=", "Site", ".", "objects", ".", "get", "(", "domain__iexact", "=", "domain", ")", "site_id", "=", "site", ".", "id", "except", "Site", ".", "DoesNotExist", ":", "pass", "request", ".", "site_id", "=", "site_id" ]
checks if the host domain is one of the site objects and sets request.site_id
[ "checks", "if", "the", "host", "domain", "is", "one", "of", "the", "site", "objects", "and", "sets", "request", ".", "site_id" ]
4e54fae7e057f9530c22dc30c03812fd660cb7f4
https://github.com/goldhand/django-nupages/blob/4e54fae7e057f9530c22dc30c03812fd660cb7f4/nupages/middleware.py#L9-L23
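The middleware above resolves the request host against `django.contrib.sites` and stashes the matching site id on the request. A hedged sketch of the settings such a middleware would typically need is below; the dotted path is inferred from the module layout shown (`nupages/middleware.py`), and note that old-style middleware defining `process_request` needs `MiddlewareMixin` (or the legacy `MIDDLEWARE_CLASSES` setting) on modern Django.

```python
# Hedged Django settings sketch for wiring in host-based site resolution;
# the dotted path is inferred from the file layout shown above.
INSTALLED_APPS = [
    "django.contrib.sites",                      # provides the Site model queried per request
    # ...
]

MIDDLEWARE = [
    "django.middleware.common.CommonMiddleware",
    "nupages.middleware.MultiTenantMiddleware",  # sets request.site_id from the host header
    # ...
]

SITE_ID = 1  # fallback used when the host does not match any Site record
```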
243,302
baliame/http-hmac-python
httphmac/v2.py
V2Signer.sign
def sign(self, request, authheaders, secret): """Returns the v2 signature appropriate for the request. The request is not changed by this function. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """ if "id" not in authheaders or authheaders["id"] == '': raise KeyError("id required in authorization headers.") if "nonce" not in authheaders or authheaders["nonce"] == '': raise KeyError("nonce required in authorization headers.") if "realm" not in authheaders or authheaders["realm"] == '': raise KeyError("realm required in authorization headers.") if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") bodyhash = None if request.body is not None and request.body != b'': sha256 = hashlib.sha256() sha256.update(request.body) bodyhash = base64.b64encode(sha256.digest()).decode('utf-8') try: mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True), digestmod=self.digest) except TypeError: s = secret.encode('utf-8') if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest) mac.update(self.signable(request, authheaders, bodyhash).encode('utf-8')) digest = mac.digest() return base64.b64encode(digest).decode('utf-8')
python
def sign(self, request, authheaders, secret): """Returns the v2 signature appropriate for the request. The request is not changed by this function. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """ if "id" not in authheaders or authheaders["id"] == '': raise KeyError("id required in authorization headers.") if "nonce" not in authheaders or authheaders["nonce"] == '': raise KeyError("nonce required in authorization headers.") if "realm" not in authheaders or authheaders["realm"] == '': raise KeyError("realm required in authorization headers.") if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") bodyhash = None if request.body is not None and request.body != b'': sha256 = hashlib.sha256() sha256.update(request.body) bodyhash = base64.b64encode(sha256.digest()).decode('utf-8') try: mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True), digestmod=self.digest) except TypeError: s = secret.encode('utf-8') if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest) mac.update(self.signable(request, authheaders, bodyhash).encode('utf-8')) digest = mac.digest() return base64.b64encode(digest).decode('utf-8')
[ "def", "sign", "(", "self", ",", "request", ",", "authheaders", ",", "secret", ")", ":", "if", "\"id\"", "not", "in", "authheaders", "or", "authheaders", "[", "\"id\"", "]", "==", "''", ":", "raise", "KeyError", "(", "\"id required in authorization headers.\"", ")", "if", "\"nonce\"", "not", "in", "authheaders", "or", "authheaders", "[", "\"nonce\"", "]", "==", "''", ":", "raise", "KeyError", "(", "\"nonce required in authorization headers.\"", ")", "if", "\"realm\"", "not", "in", "authheaders", "or", "authheaders", "[", "\"realm\"", "]", "==", "''", ":", "raise", "KeyError", "(", "\"realm required in authorization headers.\"", ")", "if", "request", ".", "get_header", "(", "'x-authorization-timestamp'", ")", "==", "''", ":", "raise", "KeyError", "(", "\"X-Authorization-Timestamp is required.\"", ")", "bodyhash", "=", "None", "if", "request", ".", "body", "is", "not", "None", "and", "request", ".", "body", "!=", "b''", ":", "sha256", "=", "hashlib", ".", "sha256", "(", ")", "sha256", ".", "update", "(", "request", ".", "body", ")", "bodyhash", "=", "base64", ".", "b64encode", "(", "sha256", ".", "digest", "(", ")", ")", ".", "decode", "(", "'utf-8'", ")", "try", ":", "mac", "=", "hmac", ".", "HMAC", "(", "base64", ".", "b64decode", "(", "secret", ".", "encode", "(", "'utf-8'", ")", ",", "validate", "=", "True", ")", ",", "digestmod", "=", "self", ".", "digest", ")", "except", "TypeError", ":", "s", "=", "secret", ".", "encode", "(", "'utf-8'", ")", "if", "not", "re", ".", "match", "(", "b'^[A-Za-z0-9+/]*={0,2}$'", ",", "s", ")", ":", "raise", "binascii", ".", "Error", "(", "'Non-base64 digit found'", ")", "mac", "=", "hmac", ".", "HMAC", "(", "base64", ".", "b64decode", "(", "s", ")", ",", "digestmod", "=", "self", ".", "digest", ")", "mac", ".", "update", "(", "self", ".", "signable", "(", "request", ",", "authheaders", ",", "bodyhash", ")", ".", "encode", "(", "'utf-8'", ")", ")", "digest", "=", "mac", ".", "digest", "(", ")", "return", "base64", ".", "b64encode", "(", "digest", ")", ".", "decode", "(", "'utf-8'", ")" ]
Returns the v2 signature appropriate for the request. The request is not changed by this function. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization.
[ "Returns", "the", "v2", "signature", "appropriate", "for", "the", "request", ".", "The", "request", "is", "not", "changed", "by", "this", "function", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L78-L109
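Stripped of header validation and the base64 fallback, the core of `sign()` is: decode the base64 secret, HMAC the signable string with SHA-256, and base64-encode the digest. A minimal sketch of that step with the standard library, using a placeholder signable string rather than the one `V2Signer.signable()` actually builds:

```python
# Core signing step in isolation: base64-decode the shared secret, HMAC the
# signable string with SHA-256, and return the base64 digest. The signable
# string here is a placeholder, not the real V2 format.
import base64
import hashlib
import hmac

def hmac_sha256_b64(secret_b64, signable):
    key = base64.b64decode(secret_b64.encode("utf-8"))
    mac = hmac.new(key, signable.encode("utf-8"), hashlib.sha256)
    return base64.b64encode(mac.digest()).decode("utf-8")

secret = base64.b64encode(b"example-shared-key").decode("utf-8")
print(hmac_sha256_b64(secret, "GET\nexample.com\n/resource"))
```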
243,303
baliame/http-hmac-python
httphmac/v2.py
V2Signer.get_response_signer
def get_response_signer(self): """Returns the response signer for this version of the signature. """ if not hasattr(self, "response_signer"): self.response_signer = V2ResponseSigner(self.digest, orig=self) return self.response_signer
python
def get_response_signer(self): """Returns the response signer for this version of the signature. """ if not hasattr(self, "response_signer"): self.response_signer = V2ResponseSigner(self.digest, orig=self) return self.response_signer
[ "def", "get_response_signer", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "\"response_signer\"", ")", ":", "self", ".", "response_signer", "=", "V2ResponseSigner", "(", "self", ".", "digest", ",", "orig", "=", "self", ")", "return", "self", ".", "response_signer" ]
Returns the response signer for this version of the signature.
[ "Returns", "the", "response", "signer", "for", "this", "version", "of", "the", "signature", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L111-L116
243,304
baliame/http-hmac-python
httphmac/v2.py
V2Signer.check
def check(self, request, secret): """Verifies whether or not the request bears an authorization appropriate and valid for this version of the signature. This verifies every element of the signature, including the timestamp's value. Does not alter the request. Keyword arguments: request -- A request object which can be consumed by this API. secret -- The base64-encoded secret key for the HMAC authorization. """ if request.get_header("Authorization") == "": return False ah = self.parse_auth_headers(request.get_header("Authorization")) if "signature" not in ah: return False if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") timestamp = int(float(request.get_header('x-authorization-timestamp'))) if timestamp == 0: raise ValueError("X-Authorization-Timestamp must be a valid, non-zero timestamp.") if self.preset_time is None: curr_time = time.time() else: curr_time = self.preset_time if timestamp > curr_time + 900: raise ValueError("X-Authorization-Timestamp is too far in the future.") if timestamp < curr_time - 900: raise ValueError("X-Authorization-Timestamp is too far in the past.") if request.body is not None and request.body != b'': content_hash = request.get_header("x-authorization-content-sha256") if content_hash == '': raise KeyError("X-Authorization-Content-SHA256 is required for requests with a request body.") sha256 = hashlib.sha256() sha256.update(request.body) if content_hash != base64.b64encode(sha256.digest()).decode('utf-8'): raise ValueError("X-Authorization-Content-SHA256 must match the SHA-256 hash of the request body.") return ah["signature"] == self.sign(request, ah, secret)
python
def check(self, request, secret): """Verifies whether or not the request bears an authorization appropriate and valid for this version of the signature. This verifies every element of the signature, including the timestamp's value. Does not alter the request. Keyword arguments: request -- A request object which can be consumed by this API. secret -- The base64-encoded secret key for the HMAC authorization. """ if request.get_header("Authorization") == "": return False ah = self.parse_auth_headers(request.get_header("Authorization")) if "signature" not in ah: return False if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") timestamp = int(float(request.get_header('x-authorization-timestamp'))) if timestamp == 0: raise ValueError("X-Authorization-Timestamp must be a valid, non-zero timestamp.") if self.preset_time is None: curr_time = time.time() else: curr_time = self.preset_time if timestamp > curr_time + 900: raise ValueError("X-Authorization-Timestamp is too far in the future.") if timestamp < curr_time - 900: raise ValueError("X-Authorization-Timestamp is too far in the past.") if request.body is not None and request.body != b'': content_hash = request.get_header("x-authorization-content-sha256") if content_hash == '': raise KeyError("X-Authorization-Content-SHA256 is required for requests with a request body.") sha256 = hashlib.sha256() sha256.update(request.body) if content_hash != base64.b64encode(sha256.digest()).decode('utf-8'): raise ValueError("X-Authorization-Content-SHA256 must match the SHA-256 hash of the request body.") return ah["signature"] == self.sign(request, ah, secret)
[ "def", "check", "(", "self", ",", "request", ",", "secret", ")", ":", "if", "request", ".", "get_header", "(", "\"Authorization\"", ")", "==", "\"\"", ":", "return", "False", "ah", "=", "self", ".", "parse_auth_headers", "(", "request", ".", "get_header", "(", "\"Authorization\"", ")", ")", "if", "\"signature\"", "not", "in", "ah", ":", "return", "False", "if", "request", ".", "get_header", "(", "'x-authorization-timestamp'", ")", "==", "''", ":", "raise", "KeyError", "(", "\"X-Authorization-Timestamp is required.\"", ")", "timestamp", "=", "int", "(", "float", "(", "request", ".", "get_header", "(", "'x-authorization-timestamp'", ")", ")", ")", "if", "timestamp", "==", "0", ":", "raise", "ValueError", "(", "\"X-Authorization-Timestamp must be a valid, non-zero timestamp.\"", ")", "if", "self", ".", "preset_time", "is", "None", ":", "curr_time", "=", "time", ".", "time", "(", ")", "else", ":", "curr_time", "=", "self", ".", "preset_time", "if", "timestamp", ">", "curr_time", "+", "900", ":", "raise", "ValueError", "(", "\"X-Authorization-Timestamp is too far in the future.\"", ")", "if", "timestamp", "<", "curr_time", "-", "900", ":", "raise", "ValueError", "(", "\"X-Authorization-Timestamp is too far in the past.\"", ")", "if", "request", ".", "body", "is", "not", "None", "and", "request", ".", "body", "!=", "b''", ":", "content_hash", "=", "request", ".", "get_header", "(", "\"x-authorization-content-sha256\"", ")", "if", "content_hash", "==", "''", ":", "raise", "KeyError", "(", "\"X-Authorization-Content-SHA256 is required for requests with a request body.\"", ")", "sha256", "=", "hashlib", ".", "sha256", "(", ")", "sha256", ".", "update", "(", "request", ".", "body", ")", "if", "content_hash", "!=", "base64", ".", "b64encode", "(", "sha256", ".", "digest", "(", ")", ")", ".", "decode", "(", "'utf-8'", ")", ":", "raise", "ValueError", "(", "\"X-Authorization-Content-SHA256 must match the SHA-256 hash of the request body.\"", ")", "return", "ah", "[", "\"signature\"", "]", "==", "self", ".", "sign", "(", "request", ",", "ah", ",", "secret", ")" ]
Verifies whether or not the request bears an authorization appropriate and valid for this version of the signature. This verifies every element of the signature, including the timestamp's value. Does not alter the request. Keyword arguments: request -- A request object which can be consumed by this API. secret -- The base64-encoded secret key for the HMAC authorization.
[ "Verifies", "whether", "or", "not", "the", "request", "bears", "an", "authorization", "appropriate", "and", "valid", "for", "this", "version", "of", "the", "signature", ".", "This", "verifies", "every", "element", "of", "the", "signature", "including", "the", "timestamp", "s", "value", ".", "Does", "not", "alter", "the", "request", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L128-L163
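Before recomputing the signature, `check()` does two cheap validations: the timestamp must fall within a 900-second window around the current time, and for requests with a body the `X-Authorization-Content-SHA256` header must match the body's SHA-256 digest. A sketch of those two checks in isolation (the window size mirrors the code above; the helper names are illustrative):

```python
# The two preliminary validations from check(), in isolation. The 900-second
# skew matches the code above; the helper names are illustrative.
import base64
import hashlib
import time

def timestamp_in_window(ts, now=None, skew=900):
    now = time.time() if now is None else now
    return (now - skew) <= ts <= (now + skew)

def body_hash_matches(body, header_value):
    digest = base64.b64encode(hashlib.sha256(body).digest()).decode("utf-8")
    return digest == header_value

print(timestamp_in_window(time.time() + 100))   # True: within the window
print(body_hash_matches(b"{}", base64.b64encode(hashlib.sha256(b"{}").digest()).decode("utf-8")))  # True
```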
243,305
baliame/http-hmac-python
httphmac/v2.py
V2Signer.unroll_auth_headers
def unroll_auth_headers(self, authheaders, exclude_signature=False, sep=",", quote=True): """Converts an authorization header dict-like object into a string representing the authorization. Keyword arguments: authheaders -- A string-indexable object which contains the headers appropriate for this signature version. """ res = "" ordered = collections.OrderedDict(sorted(authheaders.items())) form = '{0}=\"{1}\"' if quote else '{0}={1}' if exclude_signature: return sep.join([form.format(k, urlquote(str(v), safe='')) for k, v in ordered.items() if k != 'signature']) else: return sep.join([form.format(k, urlquote(str(v), safe='') if k != 'signature' else str(v)) for k, v in ordered.items()])
python
def unroll_auth_headers(self, authheaders, exclude_signature=False, sep=",", quote=True): """Converts an authorization header dict-like object into a string representing the authorization. Keyword arguments: authheaders -- A string-indexable object which contains the headers appropriate for this signature version. """ res = "" ordered = collections.OrderedDict(sorted(authheaders.items())) form = '{0}=\"{1}\"' if quote else '{0}={1}' if exclude_signature: return sep.join([form.format(k, urlquote(str(v), safe='')) for k, v in ordered.items() if k != 'signature']) else: return sep.join([form.format(k, urlquote(str(v), safe='') if k != 'signature' else str(v)) for k, v in ordered.items()])
[ "def", "unroll_auth_headers", "(", "self", ",", "authheaders", ",", "exclude_signature", "=", "False", ",", "sep", "=", "\",\"", ",", "quote", "=", "True", ")", ":", "res", "=", "\"\"", "ordered", "=", "collections", ".", "OrderedDict", "(", "sorted", "(", "authheaders", ".", "items", "(", ")", ")", ")", "form", "=", "'{0}=\\\"{1}\\\"'", "if", "quote", "else", "'{0}={1}'", "if", "exclude_signature", ":", "return", "sep", ".", "join", "(", "[", "form", ".", "format", "(", "k", ",", "urlquote", "(", "str", "(", "v", ")", ",", "safe", "=", "''", ")", ")", "for", "k", ",", "v", "in", "ordered", ".", "items", "(", ")", "if", "k", "!=", "'signature'", "]", ")", "else", ":", "return", "sep", ".", "join", "(", "[", "form", ".", "format", "(", "k", ",", "urlquote", "(", "str", "(", "v", ")", ",", "safe", "=", "''", ")", "if", "k", "!=", "'signature'", "else", "str", "(", "v", ")", ")", "for", "k", ",", "v", "in", "ordered", ".", "items", "(", ")", "]", ")" ]
Converts an authorization header dict-like object into a string representing the authorization. Keyword arguments: authheaders -- A string-indexable object which contains the headers appropriate for this signature version.
[ "Converts", "an", "authorization", "header", "dict", "-", "like", "object", "into", "a", "string", "representing", "the", "authorization", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L165-L177
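The effect of `unroll_auth_headers()` is easiest to see on a small example: keys are sorted, values are percent-encoded and quoted, and a `signature` entry (when included) is passed through verbatim. The header values below are made up:

```python
# Illustration of the unrolled header string: sorted keys, percent-encoded
# quoted values. The values are invented for the example.
from urllib.parse import quote as urlquote

authheaders = {"realm": "Example Realm", "id": "client-1", "nonce": "64d02132"}
unrolled = ",".join(
    '{0}="{1}"'.format(k, urlquote(str(v), safe=""))
    for k, v in sorted(authheaders.items())
)
print(unrolled)  # id="client-1",nonce="64d02132",realm="Example%20Realm"
```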
243,306
baliame/http-hmac-python
httphmac/v2.py
V2Signer.sign_direct
def sign_direct(self, request, authheaders, secret): """Signs a request directly with a v2 signature. The request's Authorization header will change. This function may also add the required X-Authorization-Timestamp and X-Authorization-Content-SHA256 headers. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """ if request.get_header('x-authorization-timestamp') == '': request.with_header("X-Authorization-Timestamp", str(time.time())) if request.body is not None and request.body != b'': if request.get_header("x-authorization-content-sha256") == '': sha256 = hashlib.sha256() sha256.update(request.body) request.with_header("X-Authorization-Content-SHA256", base64.b64encode(sha256.digest()).decode('utf-8')) sig = self.sign(request, authheaders, secret) authheaders["signature"] = sig return request.with_header("Authorization", "acquia-http-hmac {0}".format(self.unroll_auth_headers(authheaders)))
python
def sign_direct(self, request, authheaders, secret): """Signs a request directly with a v2 signature. The request's Authorization header will change. This function may also add the required X-Authorization-Timestamp and X-Authorization-Content-SHA256 headers. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """ if request.get_header('x-authorization-timestamp') == '': request.with_header("X-Authorization-Timestamp", str(time.time())) if request.body is not None and request.body != b'': if request.get_header("x-authorization-content-sha256") == '': sha256 = hashlib.sha256() sha256.update(request.body) request.with_header("X-Authorization-Content-SHA256", base64.b64encode(sha256.digest()).decode('utf-8')) sig = self.sign(request, authheaders, secret) authheaders["signature"] = sig return request.with_header("Authorization", "acquia-http-hmac {0}".format(self.unroll_auth_headers(authheaders)))
[ "def", "sign_direct", "(", "self", ",", "request", ",", "authheaders", ",", "secret", ")", ":", "if", "request", ".", "get_header", "(", "'x-authorization-timestamp'", ")", "==", "''", ":", "request", ".", "with_header", "(", "\"X-Authorization-Timestamp\"", ",", "str", "(", "time", ".", "time", "(", ")", ")", ")", "if", "request", ".", "body", "is", "not", "None", "and", "request", ".", "body", "!=", "b''", ":", "if", "request", ".", "get_header", "(", "\"x-authorization-content-sha256\"", ")", "==", "''", ":", "sha256", "=", "hashlib", ".", "sha256", "(", ")", "sha256", ".", "update", "(", "request", ".", "body", ")", "request", ".", "with_header", "(", "\"X-Authorization-Content-SHA256\"", ",", "base64", ".", "b64encode", "(", "sha256", ".", "digest", "(", ")", ")", ".", "decode", "(", "'utf-8'", ")", ")", "sig", "=", "self", ".", "sign", "(", "request", ",", "authheaders", ",", "secret", ")", "authheaders", "[", "\"signature\"", "]", "=", "sig", "return", "request", ".", "with_header", "(", "\"Authorization\"", ",", "\"acquia-http-hmac {0}\"", ".", "format", "(", "self", ".", "unroll_auth_headers", "(", "authheaders", ")", ")", ")" ]
Signs a request directly with a v2 signature. The request's Authorization header will change. This function may also add the required X-Authorization-Timestamp and X-Authorization-Content-SHA256 headers. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization.
[ "Signs", "a", "request", "directly", "with", "a", "v2", "signature", ".", "The", "request", "s", "Authorization", "header", "will", "change", ".", "This", "function", "may", "also", "add", "the", "required", "X", "-", "Authorization", "-", "Timestamp", "and", "X", "-", "Authorization", "-", "Content", "-", "SHA256", "headers", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L188-L206
243,307
baliame/http-hmac-python
httphmac/v2.py
V2ResponseSigner.check
def check(self, request, response, secret): """Checks the response for the appropriate signature. Returns True if the signature matches the expected value. Keyword arguments: request -- A request object which can be consumed by this API. response -- A requests response object or compatible signed response object. secret -- The base64-encoded secret key for the HMAC authorization. """ auth = request.get_header('Authorization') if auth == '': raise KeyError('Authorization header is required for the request.') ah = self.orig.parse_auth_headers(auth) act = response.headers['X-Server-Authorization-HMAC-SHA256'] if act == '': raise KeyError('Response is missing the signature header X-Server-Authorization-HMAC-SHA256.') sig = self.sign(request, ah, response.text, secret) return sig == act
python
def check(self, request, response, secret): """Checks the response for the appropriate signature. Returns True if the signature matches the expected value. Keyword arguments: request -- A request object which can be consumed by this API. response -- A requests response object or compatible signed response object. secret -- The base64-encoded secret key for the HMAC authorization. """ auth = request.get_header('Authorization') if auth == '': raise KeyError('Authorization header is required for the request.') ah = self.orig.parse_auth_headers(auth) act = response.headers['X-Server-Authorization-HMAC-SHA256'] if act == '': raise KeyError('Response is missing the signature header X-Server-Authorization-HMAC-SHA256.') sig = self.sign(request, ah, response.text, secret) return sig == act
[ "def", "check", "(", "self", ",", "request", ",", "response", ",", "secret", ")", ":", "auth", "=", "request", ".", "get_header", "(", "'Authorization'", ")", "if", "auth", "==", "''", ":", "raise", "KeyError", "(", "'Authorization header is required for the request.'", ")", "ah", "=", "self", ".", "orig", ".", "parse_auth_headers", "(", "auth", ")", "act", "=", "response", ".", "headers", "[", "'X-Server-Authorization-HMAC-SHA256'", "]", "if", "act", "==", "''", ":", "raise", "KeyError", "(", "'Response is missing the signature header X-Server-Authorization-HMAC-SHA256.'", ")", "sig", "=", "self", ".", "sign", "(", "request", ",", "ah", ",", "response", ".", "text", ",", "secret", ")", "return", "sig", "==", "act" ]
Checks the response for the appropriate signature. Returns True if the signature matches the expected value. Keyword arguments: request -- A request object which can be consumed by this API. response -- A requests response object or compatible signed response object. secret -- The base64-encoded secret key for the HMAC authorization.
[ "Checks", "the", "response", "for", "the", "appropriate", "signature", ".", "Returns", "True", "if", "the", "signature", "matches", "the", "expected", "value", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L226-L242
243,308
baliame/http-hmac-python
httphmac/v2.py
V2ResponseSigner.signable
def signable(self, request, authheaders, response_body): """Creates the signable string for a response and returns it. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. """ nonce = authheaders["nonce"] timestamp = request.get_header("x-authorization-timestamp") try: body_str = response_body.decode('utf-8') except: body_str = response_body return '{0}\n{1}\n{2}'.format(nonce, timestamp, body_str)
python
def signable(self, request, authheaders, response_body): """Creates the signable string for a response and returns it. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. """ nonce = authheaders["nonce"] timestamp = request.get_header("x-authorization-timestamp") try: body_str = response_body.decode('utf-8') except: body_str = response_body return '{0}\n{1}\n{2}'.format(nonce, timestamp, body_str)
[ "def", "signable", "(", "self", ",", "request", ",", "authheaders", ",", "response_body", ")", ":", "nonce", "=", "authheaders", "[", "\"nonce\"", "]", "timestamp", "=", "request", ".", "get_header", "(", "\"x-authorization-timestamp\"", ")", "try", ":", "body_str", "=", "response_body", ".", "decode", "(", "'utf-8'", ")", "except", ":", "body_str", "=", "response_body", "return", "'{0}\\n{1}\\n{2}'", ".", "format", "(", "nonce", ",", "timestamp", ",", "body_str", ")" ]
Creates the signable string for a response and returns it. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response.
[ "Creates", "the", "signable", "string", "for", "a", "response", "and", "returns", "it", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L244-L258
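So for a response, the signable string is just the request nonce, the request's `X-Authorization-Timestamp`, and the response body joined by newlines; for example (placeholder values):

```python
# Shape of the response signable string, with placeholder values.
nonce = "64d02132-40bf-4fce-85bf-3f1bb1bfe7dd"
timestamp = "1432075982"
body = '{"status": "ok"}'
signable = "{0}\n{1}\n{2}".format(nonce, timestamp, body)
print(repr(signable))
```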
243,309
baliame/http-hmac-python
httphmac/v2.py
V2ResponseSigner.sign
def sign(self, request, authheaders, response_body, secret): """Returns the response signature for the response to the request. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. secret -- The base64-encoded secret key for the HMAC authorization. """ if "nonce" not in authheaders or authheaders["nonce"] == '': raise KeyError("nonce required in authorization headers.") if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") try: mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True), digestmod=self.digest) except TypeError: s = secret.encode('utf-8') if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest) mac.update(self.signable(request, authheaders, response_body).encode('utf-8')) digest = mac.digest() return base64.b64encode(digest).decode('utf-8')
python
def sign(self, request, authheaders, response_body, secret): """Returns the response signature for the response to the request. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. secret -- The base64-encoded secret key for the HMAC authorization. """ if "nonce" not in authheaders or authheaders["nonce"] == '': raise KeyError("nonce required in authorization headers.") if request.get_header('x-authorization-timestamp') == '': raise KeyError("X-Authorization-Timestamp is required.") try: mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True), digestmod=self.digest) except TypeError: s = secret.encode('utf-8') if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest) mac.update(self.signable(request, authheaders, response_body).encode('utf-8')) digest = mac.digest() return base64.b64encode(digest).decode('utf-8')
[ "def", "sign", "(", "self", ",", "request", ",", "authheaders", ",", "response_body", ",", "secret", ")", ":", "if", "\"nonce\"", "not", "in", "authheaders", "or", "authheaders", "[", "\"nonce\"", "]", "==", "''", ":", "raise", "KeyError", "(", "\"nonce required in authorization headers.\"", ")", "if", "request", ".", "get_header", "(", "'x-authorization-timestamp'", ")", "==", "''", ":", "raise", "KeyError", "(", "\"X-Authorization-Timestamp is required.\"", ")", "try", ":", "mac", "=", "hmac", ".", "HMAC", "(", "base64", ".", "b64decode", "(", "secret", ".", "encode", "(", "'utf-8'", ")", ",", "validate", "=", "True", ")", ",", "digestmod", "=", "self", ".", "digest", ")", "except", "TypeError", ":", "s", "=", "secret", ".", "encode", "(", "'utf-8'", ")", "if", "not", "re", ".", "match", "(", "b'^[A-Za-z0-9+/]*={0,2}$'", ",", "s", ")", ":", "raise", "binascii", ".", "Error", "(", "'Non-base64 digit found'", ")", "mac", "=", "hmac", ".", "HMAC", "(", "base64", ".", "b64decode", "(", "s", ")", ",", "digestmod", "=", "self", ".", "digest", ")", "mac", ".", "update", "(", "self", ".", "signable", "(", "request", ",", "authheaders", ",", "response_body", ")", ".", "encode", "(", "'utf-8'", ")", ")", "digest", "=", "mac", ".", "digest", "(", ")", "return", "base64", ".", "b64encode", "(", "digest", ")", ".", "decode", "(", "'utf-8'", ")" ]
Returns the response signature for the response to the request. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. secret -- The base64-encoded secret key for the HMAC authorization.
[ "Returns", "the", "response", "signature", "for", "the", "response", "to", "the", "request", "." ]
9884c0cbfdb712f9f37080a8efbfdce82850785f
https://github.com/baliame/http-hmac-python/blob/9884c0cbfdb712f9f37080a8efbfdce82850785f/httphmac/v2.py#L260-L283
243,310
basvandenbroek/gcloud_taskqueue
gcloud_taskqueue/taskqueue.py
Taskqueue.delete_task
def delete_task(self, id, client=None): """Deletes a task from the current task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to delete. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). client.connection.api_request(method='DELETE', path=task.path, _target_object=None)
python
def delete_task(self, id, client=None): """Deletes a task from the current task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to delete. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). client.connection.api_request(method='DELETE', path=task.path, _target_object=None)
[ "def", "delete_task", "(", "self", ",", "id", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "task", "=", "Task", "(", "taskqueue", "=", "self", ",", "id", "=", "id", ")", "# We intentionally pass `_target_object=None` since a DELETE", "# request has no response value (whether in a standard request or", "# in a batch request).", "client", ".", "connection", ".", "api_request", "(", "method", "=", "'DELETE'", ",", "path", "=", "task", ".", "path", ",", "_target_object", "=", "None", ")" ]
Deletes a task from the current task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to delete. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :raises: :class:`gcloud.exceptions.NotFound`
[ "Deletes", "a", "task", "from", "the", "current", "task", "queue", "." ]
b147b57f7c0ad9e8030ee9797d6526a448aa5007
https://github.com/basvandenbroek/gcloud_taskqueue/blob/b147b57f7c0ad9e8030ee9797d6526a448aa5007/gcloud_taskqueue/taskqueue.py#L153-L174
243,311
basvandenbroek/gcloud_taskqueue
gcloud_taskqueue/taskqueue.py
Taskqueue.get_task
def get_task(self, id, client=None): """Gets a named task from taskqueue If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to get :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) try: response = client.connection.api_request(method='GET', path=task.path, _target_object=task) task._set_properties(response) return task except NotFound: return None
python
def get_task(self, id, client=None): """Gets a named task from taskqueue If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to get :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) try: response = client.connection.api_request(method='GET', path=task.path, _target_object=task) task._set_properties(response) return task except NotFound: return None
[ "def", "get_task", "(", "self", ",", "id", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "task", "=", "Task", "(", "taskqueue", "=", "self", ",", "id", "=", "id", ")", "try", ":", "response", "=", "client", ".", "connection", ".", "api_request", "(", "method", "=", "'GET'", ",", "path", "=", "task", ".", "path", ",", "_target_object", "=", "task", ")", "task", ".", "_set_properties", "(", "response", ")", "return", "task", "except", "NotFound", ":", "return", "None" ]
Gets a named task from taskqueue If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to get :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound`
[ "Gets", "a", "named", "task", "from", "taskqueue" ]
b147b57f7c0ad9e8030ee9797d6526a448aa5007
https://github.com/basvandenbroek/gcloud_taskqueue/blob/b147b57f7c0ad9e8030ee9797d6526a448aa5007/gcloud_taskqueue/taskqueue.py#L176-L199
243,312
basvandenbroek/gcloud_taskqueue
gcloud_taskqueue/taskqueue.py
Taskqueue.lease
def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None): """ Acquires a lease on the topmost N unowned tasks in the specified queue. :type lease_time: int :param lease_time: How long to lease this task, in seconds. :type num_tasks: int :param num_tasks: The number of tasks to lease. :type group_by_tag: bool :param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue. :type tag: string :param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified, the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_TaskIterator`. :returns: An iterator of tasks. """ client = self._require_client(client) if group_by_tag: query_params = {"leaseSecs": lease_time, "numTasks": num_tasks, "groupByTag": group_by_tag, "tag": tag} else: query_params = {"leaseSecs": lease_time, "numTasks": num_tasks} response = client.connection.api_request(method='POST', path=self.path + "/tasks/lease", query_params=query_params) for item in response.get('items', []): id = item.get('id') task = Task(id, taskqueue=self) task._set_properties(item) yield task
python
def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None): """ Acquires a lease on the topmost N unowned tasks in the specified queue. :type lease_time: int :param lease_time: How long to lease this task, in seconds. :type num_tasks: int :param num_tasks: The number of tasks to lease. :type group_by_tag: bool :param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue. :type tag: string :param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified, the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_TaskIterator`. :returns: An iterator of tasks. """ client = self._require_client(client) if group_by_tag: query_params = {"leaseSecs": lease_time, "numTasks": num_tasks, "groupByTag": group_by_tag, "tag": tag} else: query_params = {"leaseSecs": lease_time, "numTasks": num_tasks} response = client.connection.api_request(method='POST', path=self.path + "/tasks/lease", query_params=query_params) for item in response.get('items', []): id = item.get('id') task = Task(id, taskqueue=self) task._set_properties(item) yield task
[ "def", "lease", "(", "self", ",", "lease_time", ",", "num_tasks", ",", "group_by_tag", "=", "False", ",", "tag", "=", "None", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "if", "group_by_tag", ":", "query_params", "=", "{", "\"leaseSecs\"", ":", "lease_time", ",", "\"numTasks\"", ":", "num_tasks", ",", "\"groupByTag\"", ":", "group_by_tag", ",", "\"tag\"", ":", "tag", "}", "else", ":", "query_params", "=", "{", "\"leaseSecs\"", ":", "lease_time", ",", "\"numTasks\"", ":", "num_tasks", "}", "response", "=", "client", ".", "connection", ".", "api_request", "(", "method", "=", "'POST'", ",", "path", "=", "self", ".", "path", "+", "\"/tasks/lease\"", ",", "query_params", "=", "query_params", ")", "for", "item", "in", "response", ".", "get", "(", "'items'", ",", "[", "]", ")", ":", "id", "=", "item", ".", "get", "(", "'id'", ")", "task", "=", "Task", "(", "id", ",", "taskqueue", "=", "self", ")", "task", ".", "_set_properties", "(", "item", ")", "yield", "task" ]
Acquires a lease on the topmost N unowned tasks in the specified queue. :type lease_time: int :param lease_time: How long to lease this task, in seconds. :type num_tasks: int :param num_tasks: The number of tasks to lease. :type group_by_tag: bool :param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue. :type tag: string :param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified, the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_TaskIterator`. :returns: An iterator of tasks.
[ "Acquires", "a", "lease", "on", "the", "topmost", "N", "unowned", "tasks", "in", "the", "specified", "queue", "." ]
b147b57f7c0ad9e8030ee9797d6526a448aa5007
https://github.com/basvandenbroek/gcloud_taskqueue/blob/b147b57f7c0ad9e8030ee9797d6526a448aa5007/gcloud_taskqueue/taskqueue.py#L209-L246
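A typical consumer loop built on `lease()` pulls a batch, processes each task, and deletes it to acknowledge completion. A hedged sketch follows; `tq` stands for an already-constructed `Taskqueue`, `handle` is a caller-supplied callable, and the `task.id` attribute is assumed from the `Task(id, ...)` construction shown above.

```python
# Hedged consumer sketch: lease a batch, process each task, then delete it so
# it is not re-leased once the lease expires.
def drain(tq, handle, seconds=60, batch=10):
    for task in tq.lease(lease_time=seconds, num_tasks=batch):
        handle(task)               # caller-supplied work on the task payload
        tq.delete_task(task.id)    # task.id assumed from Task(id, ...) above
```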
243,313
basvandenbroek/gcloud_taskqueue
gcloud_taskqueue/taskqueue.py
Taskqueue.update_task
def update_task(self, id, new_lease_time, client=None): """ Updates the duration of a task lease If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to update :type new_lease_time: int :param new_lease_time: New lease time, in seconds. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) try: response = client.connection.api_request(method='POST', path=self.path + "/tasks/" + id, query_params={"newLeaseSeconds": new_lease_time}, _target_object=task) task._set_properties(response) return task except NotFound: return None
python
def update_task(self, id, new_lease_time, client=None): """ Updates the duration of a task lease If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to update :type new_lease_time: int :param new_lease_time: New lease time, in seconds. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) task = Task(taskqueue=self, id=id) try: response = client.connection.api_request(method='POST', path=self.path + "/tasks/" + id, query_params={"newLeaseSeconds": new_lease_time}, _target_object=task) task._set_properties(response) return task except NotFound: return None
[ "def", "update_task", "(", "self", ",", "id", ",", "new_lease_time", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "task", "=", "Task", "(", "taskqueue", "=", "self", ",", "id", "=", "id", ")", "try", ":", "response", "=", "client", ".", "connection", ".", "api_request", "(", "method", "=", "'POST'", ",", "path", "=", "self", ".", "path", "+", "\"/tasks/\"", "+", "id", ",", "query_params", "=", "{", "\"newLeaseSeconds\"", ":", "new_lease_time", "}", ",", "_target_object", "=", "task", ")", "task", ".", "_set_properties", "(", "response", ")", "return", "task", "except", "NotFound", ":", "return", "None" ]
Updates the duration of a task lease If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to update :type new_lease_time: int :param new_lease_time: New lease time, in seconds. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound`
[ "Updates", "the", "duration", "of", "a", "task", "lease" ]
b147b57f7c0ad9e8030ee9797d6526a448aa5007
https://github.com/basvandenbroek/gcloud_taskqueue/blob/b147b57f7c0ad9e8030ee9797d6526a448aa5007/gcloud_taskqueue/taskqueue.py#L248-L277
243,314
basvandenbroek/gcloud_taskqueue
gcloud_taskqueue/taskqueue.py
Taskqueue.insert_task
def insert_task(self, description, tag=None, client=None): """ Insert task in task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type description: string :param description: Description of task to perform :type tag: string :param tag: Optional. The tag for this task, allows leasing tasks with a specific tag :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) new_task = { "queueName": self.full_name, "payloadBase64": base64.b64encode(description).decode('ascii'), "tag": tag } response = client.connection.api_request(method='POST', path=self.path + "/tasks/", data=new_task) task = Task(taskqueue=self, id=response.get('id')) task._set_properties(response) return task
python
def insert_task(self, description, tag=None, client=None): """ Insert task in task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type description: string :param description: Description of task to perform :type tag: string :param tag: Optional. The tag for this task, allows leasing tasks with a specific tag :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """ client = self._require_client(client) new_task = { "queueName": self.full_name, "payloadBase64": base64.b64encode(description).decode('ascii'), "tag": tag } response = client.connection.api_request(method='POST', path=self.path + "/tasks/", data=new_task) task = Task(taskqueue=self, id=response.get('id')) task._set_properties(response) return task
[ "def", "insert_task", "(", "self", ",", "description", ",", "tag", "=", "None", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "new_task", "=", "{", "\"queueName\"", ":", "self", ".", "full_name", ",", "\"payloadBase64\"", ":", "base64", ".", "b64encode", "(", "description", ")", ".", "decode", "(", "'ascii'", ")", ",", "\"tag\"", ":", "tag", "}", "response", "=", "client", ".", "connection", ".", "api_request", "(", "method", "=", "'POST'", ",", "path", "=", "self", ".", "path", "+", "\"/tasks/\"", ",", "data", "=", "new_task", ")", "task", "=", "Task", "(", "taskqueue", "=", "self", ",", "id", "=", "response", ".", "get", "(", "'id'", ")", ")", "task", ".", "_set_properties", "(", "response", ")", "return", "task" ]
Insert task in task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type description: string :param description: Description of task to perform :type tag: string :param tag: Optional. The tag for this task, allows leasing tasks with a specific tag :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound`
[ "Insert", "task", "in", "task", "queue", "." ]
b147b57f7c0ad9e8030ee9797d6526a448aa5007
https://github.com/basvandenbroek/gcloud_taskqueue/blob/b147b57f7c0ad9e8030ee9797d6526a448aa5007/gcloud_taskqueue/taskqueue.py#L279-L310
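Note that `insert_task()` passes `description` straight to `base64.b64encode()`, so it expects bytes. A hedged usage sketch, with `tq` standing for a `Taskqueue` constructed elsewhere in the application:

```python
# Hedged usage sketch: the payload must be bytes because insert_task()
# base64-encodes it directly; "tq" is a Taskqueue instance supplied by the caller.
def enqueue_welcome_email(tq, user_id):
    payload = '{{"action": "send-welcome-email", "user": {0}}}'.format(user_id).encode("utf-8")
    return tq.insert_task(payload, tag="emails")   # tag enables grouped leasing later
```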
243,315
pydsigner/pygu
pygu/pygw.py
Base.add_to
def add_to(self, container): ''' Add the class to @container. ''' if self.container: self.remove_from(self.container) container.add(self)
python
def add_to(self, container): ''' Add the class to @container. ''' if self.container: self.remove_from(self.container) container.add(self)
[ "def", "add_to", "(", "self", ",", "container", ")", ":", "if", "self", ".", "container", ":", "self", ".", "remove_from", "(", "self", ".", "container", ")", "container", ".", "add", "(", "self", ")" ]
Add the class to @container.
[ "Add", "the", "class", "to" ]
09fe71534900933908ab83db12f5659b7827e31c
https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L78-L84
243,316
pydsigner/pygu
pygu/pygw.py
Container.draw
def draw(self, surf): ''' Draw all widgets and sub-containers to @surf. ''' if self.shown: for w in self.widgets: surf.blit(w.image, self.convert_rect(w.rect)) for c in self.containers: c.draw(surf)
python
def draw(self, surf): ''' Draw all widgets and sub-containers to @surf. ''' if self.shown: for w in self.widgets: surf.blit(w.image, self.convert_rect(w.rect)) for c in self.containers: c.draw(surf)
[ "def", "draw", "(", "self", ",", "surf", ")", ":", "if", "self", ".", "shown", ":", "for", "w", "in", "self", ".", "widgets", ":", "surf", ".", "blit", "(", "w", ".", "image", ",", "self", ".", "convert_rect", "(", "w", ".", "rect", ")", ")", "for", "c", "in", "self", ".", "containers", ":", "c", ".", "draw", "(", "surf", ")" ]
Draw all widgets and sub-containers to @surf.
[ "Draw", "all", "widgets", "and", "sub", "-", "containers", "to" ]
09fe71534900933908ab83db12f5659b7827e31c
https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L153-L161
243,317
pydsigner/pygu
pygu/pygw.py
Container.kill
def kill(self): ''' Remove the class from its container, contained items and sub-widgets. Runs automatically when the class is garbage collected. ''' Base.kill(self) for c in self.containers: c.remove_internal(self) for w in self.widgets: w.remove_internal(self)
python
def kill(self): ''' Remove the class from its container, contained items and sub-widgets. Runs automatically when the class is garbage collected. ''' Base.kill(self) for c in self.containers: c.remove_internal(self) for w in self.widgets: w.remove_internal(self)
[ "def", "kill", "(", "self", ")", ":", "Base", ".", "kill", "(", "self", ")", "for", "c", "in", "self", ".", "containers", ":", "c", ".", "remove_internal", "(", "self", ")", "for", "w", "in", "self", ".", "widgets", ":", "w", ".", "remove_internal", "(", "self", ")" ]
Remove the class from its container, contained items and sub-widgets. Runs automatically when the class is garbage collected.
[ "Remove", "the", "class", "from", "its", "container", "contained", "items", "and", "sub", "-", "widgets", ".", "Runs", "automatically", "when", "the", "class", "is", "garbage", "collected", "." ]
09fe71534900933908ab83db12f5659b7827e31c
https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L163-L173
243,318
pydsigner/pygu
pygu/pygw.py
Typable.bspace
def bspace(self): ''' Remove the character before the cursor. ''' try: self.text.pop(self.cursor_loc - 1) self.cursor_loc -= 1 except IndexError: pass
python
def bspace(self): ''' Remove the character before the cursor. ''' try: self.text.pop(self.cursor_loc - 1) self.cursor_loc -= 1 except IndexError: pass
[ "def", "bspace", "(", "self", ")", ":", "try", ":", "self", ".", "text", ".", "pop", "(", "self", ".", "cursor_loc", "-", "1", ")", "self", ".", "cursor_loc", "-=", "1", "except", "IndexError", ":", "pass" ]
Remove the character before the cursor.
[ "Remove", "the", "character", "before", "the", "cursor", "." ]
09fe71534900933908ab83db12f5659b7827e31c
https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L446-L454
243,319
pydsigner/pygu
pygu/pygw.py
Entry.paste
def paste(self): ''' Insert text from the clipboard at the cursor. ''' try: t = pygame.scrap.get(SCRAP_TEXT) if t: self.insert(t) return True except: # pygame.scrap is experimental, allow for changes return False
python
def paste(self): ''' Insert text from the clipboard at the cursor. ''' try: t = pygame.scrap.get(SCRAP_TEXT) if t: self.insert(t) return True except: # pygame.scrap is experimental, allow for changes return False
[ "def", "paste", "(", "self", ")", ":", "try", ":", "t", "=", "pygame", ".", "scrap", ".", "get", "(", "SCRAP_TEXT", ")", "if", "t", ":", "self", ".", "insert", "(", "t", ")", "return", "True", "except", ":", "# pygame.scrap is experimental, allow for changes", "return", "False" ]
Insert text from the clipboard at the cursor.
[ "Insert", "text", "from", "the", "clipboard", "at", "the", "cursor", "." ]
09fe71534900933908ab83db12f5659b7827e31c
https://github.com/pydsigner/pygu/blob/09fe71534900933908ab83db12f5659b7827e31c/pygu/pygw.py#L568-L579
243,320
slarse/clanimtk
clanimtk/cli.py
animate_cli
def animate_cli(animation_, step, event): """Print out the animation cycle to stdout. This function is for use with synchronous functions and must be run in a thread. Args: animation_ (generator): A generator that produces strings for the animation. Should be endless. step (float): Seconds between each animation frame. """ while True: # run at least once, important for tests! time.sleep(step) frame = next(animation_) sys.stdout.write(frame) sys.stdout.flush() if event.is_set(): break sys.stdout.write(animation_.get_erase_frame()) sys.stdout.flush() animation_.reset()
python
def animate_cli(animation_, step, event): """Print out the animation cycle to stdout. This function is for use with synchronous functions and must be run in a thread. Args: animation_ (generator): A generator that produces strings for the animation. Should be endless. step (float): Seconds between each animation frame. """ while True: # run at least once, important for tests! time.sleep(step) frame = next(animation_) sys.stdout.write(frame) sys.stdout.flush() if event.is_set(): break sys.stdout.write(animation_.get_erase_frame()) sys.stdout.flush() animation_.reset()
[ "def", "animate_cli", "(", "animation_", ",", "step", ",", "event", ")", ":", "while", "True", ":", "# run at least once, important for tests!", "time", ".", "sleep", "(", "step", ")", "frame", "=", "next", "(", "animation_", ")", "sys", ".", "stdout", ".", "write", "(", "frame", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "event", ".", "is_set", "(", ")", ":", "break", "sys", ".", "stdout", ".", "write", "(", "animation_", ".", "get_erase_frame", "(", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "animation_", ".", "reset", "(", ")" ]
Print out the animation cycle to stdout. This function is for use with synchronous functions and must be run in a thread. Args: animation_ (generator): A generator that produces strings for the animation. Should be endless. step (float): Seconds between each animation frame.
[ "Print", "out", "the", "animation", "cycle", "to", "stdout", ".", "This", "function", "is", "for", "use", "with", "synchronous", "functions", "and", "must", "be", "run", "in", "a", "thread", "." ]
cb93d2e914c3ecc4e0007745ff4d546318cf3902
https://github.com/slarse/clanimtk/blob/cb93d2e914c3ecc4e0007745ff4d546318cf3902/clanimtk/cli.py#L24-L42
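animate_cli in the record above expects an animation object that is both an iterator and provides get_erase_frame() and reset(), plus a threading.Event used as the stop flag. A hedged sketch of driving it from a worker thread follows; the import path and the stub animation's interface are assumptions, not clanimtk's documented API:

```python
import threading
import time

from clanimtk.cli import animate_cli   # assumed import path, based on the record's file path

class Spinner:
    """Tiny stand-in for a clanimtk animation generator (assumed interface)."""
    FRAMES = ['|', '/', '-', '\\']

    def __init__(self):
        self._i = 0

    def __next__(self):
        frame = self.FRAMES[self._i % len(self.FRAMES)]
        self._i += 1
        return '\r' + frame          # carriage return keeps the spinner on one line

    def get_erase_frame(self):
        return '\r \r'               # blanks out the last frame

    def reset(self):
        self._i = 0

event = threading.Event()
worker = threading.Thread(target=animate_cli, args=(Spinner(), 0.1, event))
worker.start()
time.sleep(1)        # pretend the main thread is doing real work
event.set()          # tell animate_cli to stop after its current frame
worker.join()
```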
243,321
CS207-Final-Project-Group-10/cs207-FinalProject
solar_system/earth_sun.py
energy_ES
def energy_ES(q, v): """Compute the kinetic and potential energy of the earth sun system""" # Body 0 is the sun, Body 1 is the earth m0 = mass[0] m1 = mass[1] # Positions of sun and earth q0: np.ndarray = q[:, slices[0]] q1: np.ndarray = q[:, slices[1]] # Velocities of sun and earth v0: np.ndarray = v[:, slices[0]] v1: np.ndarray = v[:, slices[1]] # Kinetic energy is 1/2 mv^2 T0: np.ndarray = 0.5 * m0 * np.sum(v0 * v0, axis=1) T1: np.ndarray = 0.5 * m1 * np.sum(v1 * v1, axis=1) T: np.ndarray = T0 + T1 # Potential energy is -G m1 m2 / r dv_01 = q1 - q0 r_01 = np.linalg.norm(dv_01, axis=1) U_01: np.ndarray = -G * m0 * m1 / r_01 U: np.ndarray = U_01 # Total energy H = T + U H = T + U return H, T, U
python
def energy_ES(q, v): """Compute the kinetic and potential energy of the earth sun system""" # Body 0 is the sun, Body 1 is the earth m0 = mass[0] m1 = mass[1] # Positions of sun and earth q0: np.ndarray = q[:, slices[0]] q1: np.ndarray = q[:, slices[1]] # Velocities of sun and earth v0: np.ndarray = v[:, slices[0]] v1: np.ndarray = v[:, slices[1]] # Kinetic energy is 1/2 mv^2 T0: np.ndarray = 0.5 * m0 * np.sum(v0 * v0, axis=1) T1: np.ndarray = 0.5 * m1 * np.sum(v1 * v1, axis=1) T: np.ndarray = T0 + T1 # Potential energy is -G m1 m2 / r dv_01 = q1 - q0 r_01 = np.linalg.norm(dv_01, axis=1) U_01: np.ndarray = -G * m0 * m1 / r_01 U: np.ndarray = U_01 # Total energy H = T + U H = T + U return H, T, U
[ "def", "energy_ES", "(", "q", ",", "v", ")", ":", "# Body 0 is the sun, Body 1 is the earth", "m0", "=", "mass", "[", "0", "]", "m1", "=", "mass", "[", "1", "]", "# Positions of sun and earth", "q0", ":", "np", ".", "ndarray", "=", "q", "[", ":", ",", "slices", "[", "0", "]", "]", "q1", ":", "np", ".", "ndarray", "=", "q", "[", ":", ",", "slices", "[", "1", "]", "]", "# Velocities of sun and earth", "v0", ":", "np", ".", "ndarray", "=", "v", "[", ":", ",", "slices", "[", "0", "]", "]", "v1", ":", "np", ".", "ndarray", "=", "v", "[", ":", ",", "slices", "[", "1", "]", "]", "# Kinetic energy is 1/2 mv^2", "T0", ":", "np", ".", "ndarray", "=", "0.5", "*", "m0", "*", "np", ".", "sum", "(", "v0", "*", "v0", ",", "axis", "=", "1", ")", "T1", ":", "np", ".", "ndarray", "=", "0.5", "*", "m1", "*", "np", ".", "sum", "(", "v1", "*", "v1", ",", "axis", "=", "1", ")", "T", ":", "np", ".", "ndarray", "=", "T0", "+", "T1", "# Potential energy is -G m1 m2 / r", "dv_01", "=", "q1", "-", "q0", "r_01", "=", "np", ".", "linalg", ".", "norm", "(", "dv_01", ",", "axis", "=", "1", ")", "U_01", ":", "np", ".", "ndarray", "=", "-", "G", "*", "m0", "*", "m1", "/", "r_01", "U", ":", "np", ".", "ndarray", "=", "U_01", "# Total energy H = T + U", "H", "=", "T", "+", "U", "return", "H", ",", "T", ",", "U" ]
Compute the kinetic and potential energy of the earth sun system
[ "Compute", "the", "kinetic", "and", "potential", "energy", "of", "the", "earth", "sun", "system" ]
842e9c2d3ca1490cef18c086dfde81856d8d3a82
https://github.com/CS207-Final-Project-Group-10/cs207-FinalProject/blob/842e9c2d3ca1490cef18c086dfde81856d8d3a82/solar_system/earth_sun.py#L174-L202
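The bookkeeping in energy_ES is just T = 0.5*m*v**2 per body plus U = -G*m0*m1/r for the pair. A quick numerical sanity check with illustrative SI values (the module's own mass, slices and G constants are not used here):

```python
G = 6.674e-11                     # gravitational constant, SI units
m0, m1 = 1.989e30, 5.972e24       # sun and earth masses, kg
r = 1.496e11                      # mean earth-sun distance, m
v1 = 2.978e4                      # earth's orbital speed, m/s

T = 0.5 * m1 * v1**2              # kinetic energy (sun treated as at rest)
U = -G * m0 * m1 / r              # gravitational potential energy of the pair
H = T + U                         # total energy; negative for a bound orbit
print(f"T={T:.3e} J  U={U:.3e} J  H={H:.3e} J")
```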
243,322
CS207-Final-Project-Group-10/cs207-FinalProject
solar_system/earth_sun.py
make_force_ES
def make_force_ES(q_vars, mass): """Fluxion with the potential energy of the earth-sun sytem""" # Build the potential energy fluxion; just one pair of bodies U = U_ij(q_vars, mass, 0, 1) # Varname arrays for both the coordinate system and U vn_q = np.array([q.var_name for q in q_vars]) vn_fl = np.array(sorted(U.var_names)) # Permutation array for putting variables in q in the order expected by U (alphabetical) q2fl = np.array([np.argmax((vn_q == v)) for v in vn_fl]) # Permutation array for putting results of U.diff() in order of q_vars fl2q = np.array([np.argmax((vn_fl == v)) for v in vn_q]) # Return a force function from this potential force_func = lambda q: -U.diff(q[q2fl]).squeeze()[fl2q] return force_func
python
def make_force_ES(q_vars, mass): """Fluxion with the potential energy of the earth-sun sytem""" # Build the potential energy fluxion; just one pair of bodies U = U_ij(q_vars, mass, 0, 1) # Varname arrays for both the coordinate system and U vn_q = np.array([q.var_name for q in q_vars]) vn_fl = np.array(sorted(U.var_names)) # Permutation array for putting variables in q in the order expected by U (alphabetical) q2fl = np.array([np.argmax((vn_q == v)) for v in vn_fl]) # Permutation array for putting results of U.diff() in order of q_vars fl2q = np.array([np.argmax((vn_fl == v)) for v in vn_q]) # Return a force function from this potential force_func = lambda q: -U.diff(q[q2fl]).squeeze()[fl2q] return force_func
[ "def", "make_force_ES", "(", "q_vars", ",", "mass", ")", ":", "# Build the potential energy fluxion; just one pair of bodies", "U", "=", "U_ij", "(", "q_vars", ",", "mass", ",", "0", ",", "1", ")", "# Varname arrays for both the coordinate system and U", "vn_q", "=", "np", ".", "array", "(", "[", "q", ".", "var_name", "for", "q", "in", "q_vars", "]", ")", "vn_fl", "=", "np", ".", "array", "(", "sorted", "(", "U", ".", "var_names", ")", ")", "# Permutation array for putting variables in q in the order expected by U (alphabetical)", "q2fl", "=", "np", ".", "array", "(", "[", "np", ".", "argmax", "(", "(", "vn_q", "==", "v", ")", ")", "for", "v", "in", "vn_fl", "]", ")", "# Permutation array for putting results of U.diff() in order of q_vars", "fl2q", "=", "np", ".", "array", "(", "[", "np", ".", "argmax", "(", "(", "vn_fl", "==", "v", ")", ")", "for", "v", "in", "vn_q", "]", ")", "# Return a force function from this potential", "force_func", "=", "lambda", "q", ":", "-", "U", ".", "diff", "(", "q", "[", "q2fl", "]", ")", ".", "squeeze", "(", ")", "[", "fl2q", "]", "return", "force_func" ]
Fluxion with the potential energy of the earth-sun sytem
[ "Fluxion", "with", "the", "potential", "energy", "of", "the", "earth", "-", "sun", "sytem" ]
842e9c2d3ca1490cef18c086dfde81856d8d3a82
https://github.com/CS207-Final-Project-Group-10/cs207-FinalProject/blob/842e9c2d3ca1490cef18c086dfde81856d8d3a82/solar_system/earth_sun.py#L215-L228
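The two np.argmax permutation arrays in make_force_ES are index maps between the integrator's variable order and the fluxion's alphabetical order. A small self-contained demonstration that they are inverse permutations (the variable names below are made up for illustration):

```python
import numpy as np

# Two orderings of the same names: coordinate order (vn_q) and alphabetical order (vn_fl).
vn_q = np.array(['x0', 'y0', 'z0', 'x1', 'y1', 'z1'])
vn_fl = np.array(sorted(vn_q))

# q2fl[i] is the position in vn_q of the i-th alphabetical name, so q[q2fl]
# reorders a coordinate vector into the order the fluxion expects.
q2fl = np.array([np.argmax(vn_q == v) for v in vn_fl])
# fl2q does the inverse: it puts the fluxion's gradient back into q order.
fl2q = np.array([np.argmax(vn_fl == v) for v in vn_q])

q = np.arange(6)                      # a toy coordinate vector
assert (q[q2fl][fl2q] == q).all()     # the two permutations undo each other
```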
243,323
delfick/aws_syncr
aws_syncr/option_spec/aws_syncr_specs.py
valid_account_id.validate
def validate(self, meta, val): """Validate an account_id""" val = string_or_int_as_string_spec().normalise(meta, val) if not regexes['amazon_account_id'].match(val): raise BadOption("Account id must match a particular regex", got=val, should_match=regexes['amazon_account_id'].pattern) return val
python
def validate(self, meta, val): """Validate an account_id""" val = string_or_int_as_string_spec().normalise(meta, val) if not regexes['amazon_account_id'].match(val): raise BadOption("Account id must match a particular regex", got=val, should_match=regexes['amazon_account_id'].pattern) return val
[ "def", "validate", "(", "self", ",", "meta", ",", "val", ")", ":", "val", "=", "string_or_int_as_string_spec", "(", ")", ".", "normalise", "(", "meta", ",", "val", ")", "if", "not", "regexes", "[", "'amazon_account_id'", "]", ".", "match", "(", "val", ")", ":", "raise", "BadOption", "(", "\"Account id must match a particular regex\"", ",", "got", "=", "val", ",", "should_match", "=", "regexes", "[", "'amazon_account_id'", "]", ".", "pattern", ")", "return", "val" ]
Validate an account_id
[ "Validate", "an", "account_id" ]
8cd214b27c1eee98dfba4632cbb8bc0ae36356bd
https://github.com/delfick/aws_syncr/blob/8cd214b27c1eee98dfba4632cbb8bc0ae36356bd/aws_syncr/option_spec/aws_syncr_specs.py#L40-L45
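The actual pattern lives in regexes['amazon_account_id'] and is not shown in this row; a 12-digit rule is only a plausible stand-in here, used to illustrate the validate flow of coercing to a string and then matching:

```python
import re

amazon_account_id = re.compile(r'^\d{12}$')   # assumed pattern, for illustration only

for candidate in ('123456789012', 123456789012, 'not-an-account'):
    val = str(candidate)                      # mirrors string_or_int_as_string_spec
    print(repr(val), bool(amazon_account_id.match(val)))
```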
243,324
delfick/aws_syncr
aws_syncr/option_spec/aws_syncr_specs.py
AwsSyncrSpec.aws_syncr_spec
def aws_syncr_spec(self): """Spec for aws_syncr options""" formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=string_types) return create_spec(AwsSyncr , extra = defaulted(formatted_string, "") , stage = defaulted(formatted_string, "") , debug = defaulted(boolean(), False) , dry_run = defaulted(boolean(), False) , location = defaulted(formatted_string, "ap-southeast-2") , artifact = formatted_string , environment = formatted_string , config_folder = directory_spec() )
python
def aws_syncr_spec(self): """Spec for aws_syncr options""" formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=string_types) return create_spec(AwsSyncr , extra = defaulted(formatted_string, "") , stage = defaulted(formatted_string, "") , debug = defaulted(boolean(), False) , dry_run = defaulted(boolean(), False) , location = defaulted(formatted_string, "ap-southeast-2") , artifact = formatted_string , environment = formatted_string , config_folder = directory_spec() )
[ "def", "aws_syncr_spec", "(", "self", ")", ":", "formatted_string", "=", "formatted", "(", "string_spec", "(", ")", ",", "MergedOptionStringFormatter", ",", "expected_type", "=", "string_types", ")", "return", "create_spec", "(", "AwsSyncr", ",", "extra", "=", "defaulted", "(", "formatted_string", ",", "\"\"", ")", ",", "stage", "=", "defaulted", "(", "formatted_string", ",", "\"\"", ")", ",", "debug", "=", "defaulted", "(", "boolean", "(", ")", ",", "False", ")", ",", "dry_run", "=", "defaulted", "(", "boolean", "(", ")", ",", "False", ")", ",", "location", "=", "defaulted", "(", "formatted_string", ",", "\"ap-southeast-2\"", ")", ",", "artifact", "=", "formatted_string", ",", "environment", "=", "formatted_string", ",", "config_folder", "=", "directory_spec", "(", ")", ")" ]
Spec for aws_syncr options
[ "Spec", "for", "aws_syncr", "options" ]
8cd214b27c1eee98dfba4632cbb8bc0ae36356bd
https://github.com/delfick/aws_syncr/blob/8cd214b27c1eee98dfba4632cbb8bc0ae36356bd/aws_syncr/option_spec/aws_syncr_specs.py#L51-L63
243,325
delfick/aws_syncr
aws_syncr/option_spec/aws_syncr_specs.py
AwsSyncrSpec.accounts_spec
def accounts_spec(self): """Spec for accounts options""" formatted_account_id = formatted(valid_account_id(), MergedOptionStringFormatter, expected_type=string_types) return dictof(string_spec(), formatted_account_id)
python
def accounts_spec(self): """Spec for accounts options""" formatted_account_id = formatted(valid_account_id(), MergedOptionStringFormatter, expected_type=string_types) return dictof(string_spec(), formatted_account_id)
[ "def", "accounts_spec", "(", "self", ")", ":", "formatted_account_id", "=", "formatted", "(", "valid_account_id", "(", ")", ",", "MergedOptionStringFormatter", ",", "expected_type", "=", "string_types", ")", "return", "dictof", "(", "string_spec", "(", ")", ",", "formatted_account_id", ")" ]
Spec for accounts options
[ "Spec", "for", "accounts", "options" ]
8cd214b27c1eee98dfba4632cbb8bc0ae36356bd
https://github.com/delfick/aws_syncr/blob/8cd214b27c1eee98dfba4632cbb8bc0ae36356bd/aws_syncr/option_spec/aws_syncr_specs.py#L66-L69
243,326
sassoo/goldman
goldman/resources/s3_model.py
Resource._gen_s3_path
def _gen_s3_path(self, model, props): """ Return the part of the S3 path based on inputs The path will be passed to the s3_upload method & will ultimately be merged with the standard AWS S3 URL. An example model type of 'users' with a resource ID of 99 & an API endpoint ending with 'photos' will have a path generated in the following way: users/99/photos/<timestamp>.<extension> The timestamp is a high precision timestamp & the extension is typically 3 characters & derived in the form-data deserializer. """ now = '%.5f' % time.time() return '%s/%s/%s/%s.%s' % (model.rtype, model.rid_value, self._s3_rtype, now, props['file-ext'])
python
def _gen_s3_path(self, model, props): """ Return the part of the S3 path based on inputs The path will be passed to the s3_upload method & will ultimately be merged with the standard AWS S3 URL. An example model type of 'users' with a resource ID of 99 & an API endpoint ending with 'photos' will have a path generated in the following way: users/99/photos/<timestamp>.<extension> The timestamp is a high precision timestamp & the extension is typically 3 characters & derived in the form-data deserializer. """ now = '%.5f' % time.time() return '%s/%s/%s/%s.%s' % (model.rtype, model.rid_value, self._s3_rtype, now, props['file-ext'])
[ "def", "_gen_s3_path", "(", "self", ",", "model", ",", "props", ")", ":", "now", "=", "'%.5f'", "%", "time", ".", "time", "(", ")", "return", "'%s/%s/%s/%s.%s'", "%", "(", "model", ".", "rtype", ",", "model", ".", "rid_value", ",", "self", ".", "_s3_rtype", ",", "now", ",", "props", "[", "'file-ext'", "]", ")" ]
Return the part of the S3 path based on inputs The path will be passed to the s3_upload method & will ultimately be merged with the standard AWS S3 URL. An example model type of 'users' with a resource ID of 99 & an API endpoint ending with 'photos' will have a path generated in the following way: users/99/photos/<timestamp>.<extension> The timestamp is a high precision timestamp & the extension is typically 3 characters & derived in the form-data deserializer.
[ "Return", "the", "part", "of", "the", "S3", "path", "based", "on", "inputs" ]
b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2
https://github.com/sassoo/goldman/blob/b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2/goldman/resources/s3_model.py#L61-L82
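The key produced by _gen_s3_path is plain string formatting, so its shape can be previewed without any AWS pieces (the model and resource values below are stand-ins):

```python
import time

rtype, rid_value, s3_rtype = 'users', 99, 'photos'     # stand-in model/resource values
props = {'file-ext': 'jpg'}

now = '%.5f' % time.time()
path = '%s/%s/%s/%s.%s' % (rtype, rid_value, s3_rtype, now, props['file-ext'])
print(path)   # e.g. users/99/photos/1712345678.90123.jpg
```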
243,327
sassoo/goldman
goldman/resources/s3_model.py
Resource.on_post
def on_post(self, req, resp, rid): """ Deserialize the file upload & save it to S3 File uploads are associated with a model of some kind. Ensure the associating model exists first & foremost. """ signals.pre_req.send(self.model) signals.pre_req_upload.send(self.model) props = req.deserialize(self.mimetypes) model = find(self.model, rid) signals.pre_upload.send(self.model, model=model) try: conn = s3_connect(self.key, self.secret) path = self._gen_s3_path(model, props) s3_url = s3_upload(self.acl, self.bucket, conn, props['content'], props['content-type'], path) except IOError: abort(ServiceUnavailable(**{ 'detail': 'The upload attempt failed unexpectedly', })) else: signals.post_upload.send(self.model, model=model, url=s3_url) resp.location = s3_url resp.status = falcon.HTTP_201 resp.serialize({'data': {'url': s3_url}}) signals.post_req.send(self.model) signals.post_req_upload.send(self.model)
python
def on_post(self, req, resp, rid): """ Deserialize the file upload & save it to S3 File uploads are associated with a model of some kind. Ensure the associating model exists first & foremost. """ signals.pre_req.send(self.model) signals.pre_req_upload.send(self.model) props = req.deserialize(self.mimetypes) model = find(self.model, rid) signals.pre_upload.send(self.model, model=model) try: conn = s3_connect(self.key, self.secret) path = self._gen_s3_path(model, props) s3_url = s3_upload(self.acl, self.bucket, conn, props['content'], props['content-type'], path) except IOError: abort(ServiceUnavailable(**{ 'detail': 'The upload attempt failed unexpectedly', })) else: signals.post_upload.send(self.model, model=model, url=s3_url) resp.location = s3_url resp.status = falcon.HTTP_201 resp.serialize({'data': {'url': s3_url}}) signals.post_req.send(self.model) signals.post_req_upload.send(self.model)
[ "def", "on_post", "(", "self", ",", "req", ",", "resp", ",", "rid", ")", ":", "signals", ".", "pre_req", ".", "send", "(", "self", ".", "model", ")", "signals", ".", "pre_req_upload", ".", "send", "(", "self", ".", "model", ")", "props", "=", "req", ".", "deserialize", "(", "self", ".", "mimetypes", ")", "model", "=", "find", "(", "self", ".", "model", ",", "rid", ")", "signals", ".", "pre_upload", ".", "send", "(", "self", ".", "model", ",", "model", "=", "model", ")", "try", ":", "conn", "=", "s3_connect", "(", "self", ".", "key", ",", "self", ".", "secret", ")", "path", "=", "self", ".", "_gen_s3_path", "(", "model", ",", "props", ")", "s3_url", "=", "s3_upload", "(", "self", ".", "acl", ",", "self", ".", "bucket", ",", "conn", ",", "props", "[", "'content'", "]", ",", "props", "[", "'content-type'", "]", ",", "path", ")", "except", "IOError", ":", "abort", "(", "ServiceUnavailable", "(", "*", "*", "{", "'detail'", ":", "'The upload attempt failed unexpectedly'", ",", "}", ")", ")", "else", ":", "signals", ".", "post_upload", ".", "send", "(", "self", ".", "model", ",", "model", "=", "model", ",", "url", "=", "s3_url", ")", "resp", ".", "location", "=", "s3_url", "resp", ".", "status", "=", "falcon", ".", "HTTP_201", "resp", ".", "serialize", "(", "{", "'data'", ":", "{", "'url'", ":", "s3_url", "}", "}", ")", "signals", ".", "post_req", ".", "send", "(", "self", ".", "model", ")", "signals", ".", "post_req_upload", ".", "send", "(", "self", ".", "model", ")" ]
Deserialize the file upload & save it to S3 File uploads are associated with a model of some kind. Ensure the associating model exists first & foremost.
[ "Deserialize", "the", "file", "upload", "&", "save", "it", "to", "S3" ]
b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2
https://github.com/sassoo/goldman/blob/b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2/goldman/resources/s3_model.py#L84-L118
243,328
astex/sequential
sequential/decorators.py
before
def before(f, chain=False): """Runs f before the decorated function.""" def decorator(g): @wraps(g) def h(*args, **kargs): if chain: return g(f(*args, **kargs)) else: f(*args, **kargs) return g(*args, **kargs) return h return decorator
python
def before(f, chain=False): """Runs f before the decorated function.""" def decorator(g): @wraps(g) def h(*args, **kargs): if chain: return g(f(*args, **kargs)) else: f(*args, **kargs) return g(*args, **kargs) return h return decorator
[ "def", "before", "(", "f", ",", "chain", "=", "False", ")", ":", "def", "decorator", "(", "g", ")", ":", "@", "wraps", "(", "g", ")", "def", "h", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "if", "chain", ":", "return", "g", "(", "f", "(", "*", "args", ",", "*", "*", "kargs", ")", ")", "else", ":", "f", "(", "*", "args", ",", "*", "*", "kargs", ")", "return", "g", "(", "*", "args", ",", "*", "*", "kargs", ")", "return", "h", "return", "decorator" ]
Runs f before the decorated function.
[ "Runs", "f", "before", "the", "decorated", "function", "." ]
8812d487c33a8f0f1c96336cd27ad2fa942175f6
https://github.com/astex/sequential/blob/8812d487c33a8f0f1c96336cd27ad2fa942175f6/sequential/decorators.py#L5-L16
243,329
astex/sequential
sequential/decorators.py
after
def after(f, chain=False): """Runs f with the result of the decorated function.""" def decorator(g): @wraps(g) def h(*args, **kargs): if chain: return f(g(*args, **kargs)) else: r = g(*args, **kargs) f(*args, **kargs) return r return h return decorator
python
def after(f, chain=False): """Runs f with the result of the decorated function.""" def decorator(g): @wraps(g) def h(*args, **kargs): if chain: return f(g(*args, **kargs)) else: r = g(*args, **kargs) f(*args, **kargs) return r return h return decorator
[ "def", "after", "(", "f", ",", "chain", "=", "False", ")", ":", "def", "decorator", "(", "g", ")", ":", "@", "wraps", "(", "g", ")", "def", "h", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "if", "chain", ":", "return", "f", "(", "g", "(", "*", "args", ",", "*", "*", "kargs", ")", ")", "else", ":", "r", "=", "g", "(", "*", "args", ",", "*", "*", "kargs", ")", "f", "(", "*", "args", ",", "*", "*", "kargs", ")", "return", "r", "return", "h", "return", "decorator" ]
Runs f with the result of the decorated function.
[ "Runs", "f", "with", "the", "result", "of", "the", "decorated", "function", "." ]
8812d487c33a8f0f1c96336cd27ad2fa942175f6
https://github.com/astex/sequential/blob/8812d487c33a8f0f1c96336cd27ad2fa942175f6/sequential/decorators.py#L19-L31
243,330
astex/sequential
sequential/decorators.py
during
def during(f): """Runs f during the decorated function's execution in a separate thread.""" def decorator(g): @wraps(g) def h(*args, **kargs): tf = Thread(target=f, args=args, kwargs=kargs) tf.start() r = g(*args, **kargs) tf.join() return r return h return decorator
python
def during(f): """Runs f during the decorated function's execution in a separate thread.""" def decorator(g): @wraps(g) def h(*args, **kargs): tf = Thread(target=f, args=args, kwargs=kargs) tf.start() r = g(*args, **kargs) tf.join() return r return h return decorator
[ "def", "during", "(", "f", ")", ":", "def", "decorator", "(", "g", ")", ":", "@", "wraps", "(", "g", ")", "def", "h", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "tf", "=", "Thread", "(", "target", "=", "f", ",", "args", "=", "args", ",", "kwargs", "=", "kargs", ")", "tf", ".", "start", "(", ")", "r", "=", "g", "(", "*", "args", ",", "*", "*", "kargs", ")", "tf", ".", "join", "(", ")", "return", "r", "return", "h", "return", "decorator" ]
Runs f during the decorated function's execution in a separate thread.
[ "Runs", "f", "during", "the", "decorated", "function", "s", "execution", "in", "a", "separate", "thread", "." ]
8812d487c33a8f0f1c96336cd27ad2fa942175f6
https://github.com/astex/sequential/blob/8812d487c33a8f0f1c96336cd27ad2fa942175f6/sequential/decorators.py#L34-L45
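Putting the three sequential decorators together: before runs its hook first, after runs its hook once the wrapped function returns while still returning the wrapped function's result, and during runs its hook in a parallel thread. A hedged usage sketch for the first two follows; the import path is assumed from the repo layout:

```python
from sequential.decorators import before, after   # assumed import path

def announce(*args, **kwargs):
    print('about to greet with', args, kwargs)

def celebrate(*args, **kwargs):
    print('greeting finished')

@before(announce)      # announce(name) runs first, then greet(name)
@after(celebrate)      # greet(name) runs, then celebrate(name); greet's result is kept
def greet(name):
    return 'hello %s' % name

print(greet('world'))  # two log lines, then: hello world
```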
243,331
gsamokovarov/frames.py
frames/__init__.py
Frame.current_frame
def current_frame(raw=False): ''' Gives the current execution frame. :returns: The current execution frame that is actually executing this. ''' # `import sys` is important here, because the `sys` module is special # and we will end up with the class frame instead of the `current` one. if NATIVE: import sys frame = sys._getframe() else: frame = _getframe() frame = frame.f_back if not raw: frame = Frame(frame) return frame
python
def current_frame(raw=False): ''' Gives the current execution frame. :returns: The current execution frame that is actually executing this. ''' # `import sys` is important here, because the `sys` module is special # and we will end up with the class frame instead of the `current` one. if NATIVE: import sys frame = sys._getframe() else: frame = _getframe() frame = frame.f_back if not raw: frame = Frame(frame) return frame
[ "def", "current_frame", "(", "raw", "=", "False", ")", ":", "# `import sys` is important here, because the `sys` module is special", "# and we will end up with the class frame instead of the `current` one.", "if", "NATIVE", ":", "import", "sys", "frame", "=", "sys", ".", "_getframe", "(", ")", "else", ":", "frame", "=", "_getframe", "(", ")", "frame", "=", "frame", ".", "f_back", "if", "not", "raw", ":", "frame", "=", "Frame", "(", "frame", ")", "return", "frame" ]
Gives the current execution frame. :returns: The current execution frame that is actually executing this.
[ "Gives", "the", "current", "execution", "frame", "." ]
ba43782d043691fb5a388a1e749e0f0edb68a3d7
https://github.com/gsamokovarov/frames.py/blob/ba43782d043691fb5a388a1e749e0f0edb68a3d7/frames/__init__.py#L47-L70
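The core trick in current_frame is sys._getframe() plus one f_back hop, so the caller gets its own frame rather than the helper's. The same mechanics using only the standard library:

```python
import sys

def whoami():
    frame = sys._getframe()        # the frame executing whoami itself
    caller = frame.f_back          # one hop up: the frame that called whoami
    return frame.f_code.co_name, caller.f_code.co_name

def outer():
    return whoami()

print(outer())   # ('whoami', 'outer')
```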
243,332
gsamokovarov/frames.py
frames/__init__.py
Frame.locate
def locate(callback, root_frame=None, include_root=False, raw=False): ''' Locates a frame by criteria. :param callback: One argument function to check the frame against. The frame we are curretly on, is given as that argument. :param root_frame: The root frame to start the search from. Can be a callback taking no arguments. :param include_root: `True` if the search should start from the `root_frame` or the one beneath it. Defaults to `False`. :param raw: whether to use raw frames or wrap them in our own object. Defaults to `False`. :raises RuntimeError: When no matching frame is found. :returns: The first frame which responds to the `callback`. ''' def get_from(maybe_callable): if callable(maybe_callable): return maybe_callable() return maybe_callable # Creates new frames, whether raw or not. new = lambda frame: frame if raw else Frame(frame) current_frame = get_from(root_frame or Frame.current_frame(raw=True)) current_frame = new(current_frame) if not include_root: current_frame = new(current_frame.f_back) # The search will stop, because at some point the frame will be falsy. while current_frame: found = callback(current_frame) if found: return current_frame current_frame = new(current_frame.f_back) raise Frame.NotFound('No matching frame found')
python
def locate(callback, root_frame=None, include_root=False, raw=False): ''' Locates a frame by criteria. :param callback: One argument function to check the frame against. The frame we are curretly on, is given as that argument. :param root_frame: The root frame to start the search from. Can be a callback taking no arguments. :param include_root: `True` if the search should start from the `root_frame` or the one beneath it. Defaults to `False`. :param raw: whether to use raw frames or wrap them in our own object. Defaults to `False`. :raises RuntimeError: When no matching frame is found. :returns: The first frame which responds to the `callback`. ''' def get_from(maybe_callable): if callable(maybe_callable): return maybe_callable() return maybe_callable # Creates new frames, whether raw or not. new = lambda frame: frame if raw else Frame(frame) current_frame = get_from(root_frame or Frame.current_frame(raw=True)) current_frame = new(current_frame) if not include_root: current_frame = new(current_frame.f_back) # The search will stop, because at some point the frame will be falsy. while current_frame: found = callback(current_frame) if found: return current_frame current_frame = new(current_frame.f_back) raise Frame.NotFound('No matching frame found')
[ "def", "locate", "(", "callback", ",", "root_frame", "=", "None", ",", "include_root", "=", "False", ",", "raw", "=", "False", ")", ":", "def", "get_from", "(", "maybe_callable", ")", ":", "if", "callable", "(", "maybe_callable", ")", ":", "return", "maybe_callable", "(", ")", "return", "maybe_callable", "# Creates new frames, whether raw or not.", "new", "=", "lambda", "frame", ":", "frame", "if", "raw", "else", "Frame", "(", "frame", ")", "current_frame", "=", "get_from", "(", "root_frame", "or", "Frame", ".", "current_frame", "(", "raw", "=", "True", ")", ")", "current_frame", "=", "new", "(", "current_frame", ")", "if", "not", "include_root", ":", "current_frame", "=", "new", "(", "current_frame", ".", "f_back", ")", "# The search will stop, because at some point the frame will be falsy.", "while", "current_frame", ":", "found", "=", "callback", "(", "current_frame", ")", "if", "found", ":", "return", "current_frame", "current_frame", "=", "new", "(", "current_frame", ".", "f_back", ")", "raise", "Frame", ".", "NotFound", "(", "'No matching frame found'", ")" ]
Locates a frame by criteria. :param callback: One argument function to check the frame against. The frame we are curretly on, is given as that argument. :param root_frame: The root frame to start the search from. Can be a callback taking no arguments. :param include_root: `True` if the search should start from the `root_frame` or the one beneath it. Defaults to `False`. :param raw: whether to use raw frames or wrap them in our own object. Defaults to `False`. :raises RuntimeError: When no matching frame is found. :returns: The first frame which responds to the `callback`.
[ "Locates", "a", "frame", "by", "criteria", "." ]
ba43782d043691fb5a388a1e749e0f0edb68a3d7
https://github.com/gsamokovarov/frames.py/blob/ba43782d043691fb5a388a1e749e0f0edb68a3d7/frames/__init__.py#L73-L119
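A hedged usage sketch for locate, assuming Frame is importable from the frames package (its code lives in frames/__init__.py) and that wrapped frames proxy raw-frame attributes such as f_code, as the f_back access inside locate suggests:

```python
from frames import Frame   # assumed import

def outer():
    return inner()

def inner():
    # Walk up the call stack until the frame running `outer` is found.
    return Frame.locate(lambda f: f.f_code.co_name == 'outer')

frame = outer()
print(frame.f_code.co_name)   # outer
```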
243,333
toastdriven/alligator
alligator/tasks.py
Task.to_call
def to_call(self, func, *args, **kwargs): """ Sets the function & its arguments to be called when the task is processed. Ex:: task.to_call(my_function, 1, 'c', another=True) :param func: The callable with business logic to execute :type func: callable :param args: Positional arguments to pass to the callable task :type args: list :param kwargs: Keyword arguments to pass to the callable task :type kwargs: dict """ self.func = func self.func_args = args self.func_kwargs = kwargs
python
def to_call(self, func, *args, **kwargs): """ Sets the function & its arguments to be called when the task is processed. Ex:: task.to_call(my_function, 1, 'c', another=True) :param func: The callable with business logic to execute :type func: callable :param args: Positional arguments to pass to the callable task :type args: list :param kwargs: Keyword arguments to pass to the callable task :type kwargs: dict """ self.func = func self.func_args = args self.func_kwargs = kwargs
[ "def", "to_call", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "func", "=", "func", "self", ".", "func_args", "=", "args", "self", ".", "func_kwargs", "=", "kwargs" ]
Sets the function & its arguments to be called when the task is processed. Ex:: task.to_call(my_function, 1, 'c', another=True) :param func: The callable with business logic to execute :type func: callable :param args: Positional arguments to pass to the callable task :type args: list :param kwargs: Keyword arguments to pass to the callable task :type kwargs: dict
[ "Sets", "the", "function", "&", "its", "arguments", "to", "be", "called", "when", "the", "task", "is", "processed", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/tasks.py#L73-L93
243,334
toastdriven/alligator
alligator/tasks.py
Task.serialize
def serialize(self): """ Serializes the ``Task`` data for storing in the queue. All data must be JSON-serializable in order to be stored properly. :returns: A JSON strong of the task data. """ data = { 'task_id': self.task_id, 'retries': self.retries, 'async': self.async, 'module': determine_module(self.func), 'callable': determine_name(self.func), 'args': self.func_args, 'kwargs': self.func_kwargs, 'options': {}, } if self.on_start: data['options']['on_start'] = { 'module': determine_module(self.on_start), 'callable': determine_name(self.on_start), } if self.on_success: data['options']['on_success'] = { 'module': determine_module(self.on_success), 'callable': determine_name(self.on_success), } if self.on_error: data['options']['on_error'] = { 'module': determine_module(self.on_error), 'callable': determine_name(self.on_error), } return json.dumps(data)
python
def serialize(self): """ Serializes the ``Task`` data for storing in the queue. All data must be JSON-serializable in order to be stored properly. :returns: A JSON strong of the task data. """ data = { 'task_id': self.task_id, 'retries': self.retries, 'async': self.async, 'module': determine_module(self.func), 'callable': determine_name(self.func), 'args': self.func_args, 'kwargs': self.func_kwargs, 'options': {}, } if self.on_start: data['options']['on_start'] = { 'module': determine_module(self.on_start), 'callable': determine_name(self.on_start), } if self.on_success: data['options']['on_success'] = { 'module': determine_module(self.on_success), 'callable': determine_name(self.on_success), } if self.on_error: data['options']['on_error'] = { 'module': determine_module(self.on_error), 'callable': determine_name(self.on_error), } return json.dumps(data)
[ "def", "serialize", "(", "self", ")", ":", "data", "=", "{", "'task_id'", ":", "self", ".", "task_id", ",", "'retries'", ":", "self", ".", "retries", ",", "'async'", ":", "self", ".", "async", ",", "'module'", ":", "determine_module", "(", "self", ".", "func", ")", ",", "'callable'", ":", "determine_name", "(", "self", ".", "func", ")", ",", "'args'", ":", "self", ".", "func_args", ",", "'kwargs'", ":", "self", ".", "func_kwargs", ",", "'options'", ":", "{", "}", ",", "}", "if", "self", ".", "on_start", ":", "data", "[", "'options'", "]", "[", "'on_start'", "]", "=", "{", "'module'", ":", "determine_module", "(", "self", ".", "on_start", ")", ",", "'callable'", ":", "determine_name", "(", "self", ".", "on_start", ")", ",", "}", "if", "self", ".", "on_success", ":", "data", "[", "'options'", "]", "[", "'on_success'", "]", "=", "{", "'module'", ":", "determine_module", "(", "self", ".", "on_success", ")", ",", "'callable'", ":", "determine_name", "(", "self", ".", "on_success", ")", ",", "}", "if", "self", ".", "on_error", ":", "data", "[", "'options'", "]", "[", "'on_error'", "]", "=", "{", "'module'", ":", "determine_module", "(", "self", ".", "on_error", ")", ",", "'callable'", ":", "determine_name", "(", "self", ".", "on_error", ")", ",", "}", "return", "json", ".", "dumps", "(", "data", ")" ]
Serializes the ``Task`` data for storing in the queue. All data must be JSON-serializable in order to be stored properly. :returns: A JSON strong of the task data.
[ "Serializes", "the", "Task", "data", "for", "storing", "in", "the", "queue", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/tasks.py#L140-L177
243,335
toastdriven/alligator
alligator/tasks.py
Task.deserialize
def deserialize(cls, data): """ Given some data from the queue, deserializes it into a ``Task`` instance. The data must be similar in format to what comes from ``Task.serialize`` (a JSON-serialized dictionary). Required keys are ``task_id``, ``retries`` & ``async``. :param data: A JSON-serialized string of the task data :type data: string :returns: A populated task :rtype: A ``Task`` instance """ data = json.loads(data) options = data.get('options', {}) task = cls( task_id=data['task_id'], retries=data['retries'], async=data['async'] ) func = import_attr(data['module'], data['callable']) task.to_call(func, *data.get('args', []), **data.get('kwargs', {})) if options.get('on_start'): task.on_start = import_attr( options['on_start']['module'], options['on_start']['callable'] ) if options.get('on_success'): task.on_success = import_attr( options['on_success']['module'], options['on_success']['callable'] ) if options.get('on_error'): task.on_error = import_attr( options['on_error']['module'], options['on_error']['callable'] ) return task
python
def deserialize(cls, data): """ Given some data from the queue, deserializes it into a ``Task`` instance. The data must be similar in format to what comes from ``Task.serialize`` (a JSON-serialized dictionary). Required keys are ``task_id``, ``retries`` & ``async``. :param data: A JSON-serialized string of the task data :type data: string :returns: A populated task :rtype: A ``Task`` instance """ data = json.loads(data) options = data.get('options', {}) task = cls( task_id=data['task_id'], retries=data['retries'], async=data['async'] ) func = import_attr(data['module'], data['callable']) task.to_call(func, *data.get('args', []), **data.get('kwargs', {})) if options.get('on_start'): task.on_start = import_attr( options['on_start']['module'], options['on_start']['callable'] ) if options.get('on_success'): task.on_success = import_attr( options['on_success']['module'], options['on_success']['callable'] ) if options.get('on_error'): task.on_error = import_attr( options['on_error']['module'], options['on_error']['callable'] ) return task
[ "def", "deserialize", "(", "cls", ",", "data", ")", ":", "data", "=", "json", ".", "loads", "(", "data", ")", "options", "=", "data", ".", "get", "(", "'options'", ",", "{", "}", ")", "task", "=", "cls", "(", "task_id", "=", "data", "[", "'task_id'", "]", ",", "retries", "=", "data", "[", "'retries'", "]", ",", "async", "=", "data", "[", "'async'", "]", ")", "func", "=", "import_attr", "(", "data", "[", "'module'", "]", ",", "data", "[", "'callable'", "]", ")", "task", ".", "to_call", "(", "func", ",", "*", "data", ".", "get", "(", "'args'", ",", "[", "]", ")", ",", "*", "*", "data", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "if", "options", ".", "get", "(", "'on_start'", ")", ":", "task", ".", "on_start", "=", "import_attr", "(", "options", "[", "'on_start'", "]", "[", "'module'", "]", ",", "options", "[", "'on_start'", "]", "[", "'callable'", "]", ")", "if", "options", ".", "get", "(", "'on_success'", ")", ":", "task", ".", "on_success", "=", "import_attr", "(", "options", "[", "'on_success'", "]", "[", "'module'", "]", ",", "options", "[", "'on_success'", "]", "[", "'callable'", "]", ")", "if", "options", ".", "get", "(", "'on_error'", ")", ":", "task", ".", "on_error", "=", "import_attr", "(", "options", "[", "'on_error'", "]", "[", "'module'", "]", ",", "options", "[", "'on_error'", "]", "[", "'callable'", "]", ")", "return", "task" ]
Given some data from the queue, deserializes it into a ``Task`` instance. The data must be similar in format to what comes from ``Task.serialize`` (a JSON-serialized dictionary). Required keys are ``task_id``, ``retries`` & ``async``. :param data: A JSON-serialized string of the task data :type data: string :returns: A populated task :rtype: A ``Task`` instance
[ "Given", "some", "data", "from", "the", "queue", "deserializes", "it", "into", "a", "Task", "instance", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/tasks.py#L180-L225
243,336
toastdriven/alligator
alligator/tasks.py
Task.run
def run(self): """ Runs the task. This fires the ``on_start`` hook function first (if present), passing the task itself. Then it runs the target function supplied via ``Task.to_call`` with its arguments & stores the result. If the target function succeeded, the ``on_success`` hook function is called, passing both the task & the result to it. If the target function failed (threw an exception), the ``on_error`` hook function is called, passing both the task & the exception to it. Then the exception is re-raised. Finally, the result is returned. """ if self.on_start: self.on_start(self) try: result = self.func(*self.func_args, **self.func_kwargs) except Exception as err: self.to_failed() if self.on_error: self.on_error(self, err) raise self.to_success() if self.on_success: self.on_success(self, result) return result
python
def run(self): """ Runs the task. This fires the ``on_start`` hook function first (if present), passing the task itself. Then it runs the target function supplied via ``Task.to_call`` with its arguments & stores the result. If the target function succeeded, the ``on_success`` hook function is called, passing both the task & the result to it. If the target function failed (threw an exception), the ``on_error`` hook function is called, passing both the task & the exception to it. Then the exception is re-raised. Finally, the result is returned. """ if self.on_start: self.on_start(self) try: result = self.func(*self.func_args, **self.func_kwargs) except Exception as err: self.to_failed() if self.on_error: self.on_error(self, err) raise self.to_success() if self.on_success: self.on_success(self, result) return result
[ "def", "run", "(", "self", ")", ":", "if", "self", ".", "on_start", ":", "self", ".", "on_start", "(", "self", ")", "try", ":", "result", "=", "self", ".", "func", "(", "*", "self", ".", "func_args", ",", "*", "*", "self", ".", "func_kwargs", ")", "except", "Exception", "as", "err", ":", "self", ".", "to_failed", "(", ")", "if", "self", ".", "on_error", ":", "self", ".", "on_error", "(", "self", ",", "err", ")", "raise", "self", ".", "to_success", "(", ")", "if", "self", ".", "on_success", ":", "self", ".", "on_success", "(", "self", ",", "result", ")", "return", "result" ]
Runs the task. This fires the ``on_start`` hook function first (if present), passing the task itself. Then it runs the target function supplied via ``Task.to_call`` with its arguments & stores the result. If the target function succeeded, the ``on_success`` hook function is called, passing both the task & the result to it. If the target function failed (threw an exception), the ``on_error`` hook function is called, passing both the task & the exception to it. Then the exception is re-raised. Finally, the result is returned.
[ "Runs", "the", "task", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/tasks.py#L227-L264
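The control flow in run (on_start, the call, on_error plus a re-raise on failure, on_success with the result otherwise) can be mirrored in a few standalone lines. This is a simplified illustration of that flow, not alligator's API:

```python
def run_with_hooks(func, args=(), kwargs=None,
                   on_start=None, on_success=None, on_error=None):
    kwargs = kwargs or {}
    if on_start:
        on_start(func)                  # fired before any work happens
    try:
        result = func(*args, **kwargs)
    except Exception as err:
        if on_error:
            on_error(func, err)         # observe the failure...
        raise                           # ...then re-raise, just like Task.run
    if on_success:
        on_success(func, result)        # fired with the successful result
    return result

print(run_with_hooks(sum, args=([1, 2, 3],),
                     on_success=lambda f, r: print('succeeded with', r)))
```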
243,337
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCcompound.py
_check_typecode_list
def _check_typecode_list(ofwhat, tcname): '''Check a list of typecodes for compliance with Struct requirements.''' for o in ofwhat: if callable(o): #skip if _Mirage continue if not isinstance(o, TypeCode): raise TypeError( tcname + ' ofwhat outside the TypeCode hierarchy, ' + str(o.__class__)) if o.pname is None and not isinstance(o, AnyElement): raise TypeError(tcname + ' element ' + str(o) + ' has no name')
python
def _check_typecode_list(ofwhat, tcname): '''Check a list of typecodes for compliance with Struct requirements.''' for o in ofwhat: if callable(o): #skip if _Mirage continue if not isinstance(o, TypeCode): raise TypeError( tcname + ' ofwhat outside the TypeCode hierarchy, ' + str(o.__class__)) if o.pname is None and not isinstance(o, AnyElement): raise TypeError(tcname + ' element ' + str(o) + ' has no name')
[ "def", "_check_typecode_list", "(", "ofwhat", ",", "tcname", ")", ":", "for", "o", "in", "ofwhat", ":", "if", "callable", "(", "o", ")", ":", "#skip if _Mirage", "continue", "if", "not", "isinstance", "(", "o", ",", "TypeCode", ")", ":", "raise", "TypeError", "(", "tcname", "+", "' ofwhat outside the TypeCode hierarchy, '", "+", "str", "(", "o", ".", "__class__", ")", ")", "if", "o", ".", "pname", "is", "None", "and", "not", "isinstance", "(", "o", ",", "AnyElement", ")", ":", "raise", "TypeError", "(", "tcname", "+", "' element '", "+", "str", "(", "o", ")", "+", "' has no name'", ")" ]
Check a list of typecodes for compliance with Struct requirements.
[ "Check", "a", "list", "of", "typecodes", "for", "compliance", "with", "Struct", "requirements", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCcompound.py#L30-L41
243,338
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCcompound.py
_get_type_or_substitute
def _get_type_or_substitute(typecode, pyobj, sw, elt): '''return typecode or substitute type for wildcard or derived type. For serialization only. ''' sub = getattr(pyobj, 'typecode', typecode) if sub is typecode or sub is None: return typecode # Element WildCard if isinstance(typecode, AnyElement): return sub # Global Element Declaration if isinstance(sub, ElementDeclaration): if (typecode.nspname,typecode.pname) == (sub.nspname,sub.pname): raise TypeError(\ 'bad usage, failed to serialize element reference (%s, %s), in: %s' % (typecode.nspname, typecode.pname, sw.Backtrace(elt),)) # check substitutionGroup if _is_substitute_element(typecode, sub): return sub raise TypeError(\ 'failed to serialize (%s, %s) illegal sub GED (%s,%s): %s' % (typecode.nspname, typecode.pname, sub.nspname, sub.pname, sw.Backtrace(elt),)) # Local Element if not isinstance(typecode, AnyType) and not isinstance(sub, typecode.__class__): raise TypeError(\ 'failed to serialize substitute %s for %s, not derivation: %s' % (sub, typecode, sw.Backtrace(elt),)) # Make our substitution type match the elements facets, # since typecode is created for a single existing pyobj # some facets are irrelevant. sub = _copy(sub) sub.nspname = typecode.nspname sub.pname = typecode.pname sub.aname = typecode.aname sub.minOccurs = sub.maxOccurs = 1 return sub
python
def _get_type_or_substitute(typecode, pyobj, sw, elt): '''return typecode or substitute type for wildcard or derived type. For serialization only. ''' sub = getattr(pyobj, 'typecode', typecode) if sub is typecode or sub is None: return typecode # Element WildCard if isinstance(typecode, AnyElement): return sub # Global Element Declaration if isinstance(sub, ElementDeclaration): if (typecode.nspname,typecode.pname) == (sub.nspname,sub.pname): raise TypeError(\ 'bad usage, failed to serialize element reference (%s, %s), in: %s' % (typecode.nspname, typecode.pname, sw.Backtrace(elt),)) # check substitutionGroup if _is_substitute_element(typecode, sub): return sub raise TypeError(\ 'failed to serialize (%s, %s) illegal sub GED (%s,%s): %s' % (typecode.nspname, typecode.pname, sub.nspname, sub.pname, sw.Backtrace(elt),)) # Local Element if not isinstance(typecode, AnyType) and not isinstance(sub, typecode.__class__): raise TypeError(\ 'failed to serialize substitute %s for %s, not derivation: %s' % (sub, typecode, sw.Backtrace(elt),)) # Make our substitution type match the elements facets, # since typecode is created for a single existing pyobj # some facets are irrelevant. sub = _copy(sub) sub.nspname = typecode.nspname sub.pname = typecode.pname sub.aname = typecode.aname sub.minOccurs = sub.maxOccurs = 1 return sub
[ "def", "_get_type_or_substitute", "(", "typecode", ",", "pyobj", ",", "sw", ",", "elt", ")", ":", "sub", "=", "getattr", "(", "pyobj", ",", "'typecode'", ",", "typecode", ")", "if", "sub", "is", "typecode", "or", "sub", "is", "None", ":", "return", "typecode", "# Element WildCard", "if", "isinstance", "(", "typecode", ",", "AnyElement", ")", ":", "return", "sub", "# Global Element Declaration", "if", "isinstance", "(", "sub", ",", "ElementDeclaration", ")", ":", "if", "(", "typecode", ".", "nspname", ",", "typecode", ".", "pname", ")", "==", "(", "sub", ".", "nspname", ",", "sub", ".", "pname", ")", ":", "raise", "TypeError", "(", "'bad usage, failed to serialize element reference (%s, %s), in: %s'", "%", "(", "typecode", ".", "nspname", ",", "typecode", ".", "pname", ",", "sw", ".", "Backtrace", "(", "elt", ")", ",", ")", ")", "# check substitutionGroup ", "if", "_is_substitute_element", "(", "typecode", ",", "sub", ")", ":", "return", "sub", "raise", "TypeError", "(", "'failed to serialize (%s, %s) illegal sub GED (%s,%s): %s'", "%", "(", "typecode", ".", "nspname", ",", "typecode", ".", "pname", ",", "sub", ".", "nspname", ",", "sub", ".", "pname", ",", "sw", ".", "Backtrace", "(", "elt", ")", ",", ")", ")", "# Local Element", "if", "not", "isinstance", "(", "typecode", ",", "AnyType", ")", "and", "not", "isinstance", "(", "sub", ",", "typecode", ".", "__class__", ")", ":", "raise", "TypeError", "(", "'failed to serialize substitute %s for %s, not derivation: %s'", "%", "(", "sub", ",", "typecode", ",", "sw", ".", "Backtrace", "(", "elt", ")", ",", ")", ")", "# Make our substitution type match the elements facets,", "# since typecode is created for a single existing pyobj", "# some facets are irrelevant.", "sub", "=", "_copy", "(", "sub", ")", "sub", ".", "nspname", "=", "typecode", ".", "nspname", "sub", ".", "pname", "=", "typecode", ".", "pname", "sub", ".", "aname", "=", "typecode", ".", "aname", "sub", ".", "minOccurs", "=", "sub", ".", "maxOccurs", "=", "1", "return", "sub" ]
return typecode or substitute type for wildcard or derived type. For serialization only.
[ "return", "typecode", "or", "substitute", "type", "for", "wildcard", "or", "derived", "type", ".", "For", "serialization", "only", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCcompound.py#L44-L86
243,339
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCcompound.py
ComplexType.setDerivedTypeContents
def setDerivedTypeContents(self, extensions=None, restrictions=None): """For derived types set appropriate parameter and """ if extensions: ofwhat = list(self.ofwhat) if type(extensions) in _seqtypes: ofwhat += list(extensions) else: ofwhat.append(extensions) elif restrictions: if type(restrictions) in _seqtypes: ofwhat = restrictions else: ofwhat = (restrictions,) else: return self.ofwhat = tuple(ofwhat) self.lenofwhat = len(self.ofwhat)
python
def setDerivedTypeContents(self, extensions=None, restrictions=None): """For derived types set appropriate parameter and """ if extensions: ofwhat = list(self.ofwhat) if type(extensions) in _seqtypes: ofwhat += list(extensions) else: ofwhat.append(extensions) elif restrictions: if type(restrictions) in _seqtypes: ofwhat = restrictions else: ofwhat = (restrictions,) else: return self.ofwhat = tuple(ofwhat) self.lenofwhat = len(self.ofwhat)
[ "def", "setDerivedTypeContents", "(", "self", ",", "extensions", "=", "None", ",", "restrictions", "=", "None", ")", ":", "if", "extensions", ":", "ofwhat", "=", "list", "(", "self", ".", "ofwhat", ")", "if", "type", "(", "extensions", ")", "in", "_seqtypes", ":", "ofwhat", "+=", "list", "(", "extensions", ")", "else", ":", "ofwhat", ".", "append", "(", "extensions", ")", "elif", "restrictions", ":", "if", "type", "(", "restrictions", ")", "in", "_seqtypes", ":", "ofwhat", "=", "restrictions", "else", ":", "ofwhat", "=", "(", "restrictions", ",", ")", "else", ":", "return", "self", ".", "ofwhat", "=", "tuple", "(", "ofwhat", ")", "self", ".", "lenofwhat", "=", "len", "(", "self", ".", "ofwhat", ")" ]
For derived types set appropriate parameter and
[ "For", "derived", "types", "set", "appropriate", "parameter", "and" ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCcompound.py#L440-L457
243,340
evfredericksen/pynacea
pynhost/pynhost/commands.py
ActionList.contains_non_repeat_actions
def contains_non_repeat_actions(self): ''' Because repeating repeat actions can get ugly real fast ''' for action in self.actions: if not isinstance(action, (int, dynamic.RepeatCommand)): return True return False
python
def contains_non_repeat_actions(self): ''' Because repeating repeat actions can get ugly real fast ''' for action in self.actions: if not isinstance(action, (int, dynamic.RepeatCommand)): return True return False
[ "def", "contains_non_repeat_actions", "(", "self", ")", ":", "for", "action", "in", "self", ".", "actions", ":", "if", "not", "isinstance", "(", "action", ",", "(", "int", ",", "dynamic", ".", "RepeatCommand", ")", ")", ":", "return", "True", "return", "False" ]
Because repeating repeat actions can get ugly real fast
[ "Because", "repeating", "repeat", "actions", "can", "get", "ugly", "real", "fast" ]
63ee0e6695209048bf2571aa2c3770f502e29b0a
https://github.com/evfredericksen/pynacea/blob/63ee0e6695209048bf2571aa2c3770f502e29b0a/pynhost/pynhost/commands.py#L67-L74
243,341
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/validator.py
_all_correct_list
def _all_correct_list(array): """ Make sure, that all items in `array` has good type and size. Args: array (list): Array of python types. Returns: True/False """ if type(array) not in _ITERABLE_TYPES: return False for item in array: if not type(item) in _ITERABLE_TYPES: return False if len(item) != 2: return False return True
python
def _all_correct_list(array): """ Make sure, that all items in `array` has good type and size. Args: array (list): Array of python types. Returns: True/False """ if type(array) not in _ITERABLE_TYPES: return False for item in array: if not type(item) in _ITERABLE_TYPES: return False if len(item) != 2: return False return True
[ "def", "_all_correct_list", "(", "array", ")", ":", "if", "type", "(", "array", ")", "not", "in", "_ITERABLE_TYPES", ":", "return", "False", "for", "item", "in", "array", ":", "if", "not", "type", "(", "item", ")", "in", "_ITERABLE_TYPES", ":", "return", "False", "if", "len", "(", "item", ")", "!=", "2", ":", "return", "False", "return", "True" ]
Make sure, that all items in `array` has good type and size. Args: array (list): Array of python types. Returns: True/False
[ "Make", "sure", "that", "all", "items", "in", "array", "has", "good", "type", "and", "size", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/validator.py#L220-L240
243,342
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/validator.py
_convert_to_dict
def _convert_to_dict(data): """ Convert `data` to dictionary. Tries to get sense in multidimensional arrays. Args: data: List/dict/tuple of variable dimension. Returns: dict: If the data can be converted to dictionary. Raises: MetaParsingException: When the data are unconvertible to dict. """ if isinstance(data, dict): return data if isinstance(data, list) or isinstance(data, tuple): if _all_correct_list(data): return dict(data) else: data = zip(data[::2], data[1::2]) return dict(data) else: raise MetaParsingException( "Can't decode provided metadata - unknown structure." )
python
def _convert_to_dict(data): """ Convert `data` to dictionary. Tries to get sense in multidimensional arrays. Args: data: List/dict/tuple of variable dimension. Returns: dict: If the data can be converted to dictionary. Raises: MetaParsingException: When the data are unconvertible to dict. """ if isinstance(data, dict): return data if isinstance(data, list) or isinstance(data, tuple): if _all_correct_list(data): return dict(data) else: data = zip(data[::2], data[1::2]) return dict(data) else: raise MetaParsingException( "Can't decode provided metadata - unknown structure." )
[ "def", "_convert_to_dict", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "dict", ")", ":", "return", "data", "if", "isinstance", "(", "data", ",", "list", ")", "or", "isinstance", "(", "data", ",", "tuple", ")", ":", "if", "_all_correct_list", "(", "data", ")", ":", "return", "dict", "(", "data", ")", "else", ":", "data", "=", "zip", "(", "data", "[", ":", ":", "2", "]", ",", "data", "[", "1", ":", ":", "2", "]", ")", "return", "dict", "(", "data", ")", "else", ":", "raise", "MetaParsingException", "(", "\"Can't decode provided metadata - unknown structure.\"", ")" ]
Convert `data` to dictionary. Tries to get sense in multidimensional arrays. Args: data: List/dict/tuple of variable dimension. Returns: dict: If the data can be converted to dictionary. Raises: MetaParsingException: When the data are unconvertible to dict.
[ "Convert", "data", "to", "dictionary", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/validator.py#L243-L270
243,343
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/validator.py
check_structure
def check_structure(data): """ Check whether the structure is a flat dictionary. If not, try to convert it to a dictionary. Args: data: Whatever data you have (dict/tuple/list). Returns: dict: When the conversion was successful or `data` was already `good`. Raises: MetaParsingException: When the data couldn't be converted or had `bad` structure. """ if not isinstance(data, dict): try: data = _convert_to_dict(data) except MetaParsingException: raise except: raise MetaParsingException( "Metadata format has invalid structure (dict is expected)." ) for key, val in data.iteritems(): if type(key) not in _ALLOWED_TYPES: raise MetaParsingException( "Can't decode the meta file - invalid type of keyword '" + str(key) + "'!" ) if type(val) not in _ALLOWED_TYPES: raise MetaParsingException( "Can't decode the meta file - invalid type of keyword '" + str(key) + "'!" ) return data
python
def check_structure(data): """ Check whether the structure is a flat dictionary. If not, try to convert it to a dictionary. Args: data: Whatever data you have (dict/tuple/list). Returns: dict: When the conversion was successful or `data` was already `good`. Raises: MetaParsingException: When the data couldn't be converted or had `bad` structure. """ if not isinstance(data, dict): try: data = _convert_to_dict(data) except MetaParsingException: raise except: raise MetaParsingException( "Metadata format has invalid structure (dict is expected)." ) for key, val in data.iteritems(): if type(key) not in _ALLOWED_TYPES: raise MetaParsingException( "Can't decode the meta file - invalid type of keyword '" + str(key) + "'!" ) if type(val) not in _ALLOWED_TYPES: raise MetaParsingException( "Can't decode the meta file - invalid type of keyword '" + str(key) + "'!" ) return data
[ "def", "check_structure", "(", "data", ")", ":", "if", "not", "isinstance", "(", "data", ",", "dict", ")", ":", "try", ":", "data", "=", "_convert_to_dict", "(", "data", ")", "except", "MetaParsingException", ":", "raise", "except", ":", "raise", "MetaParsingException", "(", "\"Metadata format has invalid strucure (dict is expected).\"", ")", "for", "key", ",", "val", "in", "data", ".", "iteritems", "(", ")", ":", "if", "type", "(", "key", ")", "not", "in", "_ALLOWED_TYPES", ":", "raise", "MetaParsingException", "(", "\"Can't decode the meta file - invalid type of keyword '\"", "+", "str", "(", "key", ")", "+", "\"'!\"", ")", "if", "type", "(", "val", ")", "not", "in", "_ALLOWED_TYPES", ":", "raise", "MetaParsingException", "(", "\"Can't decode the meta file - invalid type of keyword '\"", "+", "str", "(", "key", ")", "+", "\"'!\"", ")", "return", "data" ]
Check whether the structure is a flat dictionary. If not, try to convert it to a dictionary. Args: data: Whatever data you have (dict/tuple/list). Returns: dict: When the conversion was successful or `data` was already `good`. Raises: MetaParsingException: When the data couldn't be converted or had `bad` structure.
[ "Check", "whether", "the", "structure", "is", "flat", "dictionary", ".", "If", "not", "try", "to", "convert", "it", "to", "dictionary", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/validator.py#L273-L312
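The two validator records above (_convert_to_dict and check_structure) turn loosely structured metadata into a flat dict. A minimal, stdlib-only Python 3 sketch of that pairing logic, with invented data, is:

# A flat sequence of alternating keys and values is zipped into pairs first.
flat = ["ISBN", "80-86056-31-7", "Title", "Example book"]
print(dict(zip(flat[::2], flat[1::2])))   # {'ISBN': '80-86056-31-7', 'Title': 'Example book'}

# A sequence that already consists of two-item pairs converts directly.
pairs = [("ISBN", "80-86056-31-7"), ("Title", "Example book")]
print(dict(pairs))                        # same result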
243,344
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/validator.py
Field._remove_accents
def _remove_accents(self, input_str): """ Convert unicode string to ASCII. Credit: http://stackoverflow.com/a/517974 """ nkfd_form = unicodedata.normalize('NFKD', input_str) return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
python
def _remove_accents(self, input_str): """ Convert unicode string to ASCII. Credit: http://stackoverflow.com/a/517974 """ nkfd_form = unicodedata.normalize('NFKD', input_str) return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
[ "def", "_remove_accents", "(", "self", ",", "input_str", ")", ":", "nkfd_form", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "input_str", ")", "return", "u\"\"", ".", "join", "(", "[", "c", "for", "c", "in", "nkfd_form", "if", "not", "unicodedata", ".", "combining", "(", "c", ")", "]", ")" ]
Convert unicode string to ASCII. Credit: http://stackoverflow.com/a/517974
[ "Convert", "unicode", "string", "to", "ASCII", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/validator.py#L111-L118
243,345
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/validator.py
FieldParser.process
def process(self, key, val): """ Try to look for `key` in all required and optional fields. If found, set the `val`. """ for field in self.fields: if field.check(key, val): return for field in self.optional: if field.check(key, val): return
python
def process(self, key, val): """ Try to look for `key` in all required and optional fields. If found, set the `val`. """ for field in self.fields: if field.check(key, val): return for field in self.optional: if field.check(key, val): return
[ "def", "process", "(", "self", ",", "key", ",", "val", ")", ":", "for", "field", "in", "self", ".", "fields", ":", "if", "field", ".", "check", "(", "key", ",", "val", ")", ":", "return", "for", "field", "in", "self", ".", "optional", ":", "if", "field", ".", "check", "(", "key", ",", "val", ")", ":", "return" ]
Try to look for `key` in all required and optional fields. If found, set the `val`.
[ "Try", "to", "look", "for", "key", "in", "all", "required", "and", "optional", "fields", ".", "If", "found", "set", "the", "val", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/validator.py#L158-L169
243,346
riccardocagnasso/useless
src/useless/common/structures.py
Structure.get_size
def get_size(cls): """ Total byte size of fields in this structure => total byte size of the structure on the file """ return sum([getattr(cls, name).length for name in cls.get_fields_names()])
python
def get_size(cls): """ Total byte size of fields in this structure => total byte size of the structure on the file """ return sum([getattr(cls, name).length for name in cls.get_fields_names()])
[ "def", "get_size", "(", "cls", ")", ":", "return", "sum", "(", "[", "getattr", "(", "cls", ",", "name", ")", ".", "length", "for", "name", "in", "cls", ".", "get_fields_names", "(", ")", "]", ")" ]
Total byte size of fields in this structure => total byte size of the structure on the file
[ "Total", "byte", "size", "of", "fields", "in", "this", "structure", "=", ">", "total", "byte", "size", "of", "the", "structure", "on", "the", "file" ]
5167aab82958f653148e3689c9a7e548d4fa2cba
https://github.com/riccardocagnasso/useless/blob/5167aab82958f653148e3689c9a7e548d4fa2cba/src/useless/common/structures.py#L79-L85
243,347
Aperture-py/aperture-lib
aperturelib/__init__.py
format_image
def format_image(path, options): '''Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options. ''' image = Image.open(path) image_pipeline_results = __pipeline_image(image, options) return image_pipeline_results
python
def format_image(path, options): '''Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options. ''' image = Image.open(path) image_pipeline_results = __pipeline_image(image, options) return image_pipeline_results
[ "def", "format_image", "(", "path", ",", "options", ")", ":", "image", "=", "Image", ".", "open", "(", "path", ")", "image_pipeline_results", "=", "__pipeline_image", "(", "image", ",", "options", ")", "return", "image_pipeline_results" ]
Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options.
[ "Formats", "an", "image", "." ]
5c54af216319f297ddf96181a16f088cf1ba23f3
https://github.com/Aperture-py/aperture-lib/blob/5c54af216319f297ddf96181a16f088cf1ba23f3/aperturelib/__init__.py#L26-L39
243,348
nefarioustim/parker
parker/redisset.py
get_instance
def get_instance(key, expire=None): """Return an instance of RedisSet.""" global _instances try: instance = _instances[key] except KeyError: instance = RedisSet( key, _redis, expire=expire ) _instances[key] = instance return instance
python
def get_instance(key, expire=None): """Return an instance of RedisSet.""" global _instances try: instance = _instances[key] except KeyError: instance = RedisSet( key, _redis, expire=expire ) _instances[key] = instance return instance
[ "def", "get_instance", "(", "key", ",", "expire", "=", "None", ")", ":", "global", "_instances", "try", ":", "instance", "=", "_instances", "[", "key", "]", "except", "KeyError", ":", "instance", "=", "RedisSet", "(", "key", ",", "_redis", ",", "expire", "=", "expire", ")", "_instances", "[", "key", "]", "=", "instance", "return", "instance" ]
Return an instance of RedisSet.
[ "Return", "an", "instance", "of", "RedisSet", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/redisset.py#L14-L27
243,349
nefarioustim/parker
parker/redisset.py
RedisSet.add
def add(self, value): """Add value to set.""" added = self.redis.sadd( self.key, value ) if self.redis.scard(self.key) < 2: self.redis.expire(self.key, self.expire) return added
python
def add(self, value): """Add value to set.""" added = self.redis.sadd( self.key, value ) if self.redis.scard(self.key) < 2: self.redis.expire(self.key, self.expire) return added
[ "def", "add", "(", "self", ",", "value", ")", ":", "added", "=", "self", ".", "redis", ".", "sadd", "(", "self", ".", "key", ",", "value", ")", "if", "self", ".", "redis", ".", "scard", "(", "self", ".", "key", ")", "<", "2", ":", "self", ".", "redis", ".", "expire", "(", "self", ".", "key", ",", "self", ".", "expire", ")", "return", "added" ]
Add value to set.
[ "Add", "value", "to", "set", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/redisset.py#L51-L61
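A hedged usage sketch for the two parker.redisset records above; it assumes the package is importable under the path shown in the records and that the module-level Redis connection (_redis) has been configured elsewhere, which these records do not show.

from parker import redisset

# Reuse (or lazily create) the set bound to this key; expire is in seconds.
seen = redisset.get_instance("crawl:seen", expire=3600)

# add() returns the number of members actually added (0 if already present);
# the expiry is (re)applied while the set is still small.
if seen.add("http://example.com/page-1"):
    print("first time this URL has been seen")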
243,350
pybel/pybel-artifactory
src/pybel_artifactory/cli.py
write
def write(name, keyword, domain, citation, author, description, species, version, contact, licenses, values, functions, output, value_prefix): """Build a namespace from items.""" write_namespace( name, keyword, domain, author, citation, values, namespace_description=description, namespace_species=species, namespace_version=version, author_contact=contact, author_copyright=licenses, functions=functions, file=output, value_prefix=value_prefix )
python
def write(name, keyword, domain, citation, author, description, species, version, contact, licenses, values, functions, output, value_prefix): """Build a namespace from items.""" write_namespace( name, keyword, domain, author, citation, values, namespace_description=description, namespace_species=species, namespace_version=version, author_contact=contact, author_copyright=licenses, functions=functions, file=output, value_prefix=value_prefix )
[ "def", "write", "(", "name", ",", "keyword", ",", "domain", ",", "citation", ",", "author", ",", "description", ",", "species", ",", "version", ",", "contact", ",", "licenses", ",", "values", ",", "functions", ",", "output", ",", "value_prefix", ")", ":", "write_namespace", "(", "name", ",", "keyword", ",", "domain", ",", "author", ",", "citation", ",", "values", ",", "namespace_description", "=", "description", ",", "namespace_species", "=", "species", ",", "namespace_version", "=", "version", ",", "author_contact", "=", "contact", ",", "author_copyright", "=", "licenses", ",", "functions", "=", "functions", ",", "file", "=", "output", ",", "value_prefix", "=", "value_prefix", ")" ]
Build a namespace from items.
[ "Build", "a", "namespace", "from", "items", "." ]
720107780a59be2ef08885290dfa519b1da62871
https://github.com/pybel/pybel-artifactory/blob/720107780a59be2ef08885290dfa519b1da62871/src/pybel_artifactory/cli.py#L43-L56
243,351
pybel/pybel-artifactory
src/pybel_artifactory/cli.py
history
def history(namespace_module): """Hash all versions on Artifactory.""" for path in get_namespace_history(namespace_module): h = get_bel_resource_hash(path.as_posix()) click.echo('{}\t{}'.format(path, h))
python
def history(namespace_module): """Hash all versions on Artifactory.""" for path in get_namespace_history(namespace_module): h = get_bel_resource_hash(path.as_posix()) click.echo('{}\t{}'.format(path, h))
[ "def", "history", "(", "namespace_module", ")", ":", "for", "path", "in", "get_namespace_history", "(", "namespace_module", ")", ":", "h", "=", "get_bel_resource_hash", "(", "path", ".", "as_posix", "(", ")", ")", "click", ".", "echo", "(", "'{}\\t{}'", ".", "format", "(", "path", ",", "h", ")", ")" ]
Hash all versions on Artifactory.
[ "Hash", "all", "versions", "on", "Artifactory", "." ]
720107780a59be2ef08885290dfa519b1da62871
https://github.com/pybel/pybel-artifactory/blob/720107780a59be2ef08885290dfa519b1da62871/src/pybel_artifactory/cli.py#L79-L83
243,352
pybel/pybel-artifactory
src/pybel_artifactory/cli.py
convert_to_annotation
def convert_to_annotation(file, output): """Convert a namespace file to an annotation file.""" resource = parse_bel_resource(file) write_annotation( keyword=resource['Namespace']['Keyword'], values={k: '' for k in resource['Values']}, citation_name=resource['Citation']['NameString'], description=resource['Namespace']['DescriptionString'], file=output )
python
def convert_to_annotation(file, output): """Convert a namespace file to an annotation file.""" resource = parse_bel_resource(file) write_annotation( keyword=resource['Namespace']['Keyword'], values={k: '' for k in resource['Values']}, citation_name=resource['Citation']['NameString'], description=resource['Namespace']['DescriptionString'], file=output )
[ "def", "convert_to_annotation", "(", "file", ",", "output", ")", ":", "resource", "=", "parse_bel_resource", "(", "file", ")", "write_annotation", "(", "keyword", "=", "resource", "[", "'Namespace'", "]", "[", "'Keyword'", "]", ",", "values", "=", "{", "k", ":", "''", "for", "k", "in", "resource", "[", "'Values'", "]", "}", ",", "citation_name", "=", "resource", "[", "'Citation'", "]", "[", "'NameString'", "]", ",", "description", "=", "resource", "[", "'Namespace'", "]", "[", "'DescriptionString'", "]", ",", "file", "=", "output", ")" ]
Convert a namespace file to an annotation file.
[ "Convert", "a", "namespace", "file", "to", "an", "annotation", "file", "." ]
720107780a59be2ef08885290dfa519b1da62871
https://github.com/pybel/pybel-artifactory/blob/720107780a59be2ef08885290dfa519b1da62871/src/pybel_artifactory/cli.py#L90-L100
243,353
pybel/pybel-artifactory
src/pybel_artifactory/cli.py
history
def history(annotation_module): """Output the hashes for the annotation resources' versions.""" for path in get_annotation_history(annotation_module): h = get_bel_resource_hash(path.as_posix()) click.echo('{}\t{}'.format(path, h))
python
def history(annotation_module): """Output the hashes for the annotation resources' versions.""" for path in get_annotation_history(annotation_module): h = get_bel_resource_hash(path.as_posix()) click.echo('{}\t{}'.format(path, h))
[ "def", "history", "(", "annotation_module", ")", ":", "for", "path", "in", "get_annotation_history", "(", "annotation_module", ")", ":", "h", "=", "get_bel_resource_hash", "(", "path", ".", "as_posix", "(", ")", ")", "click", ".", "echo", "(", "'{}\\t{}'", ".", "format", "(", "path", ",", "h", ")", ")" ]
Output the hashes for the annotation resources' versions.
[ "Output", "the", "hashes", "for", "the", "annotation", "resources", "versions", "." ]
720107780a59be2ef08885290dfa519b1da62871
https://github.com/pybel/pybel-artifactory/blob/720107780a59be2ef08885290dfa519b1da62871/src/pybel_artifactory/cli.py#L110-L114
243,354
pybel/pybel-artifactory
src/pybel_artifactory/cli.py
convert_to_namespace
def convert_to_namespace(file, output, keyword): """Convert an annotation file to a namespace file.""" resource = parse_bel_resource(file) write_namespace( namespace_keyword=(keyword or resource['AnnotationDefinition']['Keyword']), namespace_name=resource['AnnotationDefinition']['Keyword'], namespace_description=resource['AnnotationDefinition']['DescriptionString'], author_name='Charles Tapley Hoyt', namespace_domain=NAMESPACE_DOMAIN_OTHER, values=resource['Values'], citation_name=resource['Citation']['NameString'], file=output )
python
def convert_to_namespace(file, output, keyword): """Convert an annotation file to a namespace file.""" resource = parse_bel_resource(file) write_namespace( namespace_keyword=(keyword or resource['AnnotationDefinition']['Keyword']), namespace_name=resource['AnnotationDefinition']['Keyword'], namespace_description=resource['AnnotationDefinition']['DescriptionString'], author_name='Charles Tapley Hoyt', namespace_domain=NAMESPACE_DOMAIN_OTHER, values=resource['Values'], citation_name=resource['Citation']['NameString'], file=output )
[ "def", "convert_to_namespace", "(", "file", ",", "output", ",", "keyword", ")", ":", "resource", "=", "parse_bel_resource", "(", "file", ")", "write_namespace", "(", "namespace_keyword", "=", "(", "keyword", "or", "resource", "[", "'AnnotationDefinition'", "]", "[", "'Keyword'", "]", ")", ",", "namespace_name", "=", "resource", "[", "'AnnotationDefinition'", "]", "[", "'Keyword'", "]", ",", "namespace_description", "=", "resource", "[", "'AnnotationDefinition'", "]", "[", "'DescriptionString'", "]", ",", "author_name", "=", "'Charles Tapley Hoyt'", ",", "namespace_domain", "=", "NAMESPACE_DOMAIN_OTHER", ",", "values", "=", "resource", "[", "'Values'", "]", ",", "citation_name", "=", "resource", "[", "'Citation'", "]", "[", "'NameString'", "]", ",", "file", "=", "output", ")" ]
Convert an annotation file to a namespace file.
[ "Convert", "an", "annotation", "file", "to", "a", "namespace", "file", "." ]
720107780a59be2ef08885290dfa519b1da62871
https://github.com/pybel/pybel-artifactory/blob/720107780a59be2ef08885290dfa519b1da62871/src/pybel_artifactory/cli.py#L129-L141
243,355
nefarioustim/parker
parker/client.py
get_proxy
def get_proxy(): """Return a random proxy from proxy config.""" proxies = _config['proxies'] return proxies[ random.randint(0, len(proxies) - 1) ] if len(proxies) > 0 else None
python
def get_proxy(): """Return a random proxy from proxy config.""" proxies = _config['proxies'] return proxies[ random.randint(0, len(proxies) - 1) ] if len(proxies) > 0 else None
[ "def", "get_proxy", "(", ")", ":", "proxies", "=", "_config", "[", "'proxies'", "]", "return", "proxies", "[", "random", ".", "randint", "(", "0", ",", "len", "(", "proxies", ")", "-", "1", ")", "]", "if", "len", "(", "proxies", ")", ">", "0", "else", "None" ]
Return a random proxy from proxy config.
[ "Return", "a", "random", "proxy", "from", "proxy", "config", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/client.py#L14-L20
243,356
nefarioustim/parker
parker/client.py
get_instance
def get_instance(): """Return an instance of Client.""" global _instances user_agents = _config['user-agents'] user_agent = user_agents[ random.randint(0, len(user_agents) - 1) ] if len(user_agents) > 0 else DEFAULT_UA instance_key = user_agent try: instance = _instances[instance_key] except KeyError: instance = Client(user_agent, get_proxy) _instances[instance_key] = instance return instance
python
def get_instance(): """Return an instance of Client.""" global _instances user_agents = _config['user-agents'] user_agent = user_agents[ random.randint(0, len(user_agents) - 1) ] if len(user_agents) > 0 else DEFAULT_UA instance_key = user_agent try: instance = _instances[instance_key] except KeyError: instance = Client(user_agent, get_proxy) _instances[instance_key] = instance return instance
[ "def", "get_instance", "(", ")", ":", "global", "_instances", "user_agents", "=", "_config", "[", "'user-agents'", "]", "user_agent", "=", "user_agents", "[", "random", ".", "randint", "(", "0", ",", "len", "(", "user_agents", ")", "-", "1", ")", "]", "if", "len", "(", "user_agents", ")", ">", "0", "else", "DEFAULT_UA", "instance_key", "=", "user_agent", "try", ":", "instance", "=", "_instances", "[", "instance_key", "]", "except", "KeyError", ":", "instance", "=", "Client", "(", "user_agent", ",", "get_proxy", ")", "_instances", "[", "instance_key", "]", "=", "instance", "return", "instance" ]
Return an instance of Client.
[ "Return", "an", "instance", "of", "Client", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/client.py#L23-L40
243,357
nefarioustim/parker
parker/client.py
Client.get
def get(self, uri, disable_proxy=False, stream=False): """Return Requests response to GET request.""" response = requests.get( uri, headers=self.headers, allow_redirects=True, cookies={}, stream=stream, proxies=self.proxy if not disable_proxy else False ) if response.status_code in _PERMITTED_STATUS_CODES: self.response_headers = response.headers return response.content if not stream else response.iter_content() else: raise requests.exceptions.HTTPError( "HTTP response did not have a permitted status code." )
python
def get(self, uri, disable_proxy=False, stream=False): """Return Requests response to GET request.""" response = requests.get( uri, headers=self.headers, allow_redirects=True, cookies={}, stream=stream, proxies=self.proxy if not disable_proxy else False ) if response.status_code in _PERMITTED_STATUS_CODES: self.response_headers = response.headers return response.content if not stream else response.iter_content() else: raise requests.exceptions.HTTPError( "HTTP response did not have a permitted status code." )
[ "def", "get", "(", "self", ",", "uri", ",", "disable_proxy", "=", "False", ",", "stream", "=", "False", ")", ":", "response", "=", "requests", ".", "get", "(", "uri", ",", "headers", "=", "self", ".", "headers", ",", "allow_redirects", "=", "True", ",", "cookies", "=", "{", "}", ",", "stream", "=", "stream", ",", "proxies", "=", "self", ".", "proxy", "if", "not", "disable_proxy", "else", "False", ")", "if", "response", ".", "status_code", "in", "_PERMITTED_STATUS_CODES", ":", "self", ".", "response_headers", "=", "response", ".", "headers", "return", "response", ".", "content", "if", "not", "stream", "else", "response", ".", "iter_content", "(", ")", "else", ":", "raise", "requests", ".", "exceptions", ".", "HTTPError", "(", "\"HTTP response did not have a permitted status code.\"", ")" ]
Return Requests response to GET request.
[ "Return", "Requests", "response", "to", "GET", "request", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/client.py#L73-L90
243,358
nefarioustim/parker
parker/client.py
Client.get_content
def get_content(self, uri, disable_proxy=False): """Return content from URI if Response status is good.""" return self.get(uri=uri, disable_proxy=disable_proxy)
python
def get_content(self, uri, disable_proxy=False): """Return content from URI if Response status is good.""" return self.get(uri=uri, disable_proxy=disable_proxy)
[ "def", "get_content", "(", "self", ",", "uri", ",", "disable_proxy", "=", "False", ")", ":", "return", "self", ".", "get", "(", "uri", "=", "uri", ",", "disable_proxy", "=", "disable_proxy", ")" ]
Return content from URI if Response status is good.
[ "Return", "content", "from", "URI", "if", "Response", "status", "is", "good", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/client.py#L92-L94
243,359
nefarioustim/parker
parker/client.py
Client.get_iter_content
def get_iter_content(self, uri, disable_proxy=False): """Return iterable content from URI if Response status is good.""" return self.get(uri=uri, disable_proxy=disable_proxy, stream=True)
python
def get_iter_content(self, uri, disable_proxy=False): """Return iterable content from URI if Response status is good.""" return self.get(uri=uri, disable_proxy=disable_proxy, stream=True)
[ "def", "get_iter_content", "(", "self", ",", "uri", ",", "disable_proxy", "=", "False", ")", ":", "return", "self", ".", "get", "(", "uri", "=", "uri", ",", "disable_proxy", "=", "disable_proxy", ",", "stream", "=", "True", ")" ]
Return iterable content from URI if Response status is good.
[ "Return", "iterable", "content", "from", "URI", "if", "Response", "status", "is", "good", "." ]
ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6
https://github.com/nefarioustim/parker/blob/ccc1de1ac6bfb5e0a8cfa4fdebb2f38f2ee027d6/parker/client.py#L96-L98
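A hedged usage sketch tying the parker.client records above together; it assumes the package is importable as in the record paths and that the module-level _config (user-agents, proxies) has been populated elsewhere, which is not shown here.

from parker import client

c = client.get_instance()   # one cached Client per randomly chosen user agent

# Small responses: fetch the whole body at once.
body = c.get_content("http://example.com/", disable_proxy=True)

# Large downloads: iterate over the content instead.
for chunk in c.get_iter_content("http://example.com/big-file", disable_proxy=True):
    pass  # write the chunk to disk, hash it, etc.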
243,360
Akhail/Tebless
tebless/utils/__init__.py
dict_diff
def dict_diff(first, second): """ Return a dict of keys that differ with another config object. If a value is not found in one of the configs, it will be represented by KEYNOTFOUND. @param first: First dictionary to diff. @param second: Second dictionary to diff. @return diff: Dict of Key => (first.val, second.val) """ diff = {} # Check all keys in first dict for key in first: if key not in second: diff[key] = (first[key], None) elif (first[key] != second[key]): diff[key] = (first[key], second[key]) # Check all keys in second dict to find missing for key in second: if key not in first: diff[key] = (None, second[key]) return diff
python
def dict_diff(first, second): """ Return a dict of keys that differ with another config object. If a value is not found in one of the configs, it will be represented by KEYNOTFOUND. @param first: First dictionary to diff. @param second: Second dictionary to diff. @return diff: Dict of Key => (first.val, second.val) """ diff = {} # Check all keys in first dict for key in first: if key not in second: diff[key] = (first[key], None) elif (first[key] != second[key]): diff[key] = (first[key], second[key]) # Check all keys in second dict to find missing for key in second: if key not in first: diff[key] = (None, second[key]) return diff
[ "def", "dict_diff", "(", "first", ",", "second", ")", ":", "diff", "=", "{", "}", "# Check all keys in first dict", "for", "key", "in", "first", ":", "if", "key", "not", "in", "second", ":", "diff", "[", "key", "]", "=", "(", "first", "[", "key", "]", ",", "None", ")", "elif", "(", "first", "[", "key", "]", "!=", "second", "[", "key", "]", ")", ":", "diff", "[", "key", "]", "=", "(", "first", "[", "key", "]", ",", "second", "[", "key", "]", ")", "# Check all keys in second dict to find missing", "for", "key", "in", "second", ":", "if", "key", "not", "in", "first", ":", "diff", "[", "key", "]", "=", "(", "None", ",", "second", "[", "key", "]", ")", "return", "diff" ]
Return a dict of keys that differ with another config object. If a value is not found in one of the configs, it will be represented by KEYNOTFOUND. @param first: First dictionary to diff. @param second: Second dictionary to diff. @return diff: Dict of Key => (first.val, second.val)
[ "Return", "a", "dict", "of", "keys", "that", "differ", "with", "another", "config", "object", ".", "If", "a", "value", "is", "not", "found", "in", "one", "fo", "the", "configs", "it", "will", "be", "represented", "by", "KEYNOTFOUND", "." ]
369ff76f06e7a0b6d04fabc287fa6c4095e158d4
https://github.com/Akhail/Tebless/blob/369ff76f06e7a0b6d04fabc287fa6c4095e158d4/tebless/utils/__init__.py#L13-L32
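A short worked example for the dict_diff record above; the import path simply follows the record's tebless/utils/__init__.py location.

from tebless.utils import dict_diff

first = {"host": "a", "port": 80}
second = {"host": "b", "timeout": 5}

# Each differing key maps to a (first_value, second_value) pair,
# with None standing in for the side where the key is missing.
print(dict_diff(first, second))
# {'host': ('a', 'b'), 'port': (80, None), 'timeout': (None, 5)}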
243,361
RonenNess/Fileter
fileter/iterators/concat_files.py
ConcatFiles.process_file
def process_file(self, path, dryrun): """ Concat files and return filename. """ # special case - skip output file so we won't include it in result if path == self._output_path: return None # if dryrun skip and return file if dryrun: return path # concat file with output file with open(path, "rb") as infile: data = infile.read() self._output_file.write(data) # return processed file path return path
python
def process_file(self, path, dryrun): """ Concat files and return filename. """ # special case - skip output file so we won't include it in result if path == self._output_path: return None # if dryrun skip and return file if dryrun: return path # concat file with output file with open(path, "rb") as infile: data = infile.read() self._output_file.write(data) # return processed file path return path
[ "def", "process_file", "(", "self", ",", "path", ",", "dryrun", ")", ":", "# special case - skip output file so we won't include it in result", "if", "path", "==", "self", ".", "_output_path", ":", "return", "None", "# if dryrun skip and return file", "if", "dryrun", ":", "return", "path", "# concat file with output file", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "self", ".", "_output_file", ".", "write", "(", "data", ")", "# return processed file path", "return", "path" ]
Concat files and return filename.
[ "Concat", "files", "and", "return", "filename", "." ]
5372221b4049d5d46a9926573b91af17681c81f3
https://github.com/RonenNess/Fileter/blob/5372221b4049d5d46a9926573b91af17681c81f3/fileter/iterators/concat_files.py#L41-L59
243,362
maxfischer2781/chainlet
chainlet/primitives/chain.py
Chain._chain_forks
def _chain_forks(elements): """Detect whether a sequence of elements leads to a fork of streams""" # we are only interested in the result, so unwind from the end for element in reversed(elements): if element.chain_fork: return True elif element.chain_join: return False return False
python
def _chain_forks(elements): """Detect whether a sequence of elements leads to a fork of streams""" # we are only interested in the result, so unwind from the end for element in reversed(elements): if element.chain_fork: return True elif element.chain_join: return False return False
[ "def", "_chain_forks", "(", "elements", ")", ":", "# we are only interested in the result, so unwind from the end", "for", "element", "in", "reversed", "(", "elements", ")", ":", "if", "element", ".", "chain_fork", ":", "return", "True", "elif", "element", ".", "chain_join", ":", "return", "False", "return", "False" ]
Detect whether a sequence of elements leads to a fork of streams
[ "Detect", "whether", "a", "sequence", "of", "elements", "leads", "to", "a", "fork", "of", "streams" ]
4e17f9992b4780bd0d9309202e2847df640bffe8
https://github.com/maxfischer2781/chainlet/blob/4e17f9992b4780bd0d9309202e2847df640bffe8/chainlet/primitives/chain.py#L66-L74
243,363
walidsa3d/videoscene
videoscene/core.py
parse
def parse(filename): """ parse a scene release string and return a dictionary of parsed values.""" screensize = re.compile('720p|1080p', re.I) source = re.compile( '\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)', re.I) year = re.compile('(1|2)\d{3}') series = re.compile('s\d{1,3}e\d{1,3}', re.I) group = re.compile('[A-Za-z0-9]+$', re.I) video = re.compile('DVDR|Xvid|MP4|NTSC|PAL|[xh][\.\s]?264', re.I) audio = re.compile('AAC2[\.\s]0|AAC|AC3|DTS|DD5', re.I) edition = re.compile( '\.(UNRATED|DC|(Directors|EXTENDED)[\.\s](CUT|EDITION)|EXTENDED|3D|2D|\bNF\b)', re.I) tags = re.compile( '\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)', re.I) release = re.compile( 'REAL[\.\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\.\s]NFO|DiRFiX|NFOFiX', re.I) subtitles = re.compile( '\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)', re.I) language = re.compile('\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)', re.I) title = filename attrs = {'screenSize': screensize, 'source': source, 'year': year, 'series': series, 'release_group': group, 'video': video, 'audio': audio, 'edition': edition, 'tags': tags, 'release': release, 'subtitles': subtitles, 'language': language } data = {} for attr in attrs: match = methodcaller('search', filename)(attrs[attr]) if match: matched = methodcaller('group')(match) data[attr] = matched.strip('.') title = re.sub(matched, '', title) if 'series' in data: s, e = re.split('e|E', data['series']) # use lstrip to remove leading zeros data['season'] = s[1:].lstrip('0') data['episode'] = e.lstrip('0') data['series'] = True temptitle = title.replace('.', ' ').strip('-').strip() data['title'] = re.sub('\s{2,}', ' ', temptitle) return data
python
def parse(filename): """ parse a scene release string and return a dictionary of parsed values.""" screensize = re.compile('720p|1080p', re.I) source = re.compile( '\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)', re.I) year = re.compile('(1|2)\d{3}') series = re.compile('s\d{1,3}e\d{1,3}', re.I) group = re.compile('[A-Za-z0-9]+$', re.I) video = re.compile('DVDR|Xvid|MP4|NTSC|PAL|[xh][\.\s]?264', re.I) audio = re.compile('AAC2[\.\s]0|AAC|AC3|DTS|DD5', re.I) edition = re.compile( '\.(UNRATED|DC|(Directors|EXTENDED)[\.\s](CUT|EDITION)|EXTENDED|3D|2D|\bNF\b)', re.I) tags = re.compile( '\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)', re.I) release = re.compile( 'REAL[\.\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\.\s]NFO|DiRFiX|NFOFiX', re.I) subtitles = re.compile( '\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)', re.I) language = re.compile('\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)', re.I) title = filename attrs = {'screenSize': screensize, 'source': source, 'year': year, 'series': series, 'release_group': group, 'video': video, 'audio': audio, 'edition': edition, 'tags': tags, 'release': release, 'subtitles': subtitles, 'language': language } data = {} for attr in attrs: match = methodcaller('search', filename)(attrs[attr]) if match: matched = methodcaller('group')(match) data[attr] = matched.strip('.') title = re.sub(matched, '', title) if 'series' in data: s, e = re.split('e|E', data['series']) # use lstrip to remove leading zeros data['season'] = s[1:].lstrip('0') data['episode'] = e.lstrip('0') data['series'] = True temptitle = title.replace('.', ' ').strip('-').strip() data['title'] = re.sub('\s{2,}', ' ', temptitle) return data
[ "def", "parse", "(", "filename", ")", ":", "screensize", "=", "re", ".", "compile", "(", "'720p|1080p'", ",", "re", ".", "I", ")", "source", "=", "re", ".", "compile", "(", "'\\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)'", ",", "re", ".", "I", ")", "year", "=", "re", ".", "compile", "(", "'(1|2)\\d{3}'", ")", "series", "=", "re", ".", "compile", "(", "'s\\d{1,3}e\\d{1,3}'", ",", "re", ".", "I", ")", "group", "=", "re", ".", "compile", "(", "'[A-Za-z0-9]+$'", ",", "re", ".", "I", ")", "video", "=", "re", ".", "compile", "(", "'DVDR|Xvid|MP4|NTSC|PAL|[xh][\\.\\s]?264'", ",", "re", ".", "I", ")", "audio", "=", "re", ".", "compile", "(", "'AAC2[\\.\\s]0|AAC|AC3|DTS|DD5'", ",", "re", ".", "I", ")", "edition", "=", "re", ".", "compile", "(", "'\\.(UNRATED|DC|(Directors|EXTENDED)[\\.\\s](CUT|EDITION)|EXTENDED|3D|2D|\\bNF\\b)'", ",", "re", ".", "I", ")", "tags", "=", "re", ".", "compile", "(", "'\\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)'", ",", "re", ".", "I", ")", "release", "=", "re", ".", "compile", "(", "'REAL[\\.\\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\\.\\s]NFO|DiRFiX|NFOFiX'", ",", "re", ".", "I", ")", "subtitles", "=", "re", ".", "compile", "(", "'\\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)'", ",", "re", ".", "I", ")", "language", "=", "re", ".", "compile", "(", "'\\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)'", ",", "re", ".", "I", ")", "title", "=", "filename", "attrs", "=", "{", "'screenSize'", ":", "screensize", ",", "'source'", ":", "source", ",", "'year'", ":", "year", ",", "'series'", ":", "series", ",", "'release_group'", ":", "group", ",", "'video'", ":", "video", ",", "'audio'", ":", "audio", ",", "'edition'", ":", "edition", ",", "'tags'", ":", "tags", ",", "'release'", ":", "release", ",", "'subtitles'", ":", "subtitles", ",", "'language'", ":", "language", "}", "data", "=", "{", "}", "for", "attr", "in", "attrs", ":", "match", "=", "methodcaller", "(", "'search'", ",", "filename", ")", "(", "attrs", "[", "attr", "]", ")", "if", "match", ":", "matched", "=", "methodcaller", "(", "'group'", ")", "(", "match", ")", "data", "[", "attr", "]", "=", "matched", ".", "strip", "(", "'.'", ")", "title", "=", "re", ".", "sub", "(", "matched", ",", "''", ",", "title", ")", "if", "'series'", "in", "data", ":", "s", ",", "e", "=", "re", ".", "split", "(", "'e|E'", ",", "data", "[", "'series'", "]", ")", "# use lstrip to remove leading zeros", "data", "[", "'season'", "]", "=", "s", "[", "1", ":", "]", ".", "lstrip", "(", "'0'", ")", "data", "[", "'episode'", "]", "=", "e", ".", "lstrip", "(", "'0'", ")", "data", "[", "'series'", "]", "=", "True", "temptitle", "=", "title", ".", "replace", "(", "'.'", ",", "' '", ")", ".", "strip", "(", "'-'", ")", ".", "strip", "(", ")", "data", "[", "'title'", "]", "=", "re", ".", "sub", "(", "'\\s{2,}'", ",", "' '", ",", "temptitle", ")", "return", "data" ]
parse a scene release string and return a dictionary of parsed values.
[ "parse", "a", "scene", "release", "string", "and", "return", "a", "dictionary", "of", "parsed", "values", "." ]
3212f8cd7b746b78f76687a60326183bcf5f2e1b
https://github.com/walidsa3d/videoscene/blob/3212f8cd7b746b78f76687a60326183bcf5f2e1b/videoscene/core.py#L7-L57
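A hedged usage sketch for the videoscene parse record above; the release name is made up, the import path follows the record's videoscene/core.py location, and which keys come back depends on which patterns match.

from videoscene.core import parse

info = parse("The.Example.Movie.2014.720p.BluRay.x264-GRP")

print(info.get("year"))           # '2014'
print(info.get("screenSize"))     # '720p'
print(info.get("source"))         # 'BluRay'
print(info.get("video"))          # 'x264'
print(info.get("release_group"))  # 'GRP'
print(info.get("title"))          # cleaned-up title, roughly 'The Example Movie'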
243,364
xaptum/xtt-python
xtt/certificates.py
generate_ecdsap256_server_certificate
def generate_ecdsap256_server_certificate(server_id, server_pub_key, expiry, root_id, root_priv_key): """ Creates a new server certificate signed by the provided root. :param Identity server_id: the identity for the certificate :param ECDSAP256PublicKey server_pub_key: the public key for the certificate :param CertificateExpiry expiry: the expiry date for the certificate :param CertificateRootId root_id: the root identity to sign this certificate :param ECDSAP256PrivateKey root_priv_key: the root private key to sign this certificate """ cert = ECDSAP256ServerCertificate() rc = _lib.xtt_generate_server_certificate_ecdsap256(cert.native, server_id.native, server_pub_key.native, expiry.native, root_id.native, root_priv_key.native) if rc == RC.SUCCESS: return cert else: raise error_from_code(rc)
python
def generate_ecdsap256_server_certificate(server_id, server_pub_key, expiry, root_id, root_priv_key): """ Creates a new server certificate signed by the provided root. :param Identity server_id: the identity for the certificate :param ECDSAP256PublicKey server_pub_key: the public key for the certificate :param CertificateExpiry expiry: the expiry date for the certificate :param CertificateRootId root_id: the root identity to sign this certificate :param ECDSAP256PrivateKey root_priv_key: the root private key to sign this certificate """ cert = ECDSAP256ServerCertificate() rc = _lib.xtt_generate_server_certificate_ecdsap256(cert.native, server_id.native, server_pub_key.native, expiry.native, root_id.native, root_priv_key.native) if rc == RC.SUCCESS: return cert else: raise error_from_code(rc)
[ "def", "generate_ecdsap256_server_certificate", "(", "server_id", ",", "server_pub_key", ",", "expiry", ",", "root_id", ",", "root_priv_key", ")", ":", "cert", "=", "ECDSAP256ServerCertificate", "(", ")", "rc", "=", "_lib", ".", "xtt_generate_server_certificate_ecdsap256", "(", "cert", ".", "native", ",", "server_id", ".", "native", ",", "server_pub_key", ".", "native", ",", "expiry", ".", "native", ",", "root_id", ".", "native", ",", "root_priv_key", ".", "native", ")", "if", "rc", "==", "RC", ".", "SUCCESS", ":", "return", "cert", "else", ":", "raise", "error_from_code", "(", "rc", ")" ]
Creates a new server certificate signed by the provided root. :param Identity server_id: the identity for the certificate :param ECDSAP256PublicKey server_pub_key: the public key for the certificate :param CertificateExpiry expiry: the expiry date for the certificate :param CertificateRootId root_id: the root identity to sign this certificate :param ECDSAP256PrivateKey root_priv_key: the root private key to sign this certificate
[ "Creates", "a", "new", "server", "certificate", "signed", "by", "the", "provided", "root", "." ]
23ee469488d710d730314bec1136c4dd7ac2cd5c
https://github.com/xaptum/xtt-python/blob/23ee469488d710d730314bec1136c4dd7ac2cd5c/xtt/certificates.py#L35-L57
243,365
radjkarl/fancyTools
fancytools/math/Point3D.py
Point3D.project
def project(self, win_width, win_height, fov, viewer_distance): """ Transforms this 3D point to 2D using a perspective projection. """ factor = fov / (viewer_distance + self.z) x = self.x * factor + win_width // 2 y = -self.y * factor + win_height // 2 return Point3D(x, y, 1)
python
def project(self, win_width, win_height, fov, viewer_distance): """ Transforms this 3D point to 2D using a perspective projection. """ factor = fov / (viewer_distance + self.z) x = self.x * factor + win_width // 2 y = -self.y * factor + win_height // 2 return Point3D(x, y, 1)
[ "def", "project", "(", "self", ",", "win_width", ",", "win_height", ",", "fov", ",", "viewer_distance", ")", ":", "factor", "=", "fov", "/", "(", "viewer_distance", "+", "self", ".", "z", ")", "x", "=", "self", ".", "x", "*", "factor", "+", "win_width", "//", "2", "y", "=", "-", "self", ".", "y", "*", "factor", "+", "win_height", "//", "2", "return", "Point3D", "(", "x", ",", "y", ",", "1", ")" ]
Transforms this 3D point to 2D using a perspective projection.
[ "Transforms", "this", "3D", "point", "to", "2D", "using", "a", "perspective", "projection", "." ]
4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b
https://github.com/radjkarl/fancyTools/blob/4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b/fancytools/math/Point3D.py#L46-L51
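A self-contained arithmetic check of the perspective formula used in the Point3D.project record above; it does not depend on the class itself and the numbers are arbitrary.

win_width, win_height = 640, 480
fov, viewer_distance = 256, 4

def project_xy(x, y, z):
    # the scale factor shrinks as the point moves away from the viewer
    factor = fov / (viewer_distance + z)
    return x * factor + win_width // 2, -y * factor + win_height // 2

print(project_xy(1, 1, 0))    # (384.0, 176.0) -- near point, far from the screen centre
print(project_xy(1, 1, 12))   # (336.0, 224.0) -- same point pushed back, drawn closer to the centre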
243,366
lambdalisue/maidenhair
src/maidenhair/functions.py
set_default_parser
def set_default_parser(parser): """ Set default parser instance Parameters ---------- parser : instance or string An instance or registered name of parser class. The specified parser instance will be used when the user did not specify :attr:`parser` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """ if isinstance(parser, basestring): parser = registry.find(parser)() if not isinstance(parser, BaseParser): parser = parser() global _parser _parser = parser
python
def set_default_parser(parser): """ Set default parser instance Parameters ---------- parser : instance or string An instance or registered name of parser class. The specified parser instance will be used when the user did not specify :attr:`parser` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """ if isinstance(parser, basestring): parser = registry.find(parser)() if not isinstance(parser, BaseParser): parser = parser() global _parser _parser = parser
[ "def", "set_default_parser", "(", "parser", ")", ":", "if", "isinstance", "(", "parser", ",", "basestring", ")", ":", "parser", "=", "registry", ".", "find", "(", "parser", ")", "(", ")", "if", "not", "isinstance", "(", "parser", ",", "BaseParser", ")", ":", "parser", "=", "parser", "(", ")", "global", "_parser", "_parser", "=", "parser" ]
Set default parser instance Parameters ---------- parser : instance or string An instance or registered name of parser class. The specified parser instance will be used when the user did not specify :attr:`parser` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class
[ "Set", "defaulr", "parser", "instance" ]
d5095c1087d1f4d71cc57410492151d2803a9f0d
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/functions.py#L166-L187
243,367
lambdalisue/maidenhair
src/maidenhair/functions.py
set_default_loader
def set_default_loader(loader): """ Set default loader instance Parameters ---------- loader : instance or string An instance or registered name of loader class. The specified loader instance will be used when the user did not specify :attr:`loader` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """ if isinstance(loader, basestring): loader = registry.find(loader)() if not isinstance(loader, BaseLoader): loader = loader() global _loader _loader = loader
python
def set_default_loader(loader): """ Set default loader instance Parameters ---------- loader : instance or string An instance or registered name of loader class. The specified loader instance will be used when the user did not specify :attr:`loader` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """ if isinstance(loader, basestring): loader = registry.find(loader)() if not isinstance(loader, BaseLoader): loader = loader() global _loader _loader = loader
[ "def", "set_default_loader", "(", "loader", ")", ":", "if", "isinstance", "(", "loader", ",", "basestring", ")", ":", "loader", "=", "registry", ".", "find", "(", "loader", ")", "(", ")", "if", "not", "isinstance", "(", "loader", ",", "BaseLoader", ")", ":", "loader", "=", "loader", "(", ")", "global", "_loader", "_loader", "=", "loader" ]
Set default loader instance Parameters ---------- loader : instance or string An instance or registered name of loader class. The specified loader instance will be used when the user did not specify :attr:`loader` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class
[ "Set", "defaulr", "loader", "instance" ]
d5095c1087d1f4d71cc57410492151d2803a9f0d
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/functions.py#L208-L229
243,368
thingful/hypercat-py
hypercat/hypercat.py
loads
def loads(inputStr): """Takes a string and converts it into an internal hypercat object, with some checking""" inCat = json.loads(inputStr) assert CATALOGUE_TYPE in _values(inCat[CATALOGUE_METADATA], ISCONTENTTYPE_RELATION) # Manually copy mandatory fields, to check that they are there, and exclude other garbage desc = _values(inCat[CATALOGUE_METADATA], DESCRIPTION_RELATION)[0] # TODO: We are ASSUMING just one description, which may not be true outCat = Hypercat(desc) for i in inCat[ITEMS]: href = i[HREF] contentType = _values(i[ITEM_METADATA], ISCONTENTTYPE_RELATION) [0] desc = _values(i[ITEM_METADATA], DESCRIPTION_RELATION) [0] if contentType == CATALOGUE_TYPE: r = Hypercat(desc) else: r = Resource(desc, contentType) outCat.addItem(r, href) return outCat
python
def loads(inputStr): """Takes a string and converts it into an internal hypercat object, with some checking""" inCat = json.loads(inputStr) assert CATALOGUE_TYPE in _values(inCat[CATALOGUE_METADATA], ISCONTENTTYPE_RELATION) # Manually copy mandatory fields, to check that they are there, and exclude other garbage desc = _values(inCat[CATALOGUE_METADATA], DESCRIPTION_RELATION)[0] # TODO: We are ASSUMING just one description, which may not be true outCat = Hypercat(desc) for i in inCat[ITEMS]: href = i[HREF] contentType = _values(i[ITEM_METADATA], ISCONTENTTYPE_RELATION) [0] desc = _values(i[ITEM_METADATA], DESCRIPTION_RELATION) [0] if contentType == CATALOGUE_TYPE: r = Hypercat(desc) else: r = Resource(desc, contentType) outCat.addItem(r, href) return outCat
[ "def", "loads", "(", "inputStr", ")", ":", "inCat", "=", "json", ".", "loads", "(", "inputStr", ")", "assert", "CATALOGUE_TYPE", "in", "_values", "(", "inCat", "[", "CATALOGUE_METADATA", "]", ",", "ISCONTENTTYPE_RELATION", ")", "# Manually copy mandatory fields, to check that they are they, and exclude other garbage", "desc", "=", "_values", "(", "inCat", "[", "CATALOGUE_METADATA", "]", ",", "DESCRIPTION_RELATION", ")", "[", "0", "]", "# TODO: We are ASSUMING just one description, which may not be true", "outCat", "=", "Hypercat", "(", "desc", ")", "for", "i", "in", "inCat", "[", "ITEMS", "]", ":", "href", "=", "i", "[", "HREF", "]", "contentType", "=", "_values", "(", "i", "[", "ITEM_METADATA", "]", ",", "ISCONTENTTYPE_RELATION", ")", "[", "0", "]", "desc", "=", "_values", "(", "i", "[", "ITEM_METADATA", "]", ",", "DESCRIPTION_RELATION", ")", "[", "0", "]", "if", "contentType", "==", "CATALOGUE_TYPE", ":", "r", "=", "Hypercat", "(", "desc", ")", "else", ":", "r", "=", "Resource", "(", "desc", ",", "contentType", ")", "outCat", ".", "addItem", "(", "r", ",", "href", ")", "return", "outCat" ]
Takes a string and converts it into an internal hypercat object, with some checking
[ "Takes", "a", "string", "and", "converts", "it", "into", "an", "internal", "hypercat", "object", "with", "some", "checking" ]
db24ef66ec92d74fbea90afbeadc3a268f18f6e3
https://github.com/thingful/hypercat-py/blob/db24ef66ec92d74fbea90afbeadc3a268f18f6e3/hypercat/hypercat.py#L195-L212
243,369
thingful/hypercat-py
hypercat/hypercat.py
Base.rels
def rels(self): """Returns a LIST of all the metadata relations""" r = [] for i in self.metadata: r = r + i[REL] return r
python
def rels(self): """Returns a LIST of all the metadata relations""" r = [] for i in self.metadata: r = r + i[REL] return r
[ "def", "rels", "(", "self", ")", ":", "r", "=", "[", "]", "for", "i", "in", "self", ".", "metadata", ":", "r", "=", "r", "+", "i", "[", "REL", "]", "return", "[", "]" ]
Returns a LIST of all the metadata relations
[ "Returns", "a", "LIST", "of", "all", "the", "metadata", "relations" ]
db24ef66ec92d74fbea90afbeadc3a268f18f6e3
https://github.com/thingful/hypercat-py/blob/db24ef66ec92d74fbea90afbeadc3a268f18f6e3/hypercat/hypercat.py#L79-L84
243,370
thingful/hypercat-py
hypercat/hypercat.py
Base.prettyprint
def prettyprint(self): """Return hypercat formatted prettily""" return json.dumps(self.asJSON(), sort_keys=True, indent=4, separators=(',', ': '))
python
def prettyprint(self): """Return hypercat formatted prettily""" return json.dumps(self.asJSON(), sort_keys=True, indent=4, separators=(',', ': '))
[ "def", "prettyprint", "(", "self", ")", ":", "return", "json", ".", "dumps", "(", "self", ".", "asJSON", "(", ")", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", "separators", "=", "(", "','", ",", "': '", ")", ")" ]
Return hypercat formatted prettily
[ "Return", "hypercat", "formatted", "prettily" ]
db24ef66ec92d74fbea90afbeadc3a268f18f6e3
https://github.com/thingful/hypercat-py/blob/db24ef66ec92d74fbea90afbeadc3a268f18f6e3/hypercat/hypercat.py#L90-L92
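A hedged round-trip sketch covering the hypercat records above (loads, Base.rels, Base.prettyprint); the import line assumes the module is importable as hypercat.hypercat per the record paths, and the catalogue contents are invented.

from hypercat.hypercat import Hypercat, Resource, loads

cat = Hypercat("Example catalogue")
item = Resource("An example sensor feed", "application/json")
cat.addItem(item, "http://example.com/feed")

text = cat.prettyprint()   # JSON with sorted keys and 4-space indentation
again = loads(text)        # parse it back into an in-memory catalogue
print(again.prettyprint())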
243,371
SkyLothar/shcmd
shcmd/tailf.py
tailf
def tailf( filepath, lastn=0, timeout=60, stopon=None, encoding="utf8", delay=0.1 ): """provide a `tail -f` like function :param filepath: file to tail -f, absolute path or relative path :param lastn: lastn line will also be yield :param timeout: (optional) stop tail -f when time's up [timeout <= 10min, default = 1min] :param stopon: (optional) stops when the stopon(output) returns True :param encoding: (optional) default encoding utf8 :param delay: (optional) sleep if no data is available, default is 0.1s Usage:: >>> for line in tailf('/tmp/foo'): ... print(line) ... "bar" "barz" """ if not os.path.isfile(filepath): raise ShCmdError("[{0}] not exists".format(filepath)) if consts.TIMEOUT_MAX > timeout: timeout = consts.TIMEOUT_DEFAULT delay = delay if consts.DELAY_MAX > delay > 0 else consts.DELAY_DEFAULT if isinstance(stopon, types.FunctionType) is False: stopon = always_false logger.info("tail -f {0} begin".format(filepath)) with open(filepath, "rt", encoding=encoding) as file_obj: lastn_filter = deque(maxlen=lastn) logger.debug("tail last {0} lines".format(lastn)) for line in file_obj: lastn_filter.append(line.rstrip()) for line in lastn_filter: yield line start = time.time() while timeout < 0 or (time.time() - start) < timeout: line = file_obj.readline() where = file_obj.tell() if line: logger.debug("found line: [{0}]".format(line)) yield line if stopon(line): break else: file_obj.seek(0, os.SEEK_END) if file_obj.tell() < where: logger.info("file [{0}] rewinded!".format(filepath)) file_obj.seek(0) else: logger.debug("no data, waiting for [{0}]s".format(delay)) time.sleep(delay) logger.info("tail -f {0} end".format(filepath))
python
def tailf( filepath, lastn=0, timeout=60, stopon=None, encoding="utf8", delay=0.1 ): """provide a `tail -f` like function :param filepath: file to tail -f, absolute path or relative path :param lastn: lastn line will also be yield :param timeout: (optional) stop tail -f when time's up [timeout <= 10min, default = 1min] :param stopon: (optional) stops when the stopon(output) returns True :param encoding: (optional) default encoding utf8 :param delay: (optional) sleep if no data is available, default is 0.1s Usage:: >>> for line in tailf('/tmp/foo'): ... print(line) ... "bar" "barz" """ if not os.path.isfile(filepath): raise ShCmdError("[{0}] not exists".format(filepath)) if consts.TIMEOUT_MAX > timeout: timeout = consts.TIMEOUT_DEFAULT delay = delay if consts.DELAY_MAX > delay > 0 else consts.DELAY_DEFAULT if isinstance(stopon, types.FunctionType) is False: stopon = always_false logger.info("tail -f {0} begin".format(filepath)) with open(filepath, "rt", encoding=encoding) as file_obj: lastn_filter = deque(maxlen=lastn) logger.debug("tail last {0} lines".format(lastn)) for line in file_obj: lastn_filter.append(line.rstrip()) for line in lastn_filter: yield line start = time.time() while timeout < 0 or (time.time() - start) < timeout: line = file_obj.readline() where = file_obj.tell() if line: logger.debug("found line: [{0}]".format(line)) yield line if stopon(line): break else: file_obj.seek(0, os.SEEK_END) if file_obj.tell() < where: logger.info("file [{0}] rewinded!".format(filepath)) file_obj.seek(0) else: logger.debug("no data, waiting for [{0}]s".format(delay)) time.sleep(delay) logger.info("tail -f {0} end".format(filepath))
[ "def", "tailf", "(", "filepath", ",", "lastn", "=", "0", ",", "timeout", "=", "60", ",", "stopon", "=", "None", ",", "encoding", "=", "\"utf8\"", ",", "delay", "=", "0.1", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "filepath", ")", ":", "raise", "ShCmdError", "(", "\"[{0}] not exists\"", ".", "format", "(", "filepath", ")", ")", "if", "consts", ".", "TIMEOUT_MAX", ">", "timeout", ":", "timeout", "=", "consts", ".", "TIMEOUT_DEFAULT", "delay", "=", "delay", "if", "consts", ".", "DELAY_MAX", ">", "delay", ">", "0", "else", "consts", ".", "DELAY_DEFAULT", "if", "isinstance", "(", "stopon", ",", "types", ".", "FunctionType", ")", "is", "False", ":", "stopon", "=", "always_false", "logger", ".", "info", "(", "\"tail -f {0} begin\"", ".", "format", "(", "filepath", ")", ")", "with", "open", "(", "filepath", ",", "\"rt\"", ",", "encoding", "=", "encoding", ")", "as", "file_obj", ":", "lastn_filter", "=", "deque", "(", "maxlen", "=", "lastn", ")", "logger", ".", "debug", "(", "\"tail last {0} lines\"", ".", "format", "(", "lastn", ")", ")", "for", "line", "in", "file_obj", ":", "lastn_filter", ".", "append", "(", "line", ".", "rstrip", "(", ")", ")", "for", "line", "in", "lastn_filter", ":", "yield", "line", "start", "=", "time", ".", "time", "(", ")", "while", "timeout", "<", "0", "or", "(", "time", ".", "time", "(", ")", "-", "start", ")", "<", "timeout", ":", "line", "=", "file_obj", ".", "readline", "(", ")", "where", "=", "file_obj", ".", "tell", "(", ")", "if", "line", ":", "logger", ".", "debug", "(", "\"found line: [{0}]\"", ".", "format", "(", "line", ")", ")", "yield", "line", "if", "stopon", "(", "line", ")", ":", "break", "else", ":", "file_obj", ".", "seek", "(", "0", ",", "os", ".", "SEEK_END", ")", "if", "file_obj", ".", "tell", "(", ")", "<", "where", ":", "logger", ".", "info", "(", "\"file [{0}] rewinded!\"", ".", "format", "(", "filepath", ")", ")", "file_obj", ".", "seek", "(", "0", ")", "else", ":", "logger", ".", "debug", "(", "\"no data, waiting for [{0}]s\"", ".", "format", "(", "delay", ")", ")", "time", ".", "sleep", "(", "delay", ")", "logger", ".", "info", "(", "\"tail -f {0} end\"", ".", "format", "(", "filepath", ")", ")" ]
provide a `tail -f` like function

:param filepath: file to tail -f, absolute or relative path
:param lastn: the last `lastn` lines are also yielded
:param timeout: (optional) stop tail -f when time's up [timeout <= 10min, default = 1min]
:param stopon: (optional) stop when stopon(line) returns True
:param encoding: (optional) default encoding utf8
:param delay: (optional) sleep if no data is available, default is 0.1s

Usage::

    >>> for line in tailf('/tmp/foo'):
    ...     print(line)
    ...
    "bar"
    "barz"
[ "provide", "a", "tail", "-", "f", "like", "function" ]
d8cad6311a4da7ef09f3419c86b58e30388b7ee3
https://github.com/SkyLothar/shcmd/blob/d8cad6311a4da7ef09f3419c86b58e30388b7ee3/shcmd/tailf.py#L20-L84
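A minimal usage sketch for the tailf generator above; the shcmd.tailf import path is inferred from this record's repo path, and the log file name and "READY" marker are invented for illustration.

from shcmd.tailf import tailf  # assumed import path, matching the record's repo layout

# Follow a log for up to two minutes, starting with the last 5 lines,
# and stop early once a line containing "READY" shows up.
for line in tailf("/var/log/app.log", lastn=5, timeout=120,
                  stopon=lambda l: "READY" in l):
    print(line.rstrip())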
243,372
padfoot27/merlin
venv/lib/python2.7/site-packages/setuptools/sandbox.py
_execfile
def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. """ mode = 'rb' # Python 2.6 compile requires LF for newlines, so use deprecated # Universal newlines support. if sys.version_info < (2, 7): mode += 'U' with open(filename, mode) as stream: script = stream.read() if locals is None: locals = globals code = compile(script, filename, 'exec') exec(code, globals, locals)
python
def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. """ mode = 'rb' # Python 2.6 compile requires LF for newlines, so use deprecated # Universal newlines support. if sys.version_info < (2, 7): mode += 'U' with open(filename, mode) as stream: script = stream.read() if locals is None: locals = globals code = compile(script, filename, 'exec') exec(code, globals, locals)
[ "def", "_execfile", "(", "filename", ",", "globals", ",", "locals", "=", "None", ")", ":", "mode", "=", "'rb'", "# Python 2.6 compile requires LF for newlines, so use deprecated", "# Universal newlines support.", "if", "sys", ".", "version_info", "<", "(", "2", ",", "7", ")", ":", "mode", "+=", "'U'", "with", "open", "(", "filename", ",", "mode", ")", "as", "stream", ":", "script", "=", "stream", ".", "read", "(", ")", "if", "locals", "is", "None", ":", "locals", "=", "globals", "code", "=", "compile", "(", "script", ",", "filename", ",", "'exec'", ")", "exec", "(", "code", ",", "globals", ",", "locals", ")" ]
Python 3 implementation of execfile.
[ "Python", "3", "implementation", "of", "execfile", "." ]
c317505c5eca0e774fcf8b8c7f08801479a5099a
https://github.com/padfoot27/merlin/blob/c317505c5eca0e774fcf8b8c7f08801479a5099a/venv/lib/python2.7/site-packages/setuptools/sandbox.py#L32-L46
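_execfile is a private helper inside setuptools.sandbox, so rather than importing it this sketch inlines the Python 3 branch shown above and runs it against a throwaway script, to show how the supplied globals/locals namespaces get filled in.

import os
import tempfile

def _execfile(filename, globals, locals=None):
    # Python 3 branch of the helper above: read bytes, compile, exec in the given namespaces.
    with open(filename, 'rb') as stream:
        script = stream.read()
    if locals is None:
        locals = globals
    code = compile(script, filename, 'exec')
    exec(code, globals, locals)

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
    f.write("result = 6 * 7\n")
    path = f.name

ns = {'__file__': path, '__name__': '__main__'}
_execfile(path, ns)
print(ns['result'])   # 42
os.unlink(path)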
243,373
maxfischer2781/chainlet
chainlet/primitives/link.py
ChainLink.send
def send(self, value=None): """Send a single value to this element for processing""" if self.chain_fork: return self._send_fork(value) return self._send_flat(value)
python
def send(self, value=None): """Send a single value to this element for processing""" if self.chain_fork: return self._send_fork(value) return self._send_flat(value)
[ "def", "send", "(", "self", ",", "value", "=", "None", ")", ":", "if", "self", ".", "chain_fork", ":", "return", "self", ".", "_send_fork", "(", "value", ")", "return", "self", ".", "_send_flat", "(", "value", ")" ]
Send a single value to this element for processing
[ "Send", "a", "single", "value", "to", "this", "element", "for", "processing" ]
4e17f9992b4780bd0d9309202e2847df640bffe8
https://github.com/maxfischer2781/chainlet/blob/4e17f9992b4780bd0d9309202e2847df640bffe8/chainlet/primitives/link.py#L184-L188
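send() is only a dispatcher between the flat and forking code paths. The toy class below illustrates that contract with stand-in _send_flat/_send_fork bodies; it is not the real chainlet behaviour, whose private methods are defined elsewhere in the library.

class MiniLink:
    """Stand-in with the same send() dispatch as the record above."""
    chain_fork = False

    def _send_flat(self, value):
        return value * 2          # placeholder transformation

    def _send_fork(self, value):
        return [value, value]     # placeholder fan-out

    def send(self, value=None):
        if self.chain_fork:
            return self._send_fork(value)
        return self._send_flat(value)

link = MiniLink()
print(link.send(3))        # 6      -> flat chain: a single result
link.chain_fork = True
print(link.send(3))        # [3, 3] -> forking chain: an iterable of results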
243,374
kankiri/pabiana
pabiana/utils.py
read_interfaces
def read_interfaces(path: str) -> Interfaces: """Reads an Interfaces JSON file at the given path and returns it as a dictionary.""" with open(path, encoding='utf-8') as f: return json.load(f)
python
def read_interfaces(path: str) -> Interfaces: """Reads an Interfaces JSON file at the given path and returns it as a dictionary.""" with open(path, encoding='utf-8') as f: return json.load(f)
[ "def", "read_interfaces", "(", "path", ":", "str", ")", "->", "Interfaces", ":", "with", "open", "(", "path", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "return", "json", ".", "load", "(", "f", ")" ]
Reads an Interfaces JSON file at the given path and returns it as a dictionary.
[ "Reads", "an", "Interfaces", "JSON", "file", "at", "the", "given", "path", "and", "returns", "it", "as", "a", "dictionary", "." ]
74acfdd81e2a1cc411c37b9ee3d6905ce4b1a39b
https://github.com/kankiri/pabiana/blob/74acfdd81e2a1cc411c37b9ee3d6905ce4b1a39b/pabiana/utils.py#L7-L10
243,375
kankiri/pabiana
pabiana/utils.py
multiple
def multiple(layer: int, limit: int) -> Set[str]: """Returns a set of strings to be used as Slots with Pabianas default Clock. Args: layer: The layer in the hierarchy this Area is placed in. Technically, the number specifies how many of the Clocks signals are relevant to the Area. Between 1 and limit. limit: The number of layers of the hierarchy. """ return {str(x).zfill(2) for x in [2**x for x in range(limit)] if x % 2**(layer - 1) == 0}
python
def multiple(layer: int, limit: int) -> Set[str]: """Returns a set of strings to be used as Slots with Pabianas default Clock. Args: layer: The layer in the hierarchy this Area is placed in. Technically, the number specifies how many of the Clocks signals are relevant to the Area. Between 1 and limit. limit: The number of layers of the hierarchy. """ return {str(x).zfill(2) for x in [2**x for x in range(limit)] if x % 2**(layer - 1) == 0}
[ "def", "multiple", "(", "layer", ":", "int", ",", "limit", ":", "int", ")", "->", "Set", "[", "str", "]", ":", "return", "{", "str", "(", "x", ")", ".", "zfill", "(", "2", ")", "for", "x", "in", "[", "2", "**", "x", "for", "x", "in", "range", "(", "limit", ")", "]", "if", "x", "%", "2", "**", "(", "layer", "-", "1", ")", "==", "0", "}" ]
Returns a set of strings to be used as Slots with Pabianas default Clock. Args: layer: The layer in the hierarchy this Area is placed in. Technically, the number specifies how many of the Clocks signals are relevant to the Area. Between 1 and limit. limit: The number of layers of the hierarchy.
[ "Returns", "a", "set", "of", "strings", "to", "be", "used", "as", "Slots", "with", "Pabianas", "default", "Clock", "." ]
74acfdd81e2a1cc411c37b9ee3d6905ce4b1a39b
https://github.com/kankiri/pabiana/blob/74acfdd81e2a1cc411c37b9ee3d6905ce4b1a39b/pabiana/utils.py#L17-L26
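The set comprehension in multiple() is easiest to grasp from its outputs; the snippet reproduces the one-liner locally (so it runs without pabiana installed) and prints a few cases.

def multiple(layer, limit):
    # local copy of the record's one-liner
    return {str(x).zfill(2) for x in [2**x for x in range(limit)] if x % 2**(layer - 1) == 0}

print(sorted(multiple(1, 4)))   # ['01', '02', '04', '08'] -> bottom layer reacts to every clock signal
print(sorted(multiple(2, 4)))   # ['02', '04', '08']
print(sorted(multiple(3, 4)))   # ['04', '08']             -> higher layers react less often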
243,376
delfick/aws_syncr
aws_syncr/amazon/common.py
AmazonMixin.print_change
def print_change(self, symbol, typ, changes=None, document=None, **kwargs): """Print out a change""" values = ", ".join("{0}={1}".format(key, val) for key, val in sorted(kwargs.items())) print("{0} {1}({2})".format(symbol, typ, values)) if changes: for change in changes: print("\n".join("\t{0}".format(line) for line in change.split('\n'))) elif document: print("\n".join("\t{0}".format(line) for line in document.split('\n')))
python
def print_change(self, symbol, typ, changes=None, document=None, **kwargs): """Print out a change""" values = ", ".join("{0}={1}".format(key, val) for key, val in sorted(kwargs.items())) print("{0} {1}({2})".format(symbol, typ, values)) if changes: for change in changes: print("\n".join("\t{0}".format(line) for line in change.split('\n'))) elif document: print("\n".join("\t{0}".format(line) for line in document.split('\n')))
[ "def", "print_change", "(", "self", ",", "symbol", ",", "typ", ",", "changes", "=", "None", ",", "document", "=", "None", ",", "*", "*", "kwargs", ")", ":", "values", "=", "\", \"", ".", "join", "(", "\"{0}={1}\"", ".", "format", "(", "key", ",", "val", ")", "for", "key", ",", "val", "in", "sorted", "(", "kwargs", ".", "items", "(", ")", ")", ")", "print", "(", "\"{0} {1}({2})\"", ".", "format", "(", "symbol", ",", "typ", ",", "values", ")", ")", "if", "changes", ":", "for", "change", "in", "changes", ":", "print", "(", "\"\\n\"", ".", "join", "(", "\"\\t{0}\"", ".", "format", "(", "line", ")", "for", "line", "in", "change", ".", "split", "(", "'\\n'", ")", ")", ")", "elif", "document", ":", "print", "(", "\"\\n\"", ".", "join", "(", "\"\\t{0}\"", ".", "format", "(", "line", ")", "for", "line", "in", "document", ".", "split", "(", "'\\n'", ")", ")", ")" ]
Print out a change
[ "Print", "out", "a", "change" ]
8cd214b27c1eee98dfba4632cbb8bc0ae36356bd
https://github.com/delfick/aws_syncr/blob/8cd214b27c1eee98dfba4632cbb8bc0ae36356bd/aws_syncr/amazon/common.py#L48-L56
243,377
delfick/aws_syncr
aws_syncr/amazon/common.py
AmazonMixin.change
def change(self, symbol, typ, **kwargs): """Print out a change and then do the change if not doing a dry run""" self.print_change(symbol, typ, **kwargs) if not self.dry_run: try: yield except: raise else: self.amazon.changes = True
python
def change(self, symbol, typ, **kwargs): """Print out a change and then do the change if not doing a dry run""" self.print_change(symbol, typ, **kwargs) if not self.dry_run: try: yield except: raise else: self.amazon.changes = True
[ "def", "change", "(", "self", ",", "symbol", ",", "typ", ",", "*", "*", "kwargs", ")", ":", "self", ".", "print_change", "(", "symbol", ",", "typ", ",", "*", "*", "kwargs", ")", "if", "not", "self", ".", "dry_run", ":", "try", ":", "yield", "except", ":", "raise", "else", ":", "self", ".", "amazon", ".", "changes", "=", "True" ]
Print out a change and then do the change if not doing a dry run
[ "Print", "out", "a", "change", "and", "then", "do", "the", "change", "if", "not", "doing", "a", "dry", "run" ]
8cd214b27c1eee98dfba4632cbb8bc0ae36356bd
https://github.com/delfick/aws_syncr/blob/8cd214b27c1eee98dfba4632cbb8bc0ae36356bd/aws_syncr/amazon/common.py#L58-L67
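change() reads like the body of a contextlib.contextmanager (the decorator is not captured in this record, so that is an assumption). The self-contained sketch below shows the same announce-first, only-apply-when-not-dry-run idea with stand-in names rather than the actual aws_syncr classes.

from contextlib import contextmanager

class Recorder:
    def __init__(self, dry_run):
        self.dry_run = dry_run
        self.changes = False

    @contextmanager
    def change(self, symbol, typ, **kwargs):
        print(symbol, typ, kwargs)           # always announce the would-be change
        yield not self.dry_run               # tell the caller whether to really act
        if not self.dry_run:
            self.changes = True              # only flag changes when work was done

rec = Recorder(dry_run=True)
with rec.change('+', 'bucket', name='logs') as apply_it:
    if apply_it:
        pass   # the real AWS call would go here
print(rec.changes)   # False: nothing was applied in dry-run mode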
243,378
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
urlopen
def urlopen(url, timeout=20, redirects=None): """A minimal urlopen replacement hack that supports timeouts for http. Note that this supports GET only.""" scheme, host, path, params, query, frag = urlparse(url) if not scheme in ('http', 'https'): return urllib.urlopen(url) if params: path = '%s;%s' % (path, params) if query: path = '%s?%s' % (path, query) if frag: path = '%s#%s' % (path, frag) if scheme == 'https': # If ssl is not compiled into Python, you will not get an exception # until a conn.endheaders() call. We need to know sooner, so use # getattr. try: import M2Crypto except ImportError: if not hasattr(socket, 'ssl'): raise RuntimeError, 'no built-in SSL Support' conn = TimeoutHTTPS(host, None, timeout) else: ctx = M2Crypto.SSL.Context() ctx.set_session_timeout(timeout) conn = M2Crypto.httpslib.HTTPSConnection(host, ssl_context=ctx) conn.set_debuglevel(1) else: conn = TimeoutHTTP(host, None, timeout) conn.putrequest('GET', path) conn.putheader('Connection', 'close') conn.endheaders() response = None while 1: response = conn.getresponse() if response.status != 100: break conn._HTTPConnection__state = httplib._CS_REQ_SENT conn._HTTPConnection__response = None status = response.status # If we get an HTTP redirect, we will follow it automatically. if status >= 300 and status < 400: location = response.msg.getheader('location') if location is not None: response.close() if redirects is not None and redirects.has_key(location): raise RecursionError( 'Circular HTTP redirection detected.' ) if redirects is None: redirects = {} redirects[location] = 1 return urlopen(location, timeout, redirects) raise HTTPResponse(response) if not (status >= 200 and status < 300): raise HTTPResponse(response) body = StringIO(response.read()) response.close() return body
python
def urlopen(url, timeout=20, redirects=None): """A minimal urlopen replacement hack that supports timeouts for http. Note that this supports GET only.""" scheme, host, path, params, query, frag = urlparse(url) if not scheme in ('http', 'https'): return urllib.urlopen(url) if params: path = '%s;%s' % (path, params) if query: path = '%s?%s' % (path, query) if frag: path = '%s#%s' % (path, frag) if scheme == 'https': # If ssl is not compiled into Python, you will not get an exception # until a conn.endheaders() call. We need to know sooner, so use # getattr. try: import M2Crypto except ImportError: if not hasattr(socket, 'ssl'): raise RuntimeError, 'no built-in SSL Support' conn = TimeoutHTTPS(host, None, timeout) else: ctx = M2Crypto.SSL.Context() ctx.set_session_timeout(timeout) conn = M2Crypto.httpslib.HTTPSConnection(host, ssl_context=ctx) conn.set_debuglevel(1) else: conn = TimeoutHTTP(host, None, timeout) conn.putrequest('GET', path) conn.putheader('Connection', 'close') conn.endheaders() response = None while 1: response = conn.getresponse() if response.status != 100: break conn._HTTPConnection__state = httplib._CS_REQ_SENT conn._HTTPConnection__response = None status = response.status # If we get an HTTP redirect, we will follow it automatically. if status >= 300 and status < 400: location = response.msg.getheader('location') if location is not None: response.close() if redirects is not None and redirects.has_key(location): raise RecursionError( 'Circular HTTP redirection detected.' ) if redirects is None: redirects = {} redirects[location] = 1 return urlopen(location, timeout, redirects) raise HTTPResponse(response) if not (status >= 200 and status < 300): raise HTTPResponse(response) body = StringIO(response.read()) response.close() return body
[ "def", "urlopen", "(", "url", ",", "timeout", "=", "20", ",", "redirects", "=", "None", ")", ":", "scheme", ",", "host", ",", "path", ",", "params", ",", "query", ",", "frag", "=", "urlparse", "(", "url", ")", "if", "not", "scheme", "in", "(", "'http'", ",", "'https'", ")", ":", "return", "urllib", ".", "urlopen", "(", "url", ")", "if", "params", ":", "path", "=", "'%s;%s'", "%", "(", "path", ",", "params", ")", "if", "query", ":", "path", "=", "'%s?%s'", "%", "(", "path", ",", "query", ")", "if", "frag", ":", "path", "=", "'%s#%s'", "%", "(", "path", ",", "frag", ")", "if", "scheme", "==", "'https'", ":", "# If ssl is not compiled into Python, you will not get an exception", "# until a conn.endheaders() call. We need to know sooner, so use", "# getattr.", "try", ":", "import", "M2Crypto", "except", "ImportError", ":", "if", "not", "hasattr", "(", "socket", ",", "'ssl'", ")", ":", "raise", "RuntimeError", ",", "'no built-in SSL Support'", "conn", "=", "TimeoutHTTPS", "(", "host", ",", "None", ",", "timeout", ")", "else", ":", "ctx", "=", "M2Crypto", ".", "SSL", ".", "Context", "(", ")", "ctx", ".", "set_session_timeout", "(", "timeout", ")", "conn", "=", "M2Crypto", ".", "httpslib", ".", "HTTPSConnection", "(", "host", ",", "ssl_context", "=", "ctx", ")", "conn", ".", "set_debuglevel", "(", "1", ")", "else", ":", "conn", "=", "TimeoutHTTP", "(", "host", ",", "None", ",", "timeout", ")", "conn", ".", "putrequest", "(", "'GET'", ",", "path", ")", "conn", ".", "putheader", "(", "'Connection'", ",", "'close'", ")", "conn", ".", "endheaders", "(", ")", "response", "=", "None", "while", "1", ":", "response", "=", "conn", ".", "getresponse", "(", ")", "if", "response", ".", "status", "!=", "100", ":", "break", "conn", ".", "_HTTPConnection__state", "=", "httplib", ".", "_CS_REQ_SENT", "conn", ".", "_HTTPConnection__response", "=", "None", "status", "=", "response", ".", "status", "# If we get an HTTP redirect, we will follow it automatically.", "if", "status", ">=", "300", "and", "status", "<", "400", ":", "location", "=", "response", ".", "msg", ".", "getheader", "(", "'location'", ")", "if", "location", "is", "not", "None", ":", "response", ".", "close", "(", ")", "if", "redirects", "is", "not", "None", "and", "redirects", ".", "has_key", "(", "location", ")", ":", "raise", "RecursionError", "(", "'Circular HTTP redirection detected.'", ")", "if", "redirects", "is", "None", ":", "redirects", "=", "{", "}", "redirects", "[", "location", "]", "=", "1", "return", "urlopen", "(", "location", ",", "timeout", ",", "redirects", ")", "raise", "HTTPResponse", "(", "response", ")", "if", "not", "(", "status", ">=", "200", "and", "status", "<", "300", ")", ":", "raise", "HTTPResponse", "(", "response", ")", "body", "=", "StringIO", "(", "response", ".", "read", "(", ")", ")", "response", ".", "close", "(", ")", "return", "body" ]
A minimal urlopen replacement hack that supports timeouts for http. Note that this supports GET only.
[ "A", "minimal", "urlopen", "replacement", "hack", "that", "supports", "timeouts", "for", "http", ".", "Note", "that", "this", "supports", "GET", "only", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L147-L211
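The record above is Python 2 (httplib, StringIO, raise-with-comma) and special-cases M2Crypto. For comparison, a rough Python 3 sketch of the same behaviour (GET with a timeout, follow redirects, refuse redirect loops) using only the standard library; relative redirects, proxies and the M2Crypto path are deliberately not handled, and the error types here are stand-ins.

import http.client
from io import BytesIO
from urllib.parse import urlparse

def simple_get(url, timeout=20, seen=None):
    seen = seen or set()
    if url in seen:
        raise RuntimeError('Circular HTTP redirection detected.')
    seen.add(url)
    parts = urlparse(url)
    conn_cls = http.client.HTTPSConnection if parts.scheme == 'https' else http.client.HTTPConnection
    conn = conn_cls(parts.netloc, timeout=timeout)
    path = parts.path or '/'
    if parts.query:
        path += '?' + parts.query
    conn.request('GET', path, headers={'Connection': 'close'})
    resp = conn.getresponse()
    location = resp.getheader('Location')
    if 300 <= resp.status < 400 and location:
        conn.close()
        return simple_get(location, timeout, seen)   # absolute Location header assumed
    if not 200 <= resp.status < 300:
        raise RuntimeError('HTTP {0} for {1}'.format(resp.status, url))
    body = BytesIO(resp.read())
    conn.close()
    return body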
243,379
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.SOAPUriToVersion
def SOAPUriToVersion(self, uri): """Return the SOAP version related to an envelope uri.""" value = self._soap_uri_mapping.get(uri) if value is not None: return value raise ValueError( 'Unsupported SOAP envelope uri: %s' % uri )
python
def SOAPUriToVersion(self, uri): """Return the SOAP version related to an envelope uri.""" value = self._soap_uri_mapping.get(uri) if value is not None: return value raise ValueError( 'Unsupported SOAP envelope uri: %s' % uri )
[ "def", "SOAPUriToVersion", "(", "self", ",", "uri", ")", ":", "value", "=", "self", ".", "_soap_uri_mapping", ".", "get", "(", "uri", ")", "if", "value", "is", "not", "None", ":", "return", "value", "raise", "ValueError", "(", "'Unsupported SOAP envelope uri: %s'", "%", "uri", ")" ]
Return the SOAP version related to an envelope uri.
[ "Return", "the", "SOAP", "version", "related", "to", "an", "envelope", "uri", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L242-L249
243,380
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.WSDLUriToVersion
def WSDLUriToVersion(self, uri): """Return the WSDL version related to a WSDL namespace uri.""" value = self._wsdl_uri_mapping.get(uri) if value is not None: return value raise ValueError( 'Unsupported SOAP envelope uri: %s' % uri )
python
def WSDLUriToVersion(self, uri): """Return the WSDL version related to a WSDL namespace uri.""" value = self._wsdl_uri_mapping.get(uri) if value is not None: return value raise ValueError( 'Unsupported SOAP envelope uri: %s' % uri )
[ "def", "WSDLUriToVersion", "(", "self", ",", "uri", ")", ":", "value", "=", "self", ".", "_wsdl_uri_mapping", ".", "get", "(", "uri", ")", "if", "value", "is", "not", "None", ":", "return", "value", "raise", "ValueError", "(", "'Unsupported SOAP envelope uri: %s'", "%", "uri", ")" ]
Return the WSDL version related to a WSDL namespace uri.
[ "Return", "the", "WSDL", "version", "related", "to", "a", "WSDL", "namespace", "uri", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L350-L357
243,381
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.isElement
def isElement(self, node, name, nsuri=None): """Return true if the given node is an element with the given name and optional namespace uri.""" if node.nodeType != node.ELEMENT_NODE: return 0 return node.localName == name and \ (nsuri is None or self.nsUriMatch(node.namespaceURI, nsuri))
python
def isElement(self, node, name, nsuri=None): """Return true if the given node is an element with the given name and optional namespace uri.""" if node.nodeType != node.ELEMENT_NODE: return 0 return node.localName == name and \ (nsuri is None or self.nsUriMatch(node.namespaceURI, nsuri))
[ "def", "isElement", "(", "self", ",", "node", ",", "name", ",", "nsuri", "=", "None", ")", ":", "if", "node", ".", "nodeType", "!=", "node", ".", "ELEMENT_NODE", ":", "return", "0", "return", "node", ".", "localName", "==", "name", "and", "(", "nsuri", "is", "None", "or", "self", ".", "nsUriMatch", "(", "node", ".", "namespaceURI", ",", "nsuri", ")", ")" ]
Return true if the given node is an element with the given name and optional namespace uri.
[ "Return", "true", "if", "the", "given", "node", "is", "an", "element", "with", "the", "given", "name", "and", "optional", "namespace", "uri", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L410-L416
243,382
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getElement
def getElement(self, node, name, nsuri=None, default=join): """Return the first child of node with a matching name and namespace uri, or the default if one is provided.""" nsmatch = self.nsUriMatch ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if ((child.localName == name or name is None) and (nsuri is None or nsmatch(child.namespaceURI, nsuri)) ): return child if default is not join: return default raise KeyError, name
python
def getElement(self, node, name, nsuri=None, default=join): """Return the first child of node with a matching name and namespace uri, or the default if one is provided.""" nsmatch = self.nsUriMatch ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if ((child.localName == name or name is None) and (nsuri is None or nsmatch(child.namespaceURI, nsuri)) ): return child if default is not join: return default raise KeyError, name
[ "def", "getElement", "(", "self", ",", "node", ",", "name", ",", "nsuri", "=", "None", ",", "default", "=", "join", ")", ":", "nsmatch", "=", "self", ".", "nsUriMatch", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "for", "child", "in", "node", ".", "childNodes", ":", "if", "child", ".", "nodeType", "==", "ELEMENT_NODE", ":", "if", "(", "(", "child", ".", "localName", "==", "name", "or", "name", "is", "None", ")", "and", "(", "nsuri", "is", "None", "or", "nsmatch", "(", "child", ".", "namespaceURI", ",", "nsuri", ")", ")", ")", ":", "return", "child", "if", "default", "is", "not", "join", ":", "return", "default", "raise", "KeyError", ",", "name" ]
Return the first child of node with a matching name and namespace uri, or the default if one is provided.
[ "Return", "the", "first", "child", "of", "node", "with", "a", "matching", "name", "and", "namespace", "uri", "or", "the", "default", "if", "one", "is", "provided", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L418-L431
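These DOM helpers operate on xml.dom.minidom nodes, so the child-element lookup above can be demonstrated directly against a parsed document; namespace matching and the default-argument plumbing are left out to keep the sketch minimal.

import xml.dom.minidom as minidom

root = minidom.parseString('<root><item id="a"/><item id="b"/><other/></root>').documentElement

def first_child(node, name):
    # same walk as DOM.getElement, minus the nsuri matching and `default` handling
    for child in node.childNodes:
        if child.nodeType == child.ELEMENT_NODE and child.localName == name:
            return child
    raise KeyError(name)

print(first_child(root, 'item').getAttribute('id'))   # a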
243,383
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getElementById
def getElementById(self, node, id, default=join): """Return the first child of node matching an id reference.""" attrget = self.getAttr ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if attrget(child, 'id') == id: return child if default is not join: return default raise KeyError, name
python
def getElementById(self, node, id, default=join): """Return the first child of node matching an id reference.""" attrget = self.getAttr ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if attrget(child, 'id') == id: return child if default is not join: return default raise KeyError, name
[ "def", "getElementById", "(", "self", ",", "node", ",", "id", ",", "default", "=", "join", ")", ":", "attrget", "=", "self", ".", "getAttr", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "for", "child", "in", "node", ".", "childNodes", ":", "if", "child", ".", "nodeType", "==", "ELEMENT_NODE", ":", "if", "attrget", "(", "child", ",", "'id'", ")", "==", "id", ":", "return", "child", "if", "default", "is", "not", "join", ":", "return", "default", "raise", "KeyError", ",", "name" ]
Return the first child of node matching an id reference.
[ "Return", "the", "first", "child", "of", "node", "matching", "an", "id", "reference", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L433-L443
243,384
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getElements
def getElements(self, node, name, nsuri=None): """Return a sequence of the child elements of the given node that match the given name and optional namespace uri.""" nsmatch = self.nsUriMatch result = [] ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if ((child.localName == name or name is None) and ( (nsuri is None) or nsmatch(child.namespaceURI, nsuri))): result.append(child) return result
python
def getElements(self, node, name, nsuri=None): """Return a sequence of the child elements of the given node that match the given name and optional namespace uri.""" nsmatch = self.nsUriMatch result = [] ELEMENT_NODE = node.ELEMENT_NODE for child in node.childNodes: if child.nodeType == ELEMENT_NODE: if ((child.localName == name or name is None) and ( (nsuri is None) or nsmatch(child.namespaceURI, nsuri))): result.append(child) return result
[ "def", "getElements", "(", "self", ",", "node", ",", "name", ",", "nsuri", "=", "None", ")", ":", "nsmatch", "=", "self", ".", "nsUriMatch", "result", "=", "[", "]", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "for", "child", "in", "node", ".", "childNodes", ":", "if", "child", ".", "nodeType", "==", "ELEMENT_NODE", ":", "if", "(", "(", "child", ".", "localName", "==", "name", "or", "name", "is", "None", ")", "and", "(", "(", "nsuri", "is", "None", ")", "or", "nsmatch", "(", "child", ".", "namespaceURI", ",", "nsuri", ")", ")", ")", ":", "result", ".", "append", "(", "child", ")", "return", "result" ]
Return a sequence of the child elements of the given node that match the given name and optional namespace uri.
[ "Return", "a", "sequence", "of", "the", "child", "elements", "of", "the", "given", "node", "that", "match", "the", "given", "name", "and", "optional", "namespace", "uri", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L464-L475
243,385
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.hasAttr
def hasAttr(self, node, name, nsuri=None): """Return true if element has attribute with the given name and optional nsuri. If nsuri is not specified, returns true if an attribute exists with the given name with any namespace.""" if nsuri is None: if node.hasAttribute(name): return True return False return node.hasAttributeNS(nsuri, name)
python
def hasAttr(self, node, name, nsuri=None): """Return true if element has attribute with the given name and optional nsuri. If nsuri is not specified, returns true if an attribute exists with the given name with any namespace.""" if nsuri is None: if node.hasAttribute(name): return True return False return node.hasAttributeNS(nsuri, name)
[ "def", "hasAttr", "(", "self", ",", "node", ",", "name", ",", "nsuri", "=", "None", ")", ":", "if", "nsuri", "is", "None", ":", "if", "node", ".", "hasAttribute", "(", "name", ")", ":", "return", "True", "return", "False", "return", "node", ".", "hasAttributeNS", "(", "nsuri", ",", "name", ")" ]
Return true if element has attribute with the given name and optional nsuri. If nsuri is not specified, returns true if an attribute exists with the given name with any namespace.
[ "Return", "true", "if", "element", "has", "attribute", "with", "the", "given", "name", "and", "optional", "nsuri", ".", "If", "nsuri", "is", "not", "specified", "returns", "true", "if", "an", "attribute", "exists", "with", "the", "given", "name", "with", "any", "namespace", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L477-L485
243,386
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getAttr
def getAttr(self, node, name, nsuri=None, default=join): """Return the value of the attribute named 'name' with the optional nsuri, or the default if one is specified. If nsuri is not specified, an attribute that matches the given name will be returned regardless of namespace.""" if nsuri is None: result = node._attrs.get(name, None) if result is None: for item in node._attrsNS.keys(): if item[1] == name: result = node._attrsNS[item] break else: result = node._attrsNS.get((nsuri, name), None) if result is not None: return result.value if default is not join: return default return ''
python
def getAttr(self, node, name, nsuri=None, default=join): """Return the value of the attribute named 'name' with the optional nsuri, or the default if one is specified. If nsuri is not specified, an attribute that matches the given name will be returned regardless of namespace.""" if nsuri is None: result = node._attrs.get(name, None) if result is None: for item in node._attrsNS.keys(): if item[1] == name: result = node._attrsNS[item] break else: result = node._attrsNS.get((nsuri, name), None) if result is not None: return result.value if default is not join: return default return ''
[ "def", "getAttr", "(", "self", ",", "node", ",", "name", ",", "nsuri", "=", "None", ",", "default", "=", "join", ")", ":", "if", "nsuri", "is", "None", ":", "result", "=", "node", ".", "_attrs", ".", "get", "(", "name", ",", "None", ")", "if", "result", "is", "None", ":", "for", "item", "in", "node", ".", "_attrsNS", ".", "keys", "(", ")", ":", "if", "item", "[", "1", "]", "==", "name", ":", "result", "=", "node", ".", "_attrsNS", "[", "item", "]", "break", "else", ":", "result", "=", "node", ".", "_attrsNS", ".", "get", "(", "(", "nsuri", ",", "name", ")", ",", "None", ")", "if", "result", "is", "not", "None", ":", "return", "result", ".", "value", "if", "default", "is", "not", "join", ":", "return", "default", "return", "''" ]
Return the value of the attribute named 'name' with the optional nsuri, or the default if one is specified. If nsuri is not specified, an attribute that matches the given name will be returned regardless of namespace.
[ "Return", "the", "value", "of", "the", "attribute", "named", "name", "with", "the", "optional", "nsuri", "or", "the", "default", "if", "one", "is", "specified", ".", "If", "nsuri", "is", "not", "specified", "an", "attribute", "that", "matches", "the", "given", "name", "will", "be", "returned", "regardless", "of", "namespace", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L487-L505
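hasAttr and getAttr above ultimately wrap the standard minidom attribute API; a quick check of that underlying behaviour, including the empty-string result for a missing attribute:

import xml.dom.minidom as minidom

el = minidom.parseString('<node a="1"/>').documentElement
print(el.hasAttribute('a'), el.getAttribute('a'))         # True 1
print(el.hasAttribute('b'), repr(el.getAttribute('b')))   # False ''  (missing -> empty string)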
243,387
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getAttrs
def getAttrs(self, node): """Return a Collection of all attributes """ attrs = {} for k,v in node._attrs.items(): attrs[k] = v.value return attrs
python
def getAttrs(self, node): """Return a Collection of all attributes """ attrs = {} for k,v in node._attrs.items(): attrs[k] = v.value return attrs
[ "def", "getAttrs", "(", "self", ",", "node", ")", ":", "attrs", "=", "{", "}", "for", "k", ",", "v", "in", "node", ".", "_attrs", ".", "items", "(", ")", ":", "attrs", "[", "k", "]", "=", "v", ".", "value", "return", "attrs" ]
Return a Collection of all attributes
[ "Return", "a", "Collection", "of", "all", "attributes" ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L507-L513
243,388
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.getElementText
def getElementText(self, node, preserve_ws=None): """Return the text value of an xml element node. Leading and trailing whitespace is stripped from the value unless the preserve_ws flag is passed with a true value.""" result = [] for child in node.childNodes: nodetype = child.nodeType if nodetype == child.TEXT_NODE or \ nodetype == child.CDATA_SECTION_NODE: result.append(child.nodeValue) value = join(result, '') if preserve_ws is None: value = strip(value) return value
python
def getElementText(self, node, preserve_ws=None): """Return the text value of an xml element node. Leading and trailing whitespace is stripped from the value unless the preserve_ws flag is passed with a true value.""" result = [] for child in node.childNodes: nodetype = child.nodeType if nodetype == child.TEXT_NODE or \ nodetype == child.CDATA_SECTION_NODE: result.append(child.nodeValue) value = join(result, '') if preserve_ws is None: value = strip(value) return value
[ "def", "getElementText", "(", "self", ",", "node", ",", "preserve_ws", "=", "None", ")", ":", "result", "=", "[", "]", "for", "child", "in", "node", ".", "childNodes", ":", "nodetype", "=", "child", ".", "nodeType", "if", "nodetype", "==", "child", ".", "TEXT_NODE", "or", "nodetype", "==", "child", ".", "CDATA_SECTION_NODE", ":", "result", ".", "append", "(", "child", ".", "nodeValue", ")", "value", "=", "join", "(", "result", ",", "''", ")", "if", "preserve_ws", "is", "None", ":", "value", "=", "strip", "(", "value", ")", "return", "value" ]
Return the text value of an xml element node. Leading and trailing whitespace is stripped from the value unless the preserve_ws flag is passed with a true value.
[ "Return", "the", "text", "value", "of", "an", "xml", "element", "node", ".", "Leading", "and", "trailing", "whitespace", "is", "stripped", "from", "the", "value", "unless", "the", "preserve_ws", "flag", "is", "passed", "with", "a", "true", "value", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L515-L528
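getElementText only gathers direct text and CDATA children, so text inside nested elements is skipped. A stand-alone check of that behaviour against minidom:

import xml.dom.minidom as minidom

node = minidom.parseString('<p> hello <b>nested</b> world </p>').documentElement
parts = [c.nodeValue for c in node.childNodes if c.nodeType == c.TEXT_NODE]
print(''.join(parts).strip())   # 'hello  world' (the <b> child's own text is not included)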
243,389
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.findNamespaceURI
def findNamespaceURI(self, prefix, node): """Find a namespace uri given a prefix and a context node.""" attrkey = (self.NS_XMLNS, prefix) DOCUMENT_NODE = node.DOCUMENT_NODE ELEMENT_NODE = node.ELEMENT_NODE while 1: if node is None: raise DOMException('Value for prefix %s not found.' % prefix) if node.nodeType != ELEMENT_NODE: node = node.parentNode continue result = node._attrsNS.get(attrkey, None) if result is not None: return result.value if hasattr(node, '__imported__'): raise DOMException('Value for prefix %s not found.' % prefix) node = node.parentNode if node.nodeType == DOCUMENT_NODE: raise DOMException('Value for prefix %s not found.' % prefix)
python
def findNamespaceURI(self, prefix, node): """Find a namespace uri given a prefix and a context node.""" attrkey = (self.NS_XMLNS, prefix) DOCUMENT_NODE = node.DOCUMENT_NODE ELEMENT_NODE = node.ELEMENT_NODE while 1: if node is None: raise DOMException('Value for prefix %s not found.' % prefix) if node.nodeType != ELEMENT_NODE: node = node.parentNode continue result = node._attrsNS.get(attrkey, None) if result is not None: return result.value if hasattr(node, '__imported__'): raise DOMException('Value for prefix %s not found.' % prefix) node = node.parentNode if node.nodeType == DOCUMENT_NODE: raise DOMException('Value for prefix %s not found.' % prefix)
[ "def", "findNamespaceURI", "(", "self", ",", "prefix", ",", "node", ")", ":", "attrkey", "=", "(", "self", ".", "NS_XMLNS", ",", "prefix", ")", "DOCUMENT_NODE", "=", "node", ".", "DOCUMENT_NODE", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "while", "1", ":", "if", "node", "is", "None", ":", "raise", "DOMException", "(", "'Value for prefix %s not found.'", "%", "prefix", ")", "if", "node", ".", "nodeType", "!=", "ELEMENT_NODE", ":", "node", "=", "node", ".", "parentNode", "continue", "result", "=", "node", ".", "_attrsNS", ".", "get", "(", "attrkey", ",", "None", ")", "if", "result", "is", "not", "None", ":", "return", "result", ".", "value", "if", "hasattr", "(", "node", ",", "'__imported__'", ")", ":", "raise", "DOMException", "(", "'Value for prefix %s not found.'", "%", "prefix", ")", "node", "=", "node", ".", "parentNode", "if", "node", ".", "nodeType", "==", "DOCUMENT_NODE", ":", "raise", "DOMException", "(", "'Value for prefix %s not found.'", "%", "prefix", ")" ]
Find a namespace uri given a prefix and a context node.
[ "Find", "a", "namespace", "uri", "given", "a", "prefix", "and", "a", "context", "node", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L530-L548
243,390
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.findTargetNS
def findTargetNS(self, node): """Return the defined target namespace uri for the given node.""" attrget = self.getAttr attrkey = (self.NS_XMLNS, 'xmlns') DOCUMENT_NODE = node.DOCUMENT_NODE ELEMENT_NODE = node.ELEMENT_NODE while 1: if node.nodeType != ELEMENT_NODE: node = node.parentNode continue result = attrget(node, 'targetNamespace', default=None) if result is not None: return result node = node.parentNode if node.nodeType == DOCUMENT_NODE: raise DOMException('Cannot determine target namespace.')
python
def findTargetNS(self, node): """Return the defined target namespace uri for the given node.""" attrget = self.getAttr attrkey = (self.NS_XMLNS, 'xmlns') DOCUMENT_NODE = node.DOCUMENT_NODE ELEMENT_NODE = node.ELEMENT_NODE while 1: if node.nodeType != ELEMENT_NODE: node = node.parentNode continue result = attrget(node, 'targetNamespace', default=None) if result is not None: return result node = node.parentNode if node.nodeType == DOCUMENT_NODE: raise DOMException('Cannot determine target namespace.')
[ "def", "findTargetNS", "(", "self", ",", "node", ")", ":", "attrget", "=", "self", ".", "getAttr", "attrkey", "=", "(", "self", ".", "NS_XMLNS", ",", "'xmlns'", ")", "DOCUMENT_NODE", "=", "node", ".", "DOCUMENT_NODE", "ELEMENT_NODE", "=", "node", ".", "ELEMENT_NODE", "while", "1", ":", "if", "node", ".", "nodeType", "!=", "ELEMENT_NODE", ":", "node", "=", "node", ".", "parentNode", "continue", "result", "=", "attrget", "(", "node", ",", "'targetNamespace'", ",", "default", "=", "None", ")", "if", "result", "is", "not", "None", ":", "return", "result", "node", "=", "node", ".", "parentNode", "if", "node", ".", "nodeType", "==", "DOCUMENT_NODE", ":", "raise", "DOMException", "(", "'Cannot determine target namespace.'", ")" ]
Return the defined target namespace uri for the given node.
[ "Return", "the", "defined", "target", "namespace", "uri", "for", "the", "given", "node", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L568-L583
243,391
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.nsUriMatch
def nsUriMatch(self, value, wanted, strict=0, tt=type(())): """Return a true value if two namespace uri values match.""" if value == wanted or (type(wanted) is tt) and value in wanted: return 1 if not strict and value is not None: wanted = type(wanted) is tt and wanted or (wanted,) value = value[-1:] != '/' and value or value[:-1] for item in wanted: if item == value or item[:-1] == value: return 1 return 0
python
def nsUriMatch(self, value, wanted, strict=0, tt=type(())): """Return a true value if two namespace uri values match.""" if value == wanted or (type(wanted) is tt) and value in wanted: return 1 if not strict and value is not None: wanted = type(wanted) is tt and wanted or (wanted,) value = value[-1:] != '/' and value or value[:-1] for item in wanted: if item == value or item[:-1] == value: return 1 return 0
[ "def", "nsUriMatch", "(", "self", ",", "value", ",", "wanted", ",", "strict", "=", "0", ",", "tt", "=", "type", "(", "(", ")", ")", ")", ":", "if", "value", "==", "wanted", "or", "(", "type", "(", "wanted", ")", "is", "tt", ")", "and", "value", "in", "wanted", ":", "return", "1", "if", "not", "strict", "and", "value", "is", "not", "None", ":", "wanted", "=", "type", "(", "wanted", ")", "is", "tt", "and", "wanted", "or", "(", "wanted", ",", ")", "value", "=", "value", "[", "-", "1", ":", "]", "!=", "'/'", "and", "value", "or", "value", "[", ":", "-", "1", "]", "for", "item", "in", "wanted", ":", "if", "item", "==", "value", "or", "item", "[", ":", "-", "1", "]", "==", "value", ":", "return", "1", "return", "0" ]
Return a true value if two namespace uri values match.
[ "Return", "a", "true", "value", "if", "two", "namespace", "uri", "values", "match", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L615-L625
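The non-strict branch of nsUriMatch treats a single trailing slash as insignificant and accepts a tuple of candidate URIs. Reproducing the comparison locally, with the and/or idiom rewritten as conditional expressions, makes that easier to see:

def ns_uri_match(value, wanted, strict=0):
    if value == wanted or (isinstance(wanted, tuple) and value in wanted):
        return 1
    if not strict and value is not None:
        wanted = wanted if isinstance(wanted, tuple) else (wanted,)
        value = value[:-1] if value.endswith('/') else value
        for item in wanted:
            if item == value or item[:-1] == value:
                return 1
    return 0

print(ns_uri_match('http://schemas.xmlsoap.org/wsdl/', 'http://schemas.xmlsoap.org/wsdl'))  # 1
print(ns_uri_match('urn:a', ('urn:b', 'urn:a')))                                            # 1
print(ns_uri_match('urn:a', 'urn:b'))                                                       # 0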
243,392
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.createDocument
def createDocument(self, nsuri, qname, doctype=None): """Create a new writable DOM document object.""" impl = xml.dom.minidom.getDOMImplementation() return impl.createDocument(nsuri, qname, doctype)
python
def createDocument(self, nsuri, qname, doctype=None): """Create a new writable DOM document object.""" impl = xml.dom.minidom.getDOMImplementation() return impl.createDocument(nsuri, qname, doctype)
[ "def", "createDocument", "(", "self", ",", "nsuri", ",", "qname", ",", "doctype", "=", "None", ")", ":", "impl", "=", "xml", ".", "dom", ".", "minidom", ".", "getDOMImplementation", "(", ")", "return", "impl", ".", "createDocument", "(", "nsuri", ",", "qname", ",", "doctype", ")" ]
Create a new writable DOM document object.
[ "Create", "a", "new", "writable", "DOM", "document", "object", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L627-L630
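createDocument here is a thin pass-through; the underlying stdlib call it delegates to looks like this:

import xml.dom.minidom as minidom

impl = minidom.getDOMImplementation()
doc = impl.createDocument('http://schemas.xmlsoap.org/soap/envelope/', 'SOAP-ENV:Envelope', None)
print(doc.documentElement.tagName)   # SOAP-ENV:Envelope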
243,393
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
DOM.loadFromURL
def loadFromURL(self, url): """Load an xml file from a URL and return a DOM document.""" if isfile(url) is True: file = open(url, 'r') else: file = urlopen(url) try: result = self.loadDocument(file) except Exception, ex: file.close() raise ParseError(('Failed to load document %s' %url,) + ex.args) else: file.close() return result
python
def loadFromURL(self, url): """Load an xml file from a URL and return a DOM document.""" if isfile(url) is True: file = open(url, 'r') else: file = urlopen(url) try: result = self.loadDocument(file) except Exception, ex: file.close() raise ParseError(('Failed to load document %s' %url,) + ex.args) else: file.close() return result
[ "def", "loadFromURL", "(", "self", ",", "url", ")", ":", "if", "isfile", "(", "url", ")", "is", "True", ":", "file", "=", "open", "(", "url", ",", "'r'", ")", "else", ":", "file", "=", "urlopen", "(", "url", ")", "try", ":", "result", "=", "self", ".", "loadDocument", "(", "file", ")", "except", "Exception", ",", "ex", ":", "file", ".", "close", "(", ")", "raise", "ParseError", "(", "(", "'Failed to load document %s'", "%", "url", ",", ")", "+", "ex", ".", "args", ")", "else", ":", "file", ".", "close", "(", ")", "return", "result" ]
Load an xml file from a URL and return a DOM document.
[ "Load", "an", "xml", "file", "from", "a", "URL", "and", "return", "a", "DOM", "document", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L637-L651
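loadFromURL mixes local paths and URLs and leans on the Python 2 urlopen shim defined earlier in this module. A rough Python 3 equivalent, without the ParseError wrapping or timeout handling, might be:

import os
import xml.dom.minidom as minidom
from urllib.request import urlopen

def load_xml(source):
    if os.path.isfile(source):
        with open(source, 'rb') as fh:
            return minidom.parse(fh)
    with urlopen(source) as fh:   # naive network fetch, no retries
        return minidom.parse(fh)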
243,394
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
ElementProxy._getUniquePrefix
def _getUniquePrefix(self): '''I guess we need to resolve all potential prefixes because when the current node is attached it copies the namespaces into the parent node. ''' while 1: self._indx += 1 prefix = 'ns%d' %self._indx try: self._dom.findNamespaceURI(prefix, self._getNode()) except DOMException, ex: break return prefix
python
def _getUniquePrefix(self): '''I guess we need to resolve all potential prefixes because when the current node is attached it copies the namespaces into the parent node. ''' while 1: self._indx += 1 prefix = 'ns%d' %self._indx try: self._dom.findNamespaceURI(prefix, self._getNode()) except DOMException, ex: break return prefix
[ "def", "_getUniquePrefix", "(", "self", ")", ":", "while", "1", ":", "self", ".", "_indx", "+=", "1", "prefix", "=", "'ns%d'", "%", "self", ".", "_indx", "try", ":", "self", ".", "_dom", ".", "findNamespaceURI", "(", "prefix", ",", "self", ".", "_getNode", "(", ")", ")", "except", "DOMException", ",", "ex", ":", "break", "return", "prefix" ]
I guess we need to resolve all potential prefixes because when the current node is attached it copies the namespaces into the parent node.
[ "I", "guess", "we", "need", "to", "resolve", "all", "potential", "prefixes", "because", "when", "the", "current", "node", "is", "attached", "it", "copies", "the", "namespaces", "into", "the", "parent", "node", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L828-L840
243,395
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/Utility.py
ElementProxy.createDocument
def createDocument(self, namespaceURI, localName, doctype=None): '''If specified must be a SOAP envelope, else may contruct an empty document. ''' prefix = self._soap_env_prefix if namespaceURI == self.reserved_ns[prefix]: qualifiedName = '%s:%s' %(prefix,localName) elif namespaceURI is localName is None: self.node = self._dom.createDocument(None,None,None) return else: raise KeyError, 'only support creation of document in %s' %self.reserved_ns[prefix] document = self._dom.createDocument(nsuri=namespaceURI, qname=qualifiedName, doctype=doctype) self.node = document.childNodes[0] #set up reserved namespace attributes for prefix,nsuri in self.reserved_ns.items(): self._setAttributeNS(namespaceURI=self._xmlns_nsuri, qualifiedName='%s:%s' %(self._xmlns_prefix,prefix), value=nsuri)
python
def createDocument(self, namespaceURI, localName, doctype=None): '''If specified must be a SOAP envelope, else may contruct an empty document. ''' prefix = self._soap_env_prefix if namespaceURI == self.reserved_ns[prefix]: qualifiedName = '%s:%s' %(prefix,localName) elif namespaceURI is localName is None: self.node = self._dom.createDocument(None,None,None) return else: raise KeyError, 'only support creation of document in %s' %self.reserved_ns[prefix] document = self._dom.createDocument(nsuri=namespaceURI, qname=qualifiedName, doctype=doctype) self.node = document.childNodes[0] #set up reserved namespace attributes for prefix,nsuri in self.reserved_ns.items(): self._setAttributeNS(namespaceURI=self._xmlns_nsuri, qualifiedName='%s:%s' %(self._xmlns_prefix,prefix), value=nsuri)
[ "def", "createDocument", "(", "self", ",", "namespaceURI", ",", "localName", ",", "doctype", "=", "None", ")", ":", "prefix", "=", "self", ".", "_soap_env_prefix", "if", "namespaceURI", "==", "self", ".", "reserved_ns", "[", "prefix", "]", ":", "qualifiedName", "=", "'%s:%s'", "%", "(", "prefix", ",", "localName", ")", "elif", "namespaceURI", "is", "localName", "is", "None", ":", "self", ".", "node", "=", "self", ".", "_dom", ".", "createDocument", "(", "None", ",", "None", ",", "None", ")", "return", "else", ":", "raise", "KeyError", ",", "'only support creation of document in %s'", "%", "self", ".", "reserved_ns", "[", "prefix", "]", "document", "=", "self", ".", "_dom", ".", "createDocument", "(", "nsuri", "=", "namespaceURI", ",", "qname", "=", "qualifiedName", ",", "doctype", "=", "doctype", ")", "self", ".", "node", "=", "document", ".", "childNodes", "[", "0", "]", "#set up reserved namespace attributes", "for", "prefix", ",", "nsuri", "in", "self", ".", "reserved_ns", ".", "items", "(", ")", ":", "self", ".", "_setAttributeNS", "(", "namespaceURI", "=", "self", ".", "_xmlns_nsuri", ",", "qualifiedName", "=", "'%s:%s'", "%", "(", "self", ".", "_xmlns_prefix", ",", "prefix", ")", ",", "value", "=", "nsuri", ")" ]
If specified must be a SOAP envelope, else may construct an empty document.
[ "If", "specified", "must", "be", "a", "SOAP", "envelope", "else", "may", "construct", "an", "empty", "document", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/Utility.py#L934-L954
243,396
callowayproject/Transmogrify
transmogrify/autodetect/__init__.py
face_and_energy_detector
def face_and_energy_detector(image_path, detect_faces=True): """ Finds faces and energy in an image """ source = Image.open(image_path) work_width = 800 if source.mode != 'RGB' or source.bits != 8: source24 = source.convert('RGB') else: source24 = source.copy() grayscaleRMY = source24.convert('L', (0.5, 0.419, 0.081, 0)) w = min(grayscaleRMY.size[0], work_width) h = w * grayscaleRMY.size[1] / grayscaleRMY.size[0] b = grayscaleRMY.resize((w, h), Image.BICUBIC) # b.save('step2.jpg') if detect_faces: info = do_face_detection(image_path) if info: return CropInfo(gravity=info) b = b.filter(ImageFilter.GaussianBlur(7)) # b.save('step3.jpg') sobelXfilter = ImageFilter.Kernel((3, 3), (1, 0, -1, 2, 0, -2, 1, 0, -1), -.5) sobelYfilter = ImageFilter.Kernel((3, 3), (1, 2, 1, 0, 0, 0, -1, -2, -1), -.5) b = ImageChops.lighter(b.filter(sobelXfilter), b.filter(sobelYfilter)) b = b.filter(ImageFilter.FIND_EDGES) # b.save('step4.jpg') ec = energy_center(b) return CropInfo(gravity=ec)
python
def face_and_energy_detector(image_path, detect_faces=True): """ Finds faces and energy in an image """ source = Image.open(image_path) work_width = 800 if source.mode != 'RGB' or source.bits != 8: source24 = source.convert('RGB') else: source24 = source.copy() grayscaleRMY = source24.convert('L', (0.5, 0.419, 0.081, 0)) w = min(grayscaleRMY.size[0], work_width) h = w * grayscaleRMY.size[1] / grayscaleRMY.size[0] b = grayscaleRMY.resize((w, h), Image.BICUBIC) # b.save('step2.jpg') if detect_faces: info = do_face_detection(image_path) if info: return CropInfo(gravity=info) b = b.filter(ImageFilter.GaussianBlur(7)) # b.save('step3.jpg') sobelXfilter = ImageFilter.Kernel((3, 3), (1, 0, -1, 2, 0, -2, 1, 0, -1), -.5) sobelYfilter = ImageFilter.Kernel((3, 3), (1, 2, 1, 0, 0, 0, -1, -2, -1), -.5) b = ImageChops.lighter(b.filter(sobelXfilter), b.filter(sobelYfilter)) b = b.filter(ImageFilter.FIND_EDGES) # b.save('step4.jpg') ec = energy_center(b) return CropInfo(gravity=ec)
[ "def", "face_and_energy_detector", "(", "image_path", ",", "detect_faces", "=", "True", ")", ":", "source", "=", "Image", ".", "open", "(", "image_path", ")", "work_width", "=", "800", "if", "source", ".", "mode", "!=", "'RGB'", "or", "source", ".", "bits", "!=", "8", ":", "source24", "=", "source", ".", "convert", "(", "'RGB'", ")", "else", ":", "source24", "=", "source", ".", "copy", "(", ")", "grayscaleRMY", "=", "source24", ".", "convert", "(", "'L'", ",", "(", "0.5", ",", "0.419", ",", "0.081", ",", "0", ")", ")", "w", "=", "min", "(", "grayscaleRMY", ".", "size", "[", "0", "]", ",", "work_width", ")", "h", "=", "w", "*", "grayscaleRMY", ".", "size", "[", "1", "]", "/", "grayscaleRMY", ".", "size", "[", "0", "]", "b", "=", "grayscaleRMY", ".", "resize", "(", "(", "w", ",", "h", ")", ",", "Image", ".", "BICUBIC", ")", "# b.save('step2.jpg')", "if", "detect_faces", ":", "info", "=", "do_face_detection", "(", "image_path", ")", "if", "info", ":", "return", "CropInfo", "(", "gravity", "=", "info", ")", "b", "=", "b", ".", "filter", "(", "ImageFilter", ".", "GaussianBlur", "(", "7", ")", ")", "# b.save('step3.jpg')", "sobelXfilter", "=", "ImageFilter", ".", "Kernel", "(", "(", "3", ",", "3", ")", ",", "(", "1", ",", "0", ",", "-", "1", ",", "2", ",", "0", ",", "-", "2", ",", "1", ",", "0", ",", "-", "1", ")", ",", "-", ".5", ")", "sobelYfilter", "=", "ImageFilter", ".", "Kernel", "(", "(", "3", ",", "3", ")", ",", "(", "1", ",", "2", ",", "1", ",", "0", ",", "0", ",", "0", ",", "-", "1", ",", "-", "2", ",", "-", "1", ")", ",", "-", ".5", ")", "b", "=", "ImageChops", ".", "lighter", "(", "b", ".", "filter", "(", "sobelXfilter", ")", ",", "b", ".", "filter", "(", "sobelYfilter", ")", ")", "b", "=", "b", ".", "filter", "(", "ImageFilter", ".", "FIND_EDGES", ")", "# b.save('step4.jpg')", "ec", "=", "energy_center", "(", "b", ")", "return", "CropInfo", "(", "gravity", "=", "ec", ")" ]
Finds faces and energy in an image
[ "Finds", "faces", "and", "energy", "in", "an", "image" ]
f1f891b8b923b3a1ede5eac7f60531c1c472379e
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/autodetect/__init__.py#L136-L164
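The detector leans on PIL: grayscale conversion, Gaussian blur, then an edge map built from a pair of Sobel kernels. The fragment below reproduces just that edge-energy step on a synthetic image; face detection and the library-specific energy_center/CropInfo pieces are left out.

from PIL import Image, ImageChops, ImageFilter

# synthetic grayscale test image: a dark square on a light background
img = Image.new('L', (200, 200), 220)
img.paste(40, (60, 60, 140, 140))

blurred = img.filter(ImageFilter.GaussianBlur(7))
sobel_x = ImageFilter.Kernel((3, 3), (1, 0, -1, 2, 0, -2, 1, 0, -1), -.5)
sobel_y = ImageFilter.Kernel((3, 3), (1, 2, 1, 0, 0, 0, -1, -2, -1), -.5)
edges = ImageChops.lighter(blurred.filter(sobel_x), blurred.filter(sobel_y))
print(edges.getextrema())   # (min, max) of the edge map; the response concentrates around the square's border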
243,397
callowayproject/Transmogrify
transmogrify/autodetect/__init__.py
get_crop_size
def get_crop_size(crop_w, crop_h, image_w, image_h): """ Determines the correct scale size for the image when img w == crop w and img h > crop h Use these dimensions when img h == crop h and img w > crop w Use these dimensions """ scale1 = float(crop_w) / float(image_w) scale2 = float(crop_h) / float(image_h) scale1_w = crop_w # int(round(img_w * scale1)) scale1_h = int(round(image_h * scale1)) scale2_w = int(round(image_w * scale2)) scale2_h = crop_h # int(round(img_h * scale2)) if scale1_h > crop_h: # scale1_w == crop_w # crop on vertical return (scale1_w, scale1_h) else: # scale2_h == crop_h and scale2_w > crop_w #crop on horizontal return (scale2_w, scale2_h)
python
def get_crop_size(crop_w, crop_h, image_w, image_h): """ Determines the correct scale size for the image when img w == crop w and img h > crop h Use these dimensions when img h == crop h and img w > crop w Use these dimensions """ scale1 = float(crop_w) / float(image_w) scale2 = float(crop_h) / float(image_h) scale1_w = crop_w # int(round(img_w * scale1)) scale1_h = int(round(image_h * scale1)) scale2_w = int(round(image_w * scale2)) scale2_h = crop_h # int(round(img_h * scale2)) if scale1_h > crop_h: # scale1_w == crop_w # crop on vertical return (scale1_w, scale1_h) else: # scale2_h == crop_h and scale2_w > crop_w #crop on horizontal return (scale2_w, scale2_h)
[ "def", "get_crop_size", "(", "crop_w", ",", "crop_h", ",", "image_w", ",", "image_h", ")", ":", "scale1", "=", "float", "(", "crop_w", ")", "/", "float", "(", "image_w", ")", "scale2", "=", "float", "(", "crop_h", ")", "/", "float", "(", "image_h", ")", "scale1_w", "=", "crop_w", "# int(round(img_w * scale1))", "scale1_h", "=", "int", "(", "round", "(", "image_h", "*", "scale1", ")", ")", "scale2_w", "=", "int", "(", "round", "(", "image_w", "*", "scale2", ")", ")", "scale2_h", "=", "crop_h", "# int(round(img_h * scale2))", "if", "scale1_h", ">", "crop_h", ":", "# scale1_w == crop_w", "# crop on vertical", "return", "(", "scale1_w", ",", "scale1_h", ")", "else", ":", "# scale2_h == crop_h and scale2_w > crop_w", "#crop on horizontal", "return", "(", "scale2_w", ",", "scale2_h", ")" ]
Determines the correct scale size for the image

when img w == crop w and img h > crop h
    Use these dimensions
when img h == crop h and img w > crop w
    Use these dimensions
[ "Determines", "the", "correct", "scale", "size", "for", "the", "image" ]
f1f891b8b923b3a1ede5eac7f60531c1c472379e
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/autodetect/__init__.py#L167-L189
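The branchy scale arithmetic above is easier to follow with numbers. For a 1600x900 source and a 400x300 crop, scaling to the crop width gives 400x225, which is too short, so the height-based scale wins:

crop_w, crop_h, image_w, image_h = 400, 300, 1600, 900

scale_by_width = crop_w / image_w         # 0.25    -> 400 x 225
scale_by_height = crop_h / image_h        # 0.3333  -> 533 x 300

if round(image_h * scale_by_width) > crop_h:
    scaled = (crop_w, round(image_h * scale_by_width))   # tall enough: crop vertically later
else:
    scaled = (round(image_w * scale_by_height), crop_h)  # otherwise: crop horizontally later

print(scaled)   # (533, 300)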
243,398
callowayproject/Transmogrify
transmogrify/autodetect/__init__.py
calc_subrange
def calc_subrange(range_max, sub_amount, weight): """ return the start and stop points that are sub_amount distance apart and contain weight, without going outside the provided range """ if weight > range_max or sub_amount > range_max: raise ValueError("sub_amount and weight must be less than range_max. range_max %s, sub_amount %s, weight %s" % (range_max, sub_amount, weight)) half_amount = sub_amount / 2 bottom = weight - half_amount top = bottom + sub_amount if top <= range_max and bottom >= 0: return (bottom, top) elif weight > range_max / 2: # weight is on the upper half, start at the max and go down return (range_max - sub_amount, range_max) else: # weight is on the lower have, start at 0 and go up return (0, sub_amount)
python
def calc_subrange(range_max, sub_amount, weight): """ return the start and stop points that are sub_amount distance apart and contain weight, without going outside the provided range """ if weight > range_max or sub_amount > range_max: raise ValueError("sub_amount and weight must be less than range_max. range_max %s, sub_amount %s, weight %s" % (range_max, sub_amount, weight)) half_amount = sub_amount / 2 bottom = weight - half_amount top = bottom + sub_amount if top <= range_max and bottom >= 0: return (bottom, top) elif weight > range_max / 2: # weight is on the upper half, start at the max and go down return (range_max - sub_amount, range_max) else: # weight is on the lower have, start at 0 and go up return (0, sub_amount)
[ "def", "calc_subrange", "(", "range_max", ",", "sub_amount", ",", "weight", ")", ":", "if", "weight", ">", "range_max", "or", "sub_amount", ">", "range_max", ":", "raise", "ValueError", "(", "\"sub_amount and weight must be less than range_max. range_max %s, sub_amount %s, weight %s\"", "%", "(", "range_max", ",", "sub_amount", ",", "weight", ")", ")", "half_amount", "=", "sub_amount", "/", "2", "bottom", "=", "weight", "-", "half_amount", "top", "=", "bottom", "+", "sub_amount", "if", "top", "<=", "range_max", "and", "bottom", ">=", "0", ":", "return", "(", "bottom", ",", "top", ")", "elif", "weight", ">", "range_max", "/", "2", ":", "# weight is on the upper half, start at the max and go down", "return", "(", "range_max", "-", "sub_amount", ",", "range_max", ")", "else", ":", "# weight is on the lower have, start at 0 and go up", "return", "(", "0", ",", "sub_amount", ")" ]
return the start and stop points that are sub_amount distance apart and contain weight, without going outside the provided range
[ "return", "the", "start", "and", "stop", "points", "that", "are", "sub_amount", "distance", "apart", "and", "contain", "weight", "without", "going", "outside", "the", "provided", "range" ]
f1f891b8b923b3a1ede5eac7f60531c1c472379e
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/autodetect/__init__.py#L192-L209
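A quick worked example of the subrange rule above, re-implemented so it runs on its own (the ValueError guard is omitted and the function name and sample numbers are illustrative): the window is centred on the weighted point when it fits, and otherwise clamped to whichever end of the range the point is nearer.

# Standalone illustration of the clamped-window rule from calc_subrange above.

def clamped_window(range_max, sub_amount, weight):
    bottom = weight - sub_amount // 2
    top = bottom + sub_amount
    if bottom >= 0 and top <= range_max:
        return (bottom, top)                         # centred window fits
    if weight > range_max / 2:
        return (range_max - sub_amount, range_max)   # clamp to the far edge
    return (0, sub_amount)                           # clamp to the near edge


if __name__ == "__main__":
    print(clamped_window(133, 100, 66))   # -> (16, 116): centred on the weight
    print(clamped_window(133, 100, 120))  # -> (33, 133): weight near the top
    print(clamped_window(133, 100, 10))   # -> (0, 100):  weight near the bottom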
243,399
callowayproject/Transmogrify
transmogrify/autodetect/__init__.py
smart_crop
def smart_crop(crop_w, crop_h, image_path):
    """
    Return the scaled image size and crop rectangle
    """
    cropping = face_and_energy_detector(image_path)
    img = Image.open(image_path)
    w, h = img.size
    scaled_size = get_crop_size(crop_w, crop_h, *img.size)
    gravity_x = int(round(scaled_size[0] * cropping.gravity[0]))
    gravity_y = int(round(scaled_size[1] * cropping.gravity[1]))
    if scaled_size[0] == crop_w:
        # find the top and bottom crops
        crop_top, crop_bot = calc_subrange(scaled_size[1], crop_h, gravity_y)
        return scaled_size, Rect(left=0, top=crop_top, right=crop_w, bottom=crop_bot)
    else:
        # find the right and left crops
        crop_left, crop_right = calc_subrange(scaled_size[0], crop_w, gravity_x)
        return scaled_size, Rect(left=crop_left, top=0, right=crop_right, bottom=crop_h)
python
def smart_crop(crop_w, crop_h, image_path):
    """
    Return the scaled image size and crop rectangle
    """
    cropping = face_and_energy_detector(image_path)
    img = Image.open(image_path)
    w, h = img.size
    scaled_size = get_crop_size(crop_w, crop_h, *img.size)
    gravity_x = int(round(scaled_size[0] * cropping.gravity[0]))
    gravity_y = int(round(scaled_size[1] * cropping.gravity[1]))
    if scaled_size[0] == crop_w:
        # find the top and bottom crops
        crop_top, crop_bot = calc_subrange(scaled_size[1], crop_h, gravity_y)
        return scaled_size, Rect(left=0, top=crop_top, right=crop_w, bottom=crop_bot)
    else:
        # find the right and left crops
        crop_left, crop_right = calc_subrange(scaled_size[0], crop_w, gravity_x)
        return scaled_size, Rect(left=crop_left, top=0, right=crop_right, bottom=crop_h)
[ "def", "smart_crop", "(", "crop_w", ",", "crop_h", ",", "image_path", ")", ":", "cropping", "=", "face_and_energy_detector", "(", "image_path", ")", "img", "=", "Image", ".", "open", "(", "image_path", ")", "w", ",", "h", "=", "img", ".", "size", "scaled_size", "=", "get_crop_size", "(", "crop_w", ",", "crop_h", ",", "*", "img", ".", "size", ")", "gravity_x", "=", "int", "(", "round", "(", "scaled_size", "[", "0", "]", "*", "cropping", ".", "gravity", "[", "0", "]", ")", ")", "gravity_y", "=", "int", "(", "round", "(", "scaled_size", "[", "1", "]", "*", "cropping", ".", "gravity", "[", "1", "]", ")", ")", "if", "scaled_size", "[", "0", "]", "==", "crop_w", ":", "# find the top and bottom crops", "crop_top", ",", "crop_bot", "=", "calc_subrange", "(", "scaled_size", "[", "1", "]", ",", "crop_h", ",", "gravity_y", ")", "return", "scaled_size", ",", "Rect", "(", "left", "=", "0", ",", "top", "=", "crop_top", ",", "right", "=", "crop_w", ",", "bottom", "=", "crop_bot", ")", "else", ":", "# find the right and left crops", "crop_left", ",", "crop_right", "=", "calc_subrange", "(", "scaled_size", "[", "0", "]", ",", "crop_w", ",", "gravity_x", ")", "return", "scaled_size", ",", "Rect", "(", "left", "=", "crop_left", ",", "top", "=", "0", ",", "right", "=", "crop_right", ",", "bottom", "=", "crop_h", ")" ]
Return the scaled image size and crop rectangle
[ "Return", "the", "scaled", "image", "size", "and", "crop", "rectangle" ]
f1f891b8b923b3a1ede5eac7f60531c1c472379e
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/autodetect/__init__.py#L212-L229
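For context, this is a hedged sketch of how smart_crop's (scaled_size, Rect) result might be applied with Pillow to produce the final thumbnail. The import path, the attribute access on Rect (left/top/right/bottom, mirroring the keyword arguments used in the record), and the file names are assumptions based only on this record, not the library's documented API.

from PIL import Image

from transmogrify.autodetect import smart_crop  # assumed import path, based on the record's "path" field


def make_thumbnail(src_path, dest_path, crop_w=100, crop_h=100):
    # smart_crop picks the scaled size and the crop window around the
    # detected faces / high-energy region.
    scaled_size, rect = smart_crop(crop_w, crop_h, src_path)
    img = Image.open(src_path)
    scaled = img.resize(scaled_size)
    # Rect is assumed to expose left/top/right/bottom, matching how it is
    # constructed above; PIL's crop() takes (left, upper, right, lower).
    thumb = scaled.crop((rect.left, rect.top, rect.right, rect.bottom))
    thumb.save(dest_path)


# Hypothetical call:
# make_thumbnail("portrait.jpg", "portrait_100x100.jpg")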