INSTRUCTION | RESPONSE |
|---|---|
Returns a dictionary of all the files under a path. | def _get_local_files(self, path):
"""Returns a dictionary of all the files under a path."""
if not path:
raise ValueError("No path specified")
files = defaultdict(lambda: None)
path_len = len(path) + 1
for root, dirs, filenames in os.walk(path):
for name in filenames:
full_path = join(root, name)
files[full_path[path_len:]] = compute_md5(full_path)
return files |
Syncs a local directory with an S3 bucket. Currently does not delete files from S3 that are not in the local directory. | def sync_folder(self, path, bucket):
"""Syncs a local directory with an S3 bucket.
Currently does not delete files from S3 that are not in the local directory.
path: The path to the directory to sync to S3
bucket: The name of the bucket on S3
"""
bucket = self.conn.get_bucket(bucket)
local_files = self._get_local_files(path)
s3_files = self._get_s3_files(bucket)
    for filename, hash in local_files.items():
s3_key = s3_files[filename]
if s3_key is None:
s3_key = Key(bucket)
s3_key.key = filename
s3_key.etag = '"!"'
if s3_key.etag[1:-1] != hash[0]:
s3_key.set_contents_from_filename(join(path, filename), md5=hash) |
Syncs a list of folders to their associated buckets. folders: A list of 2-tuples in the form (folder, bucket) | def sync(self, folders):
    """Syncs a list of folders to their associated buckets.
folders: A list of 2-tuples in the form (folder, bucket)
"""
if not folders:
raise ValueError("No folders to sync given")
for folder in folders:
self.sync_folder(*folder) |
Decorator for views that checks that the user is logged in, redirecting to the log-in page if necessary. | def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
actual_decorator = request_passes_test(
lambda r: r.session.get('user_token'),
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator |
Decorator for views that checks that the user has the required permission, redirecting to the log-in page if necessary. | def permission_required(function=None, permission=None, object_id=None,
redirect_field_name=REDIRECT_FIELD_NAME,
login_url=None):
"""
    Decorator for views that checks that the user has the required permission,
    redirecting to the log-in page if necessary.
"""
actual_decorator = request_passes_test(
lambda r: has_permission(r.session.get('user_permissions'), permission, object_id), # noqa
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator |
Ensure the user has the necessary tokens for the specified services | def tokens_required(service_list):
"""
Ensure the user has the necessary tokens for the specified services
"""
def decorator(func):
@wraps(func)
def inner(request, *args, **kwargs):
for service in service_list:
if service not in request.session["user_tokens"]:
return redirect('denied')
return func(request, *args, **kwargs)
return inner
return decorator |
Displays the login form and handles the login action. | def login(request, template_name='ci/login.html',
redirect_field_name=REDIRECT_FIELD_NAME,
authentication_form=AuthenticationForm):
"""
Displays the login form and handles the login action.
"""
redirect_to = request.POST.get(redirect_field_name,
request.GET.get(redirect_field_name, ''))
if request.method == "POST":
form = authentication_form(request, data=request.POST)
if form.is_valid():
# Ensure the user-originating redirection url is safe.
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
# Okay, security check complete. Get the user object from auth api.
user = form.get_user()
request.session['user_token'] = user["token"]
request.session['user_email'] = user["email"]
request.session['user_permissions'] = user["permissions"]
request.session['user_id'] = user["id"]
request.session['user_list'] = user["user_list"]
if not settings.HIDE_DASHBOARDS:
# Set user dashboards because they are slow to change
dashboards = ciApi.get_user_dashboards(user["id"])
dashboard_list = list(dashboards['results'])
if len(dashboard_list) > 0:
request.session['user_dashboards'] = \
dashboard_list[0]["dashboards"]
request.session['user_default_dashboard'] = \
dashboard_list[0]["default_dashboard"]["id"]
else:
request.session['user_dashboards'] = []
request.session['user_default_dashboard'] = None
# Get the user access tokens too and format for easy access
tokens = ciApi.get_user_service_tokens(
params={"user_id": user["id"]})
token_list = list(tokens['results'])
user_tokens = {}
if len(token_list) > 0:
for token in token_list:
user_tokens[token["service"]["name"]] = {
"token": token["token"],
"url": token["service"]["url"] + "/api/v1"
}
request.session['user_tokens'] = user_tokens
return HttpResponseRedirect(redirect_to)
else:
form = authentication_form(request)
current_site = get_current_site(request)
context = {
'form': form,
redirect_field_name: redirect_to,
'site': current_site,
'site_name': current_site.name,
}
return TemplateResponse(request, template_name, context) |
Build CLI dynamically based on the package structure. | def build(cli, path, package):
"""Build CLI dynamically based on the package structure.
"""
for _, name, ispkg in iter_modules(path):
module = import_module(f'.{name}', package)
if ispkg:
build(cli.group(name)(module.group),
module.__path__,
module.__package__)
else:
cli.command(name)(module.command) |
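A hedged illustration of the package layout this builder walks (all names are hypothetical; it assumes click-style group/command objects): each sub-package exposes a `group` it recurses into, and each leaf module exposes a `command`.

```python
# mycli/
#     __init__.py      # defines the root `cli` group
#     serve.py         # must expose `command`
#     db/
#         __init__.py  # must expose `group`
#         migrate.py   # must expose `command`
#
# import mycli
# build(mycli.cli, mycli.__path__, mycli.__package__)
```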
Return an already closed read-only instance of Fridge. Arguments are the same as for the constructor. | def readonly(cls, *args, **kwargs):
"""
Return an already closed read-only instance of Fridge.
Arguments are the same as for the constructor.
"""
fridge = cls(*args, **kwargs)
fridge.close()
return fridge |
Force reloading the data from the file. All data in the in-memory dictionary is discarded. This method is called automatically by the constructor; normally you don't need to call it. | def load(self):
"""
Force reloading the data from the file.
All data in the in-memory dictionary is discarded.
This method is called automatically by the constructor, normally you
don't need to call it.
"""
self._check_open()
try:
data = json.load(self.file, **self.load_args)
except ValueError:
data = {}
if not isinstance(data, dict):
raise ValueError('Root JSON type must be dictionary')
self.clear()
self.update(data) |
Force saving the dictionary to the file. All data in the file is discarded. This method is called automatically by :meth:`close`. | def save(self):
"""
Force saving the dictionary to the file.
All data in the file is discarded.
This method is called automatically by :meth:`close`.
"""
self._check_open()
self.file.truncate(0)
self.file.seek(0)
json.dump(self, self.file, **self.dump_args) |
Close the fridge. Calls :meth:`save` and closes the underlying file object unless an already open file was passed to the constructor. This method has no effect if the object is already closed. | def close(self):
"""
Close the fridge.
Calls :meth:`save` and closes the underlying file object unless
an already open file was passed to the constructor.
This method has no effect if the object is already closed.
After the fridge is closed :meth:`save` and :meth:`load` will raise an exception
but you will still be able to use it as an ordinary dictionary.
"""
if not self.closed:
self.save()
if self.close_file:
self.file.close()
self.closed = True |
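A short usage sketch (the file name is hypothetical; this assumes the constructor takes a path and opens or creates the backing JSON file):

```python
fridge = Fridge('state.json')             # load() runs in the constructor
fridge['counter'] = fridge.get('counter', 0) + 1
fridge.close()                            # save() runs, the file is closed
fridge['counter'] += 1                    # still usable as a plain dict
```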
Create a signed JWT containing a JWKS. The JWT is signed by one of the keys in the JWKS. | def self_sign_jwks(keyjar, iss, kid='', lifetime=3600):
"""
Create a signed JWT containing a JWKS. The JWT is signed by one of the
keys in the JWKS.
:param keyjar: A KeyJar instance with at least one private signing key
:param iss: issuer of the JWT, should be the owner of the keys
:param kid: A key ID if a special key should be used otherwise one
is picked at random.
:param lifetime: The lifetime of the signed JWT
:return: A signed JWT
"""
# _json = json.dumps(jwks)
_jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
jwks = keyjar.export_jwks(issuer=iss)
return _jwt.pack(payload={'jwks': jwks}, owner=iss, kid=kid) |
Verify the signature of a signed JWT containing a JWKS. The JWT is signed by one of the keys in the JWKS. In the JWT the JWKS is stored using this format:: 'jwks': {'keys': [ ]} | def verify_self_signed_jwks(sjwt):
"""
Verify the signature of a signed JWT containing a JWKS.
The JWT is signed by one of the keys in the JWKS.
In the JWT the JWKS is stored using this format ::
'jwks': {
'keys': [ ]
}
    :param sjwt: Signed JSON Web Token
:return: Dictionary containing 'jwks' (the JWKS) and 'iss' (the issuer of
the JWT)
"""
_jws = factory(sjwt)
_json = _jws.jwt.part[1]
_body = json.loads(as_unicode(_json))
iss = _body['iss']
_jwks = _body['jwks']
_kj = jwks_to_keyjar(_jwks, iss)
try:
_kid = _jws.jwt.headers['kid']
except KeyError:
_keys = _kj.get_signing_key(owner=iss)
else:
_keys = _kj.get_signing_key(owner=iss, kid=_kid)
_ver = _jws.verify_compact(sjwt, _keys)
return {'jwks': _ver['jwks'], 'iss': iss} |
A metadata statement signing request with signing_keys signed by one of the keys in signing_keys. | def request_signed_by_signing_keys(keyjar, msreq, iss, lifetime, kid=''):
"""
A metadata statement signing request with 'signing_keys' signed by one
of the keys in 'signing_keys'.
:param keyjar: A KeyJar instance with the private signing key
:param msreq: Metadata statement signing request. A MetadataStatement
instance.
:param iss: Issuer of the signing request also the owner of the signing
keys.
:return: Signed JWT where the body is the metadata statement
"""
try:
jwks_to_keyjar(msreq['signing_keys'], iss)
except KeyError:
jwks = keyjar.export_jwks(issuer=iss)
msreq['signing_keys'] = jwks
_jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
return _jwt.pack(owner=iss, kid=kid, payload=msreq.to_dict()) |
Verify that a JWT is signed with a key that is inside the JWT. :param smsreq: Signed Metadata Statement signing request :return: Dictionary containing 'ms' (the signed request) and 'iss' (the issuer of the JWT). | def verify_request_signed_by_signing_keys(smsreq):
"""
Verify that a JWT is signed with a key that is inside the JWT.
:param smsreq: Signed Metadata Statement signing request
:return: Dictionary containing 'ms' (the signed request) and 'iss' (the
issuer of the JWT).
"""
_jws = factory(smsreq)
_json = _jws.jwt.part[1]
_body = json.loads(as_unicode(_json))
iss = _body['iss']
_jwks = _body['signing_keys']
_kj = jwks_to_keyjar(_jwks, iss)
try:
_kid = _jws.jwt.headers['kid']
except KeyError:
_keys = _kj.get_signing_key(owner=iss)
else:
_keys = _kj.get_signing_key(owner=iss, kid=_kid)
_ver = _jws.verify_compact(smsreq, _keys)
# remove the JWT specific claims
for k in JsonWebToken.c_param.keys():
try:
del _ver[k]
except KeyError:
pass
try:
del _ver['kid']
except KeyError:
pass
return {'ms': MetadataStatement(**_ver), 'iss': iss} |
A decorator for providing a unit-testing function/method with every card in a librarian card library database when it is called. | def card(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped |
A decorator for providing a unittest with a library, having it called only once. | def library(func):
    """
    A decorator for providing a unittest with a library, having it called
    only once.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
SINGLES.append(wrapped)
return wrapped |
Discover and load greencard tests. | def descovery(testdir):
    """Discover and load greencard tests."""
from os.path import join, exists, isdir, splitext, basename, sep
if not testdir or not exists(testdir) or not isdir(testdir):
return None
from os import walk
import fnmatch
import imp
for root, _, filenames in walk(testdir):
for filename in fnmatch.filter(filenames, '*.py'):
path = join(root, filename)
modulepath = splitext(root)[0].replace(sep, '.')
imp.load_source(modulepath, path) |
Command line entry point. | def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="test/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
cardcount, passes, failures = execute_tests(library)
print(RESULTS.format(len(SINGLES), len(TESTS), cardcount, passes,
failures))
sys.exit(failures) |
Returns the Scrabble score of a letter. | def letter_score(letter):
"""Returns the Scrabble score of a letter.
Args:
letter: a single character string
Raises:
TypeError if a non-Scrabble character is supplied
"""
score_map = {
1: ["a", "e", "i", "o", "u", "l", "n", "r", "s", "t"],
2: ["d", "g"],
3: ["b", "c", "m", "p"],
4: ["f", "h", "v", "w", "y"],
5: ["k"],
8: ["j", "x"],
10: ["q", "z"],
}
for score, letters in score_map.items():
if letter.lower() in letters:
return score
else:
raise TypeError("Invalid letter: %s", letter) |
Checks the Scrabble score of a single word. | def word_score(word, input_letters, questions=0):
"""Checks the Scrabble score of a single word.
Args:
word: a string to check the Scrabble score of
input_letters: the letters in our rack
questions: integer of the tiles already on the board to build on
Returns:
an integer Scrabble score amount for the word
"""
score = 0
bingo = 0
filled_by_blanks = []
rack = list(input_letters) # make a copy to speed up find_anagrams()
for letter in word:
if letter in rack:
bingo += 1
score += letter_score(letter)
rack.remove(letter)
else:
filled_by_blanks.append(letter_score(letter))
# we can have both ?'s and _'s in the word. this will apply the ?s to the
# highest scrabble score value letters and leave the blanks for low points.
for blank_score in sorted(filled_by_blanks, reverse=True):
if questions > 0:
score += blank_score
questions -= 1
# 50 bonus points for using all the tiles in your rack
if bingo > 6:
score += 50
return score |
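A worked example, assuming the letter_score() helper above: with a rack of q, u, i and one "?" tile, the z in "quiz" is covered by the "?" and still scores.

```python
# q(10) + u(1) + i(1) come from the rack; z(10) is filled by the "?" tile.
assert word_score("quiz", ["q", "u", "i"], questions=1) == 22
```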
Searches a string for blank tile characters ( ? and _ ). | def blank_tiles(input_word):
"""Searches a string for blank tile characters ("?" and "_").
Args:
input_word: the user supplied string to search through
Returns:
a tuple of:
input_word without blanks
integer number of blanks (no points)
integer number of questions (points)
"""
blanks = 0
questions = 0
input_letters = []
for letter in input_word:
if letter == "_":
blanks += 1
elif letter == "?":
questions += 1
else:
input_letters.append(letter)
return input_letters, blanks, questions |
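For example, "_" is a scoreless blank and "?" a scoring one; both are stripped from the letters:

```python
letters, blanks, questions = blank_tiles("qu_z?")
assert letters == ["q", "u", "z"] and blanks == 1 and questions == 1
```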
Opens the word list file. | def word_list(sowpods=False, start="", end=""):
"""Opens the word list file.
Args:
sowpods: a boolean to declare using the sowpods list or TWL (default)
start: a string of starting characters to find anagrams based on
end: a string of ending characters to find anagrams based on
    Yields:
        a word at a time out of 178691 words for TWL, 267751 for sowpods. Far
        fewer if either start or end is used (filtering is applied here)
"""
location = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"wordlists",
)
if sowpods:
filename = "sowpods.txt"
else:
filename = "twl.txt"
filepath = os.path.join(location, filename)
with open(filepath) as wordfile:
for word in wordfile.readlines():
word = word.strip()
if start and end and word.startswith(start) and word.endswith(end):
yield word
elif start and word.startswith(start) and not end:
yield word
elif end and word.endswith(end) and not start:
yield word
elif not start and not end:
yield word |
Checks if the input word could be played with a full bag of tiles. | def valid_scrabble_word(word):
"""Checks if the input word could be played with a full bag of tiles.
Returns:
        True or False
"""
letters_in_bag = {
"a": 9,
"b": 2,
"c": 2,
"d": 4,
"e": 12,
"f": 2,
"g": 3,
"h": 2,
"i": 9,
"j": 1,
"k": 1,
"l": 4,
"m": 2,
"n": 6,
"o": 8,
"p": 2,
"q": 1,
"r": 6,
"s": 4,
"t": 6,
"u": 4,
"v": 2,
"w": 2,
"x": 1,
"y": 2,
"z": 1,
"_": 2,
}
for letter in word:
if letter == "?":
continue
try:
letters_in_bag[letter] -= 1
except KeyError:
return False
if letters_in_bag[letter] < 0:
letters_in_bag["_"] -= 1
if letters_in_bag["_"] < 0:
return False
return True |
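The two "_" blanks extend the single "z" tile, as a quick check shows:

```python
# One "z" tile plus the two blanks cover three z's, but not four.
assert valid_scrabble_word("zzz") is True
assert valid_scrabble_word("zzzz") is False
assert valid_scrabble_word("é") is False  # not a tile in the bag
```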
Search StackOverflow and print the best answer's code. | def main(args):
    """Search StackOverflow and print the best answer's code."""
    try:
        args.query = ' '.join(args.query).replace('?', '')
        so = SOSearch(args.query, args.tags)
        result = so.first_q().best_answer.code
        if result is not None:
            print(result)
        else:
            print("Sorry I can't find your answer, try adding tags")
    except NoResult:
        print("Sorry I can't find your answer, try adding tags") |
Parse command-line arguments and run the search. | def cli_run():
    """Parse command-line arguments and run the search."""
parser = argparse.ArgumentParser(description='Stupidly simple code answers from StackOverflow')
parser.add_argument('query', help="What's the problem ?", type=str, nargs='+')
    parser.add_argument('-t', '--tags', help='semicolon separated tags -> python;lambda')
args = parser.parse_args()
main(args) |
Handle a JSON AMP dialect request. | def stringReceived(self, string):
"""Handle a JSON AMP dialect request.
First, the JSON is parsed. Then, all JSON dialect specific
values in the request are turned into the correct objects.
Then, finds the correct responder function, calls it, and
serializes the result (or error).
"""
request = loads(string)
identifier = request.pop("_ask")
commandName = request.pop("_command")
command, responder = self._getCommandAndResponder(commandName)
self._parseRequestValues(request, command)
d = self._runResponder(responder, request, command, identifier)
d.addCallback(self._writeResponse) |
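An illustrative request, sketched as plain data (the command name and arguments here are hypothetical):

```python
# What stringReceived() consumes and produces, schematically:
string = b'{"_ask": 1, "_command": "Sum", "a": 2, "b": 3}'
# -> identifier = 1, commandName = "Sum",
#    the responder is called as responder(a=2, b=3),
#    and the successful response carries {"_answer": 1}.
```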
Gets the command class and matching responder function for the given command name. | def _getCommandAndResponder(self, commandName):
"""Gets the command class and matching responder function for the
given command name.
"""
# DISGUSTING IMPLEMENTATION DETAIL EXPLOITING HACK
locator = self._remote.boxReceiver.locator
responder = locator.locateResponder(commandName)
responderFunction = responder.func_closure[1].cell_contents
command = responder.func_closure[2].cell_contents
return command, responderFunction |
Parses all the values in the request that are in a form specific to the JSON AMP dialect. | def _parseRequestValues(self, request, command):
"""Parses all the values in the request that are in a form specific
to the JSON AMP dialect.
"""
for key, ampType in command.arguments:
ampClass = ampType.__class__
if ampClass is exposed.ExposedResponderLocator:
request[key] = self._remote
continue
decoder = _decoders.get(ampClass)
if decoder is not None:
value = request.get(key)
request[key] = decoder(value, self) |
Run the responder function. If it succeeds, add the _answer key. If it fails with an error known to the command, serialize the error. | def _runResponder(self, responder, request, command, identifier):
    """Run the responder function. If it succeeds, add the _answer key.
If it fails with an error known to the command, serialize the
error.
"""
d = defer.maybeDeferred(responder, **request)
def _addIdentifier(response):
"""Return the response with an ``_answer`` key.
"""
response["_answer"] = identifier
return response
def _serializeFailure(failure):
"""
If the failure is serializable by this AMP command, serialize it.
"""
key = failure.trap(*command.allErrors)
response = {
"_error_code": command.allErrors[key],
"_error_description": str(failure.value),
"_error": identifier
}
return response
d.addCallbacks(_addIdentifier, _serializeFailure)
return d |
Serializes the response to JSON and writes it to the transport. | def _writeResponse(self, response):
"""
Serializes the response to JSON, and writes it to the transport.
"""
encoded = dumps(response, default=_default)
self.transport.write(encoded) |
Tells the box receiver to stop receiving boxes. | def connectionLost(self, reason):
"""
Tells the box receiver to stop receiving boxes.
"""
self._remote.boxReceiver.stopReceivingBoxes(reason)
return basic.NetstringReceiver.connectionLost(self, reason) |
Builds a bridge and associates it with an AMP protocol instance. | def buildProtocol(self, addr):
"""
Builds a bridge and associates it with an AMP protocol instance.
"""
proto = self._factory.buildProtocol(addr)
return JSONAMPDialectReceiver(proto) |
Read a signed JWKS bundle from disc, verify the signature and instantiate a JWKSBundle instance with the information from the file. :param iss: :param ver_keys: :param bundle_file: :return: | def get_bundle(iss, ver_keys, bundle_file):
"""
Read a signed JWKS bundle from disc, verify the signature and
instantiate a JWKSBundle instance with the information from the file.
:param iss:
:param ver_keys:
:param bundle_file:
:return:
"""
    with open(bundle_file, 'r') as fp:
        signed_bundle = fp.read()
return JWKSBundle(iss, None).upload_signed_bundle(signed_bundle, ver_keys) |
If the *key_file* file exists then read the keys from there, otherwise create the keys and store them in a file with the name *key_file*. | def get_signing_keys(eid, keydef, key_file):
    """
    If the *key_file* file exists then read the keys from there, otherwise
    create the keys and store them in a file with the name *key_file*.
:param eid: The ID of the entity that the keys belongs to
:param keydef: What keys to create
:param key_file: A file name
:return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
"""
    if os.path.isfile(key_file):
        kj = KeyJar()
        with open(key_file, 'r') as fp:
            kj.import_jwks(json.loads(fp.read()), eid)
    else:
        kj = build_keyjar(keydef)[1]
        # make the keys known under both names
        with open(key_file, 'w') as fp:
            fp.write(json.dumps(kj.export_jwks()))
        kj.issuer_keys[eid] = kj.issuer_keys['']
return kj |
Convert a JWKS to a KeyJar instance. | def jwks_to_keyjar(jwks, iss=''):
"""
Convert a JWKS to a KeyJar instance.
:param jwks: String representation of a JWKS
:return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
"""
if not isinstance(jwks, dict):
try:
jwks = json.loads(jwks)
except json.JSONDecodeError:
raise ValueError('No proper JSON')
kj = KeyJar()
kj.import_jwks(jwks, issuer=iss)
return kj |
Create a signed JWT containing a dictionary with Issuer IDs as keys and JWKSs as values. If iss_list is empty then all available issuers are included. :param sign_alg: Which algorithm to use when signing the JWT :param iss_list: A list of issuer IDs whose keys should be included in the signed bundle. :return: A signed JWT | def create_signed_bundle(self, sign_alg='RS256', iss_list=None):
"""
Create a signed JWT containing a dictionary with Issuer IDs as keys
and JWKSs as values. If iss_list is empty then all available issuers are
included.
:param sign_alg: Which algorithm to use when signing the JWT
    :param iss_list: A list of issuer IDs whose keys should be included in
the signed bundle.
:return: A signed JWT
"""
data = self.dict(iss_list)
_jwt = JWT(self.sign_keys, iss=self.iss, sign_alg=sign_alg)
    return _jwt.pack({'bundle': data}) |
Upload a bundle from an unsigned JSON document | def loads(self, jstr):
"""
Upload a bundle from an unsigned JSON document
:param jstr: A bundle as a dictionary or a JSON document
"""
if isinstance(jstr, dict):
_info = jstr
else:
_info = json.loads(jstr)
for iss, jwks in _info.items():
kj = KeyJar()
if isinstance(jwks, dict):
kj.import_jwks(jwks, issuer=iss)
else:
kj.import_jwks_as_json(jwks, issuer=iss)
self.bundle[iss] = kj
return self |
Return the bundle of keys as a dictionary with the issuer IDs as the keys and the key sets represented as JWKS instances. :param iss_list: List of Issuer IDs that should be part of the output :rtype: Dictionary | def dict(self, iss_list=None):
"""
Return the bundle of keys as a dictionary with the issuer IDs as
the keys and the key sets represented as JWKS instances.
:param iss_list: List of Issuer IDs that should be part of the
output
:rtype: Dictionary
"""
_int = {}
for iss, kj in self.bundle.items():
if iss_list is None or iss in iss_list:
try:
_int[iss] = kj.export_jwks_as_json(issuer=iss)
except KeyError:
_int[iss] = kj.export_jwks_as_json()
return _int |
Input is a signed JWT with a JSON document representing the key bundle as body. This method verifies the signature and then updates the instance bundle with whatever was in the received package. Note that, as with dictionary update, if an Issuer ID already exists in the instance bundle it will be overwritten with the new information. :param sign_bundle: A signed JWT :param ver_keys: Keys that can be used to verify the JWT signature. | def upload_signed_bundle(self, sign_bundle, ver_keys):
"""
Input is a signed JWT with a JSON document representing the key bundle
    as body. This method verifies the signature and then updates the instance
    bundle with whatever was in the received package. Note that, as with
    dictionary update, if an Issuer ID already exists in the instance bundle
    it will be overwritten with the new information.
:param sign_bundle: A signed JWT
:param ver_keys: Keys that can be used to verify the JWT signature.
"""
jwt = verify_signed_bundle(sign_bundle, ver_keys)
self.loads(jwt['bundle']) |
Convert a key bundle into a KeyJar instance. :return: An :py:class:`oidcmsg.key_jar.KeyJar` instance | def as_keyjar(self):
"""
Convert a key bundle into a KeyJar instance.
:return: An :py:class:`oidcmsg.key_jar.KeyJar` instance
"""
kj = KeyJar()
for iss, k in self.bundle.items():
try:
kj.issuer_keys[iss] = k.issuer_keys[iss]
except KeyError:
kj.issuer_keys[iss] = k.issuer_keys['']
return kj |
Return a function which runs the given cmd. make_shortcut('ls') returns a function which executes envoy.run('ls ' + arguments) | def make_shortcut(cmd):
"""return a function which runs the given cmd
make_shortcut('ls') returns a function which executes
envoy.run('ls ' + arguments)"""
def _(cmd_arguments, *args, **kwargs):
return run("%s %s" % (cmd, cmd_arguments), *args, **kwargs)
return _ |
This function deals with nova notifications. | def nova_process(body, message):
    """
    This function deals with nova notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = nova_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in nova_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = nova_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
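A hedged sketch of the lookup tables this dispatcher reads (the handler and event names are hypothetical; in ternya these tables are populated by registration rather than built by hand):

```python
import re

def on_instance_create(body, message):
    print(body["event_type"])

# Exact event types are consulted first...
nova_customer_process = {"compute.instance.create.end": on_instance_create}
# ...then compiled wildcard patterns, then the default process.
nova_customer_process_wildcard = {
    re.compile(r"compute\.instance\..*"): on_instance_create,
}
```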
This function deals with cinder notifications. | def cinder_process(body, message):
    """
    This function deals with cinder notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = cinder_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in cinder_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = cinder_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
This function deals with neutron notifications. | def neutron_process(body, message):
    """
    This function deals with neutron notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = neutron_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in neutron_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = neutron_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
This function deals with glance notifications. | def glance_process(body, message):
    """
    This function deals with glance notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = glance_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in glance_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = glance_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
This function deals with swift notifications. | def swift_process(body, message):
    """
    This function deals with swift notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = swift_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in swift_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = swift_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
This function deals with keystone notifications. | def keystone_process(body, message):
    """
    This function deals with keystone notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = keystone_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in keystone_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = keystone_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
This function deals with heat notifications. | def heat_process(body, message):
    """
    This function deals with heat notifications.
    First, look for a process in customer_process registered for the exact event type.
    If none is found, look for a matching pattern in customer_process_wildcard.
    If no wildcard pattern matches either, fall back to ternya's default process.
:param body: dict of openstack notification.
:param message: kombu Message class
:return:
"""
event_type = body['event_type']
process = heat_customer_process.get(event_type)
if process is not None:
process(body, message)
else:
matched = False
process_wildcard = None
for pattern in heat_customer_process_wildcard.keys():
if pattern.match(event_type):
process_wildcard = heat_customer_process_wildcard.get(pattern)
matched = True
break
if matched:
process_wildcard(body, message)
else:
default_process(body, message)
message.ack() |
Serve app using wsgiref or provided server. | def serve(self, server=None):
    """Serve app using wsgiref or provided server.
    Args:
        - server (callable): a callable that takes the WSGI app
    """
    if server is None:
        from wsgiref.simple_server import make_server
        httpd = make_server('', 8000, self)
        print('Listening on 0.0.0.0:8000')
        try:
            httpd.serve_forever()
        finally:
            httpd.socket.close()
    else:
        server(self) |
Print 'msg' to stdout, and optionally to 'log' at info level. | def pout(msg, log=None):
    """Print 'msg' to stdout, and optionally to 'log' at info level."""
_print(msg, sys.stdout, log_func=log.info if log else None) |
Print 'msg' to stderr, and optionally to 'log' at error level. | def perr(msg, log=None):
    """Print 'msg' to stderr, and optionally to 'log' at error level."""
_print(msg, sys.stderr, log_func=log.error if log else None) |
A class decorator for Command classes to register in the default set. | def register(CommandSubClass):
"""A class decorator for Command classes to register in the default set."""
name = CommandSubClass.name()
if name in Command._all_commands:
raise ValueError("Command already exists: " + name)
Command._all_commands[name] = CommandSubClass
return CommandSubClass |
A class decorator for Command classes to register. | def register(Class, CommandSubClass):
"""A class decorator for Command classes to register."""
for name in [CommandSubClass.name()] + CommandSubClass.aliases():
if name in Class._registered_commands[Class]:
raise ValueError("Command already exists: " + name)
Class._registered_commands[Class][name] = CommandSubClass
return CommandSubClass |
Instantiate each registered command into a dict mapping name/alias to instance. | def loadCommandMap(Class, subparsers=None, instantiate=True, **cmd_kwargs):
"""Instantiate each registered command to a dict mapping name/alias to
instance.
    Due to aliases, the returned length may be greater than the number of
commands, but the unique instance count will match.
"""
if not Class._registered_commands:
raise ValueError("No commands have been registered with {}"
.format(Class))
    commands = {}
    for Cmd in set(Class._registered_commands[Class].values()):
        cmd = Cmd(subparsers=subparsers, **cmd_kwargs) \
            if instantiate else Cmd
        for name in [Cmd.name()] + Cmd.aliases():
            commands[name] = cmd
    return commands |
If all of the constraints are satisfied with the given value, defers to the composed AMP argument's toString method. | def toString(self, value):
"""
If all of the constraints are satisfied with the given value, defers
to the composed AMP argument's ``toString`` method.
"""
self._checkConstraints(value)
return self.baseArgument.toString(value) |
Converts the string to a value using the composed AMP argument, then checks all the constraints against that value. | def fromString(self, string):
"""
Converts the string to a value using the composed AMP argument, then
checks all the constraints against that value.
"""
value = self.baseArgument.fromString(string)
self._checkConstraints(value)
return value |
Merges cdict into completers. In the event that a key in cdict already exists in the completers dict, a ValueError is raised iff regex is falsy. If a regex str is provided, it and the duplicate key are updated to be unique, and the updated regex is returned. | def _updateCompleterDict(completers, cdict, regex=None):
    """Merges ``cdict`` into ``completers``. In the event that a key
    in cdict already exists in the completers dict a ValueError is raised
    iff ``regex`` is falsy. If a regex str is provided it and the duplicate
    key are updated to be unique, and the updated regex is returned.
"""
for key in cdict:
if key in completers and not regex:
raise ValueError(f"Duplicate completion key: {key}")
if key in completers:
uniq = "_".join([key, str(uuid.uuid4()).replace("-", "")])
regex = regex.replace(f"P<{key}>", f"P<{uniq}>")
completers[uniq] = cdict[key]
else:
completers[key] = cdict[key]
return regex |
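A small demonstration of the collision path, assuming uuid is imported at module level: the duplicate key gets a uuid suffix and the regex's named group is renamed to match.

```python
completers = {"name": ["alice"]}
regex = r"(?P<name>\w+)"
new_regex = _updateCompleterDict(completers, {"name": ["bob"]}, regex)
# completers now holds "name" plus a unique "name_<hex>" key,
# and the named group in new_regex was rewritten to the new key.
assert "P<name>" not in new_regex
```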
Yield completions matching the word before the cursor. | def get_completions(self, document, complete_event):
# Get word/text before cursor.
if self.sentence:
word_before_cursor = document.text_before_cursor
else:
word_before_cursor = document.get_word_before_cursor(WORD=self.WORD)
if self.ignore_case:
word_before_cursor = word_before_cursor.lower()
def word_matches(word):
""" True when the word before the cursor matches. """
if self.ignore_case:
word = word.lower()
if self.match_middle:
return word_before_cursor in word
else:
return word.startswith(word_before_cursor)
'''
log.debug("------------------------------------------------------")
log.debug(f"** WORD {self.WORD}")
log.debug(f"** words {self.words}")
log.debug(f"** word_before_cursor {word_before_cursor}")
'''
words = self._words_callable() if self._words_callable else self.words
for a in words:
if word_matches(a):
display_meta = self.meta_dict.get(a, '')
log.debug(f"MATCH: {a}, {-len(word_before_cursor)},"
f" meta: {display_meta}")
yield Completion(self.quote(a), -len(word_before_cursor),
display_meta=display_meta) |
Start ternya work. | def work(self):
"""
Start ternya work.
First, import customer's service modules.
Second, init openstack mq.
Third, keep a ternya connection that can auto-reconnect.
"""
self.init_modules()
connection = self.init_mq()
TernyaConnection(self, connection).connect() |
Init connection and consumer with openstack mq. | def init_mq(self):
"""Init connection and consumer with openstack mq."""
mq = self.init_connection()
self.init_consumer(mq)
return mq.connection |
Import customer's service modules. | def init_modules(self):
"""Import customer's service modules."""
if not self.config:
raise ValueError("please read your config file.")
log.debug("begin to import customer's service modules.")
modules = ServiceModules(self.config)
modules.import_modules()
log.debug("end to import customer's service modules.") |
Init openstack nova mq | def init_nova_consumer(self, mq):
"""
Init openstack nova mq
    1. Check whether listening for nova notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Nova):
log.debug("disable listening nova notification")
return
for i in range(self.config.nova_mq_consumer_count):
mq.create_consumer(self.config.nova_mq_exchange,
self.config.nova_mq_queue,
ProcessFactory.process(Openstack.Nova))
log.debug("enable listening openstack nova notification.") |
Init openstack cinder mq | def init_cinder_consumer(self, mq):
"""
Init openstack cinder mq
    1. Check whether listening for cinder notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Cinder):
log.debug("disable listening cinder notification")
return
for i in range(self.config.cinder_mq_consumer_count):
mq.create_consumer(self.config.cinder_mq_exchange,
self.config.cinder_mq_queue,
ProcessFactory.process(Openstack.Cinder))
log.debug("enable listening openstack cinder notification.") |
Init openstack neutron mq | def init_neutron_consumer(self, mq):
"""
Init openstack neutron mq
    1. Check whether listening for neutron notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Neutron):
log.debug("disable listening neutron notification")
return
for i in range(self.config.neutron_mq_consumer_count):
mq.create_consumer(self.config.neutron_mq_exchange,
self.config.neutron_mq_queue,
ProcessFactory.process(Openstack.Neutron))
log.debug("enable listening openstack neutron notification.") |
Init openstack glance mq | def init_glance_consumer(self, mq):
"""
Init openstack glance mq
    1. Check whether listening for glance notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Glance):
log.debug("disable listening glance notification")
return
for i in range(self.config.glance_mq_consumer_count):
mq.create_consumer(self.config.glance_mq_exchange,
self.config.glance_mq_queue,
ProcessFactory.process(Openstack.Glance))
log.debug("enable listening openstack glance notification.") |
Init openstack swift mq | def init_swift_consumer(self, mq):
"""
Init openstack swift mq
    1. Check whether listening for swift notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Swift):
log.debug("disable listening swift notification")
return
for i in range(self.config.swift_mq_consumer_count):
mq.create_consumer(self.config.swift_mq_exchange,
self.config.swift_mq_queue,
ProcessFactory.process(Openstack.Swift))
log.debug("enable listening openstack swift notification.") |
Init openstack keystone mq | def init_keystone_consumer(self, mq):
    """
    Init openstack keystone mq
    1. Check whether listening for keystone notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Keystone):
log.debug("disable listening keystone notification")
return
for i in range(self.config.keystone_mq_consumer_count):
mq.create_consumer(self.config.keystone_mq_exchange,
self.config.keystone_mq_queue,
ProcessFactory.process(Openstack.Keystone))
log.debug("enable listening openstack keystone notification.") |
Init openstack heat mq | def init_heat_consumer(self, mq):
"""
Init openstack heat mq
    1. Check whether listening for heat notifications is enabled
2. Create consumer
:param mq: class ternya.mq.MQ
"""
if not self.enable_component_notification(Openstack.Heat):
log.debug("disable listening heat notification")
return
for i in range(self.config.heat_mq_consumer_count):
mq.create_consumer(self.config.heat_mq_exchange,
self.config.heat_mq_queue,
ProcessFactory.process(Openstack.Heat))
log.debug("enable listening openstack heat notification.") |
Check whether the customer enabled notifications for the given openstack component. | def enable_component_notification(self, openstack_component):
    """
    Check whether the customer enabled notifications for the given openstack component.
:param openstack_component: Openstack component type.
"""
openstack_component_mapping = {
Openstack.Nova: self.config.listen_nova_notification,
Openstack.Cinder: self.config.listen_cinder_notification,
Openstack.Neutron: self.config.listen_neutron_notification,
Openstack.Glance: self.config.listen_glance_notification,
Openstack.Swift: self.config.listen_swift_notification,
Openstack.Keystone: self.config.listen_keystone_notification,
Openstack.Heat: self.config.listen_heat_notification
}
return openstack_component_mapping[openstack_component] |
Get music info from baidu music api | def music_info(songid):
"""
Get music info from baidu music api
"""
if isinstance(songid, list):
songid = ','.join(songid)
data = {
"hq": 1,
"songIds": songid
}
res = requests.post(MUSIC_INFO_URL, data=data)
info = res.json()
music_data = info["data"]
songs = []
for song in music_data["songList"]:
song_link, size = _song_link(song, music_data["xcode"])
songs.append({
"name": song["songName"],
"singer": song["artistName"],
"lrc_link": song["lrcLink"],
"song_link": song_link,
"size": size
})
return songs |
Download a song with multiple threads. | def download_music(song, thread_num=4):
    """
    Download a song with multiple threads.
"""
filename = "{}.mp3".format(song["name"])
if os.path.exists(filename):
os.remove(filename)
part = int(song["size"] / thread_num)
if part <= 1024:
thread_num = 1
_id = uuid.uuid4().hex
logger.info("downloading '{}'...".format(song["name"]))
threads = []
for i in range(thread_num):
if i == thread_num - 1:
end = ''
else:
end = (i + 1) * part - 1
thread = Worker((i * part, end), song, _id)
thread.start()
threads.append(thread)
for t in threads:
t.join()
fileParts = glob.glob("part-{}-*".format(_id))
    fileParts.sort(key=lambda e: int(e.split('-')[-1]))
logger.info("'{}' combine parts...".format(song["name"]))
with open(filename, "ab") as f:
for part in fileParts:
with open(part, "rb") as d:
shutil.copyfileobj(d, f)
os.remove(part)
logger.info("'{}' finished".format(song["name"])) |
Execute a code object. The inputs and behavior of this function should match those of eval_ and exec_. | def execute(self, globals_=None, _locals=None):
"""
Execute a code object
The inputs and behavior of this function should match those of
eval_ and exec_.
.. _eval: https://docs.python.org/3/library/functions.html?highlight=eval#eval
.. _exec: https://docs.python.org/3/library/functions.html?highlight=exec#exec
.. note:: Need to figure out how the internals of this function must change for
``eval`` or ``exec``.
:param code: a python code object
:param globals_: optional globals dictionary
:param _locals: optional locals dictionary
"""
if globals_ is None:
globals_ = globals()
if _locals is None:
self._locals = globals_
else:
self._locals = _locals
self.globals_ = globals_
if self.contains_op("YIELD_VALUE"):
return self.iterate_instructions()
else:
return self.execute_instructions() |
Implementation of the LOAD_NAME operation | def load_name(self, name):
"""
Implementation of the LOAD_NAME operation
"""
if name in self.globals_:
return self.globals_[name]
b = self.globals_['__builtins__']
if isinstance(b, dict):
return b[name]
else:
return getattr(b, name) |
Pop the ** n ** topmost items from the stack and return them as a list. | def pop(self, n):
"""
Pop the **n** topmost items from the stack and return them as a ``list``.
"""
    popped = self.__stack[len(self.__stack) - n:]
    del self.__stack[len(self.__stack) - n:]
    return popped |
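Equivalent behavior on a plain list, for illustration:

```python
stack = [1, 2, 3, 4]
n = 2
popped = stack[len(stack) - n:]  # [3, 4] -- the n topmost items
del stack[len(stack) - n:]       # stack is now [1, 2]
assert popped == [3, 4] and stack == [1, 2]
```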
Implement builtins.__build_class__. We must wrap all class member functions using :py:func:`function_wrapper`. This requires using a :py:class:`Machine` to execute the class source code and then recreating the class source code using an :py:class:`Assembler`. | def build_class(self, callable_, args):
"""
Implement ``builtins.__build_class__``.
We must wrap all class member functions using :py:func:`function_wrapper`.
This requires using a :py:class:`Machine` to execute the class source code
and then recreating the class source code using an :py:class:`Assembler`.
.. note: We might be able to bypass the call to ``builtins.__build_class__``
entirely and manually construct a class object.
https://github.com/python/cpython/blob/master/Python/bltinmodule.c
"""
self._print('build_class')
self._print(callable_)
self._print('args=',args)
if isinstance(args[0], FunctionType):
c = args[0].get_code()
else:
c = args[0].__closure__[0].cell_contents.__code__
# execute the original class source code
print('execute original class source code')
machine = MachineClassSource(c, self.verbose)
l = dict()
machine.execute(self.globals_, l)
# construct new code for class source
a = Assembler()
for name, value in l.items():
a.load_const(value)
a.store_name(name)
a.load_const(None)
a.return_value()
print('new code for class source')
dis.dis(a.code())
#machine = Machine(self.verbose)
f = types.FunctionType(a.code(), self.globals_, args[1])
args = (f, *args[1:])
self.call_callbacks('CALL_FUNCTION', callable_, *args)
return callable_(*args) |
Implement the CALL_FUNCTION_ operation. | def call_function(self, c, i):
"""
Implement the CALL_FUNCTION_ operation.
.. _CALL_FUNCTION: https://docs.python.org/3/library/dis.html#opcode-CALL_FUNCTION
"""
callable_ = self.__stack[-1-i.arg]
args = tuple(self.__stack[len(self.__stack) - i.arg:])
self._print('call function')
self._print('\tfunction ', callable_)
self._print('\ti.arg ', i.arg)
self._print('\targs ', args)
self.call_callbacks('CALL_FUNCTION', callable_, *args)
if isinstance(callable_, FunctionType):
ret = callable_(*args)
elif callable_ is builtins.__build_class__:
ret = self.build_class(callable_, args)
elif callable_ is builtins.globals:
ret = self.builtins_globals()
else:
ret = callable_(*args)
self.pop(1 + i.arg)
self.__stack.append(ret) |
Performs a mysqldump backup. Create a database dump for the given database. Returns statuscode and shell output. | def dump(filename, dbname, username=None, password=None, host=None,
port=None, tempdir='/tmp', mysqldump_path='mysqldump'):
    """Performs a mysqldump backup.
    Create a database dump for the given database.
    Returns statuscode and shell output.
    """
    filepath = os.path.join(tempdir, filename)
    cmd = mysqldump_path
    cmd += ' --result-file=' + filepath
if username:
cmd += ' --user=%s' % username
if host:
cmd += ' --host=%s' % host
if port:
cmd += ' --port=%s' % port
if password:
cmd += ' --password=%s' % password
cmd += ' ' + dbname
## run mysqldump
return sh(cmd) |
returns a connected (autocommit) connection to the database server. | def _connection(username=None, password=None, host=None, port=None):
    "returns a connected (autocommit) connection to the database server."
c_opts = {}
if username: c_opts['user'] = username
if password: c_opts['passwd'] = password
if host: c_opts['host'] = host
if port: c_opts['port'] = port
dbc = MySQLdb.connect(**c_opts)
dbc.autocommit(True)
return dbc |
Render ditaa code into a PNG output file. | def render_ditaa(self, code, options, prefix='ditaa'):
"""Render ditaa code into a PNG output file."""
hashkey = code.encode('utf-8') + str(options) + \
str(self.builder.config.ditaa) + \
str(self.builder.config.ditaa_args)
infname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "ditaa")
outfname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "png")
inrelfn = posixpath.join(self.builder.imgpath, infname)
infullfn = path.join(self.builder.outdir, '_images', infname)
outrelfn = posixpath.join(self.builder.imgpath, outfname)
outfullfn = path.join(self.builder.outdir, '_images', outfname)
if path.isfile(outfullfn):
return outrelfn, outfullfn
ensuredir(path.dirname(outfullfn))
# ditaa expects UTF-8 by default
if isinstance(code, unicode):
code = code.encode('utf-8')
ditaa_args = [self.builder.config.ditaa]
ditaa_args.extend(self.builder.config.ditaa_args)
ditaa_args.extend(options)
ditaa_args.extend( [infullfn] )
ditaa_args.extend( [outfullfn] )
f = open(infullfn, 'w')
f.write(code)
f.close()
try:
self.builder.warn(ditaa_args)
p = Popen(ditaa_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
except OSError, err:
if err.errno != ENOENT: # No such file or directory
raise
self.builder.warn('ditaa command %r cannot be run (needed for ditaa '
'output), check the ditaa setting' %
self.builder.config.ditaa)
self.builder._ditaa_warned_dot = True
return None, None
wentWrong = False
try:
# Ditaa may close standard input when an error occurs,
# resulting in a broken pipe on communicate()
stdout, stderr = p.communicate(code)
except OSError, err:
if err.errno != EPIPE:
raise
wentWrong = True
except IOError, err:
if err.errno != EINVAL:
raise
wentWrong = True
if wentWrong:
# in this case, read the standard output and standard error streams
# directly, to get the error message(s)
stdout, stderr = p.stdout.read(), p.stderr.read()
p.wait()
if p.returncode != 0:
raise DitaaError('ditaa exited with error:\n[stderr]\n%s\n'
'[stdout]\n%s' % (stderr, stdout))
return outrelfn, outfullfn |
Invoked in the finally block of Application. run. | def _atexit(self):
"""Invoked in the 'finally' block of Application.run."""
self.log.debug("Application._atexit")
if self._atexit_func:
self._atexit_func(self) |
Run Application.main and exit with the return value. | def run(self, args_list=None):
    """Run Application.main and exit with the return value."""
self.log.debug("Application.run: {args_list}".format(**locals()))
retval = None
try:
retval = self._run(args_list=args_list)
except KeyboardInterrupt:
self.log.verbose("Interrupted") # pragma: nocover
except SystemExit as exit:
self.log.verbose("Exited")
retval = exit.code
except Exception:
print("Uncaught exception", file=sys.stderr)
traceback.print_exc()
if "debug_pdb" in self.args and self.args.debug_pdb:
debugger()
retval = Application.UNCAUGHT_EXCEPTION_EXIT
raise
finally:
try:
self._atexit()
finally:
sys.stderr.flush()
sys.stdout.flush()
sys.exit(retval) |
Context manager that changes to directory path and return to CWD when exited. | def cd(path):
"""Context manager that changes to directory `path` and return to CWD
when exited.
"""
old_path = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(old_path) |
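A usage sketch; the working directory is restored even if the body raises (this assumes cd is wrapped with contextlib.contextmanager, as the yield suggests):

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    before = os.getcwd()
    with cd(tmp):
        assert os.getcwd() == os.path.realpath(tmp)
    assert os.getcwd() == before
```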
Modified from shutil. copytree docs code sample merges files rather than requiring dst to not exist. | def copytree(src, dst, symlinks=True):
"""
Modified from shutil.copytree docs code sample, merges files rather than
requiring dst to not exist.
"""
from shutil import copy2, Error, copystat
names = os.listdir(src)
if not Path(dst).exists():
os.makedirs(dst)
errors = []
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks)
else:
copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except OSError as why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error as err:
errors.extend(err.args[0])
try:
copystat(src, dst)
except OSError as why:
# can't copy file access times on Windows
        if getattr(why, 'winerror', None) is None:
errors.extend((src, dst, str(why)))
if errors:
raise Error(errors) |
If called in the context of an exception calls post_mortem ; otherwise set_trace. ipdb is preferred over pdb if installed. | def debugger():
"""If called in the context of an exception, calls post_mortem; otherwise
set_trace.
``ipdb`` is preferred over ``pdb`` if installed.
"""
e, m, tb = sys.exc_info()
if tb is not None:
_debugger.post_mortem(tb)
else:
_debugger.set_trace() |
Implements the dict. keys () method | def keys(self):
"""
Implements the dict.keys() method
"""
self.sync()
for k in self.db.keys():
try:
yield self.key_conv['from'](k)
except KeyError:
yield k |
Find the time this file was last modified. | def get_mtime(fname):
"""
Find the time this file was last modified.
:param fname: File name
:return: The last time the file was modified.
"""
try:
mtime = os.stat(fname).st_mtime_ns
except OSError:
# The file might be right in the middle of being written
# so sleep
time.sleep(1)
mtime = os.stat(fname).st_mtime_ns
return mtime |
Find out if this item has been modified since it was last seen. | def is_changed(self, item):
    """
    Find out if this item has been modified since it was last seen.
:param item: A key
:return: True/False
"""
fname = os.path.join(self.fdir, item)
if os.path.isfile(fname):
mtime = self.get_mtime(fname)
try:
_ftime = self.fmtime[item]
except KeyError: # Never been seen before
self.fmtime[item] = mtime
return True
if mtime > _ftime: # has changed
self.fmtime[item] = mtime
return True
else:
return False
else:
logger.error('Could not access {}'.format(fname))
raise KeyError(item) |
Goes through the directory and builds a local cache based on the content of the directory. | def sync(self):
"""
Goes through the directory and builds a local cache based on
the content of the directory.
"""
if not os.path.isdir(self.fdir):
os.makedirs(self.fdir)
for f in os.listdir(self.fdir):
fname = os.path.join(self.fdir, f)
if not os.path.isfile(fname):
continue
if f in self.fmtime:
if self.is_changed(f):
self.db[f] = self._read_info(fname)
else:
mtime = self.get_mtime(fname)
self.db[f] = self._read_info(fname)
self.fmtime[f] = mtime |
Implements the dict. items () method | def items(self):
"""
Implements the dict.items() method
"""
self.sync()
for k, v in self.db.items():
try:
yield self.key_conv['from'](k), v
except KeyError:
yield k, v |
Completely resets the database. This means that all information in the local cache and on disc will be erased. | def clear(self):
"""
Completely resets the database. This means that all information in
the local cache and on disc will be erased.
"""
if not os.path.isdir(self.fdir):
os.makedirs(self.fdir, exist_ok=True)
return
for f in os.listdir(self.fdir):
del self[f] |
Implements the dict. update () method | def update(self, ava):
"""
Implements the dict.update() method
"""
for key, val in ava.items():
self[key] = val |
x-->int | byte. Returns-->BYTE (not str in python3). Behaves like PY2 chr() in PY2 or PY3. If x is a str of length > 1 or an int > 256, raises ValueError/TypeError if not SUPPRESS_ERRORS. | def chr(x):
'''
x-->int / byte
Returns-->BYTE (not str in python3)
Behaves like PY2 chr() in PY2 or PY3
if x is str of length > 1 or int > 256
    raises ValueError/TypeError if not SUPPRESS_ERRORS
'''
global _chr
    if isinstance(x, int):
        if x > 256:
            if SUPPRESS_ERRORS:
                x = x % 256
            else:
                raise ValueError('chr() arg not in range(256)')
        return toBytes(_chr(x))
    elif isinstance(x, bytes):
        x = fromBytes(x)
        if len(x) > 1:
            if not SUPPRESS_ERRORS:
                raise TypeError('chr() found string of length > 1')
            x = x[0]
        return toBytes(x)
    elif isinstance(x, str):
        if len(x) > 1:
            if not SUPPRESS_ERRORS:
                raise TypeError('chr() found string of length > 1')
            x = x[0]
        return toBytes(x)
else:
        raise TypeError('Unknown type passed to chr: %s' % str(type(x))) |
x-->char (str of length 1). Returns-->int. Behaves like PY2 ord() in PY2 or PY3. If x is a str of length > 1 or an int > 256, raises ValueError/TypeError if not SUPPRESS_ERRORS. | def ord(x):
'''
x-->char (str of length 1)
Returns-->int
Behaves like PY2 ord() in PY2 or PY3
if x is str of length > 1 or int > 256
    raises ValueError/TypeError if not SUPPRESS_ERRORS
'''
global _ord
if isinstance(x, int):
if x > 256:
if not SUPPRESS_ERRORS:
raise ValueError('ord() arg not in range(256)')
return x % 256
elif isinstance(x, bytes):
x = fromBytes(x)
if len(x) > 1:
if SUPPRESS_ERRORS:
x = x[0]
return _ord(x)
elif isinstance(x, str):
if len(x) > 1:
if SUPPRESS_ERRORS:
x = x[0]
return _ord(x)
else:
        raise TypeError('Unknown type passed to ord: %s' % str(type(x))) |
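A round-trip check of the two shims above, assuming _chr/_ord are the saved builtins and that toBytes/fromBytes are the helpers defined later in this module:

```python
assert chr(65) == b'A'       # always bytes, even on python3
assert ord(b'A') == 65
assert ord(chr(200)) == 200  # the latin1 range round-trips
```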
x-->bytes | bytearray. Returns-->bytes: hex-encoded | def hex(x):
'''
x-->bytes | bytearray
Returns-->bytes: hex-encoded
'''
if isinstance(x, bytearray):
x = bytes(x)
return encode(x, 'hex') |
x-->unicode string | bytearray | bytes. Returns-->unicode string, with encoding=latin1 | def fromBytes(x):
'''
x-->unicode string | bytearray | bytes
Returns-->unicode string, with encoding=latin1
'''
if isinstance(x, unicode):
return x
if isinstance(x, bytearray):
x = bytes(x)
elif isinstance(x, bytes):
pass
else:
return x # unchanged (int etc)
return decode(x, DEF_ENCODING) |
x-->unicode string | bytearray | bytes. Returns-->bytes. If x is unicode, MUST have encoding=latin1 | def toBytes(x):
'''
x-->unicode string | bytearray | bytes
Returns-->bytes
If x is unicode, MUST have encoding=latin1
'''
if isinstance(x, bytes):
return x
elif isinstance(x, bytearray):
return bytes(x)
elif isinstance(x, unicode):
pass
else:
return x # unchanged (int etc)
# ASSUMES latin1 encoding - Could raise an exception
return encode(x, DEF_ENCODING) |
encoding-->str: one of ENCODINGS. avoid-->list of int: code points to avoid (unprintable chars etc). Returns-->int that can be converted to the requested encoding and is NOT in avoid. | def get_rand_int(encoding='latin1', avoid=[]):
'''
encoding-->str: one of ENCODINGS
    avoid-->list of int: code points to avoid (unprintable chars etc)
Returns-->int that can be converted to requested encoding
which is NOT in avoid
'''
UNICODE_LIMIT = 0x10ffff
# See: https://en.wikipedia.org/wiki/UTF-8#Invalid_code_points
SURROGATE_RANGE = (0xD800, 0xDFFF)
if encoding not in ENCODINGS:
raise ValueError('Unsupported encoding: ' + str(encoding))
if encoding == 'ascii':
maxord = 2 ** 7
elif encoding == 'latin1':
maxord = 2 ** 8
elif encoding == 'utf16':
maxord = 2 ** 16
elif encoding == 'utf8':
maxord = 2 ** 32
elif encoding == 'utf32':
maxord = 2 ** 32
rndint = random.randrange(0, min(maxord, UNICODE_LIMIT))
while (
(rndint in avoid) or
(SURROGATE_RANGE[0] <= rndint <= SURROGATE_RANGE[1])
):
rndint = random.randrange(0, min(maxord, UNICODE_LIMIT))
return rndint |
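For example, drawing a latin1 code point while avoiding NUL and DEL:

```python
code = get_rand_int(encoding='latin1', avoid=[0x00, 0x7f])
assert 0 <= code < 256 and code not in (0x00, 0x7f)
```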
encoding-->str: one of ENCODINGS. l-->int: length of returned str. avoid-->list of int: code points to avoid (unprintable chars etc). Returns-->unicode str of the requested encoding. | def get_rand_str(encoding='latin1', l=64, avoid=[]):
'''
encoding-->str: one of ENCODINGS
l-->int: length of returned str
    avoid-->list of int: code points to avoid (unprintable chars etc)
Returns-->unicode str of the requested encoding
'''
ret = unicode('')
while len(ret) < l:
rndint = get_rand_int(encoding=encoding, avoid=avoid)
ret += unichr(rndint)
return ret |