Columns: signature (string, 8 to 3.44k chars), body (string, 0 to 1.41M chars), docstring (string, 1 to 122k chars), id (string, 5 to 17 chars)
def _get_trap(func, exc):
debug = False<EOL>ntokens = <NUM_LIT:2><EOL>fname = "<STR_LIT>".format(os.sep)<EOL>src = os.path.join(os.path.dirname(os.path.abspath(__file__)), "<STR_LIT>")<EOL>with open(src) as fobj:<EOL><INDENT>lines = [line.rstrip() for line in fobj.readlines()]<EOL><DEDENT>if debug:<EOL><INDENT>print(lines)<EOL><DEDENT>in_func = False<EOL>exc_lnum = None<EOL>if debug:<EOL><INDENT>print("<STR_LIT>".format(func, exc))<EOL><DEDENT>for lnum, line in enumerate(lines):<EOL><INDENT>if in_func:<EOL><INDENT>if debug:<EOL><INDENT>print("<STR_LIT>".format(line))<EOL><DEDENT>if line.strip() == exc.strip():<EOL><INDENT>exc_lnum = lnum<EOL>break<EOL><DEDENT>if line.startswith("<STR_LIT>"):<EOL><INDENT>break<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if debug:<EOL><INDENT>print(line)<EOL><DEDENT>in_func = line.startswith("<STR_LIT>".format(func))<EOL><DEDENT><DEDENT>if exc_lnum is None:<EOL><INDENT>raise RuntimeError(<EOL>"<STR_LIT>".format(func, exc)<EOL>)<EOL><DEDENT>return (ntokens, fname, exc_lnum + <NUM_LIT:1>, func, exc)<EOL>
Find a line in a function with a simple source file parser.
f4108:m0
def _eprint(msg):
print(msg, file=sys.stderr)<EOL>
Print passthrough function, provided to ease testing of the custom excepthook function.
f4108:m2
def _excepthook(exc_type, exc_value, exc_traceback):
tbs = traceback.extract_tb(exc_traceback)<EOL>offset = _find_test_module_frame(tbs)<EOL>if not offset:<EOL><INDENT>_ORIG_EXCEPTHOOK(exc_type, exc_value, exc_traceback)<EOL><DEDENT>new_tb = _process_tb(exc_traceback, offset)<EOL>if new_tb:<EOL><INDENT>exc_traceback = new_tb[<NUM_LIT:0>]<EOL><DEDENT>tbs = traceback.extract_tb(exc_traceback)<EOL>tblines = ["<STR_LIT>"]<EOL>tblines += traceback.format_list(tbs)<EOL>tblines = [_homogenize_breaks(item) for item in tblines if item.strip()]<EOL>regexp = re.compile(r"<STR_LIT>")<EOL>exc_type = regexp.match(str(exc_type)).groups()[<NUM_LIT:0>]<EOL>exc_type = exc_type[<NUM_LIT:11>:] if exc_type.startswith("<STR_LIT>") else exc_type<EOL>tblines += ["<STR_LIT>".format(exc_type, exc_value)]<EOL>lines = os.linesep.join(tblines)<EOL>_eprint(lines)<EOL>
Remove unwanted traceback elements past a given specific module call.
f4108:m3
def _get_fargs(func, no_self=False, no_varargs=False):
is_parg = lambda x: (len(x) > <NUM_LIT:1>) and (x[<NUM_LIT:0>] == "<STR_LIT:*>") and (x[<NUM_LIT:1>] != "<STR_LIT:*>")<EOL>is_kwarg = lambda x: (len(x) > <NUM_LIT:2>) and (x[:<NUM_LIT:2>] == "<STR_LIT>")<EOL>par_dict = signature(func).parameters<EOL>args = [<EOL>"<STR_LIT>".format(<EOL>prefix=(<EOL>"<STR_LIT:*>"<EOL>if par_dict[par].kind == par_dict[par].VAR_POSITIONAL<EOL>else ("<STR_LIT>" if par_dict[par].kind == par_dict[par].VAR_KEYWORD else "<STR_LIT>")<EOL>),<EOL>arg=par,<EOL>)<EOL>for par in par_dict<EOL>]<EOL>self_filtered_args = (<EOL>args if not args else (args[<NUM_LIT:1> if (args[<NUM_LIT:0>] == "<STR_LIT>") and no_self else <NUM_LIT:0> :])<EOL>)<EOL>varargs_filtered_args = tuple(<EOL>[<EOL>arg<EOL>for arg in self_filtered_args<EOL>if (<EOL>(not no_varargs)<EOL>or (no_varargs and (not is_parg(arg)) and (not is_kwarg(arg)))<EOL>)<EOL>]<EOL>)<EOL>return varargs_filtered_args<EOL>
Return function argument names. The names are returned in a tuple in the order they are specified in the function signature :param func: Function :type func: function object :param no_self: Flag that indicates whether the function argument *self*, if present, is included in the output (False) or not (True) :type no_self: boolean :param no_varargs: Flag that indicates whether variable arguments (*args, **kwargs) are included in the output (False) or not (True) :type no_varargs: boolean :rtype: tuple
f4108:m5
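A minimal, independent sketch of the same introspection idea using inspect.signature (this is not the library's own helper; the names below are illustrative): VAR_POSITIONAL parameters get a "*" prefix and VAR_KEYWORD parameters a "**" prefix, mirroring the prefixed names that _get_fargs builds before the optional self/varargs filtering.

from inspect import Parameter, signature

def arg_names(func):
    """Return parameter names with */** prefixes, in signature order."""
    out = []
    for name, par in signature(func).parameters.items():
        if par.kind is Parameter.VAR_POSITIONAL:
            out.append("*" + name)
        elif par.kind is Parameter.VAR_KEYWORD:
            out.append("**" + name)
        else:
            out.append(name)
    return tuple(out)

def demo(self, value, *args, **kwargs):
    pass

print(arg_names(demo))  # ('self', 'value', '*args', '**kwargs')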
def _homogenize_breaks(msg):
token = "<STR_LIT>".format(uuid.uuid4())<EOL>msg = msg.replace(os.linesep, token)<EOL>msg = msg.replace("<STR_LIT:\n>", os.linesep)<EOL>msg = msg.replace(token, os.linesep).rstrip()<EOL>return msg<EOL>
Replace stray newline characters with platform-correct line separator.
f4108:m6
def _invalid_frame(fobj):
fin = fobj.f_code.co_filename<EOL>invalid_module = fin.endswith("<STR_LIT>")<EOL>return invalid_module or (not os.path.isfile(fin))<EOL>
Select valid stack frame to process.
f4108:m7
def _process_tb(trbk, offset=-<NUM_LIT:1>):
obj = trbk<EOL>iret = []<EOL>while obj:<EOL><INDENT>iret.append((obj.tb_frame, obj.tb_lasti, obj.tb_lineno, obj.tb_next))<EOL>obj = obj.tb_next<EOL><DEDENT>ret = [_CustomTraceback(*item) for item in iret[:offset]]<EOL>if ret:<EOL><INDENT>ret[-<NUM_LIT:1>].tb_next = None<EOL><DEDENT>return ret<EOL>
Create a "copy" of the traceback chain and cut it at a predefined depth.
f4108:m9
def _raise_exception_mismatch(excinfo, extype, exmsg):
with warnings.catch_warnings():<EOL><INDENT>warnings.filterwarnings("<STR_LIT:ignore>")<EOL>regexp = re.compile(exmsg) if isinstance(exmsg, str) else None<EOL><DEDENT>actmsg = get_exmsg(excinfo)<EOL>acttype = (<EOL>exception_type_str(excinfo.type)<EOL>if hasattr(excinfo, "<STR_LIT:type>")<EOL>else repr(excinfo)[: repr(excinfo).find("<STR_LIT:(>")]<EOL>)<EOL>if not (<EOL>(acttype == exception_type_str(extype))<EOL>and ((actmsg == exmsg) or (regexp and regexp.match(actmsg)))<EOL>):<EOL><INDENT>assert False, (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>".format(<EOL>os.linesep, exception_type_str(extype), exmsg, acttype, actmsg<EOL>)<EOL>)<EOL><DEDENT>
Create a verbose message when the expected exception does not match the actual exception. The mismatch may be due to a different exception type, a different exception message, or both.
f4108:m10
def _raise_if_not_raised(eobj, exmsg=None):
if get_exmsg(eobj).upper().startswith("<STR_LIT>"):<EOL><INDENT>raise AssertionError(exmsg or "<STR_LIT>")<EOL><DEDENT>
Raise an exception if there was no exception raised (and it should have been).
f4108:m11
def assert_arg_invalid(fpointer, pname, *args, **kwargs):
assert_exception(<EOL>fpointer,<EOL>RuntimeError,<EOL>"<STR_LIT>".format(pname),<EOL>*args,<EOL>**kwargs<EOL>)<EOL>
r""" Test if function raises :code:`RuntimeError('Argument \`*pname*\` is not valid')`. :code:`*pname*` is the value of the **pname** argument, when called with given positional and/or keyword arguments :param fpointer: Object to evaluate :type fpointer: callable :param pname: Parameter name :type pname: string :param args: Positional arguments to pass to object :type args: tuple :param kwargs: Keyword arguments to pass to object :type kwargs: dictionary :raises: * AssertionError (Did not raise) * RuntimeError (Illegal number of arguments)
f4108:m12
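A hedged usage sketch (my_func and its validation are made up for illustration, not part of the source): assert_arg_invalid passes only when the callable raises RuntimeError with the message documented above for the named parameter.

import pmisc  # module name taken from the doctests elsewhere in this file

def my_func(value):  # hypothetical function under test
    if value < 0:
        raise RuntimeError("Argument `value` is not valid")
    return value

# Passes silently: my_func(-1) raises the expected RuntimeError for `value`.
pmisc.assert_arg_invalid(my_func, "value", -1)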
def assert_exception(fpointer, extype, exmsg, *args, **kwargs):
<EOL>arg_dict = {}<EOL>if args:<EOL><INDENT>fargs = _get_fargs(fpointer, no_self=True)<EOL>if len(args) > len(fargs):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>arg_dict = dict(zip(fargs, args))<EOL><DEDENT>arg_dict.update(kwargs)<EOL>inner_ex = False<EOL>eobj = None<EOL>try:<EOL><INDENT>with pytest.raises(extype) as excinfo:<EOL><INDENT>fpointer(**arg_dict)<EOL><DEDENT><DEDENT>except (BaseException, Exception, Failed) as tmp_eobj:<EOL><INDENT>eobj = tmp_eobj<EOL>inner_ex = True<EOL><DEDENT>if inner_ex:<EOL><INDENT>_raise_if_not_raised(eobj)<EOL>_raise_exception_mismatch(eobj, extype, exmsg)<EOL><DEDENT>else:<EOL><INDENT>_raise_exception_mismatch(excinfo, extype, exmsg)<EOL><DEDENT>
Assert an exception type and message within the Py.test environment. If the actual exception message and the expected exception message do not literally match then the expected exception message is treated as a regular expression and a match is sought with the actual exception message :param fpointer: Object to evaluate :type fpointer: callable :param extype: Expected exception type :type extype: type :param exmsg: Expected exception message (can have regular expressions) :type exmsg: any :param args: Positional arguments to pass to object :type args: tuple :param kwargs: Keyword arguments to pass to object :type kwargs: dictionary For example: >>> import pmisc >>> try: ... pmisc.assert_exception( ... pmisc.normalize, ... RuntimeError, ... 'Argument `offset` is not valid', ... 15, [10, 20], 0 ... ) #doctest: +ELLIPSIS ... except: ... raise RuntimeError('Exception not raised') Traceback (most recent call last): ... RuntimeError: Exception not raised :raises: * AssertionError (Did not raise) * RuntimeError (Illegal number of arguments)
f4108:m13
def assert_prop(cobj, prop_name, value, extype, exmsg):
<EOL>fnum = <NUM_LIT:0><EOL>fobj = sys._getframe(fnum)<EOL>while _invalid_frame(fobj):<EOL><INDENT>fnum += <NUM_LIT:1><EOL>fobj = sys._getframe(fnum)<EOL><DEDENT>fobj = sys._getframe(fnum)<EOL>lvars = copy.copy(fobj.f_locals)<EOL>lvars.update({"<STR_LIT>": cobj})<EOL>cmd = "<STR_LIT>" + prop_name + "<STR_LIT>" + repr(value)<EOL>try:<EOL><INDENT>with pytest.raises(extype) as excinfo:<EOL><INDENT>exec(cmd, fobj.f_globals, lvars)<EOL><DEDENT><DEDENT>except (BaseException, Exception, Failed) as eobj:<EOL><INDENT>_raise_if_not_raised(eobj)<EOL>_raise_exception_mismatch(eobj, extype, exmsg)<EOL><DEDENT>_raise_exception_mismatch(excinfo, extype, exmsg)<EOL>
Assert whether a class property raises an exception when assigned a value. :param cobj: Class object :type cobj: class object :param prop_name: Property name :type prop_name: string :param extype: Exception type :type extype: Exception type object, i.e. RuntimeError, TypeError, etc. :param exmsg: Exception message :type exmsg: string
f4108:m14
def assert_ro_prop(cobj, prop_name):
try:<EOL><INDENT>with pytest.raises(AttributeError) as excinfo:<EOL><INDENT>exec("<STR_LIT>" + prop_name, None, locals())<EOL><DEDENT><DEDENT>except (BaseException, Exception, Failed) as eobj:<EOL><INDENT>_raise_if_not_raised(eobj, "<STR_LIT>")<EOL><DEDENT>extype = "<STR_LIT>"<EOL>exmsg = "<STR_LIT>"<EOL>_raise_exception_mismatch(excinfo, extype, exmsg)<EOL>
Assert that a class property cannot be deleted. :param cobj: Class object :type cobj: class object :param prop_name: Property name :type prop_name: string
f4108:m15
def compare_strings(actual, ref, diff_mode=False):
<EOL>pyellow = lambda x, y: x if x == y else _pcolor(x, "<STR_LIT>")<EOL>def colorize_lines(list1, list2, template, mode=True):<EOL><INDENT>iobj = izip_longest(list1, list2, fillvalue="<STR_LIT>")<EOL>for num, (line1, line2) in enumerate(iobj):<EOL><INDENT>if mode and (len(list2) - <NUM_LIT:1> < num):<EOL><INDENT>break<EOL><DEDENT>line = [pyellow(chr2, chr1) for chr1, chr2 in zip(line1, line2)]<EOL>line = "<STR_LIT>".join(line).replace("<STR_LIT>", "<STR_LIT>")<EOL>if len(line2) > len(line1):<EOL><INDENT>line += _pcolor(line2[len(line1) :], "<STR_LIT>")<EOL><DEDENT>yield template.format(num + <NUM_LIT:1>, line)<EOL><DEDENT><DEDENT>def print_non_diff(msg, list1, list2, template):<EOL><INDENT>ret = "<STR_LIT>"<EOL>ret += _pcolor(msg, "<STR_LIT>") + os.linesep<EOL>ret += _pcolor("<STR_LIT:->" * len(msg), "<STR_LIT>") + os.linesep<EOL>for line in colorize_lines(list1, list2, template):<EOL><INDENT>ret += line + os.linesep<EOL><DEDENT>return ret<EOL><DEDENT>def print_diff(list1, list2, template1, template2, sep):<EOL><INDENT>iobj = zip(<EOL>colorize_lines(list1, list2, template1, False),<EOL>colorize_lines(list2, list1, template2, False),<EOL>)<EOL>ret = "<STR_LIT>"<EOL>for rline, aline in iobj:<EOL><INDENT>ret += _pcolor(sep, "<STR_LIT>") + os.linesep<EOL>ret += rline + os.linesep<EOL>ret += aline + os.linesep<EOL><DEDENT>return ret<EOL><DEDENT>if not isinstance(actual, str):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>if not isinstance(ref, str):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>if not isinstance(diff_mode, bool):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>if actual != ref:<EOL><INDENT>actual = actual.split(os.linesep)<EOL>ref = ref.split(os.linesep)<EOL>length = len(str(max(len(actual), len(ref))))<EOL>ret = _pcolor("<STR_LIT>", "<STR_LIT>") + os.linesep<EOL>ret += "<STR_LIT>" + os.linesep<EOL>ret += _pcolor("<STR_LIT>", "<STR_LIT>") + os.linesep<EOL>ret += _pcolor("<STR_LIT>", "<STR_LIT>") + os.linesep<EOL>if not diff_mode:<EOL><INDENT>template = _pcolor("<STR_LIT>" + str(length) + "<STR_LIT>", "<STR_LIT>") + "<STR_LIT>"<EOL>ret += print_non_diff("<STR_LIT>", actual, ref, template)<EOL>ret += print_non_diff("<STR_LIT>", ref, actual, template)<EOL><DEDENT>else:<EOL><INDENT>mline = max(<EOL>[<EOL>max(len(item1), len(item2))<EOL>for item1, item2 in izip_longest(actual, ref, fillvalue="<STR_LIT>")<EOL>]<EOL>)<EOL>sep = "<STR_LIT:->" * (mline + length + <NUM_LIT:9>)<EOL>template1 = _pcolor("<STR_LIT>" + str(length) + "<STR_LIT>", "<STR_LIT>") + "<STR_LIT>"<EOL>template2 = _pcolor("<STR_LIT:U+0020>" * length + "<STR_LIT>", "<STR_LIT>") + "<STR_LIT>"<EOL>ret += print_diff(actual, ref, template1, template2, sep)<EOL><DEDENT>ret += _pcolor("<STR_LIT>", "<STR_LIT>") + os.linesep<EOL>raise AssertionError("<STR_LIT>" + os.linesep + ret)<EOL><DEDENT>
r""" Compare two strings. Lines are numbered, differing characters are colored yellow and extra characters (characters present in one string but not in the other) are colored red :param actual: Text produced by software under test :type actual: string :param ref: Reference text :type ref: string :param diff_mode: Flag that indicates whether the line(s) of the actual and reference strings are printed one right after the other (True) of if the actual and reference strings are printed separately (False) :type diff_mode: boolean :raises: * AssertionError(Strings do not match) * RuntimeError(Argument \`actual\` is not valid) * RuntimeError(Argument \`diff_mode\` is not valid) * RuntimeError(Argument \`ref\` is not valid)
f4108:m16
def comp_list_of_dicts(list1, list2):
for item in list1:<EOL><INDENT>if item not in list2:<EOL><INDENT>print("<STR_LIT>")<EOL>print(item)<EOL>return False<EOL><DEDENT><DEDENT>for item in list2:<EOL><INDENT>if item not in list1:<EOL><INDENT>print("<STR_LIT>")<EOL>print(item)<EOL>return False<EOL><DEDENT><DEDENT>return True<EOL>
Compare list of dictionaries. :param list1: First list of dictionaries to compare :type list1: list of dictionaries :param list2: Second list of dictionaries to compare :type list2: list of dictionaries :rtype: boolean
f4108:m17
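A short usage sketch (assuming comp_list_of_dicts is exposed at the package level like the other helpers here): the comparison is order-insensitive, and the first dictionary missing from either side is printed before False is returned.

import pmisc

a = [{"x": 1}, {"y": 2}]
b = [{"y": 2}, {"x": 1}]

pmisc.comp_list_of_dicts(a, b)            # True: same dicts, different order
pmisc.comp_list_of_dicts(a, [{"x": 1}])   # False: prints the item missing from list2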
def exception_type_str(exobj):
return _ex_type_str(exobj)<EOL>
Return an exception type string. :param exobj: Exception :type exobj: type (Python 2) or class (Python 3) :rtype: string For example: >>> import pmisc >>> pmisc.exception_type_str(RuntimeError) 'RuntimeError'
f4108:m18
def get_exmsg(exobj):
return _get_ex_msg(exobj)<EOL>
Return exception message (Python interpreter version independent). :param exobj: Exception object :type exobj: exception object :rtype: string
f4108:m19
def isalpha(obj):
<EOL>try:<EOL><INDENT>float(obj)<EOL>return isinstance(obj, str)<EOL><DEDENT>except:<EOL><INDENT>return False<EOL><DEDENT>
Test if the argument is a string representing a number. :param obj: Object :type obj: any :rtype: boolean For example: >>> import pmisc >>> pmisc.isalpha('1.5') True >>> pmisc.isalpha('1E-20') True >>> pmisc.isalpha('1EA-20') False
f4109:m0
def ishex(obj):
return isinstance(obj, str) and (len(obj) == <NUM_LIT:1>) and (obj in string.hexdigits)<EOL>
Test if the argument is a string representing a valid hexadecimal digit. :param obj: Object :type obj: any :rtype: boolean
f4109:m1
def isiterable(obj):
try:<EOL><INDENT>iter(obj)<EOL><DEDENT>except TypeError:<EOL><INDENT>return False<EOL><DEDENT>return True<EOL>
Test if the argument is an iterable. :param obj: Object :type obj: any :rtype: boolean
f4109:m2
def isnumber(obj):
return (<EOL>(obj is not None)<EOL>and (not isinstance(obj, bool))<EOL>and isinstance(obj, (int, float, complex))<EOL>)<EOL>
Test if the argument is a number (complex, float or integer). :param obj: Object :type obj: any :rtype: boolean
f4109:m3
def isreal(obj):
return (<EOL>(obj is not None)<EOL>and (not isinstance(obj, bool))<EOL>and isinstance(obj, (int, float))<EOL>)<EOL>
Test if the argument is a real number (float or integer). :param obj: Object :type obj: any :rtype: boolean
f4109:m4
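A few usage examples for the predicate helpers above (the pmisc module name is taken from the doctests in this file; the expected results follow directly from the checks in each body):

import pmisc

pmisc.isnumber(1 + 2j)    # True: complex numbers count
pmisc.isnumber(True)      # False: booleans are explicitly excluded
pmisc.isreal(1.5)         # True
pmisc.isreal(1 + 2j)      # False: complex values are not real
pmisc.isiterable("abc")   # True: anything iter() accepts
pmisc.isiterable(5)       # False
pmisc.ishex("f")          # True: a single hexadecimal digit
pmisc.ishex("0x1f")       # False: only single characters qualify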
def _ex_type_str(exobj):
regexp = re.compile(r"<STR_LIT>")<EOL>exc_type = str(exobj)<EOL>if regexp.match(exc_type):<EOL><INDENT>exc_type = regexp.match(exc_type).groups()[<NUM_LIT:0>]<EOL>exc_type = exc_type[<NUM_LIT:11>:] if exc_type.startswith("<STR_LIT>") else exc_type<EOL><DEDENT>if "<STR_LIT:.>" in exc_type:<EOL><INDENT>exc_type = exc_type.split("<STR_LIT:.>")[-<NUM_LIT:1>]<EOL><DEDENT>return exc_type<EOL>
Return a string corresponding to the exception type.
f4110:m0
def _get_ex_msg(obj):
return obj.value.args[<NUM_LIT:0>] if hasattr(obj, "<STR_LIT:value>") else obj.args[<NUM_LIT:0>]<EOL>
Get exception message.
f4110:m1
def _readlines(fname):
with open(fname, "<STR_LIT:r>") as fobj:<EOL><INDENT>return fobj.readlines()<EOL><DEDENT>
Read all lines from file.
f4110:m2
def _unicode_to_ascii(obj):
<EOL>if isinstance(obj, dict):<EOL><INDENT>return dict(<EOL>[<EOL>(_unicode_to_ascii(key), _unicode_to_ascii(value))<EOL>for key, value in obj.items()<EOL>]<EOL>)<EOL><DEDENT>if isinstance(obj, list):<EOL><INDENT>return [_unicode_to_ascii(element) for element in obj]<EOL><DEDENT>if isinstance(obj, unicode):<EOL><INDENT>return obj.encode("<STR_LIT:utf-8>")<EOL><DEDENT>return obj<EOL>
Convert to ASCII.
f4110:m3
def _write(fobj, data):
fobj.write(data)<EOL>
Write data to file.
f4110:m4
def _ex_type_str(exobj):
regexp = re.compile(r"<STR_LIT>")<EOL>exc_type = str(exobj)<EOL>if regexp.match(exc_type):<EOL><INDENT>exc_type = regexp.match(exc_type).groups()[<NUM_LIT:0>]<EOL>exc_type = exc_type[<NUM_LIT:11>:] if exc_type.startswith("<STR_LIT>") else exc_type<EOL><DEDENT>if "<STR_LIT:.>" in exc_type:<EOL><INDENT>exc_type = str(exobj).split("<STR_LIT:'>")[<NUM_LIT:1>].split("<STR_LIT:.>")[-<NUM_LIT:1>]<EOL><DEDENT>return exc_type<EOL>
Return a string corresponding to the exception type.
f4111:m0
def _get_ex_msg(obj):
return obj.value.args[<NUM_LIT:0>] if hasattr(obj, "<STR_LIT:value>") else obj.args[<NUM_LIT:0>]<EOL>
Get exception message.
f4111:m1
def _readlines(fname, fpointer1=open, fpointer2=open):
<EOL>try:<EOL><INDENT>with fpointer1(fname, "<STR_LIT:r>") as fobj:<EOL><INDENT>return fobj.readlines()<EOL><DEDENT><DEDENT>except UnicodeDecodeError: <EOL><INDENT>with fpointer2(fname, "<STR_LIT:r>", encoding="<STR_LIT:utf-8>") as fobj:<EOL><INDENT>return fobj.readlines()<EOL><DEDENT><DEDENT>
Read all lines from file.
f4111:m2
def _unicode_to_ascii(obj):
<EOL>return obj<EOL>
Convert to ASCII.
f4111:m3
def _write(fobj, data):
fobj.write(data)<EOL>
Write data to file.
f4111:m4
def make_dir(fname):
file_path, fname = os.path.split(os.path.abspath(fname))<EOL>if not os.path.exists(file_path):<EOL><INDENT>os.makedirs(file_path)<EOL><DEDENT>
Create the directory of a fully qualified file name if it does not exist. Equivalent to these Bash shell commands: .. code-block:: bash $ fname="${HOME}/mydir/myfile.txt" $ dir=$(dirname "${fname}") $ mkdir -p "${dir}" :param fname: Fully qualified file name :type fname: string
f4113:m0
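A small usage sketch mirroring the Bash equivalent in the docstring (the path below is illustrative, and make_dir is assumed to be exposed at the package level as the other doctests suggest):

import os
import pmisc

fname = os.path.join(os.path.expanduser("~"), "mydir", "myfile.txt")
pmisc.make_dir(fname)  # creates ~/mydir if needed; the file itself is not created
with open(fname, "w") as fobj:
    fobj.write("hello")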
def normalize_windows_fname(fname, _force=False):
if (platform.system().lower() != "<STR_LIT>") and (not _force): <EOL><INDENT>return fname<EOL><DEDENT>rchars = {<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>"<STR_LIT>": r"<STR_LIT>",<EOL>}<EOL>ret = "<STR_LIT>"<EOL>for char in os.path.normpath(fname):<EOL><INDENT>ret = ret + rchars.get(char, char)<EOL><DEDENT>network_share = False<EOL>tmp = None<EOL>network_share = fname.startswith(r"<STR_LIT:\\>")<EOL>while tmp != ret:<EOL><INDENT>tmp, ret = ret, ret.replace(r"<STR_LIT>", r"<STR_LIT:\\>")<EOL><DEDENT>ret = ret.replace(r"<STR_LIT>", r"<STR_LIT:\\>")<EOL>if network_share:<EOL><INDENT>ret = r"<STR_LIT:\\>" + ret.lstrip(r"<STR_LIT:\\>")<EOL><DEDENT>return ret<EOL>
r""" Fix potential problems with a Microsoft Windows file name. Superfluous backslashes are removed and unintended escape sequences are converted to their equivalent (presumably correct and intended) representation, for example :code:`r'\\\\x07pps'` is transformed to :code:`r'\\\\\\\\apps'`. A file name is considered network shares if the file does not include a drive letter and they start with a double backslash (:code:`'\\\\\\\\'`) :param fname: File name :type fname: string :rtype: string
f4113:m1
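A hedged sketch of intended use (outputs are not asserted because the replacement table above is elided): the function is a no-op on non-Windows platforms unless _force is set.

import pmisc

# Returned unchanged on Linux/macOS when _force is left at its default.
pmisc.normalize_windows_fname(r"\\server\share\file.txt")

# With _force=True the Windows normalization rules are applied regardless
# of the current platform (useful for testing).
pmisc.normalize_windows_fname(r"\\server\share\file.txt", _force=True)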
def _homogenize_linesep(line):
token = str(uuid.uuid4())<EOL>line = line.replace(os.linesep, token).replace("<STR_LIT:\n>", "<STR_LIT>").replace("<STR_LIT:\r>", "<STR_LIT>")<EOL>return line.replace(token, os.linesep)<EOL>
Enforce line separators to be the right one depending on platform.
f4115:m0
def _proc_token(spec, mlines):
spec = spec.strip().replace("<STR_LIT:U+0020>", "<STR_LIT>")<EOL>regexp = re.compile(r"<STR_LIT>")<EOL>tokens = spec.split("<STR_LIT:U+002C>")<EOL>cond = any([not item for item in tokens])<EOL>if ("<STR_LIT>" in spec) or ("<STR_LIT>" in spec) or ("<STR_LIT>" in spec) or cond or regexp.match(spec):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>lines = []<EOL>for token in tokens:<EOL><INDENT>if token.count("<STR_LIT:->") > <NUM_LIT:1>:<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>if "<STR_LIT:->" in token:<EOL><INDENT>subtokens = token.split("<STR_LIT:->")<EOL>lmin, lmax = (<EOL>int(subtokens[<NUM_LIT:0>]),<EOL>int(subtokens[<NUM_LIT:1>]) if subtokens[<NUM_LIT:1>] else mlines,<EOL>)<EOL>for num in range(lmin, lmax + <NUM_LIT:1>):<EOL><INDENT>lines.append(num)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>lines.append(int(token))<EOL><DEDENT><DEDENT>if lines != sorted(lines):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>return lines<EOL>
Process line range tokens.
f4115:m1
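An illustrative sketch of the token grammar this helper accepts, assuming the elided separators are "," and "-": comma-separated line numbers and ranges, an open-ended range running to mlines, and a result that must already be in ascending order.

# Hypothetical calls; the exact error messages are elided in the body above.
_proc_token("1,3-5,8-", 10)   # -> [1, 3, 4, 5, 8, 9, 10]
_proc_token("5,2", 10)        # raises RuntimeError: tokens not in ascending order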
def incfile(fname, fpointer, lrange=None, sdir=None):
<EOL>file_dir = (<EOL>sdir<EOL>if sdir<EOL>else os.environ.get("<STR_LIT>", os.path.abspath(os.path.dirname(__file__)))<EOL>)<EOL>fname = os.path.join(file_dir, fname)<EOL>with open(fname, "<STR_LIT:r>") as fobj:<EOL><INDENT>lines = fobj.readlines()<EOL><DEDENT>lines = [_homogenize_linesep(line) for line in lines]<EOL>inc_lines = (<EOL>_proc_token(lrange, len(lines)) if lrange else list(range(<NUM_LIT:1>, len(lines) + <NUM_LIT:1>))<EOL>)<EOL>fpointer("<STR_LIT>" + os.linesep)<EOL>fpointer(os.linesep)<EOL>for num, line in enumerate(lines):<EOL><INDENT>if num + <NUM_LIT:1> in inc_lines:<EOL><INDENT>fpointer(<EOL>"<STR_LIT:U+0020>" + line.replace("<STR_LIT:\t>", "<STR_LIT:U+0020>").rstrip() + os.linesep<EOL>if line.strip()<EOL>else os.linesep<EOL>)<EOL><DEDENT><DEDENT>fpointer(os.linesep)<EOL>
r""" Return a Python source file formatted in reStructuredText. .. role:: bash(code) :language: bash :param fname: File name, relative to environment variable :bash:`PKG_DOC_DIR` :type fname: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but other functions can be used for debugging :type fpointer: function object :param lrange: Line range to include, similar to Sphinx `literalinclude <http://www.sphinx-doc.org/en/master/usage /restructuredtext/directives.html #directive-literalinclude>`_ directive :type lrange: string :param sdir: Source file directory. If None the :bash:`PKG_DOC_DIR` environment variable is used if it is defined, otherwise the directory where the module is located is used :type sdir: string For example: .. code-block:: python def func(): \"\"\" This is a docstring. This file shows how to use it: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('func_example.py', cog.out) .. =]= .. code-block:: python # func_example.py if __name__ == '__main__': func() .. =[=end=]= \"\"\" return 'This is func output'
f4115:m2
def ste(command, nindent, mdir, fpointer, env=None):
sdir = LDELIM + "<STR_LIT>" + RDELIM<EOL>command = (<EOL>sdir + ("<STR_LIT>".format(sep=os.path.sep, cmd=command))<EOL>if env is None<EOL>else command<EOL>)<EOL>env = {"<STR_LIT>": mdir} if env is None else env<EOL>term_echo(command, nindent, env, fpointer)<EOL>
Print STDOUT of a shell command formatted in reStructuredText. This is a simplified version of :py:func:`pmisc.term_echo`. :param command: Shell command (relative to **mdir** if **env** is not given) :type command: string :param nindent: Indentation level :type nindent: integer :param mdir: Module directory, used if **env** is not given :type mdir: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param env: Environment dictionary. If not provided, the environment dictionary is the key "PKG_BIN_DIR" with the value of the **mdir** :type env: dictionary For example:: .. This is a reStructuredText file snippet .. [[[cog .. import os, sys .. from docs.support.term_echo import term_echo .. file_name = sys.modules['docs.support.term_echo'].__file__ .. mdir = os.path.realpath( .. os.path.dirname( .. os.path.dirname(os.path.dirname(file_name)) .. ) .. ) .. [[[cog ste('build_docs.py -h', 0, mdir, cog.out) ]]] .. code-block:: console $ ${PKG_BIN_DIR}/build_docs.py -h usage: build_docs.py [-h] [-d DIRECTORY] [-n NUM_CPUS] ... $ .. ]]]
f4115:m3
def term_echo(command, nindent=<NUM_LIT:0>, env=None, fpointer=None, cols=<NUM_LIT>):
<EOL>os.environ["<STR_LIT>"] = str(cols)<EOL>command_int = command<EOL>if env:<EOL><INDENT>for var, repl in env.items():<EOL><INDENT>command_int = command_int.replace('<STR_LIT:">' + LDELIM + var + RDELIM + '<STR_LIT:">', repl)<EOL>command_int = command_int.replace(LDELIM + var + RDELIM, repl)<EOL><DEDENT><DEDENT>tokens = command_int.split("<STR_LIT:U+0020>")<EOL>if (platform.system().lower() == "<STR_LIT>") and (<EOL>tokens[<NUM_LIT:0>].endswith("<STR_LIT>")<EOL>): <EOL><INDENT>tokens = [sys.executable] + tokens<EOL><DEDENT>proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)<EOL>stdout = proc.communicate()[<NUM_LIT:0>]<EOL>if sys.hexversion >= <NUM_LIT>: <EOL><INDENT>stdout = stdout.decode("<STR_LIT:utf-8>")<EOL><DEDENT>stdout = stdout.split("<STR_LIT:\n>")<EOL>indent = nindent * "<STR_LIT:U+0020>"<EOL>fpointer(os.linesep)<EOL>fpointer("<STR_LIT>".format(indent, os.linesep))<EOL>fpointer(os.linesep)<EOL>fpointer("<STR_LIT>".format(indent, command, os.linesep))<EOL>for line in stdout:<EOL><INDENT>line = _homogenize_linesep(line)<EOL>if line.strip():<EOL><INDENT>fpointer(indent + "<STR_LIT:U+0020>" + line.replace("<STR_LIT:\t>", "<STR_LIT:U+0020>") + os.linesep)<EOL><DEDENT>else:<EOL><INDENT>fpointer(os.linesep)<EOL><DEDENT><DEDENT>
Print STDOUT of a shell command formatted in reStructuredText. .. role:: bash(code) :language: bash :param command: Shell command :type command: string :param nindent: Indentation level :type nindent: integer :param env: Environment variable replacement dictionary. The command is pre-processed and any environment variable represented in the full notation (:bash:`${...}` in Linux and OS X or :bash:`%...%` in Windows) is replaced. The dictionary key is the environment variable name and the dictionary value is the replacement value. For example, if **command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env** is :code:`{'PYTHON_CMD':'python3'}` the actual command issued is :code:`'python3 -m "x=5"'` :type env: dictionary :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param cols: Number of columns of output :type cols: integer
f4115:m4
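A hedged usage sketch (print is substituted for cog.out, as the docstring allows; the command and environment values are illustrative):

import sys

# Replaces ${PYTHON_CMD} with the current interpreter, runs the command, and
# emits an indented reStructuredText console block through `print`.
term_echo(
    "${PYTHON_CMD} --version",
    nindent=3,
    env={"PYTHON_CMD": sys.executable},
    fpointer=print,
    cols=70,
)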
@decorator.contextmanager<EOL>def ignored(*exceptions):
try:<EOL><INDENT>yield<EOL><DEDENT>except exceptions:<EOL><INDENT>pass<EOL><DEDENT>
Execute commands and selectively ignore exceptions. Inspired by `"Transforming Code into Beautiful, Idiomatic Python" <https://pyvideo.org/video/1780/ transforming-code-into-beautiful-idiomatic-pytho>`_ talk at PyCon US 2013 by Raymond Hettinger. :param exceptions: Exception type(s) to ignore :type exceptions: Exception object, i.e. RuntimeError, OSError, etc. For example: .. =[=cog .. import pmisc .. pmisc.incfile('pmisc_example_1.py', cog.out, '1, 6-', mdir) .. =]= .. code-block:: python # pmisc_example_1.py from __future__ import print_function import os, pmisc def ignored_example(): fname = 'somefile.tmp' open(fname, 'w').close() print('File {0} exists? {1}'.format( fname, os.path.isfile(fname) )) with pmisc.ignored(OSError): os.remove(fname) print('File {0} exists? {1}'.format( fname, os.path.isfile(fname) )) with pmisc.ignored(OSError): os.remove(fname) print('No exception trying to remove a file that does not exists') try: with pmisc.ignored(RuntimeError): os.remove(fname) except: print('Got an exception') .. =[=end=]= .. code-block:: python >>> import docs.support.pmisc_example_1 >>> docs.support.pmisc_example_1.ignored_example() File somefile.tmp exists? True File somefile.tmp exists? False No exception trying to remove a file that does not exists Got an exception
f4116:m0
def get_short_desc(long_desc):
found = False<EOL>olines = []<EOL>for line in [item.rstrip() for item in long_desc.split("<STR_LIT:\n>")]:<EOL><INDENT>if found and (((not line) and (not olines)) or (line and olines)):<EOL><INDENT>olines.append(line)<EOL><DEDENT>elif found and olines and (not line):<EOL><INDENT>return ("<STR_LIT:U+0020>".join(olines).split("<STR_LIT:.>")[<NUM_LIT:0>]).strip()<EOL><DEDENT>found = line == "<STR_LIT>" if not found else found<EOL><DEDENT>return "<STR_LIT>"<EOL>
Get first sentence of first paragraph of long description.
f4128:m0
def read(*filenames, **kwargs):
encoding = kwargs.get("<STR_LIT>", "<STR_LIT:utf-8>")<EOL>sep = kwargs.get("<STR_LIT>", "<STR_LIT:\n>")<EOL>buf = []<EOL>for filename in filenames:<EOL><INDENT>with io.open(filename, encoding=encoding) as fobj:<EOL><INDENT>buf.append(fobj.read())<EOL><DEDENT><DEDENT>return sep.join(buf)<EOL>
Read plain text file(s).
f4128:m1
def ste(command, nindent, mdir, fpointer):
term_echo(<EOL>"<STR_LIT>".format(sep=os.path.sep, cmd=command),<EOL>nindent,<EOL>{"<STR_LIT>": mdir},<EOL>fpointer,<EOL>)<EOL>
r""" Echo terminal output. Print STDOUT resulting from a given Bash shell command (relative to the package :code:`pypkg` directory) formatted in reStructuredText :param command: Bash shell command, relative to :bash:`${PMISC_DIR}/pypkg` :type command: string :param nindent: Indentation level :type nindent: integer :param mdir: Module directory :type mdir: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object For example:: .. This is a reStructuredText file snippet .. [[[cog .. import os, sys .. from docs.support.term_echo import term_echo .. file_name = sys.modules['docs.support.term_echo'].__file__ .. mdir = os.path.realpath( .. os.path.dirname( .. os.path.dirname(os.path.dirname(file_name)) .. ) .. ) .. [[[cog ste('build_docs.py -h', 0, mdir, cog.out) ]]] .. code-block:: bash $ ${PMISC_DIR}/pypkg/build_docs.py -h usage: build_docs.py [-h] [-d DIRECTORY] [-n NUM_CPUS] ... .. ]]]
f4131:m0
def term_echo(command, nindent=<NUM_LIT:0>, env=None, fpointer=None, cols=<NUM_LIT>):
<EOL>os.environ["<STR_LIT>"] = str(cols)<EOL>command_int = command<EOL>if env:<EOL><INDENT>for var, repl in env.items():<EOL><INDENT>command_int = command_int.replace("<STR_LIT>" + var + "<STR_LIT:}>", repl)<EOL><DEDENT><DEDENT>tokens = command_int.split("<STR_LIT:U+0020>")<EOL>if (platform.system().lower() == "<STR_LIT>") and (tokens[<NUM_LIT:0>].endswith("<STR_LIT>")):<EOL><INDENT>tokens = [sys.executable] + tokens<EOL><DEDENT>proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)<EOL>stdout = proc.communicate()[<NUM_LIT:0>]<EOL>if sys.hexversion >= <NUM_LIT>:<EOL><INDENT>stdout = stdout.decode("<STR_LIT:utf-8>")<EOL><DEDENT>stdout = stdout.split("<STR_LIT:\n>")<EOL>indent = nindent * "<STR_LIT:U+0020>"<EOL>fpointer("<STR_LIT:\n>", dedent=False)<EOL>fpointer("<STR_LIT>".format(indent), dedent=False)<EOL>fpointer("<STR_LIT:\n>", dedent=False)<EOL>fpointer("<STR_LIT>".format(indent, command), dedent=False)<EOL>for line in stdout:<EOL><INDENT>if line.strip():<EOL><INDENT>fpointer(indent + "<STR_LIT:U+0020>" + line.replace("<STR_LIT:\t>", "<STR_LIT:U+0020>") + "<STR_LIT:\n>", dedent=False)<EOL><DEDENT>else:<EOL><INDENT>fpointer("<STR_LIT:\n>", dedent=False)<EOL><DEDENT><DEDENT>fpointer("<STR_LIT:\n>", dedent=False)<EOL>
Print STDOUT resulting from a Bash shell command formatted in reStructuredText. :param command: Bash shell command :type command: string :param nindent: Indentation level :type nindent: integer :param env: Environment variable replacement dictionary. The Bash command is pre-processed and any environment variable represented in the full notation (:bash:`${...}`) is replaced. The dictionary key is the environment variable name and the dictionary value is the replacement value. For example, if **command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env** is :code:`{'PYTHON_CMD':'python3'}` the actual command issued is :code:`'python3 -m "x=5"'` :type env: dictionary :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param cols: Number of columns of output :type cols: integer
f4131:m1
def def_links(mobj):
fdict = json_load(os.path.join("<STR_LIT:data>", "<STR_LIT>"))<EOL>sdeps = sorted(fdict.keys())<EOL>olines = []<EOL>for item in sdeps:<EOL><INDENT>olines.append(<EOL>"<STR_LIT>".format(<EOL>name=fdict[item]["<STR_LIT:name>"], url=fdict[item]["<STR_LIT:url>"]<EOL>)<EOL>)<EOL><DEDENT>ret = []<EOL>for line in olines:<EOL><INDENT>wobj = textwrap.wrap(line, width=LINE_WIDTH, subsequent_indent="<STR_LIT:U+0020>")<EOL>ret.append("<STR_LIT:\n>".join([item for item in wobj]))<EOL><DEDENT>mobj.out("<STR_LIT:\n>".join(ret))<EOL>
Define Sphinx requirements links.
f4132:m0
def make_common_entry(plist, pyver, suffix, req_ver):
prefix = "<STR_LIT>".format(pyver=pyver, suffix=suffix)<EOL>plist.append("<STR_LIT>".format(prefix=prefix, ver=ops_to_words(req_ver)))<EOL>
Generate Python interpreter version entries for 2.x or 3.x series.
f4132:m1
def make_multi_entry(plist, pkg_pyvers, ver_dict):
for pyver in pkg_pyvers:<EOL><INDENT>pver = pyver[<NUM_LIT:2>] + "<STR_LIT:.>" + pyver[<NUM_LIT:3>:]<EOL>plist.append("<STR_LIT>".format(pver, ops_to_words(ver_dict[pyver])))<EOL><DEDENT>
Generate Python interpreter version entries.
f4132:m2
def op_to_words(item):
sdicts = [<EOL>{"<STR_LIT>": "<STR_LIT>"},<EOL>{"<STR_LIT>": "<STR_LIT>"},<EOL>{"<STR_LIT:>>": "<STR_LIT>"},<EOL>{"<STR_LIT>": "<STR_LIT>"},<EOL>{"<STR_LIT:<>": "<STR_LIT>"},<EOL>{"<STR_LIT>": "<STR_LIT>"},<EOL>]<EOL>for sdict in sdicts:<EOL><INDENT>prefix = list(sdict.keys())[<NUM_LIT:0>]<EOL>suffix = sdict[prefix]<EOL>if item.startswith(prefix):<EOL><INDENT>if prefix == "<STR_LIT>":<EOL><INDENT>return item[<NUM_LIT:2>:]<EOL><DEDENT>if prefix == "<STR_LIT>":<EOL><INDENT>return suffix + item[<NUM_LIT:2>:]<EOL><DEDENT>if prefix in ["<STR_LIT:>>", "<STR_LIT:<>"]:<EOL><INDENT>return suffix + item[<NUM_LIT:1>:]<EOL><DEDENT>return item[<NUM_LIT:2>:] + suffix<EOL><DEDENT><DEDENT>raise RuntimeError("<STR_LIT>")<EOL>
Translate >=, ==, <= to words.
f4132:m3
def ops_to_words(item):
unsupp_ops = ["<STR_LIT>", "<STR_LIT>"]<EOL>supp_ops = ["<STR_LIT>", "<STR_LIT:>>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:<>", "<STR_LIT>"]<EOL>tokens = sorted(item.split("<STR_LIT:U+002C>"), reverse=True)<EOL>actual_tokens = []<EOL>for req in tokens:<EOL><INDENT>for op in unsupp_ops:<EOL><INDENT>if req.startswith(op):<EOL><INDENT>raise RuntimeError("<STR_LIT>".format(op))<EOL><DEDENT><DEDENT>for op in supp_ops:<EOL><INDENT>if req.startswith(op):<EOL><INDENT>actual_tokens.append(op)<EOL>break<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise RuntimeError("<STR_LIT>".format(op))<EOL><DEDENT><DEDENT>if len(list(set(actual_tokens))) != len(actual_tokens):<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>if "<STR_LIT>" in actual_tokens:<EOL><INDENT>return (<EOL>"<STR_LIT>".join([op_to_words(token) for token in tokens[:-<NUM_LIT:1>]])<EOL>+ "<STR_LIT:U+0020>"<EOL>+ op_to_words(tokens[-<NUM_LIT:1>])<EOL>)<EOL><DEDENT>return "<STR_LIT>".join([op_to_words(token) for token in tokens])<EOL>
Translate requirement specification to words.
f4132:m4
def proc_requirements(mobj):
pyvers = ["<STR_LIT>".format(item.replace("<STR_LIT:.>", "<STR_LIT>")) for item in get_supported_interps()]<EOL>py2vers = sorted([item for item in pyvers if item.startswith("<STR_LIT>")])<EOL>py3vers = sorted([item for item in pyvers if item.startswith("<STR_LIT>")])<EOL>fdict = json_load(os.path.join("<STR_LIT:data>", "<STR_LIT>"))<EOL>olines = ["<STR_LIT>"]<EOL>sdict = dict([(item["<STR_LIT:name>"], item) for item in fdict.values()])<EOL>for real_name in sorted(sdict.keys()):<EOL><INDENT>pkg_dict = sdict[real_name]<EOL>if pkg_dict["<STR_LIT>"] == ["<STR_LIT>"]:<EOL><INDENT>continue<EOL><DEDENT>plist = [] if not pkg_dict["<STR_LIT>"] else ["<STR_LIT>"]<EOL>if isinstance(pkg_dict["<STR_LIT>"], str):<EOL><INDENT>pkg_dict["<STR_LIT>"] = dict([(pyver, pkg_dict["<STR_LIT>"]) for pyver in pyvers])<EOL><DEDENT>pkg_pyvers = sorted(pkg_dict["<STR_LIT>"].keys())<EOL>pkg_py2vers = sorted(<EOL>[item for item in pkg_dict["<STR_LIT>"].keys() if item.startswith("<STR_LIT>")]<EOL>)<EOL>req_vers = list(set(pkg_dict["<STR_LIT>"].values()))<EOL>req_py2vers = list(<EOL>set([pkg_dict["<STR_LIT>"][item] for item in py2vers if item in pkg_dict["<STR_LIT>"]])<EOL>)<EOL>req_py3vers = list(<EOL>set([pkg_dict["<STR_LIT>"][item] for item in py3vers if item in pkg_dict["<STR_LIT>"]])<EOL>)<EOL>if (len(req_vers) == <NUM_LIT:1>) and (pkg_pyvers == pyvers):<EOL><INDENT>plist.append(ops_to_words(req_vers[<NUM_LIT:0>]))<EOL><DEDENT>elif (<EOL>(pkg_pyvers == pyvers)<EOL>and (len(req_py2vers) == <NUM_LIT:1>)<EOL>and (len(req_py3vers) == <NUM_LIT:1>)<EOL>):<EOL><INDENT>make_common_entry(plist, "<STR_LIT:2>", "<STR_LIT>", req_py2vers[<NUM_LIT:0>])<EOL>make_common_entry(plist, "<STR_LIT:3>", "<STR_LIT>", req_py3vers[<NUM_LIT:0>])<EOL><DEDENT>elif (<EOL>(pkg_pyvers == pyvers)<EOL>and (len(req_py2vers) == len(py2vers))<EOL>and (len(req_py3vers) == <NUM_LIT:1>)<EOL>and (pkg_dict["<STR_LIT>"][pkg_py2vers[-<NUM_LIT:1>]] == req_py3vers[<NUM_LIT:0>])<EOL>):<EOL><INDENT>py2dict = dict(<EOL>[<EOL>(key, value)<EOL>for key, value in pkg_dict["<STR_LIT>"].items()<EOL>if key.startswith("<STR_LIT>") and (key != pkg_py2vers[-<NUM_LIT:1>])<EOL>]<EOL>)<EOL>make_multi_entry(plist, py2vers[:-<NUM_LIT:1>], py2dict)<EOL>pver = pkg_py2vers[-<NUM_LIT:1>][<NUM_LIT:2>] + "<STR_LIT:.>" + pkg_py2vers[-<NUM_LIT:1>][<NUM_LIT:3>:]<EOL>plist.append(<EOL>"<STR_LIT>".format(<EOL>pyver=pver, ver=ops_to_words(req_py3vers[<NUM_LIT:0>])<EOL>)<EOL>)<EOL><DEDENT>elif (<EOL>(pkg_pyvers == pyvers)<EOL>and (len(req_py2vers) == len(py2vers))<EOL>and (len(req_py3vers) == <NUM_LIT:1>)<EOL>):<EOL><INDENT>py2dict = dict(<EOL>[<EOL>(key, value)<EOL>for key, value in pkg_dict["<STR_LIT>"].items()<EOL>if key.startswith("<STR_LIT>")<EOL>]<EOL>)<EOL>make_multi_entry(plist, py2vers, py2dict)<EOL>make_common_entry(plist, "<STR_LIT:3>", "<STR_LIT>", req_py3vers[<NUM_LIT:0>])<EOL><DEDENT>elif (<EOL>(pkg_pyvers == pyvers)<EOL>and (len(req_py3vers) == len(py3vers))<EOL>and (len(req_py2vers) == <NUM_LIT:1>)<EOL>):<EOL><INDENT>py3dict = dict(<EOL>[<EOL>(key, value)<EOL>for key, value in pkg_dict["<STR_LIT>"].items()<EOL>if key.startswith("<STR_LIT>")<EOL>]<EOL>)<EOL>make_common_entry(plist, "<STR_LIT:2>", "<STR_LIT>", req_py2vers[<NUM_LIT:0>])<EOL>make_multi_entry(plist, py3vers, py3dict)<EOL><DEDENT>elif (len(req_vers) == <NUM_LIT:1>) and (pkg_pyvers == py2vers):<EOL><INDENT>make_common_entry(plist, "<STR_LIT:2>", "<STR_LIT>", req_vers[<NUM_LIT:0>])<EOL><DEDENT>elif (len(req_vers) == <NUM_LIT:1>) and (pkg_pyvers == py3vers):<EOL><INDENT>make_common_entry(plist, "<STR_LIT:3>", 
"<STR_LIT>", req_vers[<NUM_LIT:0>])<EOL><DEDENT>else:<EOL><INDENT>make_multi_entry(plist, pkg_pyvers, pkg_dict["<STR_LIT>"])<EOL><DEDENT>olines.append(<EOL>"<STR_LIT>".format(<EOL>name=pkg_dict["<STR_LIT:name>"], par="<STR_LIT:U+002CU+0020>".join(plist)<EOL>)<EOL>)<EOL><DEDENT>ret = []<EOL>for line in olines:<EOL><INDENT>wobj = textwrap.wrap(line, width=LINE_WIDTH, subsequent_indent="<STR_LIT:U+0020>")<EOL>ret.append("<STR_LIT:\n>".join([item for item in wobj]))<EOL><DEDENT>mobj.out("<STR_LIT>".join(ret) + "<STR_LIT>")<EOL>
Get requirements in reStructuredText format.
f4132:m5
def incfile(fname, fpointer, lrange="<STR_LIT>", sdir=None):
<EOL>file_dir = (<EOL>sdir<EOL>if sdir<EOL>else os.environ.get("<STR_LIT>", os.path.abspath(os.path.dirname(__file__)))<EOL>)<EOL>fname = os.path.join(file_dir, fname)<EOL>with open(fname) as fobj:<EOL><INDENT>lines = fobj.readlines()<EOL><DEDENT>tokens = [item.strip() for item in lrange.split("<STR_LIT:U+002C>")]<EOL>inc_lines = []<EOL>for token in tokens:<EOL><INDENT>if "<STR_LIT:->" in token:<EOL><INDENT>subtokens = token.split("<STR_LIT:->")<EOL>lmin, lmax = (<EOL>int(subtokens[<NUM_LIT:0>]),<EOL>int(subtokens[<NUM_LIT:1>]) if subtokens[<NUM_LIT:1>] else len(lines),<EOL>)<EOL>for num in range(lmin, lmax + <NUM_LIT:1>):<EOL><INDENT>inc_lines.append(num)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>inc_lines.append(int(token))<EOL><DEDENT><DEDENT>fpointer("<STR_LIT>")<EOL>fpointer("<STR_LIT:\n>")<EOL>for num, line in enumerate(lines):<EOL><INDENT>if num + <NUM_LIT:1> in inc_lines:<EOL><INDENT>fpointer("<STR_LIT:U+0020>" + line.replace("<STR_LIT:\t>", "<STR_LIT:U+0020>") if line.strip() else "<STR_LIT:\n>")<EOL><DEDENT><DEDENT>fpointer("<STR_LIT:\n>")<EOL>
r""" Include a Python source file in a docstring formatted in reStructuredText. :param fname: File name, relative to environment variable :bash:`${TRACER_DIR}` :type fname: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param lrange: Line range to include, similar to Sphinx `literalinclude <http://sphinx-doc.org/markup/code.html #directive-literalinclude>`_ directive :type lrange: string :param sdir: Source file directory. If None the :bash:`${TRACER_DIR}` environment variable is used if it is defined, otherwise the directory where the :code:`docs.support.incfile` module is located is used :type sdir: string For example: .. code-block:: python def func(): \"\"\" This is a docstring. This file shows how to use it: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('func_example.py', cog.out) .. =]= .. code-block:: python # func_example.py if __name__ == '__main__': func() .. =[=end=]= \"\"\" return 'This is func output'
f4135:m0
def __init__(self, *args):
self.configs = dict()<EOL>self.parsers = list()<EOL>self._config = None<EOL>for arg in args:<EOL><INDENT>self.addConfig(**arg)<EOL><DEDENT>
Initializes a ConfigManager. Inputs: args - ConfigManagers can be optionally initialized with a sequence of dictionaries representing configuration options to add to the ConfigManager.
f4146:c1:m0
def registerParser(self, parser):
if not isinstance(parser, Subparser):<EOL><INDENT>raise TypeError("<STR_LIT>" % parser)<EOL><DEDENT>self.parsers.append(parser)<EOL>
Registers a parser to parse configuration inputs.
f4146:c1:m1
def addConfig(self, name, default=None, cast=None, required=False, description=None):
<EOL>if not self.configNameRE.match(name):<EOL><INDENT>raise InvalidConfigurationException("<STR_LIT>" % name)<EOL><DEDENT>self.configs[self._sanitizeName(name)] = {<EOL>'<STR_LIT:default>': default,<EOL>'<STR_LIT>': cast,<EOL>'<STR_LIT>': required,<EOL>'<STR_LIT:description>': description<EOL>}<EOL>
Adds the given configuration option to the ConfigManager. Inputs: name - The configuration name to accept. required - A boolean indicating whether the configuration option is required. cast - A type (or function accepting 1 argument and returning an object) to cast the input as. If any error occurs during casting an InvalidConfigurationException will be raised. default - The default value to assign to this configuration option. Note that None is not a valid default if required=True. description - A human readable description of this configuration parameter. Will be displayed when the program is run with a -h flag.
f4146:c1:m2
def parse(self):
self._config = _Config()<EOL>self._setDefaults()<EOL>for parser in self.parsers:<EOL><INDENT>for key, value in parser.parse(self, self._config).items():<EOL><INDENT>key = self._sanitizeName(key)<EOL>if key not in self.configs:<EOL><INDENT>raise UnknownConfigurationException(key)<EOL><DEDENT>if value is not None:<EOL><INDENT>self._setConfig(key, value)<EOL><DEDENT><DEDENT><DEDENT>self._ensureRequired()<EOL>self._cast()<EOL>return self._config<EOL>
Executes the registered parsers to parse input configurations.
f4146:c1:m3
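A minimal end-to-end sketch of the workflow implied above (the import path and the concrete dictionary-subparser class name are assumptions; only ConfigManager, addConfig, registerParser, and parse are taken from this file):

# Hypothetical import path and DictionarySubparser name.
from configmanager import ConfigManager, DictionarySubparser

manager = ConfigManager()
# Assuming plain lowercase/underscore names pass the (elided) name validation.
manager.addConfig("log_level", default="info", description="Logging verbosity")
manager.addConfig("port", required=True, cast=int, description="Listen port")

# The dictionary subparser returns its key/value pairs verbatim when parsed.
manager.registerParser(DictionarySubparser({"port": "8080"}))

config = manager.parse()
config.port       # 8080 (cast to int by the registered cast callable)
config.log_level  # "info" (falls back to the default)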
def _setDefaults(self):
for configName, configDict in self.configs.items():<EOL><INDENT>self._setConfig(configName, configDict['<STR_LIT:default>'])<EOL><DEDENT>
Sets all the expected configuration options on the config object as either the requested default value, or None.
f4146:c1:m4
def _ensureRequired(self):
for configName, configDict in self.configs.items():<EOL><INDENT>if configDict['<STR_LIT>']:<EOL><INDENT>if getattr(self._config, configName) is None:<EOL><INDENT>raise MissingConfigurationException(configName)<EOL><DEDENT><DEDENT><DEDENT>
Ensures that all configuration options marked as required have been passed (i.e. are non-None). Raises a MissingConfigurationException if a required configuration option was not passed.
f4146:c1:m5
def _cast(self):
for configName, configDict in self.configs.items():<EOL><INDENT>if configDict['<STR_LIT>'] is not None:<EOL><INDENT>configValue = getattr(self._config, configName)<EOL>if configValue is not None:<EOL><INDENT>try:<EOL><INDENT>self._setConfig(configName, configDict['<STR_LIT>'](configValue))<EOL><DEDENT>except:<EOL><INDENT>raise InvalidConfigurationException("<STR_LIT>" % (configName, configValue))<EOL><DEDENT><DEDENT><DEDENT><DEDENT>
Iterates through our parsed configuration options and casts any options with marked cast types.
f4146:c1:m6
def _setConfig(self, name, value):
setattr(self._config, name, value)<EOL>
Sets the configuration option on the current configuration object being populated. Inputs: name - The name of the configuration option to set. value - The value of the configuration option to set.
f4146:c1:m7
def _sanitizeName(self, name):
return name.replace('<STR_LIT:->', '<STR_LIT:_>')<EOL>
Sanitizes a configuration name so that it can be set onto the Config object safely (ex: replacing -'s with _'s). Inputs: name - The string containing the name to sanitize. Outputs: A string containing the sanitized string.
f4146:c1:m8
def __init__(self, dictionary):
self.dictionary = dictionary<EOL>
Initializes a dictionary parser. Inputs: dictionary - A dictionary containing key: value pairs of configName: configValue of config options to add to the configuration object.
f4148:c0:m0
def parse(self, *args):
if isinstance(self.dictionary, dict):<EOL><INDENT>return self.dictionary<EOL><DEDENT>raise self.subparserException("<STR_LIT>" % type(self.dictionary))<EOL>
Return our initialized dictionary arguments.
f4148:c0:m1
def __init__(self, sections=None, **kwargs):
super(self.__class__, self).__init__(**kwargs)<EOL>self.sections = sections<EOL>
Initializes an ini configuration file parser. Inputs: sections - A list of sections names which should be parsed. If not set all sections will be parsed. kwargs - Keyword arguments to be supplied to ConfigFileSubparser.__init__
f4149:c0:m0
def parse(self, configManager, config):
parser = ConfigParser.RawConfigParser()<EOL>configOptions = dict()<EOL>configFile = self._getConfigFile(config)<EOL>if configFile:<EOL><INDENT>parser.readfp(configFile)<EOL>for section in parser.sections():<EOL><INDENT>if self.sections is None or section in self.sections:<EOL><INDENT>configOptions.update(parser.items(section))<EOL><DEDENT><DEDENT><DEDENT>return configOptions<EOL>
Parse configuration options out of an .ini configuration file. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: A dictionary of new configuration options to add to the Config object.
f4149:c0:m1
def __init__(self, description=None):
self.description = description<EOL>
Initializes a cmdline parser. Inputs: description - A description to print if a user runs this program with a -h flag.
f4150:c0:m0
def getArgumentParser(self, configManager, config):
argParser = argparse.ArgumentParser(self.description)<EOL>for configName, configDict in configManager.configs.items():<EOL><INDENT>cmdName = configName.replace("<STR_LIT:_>", "<STR_LIT:->")<EOL>argParser.add_argument(<EOL>'<STR_LIT>' % cmdName,<EOL>default=None,<EOL>help=configDict['<STR_LIT:description>']<EOL>)<EOL><DEDENT>return argParser<EOL>
May be overridden to provide custom functionality. Constructs an argparse.ArgumentParser used to parse configuration options from the command line. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: An argparse.ArgumentParser object initialized to parse command line configuration options.
f4150:c0:m1
def parse(self, configManager, config):
argParser = self.getArgumentParser(configManager, config)<EOL>return vars(argParser.parse_args())<EOL>
Parses commandline arguments, given a series of configuration options. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: A dictionary of new configuration options to add to the Config object.
f4150:c0:m2
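An independent illustration of the command-line mapping implied by getArgumentParser (the exact option prefix is elided above and assumed here to be a long "--" option; the underscore-to-dash translation is taken from the code):

import argparse

# Each registered config name becomes a long option, with "_" turned into "-".
parser = argparse.ArgumentParser("demo")
for config_name in ("log_level", "port"):
    parser.add_argument("--" + config_name.replace("_", "-"), default=None)

vars(parser.parse_args(["--log-level", "debug", "--port", "8080"]))
# -> {'log_level': 'debug', 'port': '8080'}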
@abc.abstractmethod<EOL><INDENT>def parse(self, configManager, config):<DEDENT>
raise NotImplementedError()<EOL>
Function which must accept a _Config instance of configuration options parsed thus far, and return a dictionary containing new configuration options to add to the _Config object. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: A dictionary of new configuration options to add to the Config object.
f4151:c1:m0
def __init__(self, filepath=None, filename=None, filepathConfig=None, filenameConfig=None):
self.filepath = filepath<EOL>self.filename = filename<EOL>self.filepathConfig = filepathConfig<EOL>self.filenameConfig = filenameConfig<EOL>if (filepath is None or filename is None) and (filepathConfig is None or filenameConfig is None):<EOL><INDENT>raise self.subparserException("<STR_LIT>" +<EOL>"<STR_LIT>" +<EOL>"<STR_LIT>")<EOL><DEDENT>
Initializes this configuration file parser. Note: At least one set of either filepath & filename, or filepathConfig & filenameConfig must be passed. If both are passed, both will be checked, though only the first will be parsed. The order that they will be checked is filenameConfig/filepathConfig first, then filepath/filename second. Inputs: filepath - Either a list of path components for the directory where the configuration file is stored or a string of the complete path to the directory where the configuration file to be parsed can be found. filename - The name of the configuration file to be parsed. filepathConfig - The name of the configuration option to retrieve the configuration file path from. filenameConfig - The name of the configuration option to retrieve the configuration filename from.
f4151:c2:m0
def _getConfigFile(self, config):
joinPath = lambda p: (os.path.join(p) if isinstance(p, (tuple, list)) else p)<EOL>if self.filepathConfig is not None and self.filenameConfig is not None:<EOL><INDENT>if hasattr(config, self.filepathConfig) and hasattr(config, self.filenameConfig):<EOL><INDENT>path = joinPath(getattr(config, self.filepathConfig))<EOL>name = getattr(config, self.filenameConfig)<EOL>if os.path.isfile(os.path.join(path, name)):<EOL><INDENT>return open(os.path.join(path, name), '<STR_LIT:r>')<EOL><DEDENT><DEDENT><DEDENT>if self.filepath is not None and self.filename is not None:<EOL><INDENT>path = joinPath(self.filepath)<EOL>name = self.filename<EOL>if os.path.isfile(os.path.join(path, name)):<EOL><INDENT>return open(os.path.join(path, name), '<STR_LIT:r>')<EOL><DEDENT><DEDENT>
Retrieves a file descriptor to a configuration file to process. Inputs: config - The _Config object which is being populated. Outputs: An open file descriptor to the configuration file to parse in read mode if successful, else None.
f4151:c2:m1
def parse(self, configManager, config):
configFile = self._getConfigFile(config)<EOL>if not configFile:<EOL><INDENT>return dict()<EOL><DEDENT>yamlConfigs = yaml.load(configFile)<EOL>if isinstance(yamlConfigs, dict):<EOL><INDENT>return yamlConfigs<EOL><DEDENT>raise self.subparserException("<STR_LIT>"<EOL>% type(yamlConfigs))<EOL>
Parse configuration options out of a YAML configuration file. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: A dictionary of new configuration options to add to the Config object.
f4153:c0:m0
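A small sketch of the shape this parser accepts: the YAML document must load to a mapping at the top level, which then becomes the dictionary of configuration options (the file content below is illustrative):

import io
import yaml

config_text = u"port: 8080\nlog_level: debug\n"
yaml.load(io.StringIO(config_text))
# -> {'port': 8080, 'log_level': 'debug'}; a list or scalar at the top level
#    would make the subparser raise its subparserException instead.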
def __init__(self, jsonString):
self.jsonString = jsonString<EOL>
Initializes a JSON string parser. Inputs: jsonString - The JSON string to parse.
f4154:c0:m0
def parse(self, configManager, config):
jsonConfigs = json.loads(self.jsonString)<EOL>if isinstance(jsonConfigs, dict):<EOL><INDENT>return jsonConfigs<EOL><DEDENT>raise self.subparserException("<STR_LIT>"<EOL>% type(jsonConfigs))<EOL>
Parse configuration options out of a JSON string. Inputs: configManager - Our parent ConfigManager instance which is constructing the Config object. config - The _Config object containing configuration options populated thus far. Outputs: A dictionary of new configuration options to add to the Config object.
f4154:c0:m1
def __init__(self, name, config_class):
super(AgoraApp, self).__init__(name)<EOL>self.__handlers = {}<EOL>self.__rdfizers = {}<EOL>self.errorhandler(self.__handle_invalid_usage)<EOL>self.config.from_object(config_class)<EOL>self._stop_event = Event()<EOL>
:param name: App name :param config_class: String that represents the config class to be used :return:
f4157:c3:m0
def batch_work(self):
while True:<EOL><INDENT>gen = collect_fragment(self._stop_event, self.config['<STR_LIT>'])<EOL>for collector, (t, s, p, o) in gen:<EOL><INDENT>for task in _batch_tasks:<EOL><INDENT>task(collector, (t, s, p, o), self._stop_event)<EOL><DEDENT>if self._stop_event.isSet():<EOL><INDENT>return<EOL><DEDENT><DEDENT>for task in _batch_tasks:<EOL><INDENT>task(None, None, self._stop_event)<EOL><DEDENT>time.sleep(<NUM_LIT:10>)<EOL><DEDENT>
Method to be executed in batch mode, collecting the required (composite) fragment and then running other custom tasks. :return:
f4157:c3:m2
def run(self, host=None, port=None, debug=None, **options):
tasks = options.get('<STR_LIT>', [])<EOL>for task in tasks:<EOL><INDENT>if task is not None and hasattr(task, '<STR_LIT>'):<EOL><INDENT>_batch_tasks.append(task)<EOL><DEDENT><DEDENT>thread = Thread(target=self.batch_work)<EOL>thread.start()<EOL>try:<EOL><INDENT>super(AgoraApp, self).run(host='<STR_LIT>', port=self.config['<STR_LIT>'], debug=True, use_reloader=False)<EOL><DEDENT>except Exception as e:<EOL><INDENT>print(e.message)<EOL><DEDENT>self._stop_event.set()<EOL>if thread.isAlive():<EOL><INDENT>thread.join()<EOL><DEDENT>
Start the AgoraApp expecting the provided config to have at least REDIS and PORT fields.
f4157:c3:m3
def collect(tp, *args):
def decorator(f):<EOL><INDENT>add_triple_pattern(tp, f, args)<EOL><DEDENT>return decorator<EOL>
Decorator to attach a collector function to a triple pattern :param tp: :param args: :return:
f4160:m0
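A hedged usage sketch (the import path and the triple-pattern syntax below are assumptions; the fact that collectors receive a single (s, p, o) tuple is taken from collect_fragment further down):

# Hypothetical import; the decorator registers `store_name` for the pattern.
from agora_app.collect import collect

@collect("?person foaf:name ?name")
def store_name(triple):
    subject, predicate, obj = triple  # called once per matching triple
    print(subject, obj)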
def add_triple_pattern(tp, collector, args):
tp_parts = [part.strip() for part in tp.strip().split('<STR_LIT:U+0020>')]<EOL>tp = '<STR_LIT:U+0020>'.join(tp_parts)<EOL>if tp not in __triple_patterns.keys():<EOL><INDENT>__triple_patterns[tp] = set([])<EOL><DEDENT>if collector is not None:<EOL><INDENT>__triple_patterns[tp].add((collector, args))<EOL><DEDENT>
Manage the relations between triple patterns and collector functions :param tp: :param collector: :param args: :return:
f4160:m1
def __extract_pattern_nodes(graph):
tp_nodes = graph.subjects(RDF.type, AGORA.TriplePattern)<EOL>for tpn in tp_nodes:<EOL><INDENT>subject = list(graph.objects(tpn, AGORA.subject)).pop()<EOL>predicate = list(graph.objects(tpn, AGORA.predicate)).pop()<EOL>obj = list(graph.objects(tpn, AGORA.object)).pop()<EOL>subject_str = list(graph.objects(subject, RDFS.label)).pop().toPython()<EOL>predicate_str = graph.qname(predicate)<EOL>if (obj, RDF.type, AGORA.Variable) in graph:<EOL><INDENT>object_str = list(graph.objects(obj, RDFS.label)).pop().toPython()<EOL><DEDENT>else:<EOL><INDENT>object_str = list(graph.objects(obj, AGORA.value)).pop().toPython()<EOL><DEDENT>__plan_patterns[tpn] = '<STR_LIT>'.format(subject_str, predicate_str, object_str)<EOL><DEDENT>
Extract and bind the triple patterns contained in the search plan, so as to be able to identify to which pattern is associated each triple of the fragment. :return:
f4160:m2
def collect_fragment(event, agora_host):
agora = Agora(agora_host)<EOL>graph_pattern = "<STR_LIT>"<EOL>for tp in __triple_patterns:<EOL><INDENT>graph_pattern += '<STR_LIT>'.format(tp)<EOL><DEDENT>fragment, _, graph = agora.get_fragment_generator('<STR_LIT>' % graph_pattern, stop_event=event, workers=<NUM_LIT:4>)<EOL>__extract_pattern_nodes(graph)<EOL>log.info('<STR_LIT>' % graph_pattern)<EOL>for (t, s, p, o) in fragment:<EOL><INDENT>collectors = __triple_patterns[str(__plan_patterns[t])]<EOL>for c, args in collectors:<EOL><INDENT>log.debug('<STR_LIT>'.format(s.n3(graph.namespace_manager), graph.qname(p),<EOL>o.n3(graph.namespace_manager), c))<EOL>c((s, p, o))<EOL>if event.isSet():<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>yield (c.func_name, (t, s, p, o))<EOL><DEDENT><DEDENT>
Execute a search plan for the declared graph pattern and send all obtained triples to the corresponding collector functions (registered via the collect decorator). :return:
f4160:m3
def read_file(filename):
path = os.path.abspath(os.path.dirname(__file__))<EOL>filepath = os.path.join(path, filename)<EOL>try:<EOL><INDENT>return open(filepath).read()<EOL><DEDENT>except IOError:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>
Read a file into a string
f4162:m0
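Typical use is feeding package metadata in setup.py; a minimal sketch. The filename is an assumption, and the fallback string returned on IOError is elided in the record above.

# 'README.rst' is an assumed filename; read_file is the helper defined above.
long_description = read_file('README.rst')
print(long_description[:80])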
def __init__(self, client_id, client_secret, redirect_uri):
self._client_id = client_id<EOL>self._client_secret = client_secret<EOL>self._redirect_uri = redirect_uri<EOL>
Initialize Authorization Manager. @param client_id: Client ID for accessing Health Graph API @param client_secret: Client Secret for accessing Health Graph API @param redirect_uri: Redirect URI for returning control to client web application after the Authorization Dialog with RunKeeper.com is executed successfully.
f4169:c0:m0
def get_login_url(self, state=None):
payload = {'<STR_LIT>': '<STR_LIT:code>',<EOL>'<STR_LIT>': self._client_id,<EOL>'<STR_LIT>': self._redirect_uri,}<EOL>if state is not None:<EOL><INDENT>payload['<STR_LIT:state>'] = state<EOL><DEDENT>return "<STR_LIT>" % (settings.API_AUTHORIZATION_URL,<EOL>urllib.urlencode(payload))<EOL>
Generates and returns URL for redirecting to Login Page of RunKeeper, which is the Authorization Endpoint of Health Graph API. @param state: State string. Passed to client web application at the end of the Login Process. @return: URL for redirecting to RunKeeper Login Page.
f4169:c0:m1
def get_login_button_url(self, button_color=None, caption_color=None, button_size=None):
if button_color not in settings.LOGIN_BUTTON_COLORS:<EOL><INDENT>button_color = settings.LOGIN_BUTTON_COLORS[<NUM_LIT:0>]<EOL><DEDENT>if caption_color not in settings.LOGIN_BUTTON_CAPTION_COLORS:<EOL><INDENT>caption_color = settings.LOGIN_BUTTON_CAPTION_COLORS[<NUM_LIT:0>]<EOL><DEDENT>if button_size in settings.LOGIN_BUTTON_SIZES:<EOL><INDENT>button_size = settings.LOGIN_BUTTON_SIZES[button_size]<EOL><DEDENT>else:<EOL><INDENT>button_size = settings.LOGIN_BUTTON_SIZES['<STR_LIT:None>']<EOL><DEDENT>return settings.LOGIN_BUTTON_URL % (button_color, caption_color, button_size)<EOL>
Return URL for image used for RunKeeper Login button. @param button_color: Button color. Either 'blue', 'grey' or 'black'. Default: 'blue'. @param caption_color: Button text color. Either 'white' or 'black'. Default: 'white' @param button_size: Button width in pixels. Either 200, 300 or 600. Default: 200 @return: URL for Login Button Image.
f4169:c0:m2
def get_access_token(self, code):
payload = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT:code>': code,<EOL>'<STR_LIT>': self._client_id,<EOL>'<STR_LIT>': self._client_secret,<EOL>'<STR_LIT>': self._redirect_uri,}<EOL>req = requests.post(settings.API_ACCESS_TOKEN_URL, data=payload)<EOL>data = req.json()<EOL>return data.get('<STR_LIT>')<EOL>
Returns Access Token retrieved from the Health Graph API Token Endpoint following the login to RunKeeper. @param code: Code returned by Health Graph API at the Authorization or RunKeeper Login phase. @return: Access Token for querying the Health Graph API.
f4169:c0:m3
def revoke_access_token(self, access_token):
payload = {'<STR_LIT>': access_token,}<EOL>req = requests.post(settings.API_DEAUTHORIZATION_URL, data=payload)<EOL>
Revokes the Access Token by accessing the De-authorization Endpoint of Health Graph API. @param access_token: Access Token for querying Health Graph API.
f4169:c0:m4
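Taken together, the authorization-manager methods above implement the standard OAuth 2.0 authorization-code flow. A hedged end-to-end sketch follows; the import path, class name and all credential values are assumptions, not identifiers from the source.

# Placeholders throughout: module/class name, client id/secret and redirect URI.
from healthgraph import AuthManager   # assumed import path and class name

auth = AuthManager(client_id='MY_CLIENT_ID',
                   client_secret='MY_CLIENT_SECRET',
                   redirect_uri='http://localhost:8000/login_callback')

# 1. Redirect the user to RunKeeper's authorization endpoint.
login_url = auth.get_login_url(state='csrf-token-123')

# 2. RunKeeper redirects back with ?code=...; exchange it for a token.
# access_token = auth.get_access_token(request_code)

# 3. The token can later be revoked at the de-authorization endpoint.
# auth.revoke_access_token(access_token)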
def parse_cmdline(argv=None):
parser = optparse.OptionParser()<EOL>parser.add_option('<STR_LIT:-c>', '<STR_LIT>', help='<STR_LIT>',<EOL>dest='<STR_LIT>',default=None)<EOL>parser.add_option('<STR_LIT>', '<STR_LIT>',<EOL>help='<STR_LIT>' % conf['<STR_LIT>'],<EOL>dest='<STR_LIT>', type='<STR_LIT:int>', default=None, action='<STR_LIT:store>')<EOL>parser.add_option('<STR_LIT>', '<STR_LIT>',<EOL>help='<STR_LIT>' % conf['<STR_LIT>'],<EOL>dest='<STR_LIT>', default=None, action='<STR_LIT:store>')<EOL>parser.add_option('<STR_LIT>', '<STR_LIT>', <EOL>help='<STR_LIT>' % conf['<STR_LIT>'],<EOL>dest='<STR_LIT>', default=None, action='<STR_LIT:store>')<EOL>parser.add_option('<STR_LIT>', '<STR_LIT>', help='<STR_LIT>',<EOL>dest='<STR_LIT>', default=False, action='<STR_LIT:store_true>')<EOL>if argv is None:<EOL><INDENT>return parser.parse_args()<EOL><DEDENT>else:<EOL><INDENT>return parser.parse_args(argv[<NUM_LIT:1>:])<EOL><DEDENT>
Parse command line options. @param argv: List of command line arguments. If None, get list from system. @return: Tuple of Option List and Argument List.
f4171:m5
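Because argv[0] is stripped before parsing, the function is easy to exercise directly. Only the -c flag is visible in the record (the remaining option strings are elided), and the dest name is inferred from how main() reads it below, so treat both as assumptions.

# Only '-c' is known from the source; the conf path is a made-up example.
opts, args = parse_cmdline(['progname', '-c', '/etc/myapp/myapp.conf'])
print(opts.confpath)   # '/etc/myapp/myapp.conf' (dest inferred from main() below)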
def parse_conf_files(conf_paths):
conf_file = ConfigParser.RawConfigParser()<EOL>conf_read = conf_file.read(conf_paths)<EOL>conf = {}<EOL>try:<EOL><INDENT>if conf_read:<EOL><INDENT>conf['<STR_LIT>'] = conf_file.get('<STR_LIT>', '<STR_LIT>')<EOL>conf['<STR_LIT>'] = conf_file.get('<STR_LIT>', '<STR_LIT>')<EOL>if conf_file.has_option('<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>conf['<STR_LIT>'] = conf_file.getint('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>if conf_file.has_option('<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>conf['<STR_LIT>'] = conf_file.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>if conf_file.has_option('<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>conf['<STR_LIT>'] = conf_file.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>return conf<EOL><DEDENT><DEDENT>except ConfigParser.Error:<EOL><INDENT>raise ConfigurationError("<STR_LIT>" <EOL>% sys.exc_info()[<NUM_LIT:1>])<EOL><DEDENT>else:<EOL><INDENT>raise ConfigurationError("<STR_LIT>" <EOL>% defaultConfFilename)<EOL><DEDENT>
Parse the configuration file and return dictionary of configuration options. @param conf_paths: List of configuration file paths to parse. @return: Dictionary of configuration options.
f4171:m6
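The section and option names are elided in the record above, so the sketch below only illustrates the RawConfigParser pattern being used, with hypothetical paths and section/option names.

import ConfigParser   # Python 2 stdlib module, as used above

parser = ConfigParser.RawConfigParser()
read_ok = parser.read(['/etc/myapp/myapp.conf', './myapp.conf'])   # hypothetical paths

conf = {}
if read_ok:
    # Required options: get() raises ConfigParser.Error if the option is missing.
    conf['client_id'] = parser.get('runkeeper', 'client_id')       # assumed names
    # Optional options are guarded with has_option(), as in the function above.
    if parser.has_option('main', 'bindport'):
        conf['bindport'] = parser.getint('main', 'bindport')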
def main(argv=None):
cmd_opts = parse_cmdline(argv)[<NUM_LIT:0>]<EOL>if cmd_opts.confpath is not None:<EOL><INDENT>if os.path.exists(cmd_opts.confpath):<EOL><INDENT>conf_paths = [cmd_opts.confpath,]<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT>" % cmd_opts.confpath<EOL><DEDENT><DEDENT>else:<EOL><INDENT>conf_paths = [os.path.join(path, defaultConfFilename) <EOL>for path in ('<STR_LIT>', '<STR_LIT:.>',)]<EOL><DEDENT>try:<EOL><INDENT>conf.update(parse_conf_files(conf_paths))<EOL><DEDENT>except ConfigurationError:<EOL><INDENT>return(sys.exc_info()[<NUM_LIT:1>])<EOL><DEDENT>if cmd_opts.bindport is not None:<EOL><INDENT>conf['<STR_LIT>'] = cmd_opts.bindport<EOL><DEDENT>if cmd_opts.bindaddr is not None:<EOL><INDENT>conf['<STR_LIT>'] = cmd_opts.bindaddr<EOL><DEDENT>if cmd_opts.baseurl is not None:<EOL><INDENT>conf['<STR_LIT>'] = cmd_opts.baseurl<EOL><DEDENT>if cmd_opts.devel:<EOL><INDENT>from bottle import debug<EOL>debug(True)<EOL><DEDENT>app = SessionMiddleware(bottle.app(), sessionOpts)<EOL>bottle.run(app=app, host=conf['<STR_LIT>'], port=conf['<STR_LIT>'], <EOL>reloader=cmd_opts.devel)<EOL>
Main Block - Configure and run the Bottle Web Server.
f4171:m7
def __new__(cls, clsname, bases, attrs):
newclass = super(SerialiserMetaClass, cls).__new__(cls, clsname, bases, attrs)<EOL>register_serialiser(newclass) <EOL>return newclass<EOL>
Automatically registers Serialiser subclasses at class definition time.
f4174:c0:m0
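The register-on-definition trick is independent of the surrounding library; below is a self-contained sketch of the same metaclass pattern. The registry, function and class names are illustrative, and Python 2 syntax is used to match the codebase.

REGISTRY = {}

def register_serialiser(cls):
    # Record every class built with the metaclass under its class name.
    REGISTRY[cls.__name__] = cls

class SerialiserMeta(type):
    def __new__(cls, clsname, bases, attrs):
        newclass = super(SerialiserMeta, cls).__new__(cls, clsname, bases, attrs)
        register_serialiser(newclass)
        return newclass

class JSONSerialiser(object):
    __metaclass__ = SerialiserMeta   # Python 2 metaclass declaration

print('JSONSerialiser' in REGISTRY)   # True: registered at class definition time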
def __new__(cls, clsname, bases, attrs):
newclass = super(SerialisableMetaClass, cls).__new__(cls, clsname, bases, attrs)<EOL>serialised_cls = getattr(newclass, '<STR_LIT>'.format(clsname), None)<EOL>register_class(newclass, serialised_cls)<EOL>return newclass<EOL>
Automatically registers Serialisable subclasses at class definition time.
f4176:c0:m0
@classmethod<EOL><INDENT>def serialisable(cls, key, obj):<DEDENT>
<EOL>if key.startswith('<STR_LIT>'.format(cls.__name__)):<EOL><INDENT>return False<EOL><DEDENT>if key in obj.__whitelist:<EOL><INDENT>return True<EOL><DEDENT>if '<STR_LIT>' in key:<EOL><INDENT>return False<EOL><DEDENT>if key in obj.__blacklist:<EOL><INDENT>return False<EOL><DEDENT>if callable(getattr(obj, key)):<EOL><INDENT>return False<EOL><DEDENT>if hasattr(obj.__class__, key):<EOL><INDENT>if isinstance(getattr(obj.__class__, key), property):<EOL><INDENT>return False<EOL><DEDENT><DEDENT>return True<EOL>
Determine whether the attribute named key on obj should be serialised.
f4176:c1:m0
@classmethod<EOL><INDENT>def to_dict(cls, obj):<DEDENT>
return {<EOL>k: getattr(obj, k)<EOL>for k in dir(obj)<EOL>if cls.serialisable(k, obj)<EOL>}<EOL>
Serialises the object. By default this includes every attribute that isn't prefixed with __, isn't in the blacklist, and isn't callable.
f4176:c1:m1
@classmethod<EOL><INDENT>def from_dict(cls, jobj):<DEDENT>
try:<EOL><INDENT>obj = cls.__new__(cls)<EOL>blacklist = set(['<STR_LIT>', '<STR_LIT>'] + cls.__blacklist)<EOL>for k in set(jobj.keys()) - blacklist:<EOL><INDENT>setattr(obj, k, jobj[k])<EOL><DEDENT>return obj<EOL><DEDENT>except Exception as e:<EOL><INDENT>raise TypeError('<STR_LIT>'.format(cls.__name__, str(e), jobj))<EOL><DEDENT>
Deserialises the object by creating a bare instance with __new__ and setting the non-blacklisted keys of the input dictionary as attributes. Can be trivially over-written.
f4176:c1:m2
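A roundtrip sketch of the serialisation pair above. It assumes Serialisable is the mix-in these classmethods belong to and that its default white/blacklists are empty; both are assumptions, since the relevant literals are elided in the records.

# Point is illustrative; Serialisable is the mix-in defined by the records above.
class Point(Serialisable):
    def __init__(self, x, y):
        self.x = x
        self.y = y

original = Point(3, 4)
as_dict = Point.to_dict(original)     # expected to be roughly {'x': 3, 'y': 4}
restored = Point.from_dict(as_dict)   # bypasses __init__ via __new__
print(restored.x)                     # 3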
@abc.abstractproperty<EOL><INDENT>def datastore(self):<DEDENT>
The datastore instance under test
f4186:c0:m0
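A concrete test class only has to provide the property; here is a minimal sketch of the abstract-property pattern. The base-class name and the dict stand-in are illustrative, and Python 2 syntax is used to match the codebase.

import abc

class DatastoreTestBase(object):
    # Hypothetical restatement of the abstract base the record above belongs to.
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def datastore(self):
        """The datastore instance under test"""

class DictDatastoreTest(DatastoreTestBase):
    @property
    def datastore(self):
        return {}          # trivial stand-in datastore for illustration

print(DictDatastoreTest().datastore)   # {}; the abstract base itself cannot be instantiated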
def load_yaml(filename):
with open(filename) as fobj:<EOL><INDENT>return yaml.load(fobj)<EOL><DEDENT>
Given a filename, load the YAML config from that file.
f4188:m1
def load_yaml_env(env_var_name):
return load_yaml(os.environ[env_var_name])<EOL>
Resolve the env var to a filename and load the YAML config from there.
f4188:m2
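Usage sketch; the environment variable name and path are assumptions.

import os

os.environ['MYAPP_CONFIG'] = '/etc/myapp/config.yaml'   # hypothetical
config = load_yaml_env('MYAPP_CONFIG')   # same as load_yaml('/etc/myapp/config.yaml')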
def _store(self, uid, content, data=None):
doc = dict(uid=uid)<EOL>if data:<EOL><INDENT>gfs = gridfs.GridFS(self.db)<EOL>data_id = gfs.put(data, encoding='<STR_LIT:utf-8>')<EOL>doc.update(data_id=data_id)<EOL><DEDENT>doc.update(content)<EOL>self.db.pastes.insert_one(doc)<EOL>
Store the given dict of content at uid. Nothing returned.
f4190:c0:m2
def _storeLog(self, nick, time, uid):
query = dict(uid=uid)<EOL>update = {'<STR_LIT>': dict(nick=nick, time=time)}<EOL>self.db.pastes.update_one(query, update)<EOL>
Record the nick and time in the log entry of the paste identified by uid. No return.
f4190:c0:m3
def _retrieve(self, uid):
query = dict(uid=uid)<EOL>doc = self.db.pastes.find_one(query)<EOL>if '<STR_LIT>' in doc:<EOL><INDENT>data_id = doc.pop('<STR_LIT>')<EOL>gfs = gridfs.GridFS(self.db)<EOL>doc.update(data=gfs.get(data_id).read())<EOL><DEDENT>return doc<EOL>
Return a dict with the contents of the paste, including the raw data, if any, as the key 'data'. Must pass in uid, not shortid.
f4190:c0:m4
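The two storage paths above (small metadata inline, large payloads in GridFS) follow the usual PyMongo pattern. In the standalone sketch below, the 'pastes' collection name and the GridFS calls come from the records; the connection details, database name and sample values are assumptions, and a local MongoDB is assumed to be running.

import gridfs
from pymongo import MongoClient

# Connection details and database name are assumptions for illustration.
db = MongoClient('localhost', 27017)['pastebin']
gfs = gridfs.GridFS(db)

# Store: small metadata inline, the raw payload in GridFS.
data_id = gfs.put(u'raw paste body', encoding='utf-8')
db.pastes.insert_one({'uid': 'abc123', 'language': 'python', 'data_id': data_id})

# Retrieve: read the metadata document, then pull the payload back out of GridFS.
doc = db.pastes.find_one({'uid': 'abc123'})
doc['data'] = gfs.get(doc.pop('data_id')).read()
print(doc['data'])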