signature: string, lengths 8 to 3.44k
body: string, lengths 0 to 1.41M
docstring: string, lengths 1 to 122k
id: string, lengths 5 to 17
def render_pep440_post(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"] or pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>rendered += plus_or_dot(pieces)<EOL>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT>return rendered<EOL>
TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]
f8563:m14
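A minimal standalone sketch of the TAG[.postDISTANCE[.dev0]+gHEX] grammar documented above. The pieces keys used here ("closest-tag", "distance", "dirty", "short") are assumed from versioneer's usual conventions; they are hidden by the <STR_LIT> placeholders in the anonymized body.

def sketch_pep440_post(pieces):
    # Assumed keys: "closest-tag", "distance", "dirty", "short".
    tag = pieces.get("closest-tag")
    distance = pieces.get("distance", 0)
    dirty = pieces.get("dirty", False)
    short = pieces.get("short", "")
    if tag:
        rendered = tag
        if distance or dirty:
            rendered += ".post%d" % distance
            if dirty:
                rendered += ".dev0"   # ".dev0" marks a dirty tree
            # the real renderer uses a plus_or_dot() helper here so a tag
            # that already contains "+" gets "." instead of another "+"
            rendered += "+g%s" % short
    else:
        # no tags: 0.postDISTANCE[.dev0]; the anonymized body also appends +gHEX
        rendered = "0.post%d" % distance
        if dirty:
            rendered += ".dev0"
        rendered += "+g%s" % short
    return rendered

print(sketch_pep440_post({"closest-tag": "1.2", "distance": 3,
                          "dirty": True, "short": "abc1234"}))
# -> 1.2.post3.dev0+gabc1234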
def render_pep440_old(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"] or pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT>return rendered<EOL>
TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]
f8563:m15
def render_git_describe(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % (pieces["<STR_LIT>"], pieces["<STR_LIT>"])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL><DEDENT>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>return rendered<EOL>
TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix)
f8563:m16
def render_git_describe_long(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>rendered += "<STR_LIT>" % (pieces["<STR_LIT>"], pieces["<STR_LIT>"])<EOL><DEDENT>else:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL><DEDENT>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>return rendered<EOL>
TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always --long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix)
f8563:m17
def render(pieces, style):
if pieces["<STR_LIT:error>"]:<EOL><INDENT>return {"<STR_LIT:version>": "<STR_LIT>",<EOL>"<STR_LIT>": pieces.get("<STR_LIT>"),<EOL>"<STR_LIT>": None,<EOL>"<STR_LIT:error>": pieces["<STR_LIT:error>"],<EOL>"<STR_LIT:date>": None}<EOL><DEDENT>if not style or style == "<STR_LIT:default>":<EOL><INDENT>style = "<STR_LIT>" <EOL><DEDENT>if style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_pre(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_post(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_old(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_git_describe(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_git_describe_long(pieces)<EOL><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>" % style)<EOL><DEDENT>return {"<STR_LIT:version>": rendered, "<STR_LIT>": pieces["<STR_LIT>"],<EOL>"<STR_LIT>": pieces["<STR_LIT>"], "<STR_LIT:error>": None,<EOL>"<STR_LIT:date>": pieces.get("<STR_LIT:date>")}<EOL>
Render the given version pieces into the requested style.
f8563:m18
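A hedged sketch of the shapes render() works with: the style names and most keys of the returned dict are assumed from versioneer conventions (only "version", "error" and "date" are visible above).

def sketch_render(pieces, style, renderers):
    # Error short-circuit: report an unknown version plus the error message.
    if pieces.get("error"):
        return {"version": "unknown", "full-revisionid": pieces.get("long"),
                "dirty": None, "error": pieces["error"], "date": None}
    style = style or "pep440"   # empty/"default" falls back to pep440 (assumed)
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    return {"version": renderers[style](pieces),
            "full-revisionid": pieces.get("long"),
            "dirty": pieces.get("dirty"),
            "error": None,
            "date": pieces.get("date")}

info = sketch_render({"long": "abc1234def", "dirty": False, "date": None,
                      "error": None},
                     "git-describe",
                     {"git-describe": lambda p: p["long"][:7]})  # toy renderer
print(info["version"])   # -> abc1234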
def get_versions(verbose=False):
if "<STR_LIT>" in sys.modules:<EOL><INDENT>del sys.modules["<STR_LIT>"]<EOL><DEDENT>root = get_root()<EOL>cfg = get_config_from_root(root)<EOL>assert cfg.VCS is not None, "<STR_LIT>"<EOL>handlers = HANDLERS.get(cfg.VCS)<EOL>assert handlers, "<STR_LIT>" % cfg.VCS<EOL>verbose = verbose or cfg.verbose<EOL>assert cfg.versionfile_source is not None,"<STR_LIT>"<EOL>assert cfg.tag_prefix is not None, "<STR_LIT>"<EOL>versionfile_abs = os.path.join(root, cfg.versionfile_source)<EOL>get_keywords_f = handlers.get("<STR_LIT>")<EOL>from_keywords_f = handlers.get("<STR_LIT>")<EOL>if get_keywords_f and from_keywords_f:<EOL><INDENT>try:<EOL><INDENT>keywords = get_keywords_f(versionfile_abs)<EOL>ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % ver)<EOL><DEDENT>return ver<EOL><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>try:<EOL><INDENT>ver = versions_from_file(versionfile_abs)<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % (versionfile_abs, ver))<EOL><DEDENT>return ver<EOL><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT>from_vcs_f = handlers.get("<STR_LIT>")<EOL>if from_vcs_f:<EOL><INDENT>try:<EOL><INDENT>pieces = from_vcs_f(cfg.tag_prefix, root, verbose)<EOL>ver = render(pieces, cfg.style)<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % ver)<EOL><DEDENT>return ver<EOL><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>try:<EOL><INDENT>if cfg.parentdir_prefix:<EOL><INDENT>ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % ver)<EOL><DEDENT>return ver<EOL><DEDENT><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT>if verbose:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>return {"<STR_LIT:version>": "<STR_LIT>", "<STR_LIT>": None,<EOL>"<STR_LIT>": None, "<STR_LIT:error>": "<STR_LIT>",<EOL>"<STR_LIT:date>": None}<EOL>
Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'.
f8563:m19
def get_version():
return get_versions()["<STR_LIT:version>"]<EOL>
Get the short version string for this project.
f8563:m20
def get_cmdclass():
if "<STR_LIT>" in sys.modules:<EOL><INDENT>del sys.modules["<STR_LIT>"]<EOL><DEDENT>cmds = {}<EOL>from distutils.core import Command<EOL>class cmd_version(Command):<EOL><INDENT>description = "<STR_LIT>"<EOL>user_options = []<EOL>boolean_options = []<EOL>def initialize_options(self):<EOL><INDENT>pass<EOL><DEDENT>def finalize_options(self):<EOL><INDENT>pass<EOL><DEDENT>def run(self):<EOL><INDENT>vers = get_versions(verbose=True)<EOL>print("<STR_LIT>" % vers["<STR_LIT:version>"])<EOL>print("<STR_LIT>" % vers.get("<STR_LIT>"))<EOL>print("<STR_LIT>" % vers.get("<STR_LIT>"))<EOL>print("<STR_LIT>" % vers.get("<STR_LIT:date>"))<EOL>if vers["<STR_LIT:error>"]:<EOL><INDENT>print("<STR_LIT>" % vers["<STR_LIT:error>"])<EOL><DEDENT><DEDENT><DEDENT>cmds["<STR_LIT:version>"] = cmd_version<EOL>if "<STR_LIT>" in sys.modules:<EOL><INDENT>from setuptools.command.build_py import build_py as _build_py<EOL><DEDENT>else:<EOL><INDENT>from distutils.command.build_py import build_py as _build_py<EOL><DEDENT>class cmd_build_py(_build_py):<EOL><INDENT>def run(self):<EOL><INDENT>root = get_root()<EOL>cfg = get_config_from_root(root)<EOL>versions = get_versions()<EOL>_build_py.run(self)<EOL>if cfg.versionfile_build:<EOL><INDENT>target_versionfile = os.path.join(self.build_lib,<EOL>cfg.versionfile_build)<EOL>print("<STR_LIT>" % target_versionfile)<EOL>write_to_version_file(target_versionfile, versions)<EOL><DEDENT><DEDENT><DEDENT>cmds["<STR_LIT>"] = cmd_build_py<EOL>if "<STR_LIT>" in sys.modules: <EOL><INDENT>from cx_Freeze.dist import build_exe as _build_exe<EOL>class cmd_build_exe(_build_exe):<EOL><INDENT>def run(self):<EOL><INDENT>root = get_root()<EOL>cfg = get_config_from_root(root)<EOL>versions = get_versions()<EOL>target_versionfile = cfg.versionfile_source<EOL>print("<STR_LIT>" % target_versionfile)<EOL>write_to_version_file(target_versionfile, versions)<EOL>_build_exe.run(self)<EOL>os.unlink(target_versionfile)<EOL>with open(cfg.versionfile_source, "<STR_LIT:w>") as f:<EOL><INDENT>LONG = LONG_VERSION_PY[cfg.VCS]<EOL>f.write(LONG %<EOL>{"<STR_LIT>": "<STR_LIT:$>",<EOL>"<STR_LIT>": cfg.style,<EOL>"<STR_LIT>": cfg.tag_prefix,<EOL>"<STR_LIT>": cfg.parentdir_prefix,<EOL>"<STR_LIT>": cfg.versionfile_source,<EOL>})<EOL><DEDENT><DEDENT><DEDENT>cmds["<STR_LIT>"] = cmd_build_exe<EOL>del cmds["<STR_LIT>"]<EOL><DEDENT>if '<STR_LIT>' in sys.modules: <EOL><INDENT>try:<EOL><INDENT>from py2exe.distutils_buildexe import py2exe as _py2exe <EOL><DEDENT>except ImportError:<EOL><INDENT>from py2exe.build_exe import py2exe as _py2exe <EOL><DEDENT>class cmd_py2exe(_py2exe):<EOL><INDENT>def run(self):<EOL><INDENT>root = get_root()<EOL>cfg = get_config_from_root(root)<EOL>versions = get_versions()<EOL>target_versionfile = cfg.versionfile_source<EOL>print("<STR_LIT>" % target_versionfile)<EOL>write_to_version_file(target_versionfile, versions)<EOL>_py2exe.run(self)<EOL>os.unlink(target_versionfile)<EOL>with open(cfg.versionfile_source, "<STR_LIT:w>") as f:<EOL><INDENT>LONG = LONG_VERSION_PY[cfg.VCS]<EOL>f.write(LONG %<EOL>{"<STR_LIT>": "<STR_LIT:$>",<EOL>"<STR_LIT>": cfg.style,<EOL>"<STR_LIT>": cfg.tag_prefix,<EOL>"<STR_LIT>": cfg.parentdir_prefix,<EOL>"<STR_LIT>": cfg.versionfile_source,<EOL>})<EOL><DEDENT><DEDENT><DEDENT>cmds["<STR_LIT>"] = cmd_py2exe<EOL><DEDENT>if "<STR_LIT>" in sys.modules:<EOL><INDENT>from setuptools.command.sdist import sdist as _sdist<EOL><DEDENT>else:<EOL><INDENT>from distutils.command.sdist import sdist as _sdist<EOL><DEDENT>class cmd_sdist(_sdist):<EOL><INDENT>def run(self):<EOL><INDENT>versions = 
get_versions()<EOL>self._versioneer_generated_versions = versions<EOL>self.distribution.metadata.version = versions["<STR_LIT:version>"]<EOL>return _sdist.run(self)<EOL><DEDENT>def make_release_tree(self, base_dir, files):<EOL><INDENT>root = get_root()<EOL>cfg = get_config_from_root(root)<EOL>_sdist.make_release_tree(self, base_dir, files)<EOL>target_versionfile = os.path.join(base_dir, cfg.versionfile_source)<EOL>print("<STR_LIT>" % target_versionfile)<EOL>write_to_version_file(target_versionfile,<EOL>self._versioneer_generated_versions)<EOL><DEDENT><DEDENT>cmds["<STR_LIT>"] = cmd_sdist<EOL>return cmds<EOL>
Get the custom setuptools/distutils subclasses used by Versioneer.
f8563:m21
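A sketch of how get_version() and get_cmdclass() are normally consumed from a project's setup.py, assuming this module is importable as versioneer:

from setuptools import setup
import versioneer     # assumed: this module shipped as versioneer.py

setup(
    name="myproject",                        # placeholder project name
    version=versioneer.get_version(),        # short version string
    cmdclass=versioneer.get_cmdclass(),      # adds version/build_py/sdist commands
)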
def do_setup():
root = get_root()<EOL>try:<EOL><INDENT>cfg = get_config_from_root(root)<EOL><DEDENT>except (EnvironmentError, configparser.NoSectionError,<EOL>configparser.NoOptionError) as e:<EOL><INDENT>if isinstance(e, (EnvironmentError, configparser.NoSectionError)):<EOL><INDENT>print("<STR_LIT>",<EOL>file=sys.stderr)<EOL>with open(os.path.join(root, "<STR_LIT>"), "<STR_LIT:a>") as f:<EOL><INDENT>f.write(SAMPLE_CONFIG)<EOL><DEDENT><DEDENT>print(CONFIG_ERROR, file=sys.stderr)<EOL>return <NUM_LIT:1><EOL><DEDENT>print("<STR_LIT>" % cfg.versionfile_source)<EOL>with open(cfg.versionfile_source, "<STR_LIT:w>") as f:<EOL><INDENT>LONG = LONG_VERSION_PY[cfg.VCS]<EOL>f.write(LONG % {"<STR_LIT>": "<STR_LIT:$>",<EOL>"<STR_LIT>": cfg.style,<EOL>"<STR_LIT>": cfg.tag_prefix,<EOL>"<STR_LIT>": cfg.parentdir_prefix,<EOL>"<STR_LIT>": cfg.versionfile_source,<EOL>})<EOL><DEDENT>ipy = os.path.join(os.path.dirname(cfg.versionfile_source),<EOL>"<STR_LIT>")<EOL>if os.path.exists(ipy):<EOL><INDENT>try:<EOL><INDENT>with open(ipy, "<STR_LIT:r>") as f:<EOL><INDENT>old = f.read()<EOL><DEDENT><DEDENT>except EnvironmentError:<EOL><INDENT>old = "<STR_LIT>"<EOL><DEDENT>if INIT_PY_SNIPPET not in old:<EOL><INDENT>print("<STR_LIT>" % ipy)<EOL>with open(ipy, "<STR_LIT:a>") as f:<EOL><INDENT>f.write(INIT_PY_SNIPPET)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" % ipy)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" % ipy)<EOL>ipy = None<EOL><DEDENT>manifest_in = os.path.join(root, "<STR_LIT>")<EOL>simple_includes = set()<EOL>try:<EOL><INDENT>with open(manifest_in, "<STR_LIT:r>") as f:<EOL><INDENT>for line in f:<EOL><INDENT>if line.startswith("<STR_LIT>"):<EOL><INDENT>for include in line.split()[<NUM_LIT:1>:]:<EOL><INDENT>simple_includes.add(include)<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>except EnvironmentError:<EOL><INDENT>pass<EOL><DEDENT>if "<STR_LIT>" not in simple_includes:<EOL><INDENT>print("<STR_LIT>")<EOL>with open(manifest_in, "<STR_LIT:a>") as f:<EOL><INDENT>f.write("<STR_LIT>")<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>if cfg.versionfile_source not in simple_includes:<EOL><INDENT>print("<STR_LIT>" %<EOL>cfg.versionfile_source)<EOL>with open(manifest_in, "<STR_LIT:a>") as f:<EOL><INDENT>f.write("<STR_LIT>" % cfg.versionfile_source)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>do_vcs_install(manifest_in, cfg.versionfile_source, ipy)<EOL>return <NUM_LIT:0><EOL>
Main VCS-independent setup function for installing Versioneer.
f8563:m22
def scan_setup_py():
found = set()<EOL>setters = False<EOL>errors = <NUM_LIT:0><EOL>with open("<STR_LIT>", "<STR_LIT:r>") as f:<EOL><INDENT>for line in f.readlines():<EOL><INDENT>if "<STR_LIT>" in line:<EOL><INDENT>found.add("<STR_LIT>")<EOL><DEDENT>if "<STR_LIT>" in line:<EOL><INDENT>found.add("<STR_LIT>")<EOL><DEDENT>if "<STR_LIT>" in line:<EOL><INDENT>found.add("<STR_LIT>")<EOL><DEDENT>if "<STR_LIT>" in line:<EOL><INDENT>setters = True<EOL><DEDENT>if "<STR_LIT>" in line:<EOL><INDENT>setters = True<EOL><DEDENT><DEDENT><DEDENT>if len(found) != <NUM_LIT:3>:<EOL><INDENT>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>errors += <NUM_LIT:1><EOL><DEDENT>if setters:<EOL><INDENT>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>")<EOL>errors += <NUM_LIT:1><EOL><DEDENT>return errors<EOL>
Validate the contents of setup.py against Versioneer's expectations.
f8563:m23
def L(g,i):
g1 = g&(<NUM_LIT:2>**i)<EOL>if i:<EOL><INDENT>n = Lwidth(i)<EOL>Ln = L(g,i-<NUM_LIT:1>)<EOL>if g1:<EOL><INDENT>return Ln<<(<NUM_LIT:2>*n) | Ln<<n | Ln<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT:1>'*n,<NUM_LIT:2>)<<(<NUM_LIT:2>*n) | Ln<<n | Ln<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if g1:<EOL><INDENT>return int('<STR_LIT>',<NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT:100>',<NUM_LIT:2>)<EOL><DEDENT><DEDENT>
recursively constructs L line for g; i = len(g)-1
f8570:m1
def H(g,i):
g1 = g&(<NUM_LIT:2>**i)<EOL>if i:<EOL><INDENT>n = Hwidth(i)<EOL>i=i-<NUM_LIT:1><EOL>Hn = H(g,i)<EOL>if g1:<EOL><INDENT>return Hn<<(<NUM_LIT:2>*n) | Hn<<n | Hn<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT:1>'*n,<NUM_LIT:2>)<<(<NUM_LIT:2>*n) | L(g,i)<<n | Hn<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if g1:<EOL><INDENT>return int('<STR_LIT>',<NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT>',<NUM_LIT:2>)<EOL><DEDENT><DEDENT>
recursively constructs H line for g; i = len(g)-1
f8570:m2
def UV_H(Hg,gw):
lefts = set()<EOL>K = []<EOL>UV = []<EOL>p = Hwidth(gw)<EOL>pp = <NUM_LIT:2>**p<EOL>while p:<EOL><INDENT>pp = pp>><NUM_LIT:1><EOL>p = p-<NUM_LIT:1><EOL>if Hg&pp:<EOL><INDENT>y = istr(p,<NUM_LIT:3>,gw)<EOL>yy = y.replace('<STR_LIT:1>','<STR_LIT:0>')<EOL>if yy not in lefts: <EOL><INDENT>if y.find('<STR_LIT:1>') == -<NUM_LIT:1>:<EOL><INDENT>K.append(y)<EOL><DEDENT>else:<EOL><INDENT>UV.append(y)<EOL>lefts.add(yy)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return (UV,K)<EOL>
Constructs implications and intents based on H. gw = g width; Hg = H(g), where g is the binary coding of the attribute set. UV = all non-trivial (!V⊂U) implications U->V with U∪V closed, in ternary coding (1=V, 2=U). K = all closed sets.
f8570:m3
def A(g,i):
g1 = g&(<NUM_LIT:2>**i)<EOL>if i:<EOL><INDENT>n = Awidth(i)<EOL>An = A(g,i-<NUM_LIT:1>)<EOL>if g1:<EOL><INDENT>return An<<n | An<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT:1>'*n,<NUM_LIT:2>)<<n | An<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if g1:<EOL><INDENT>return int('<STR_LIT>',<NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT>',<NUM_LIT:2>)<EOL><DEDENT><DEDENT>
recursively constructs A line for g; i = len(g)-1
f8570:m4
def B(g,i):
g1 = g&(<NUM_LIT:2>**i)<EOL>if i:<EOL><INDENT>nA = Awidth(i)<EOL>nB = Bwidth(i)<EOL>i=i-<NUM_LIT:1><EOL>Bn = B(g,i)<EOL>if g1:<EOL><INDENT>return Bn << (nA+nB) | int('<STR_LIT:1>'*nA,<NUM_LIT:2>) << nB | Bn<EOL><DEDENT>else:<EOL><INDENT>return int('<STR_LIT:1>'*nB,<NUM_LIT:2>) << (nA+nB) | A(g,i) << nB | Bn<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if g1:<EOL><INDENT>return <NUM_LIT:1><EOL><DEDENT>else:<EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT><DEDENT>
recursively constructs B line for g; i = len(g)-1
f8570:m5
def B012(t,i):
if not i:<EOL><INDENT>return "<STR_LIT:1>"<EOL><DEDENT>nA = Awidth(i)<EOL>nB = Bwidth(i)<EOL>nBB = nB + nA<EOL>if t < nB:<EOL><INDENT>return "<STR_LIT:0>"+B012(t,i-<NUM_LIT:1>)<EOL><DEDENT>elif t < nBB:<EOL><INDENT>return "<STR_LIT:1>"+A012(t-nB,i-<NUM_LIT:1>)<EOL><DEDENT>else:<EOL><INDENT>return "<STR_LIT:2>"+B012(t-nBB,i-<NUM_LIT:1>)<EOL><DEDENT>
Constructs the ternary implication coding (0 = not there, 2 = U, 1 = V). t is the B column position; i runs from |M|-1 down to 0.
f8570:m7
def UV_B(Bg,gw):
UV = []<EOL>p = Bwidth(gw)<EOL>pp = <NUM_LIT:2>**p<EOL>while p:<EOL><INDENT>pp = pp>><NUM_LIT:1><EOL>p = p-<NUM_LIT:1><EOL>if Bg&pp:<EOL><INDENT>uv = B012(p,gw-<NUM_LIT:1>)<EOL>UV.append(uv)<EOL><DEDENT><DEDENT>return UV<EOL>
Returns the implications UV based on B. Bg = B(g), g∈2^M; gw = |M|, where M is the set of all attributes.
f8570:m8
def omega(imps):
if isinstance(imps,v_Us_dict):<EOL><INDENT>return sum([omega(V) for U,V in imps.items()])<EOL><DEDENT>if isinstance(imps,list):<EOL><INDENT>return sum([omega(x) for x in imps])<EOL><DEDENT>if isinstance(imps,str):<EOL><INDENT>try:<EOL><INDENT>U,V = imps.split("<STR_LIT>")<EOL>Us = U.split("<STR_LIT:U+002C>") if "<STR_LIT:U+002C>" in U else U.split()<EOL>Vs = V.split("<STR_LIT:U+002C>") if "<STR_LIT:U+002C>" in V else V.split()<EOL>res = len(Us)*len(Vs)<EOL>return res<EOL><DEDENT>except:<EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT><DEDENT>if isinstance(imps,int):<EOL><INDENT>b=bin(imps)[<NUM_LIT:2>:]<EOL>res = len([x for x in b if x=='<STR_LIT:1>'])<EOL>return res<EOL><DEDENT>
Calculates a measure for the size of the implication basis: \sum |U||V|
f8570:m9
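A worked example of the size measure sum(|U| * |V|) described above, written with explicit (U, V) pairs rather than the encoded string/int inputs the function accepts:

# Hedged worked example of the basis-size measure sum(|U|*|V|).
basis = [({"a", "b"}, {"c"}),       # |U|=2, |V|=1 -> contributes 2
         ({"d"}, {"a", "b", "c"})]  # |U|=1, |V|=3 -> contributes 3
omega_value = sum(len(U) * len(V) for U, V in basis)
print(omega_value)  # -> 5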
def respects(g,imp):
if isinstance(g,str):<EOL><INDENT>g = int(g,<NUM_LIT:2>)<EOL><DEDENT>if isinstance(imp,int):<EOL><INDENT>imp = istr(imp,<NUM_LIT:3>,g.bit_length())<EOL><DEDENT>V = int(imp.replace('<STR_LIT:1>','<STR_LIT:2>').replace('<STR_LIT:2>','<STR_LIT:1>'),<NUM_LIT:2>)<EOL>U = int(imp.replace('<STR_LIT:1>','<STR_LIT:0>').replace('<STR_LIT:2>','<STR_LIT:1>'),<NUM_LIT:2>)<EOL>ginU = U&g == U<EOL>ginV = V&g == V<EOL>return not ginU or ginV<EOL>
g is an int, where each bit is an attribute. The implication UV is ternary coded: 1 = ∈V, 2 = ∈U, 0 otherwise. g and UV have the same number of digits.
f8570:m10
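A standalone sketch of the documented check: a bitmask g respects U->V iff U is not contained in g, or V is contained in g. The ternary coding follows the docstring (2 = in U, 1 = in V, 0 = not involved); this is an illustration of the semantics, not the exact anonymized implementation.

def sketch_respects(g, imp):
    U = int("".join("1" if c == "2" else "0" for c in imp), 2)  # premise mask
    V = int("".join("1" if c == "1" else "0" for c in imp), 2)  # conclusion mask
    g_contains_U = (U & g) == U
    g_contains_V = (V & g) == V
    return (not g_contains_U) or g_contains_V

# Attributes left-to-right: a b c. Implication {a} -> {c} is coded "201".
print(sketch_respects(0b101, "201"))  # g = {a, c}: has a and c -> True
print(sketch_respects(0b100, "201"))  # g = {a}: has a but not c -> False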
def LL(n):
if (n<=<NUM_LIT:0>):return Context('<STR_LIT:0>')<EOL>else:<EOL><INDENT>LL1=LL(n-<NUM_LIT:1>)<EOL>r1 = C1(<NUM_LIT:3>**(n-<NUM_LIT:1>),<NUM_LIT:2>**(n-<NUM_LIT:1>)) - LL1 - LL1<EOL>r2 = LL1 - LL1 - LL1<EOL>return r1 + r2<EOL><DEDENT>
constructs the LL context
f8570:m13
def HH(n):
if (n<=<NUM_LIT:0>):return Context('<STR_LIT:1>')<EOL>else:<EOL><INDENT>LL1=LL(n-<NUM_LIT:1>)<EOL>HH1=HH(n-<NUM_LIT:1>)<EOL>r1 = C1(<NUM_LIT:3>**(n-<NUM_LIT:1>),<NUM_LIT:2>**(n-<NUM_LIT:1>)) - LL1 - HH1<EOL>r2 = HH1 - HH1 - HH1<EOL>return r1 + r2<EOL><DEDENT>
constructs the HH context
f8570:m14
def AA(n):
if (n<=<NUM_LIT:1>):return Context('<STR_LIT>')<EOL>else:<EOL><INDENT>AA1=AA(n-<NUM_LIT:1>)<EOL>r1 = C1(<NUM_LIT:2>**(n-<NUM_LIT:1>),<NUM_LIT:2>**(n-<NUM_LIT:1>)) - AA1<EOL>r2 = AA1 - AA1<EOL>return r1 + r2<EOL><DEDENT>
constructs the AA context
f8570:m15
def BB(n):
if (n<=<NUM_LIT:1>):return Context('<STR_LIT>')<EOL>else:<EOL><INDENT>BB1=BB(n-<NUM_LIT:1>)<EOL>AA1=AA(n-<NUM_LIT:1>)<EOL>r1 = C1((n-<NUM_LIT:1>)*<NUM_LIT:2>**(n-<NUM_LIT:2>),<NUM_LIT:2>**(n-<NUM_LIT:1>)) - AA1 - BB1<EOL>r2 = BB1 - C1(<NUM_LIT:2>**(n-<NUM_LIT:1>),<NUM_LIT:2>**(n-<NUM_LIT:1>)) - BB1;<EOL>return r1 + r2<EOL><DEDENT>
constructs the BB context
f8570:m16
def __init__(self,Bg,gw):
self.width = gw<EOL>if isinstance(Bg,int):<EOL><INDENT>defaultdict.__init__(self,list)<EOL>p = Bwidth(gw)<EOL>pp = <NUM_LIT:2>**p<EOL>while p:<EOL><INDENT>pp = pp>><NUM_LIT:1><EOL>p = p-<NUM_LIT:1><EOL>if Bg&pp:<EOL><INDENT>uv = B012(p,gw-<NUM_LIT:1>)<EOL>v = uv.find('<STR_LIT:1>')<EOL>u = uv[:v]+'<STR_LIT:0>'+uv[v+<NUM_LIT:1>:]<EOL>u = int(u.replace('<STR_LIT:2>','<STR_LIT:1>'),<NUM_LIT:2>)<EOL>Umin_s = self[gw-v-<NUM_LIT:1>]<EOL>it = [i for i,U in enumerate(Umin_s) if U&u==u]<EOL>for i in reversed(it):<EOL><INDENT>del Umin_s[i]<EOL><DEDENT>else:<EOL><INDENT>Umin_s.append(u)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>elif isinstance(Bg,list):<EOL><INDENT>defaultdict.__init__(self,list)<EOL>for k,v in Bg:<EOL><INDENT>assert isinstance(v,list)<EOL>self[k] += v<EOL><DEDENT><DEDENT>else:<EOL><INDENT>defaultdict.__init__(self,list,Bg)<EOL><DEDENT>
Returns the implications {v: Us} based on B. v is the significant component; Bg = B(g), g∈2^M; gw = |M|, where M is the set of all attributes.
f8570:c0:m0
def __mul__(self, other):
res = v_Us_dict([],self.width)<EOL>if id(self)==id(other):<EOL><INDENT>s = iter(self.items())<EOL>try:<EOL><INDENT>while True:<EOL><INDENT>v1, us1 = next(s)<EOL>vv1 = <NUM_LIT:2>**v1<EOL>s, ss = tee(s)<EOL>try:<EOL><INDENT>while True:<EOL><INDENT>v2, us2 = next(ss)<EOL>vv2 = <NUM_LIT:2>**v2<EOL>for u1 in us1:<EOL><INDENT>for u2 in us2:<EOL><INDENT>if vv2&u1 and not vv1&u2:<EOL><INDENT>res[v1].append((u1|u2)&~vv2)<EOL><DEDENT>elif vv1&u2 and not vv2&u1:<EOL><INDENT>res[v2].append((u1|u2)&~vv1)<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>except StopIteration:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>except StopIteration:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>else:<EOL><INDENT>for v1,us1 in self.items():<EOL><INDENT>vv1 = <NUM_LIT:2>**v1<EOL>for v2,us2 in other.items():<EOL><INDENT>vv2 = <NUM_LIT:2>**v2<EOL>if v1 != v2:<EOL><INDENT>for u1 in us1:<EOL><INDENT>for u2 in us2:<EOL><INDENT>if vv2&u1 and not vv1&u2:<EOL><INDENT>res[v1].append((u1|u2)&~vv2)<EOL><DEDENT>elif vv1&u2 and not vv2&u1:<EOL><INDENT>res[v2].append((u1|u2)&~vv1)<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>for v,U in res.items():<EOL><INDENT>res[v] = list(set(U))<EOL><DEDENT>return res<EOL>
This is the o operation in [1]_, which represents the 3rd Armstrong rule. It returns combinations for i≠j: (i, u1|u2) or (j, u1|u2).
f8570:c0:m8
def __invert__(self):
Y = self<EOL>Yn = Y*Y<EOL>while True:<EOL><INDENT>YnplusY = Yn+Y<EOL>Yg = Yn*YnplusY<EOL>Yn1 = Yn + Yg<EOL>if Yn1 == Yn:<EOL><INDENT>break<EOL><DEDENT>Yn = Yn1<EOL><DEDENT>return Yn<EOL>
U->v generated from L = ∪ min L_i via the 3rd Armstrong rule. Note that this can become bigger than L.
f8570:c0:m9
def __pow__(self, other):
Y = self<EOL>Z = v_Us_dict({other[<NUM_LIT:0>]:[other[<NUM_LIT:1>]]},self.width)<EOL>Yn = Y*Z<EOL>while True:<EOL><INDENT>YnplusY = Yn+Y<EOL>Yg = Z*YnplusY<EOL>Yn1 = Yn + Yg<EOL>if Yn1 == Yn:<EOL><INDENT>break<EOL><DEDENT>Yn = Yn1<EOL><DEDENT>return Yn<EOL>
'other' is a (v,u) couple; generates U->v involving 'other', e.g. other = (0,64).
f8570:c0:m10
def koenig(self):
L = self<EOL>Y = L - (L*L)<EOL>while True:<EOL><INDENT>Ybar = Y + ~Y<EOL>take = L - Ybar<EOL>if not len(take):<EOL><INDENT>return Y<EOL><DEDENT>else:<EOL><INDENT>ZZ = list(set(take)-set(Y))<EOL>if len(ZZ) > <NUM_LIT:0>:<EOL><INDENT>v = ZZ[<NUM_LIT:0>]<EOL>z=(v,take[v][<NUM_LIT:0>])<EOL><DEDENT>else:<EOL><INDENT>z = next(take.flatten())<EOL><DEDENT>Yzgen = Y**z<EOL>Y = (Y - Yzgen) + z<EOL><DEDENT><DEDENT>
This needs to be L = contextg.v_Us_B()
f8570:c0:m11
def __init__(self, *args, **kwargs):
if isinstance(args[<NUM_LIT:0>],str):<EOL><INDENT>lines = [s.strip() for s in args[<NUM_LIT:0>].splitlines() if s.strip()]<EOL>linelens = [len(tt) for tt in lines]<EOL>self.width = linelens[<NUM_LIT:0>]<EOL>samelen = linelens.count(linelens[<NUM_LIT:0>])==len(linelens)<EOL>assert samelen, "<STR_LIT>"<EOL>super().__init__([int(s,<NUM_LIT:2>) for s in lines])<EOL><DEDENT>else:<EOL><INDENT>super().__init__(*args)<EOL>self.width = kwargs['<STR_LIT:width>']<EOL><DEDENT>try:<EOL><INDENT>self.mapping = kwargs['<STR_LIT>']<EOL><DEDENT>except:<EOL><INDENT>self.mapping = [i for i in range(self.width)]<EOL><DEDENT>
Context can be initialized with a rectangular text block of 0s and 1s, or with a list of ints and a "width" keyword argument. A "mapping" keyword argument (a list) associates the bits with objects of any kind.
f8570:c1:m0
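A sketch of the text-block constructor path described above: a rectangular block of 0s and 1s becomes one int per row plus a width (the mapping defaults to plain column indices). This is a standalone illustration, not the class itself.

def sketch_parse_context(block):
    lines = [s.strip() for s in block.splitlines() if s.strip()]
    widths = {len(line) for line in lines}
    assert len(widths) == 1, "rows must all have the same length"
    width = widths.pop()
    rows = [int(line, 2) for line in lines]   # each row becomes a bitmask
    return rows, width

rows, width = sketch_parse_context("""
    101
    110
    011
""")
print(rows, width)   # -> [5, 6, 3] 3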
def column(self, i):
return '<STR_LIT>'.join([str(digitat2(r,i)) for r in self])<EOL>
from right
f8570:c1:m3
def UV_H(self):
h = reduce(lambda x,y:x&y,(H(g,self.width-<NUM_LIT:1>) for g in self))<EOL>return UV_H(h, self.width)<EOL>
UV = all non-trivial (!V⊂U) implications U->V with U∪V closed, in ternary coding (1=V, 2=U); K = all closed sets. This is the UV_H function, but the returned implications are respected by all attribute sets of this context. This corresponds to a multiplication or & operation on the Hg sets.
f8570:c1:m9
def UV_B(self):
h = reduce(lambda x,y:x&y,(B(g,self.width-<NUM_LIT:1>) for g in self))<EOL>return UV_B(h, self.width)<EOL>
returns UV = all respected U->Ux in ternary coding (1=V,2=U)
f8570:c1:m10
def v_Us_B(self):
Bg = reduce(lambda x,y:x&y,(B(g,self.width-<NUM_LIT:1>) for g in self))<EOL>gw = self.width<EOL>return v_Us_dict(Bg, gw)<EOL>
Returns the implications {v: Us} based on B. This is L = ∪ min L_i in [1]_.
f8570:c1:m11
def __call__(self, intOrCode012, right = None):
if isinstance(intOrCode012,v_Us_dict):<EOL><INDENT>return frozenset(self(x,right=i) for i,x in intOrCode012.items())<EOL><DEDENT>if isinstance(intOrCode012,list):<EOL><INDENT>return frozenset(self(x,right=right) for x in intOrCode012)<EOL><DEDENT>if isinstance(intOrCode012,int):<EOL><INDENT>res = []<EOL>pp = <NUM_LIT:1><EOL>for pos in range(self.width):<EOL><INDENT>if intOrCode012&pp:<EOL><INDENT>res.append(self.mapping[-pos-<NUM_LIT:1>])<EOL><DEDENT>pp = pp*<NUM_LIT:2><EOL><DEDENT>if right != None:<EOL><INDENT>return (frozenset(res),frozenset([self.mapping[-right-<NUM_LIT:1>]]))<EOL><DEDENT>else:<EOL><INDENT>return frozenset(res)<EOL><DEDENT><DEDENT>if isinstance(intOrCode012,str):<EOL><INDENT>left = []<EOL>right = []<EOL>for pos in range(self.width):<EOL><INDENT>if intOrCode012[pos] == '<STR_LIT:2>':<EOL><INDENT>left.append(self.mapping[pos])<EOL><DEDENT>elif intOrCode012[pos] == '<STR_LIT:1>':<EOL><INDENT>right.append(self.mapping[pos])<EOL><DEDENT><DEDENT>if left:<EOL><INDENT>if right:<EOL><INDENT>return (frozenset(left),frozenset(right))<EOL><DEDENT>else:<EOL><INDENT>return frozenset(left)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return frozenset(right)<EOL><DEDENT><DEDENT>
Mapping from bits to attributes using mapping (which defaults to ints). right, if given, is the conclusion of the implication; it is used if intOrCode012 is an int.
f8570:c1:m13
def extract_heading(line):
match = _HEADING_RE.match(line)<EOL>if match:<EOL><INDENT>return match.group(<NUM_LIT:1>)<EOL><DEDENT>return None<EOL>
Return heading in given line or None if it's not a heading.
f8571:m0
def extract_key_value(line, environ):
segments = line.split("<STR_LIT:=>", <NUM_LIT:1>)<EOL>if len(segments) < <NUM_LIT:2>:<EOL><INDENT>return None<EOL><DEDENT>key, value = segments<EOL>value = value.strip()<EOL>if value[<NUM_LIT:0>] == "<STR_LIT:'>" and _SQUOTE_RE.match(value):<EOL><INDENT>value = value[<NUM_LIT:1>:-<NUM_LIT:1>]<EOL><DEDENT>elif value[<NUM_LIT:0>] == '<STR_LIT:">' and _DQUOTE_RE.match(value):<EOL><INDENT>template = value[<NUM_LIT:1>:-<NUM_LIT:1>]<EOL>value = template.format(**environ)<EOL><DEDENT>key = key.strip()<EOL>value = value.strip()<EOL>return key, value<EOL>
Return key, value from given line if present, else return None.
f8571:m1
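A standalone sketch of the quoting rules described above: single-quoted values are taken literally, double-quoted values are treated as str.format templates filled from the environment. The original uses regexes for the quote matching; the startswith/endswith check here is a simplification.

def sketch_extract_key_value(line, environ):
    segments = line.split("=", 1)
    if len(segments) < 2:
        return None
    key, value = segments
    value = value.strip()
    if value.startswith("'") and value.endswith("'"):
        value = value[1:-1]                    # literal value
    elif value.startswith('"') and value.endswith('"'):
        value = value[1:-1].format(**environ)  # template, e.g. "{HOME}/x"
    return key.strip(), value.strip()

print(sketch_extract_key_value('projects = "{HOME}/work"', {"HOME": "/home/me"}))
# -> ('projects', '/home/me/work')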
def parse_vexrc(inp, environ):
heading = None<EOL>errors = []<EOL>with inp:<EOL><INDENT>for line_number, line in enumerate(inp):<EOL><INDENT>line = line.decode("<STR_LIT:utf-8>")<EOL>if not line.strip():<EOL><INDENT>continue<EOL><DEDENT>extracted_heading = extract_heading(line)<EOL>if extracted_heading is not None:<EOL><INDENT>heading = extracted_heading<EOL>continue<EOL><DEDENT>kv_tuple = extract_key_value(line, environ)<EOL>if kv_tuple is None:<EOL><INDENT>errors.append((line_number, line))<EOL>continue<EOL><DEDENT>try:<EOL><INDENT>yield heading, kv_tuple[<NUM_LIT:0>], kv_tuple[<NUM_LIT:1>]<EOL><DEDENT>except GeneratorExit:<EOL><INDENT>break<EOL><DEDENT><DEDENT><DEDENT>if errors:<EOL><INDENT>raise InvalidConfigError(inp.name, errors)<EOL><DEDENT>
Iterator yielding key/value pairs from the given stream. Yields tuples of (heading, key, value).
f8571:m2
@classmethod<EOL><INDENT>def from_file(cls, path, environ):<DEDENT>
instance = cls()<EOL>instance.read(path, environ)<EOL>return instance<EOL>
Make a Vexrc instance from given file in given environ.
f8571:c1:m2
def read(self, path, environ):
try:<EOL><INDENT>inp = open(path, '<STR_LIT:rb>')<EOL><DEDENT>except FileNotFoundError as error:<EOL><INDENT>if error.errno != <NUM_LIT:2>:<EOL><INDENT>raise<EOL><DEDENT>return None<EOL><DEDENT>parsing = parse_vexrc(inp, environ)<EOL>for heading, key, value in parsing:<EOL><INDENT>heading = self.default_heading if heading is None else heading<EOL>if heading not in self.headings:<EOL><INDENT>self.headings[heading] = OrderedDict()<EOL><DEDENT>self.headings[heading][key] = value<EOL><DEDENT>parsing.close()<EOL>
Read data from file into this vexrc instance.
f8571:c1:m3
def get_ve_base(self, environ):
<EOL>ve_base_value = self.headings[self.default_heading].get('<STR_LIT>')<EOL>if ve_base_value:<EOL><INDENT>ve_base = os.path.expanduser(ve_base_value)<EOL><DEDENT>else:<EOL><INDENT>ve_base = environ.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>if not ve_base:<EOL><INDENT>if platform.system() == '<STR_LIT>' and os.name == '<STR_LIT>':<EOL><INDENT>_win_drive = environ.get('<STR_LIT>')<EOL>home = environ.get('<STR_LIT>', '<STR_LIT>')<EOL>if home:<EOL><INDENT>home = os.path.join(_win_drive, home)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>home = environ.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>if not home:<EOL><INDENT>home = os.path.expanduser('<STR_LIT>')<EOL><DEDENT>if not home:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>ve_base = os.path.join(home, '<STR_LIT>')<EOL><DEDENT>return ve_base or '<STR_LIT>'<EOL>
Find a directory to look for virtualenvs in.
f8571:c1:m4
def get_shell(self, environ):
command = self.headings[self.default_heading].get('<STR_LIT>')<EOL>if not command and os.name != '<STR_LIT>':<EOL><INDENT>command = environ.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>command = shlex.split(command) if command else None<EOL>return command<EOL>
Find a command to run.
f8571:c1:m5
def make_fake_exists(accepted_paths):
<EOL>assert not isinstance(accepted_paths, str)<EOL>def fake_exists(path):<EOL><INDENT>if path in accepted_paths:<EOL><INDENT>return True<EOL><DEDENT>return False<EOL><DEDENT>return fake_exists<EOL>
Make functions which only return true for a particular string.
f8572:m0
def scary_path(path):
if not path:<EOL><INDENT>return True<EOL><DEDENT>assert isinstance(path, bytes)<EOL>return not NOT_SCARY.match(path)<EOL>
Whitelist the WORKON_HOME strings we're willing to substitute into strings that we provide for the user's shell to evaluate. If it smells at all bad, return True.
f8583:m0
def shell_config_for(shell, vexrc, environ):
here = os.path.dirname(os.path.abspath(__file__))<EOL>path = os.path.join(here, '<STR_LIT>', shell)<EOL>try:<EOL><INDENT>with open(path, '<STR_LIT:rb>') as inp:<EOL><INDENT>data = inp.read()<EOL><DEDENT><DEDENT>except FileNotFoundError as error:<EOL><INDENT>if error.errno != <NUM_LIT:2>:<EOL><INDENT>raise<EOL><DEDENT>return b'<STR_LIT>'<EOL><DEDENT>ve_base = vexrc.get_ve_base(environ).encode('<STR_LIT:ascii>')<EOL>if ve_base and not scary_path(ve_base) and os.path.exists(ve_base):<EOL><INDENT>data = data.replace(b'<STR_LIT>', ve_base)<EOL><DEDENT>return data<EOL>
return completion config for the named shell.
f8583:m1
def handle_shell_config(shell, vexrc, environ):
from vex import shell_config<EOL>data = shell_config.shell_config_for(shell, vexrc, environ)<EOL>if not data:<EOL><INDENT>raise exceptions.OtherShell("<STR_LIT>".format(shell))<EOL><DEDENT>if hasattr(sys.stdout, '<STR_LIT>'):<EOL><INDENT>sys.stdout.buffer.write(data)<EOL><DEDENT>else:<EOL><INDENT>sys.stdout.write(data)<EOL><DEDENT>return <NUM_LIT:0><EOL>
Carry out the logic of the --shell-config option.
f8583:m2
def make_arg_parser():
parser = argparse.ArgumentParser(<EOL>formatter_class=argparse.RawTextHelpFormatter,<EOL>usage="<STR_LIT>",<EOL>)<EOL>make = parser.add_argument_group(title='<STR_LIT>')<EOL>make.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>',<EOL>action="<STR_LIT:store_true>",<EOL>help="<STR_LIT>"<EOL>)<EOL>make.add_argument(<EOL>'<STR_LIT>',<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store>",<EOL>default=None,<EOL>)<EOL>make.add_argument(<EOL>'<STR_LIT>',<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store_true>",<EOL>)<EOL>make.add_argument(<EOL>'<STR_LIT>',<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store_true>",<EOL>)<EOL>remove = parser.add_argument_group(title='<STR_LIT>')<EOL>remove.add_argument(<EOL>'<STR_LIT>', '<STR_LIT>',<EOL>action="<STR_LIT:store_true>",<EOL>help="<STR_LIT>"<EOL>)<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>metavar="<STR_LIT>",<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store>"<EOL>)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>metavar="<STR_LIT>",<EOL>action="<STR_LIT:store>",<EOL>default='<STR_LIT:.>',<EOL>help="<STR_LIT>",<EOL>)<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>metavar="<STR_LIT>",<EOL>default=None,<EOL>action="<STR_LIT:store>",<EOL>help="<STR_LIT>"<EOL>)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>metavar="<STR_LIT>",<EOL>dest="<STR_LIT>",<EOL>action="<STR_LIT:store>",<EOL>default=None,<EOL>help="<STR_LIT>"<EOL>)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>metavar="<STR_LIT>",<EOL>nargs="<STR_LIT:?>",<EOL>const="<STR_LIT>",<EOL>default=None,<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store>"<EOL>)<EOL>parser.add_argument(<EOL>'<STR_LIT>',<EOL>help="<STR_LIT>",<EOL>action="<STR_LIT:store_true>"<EOL>)<EOL>parser.add_argument(<EOL>"<STR_LIT>",<EOL>nargs=argparse.REMAINDER,<EOL>help=argparse.SUPPRESS)<EOL>return parser<EOL>
Return a standard ArgumentParser object.
f8584:m0
def get_options(argv):
arg_parser = make_arg_parser()<EOL>options, unknown = arg_parser.parse_known_args(argv)<EOL>if unknown:<EOL><INDENT>arg_parser.print_help()<EOL>raise exceptions.UnknownArguments(<EOL>"<STR_LIT>".format(unknown))<EOL><DEDENT>options.print_help = arg_parser.print_help<EOL>return options<EOL>
Called to parse the given list as command-line arguments. :returns: an options object as returned by argparse.
f8584:m1
def get_vexrc(options, environ):
<EOL>if options.config and not os.path.exists(options.config):<EOL><INDENT>raise exceptions.InvalidVexrc("<STR_LIT>".format(options.config))<EOL><DEDENT>filename = options.config or os.path.expanduser('<STR_LIT>')<EOL>vexrc = config.Vexrc.from_file(filename, environ)<EOL>return vexrc<EOL>
Get a representation of the contents of the config file. :returns: a Vexrc instance.
f8586:m0
def get_cwd(options):
if not options.cwd:<EOL><INDENT>return None<EOL><DEDENT>if not os.path.exists(options.cwd):<EOL><INDENT>raise exceptions.InvalidCwd(<EOL>"<STR_LIT>".format(options.cwd))<EOL><DEDENT>return options.cwd<EOL>
Discover what directory the command should run in.
f8586:m1
def get_virtualenv_path(ve_base, ve_name):
if not ve_base:<EOL><INDENT>raise exceptions.NoVirtualenvsDirectory(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL><DEDENT>if not os.path.exists(ve_base):<EOL><INDENT>message = (<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>).format(ve_base)<EOL>raise exceptions.NoVirtualenvsDirectory(message)<EOL><DEDENT>if not ve_name:<EOL><INDENT>raise exceptions.InvalidVirtualenv("<STR_LIT>")<EOL><DEDENT>ve_path = os.path.join(ve_base, ve_name)<EOL>if ve_path == ve_name and os.path.basename(ve_name) != ve_name:<EOL><INDENT>raise exceptions.InvalidVirtualenv(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'.format(ve_path))<EOL><DEDENT>ve_path = os.path.abspath(ve_path)<EOL>if not os.path.exists(ve_path):<EOL><INDENT>raise exceptions.InvalidVirtualenv(<EOL>"<STR_LIT>".format(ve_path))<EOL><DEDENT>return ve_path<EOL>
Check a virtualenv path, raising exceptions to explain problems.
f8586:m3
def get_command(options, vexrc, environ):
command = options.rest<EOL>if not command:<EOL><INDENT>command = vexrc.get_shell(environ)<EOL><DEDENT>if command and command[<NUM_LIT:0>].startswith('<STR_LIT>'):<EOL><INDENT>raise exceptions.InvalidCommand(<EOL>"<STR_LIT>"<EOL>% command[<NUM_LIT:0>])<EOL><DEDENT>if not command:<EOL><INDENT>raise exceptions.InvalidCommand("<STR_LIT>")<EOL><DEDENT>return command<EOL>
Get a command to run. :returns: a list of strings representing a command to be passed to Popen.
f8586:m4
def _main(environ, argv):
options = get_options(argv)<EOL>if options.version:<EOL><INDENT>return handle_version()<EOL><DEDENT>vexrc = get_vexrc(options, environ)<EOL>if options.shell_to_configure:<EOL><INDENT>return handle_shell_config(options.shell_to_configure, vexrc, environ)<EOL><DEDENT>if options.list is not None:<EOL><INDENT>return handle_list(vexrc.get_ve_base(environ), options.list)<EOL><DEDENT>cwd = get_cwd(options)<EOL>ve_base = vexrc.get_ve_base(environ)<EOL>ve_name = get_virtualenv_name(options)<EOL>command = get_command(options, vexrc, environ)<EOL>if options.make:<EOL><INDENT>if options.path:<EOL><INDENT>make_path = os.path.abspath(options.path)<EOL><DEDENT>else:<EOL><INDENT>make_path = os.path.abspath(os.path.join(ve_base, ve_name))<EOL><DEDENT>handle_make(environ, options, make_path)<EOL>ve_path = make_path<EOL><DEDENT>elif options.path:<EOL><INDENT>ve_path = os.path.abspath(options.path)<EOL>if not os.path.exists(ve_path) or not os.path.isdir(ve_path):<EOL><INDENT>raise exceptions.InvalidVirtualenv(<EOL>"<STR_LIT>")<EOL><DEDENT><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>ve_path = get_virtualenv_path(ve_base, ve_name)<EOL><DEDENT>except exceptions.NoVirtualenvName:<EOL><INDENT>options.print_help()<EOL>raise<EOL><DEDENT><DEDENT>env = get_environ(environ, vexrc['<STR_LIT>'], ve_path)<EOL>returncode = run(command, env=env, cwd=cwd)<EOL>if options.remove:<EOL><INDENT>handle_remove(ve_path)<EOL><DEDENT>if returncode is None:<EOL><INDENT>raise exceptions.InvalidCommand(<EOL>"<STR_LIT>".format(command[<NUM_LIT:0>]))<EOL><DEDENT>return returncode<EOL>
Logic for main(), with less direct system interaction. Routines called here raise InvalidArgument with messages that should be delivered on stderr, to be caught by main.
f8586:m7
def main():
argv = sys.argv[<NUM_LIT:1>:]<EOL>returncode = <NUM_LIT:1><EOL>try:<EOL><INDENT>returncode = _main(os.environ, argv)<EOL><DEDENT>except exceptions.InvalidArgument as error:<EOL><INDENT>if error.message:<EOL><INDENT>sys.stderr.write("<STR_LIT>" + error.message + '<STR_LIT:\n>')<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>sys.exit(returncode)<EOL>
The main command-line entry point, with system interactions.
f8586:m8
def get_environ(environ, defaults, ve_path):
<EOL>env = environ.copy()<EOL>env.update(defaults)<EOL>if '<STR_LIT>' in env:<EOL><INDENT>del env['<STR_LIT>']<EOL><DEDENT>if not ve_path:<EOL><INDENT>raise exceptions.BadConfig('<STR_LIT>')<EOL><DEDENT>if platform.system() == '<STR_LIT>':<EOL><INDENT>ve_bin = os.path.join(ve_path, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>ve_bin = os.path.join(ve_path, '<STR_LIT>')<EOL><DEDENT>current_ve = env.get('<STR_LIT>', '<STR_LIT>')<EOL>system_path = environ.get('<STR_LIT>', '<STR_LIT>')<EOL>segments = system_path.split(os.pathsep)<EOL>if current_ve:<EOL><INDENT>current_ve_bin = os.path.join(current_ve, '<STR_LIT>')<EOL>try:<EOL><INDENT>segments.remove(current_ve_bin)<EOL><DEDENT>except ValueError:<EOL><INDENT>raise exceptions.BadConfig(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>.format(system_path)<EOL>)<EOL><DEDENT><DEDENT>segments.insert(<NUM_LIT:0>, ve_bin)<EOL>env['<STR_LIT>'] = os.pathsep.join(segments)<EOL>env['<STR_LIT>'] = ve_path<EOL>return env<EOL>
Make an environment to run with.
f8589:m0
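A hedged sketch of the environment rewrite described above: prepend the virtualenv's bin directory to PATH and point VIRTUAL_ENV at the new path, dropping the old virtualenv's bin first. The exact variable names touched by the anonymized body are assumptions.

import os

def sketch_get_environ(environ, ve_path):
    env = dict(environ)
    ve_bin = os.path.join(ve_path, "bin")        # "Scripts" on Windows
    segments = env.get("PATH", "").split(os.pathsep)
    current = env.get("VIRTUAL_ENV")
    if current:                                   # drop the old venv's bin dir
        old_bin = os.path.join(current, "bin")
        segments = [s for s in segments if s != old_bin]
    env["PATH"] = os.pathsep.join([ve_bin] + segments)
    env["VIRTUAL_ENV"] = ve_path
    return env

env = sketch_get_environ({"PATH": "/usr/bin"}, "/home/me/.virtualenvs/demo")
print(env["PATH"])  # -> /home/me/.virtualenvs/demo/bin:/usr/bin (on POSIX)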
def run(command, env, cwd):
assert command<EOL>if cwd:<EOL><INDENT>assert os.path.exists(cwd)<EOL><DEDENT>if platform.system() == "<STR_LIT>":<EOL><INDENT>exe = distutils.spawn.find_executable(command[<NUM_LIT:0>], path=env['<STR_LIT>'])<EOL>if exe:<EOL><INDENT>command[<NUM_LIT:0>] = exe<EOL><DEDENT><DEDENT>_, command_name = os.path.split(command[<NUM_LIT:0>])<EOL>if (command_name in ('<STR_LIT>', '<STR_LIT>')<EOL>and '<STR_LIT>' not in env):<EOL><INDENT>env['<STR_LIT>'] = '<STR_LIT::>'<EOL><DEDENT>try:<EOL><INDENT>process = subprocess.Popen(command, env=env, cwd=cwd)<EOL>process.wait()<EOL><DEDENT>except exceptions.CommandNotFoundError as error:<EOL><INDENT>if error.errno != <NUM_LIT:2>:<EOL><INDENT>raise<EOL><DEDENT>return None<EOL><DEDENT>return process.returncode<EOL>
Run the given command.
f8589:m1
def is_authenticated(self, request, **kwargs):
log.info("<STR_LIT>")<EOL>try:<EOL><INDENT>key = request.GET.get('<STR_LIT>')<EOL>if not key:<EOL><INDENT>for header in ['<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>auth_header_value = request.META.get(header)<EOL>if auth_header_value:<EOL><INDENT>key = auth_header_value.split('<STR_LIT:U+0020>', <NUM_LIT:1>)[<NUM_LIT:1>]<EOL>break<EOL><DEDENT><DEDENT><DEDENT>if not key and request.method == '<STR_LIT:POST>':<EOL><INDENT>if request.META.get('<STR_LIT>') == '<STR_LIT:application/json>':<EOL><INDENT>decoded_body = request.body.decode('<STR_LIT:utf8>')<EOL>key = json.loads(decoded_body)['<STR_LIT>']<EOL><DEDENT><DEDENT>if not key:<EOL><INDENT>log.info('<STR_LIT>')<EOL>return None<EOL><DEDENT>"""<STR_LIT>"""<EOL>token = self.verify_access_token(key, request, **kwargs)<EOL>request.user = token.user<EOL>request.META['<STR_LIT>'] = key<EOL>return True<EOL><DEDENT>except KeyError:<EOL><INDENT>log.exception("<STR_LIT>")<EOL>request.user = AnonymousUser()<EOL>return False<EOL><DEDENT>except Exception:<EOL><INDENT>log.exception("<STR_LIT>")<EOL>return False<EOL><DEDENT>
Verify a 2-legged OAuth request. Parameters are accepted as values in the "Authorization" header, as a GET request parameter, or in a POST body.
f8601:c1:m1
def __init__(self, realm="<STR_LIT>", post=None, get=None, patch=None, put=None, delete=None, use_default=True, **kwargs):
super(OAuth2ScopedAuthentication, self).__init__(realm)<EOL>self.POST = post<EOL>if use_default:<EOL><INDENT>self.GET = get or post<EOL>self.DELETE = delete or post<EOL>if not patch and not put:<EOL><INDENT>self.PATCH = self.PUT = post<EOL><DEDENT>elif not patch or not put:<EOL><INDENT>self.PATCH = self.PUT = (put or patch)<EOL><DEDENT>else:<EOL><INDENT>self.PATCH = patch<EOL>self.PUT = put<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.GET = get<EOL>self.PUT = put<EOL>self.PATCH = patch<EOL>self.DELETE = delete<EOL><DEDENT>
https://tools.ietf.org/html/rfc6749 get, post, patch and put should each be a scope, a list of scopes, or None. If get is None, it defaults to post. If delete is None, it defaults to post. If both patch and put are None, they both default to post. If only one of patch or put is None, both default to the one that is not None. You can turn this overriding behavior off entirely by specifying use_default=False, but remember that None then means no scope requirement is specified for that HTTP method. A list of scopes has a logical "or" between its entries, e.g. get=("a b","c") for oauth2-toolkit means "the GET method requires scope 'a b' ('a' and 'b') or scope 'c'". get=(a|b,c) is the corresponding form for oauth2-provider, where a, b, c should be constants defined in your settings. Note: for oauth2-toolkit, you have to provide a space-separated string for a combination of scopes. You can also specify a single scope (instead of a list), and that scope will then be the only scope permitted for the corresponding method.
f8601:c2:m0
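A standalone sketch of the per-method scope defaulting rules spelled out above (not the authentication class itself): GET and DELETE fall back to POST, and PATCH/PUT fall back to each other or to POST.

def sketch_method_scopes(post=None, get=None, patch=None, put=None,
                         delete=None, use_default=True):
    if not use_default:
        return {"POST": post, "GET": get, "PATCH": patch,
                "PUT": put, "DELETE": delete}
    if patch is None and put is None:
        patch = put = post            # both missing: inherit from POST
    elif patch is None or put is None:
        patch = put = (put or patch)  # one missing: inherit from the other
    return {"POST": post, "GET": get or post, "PATCH": patch,
            "PUT": put, "DELETE": delete or post}

print(sketch_method_scopes(post="write", get="read"))
# -> {'POST': 'write', 'GET': 'read', 'PATCH': 'write', 'PUT': 'write', 'DELETE': 'write'}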
def iter_result_proxy(rp, step=None):
while True:<EOL><INDENT>if step is None:<EOL><INDENT>chunk = rp.fetchall()<EOL><DEDENT>else:<EOL><INDENT>chunk = rp.fetchmany(step)<EOL><DEDENT>if not chunk:<EOL><INDENT>break<EOL><DEDENT>for row in chunk:<EOL><INDENT>yield row<EOL><DEDENT><DEDENT>
Iterate over the ResultProxy.
f8604:m1
def normalize_column_name(name):
if not isinstance(name, six.string_types):<EOL><INDENT>raise ValueError('<STR_LIT>' % name)<EOL><DEDENT>name = name.strip()[:<NUM_LIT>]<EOL>if isinstance(name, six.text_type):<EOL><INDENT>while len(name.encode('<STR_LIT:utf-8>')) >= <NUM_LIT:64>:<EOL><INDENT>name = name[:len(name) - <NUM_LIT:1>]<EOL><DEDENT><DEDENT>if not len(name) or '<STR_LIT:.>' in name or '<STR_LIT:->' in name:<EOL><INDENT>raise ValueError('<STR_LIT>' % name)<EOL><DEDENT>return name<EOL>
Check if a string is a reasonable thing to use as a column name.
f8604:m2
def normalize_table_name(name):
if not isinstance(name, six.string_types):<EOL><INDENT>raise ValueError("<STR_LIT>" % name)<EOL><DEDENT>name = name.strip()[:<NUM_LIT>]<EOL>if not len(name):<EOL><INDENT>raise ValueError("<STR_LIT>" % name)<EOL><DEDENT>return name<EOL>
Check if the table name is obviously invalid.
f8604:m3
def safe_url(url):
parsed = urlparse(url)<EOL>if parsed.password is not None:<EOL><INDENT>pwd = '<STR_LIT>' % parsed.password<EOL>url = url.replace(pwd, '<STR_LIT>')<EOL><DEDENT>return url<EOL>
Remove password from printed connection URLs.
f8604:m4
def index_name(table, columns):
sig = '<STR_LIT>'.join(columns)<EOL>key = sha1(sig.encode('<STR_LIT:utf-8>')).hexdigest()[:<NUM_LIT:16>]<EOL>return '<STR_LIT>' % (table, key)<EOL>
Generate an artificial index name.
f8604:m5
def ensure_tuple(obj):
if obj is None:<EOL><INDENT>return tuple()<EOL><DEDENT>if isinstance(obj, Iterable) and not isinstance(obj, six.string_types):<EOL><INDENT>return tuple(obj)<EOL><DEDENT>return obj,<EOL>
Try and make the given argument into a tuple.
f8604:m6
def pad_chunk_columns(chunk):
columns = set()<EOL>for record in chunk:<EOL><INDENT>columns.update(record.keys())<EOL><DEDENT>for record in chunk:<EOL><INDENT>for column in columns:<EOL><INDENT>record.setdefault(column, None)<EOL><DEDENT><DEDENT>return chunk<EOL>
Given a set of items to be inserted, make sure they all have the same columns by padding columns with None if they are missing.
f8604:m7
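A worked example of the padding described above: every record in a chunk ends up with the same set of keys, with missing columns filled in as None.

chunk = [{"name": "Ada"}, {"name": "Grace", "year": 1906}]
columns = set()
for record in chunk:
    columns.update(record.keys())
for record in chunk:
    for column in columns:
        record.setdefault(column, None)
print(chunk)
# -> [{'name': 'Ada', 'year': None}, {'name': 'Grace', 'year': 1906}]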
def __init__(self, database, table_name, primary_id=None,<EOL>primary_type=None, auto_create=False):
self.db = database<EOL>self.name = normalize_table_name(table_name)<EOL>self._table = None<EOL>self._indexes = []<EOL>self._primary_id = primary_id<EOL>self._primary_type = primary_type<EOL>self._auto_create = auto_create<EOL>
Initialise the table from database schema.
f8605:c0:m0
@property<EOL><INDENT>def exists(self):<DEDENT>
if self._table is not None:<EOL><INDENT>return True<EOL><DEDENT>return self.name in self.db<EOL>
Check to see if the table currently exists in the database.
f8605:c0:m1
@property<EOL><INDENT>def table(self):<DEDENT>
if self._table is None:<EOL><INDENT>self._sync_table(())<EOL><DEDENT>return self._table<EOL>
Get a reference to the table, which may be reflected or created.
f8605:c0:m2
@property<EOL><INDENT>def columns(self):<DEDENT>
if not self.exists:<EOL><INDENT>return []<EOL><DEDENT>return self.table.columns.keys()<EOL>
Get a listing of all columns that exist in the table.
f8605:c0:m3
def has_column(self, column):
return normalize_column_name(column) in self.columns<EOL>
Check if a column with the given name exists on this table.
f8605:c0:m4
def insert(self, row, ensure=None, types=None):
row = self._sync_columns(row, ensure, types=types)<EOL>res = self.db.executable.execute(self.table.insert(row))<EOL>if len(res.inserted_primary_key) > <NUM_LIT:0>:<EOL><INDENT>return res.inserted_primary_key[<NUM_LIT:0>]<EOL><DEDENT>return True<EOL>
Add a ``row`` dict by inserting it into the table. If ``ensure`` is set and any of the keys of the row are not table columns, they will be created automatically. During column creation, ``types`` will be checked for a key matching the name of a column to be created, and the given SQLAlchemy column type will be used. Otherwise, the type is guessed from the row value, defaulting to a simple unicode field. :: data = dict(title='I am a banana!') table.insert(data) Returns the inserted row's primary key.
f8605:c0:m5
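A hedged end-to-end usage sketch for this Table API via the public dataset package entry point (assuming the package is installed):

import dataset   # the package these Table helpers belong to (assumed installed)

db = dataset.connect("sqlite:///:memory:")      # in-memory SQLite database
table = db["people"]                            # table is created lazily
pk = table.insert(dict(name="Dolly", age=33))   # missing columns are added automatically
print(pk, list(table.find(name="Dolly")))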
def insert_ignore(self, row, keys, ensure=None, types=None):
row = self._sync_columns(row, ensure, types=types)<EOL>if self._check_ensure(ensure):<EOL><INDENT>self.create_index(keys)<EOL><DEDENT>args, _ = self._keys_to_args(row, keys)<EOL>if self.count(**args) == <NUM_LIT:0>:<EOL><INDENT>return self.insert(row, ensure=False)<EOL><DEDENT>return False<EOL>
Add a ``row`` dict into the table if the row does not exist. If rows with matching ``keys`` exist, they will not be added to the table. Setting ``ensure`` results in automatically creating missing columns, i.e., keys of the row that are not table columns. During column creation, ``types`` will be checked for a key matching the name of a column to be created, and the given SQLAlchemy column type will be used. Otherwise, the type is guessed from the row value, defaulting to a simple unicode field. :: data = dict(id=10, title='I am a banana!') table.insert_ignore(data, ['id'])
f8605:c0:m6
def insert_many(self, rows, chunk_size=<NUM_LIT:1000>, ensure=None, types=None):
chunk = []<EOL>for row in rows:<EOL><INDENT>row = self._sync_columns(row, ensure, types=types)<EOL>chunk.append(row)<EOL>if len(chunk) == chunk_size:<EOL><INDENT>chunk = pad_chunk_columns(chunk)<EOL>self.table.insert().execute(chunk)<EOL>chunk = []<EOL><DEDENT><DEDENT>if len(chunk):<EOL><INDENT>chunk = pad_chunk_columns(chunk)<EOL>self.table.insert().execute(chunk)<EOL><DEDENT>
Add many rows at a time. This is significantly faster than adding them one by one. By default the rows are processed in chunks of 1000 per commit, unless you specify a different ``chunk_size``. See :py:meth:`insert() <dataset.Table.insert>` for details on the other parameters. :: rows = [dict(name='Dolly')] * 10000 table.insert_many(rows)
f8605:c0:m7
def update(self, row, keys, ensure=None, types=None, return_count=False):
row = self._sync_columns(row, ensure, types=types)<EOL>args, row = self._keys_to_args(row, keys)<EOL>clause = self._args_to_clause(args)<EOL>if not len(row):<EOL><INDENT>return self.count(clause)<EOL><DEDENT>stmt = self.table.update(whereclause=clause, values=row)<EOL>rp = self.db.executable.execute(stmt)<EOL>if rp.supports_sane_rowcount():<EOL><INDENT>return rp.rowcount<EOL><DEDENT>if return_count:<EOL><INDENT>return self.count(clause)<EOL><DEDENT>
Update a row in the table. The update is managed via the set of column names stated in ``keys``: they will be used as filters for the data to be updated, using the values in ``row``. :: # update all entries with id matching 10, setting their title columns data = dict(id=10, title='I am a banana!') table.update(data, ['id']) If keys in ``row`` update columns not present in the table, they will be created based on the settings of ``ensure`` and ``types``, matching the behavior of :py:meth:`insert() <dataset.Table.insert>`.
f8605:c0:m8
def upsert(self, row, keys, ensure=None, types=None):
row = self._sync_columns(row, ensure, types=types)<EOL>if self._check_ensure(ensure):<EOL><INDENT>self.create_index(keys)<EOL><DEDENT>row_count = self.update(row, keys, ensure=False, return_count=True)<EOL>if row_count == <NUM_LIT:0>:<EOL><INDENT>return self.insert(row, ensure=False)<EOL><DEDENT>return True<EOL>
An UPSERT is a smart combination of insert and update. If rows with matching ``keys`` exist they will be updated, otherwise a new row is inserted in the table. :: data = dict(id=10, title='I am a banana!') table.upsert(data, ['id'])
f8605:c0:m9
def delete(self, *clauses, **filters):
if not self.exists:<EOL><INDENT>return False<EOL><DEDENT>clause = self._args_to_clause(filters, clauses=clauses)<EOL>stmt = self.table.delete(whereclause=clause)<EOL>rp = self.db.executable.execute(stmt)<EOL>return rp.rowcount > <NUM_LIT:0><EOL>
Delete rows from the table. Keyword arguments can be used to add column-based filters. The filter criterion will always be equality: :: table.delete(place='Berlin') If no arguments are given, all records are deleted.
f8605:c0:m10
def _reflect_table(self):
with self.db.lock:<EOL><INDENT>try:<EOL><INDENT>self._table = SQLATable(self.name,<EOL>self.db.metadata,<EOL>schema=self.db.schema,<EOL>autoload=True)<EOL><DEDENT>except NoSuchTableError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>
Load the tables definition from the database.
f8605:c0:m11
def _sync_table(self, columns):
if self._table is None:<EOL><INDENT>self._reflect_table()<EOL><DEDENT>if self._table is None:<EOL><INDENT>if not self._auto_create:<EOL><INDENT>raise DatasetException("<STR_LIT>" % self.name)<EOL><DEDENT>with self.db.lock:<EOL><INDENT>self._threading_warn()<EOL>self._table = SQLATable(self.name,<EOL>self.db.metadata,<EOL>schema=self.db.schema)<EOL>if self._primary_id is not False:<EOL><INDENT>primary_id = self._primary_id or self.PRIMARY_DEFAULT<EOL>primary_type = self._primary_type or Types.integer<EOL>increment = primary_type in [Types.integer, Types.bigint]<EOL>column = Column(primary_id, primary_type,<EOL>primary_key=True,<EOL>autoincrement=increment)<EOL>self._table.append_column(column)<EOL><DEDENT>for column in columns:<EOL><INDENT>if not column.name == self._primary_id:<EOL><INDENT>self._table.append_column(column)<EOL><DEDENT><DEDENT>self._table.create(self.db.executable, checkfirst=True)<EOL><DEDENT><DEDENT>elif len(columns):<EOL><INDENT>with self.db.lock:<EOL><INDENT>self._reflect_table()<EOL>self._threading_warn()<EOL>for column in columns:<EOL><INDENT>if not self.has_column(column.name):<EOL><INDENT>self.db.op.add_column(self.name, column, self.db.schema)<EOL><DEDENT><DEDENT>self._reflect_table()<EOL><DEDENT><DEDENT>
Lazy load, create or adapt the table structure in the database.
f8605:c0:m13
def _sync_columns(self, row, ensure, types=None):
columns = self.columns<EOL>ensure = self._check_ensure(ensure)<EOL>types = types or {}<EOL>types = {normalize_column_name(k): v for (k, v) in types.items()}<EOL>out = {}<EOL>sync_columns = []<EOL>for name, value in row.items():<EOL><INDENT>name = normalize_column_name(name)<EOL>if ensure and name not in columns:<EOL><INDENT>_type = types.get(name)<EOL>if _type is None:<EOL><INDENT>_type = self.db.types.guess(value)<EOL><DEDENT>sync_columns.append(Column(name, _type))<EOL>columns.append(name)<EOL><DEDENT>if name in columns:<EOL><INDENT>out[name] = value<EOL><DEDENT><DEDENT>self._sync_table(sync_columns)<EOL>return out<EOL>
Create missing columns (or the table) prior to writes. If automatic schema generation is disabled (``ensure`` is ``False``), this will remove any keys from the ``row`` for which there is no matching column.
f8605:c0:m14
def create_column(self, name, type):
name = normalize_column_name(name)<EOL>if self.has_column(name):<EOL><INDENT>log.debug("<STR_LIT>" % name)<EOL>return<EOL><DEDENT>self._sync_table((Column(name, type),))<EOL>
Create a new column ``name`` of a specified type. :: table.create_column('created_at', db.types.datetime) `type` corresponds to an SQLAlchemy type as described by `dataset.db.Types`
f8605:c0:m19
def create_column_by_example(self, name, value):
type_ = self.db.types.guess(value)<EOL>self.create_column(name, type_)<EOL>
Explicitly create a new column ``name`` with a type that is appropriate to store the given example ``value``. The type is guessed in the same way as for the insert method with ``ensure=True``. :: table.create_column_by_example('length', 4.2) If a column of the same name already exists, no action is taken, even if it is not of the type we would have created.
f8605:c0:m20
def drop_column(self, name):
if self.db.engine.dialect.name == '<STR_LIT>':<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>name = normalize_column_name(name)<EOL>with self.db.lock:<EOL><INDENT>if not self.exists or not self.has_column(name):<EOL><INDENT>log.debug("<STR_LIT>", name)<EOL>return<EOL><DEDENT>self._threading_warn()<EOL>self.db.op.drop_column(<EOL>self.table.name,<EOL>name,<EOL>self.table.schema<EOL>)<EOL>self._reflect_table()<EOL><DEDENT>
Drop the column ``name``. :: table.drop_column('created_at')
f8605:c0:m21
def drop(self):
with self.db.lock:<EOL><INDENT>if self.exists:<EOL><INDENT>self._threading_warn()<EOL>self.table.drop(self.db.executable, checkfirst=True)<EOL>self._table = None<EOL><DEDENT><DEDENT>
Drop the table from the database. Deletes both the schema and all the contents within it.
f8605:c0:m22
def has_index(self, columns):
if not self.exists:<EOL><INDENT>return False<EOL><DEDENT>columns = set([normalize_column_name(c) for c in columns])<EOL>if columns in self._indexes:<EOL><INDENT>return True<EOL><DEDENT>for column in columns:<EOL><INDENT>if not self.has_column(column):<EOL><INDENT>return False<EOL><DEDENT><DEDENT>indexes = self.db.inspect.get_indexes(self.name, schema=self.db.schema)<EOL>for index in indexes:<EOL><INDENT>if columns == set(index.get('<STR_LIT>', [])):<EOL><INDENT>self._indexes.append(columns)<EOL>return True<EOL><DEDENT><DEDENT>return False<EOL>
Check if an index exists to cover the given ``columns``.
f8605:c0:m23
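A typical guard built on this check (column names are illustrative)::

    if not table.has_index(['country', 'year']):
        table.create_index(['country', 'year'])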
def create_index(self, columns, name=None, **kw):
columns = [normalize_column_name(c) for c in ensure_tuple(columns)]<EOL>with self.db.lock:<EOL><INDENT>if not self.exists:<EOL><INDENT>raise DatasetException("<STR_LIT>")<EOL><DEDENT>for column in columns:<EOL><INDENT>if not self.has_column(column):<EOL><INDENT>return<EOL><DEDENT><DEDENT>if not self.has_index(columns):<EOL><INDENT>self._threading_warn()<EOL>name = name or index_name(self.name, columns)<EOL>columns = [self.table.c[c] for c in columns]<EOL>idx = Index(name, *columns, **kw)<EOL>idx.create(self.db.executable)<EOL><DEDENT><DEDENT>
Create an index to speed up queries on a table. If no ``name`` is given, a random name is created. :: table.create_index(['name', 'country'])
f8605:c0:m24
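A variant passing an explicit index name; extra keyword arguments are forwarded to the underlying SQLAlchemy ``Index`` (the name is illustrative)::

    table.create_index(['name', 'country'], name='ix_people_name_country')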
def find(self, *_clauses, **kwargs):
if not self.exists:<EOL><INDENT>return iter([])<EOL><DEDENT>_limit = kwargs.pop('<STR_LIT>', None)<EOL>_offset = kwargs.pop('<STR_LIT>', <NUM_LIT:0>)<EOL>order_by = kwargs.pop('<STR_LIT>', None)<EOL>_streamed = kwargs.pop('<STR_LIT>', False)<EOL>_step = kwargs.pop('<STR_LIT>', QUERY_STEP)<EOL>if _step is False or _step == <NUM_LIT:0>:<EOL><INDENT>_step = None<EOL><DEDENT>order_by = self._args_to_order_by(order_by)<EOL>args = self._args_to_clause(kwargs, clauses=_clauses)<EOL>query = self.table.select(whereclause=args,<EOL>limit=_limit,<EOL>offset=_offset)<EOL>if len(order_by):<EOL><INDENT>query = query.order_by(*order_by)<EOL><DEDENT>conn = self.db.executable<EOL>if _streamed:<EOL><INDENT>conn = self.db.engine.connect()<EOL>conn = conn.execution_options(stream_results=True)<EOL><DEDENT>return ResultIter(conn.execute(query),<EOL>row_type=self.db.row_type,<EOL>step=_step)<EOL>
Perform a simple search on the table. Simply pass keyword arguments as ``filter``. :: results = table.find(country='France') results = table.find(country='France', year=1980) Using ``_limit``:: # just return the first 10 rows results = table.find(country='France', _limit=10) You can sort the results by single or multiple columns. Append a minus sign to the column name for descending order:: # sort results by a column 'year' results = table.find(country='France', order_by='year') # return all rows sorted by multiple columns (descending by year) results = table.find(order_by=['country', '-year']) To perform complex queries with advanced filters or to perform aggregation, use :py:meth:`db.query() <dataset.Database.query>` instead.
f8605:c0:m25
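A small paging sketch using the ``_limit`` and ``_offset`` keywords handled in the body above (filter values are illustrative)::

    # second page of 10 results, newest first
    rows = table.find(country='France', order_by='-year', _limit=10, _offset=10)
    for row in rows:
        print(row['year'])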
def find_one(self, *args, **kwargs):
if not self.exists:<EOL><INDENT>return None<EOL><DEDENT>kwargs['<STR_LIT>'] = <NUM_LIT:1><EOL>kwargs['<STR_LIT>'] = None<EOL>resiter = self.find(*args, **kwargs)<EOL>try:<EOL><INDENT>for row in resiter:<EOL><INDENT>return row<EOL><DEDENT><DEDENT>finally:<EOL><INDENT>resiter.close()<EOL><DEDENT>
Get a single result from the table. Works just like :py:meth:`find() <dataset.Table.find>` but returns one result, or ``None``. :: row = table.find_one(country='United States')
f8605:c0:m26
def count(self, *_clauses, **kwargs):
<EOL>if not self.exists:<EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT>args = self._args_to_clause(kwargs, clauses=_clauses)<EOL>query = select([func.count()], whereclause=args)<EOL>query = query.select_from(self.table)<EOL>rp = self.db.executable.execute(query)<EOL>return rp.fetchone()[<NUM_LIT:0>]<EOL>
Return the count of results for the given filter set.
f8605:c0:m27
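A short sketch (filter values are illustrative)::

    n = table.count(country='France', year=1980)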
def __len__(self):
return self.count()<EOL>
Return the number of rows in the table.
f8605:c0:m28
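Equivalent to an unfiltered count::

    print(len(table))   # same as table.count() with no filter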
def distinct(self, *args, **_filter):
if not self.exists:<EOL><INDENT>return iter([])<EOL><DEDENT>columns = []<EOL>clauses = []<EOL>for column in args:<EOL><INDENT>if isinstance(column, ClauseElement):<EOL><INDENT>clauses.append(column)<EOL><DEDENT>else:<EOL><INDENT>if not self.has_column(column):<EOL><INDENT>raise DatasetException("<STR_LIT>" % column)<EOL><DEDENT>columns.append(self.table.c[column])<EOL><DEDENT><DEDENT>clause = self._args_to_clause(_filter, clauses=clauses)<EOL>if not len(columns):<EOL><INDENT>return iter([])<EOL><DEDENT>q = expression.select(columns,<EOL>distinct=True,<EOL>whereclause=clause,<EOL>order_by=[c.asc() for c in columns])<EOL>return self.db.query(q)<EOL>
Return all the unique (distinct) values for the given ``columns``. :: # returns only one row per year, ignoring the rest table.distinct('year') # works with multiple columns, too table.distinct('year', 'country') # you can also combine this with a filter table.distinct('year', country='China')
f8605:c0:m29
def __iter__(self):
return self.find()<EOL>
Return all rows of the table as simple dictionaries. Allows for iterating over all rows in the table without explicitly calling :py:meth:`find() <dataset.Table.find>`. :: for row in table: print(row)
f8605:c0:m30
def __repr__(self):
return '<STR_LIT>' % self.table.name<EOL>
Get table representation.
f8605:c0:m31
def guess(cls, sample):
if isinstance(sample, TypeEngine):<EOL><INDENT>return sample<EOL><DEDENT>if isinstance(sample, bool):<EOL><INDENT>return cls.boolean<EOL><DEDENT>elif isinstance(sample, int):<EOL><INDENT>return cls.integer<EOL><DEDENT>elif isinstance(sample, float):<EOL><INDENT>return cls.float<EOL><DEDENT>elif isinstance(sample, datetime):<EOL><INDENT>return cls.datetime<EOL><DEDENT>elif isinstance(sample, date):<EOL><INDENT>return cls.date<EOL><DEDENT>return cls.text<EOL>
Given a single sample, guess the column type for the field. If the sample is an instance of an SQLAlchemy type, the type will be used instead.
f8607:c0:m0
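A sketch of the guessing rules above, assuming the helper is reachable as ``db.types`` (as used elsewhere in this section)::

    from datetime import datetime

    db.types.guess(True)            # -> boolean
    db.types.guess(42)              # -> integer
    db.types.guess(4.2)             # -> float
    db.types.guess(datetime.now())  # -> datetime
    db.types.guess('hello')         # -> text (the fallback)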
def connect(url=None, schema=None, reflect_metadata=True, engine_kwargs=None,<EOL>reflect_views=True, ensure_schema=True, row_type=row_type):
if url is None:<EOL><INDENT>url = os.environ.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>return Database(url, schema=schema, reflect_metadata=reflect_metadata,<EOL>engine_kwargs=engine_kwargs, reflect_views=reflect_views,<EOL>ensure_schema=ensure_schema, row_type=row_type)<EOL>
Opens a new connection to a database. *url* can be any valid `SQLAlchemy engine URL`_. If *url* is not defined, it will try to use the *DATABASE_URL* environment variable. Returns an instance of :py:class:`Database <dataset.Database>`. Set *reflect_metadata* to False if you don't want the entire database schema to be pre-loaded. This significantly speeds up connecting to large databases with lots of tables. *reflect_views* can be set to False if you don't want views to be loaded. Additionally, *engine_kwargs* will be passed directly to SQLAlchemy, e.g. setting *engine_kwargs={'pool_recycle': 3600}* will avoid a `DB connection timeout`_. Set *row_type* to an alternate dict-like class to change the type of container in which rows are stored.:: db = dataset.connect('sqlite:///factbook.db') .. _SQLAlchemy Engine URL: http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine .. _DB connection timeout: http://docs.sqlalchemy.org/en/latest/core/pooling.html#setting-pool-recycle
f8608:m0
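A hedged example combining several of the options discussed above (the URL and schema are illustrative)::

    import dataset

    db = dataset.connect('postgresql://user:pass@localhost/factbook',
                         engine_kwargs={'pool_recycle': 3600},
                         schema='public')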
def __init__(self, url, schema=None, reflect_metadata=True,<EOL>engine_kwargs=None, reflect_views=True,<EOL>ensure_schema=True, row_type=row_type):
if engine_kwargs is None:<EOL><INDENT>engine_kwargs = {}<EOL><DEDENT>parsed_url = urlparse(url)<EOL>if parsed_url.scheme.lower() in '<STR_LIT>':<EOL><INDENT>if '<STR_LIT>' not in engine_kwargs:<EOL><INDENT>engine_kwargs['<STR_LIT>'] = StaticPool<EOL><DEDENT><DEDENT>self.lock = threading.RLock()<EOL>self.local = threading.local()<EOL>if len(parsed_url.query):<EOL><INDENT>query = parse_qs(parsed_url.query)<EOL>if schema is None:<EOL><INDENT>schema_qs = query.get('<STR_LIT>', query.get('<STR_LIT>', []))<EOL>if len(schema_qs):<EOL><INDENT>schema = schema_qs.pop()<EOL><DEDENT><DEDENT><DEDENT>self.types = Types()<EOL>self.schema = schema<EOL>self.engine = create_engine(url, **engine_kwargs)<EOL>self.url = url<EOL>self.row_type = row_type<EOL>self.ensure_schema = ensure_schema<EOL>self._tables = {}<EOL>
Configure and connect to the database.
f8609:c0:m0
@property<EOL><INDENT>def executable(self):<DEDENT>
if not hasattr(self.local, '<STR_LIT>'):<EOL><INDENT>self.local.conn = self.engine.connect()<EOL><DEDENT>return self.local.conn<EOL>
Connection against which statements will be executed.
f8609:c0:m1
@property<EOL><INDENT>def op(self):<DEDENT>
ctx = MigrationContext.configure(self.executable)<EOL>return Operations(ctx)<EOL>
Get an alembic operations context.
f8609:c0:m2
@property<EOL><INDENT>def inspect(self):<DEDENT>
return Inspector.from_engine(self.executable)<EOL>
Get a SQLAlchemy inspector.
f8609:c0:m3
@property<EOL><INDENT>def metadata(self):<DEDENT>
return MetaData(schema=self.schema, bind=self.executable)<EOL>
Return a SQLAlchemy schema cache object.
f8609:c0:m4
@property<EOL><INDENT>def in_transaction(self):<DEDENT>
if not hasattr(self.local, '<STR_LIT>'):<EOL><INDENT>return False<EOL><DEDENT>return len(self.local.tx) > <NUM_LIT:0><EOL>
Check if this database is in a transactional context.
f8609:c0:m5
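A sketch of how this flag relates to the transaction helpers (``begin()`` and ``commit()`` are assumed from the upstream project; they are not shown in this section)::

    db.begin()
    assert db.in_transaction
    db['people'].insert({'name': 'Ada'})
    db.commit()
    assert not db.in_transaction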
def _flush_tables(self):
for table in self._tables.values():<EOL><INDENT>table._table = None<EOL><DEDENT>
Clear the table metadata after transaction rollbacks.
f8609:c0:m6