text stringlengths 4 1.02M | meta dict |
|---|---|
from PyQt5.QtWidgets import QWidget, QVBoxLayout, \
QListWidget, QPushButton
from PyQt5.QtWidgets import QInputDialog, QLineEdit
from scgv.qtviews.profiles_window import ShowProfilesWindow
class ProfilesActions(QWidget):
    """Side panel that collects cell-profile names selected by the user.

    Offers three actions: show the collected profiles in a new window,
    clear the list, and add a profile by name via an input dialog.
    """

    def __init__(self, main, *args, **kwargs):
        """Build the list widget and the Show/Clear/Add buttons.

        :param main: main window; used as dialog parent and forwarded
            to ShowProfilesWindow.
        """
        super(ProfilesActions, self).__init__(*args, **kwargs)
        self.main = main
        self.profiles = []  # profile names currently collected
        layout = QVBoxLayout(self)
        self.profiles_list = QListWidget(self)
        layout.addWidget(self.profiles_list)
        self.profiles_show_button = QPushButton("Profiles Show")
        self.profiles_show_button.clicked.connect(
            self.on_profiles_show
        )
        layout.addWidget(self.profiles_show_button)
        self.profiles_clear_button = QPushButton("Profiles Clear")
        self.profiles_clear_button.clicked.connect(
            self.on_profiles_clear
        )
        layout.addWidget(self.profiles_clear_button)
        self.profiles_add_button = QPushButton("Profiles Add")
        self.profiles_add_button.clicked.connect(
            self.on_profiles_add
        )
        layout.addWidget(self.profiles_add_button)
        self.model = None  # current data model; set via set_model()

    def set_model(self, model):
        """Attach the active data model (None when no case is open)."""
        self.model = model

    def on_profile_selected(self, profile, *args, **kwargs):
        """Append *profile* to the list, ignoring None and duplicates."""
        if profile is None or profile in self.profiles:
            return
        # Removed a stray debug print and an assert made redundant by
        # the guard above.
        self.profiles_list.addItem(profile)
        self.profiles.append(profile)

    def on_profiles_clear(self, *args, **kwargs):
        """Drop every collected profile from the widget and state."""
        self.profiles_list.clear()
        self.profiles = []

    def on_profiles_show(self, *args, **kwargs):
        """Open a ShowProfilesWindow for the collected profiles.

        The local selection is cleared first so a new one can be built.
        """
        if not self.profiles:
            return
        profiles = self.profiles[:]
        self.profiles_list.clear()
        self.profiles = []
        show_profiles = ShowProfilesWindow(
            self.model, profiles, self.main
        )
        show_profiles.show()

    def on_profiles_add(self, *args, **kwargs):
        """Prompt for a profile name and add it if the model knows it."""
        if self.model is None:
            return
        profile, ok_pressed = QInputDialog.getText(
            self.main, "SCGV Add Profile", "Profile:", QLineEdit.Normal, ""
        )
        if not ok_pressed or not profile:
            return
        if profile not in self.model.column_labels:
            print(
                "profile not found in current case:",
                profile, self.model.column_labels)
            return
        self.on_profile_selected(profile)
| {
"content_hash": "29cb7c3b1d47b72860ed5977e5c2646d",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 75,
"avg_line_length": 31.2,
"alnum_prop": 0.6109775641025641,
"repo_name": "KrasnitzLab/SCGV",
"id": "baca918afb179406d3c96479857140388f085f01",
"size": "2496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scgv/qtviews/profiles.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103351"
}
],
"symlink_target": ""
} |
import numpy as np
import tensorflow as tf
from candidate_selection.tensorflow_models.components.abstract_component import AbstractComponent
class MultilayerPerceptron(AbstractComponent):
    """Feed-forward MLP: a stack of dense layers with ReLU between
    hidden layers, optional dropout at train time, and an L2 penalty
    over the weight matrices."""

    # Layer widths, e.g. [in_dim, hidden_dim, out_dim]; defines
    # len(transforms) - 1 dense layers.
    transforms = None
    # Prefix for tf.Variable names ("_" is appended when non-empty).
    variable_prefix = None
    variables = None
    # Per-layer tf.Variable lists; filled by prepare_tensorflow_variables().
    weights = None
    biases = None
    # Coefficient of the L2 term returned by get_regularization_term().
    l2_scale = None
    dropout_rate=None
def __init__(self, transforms, variables, variable_prefix="", l2_scale=0.0, dropout_rate=0.0):
self.transforms = transforms
self.variable_prefix = variable_prefix
if self.variable_prefix != "":
self.variable_prefix += "_"
self.variables = variables
self.weights = [None]*(len(transforms)-1)
self.biases = [None]*(len(transforms)-1)
self.l2_scale=l2_scale
self.dropout_rate=dropout_rate
def prepare_tensorflow_variables(self, mode="train"):
for i in range(len(self.transforms)-1):
dim_1 = self.transforms[i]
dim_2 = self.transforms[i+1]
glorot_variance = np.sqrt(6)/np.sqrt(dim_1 + dim_2)
weight_initializer = np.random.uniform(-glorot_variance, glorot_variance, size=(dim_1, dim_2)).astype(np.float32)
bias_initializer = np.zeros(dim_2, dtype=np.float32)
self.weights[i] = tf.Variable(weight_initializer, name=self.variable_prefix + "_W" + str(i))
self.biases[i] = tf.Variable(bias_initializer, name=self.variable_prefix + "_b" + str(i))
def transform(self, vectors, mode="train"):
for i in range(len(self.transforms)-1):
if mode == "train" and self.dropout_rate > 0:
vectors = tf.nn.dropout(vectors, 1-self.dropout_rate)
vectors = tf.matmul(vectors, self.weights[i]) + self.biases[i]
if i < len(self.transforms) - 2:
vectors = tf.nn.relu(vectors)
return vectors
def get_regularization_term(self):
return self.l2_scale * tf.reduce_sum([tf.reduce_sum(tf.square(w)) for w in self.weights])
def handle_variable_assignment(self, batch, mode):
pass | {
"content_hash": "af0999bf7f236caf8d7ef4b2deefc9d8",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 125,
"avg_line_length": 37.17857142857143,
"alnum_prop": 0.6296829971181557,
"repo_name": "MichSchli/QuestionAnsweringGCN",
"id": "0ea714aca10a4f774d400220f89e347aca98eb8a",
"size": "2082",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old_version/candidate_selection/tensorflow_models/components/vector_encoders/multilayer_perceptron.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "730851"
},
{
"name": "Shell",
"bytes": "1446"
}
],
"symlink_target": ""
} |
import inspect
import sys
import numpy as np
import attrdict
from mtwaffle import graphs
from mtwaffle import mt
class Site(attrdict.AttrDict):
    """A single MT site: frequencies, impedance tensors, and derived
    quantities resolved dynamically from functions in mtwaffle.mt."""

    # Maps a two-character tensor-component suffix to [row, col]
    # indices into the trailing 2x2 axes of the impedance arrays.
    index_map = {
        'xx': [0, 0],
        'xy': [0, 1],
        'yx': [1, 0],
        'yy': [1, 1]
    }

    # Names in mt.callables that are never resolved as dynamic attributes.
    EXCLUDED_CALLABLES = ('between_freqs', )
    def __init__(self, freqs, zs, name='', phase_func=None, **kwargs):
        """Store frequencies, impedance tensors, and extra attributes.

        :param freqs: frequencies (converted to ndarray)
        :param zs: impedance tensors (converted to ndarray)
        :param name: optional site name
        :param phase_func: callable used by the ``phases`` property;
            defaults to mt.phase
        :param kwargs: additional attributes set on the instance
        """
        # NOTE(review): super() is called with attrdict.AttrDict as the
        # class argument, so AttrDict's own __init__ is skipped and its
        # parent's runs instead — presumably deliberate; confirm.
        super(attrdict.AttrDict, self).__init__()
        self.freqs = np.asarray(freqs)
        self.zs = np.asarray(zs)
        self.name = name
        if phase_func is None:
            phase_func = mt.phase
        self.phase_func = phase_func
        for key, value in kwargs.items():
            setattr(self, key, value)
@property
def periods(self):
return 1. / self.freqs
@property
def phases(self):
return self.phase_func(self.zs)
def inspect_mt_callable(self, name):
f = mt.callables[name]
argnames = [ # Find arguments of callable from mtwaffle.mt
p.name for p in inspect.signature(f).parameters.values()
if p.kind == p.POSITIONAL_OR_KEYWORD and p.default is p.empty
]
return f, argnames
def help(self, output=sys.stdout):
'''Print a list of the attributes which are available.'''
output.write('''
Attributes of mtwaffle.mtsite.Site are calculated using functions from the mtwaffle.mt module:
mtsite.Site mtwaffle.mt function
attribute (args are Site attributes) Function description
-------------- ------------------------------ ----------------------------------------------
''')
label = lambda f: f.__doc__.splitlines()[0] if f.__doc__ else 'MISSING DOC'
fnames = []
for fname, f in mt.callables.items():
try:
getattr(self, fname)
except:
pass
else:
fnames.append(fname)
for fname in fnames:
f, argnames = self.inspect_mt_callable(fname)
cname = self.__class__.__name__
argsig = ', '.join(['{}'.format(arg) for arg in argnames])
source = '{}({})'.format(fname, argsig)
label_attr = '{}'.format(fname.ljust(14))
label_source = source.ljust(30)
label_help = label(f)
output.write('{} {} {}\n'.format(label_attr, label_source, label_help))
# print('{fname}({sig})'.format(
# fname=fname, sig=', '.join([
# '{c}.{a}'.format(c=self.__class__.__name__, a=arg) for arg in f_arg_names])))
# output.write('{}.{} -- {}\n'.format(
# self.__class__.__name__,
# fname.ljust(max([len(fi) for fi in fnames])),
# doc(mt.callables[fname])
# )
# )
    def get_property(self, key):
        """Resolve a dynamic attribute for __getattr__/__getitem__.

        Returns the computed value, or False when *key* is not handled
        here. False — not None — is the sentinel because None could be
        a legitimate computed value.
        """
        # Is the key ending with xx, xy, yx, or yy?
        if key[-2:] in self.index_map:
            indices = self.index_map[key[-2:]]
            # Indexing with [Ellipsis] + indices selects one component
            # of the trailing 2x2 tensor axes while keeping all leading
            # (frequency) axes.
            if key.startswith('res_'):
                return self.appres[[Ellipsis] + indices]
            elif key.startswith('phase_'):
                return self.phases[[Ellipsis] + indices]
            elif key.startswith('zr_'):
                return self.zs.real[[Ellipsis] + indices]
            elif key.startswith('zi_'):
                return self.zs.imag[[Ellipsis] + indices]
        # See if we can complete a function from mtwaffle.mt using the
        # existing attributes in this Site:
        elif key in mt.callables and not key in self.EXCLUDED_CALLABLES:
            f, argnames = self.inspect_mt_callable(key)
            return f(*[getattr(self, arg) for arg in argnames])
        return False
def __getattr__(self, key):
value = self.get_property(key)
if value is False:
return super(attrdict.AttrDict, self).__getattr__(key)
else:
return value
def __getitem__(self, key):
value = self.get_property(key)
if value is False:
return super(attrdict.AttrDict, self).__getitem__(key)
else:
return value
def plot_res_phase(self, **kwargs):
args = (
(self.freqs, self.freqs),
(self.res_xy, self.res_yx),
(self.phase_xy, self.phase_yx),
)
if not 'res_indiv_kws' in kwargs:
kwargs['res_indiv_kws'] = (
{'label': 'xy', 'color': 'b'},
{'label': 'yx', 'color': 'g'},
)
return graphs.plot_res_phase(*args, **kwargs)
    def plot_impedance_tensors(self, *args, **kwargs):
        """Plot the impedance tensors against frequency.

        NOTE(review): *args is accepted but not forwarded to
        graphs.plot_impedance_tensors — only **kwargs is. Confirm
        whether dropping positional args is intentional.
        """
        return graphs.plot_impedance_tensors(
            self.zs, self.freqs, **kwargs)
def plot_ptensell(self, *args, **kwargs):
return graphs.plot_ptensell(
self.ptensors, self.freqs, *args, **kwargs
)
def plot_ptensell_filled(self, *args, **kwargs):
return graphs.plot_ptensell_filled(
self.ptensors, self.freqs, *args, **kwargs
)
def plot_mohr_imp(self, *args, **kwargs):
kwargs['title'] = kwargs.get('title', self.name)
return graphs.plot_mohr_imp(
self.zs, self.freqs, *args, **kwargs
)
def plot_mohr_ptensor(self, *args, **kwargs):
return graphs.plot_mohr_ptensor(
self.ptensors, self.freqs, *args, **kwargs
) | {
"content_hash": "746410035ecd4ecdc36a32ab9cfe2bc3",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 99,
"avg_line_length": 33.825,
"alnum_prop": 0.5206947524020695,
"repo_name": "kinverarity1/mtwaffle",
"id": "96d74d2ff0dc3fdfbc59821572994b8df28d1d63",
"size": "5412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mtwaffle/mtsite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "204318"
},
{
"name": "Python",
"bytes": "73149"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from textwrap import dedent
import _pytest._code
import py
import pytest
from _pytest.config import PytestPluginManager
from _pytest.main import EXIT_NOTESTSCOLLECTED, EXIT_USAGEERROR
@pytest.fixture(scope="module", params=["global", "inpackage"])
def basedir(request, tmpdir_factory):
    """Module-scoped directory tree with nested conftests in adir/ and
    adir/b/; parametrized to also run with __init__.py files making
    adir an importable package ("inpackage")."""
    # NOTE(review): calls the tmpdir fixture function directly instead
    # of requesting it — presumably because this module-scoped fixture
    # cannot depend on the function-scoped tmpdir; confirm.
    from _pytest.tmpdir import tmpdir
    tmpdir = tmpdir(request, tmpdir_factory)
    tmpdir.ensure("adir/conftest.py").write("a=1 ; Directory = 3")
    tmpdir.ensure("adir/b/conftest.py").write("b=2 ; a = 1.5")
    if request.param == "inpackage":
        tmpdir.ensure("adir/__init__.py")
        tmpdir.ensure("adir/b/__init__.py")
    return tmpdir
def ConftestWithSetinitial(path):
    """Return a PytestPluginManager initialised with *path* as its only
    command-line argument."""
    manager = PytestPluginManager()
    conftest_setinitial(manager, [path])
    return manager
def conftest_setinitial(conftest, args, confcutdir=None):
    """Feed *args* to conftest._set_initial_conftests via a stub namespace.

    :param conftest: a PytestPluginManager (anything exposing
        _set_initial_conftests)
    :param args: list of file-or-dir command-line arguments
    :param confcutdir: optional directory above which conftests are
        not collected
    """
    class Namespace(object):
        def __init__(self):
            self.file_or_dir = args
            # BUG FIX: str(None) used to yield the literal path "None";
            # keep None when no confcutdir is supplied.
            self.confcutdir = str(confcutdir) if confcutdir is not None else None
            self.noconftest = False
    conftest._set_initial_conftests(Namespace())
class TestConftestValueAccessGlobal(object):
    """Value lookup through _rget_with_confmod across nested conftests."""

    def test_basic_init(self, basedir):
        # 'a' is defined as 1 in adir/conftest.py (see basedir fixture).
        conftest = PytestPluginManager()
        p = basedir.join("adir")
        assert conftest._rget_with_confmod("a", p)[1] == 1

    def test_immediate_initialiation_and_incremental_are_the_same(self, basedir):
        # Loading conftests incrementally must grow the registry the
        # same way as loading them up front.
        conftest = PytestPluginManager()
        len(conftest._path2confmods)
        conftest._getconftestmodules(basedir)
        snap1 = len(conftest._path2confmods)
        #assert len(conftest._path2confmods) == snap1 + 1
        conftest._getconftestmodules(basedir.join('adir'))
        assert len(conftest._path2confmods) == snap1 + 1
        conftest._getconftestmodules(basedir.join('b'))
        assert len(conftest._path2confmods) == snap1 + 2

    def test_value_access_not_existing(self, basedir):
        # Values defined below the initialised path are not visible at
        # the base directory itself.
        conftest = ConftestWithSetinitial(basedir)
        with pytest.raises(KeyError):
            conftest._rget_with_confmod('a', basedir)

    def test_value_access_by_path(self, basedir):
        # The nearest conftest wins: adir/b overrides 'a' to 1.5.
        conftest = ConftestWithSetinitial(basedir)
        adir = basedir.join("adir")
        assert conftest._rget_with_confmod("a", adir)[1] == 1
        assert conftest._rget_with_confmod("a", adir.join("b"))[1] == 1.5

    def test_value_access_with_confmod(self, basedir):
        startdir = basedir.join("adir", "b")
        startdir.ensure("xx", dir=True)
        conftest = ConftestWithSetinitial(startdir)
        mod, value = conftest._rget_with_confmod("a", startdir)
        assert value == 1.5
        # The returned module must be the conftest.py in adir/b.
        path = py.path.local(mod.__file__)
        assert path.dirpath() == basedir.join("adir", "b")
        assert path.purebasename.startswith("conftest")
def test_conftest_in_nonpkg_with_init(tmpdir):
    """Conftest loading must not fail for a directory that contains
    __init__.py files but is not importable as a package (its name
    "adir-1.0" is not a valid Python identifier)."""
    tmpdir.ensure("adir-1.0/conftest.py").write("a=1 ; Directory = 3")
    tmpdir.ensure("adir-1.0/b/conftest.py").write("b=2 ; a = 1.5")
    tmpdir.ensure("adir-1.0/b/__init__.py")
    tmpdir.ensure("adir-1.0/__init__.py")
    ConftestWithSetinitial(tmpdir.join("adir-1.0", "b"))
def test_doubledash_considered(testdir):
    """A directory named like a command-line option ("--option") is
    still treated as a conftest location, and passing it twice loads
    the conftest only once."""
    conf = testdir.mkdir("--option")
    conf.join("conftest.py").ensure()
    conftest = PytestPluginManager()
    conftest_setinitial(conftest, [conf.basename, conf.basename])
    # Renamed ambiguous single-letter variable 'l' (PEP 8 / E741).
    mods = conftest._getconftestmodules(conf)
    assert len(mods) == 1
def test_issue151_load_all_conftests(testdir):
    """Every directory named on the command line gets its conftest loaded."""
    names = ["code", "proj", "src"]
    for dirname in names:
        testdir.mkdir(dirname).ensure("conftest.py")
    conftest = PytestPluginManager()
    conftest_setinitial(conftest, names)
    loaded = list(conftest._conftestpath2mod.values())
    assert len(loaded) == len(names)
def test_conftest_global_import(testdir):
    """A conftest imported via _importconftest is the very module object
    a plain ``import conftest`` yields, per directory."""
    testdir.makeconftest("x=3")
    p = testdir.makepyfile("""
        import py, pytest
        from _pytest.config import PytestPluginManager
        conf = PytestPluginManager()
        mod = conf._importconftest(py.path.local("conftest.py"))
        assert mod.x == 3
        import conftest
        assert conftest is mod, (conftest, mod)
        subconf = py.path.local().ensure("sub", "conftest.py")
        subconf.write("y=4")
        mod2 = conf._importconftest(subconf)
        assert mod != mod2
        assert mod2.y == 4
        import conftest
        assert conftest is mod2, (conftest, mod)
    """)
    res = testdir.runpython(p)
    assert res.ret == 0
def test_conftestcutdir(testdir):
    """--confcutdir stops automatic conftest collection above it, but an
    explicit _importconftest still works and updates cached lookups."""
    conf = testdir.makeconftest("")
    p = testdir.mkdir("x")
    conftest = PytestPluginManager()
    conftest_setinitial(conftest, [testdir.tmpdir], confcutdir=p)
    # Renamed ambiguous single-letter variable 'l' (PEP 8 / E741).
    mods = conftest._getconftestmodules(p)
    assert len(mods) == 0
    mods = conftest._getconftestmodules(conf.dirpath())
    assert len(mods) == 0
    assert conf not in conftest._conftestpath2mod
    # but we can still import a conftest directly
    conftest._importconftest(conf)
    mods = conftest._getconftestmodules(conf.dirpath())
    assert mods[0].__file__.startswith(str(conf))
    # and all sub paths get updated properly
    mods = conftest._getconftestmodules(p)
    assert len(mods) == 1
    assert mods[0].__file__.startswith(str(conf))
def test_conftestcutdir_inplace_considered(testdir):
    """A conftest living exactly at the confcutdir is still collected."""
    conf = testdir.makeconftest("")
    manager = PytestPluginManager()
    conftest_setinitial(manager, [conf.dirpath()], confcutdir=conf.dirpath())
    mods = manager._getconftestmodules(conf.dirpath())
    assert len(mods) == 1
    assert mods[0].__file__.startswith(str(conf))
@pytest.mark.parametrize("name", 'test tests whatever .dotdir'.split())
def test_setinitial_conftest_subdirs(testdir, name):
    """Initial collection registers conftests from well-known test
    directories ('test'/'tests'); arbitrary and dot-directories are
    left alone."""
    sub = testdir.mkdir(name)
    subconftest = sub.ensure("conftest.py")
    conftest = PytestPluginManager()
    conftest_setinitial(conftest, [sub.dirpath()], confcutdir=testdir.tmpdir)
    if name not in ('whatever', '.dotdir'):
        assert subconftest in conftest._conftestpath2mod
        assert len(conftest._conftestpath2mod) == 1
    else:
        assert subconftest not in conftest._conftestpath2mod
        assert len(conftest._conftestpath2mod) == 0
def test_conftest_confcutdir(testdir):
    """With --confcutdir, the broken root conftest ("assert 0") is cut
    off while the target directory's conftest options still load."""
    testdir.makeconftest("assert 0")
    x = testdir.mkdir("x")
    x.join("conftest.py").write(_pytest._code.Source("""
        def pytest_addoption(parser):
            parser.addoption("--xyz", action="store_true")
    """))
    result = testdir.runpytest("-h", "--confcutdir=%s" % x, x)
    # The option from x/conftest.py must appear in the help output.
    result.stdout.fnmatch_lines(["*--xyz*"])
    assert 'warning: could not load initial' not in result.stdout.str()
def test_no_conftest(testdir):
    """--noconftest skips the (deliberately broken) conftest; without
    the flag, startup fails with a usage error."""
    testdir.makeconftest("assert 0")
    assert testdir.runpytest("--noconftest").ret == EXIT_NOTESTSCOLLECTED
    assert testdir.runpytest().ret == EXIT_USAGEERROR
def test_conftest_existing_resultlog(testdir):
    """A pre-existing result.log must not prevent conftest options from
    being loaded for --help."""
    x = testdir.mkdir("tests")
    x.join("conftest.py").write(_pytest._code.Source("""
        def pytest_addoption(parser):
            parser.addoption("--xyz", action="store_true")
    """))
    testdir.makefile(ext=".log", result="")  # Writes result.log
    result = testdir.runpytest("-h", "--resultlog", "result.log")
    result.stdout.fnmatch_lines(["*--xyz*"])
def test_conftest_existing_junitxml(testdir):
    """A pre-existing junit.xml must not prevent conftest options from
    being loaded for --help."""
    x = testdir.mkdir("tests")
    x.join("conftest.py").write(_pytest._code.Source("""
        def pytest_addoption(parser):
            parser.addoption("--xyz", action="store_true")
    """))
    testdir.makefile(ext=".xml", junit="")  # Writes junit.xml
    result = testdir.runpytest("-h", "--junitxml", "junit.xml")
    result.stdout.fnmatch_lines(["*--xyz*"])
def test_conftest_import_order(testdir, monkeypatch):
    """Conftest modules are returned parent-first (root before subdir)."""
    ct1 = testdir.makeconftest("")
    sub = testdir.mkdir("sub")
    ct2 = sub.join("conftest.py")
    ct2.write("")

    def impct(p):
        # Stand-in for _importconftest: return the path itself so the
        # ordering can be asserted on paths directly.
        return p
    conftest = PytestPluginManager()
    conftest._confcutdir = testdir.tmpdir
    monkeypatch.setattr(conftest, '_importconftest', impct)
    assert conftest._getconftestmodules(sub) == [ct1, ct2]
def test_fixture_dependency(testdir, monkeypatch):
    """A fixture that overrides a same-named parent fixture must not
    trigger the parent fixture's own dependencies ('bar' in the test
    package shadows the conftest 'bar' which depends on 'foo')."""
    # NOTE(review): ct1 is immediately reassigned; the makeconftest("")
    # result is unused — looks like leftover scaffolding.
    ct1 = testdir.makeconftest("")
    ct1 = testdir.makepyfile("__init__.py")
    ct1.write("")
    sub = testdir.mkdir("sub")
    sub.join("__init__.py").write("")
    sub.join("conftest.py").write(py.std.textwrap.dedent("""
        import pytest

        @pytest.fixture
        def not_needed():
            assert False, "Should not be called!"

        @pytest.fixture
        def foo():
            assert False, "Should not be called!"

        @pytest.fixture
        def bar(foo):
            return 'bar'
    """))
    subsub = sub.mkdir("subsub")
    subsub.join("__init__.py").write("")
    subsub.join("test_bar.py").write(py.std.textwrap.dedent("""
        import pytest

        @pytest.fixture
        def bar():
            return 'sub bar'

        def test_event_fixture(bar):
            assert bar == 'sub bar'
    """))
    result = testdir.runpytest("sub")
    result.stdout.fnmatch_lines(["*1 passed*"])
def test_conftest_found_with_double_dash(testdir):
    """Running a specific node id (path::test) still discovers the
    conftest in the test's directory, so its option shows in --help."""
    sub = testdir.mkdir("sub")
    sub.join("conftest.py").write(py.std.textwrap.dedent("""
        def pytest_addoption(parser):
            parser.addoption("--hello-world", action="store_true")
    """))
    p = sub.join("test_hello.py")
    p.write(py.std.textwrap.dedent("""
        import pytest
        def test_hello(found):
            assert found == 1
    """))
    result = testdir.runpytest(str(p) + "::test_hello", "-h")
    result.stdout.fnmatch_lines("""
        *--hello-world*
    """)
class TestConftestVisibility(object):
    """issue616: which conftest.py files apply depends on the *target*
    path, not on the current working directory the run starts from."""

    def _setup_tree(self, testdir):  # for issue616
        # example mostly taken from:
        # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
        runner = testdir.mkdir("empty")
        package = testdir.mkdir("package")

        package.join("conftest.py").write(dedent("""\
            import pytest
            @pytest.fixture
            def fxtr():
                return "from-package"
        """))
        package.join("test_pkgroot.py").write(dedent("""\
            def test_pkgroot(fxtr):
                assert fxtr == "from-package"
        """))

        swc = package.mkdir("swc")
        swc.join("__init__.py").ensure()
        swc.join("conftest.py").write(dedent("""\
            import pytest
            @pytest.fixture
            def fxtr():
                return "from-swc"
        """))
        swc.join("test_with_conftest.py").write(dedent("""\
            def test_with_conftest(fxtr):
                assert fxtr == "from-swc"
        """))

        snc = package.mkdir("snc")
        snc.join("__init__.py").ensure()
        snc.join("test_no_conftest.py").write(dedent("""\
            def test_no_conftest(fxtr):
                assert fxtr == "from-package"   # No local conftest.py, so should
                                                # use value from parent dir's
        """))
        print ("created directory structure:")
        for x in testdir.tmpdir.visit():
            print ("   " + x.relto(testdir.tmpdir))

        return {
            "runner": runner,
            "package": package,
            "swc": swc,
            "snc": snc}

    # N.B.: "swc" stands for "subdir with conftest.py"
    #       "snc" stands for "subdir no [i.e. without] conftest.py"
    @pytest.mark.parametrize("chdir,testarg,expect_ntests_passed", [
        # Effective target: package/..
        ("runner", "..", 3),
        ("package", "..", 3),
        ("swc", "../..", 3),
        ("snc", "../..", 3),

        # Effective target: package
        ("runner", "../package", 3),
        ("package", ".", 3),
        ("swc", "..", 3),
        ("snc", "..", 3),

        # Effective target: package/swc
        ("runner", "../package/swc", 1),
        ("package", "./swc", 1),
        ("swc", ".", 1),
        ("snc", "../swc", 1),

        # Effective target: package/snc
        ("runner", "../package/snc", 1),
        ("package", "./snc", 1),
        ("swc", "../snc", 1),
        ("snc", ".", 1),
    ])
    @pytest.mark.issue616
    def test_parsefactories_relative_node_ids(
            self, testdir, chdir,testarg, expect_ntests_passed):
        # Same effective target must collect and pass the same tests
        # regardless of which directory the run starts from.
        dirs = self._setup_tree(testdir)
        print("pytest run in cwd: %s" %(
              dirs[chdir].relto(testdir.tmpdir)))
        print("pytestarg        : %s" %(testarg))
        print("expected pass    : %s" %(expect_ntests_passed))
        with dirs[chdir].as_cwd():
            reprec = testdir.inline_run(testarg, "-q", "--traceconfig")
            reprec.assertoutcome(passed=expect_ntests_passed)
@pytest.mark.parametrize('confcutdir,passed,error', [
    ('.', 2, 0),
    ('src', 1, 1),
    (None, 1, 1),
])
def test_search_conftest_up_to_inifile(testdir, confcutdir, passed, error):
    """Test that conftest files are detected only up to a ini file, unless
    an explicit --confcutdir option is given.
    """
    root = testdir.tmpdir
    src = root.join('src').ensure(dir=1)
    src.join('pytest.ini').write('[pytest]')
    src.join('conftest.py').write(_pytest._code.Source("""
        import pytest
        @pytest.fixture
        def fix1(): pass
    """))
    src.join('test_foo.py').write(_pytest._code.Source("""
        def test_1(fix1):
            pass
        def test_2(out_of_reach):
            pass
    """))
    root.join('conftest.py').write(_pytest._code.Source("""
        import pytest
        @pytest.fixture
        def out_of_reach(): pass
    """))

    args = [str(src)]
    if confcutdir:
        # NOTE(review): this *replaces* args, dropping str(src), so the
        # run collects from the cwd instead — confirm that is intended.
        args = ['--confcutdir=%s' % root.join(confcutdir)]
    result = testdir.runpytest(*args)
    match = ''
    if passed:
        match += '*%d passed*' % passed
    if error:
        match += '*%d error*' % error
    result.stdout.fnmatch_lines(match)
def test_issue1073_conftest_special_objects(testdir):
    """issue1073: objects with an exploding __getattr__ defined in a
    conftest must not break collection or the test run."""
    testdir.makeconftest("""
        class DontTouchMe(object):
            def __getattr__(self, x):
                raise Exception('cant touch me')

        x = DontTouchMe()
    """)
    testdir.makepyfile("""
        def test_some():
            pass
    """)
    res = testdir.runpytest()
    assert res.ret == 0
def test_conftest_exception_handling(testdir):
    """An exception raised while importing a conftest aborts the run
    with a usage error (ret == 4) and shows the offending line."""
    testdir.makeconftest('''
        raise ValueError()
    ''')
    testdir.makepyfile("""
        def test_some():
            pass
    """)
    res = testdir.runpytest()
    assert res.ret == 4
    assert 'raise ValueError()' in [line.strip() for line in res.errlines]
def test_hook_proxy(testdir):
    """Session's gethookproxy() would cache conftests incorrectly (#2016).
    It was decided to remove the cache altogether.
    """
    testdir.makepyfile(**{
        'root/demo-0/test_foo1.py': "def test1(): pass",
        'root/demo-a/test_foo2.py': "def test1(): pass",
        'root/demo-a/conftest.py': """
            def pytest_ignore_collect(path, config):
                return True
            """,
        'root/demo-b/test_foo3.py': "def test1(): pass",
        'root/demo-c/test_foo4.py': "def test1(): pass",
    })
    result = testdir.runpytest()
    # demo-a is ignored by its own conftest; the other three collect.
    result.stdout.fnmatch_lines([
        '*test_foo1.py*',
        '*test_foo3.py*',
        '*test_foo4.py*',
        '*3 passed*',
    ])
def test_required_option_help(testdir):
    """`pytest -h` must not fail when a sub-conftest adds a *required*
    option (and the broken root conftest is not loaded for the subdir)."""
    testdir.makeconftest("assert 0")
    x = testdir.mkdir("x")
    x.join("conftest.py").write(_pytest._code.Source("""
        def pytest_addoption(parser):
            parser.addoption("--xyz", action="store_true", required=True)
    """))
    result = testdir.runpytest("-h", x)
    assert 'argument --xyz is required' not in result.stdout.str()
    assert 'general:' in result.stdout.str()
| {
"content_hash": "ebd87367516af356965ac0a464b0aee1",
"timestamp": "",
"source": "github",
"line_count": 463,
"max_line_length": 81,
"avg_line_length": 34.30885529157668,
"alnum_prop": 0.5906200818382121,
"repo_name": "flub/pytest",
"id": "b6fd7814cdcc40fa99a33a99f8d75c30427eb6ee",
"size": "15885",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testing/test_conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "568"
},
{
"name": "Python",
"bytes": "1424649"
}
],
"symlink_target": ""
} |
"""Test the listdescriptors RPC."""
from test_framework.descriptors import (
descsum_create
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class ListDescriptorsTest(BitcoinTestFramework):
    """Functional test for the listdescriptors wallet RPC."""

    def set_test_params(self):
        # A single node is enough; all checks are wallet-local.
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        # Descriptor wallets require wallet + sqlite support.
        self.skip_if_no_wallet()
        self.skip_if_no_sqlite()

    # do not create any wallet by default
    def init_wallet(self, i):
        return

    def run_test(self):
        node = self.nodes[0]
        # No wallet loaded yet: the RPC must fail cleanly.
        assert_raises_rpc_error(-18, 'No wallet is loaded.', node.listdescriptors)

        if self.is_bdb_compiled():
            self.log.info('Test that the command is not available for legacy wallets.')
            node.createwallet(wallet_name='w1', descriptors=False)
            assert_raises_rpc_error(-4, 'listdescriptors is not available for non-descriptor wallets', node.listdescriptors)

        self.log.info('Test the command for empty descriptors wallet.')
        node.createwallet(wallet_name='w2', blank=True, descriptors=True)
        assert_equal(0, len(node.get_wallet_rpc('w2').listdescriptors()['descriptors']))

        self.log.info('Test the command for a default descriptors wallet.')
        node.createwallet(wallet_name='w3', descriptors=True)
        result = node.get_wallet_rpc('w3').listdescriptors()
        assert_equal("w3", result['wallet_name'])
        # Default wallets create 8 active descriptors, 3 of them internal.
        assert_equal(8, len(result['descriptors']))
        assert_equal(8, len([d for d in result['descriptors'] if d['active']]))
        assert_equal(3, len([d for d in result['descriptors'] if 'internal' in d and d['internal']]))
        for item in result['descriptors']:
            assert item['desc'] != ''
            assert item['next'] == 0
            assert item['range'] == [0, 0]
            assert item['timestamp'] is not None

        self.log.info('Test descriptors with hardened derivations are listed in importable form.')
        xprv = 'tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg'
        xpub_acc = 'tpubDCMVLhErorrAGfApiJSJzEKwqeaf2z3NrkVMxgYQjZLzMjXMBeRw2muGNYbvaekAE8rUFLftyEar4LdrG2wXyyTJQZ26zptmeTEjPTaATts'
        hardened_path = '/84\'/1\'/0\''
        wallet = node.get_wallet_rpc('w2')
        wallet.importdescriptors([{
            'desc': descsum_create('wpkh(' + xprv + hardened_path + '/0/*)'),
            'timestamp': 1296688602,
        }])
        # The imported xprv must be reported as an xpub with origin info
        # so the result is directly re-importable.
        expected = {
            'wallet_name': 'w2',
            'descriptors': [
                {'desc': descsum_create('wpkh([80002067' + hardened_path + ']' + xpub_acc + '/0/*)'),
                 'timestamp': 1296688602,
                 'active': False,
                 'range': [0, 0],
                 'next': 0},
            ],
        }
        assert_equal(expected, wallet.listdescriptors())

        self.log.info("Test listdescriptors with encrypted wallet")
        wallet.encryptwallet("pass")
        # Encryption must not change the listed (public) descriptors.
        assert_equal(expected, wallet.listdescriptors())

        self.log.info('Test non-active non-range combo descriptor')
        node.createwallet(wallet_name='w4', blank=True, descriptors=True)
        wallet = node.get_wallet_rpc('w4')
        wallet.importdescriptors([{
            'desc': descsum_create('combo(' + node.get_deterministic_priv_key().key + ')'),
            'timestamp': 1296688602,
        }])
        # Non-ranged descriptors carry no 'range'/'next' fields.
        expected = {
            'wallet_name': 'w4',
            'descriptors': [
                {'active': False,
                 'desc': 'combo(0227d85ba011276cf25b51df6a188b75e604b38770a462b2d0e9fb2fc839ef5d3f)#np574htj',
                 'timestamp': 1296688602},
            ]
        }
        assert_equal(expected, wallet.listdescriptors())
# Script entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    ListDescriptorsTest().main()
| {
"content_hash": "ce7dadd26f8ae23c83175ef5134c82da",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 132,
"avg_line_length": 40.93684210526316,
"alnum_prop": 0.6132681923373617,
"repo_name": "qtumproject/qtum",
"id": "7980a17bdf4dfe68ed334e221bec138f295efda4",
"size": "4103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/wallet_listdescriptors.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "138147"
},
{
"name": "Batchfile",
"bytes": "4488"
},
{
"name": "C",
"bytes": "1292347"
},
{
"name": "C++",
"bytes": "15628054"
},
{
"name": "CMake",
"bytes": "85710"
},
{
"name": "CSS",
"bytes": "111757"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "483"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "695"
},
{
"name": "M4",
"bytes": "229308"
},
{
"name": "Makefile",
"bytes": "165639"
},
{
"name": "Objective-C++",
"bytes": "5478"
},
{
"name": "Perl",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "3201664"
},
{
"name": "QMake",
"bytes": "438"
},
{
"name": "Sage",
"bytes": "39795"
},
{
"name": "Scheme",
"bytes": "25953"
},
{
"name": "Shell",
"bytes": "710214"
}
],
"symlink_target": ""
} |
"""
Defines restful interface to backend
"""
from flickipedia.mysqlio import DataIOMySQL
from flickipedia.config import schema
from flickipedia.config import log
from flickipedia.model.likes import LikeModel
from flickipedia.model.exclude import ExcludeModel
def api_insert_article(wiki_page_id, article_name):
    """Adds an article.

    Placeholder — not implemented yet; always raises.
    """
    raise NotImplementedError()
def api_insert_photo(flickr_id, article_id):
    """Adds a photo.

    Placeholder — not implemented yet; always raises.
    """
    raise NotImplementedError()
def api_set_like(uid, pid, aid):
    """
    Toggles the like-glyph value for the given triplet
    :param uid: Flickipedia user id
    :param pid: Flickipedia photo id
    :param aid: Flickipedia article id
    :return: True on success, False otherwise
    """
    # TODO - USE MODELS
    io = DataIOMySQL()
    io.connect()
    result = api_get_like(uid, pid, aid)
    # toggle and set new value (delete row if it doesn't exist)
    if result:   # io.update false
        try:
            io.delete(result)
        except Exception as e:
            # NOTE(review): e.message is Python-2-only; under Python 3
            # this handler would itself raise AttributeError — confirm
            # the target runtime. Also: returning here skips the
            # connection cleanup below.
            log.error(' "%s"' % e.message)
            return False
    else:   # io.update true
        try:
            io.insert('Like', user_id=uid, photo_id=pid, article_id=aid)
        except Exception as e:
            log.error(' "%s"' % e.message)
            return False
    # Clean up connections
    # NOTE(review): api_get_like uses io.session while cleanup uses
    # io.sess — verify both attributes exist on DataIOMySQL.
    io.sess.close()
    io.engine.dispose()
    return True
def api_get_like(uid, pid, aid):
    """
    Determines the like-glyph value for the given triplet
    :param uid: Flickipedia user id
    :param pid: Flickipedia photo id
    :param aid: Flickipedia article id
    :return: 'Like' row if exists, None otherwise
    """
    # TODO - USE MODELS
    io = DataIOMySQL()
    io.connect()
    schema_obj = getattr(schema, 'Likes')
    # Query to extract
    res = io.session.query(schema_obj, schema_obj.is_set).filter(
        schema_obj.photo_id == pid,
        schema_obj.article_id == aid,
        schema_obj.user_id == uid
    ).limit(1).all()
    # Clean up connections
    # NOTE(review): query above uses io.session but cleanup uses
    # io.sess — verify both attributes exist on DataIOMySQL.
    io.sess.close()
    io.engine.dispose()
    if len(res) == 0:
        log.error('REST \'api_get_glyph\': Couldn\'t find ('
                  'user="%s", photo_id=%s, article_id=%s)' % (
                      uid, pid, aid))
        return None
    else:
        return res[0]
def api_method_endorse_event(article_id, user_id, photo_id):
    """model logic for photo endorse
    :param article_id: article local id
    :param user_id: user id
    :param photo_id: photo local id
    """
    with LikeModel() as model:
        existing = model.get_like(user_id, article_id, photo_id)
        # Toggle semantics: delete when present, insert when absent.
        if existing:
            model.delete_like(existing)
        else:
            model.insert_like(user_id, article_id, photo_id)
def api_method_endorse_fetch(article_id, user_id, photo_id):
    """model logic for photo endorse fetch
    :param article_id: article local id
    :param user_id: user id
    :param photo_id: photo local id
    """
    with LikeModel() as model:
        found = model.get_like(user_id, article_id, photo_id)
    # 1 when the like row exists, 0 otherwise.
    return 1 if found else 0
def api_method_exclude_event(article_id, user_id, photo_id):
    """model logic for photo exclude
    :param article_id: article local id
    :param user_id: user id
    :param photo_id: photo local id
    """
    with ExcludeModel() as model:
        existing = model.get_exclude(user_id, article_id, photo_id)
        # Toggle semantics: delete when present, insert when absent.
        if existing:
            model.delete_exclude(existing)
        else:
            model.insert_exclude(user_id, article_id, photo_id)
def api_method_exclude_fetch(article_id, user_id, photo_id):
    """model logic for photo exclude fetch
    :param article_id: article local id
    :param user_id: user id
    :param photo_id: photo local id
    """
    with ExcludeModel() as model:
        found = model.get_exclude(user_id, article_id, photo_id)
    # 1 when the exclude row exists, 0 otherwise.
    return 1 if found else 0
def api_method_endorse_count(article_id, photo_id):
    """model logic for producing photo endorse count
    :param article_id: article local id
    :param photo_id: photo local id
    """
    with LikeModel() as model:
        count = model.get_likes_article_photo(article_id, photo_id, count=True)
    return count
def api_method_exclude_count(article_id, photo_id):
    """Return the number of exclusions recorded for a photo in an article.

    :param article_id: article local id
    :param photo_id: photo local id
    """
    with ExcludeModel() as em:
        count = em.get_excludes_article_photo(article_id, photo_id, count=True)
    return count
| {
"content_hash": "92d1c8b9814e7dc2f78f40586d3d94e2",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 78,
"avg_line_length": 25.819209039548024,
"alnum_prop": 0.6102844638949672,
"repo_name": "rfaulkner/Flickipedia",
"id": "ace7de16d4d81da19b4e682ce4735555881f6670",
"size": "4570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flickipedia/web/rest.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "522"
},
{
"name": "HTML",
"bytes": "19363"
},
{
"name": "JavaScript",
"bytes": "8286"
},
{
"name": "Python",
"bytes": "85124"
},
{
"name": "Ruby",
"bytes": "4893"
},
{
"name": "Shell",
"bytes": "1402"
}
],
"symlink_target": ""
} |
from PyQt5.QtCore import (QThread, pyqtSignal)
# We spawn another
# thread to set up a "Linguistica component worker" using QThread.
# In this way, this worker (with lots of heavy computational work) works in a
# separate thread that is not the main thread for the GUI, and therefore the
# GUI stays responsive and (most probably) nothing freezes.
class LinguisticaWorker(QThread):
    """Runs the Linguistica components in a background thread.

    The heavy computation happens off the GUI thread (started via
    QThread.start()), so the interface stays responsive. Progress is
    reported back to the GUI through ``progress_signal``.
    """
    # progress_signal is a custom PyQt signal. It has to be defined within
    # this QThread subclass but *outside* __init__.
    #   str -- the progress label text
    #   int -- the progress percentage target, for updating the progress bar
    progress_signal = pyqtSignal(str, int)

    def __init__(self, lexicon, parent=None):
        QThread.__init__(self, parent)
        # The lexicon holds the corpus and parameters; all modules run on it.
        self.lexicon = lexicon

    def run(self):
        # "run" is never called explicitly; it is invoked by the built-in
        # "start" method of this QThread. Each Linguistica component is run
        # in turn for the specified corpus/parameters, and after each step a
        # signal is emitted so the progress dialog label and bar can update.
        self.progress_signal.emit("Extracting word ngrams...", 0)
        # Fix: this step previously called run_phon_module, which did not
        # match the "ngrams" label and duplicated the phonology step below.
        self.lexicon.run_ngram_module(verbose=True)
        self.progress_signal.emit('Computing morphological signatures...', 20)
        self.lexicon.run_signature_module(verbose=True)
        self.progress_signal.emit('Computing tries...', 40)
        self.lexicon.run_trie_module(verbose=True)
        self.progress_signal.emit('Computing phonology...', 60)
        self.lexicon.run_phon_module(verbose=True)
        self.progress_signal.emit('Computing word neighbors...', 80)
        self.lexicon.run_manifold_module(verbose=True)
        self.progress_signal.emit('All done!', 100)

    def get_lexicon(self):
        """Return the (now processed) lexicon object."""
        return self.lexicon
| {
"content_hash": "87aad5d26ba47ea8f4c74d19d1a41cf9",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 38.20754716981132,
"alnum_prop": 0.6879012345679012,
"repo_name": "linguistica-uchicago/lxa5",
"id": "f117976432b7ecfa27e4da38d0b30f7cb2fe0b55",
"size": "2051",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linguistica/gui/worker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "155340"
},
{
"name": "Shell",
"bytes": "307"
}
],
"symlink_target": ""
} |
__source__ = 'https://leetcode.com/problems/sentence-screen-fitting/'
# Time: O(r + n * c)
# Space: O(n)
#
# Description: 418. Sentence Screen Fitting
#
# Given a rows x cols screen and a sentence represented by a list of non-empty words,
# find how many times the given sentence can be fitted on the screen.
#
# Note:
#
# A word cannot be split into two lines.
# The order of words in the sentence must remain unchanged.
# Two consecutive words in a line must be separated by a single space.
# Total words in the sentence won't exceed 100.
# Length of each word is greater than 0 and won't exceed 10.
# 1 <= rows, cols <= 20,000.
# Example 1:
#
# Input:
# rows = 2, cols = 8, sentence = ["hello", "world"]
#
# Output:
# 1
#
# Explanation:
# hello---
# world---
#
# The character '-' signifies an empty space on the screen.
# Example 2:
#
# Input:
# rows = 3, cols = 6, sentence = ["a", "bcd", "e"]
#
# Output:
# 2
#
# Explanation:
# a-bcd-
# e-a---
# bcd-e-
#
# The character '-' signifies an empty space on the screen.
# Example 3:
#
# Input:
# rows = 4, cols = 5, sentence = ["I", "had", "apple", "pie"]
#
# Output:
# 1
#
# Explanation:
# I-had
# apple
# pie-I
# had--
#
# The character '-' signifies an empty space on the screen.
# Hide Company Tags Google
# Hide Tags Dynamic Programming
import unittest
# 460ms 7.30%
class Solution(object):
    def wordsTyping(self, sentence, rows, cols):
        """
        Count how many times `sentence` fits on a rows x cols screen.

        Precomputes, for each starting word index, how many words fit on a
        single row (`wc[i]`); then walks the rows reusing those counts, so
        the total cost is O(n * c) preprocessing + O(r) row walk.

        Fix: replaced Python-2-only `xrange` and the version-ambiguous `/`
        with `range` and explicit floor division `//`. Results are identical
        on Python 2 and the code now also runs on Python 3.

        :type sentence: List[str]
        :type rows: int
        :type cols: int
        :rtype: int
        """
        def words_fit(sentence, start, cols):
            # Number of words that fit on one row of width `cols`, starting
            # at sentence[start] and wrapping around the sentence cyclically.
            if len(sentence[start]) > cols:
                return 0
            s, count = len(sentence[start]), 1
            i = (start + 1) % len(sentence)
            # +1 accounts for the single space separating consecutive words.
            while s + 1 + len(sentence[i]) <= cols:
                s += 1 + len(sentence[i])
                count += 1
                i = (i + 1) % len(sentence)
            return count

        # wc[i]: how many words fit on a row that begins with sentence[i].
        wc = [0] * len(sentence)
        for i in range(len(sentence)):
            wc[i] = words_fit(sentence, i, cols)

        words, start = 0, 0
        for i in range(rows):
            words += wc[start]
            start = (start + wc[start]) % len(sentence)
        # Total words placed, floor-divided by sentence length, gives the
        # number of complete sentence repetitions on screen.
        return words // len(sentence)
class TestMethods(unittest.TestCase):
    # Placeholder suite: asserts a tautology so running the file "passes".
    # No real coverage of Solution.wordsTyping is provided here.
    def test_Local(self):
        self.assertEqual(1, 1)
# Run the placeholder unit tests when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought:
Say sentence=["abc", "de", "f], rows=4, and cols=6.
The screen should look like
"abc de"
"f abc "
"de f "
"abc de"
Consider the following repeating sentence string,
with positions of the start character of each row on the screen.
"abc de f abc de f abc de f ..."
^ ^ ^ ^ ^
0 7 13 18 25
Our goal is to find the start position of the row next to the last row on the screen, which is 25 here.
Since actually it's the length of everything earlier,
we can get the answer by dividing this number by the length of (non-repeated) sentence string.
Note that the non-repeated sentence string has a space at the end; it is "abc de f " in this example.
Here is how we find that position.
In each iteration, we need to adjust start based on spaces either added or removed.
"abc de f abc de f abc de f ..." // start=0
012345 // start=start+cols+adjustment=0+6+1=7 (1 space removed in screen string)
012345 // start=7+6+0=13
012345 // start=13+6-1=18 (1 space added)
012345 // start=18+6+1=25 (1 space added)
012345
#
# 16ms 48.14%
class Solution {
public int wordsTyping(String[] sentence, int rows, int cols) {
String s = String.join(" ", sentence) + " ";
int start = 0, l = s.length();
for (int i = 0; i < rows; i++) {
start += cols;
if (s.charAt(start % l) == ' ') {
start++;
} else {
while (start > 0 && s.charAt((start-1) % l) != ' ') {
start--;
}
}
}
return start / s.length();
}
}
'''
| {
"content_hash": "79164f10a255582360f3d4b022dc30b9",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 106,
"avg_line_length": 25.282208588957054,
"alnum_prop": 0.5600582382916768,
"repo_name": "JulyKikuAkita/PythonPrac",
"id": "64155a0bae55e0bda87033c5c0e414b0fa12c154",
"size": "4121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cs15211/SentenceScreenFitting.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "191608"
},
{
"name": "HTML",
"bytes": "647778"
},
{
"name": "Python",
"bytes": "5429558"
}
],
"symlink_target": ""
} |
"""
eix experiment within gentoo chroot
"""
from plumbum import local
import benchbuild as bb
from benchbuild.projects.gentoo.gentoo import GentooGroup
class Eix(GentooGroup):
    """Represents the package eix from the portage tree."""
    NAME = 'eix'
    DOMAIN = 'app-portage'

    def run_tests(self):
        """Runs runtime tests for eix"""
        # Wrap the installed binary for measurement, then invoke the
        # watched wrapper with a sample query.
        eix_binary = bb.wrap(local.path('/usr/bin/eix'), self)
        watched_eix = bb.watch(eix_binary)
        watched_eix("clang")
| {
"content_hash": "7db6172438de597675760ee2efe05092",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 59,
"avg_line_length": 21.857142857142858,
"alnum_prop": 0.644880174291939,
"repo_name": "PolyJIT/benchbuild",
"id": "ed8da3afb47829f8675d62ef974a8833f1384da8",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchbuild/projects/gentoo/eix.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "932"
},
{
"name": "HTML",
"bytes": "1658"
},
{
"name": "NASL",
"bytes": "2865"
},
{
"name": "PLpgSQL",
"bytes": "76700"
},
{
"name": "Python",
"bytes": "646423"
},
{
"name": "Shell",
"bytes": "1960"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from rest_framework import serializers
from rest_framework.response import Response
from sentry.api.bases.project import ProjectEndpoint, ProjectReleasePermission
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.api.serializers.rest_framework import CommitSerializer, ListField
from sentry.models import Activity, Group, Release, ReleaseFile
from sentry.plugins.interfaces.releasehook import ReleaseHook
from sentry.constants import VERSION_LENGTH
ERR_RELEASE_REFERENCED = "This release is referenced by active issues and cannot be removed."
class ReleaseSerializer(serializers.Serializer):
    """Validates the PUT payload for updating a release.

    Every field is optional; only supplied fields are applied by the view.
    """
    ref = serializers.CharField(max_length=VERSION_LENGTH, required=False)
    url = serializers.URLField(required=False)
    dateReleased = serializers.DateTimeField(required=False)
    commits = ListField(child=CommitSerializer(), required=False, allow_null=False)
class ProjectReleaseDetailsEndpoint(ProjectEndpoint):
    # Handles GET / PUT / DELETE for a single release identified by
    # (organization, project, version).
    permission_classes = (ProjectReleasePermission, )
    def get(self, request, project, version):
        """
        Retrieve a Project's Release
        ````````````````````````````
        Return details on an individual release.
        :pparam string organization_slug: the slug of the organization the
                                          release belongs to.
        :pparam string project_slug: the slug of the project to retrieve the
                                     release of.
        :pparam string version: the version identifier of the release.
        :auth: required
        """
        # Releases are scoped to the organization and linked to projects via
        # a many-to-many, so both filters are required to resolve one row.
        try:
            release = Release.objects.get(
                organization_id=project.organization_id,
                projects=project,
                version=version,
            )
        except Release.DoesNotExist:
            raise ResourceDoesNotExist
        return Response(serialize(release, request.user, project=project))
    def put(self, request, project, version):
        """
        Update a Project's Release
        ``````````````````````````
        Update a release. This can change some metadata associated with
        the release (the ref, url, and dates).
        :pparam string organization_slug: the slug of the organization the
                                          release belongs to.
        :pparam string project_slug: the slug of the project to change the
                                     release of.
        :pparam string version: the version identifier of the release.
        :param string ref: an optional commit reference. This is useful if
                           a tagged version has been provided.
        :param url url: a URL that points to the release. This can be the
                        path to an online interface to the sourcecode
                        for instance.
        :param datetime dateReleased: an optional date that indicates when
                                      the release went live. If not provided
                                      the current time is assumed.
        :auth: required
        """
        try:
            release = Release.objects.get(
                organization_id=project.organization_id,
                projects=project,
                version=version,
            )
        except Release.DoesNotExist:
            raise ResourceDoesNotExist
        # NOTE: request.DATA / serializer.object are the DRF 2.x API; this
        # is a partial update, so absent fields are simply not applied.
        serializer = ReleaseSerializer(data=request.DATA, partial=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
        result = serializer.object
        # Remember whether the release was already live so we can detect the
        # transition to "released" and record an Activity for it below.
        was_released = bool(release.date_released)
        kwargs = {}
        if result.get('dateReleased'):
            kwargs['date_released'] = result['dateReleased']
        if result.get('ref'):
            kwargs['ref'] = result['ref']
        if result.get('url'):
            kwargs['url'] = result['url']
        if kwargs:
            release.update(**kwargs)
        commit_list = result.get('commits')
        if commit_list:
            hook = ReleaseHook(project)
            # TODO(dcramer): handle errors with release payloads
            hook.set_commits(release.version, commit_list)
        # Only emit a RELEASE activity the first time date_released is set.
        if (not was_released and release.date_released):
            Activity.objects.create(
                type=Activity.RELEASE,
                project=project,
                ident=release.version,
                data={'version': release.version},
                datetime=release.date_released,
            )
        return Response(serialize(release, request.user))
    def delete(self, request, project, version):
        """
        Delete a Project's Release
        ``````````````````````````
        Permanently remove a release and all of its files.
        :pparam string organization_slug: the slug of the organization the
                                          release belongs to.
        :pparam string project_slug: the slug of the project to delete the
                                     release of.
        :pparam string version: the version identifier of the release.
        :auth: required
        """
        try:
            release = Release.objects.get(
                organization_id=project.organization_id,
                projects=project,
                version=version,
            )
        except Release.DoesNotExist:
            raise ResourceDoesNotExist
        # we don't want to remove the first_release metadata on the Group, and
        # while people might want to kill a release (maybe to remove files),
        # removing the release is prevented
        if Group.objects.filter(first_release=release).exists():
            return Response({"detail": ERR_RELEASE_REFERENCED}, status=400)
        # TODO(dcramer): this needs to happen in the queue as it could be a long
        # and expensive operation
        file_list = ReleaseFile.objects.filter(
            release=release,
        ).select_related('file')
        # Delete the backing file blobs first, then the rows referencing them.
        for releasefile in file_list:
            releasefile.file.delete()
            releasefile.delete()
        release.delete()
        return Response(status=204)
| {
"content_hash": "9377eea75faf8ffa16dbe11feee590a9",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 93,
"avg_line_length": 38.55,
"alnum_prop": 0.6000324254215305,
"repo_name": "gencer/sentry",
"id": "40fc27ccba56b5c33ea165237a48551f72b364ed",
"size": "6168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/api/endpoints/project_release_details.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "318167"
},
{
"name": "HTML",
"bytes": "281885"
},
{
"name": "JavaScript",
"bytes": "2342569"
},
{
"name": "Lua",
"bytes": "65795"
},
{
"name": "Makefile",
"bytes": "8393"
},
{
"name": "Python",
"bytes": "28161647"
},
{
"name": "Ruby",
"bytes": "4233"
},
{
"name": "Shell",
"bytes": "2149"
}
],
"symlink_target": ""
} |
import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-fexceptions',
'-DNDEBUG',
# THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which
# language to use when compiling headers. So it will guess. Badly. So C++
# headers will be compiled as C headers. You don't want that so ALWAYS specify
# a "-std=<something>".
# For a C project, you would set this to something like 'c99' instead of
# 'c++11'.
'-std=c99',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c',
'-isystem',
'/usr/include',
'-isystem',
'/usr/local/include',
'-I',
'libs',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags.
compilation_database_folder = ''
# With the empty folder path the exists() check below fails, so 'database'
# stays None and the static 'flags' list above is used for every file.
if os.path.exists( compilation_database_folder ):
  database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
  database = None
# Extensions tried when mapping a header file back to a sibling source file.
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
  """Return the absolute directory containing this configuration file."""
  return os.path.dirname(os.path.abspath(__file__))
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
  """Return a copy of *flags* with relative paths rebased onto
  *working_directory*.

  Handles both separated forms ('-I', 'libs') and attached forms
  ('-Ilibs', '--sysroot=sys'). If *working_directory* is falsy the flags
  are returned unchanged (as a new list).
  """
  if not working_directory:
    return list( flags )
  path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
  absolute_flags = []
  next_is_path = False
  for flag in flags:
    updated = flag
    if next_is_path:
      # Previous flag was a bare path option; this flag is its argument.
      next_is_path = False
      if not flag.startswith( '/' ):
        updated = os.path.join( working_directory, flag )
    for prefix in path_flags:
      if flag == prefix:
        next_is_path = True
        break
      if flag.startswith( prefix ):
        # Attached form: rebase the part after the option prefix.
        updated = prefix + os.path.join( working_directory,
                                         flag[ len( prefix ): ] )
        break
    if updated:
      absolute_flags.append( updated )
  return absolute_flags
def IsHeaderFile( filename ):
  """Return True if *filename* carries a C/C++ header extension."""
  return os.path.splitext( filename )[ 1 ] in ( '.h', '.hxx', '.hpp', '.hh' )
def GetCompilationInfoForFile( filename ):
  """Look up compilation info, falling back from a header to its source.

  The compile_commands.json generated by CMake has no entries for header
  files, so for a header we query the database for a sibling source file
  with the same basename and return its flags if any exist.
  """
  if not IsHeaderFile( filename ):
    return database.GetCompilationInfoForFile( filename )
  basename = os.path.splitext( filename )[ 0 ]
  for extension in SOURCE_EXTENSIONS:
    candidate = basename + extension
    if not os.path.exists( candidate ):
      continue
    info = database.GetCompilationInfoForFile( candidate )
    if info.compiler_flags_:
      return info
  return None
# This is the entry point; this function is called by ycmd to produce flags for
# a file.
def FlagsForFile( filename, **kwargs ):
  """Entry point called by ycmd to obtain compile flags for *filename*."""
  if not database:
    # No compilation database: rebase the static flag list onto this
    # script's directory.
    return {
      'flags': MakeRelativePathsInFlagsAbsolute( flags,
                                                 DirectoryOfThisScript() ),
      'do_cache': True
    }
  # Bear in mind that compilation_info.compiler_flags_ does NOT return a
  # python list, but a "list-like" StringVec object.
  compilation_info = GetCompilationInfoForFile( filename )
  if not compilation_info:
    return None
  final_flags = MakeRelativePathsInFlagsAbsolute(
    compilation_info.compiler_flags_,
    compilation_info.compiler_working_dir_ )
  return {
    'flags': final_flags,
    'do_cache': True
  }
| {
"content_hash": "f7c64826e76980082072207443828ffc",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 79,
"avg_line_length": 31.748031496062993,
"alnum_prop": 0.6852678571428571,
"repo_name": "henriquegogo/stobo",
"id": "fed5fe36cb84539ead048e81f41080aa2e9d170c",
"size": "4770",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".ycm_extra_conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "41537"
},
{
"name": "HTML",
"bytes": "273"
},
{
"name": "JavaScript",
"bytes": "2758"
},
{
"name": "Makefile",
"bytes": "407"
},
{
"name": "Python",
"bytes": "4770"
}
],
"symlink_target": ""
} |
from django.db import IntegrityError
from django.db import models
from django.db import transaction
from django.db.models.query import QuerySet
from django.utils import timezone
from openpyxl import load_workbook
class Lote(models.Model):
    # A numbered lot inside an auction group; a lot may be auctioned off
    # (subastado) or written off as scrap (chatarra).
    grupo = models.ForeignKey('Grupo',
                              related_name='lotes',
                              blank=True, null=True)
    numero = models.IntegerField()
    subastado = models.BooleanField(default=False)
    chatarra = models.BooleanField(default=False)
    def __unicode__(self):
        return "%s" % self.numero
    class Meta:
        # A lot number may not repeat within the same group.
        unique_together = ('grupo', 'numero')
class Tipo(models.Model):
    # Catalog of vehicle/good types referenced by Rodado.tipo.
    nombre = models.CharField(max_length=100)
    def __unicode__(self):
        return self.nombre
class RodadoQuerySet(QuerySet):
    def load_bienes(self, path_xlsx):
        """Bulk-load Rodado rows from an .xlsx file.

        Returns the number of rows successfully created; rows failing the
        database constraints are printed and skipped.
        """
        wb = load_workbook(path_xlsx)
        ws = wb.active
        instances = []
        # NOTE(review): "A2:K4" hard-codes exactly three data rows -- confirm
        # whether the whole sheet should be imported instead.
        for row in ws.iter_rows(range_string="A2:K4"):
            try:
                # Each row in its own atomic block so one failure does not
                # roll back previously created rows.
                with transaction.atomic():
                    rodado = Rodado.objects \
                        .create(numero_inventario=row[0].value,
                                descripcion=row[4].value,
                                modelo=row[8].value,
                                chasis=row[9].value,
                                motor=row[10].value,
                                dominio=row[6].value)
            # Python 2 except syntax (file-wide convention). Duplicate
            # numero_inventario (unique) or other integrity errors skip
            # the row. NOTE(review): Rodado.tipo is a required FK that is
            # never supplied here -- verify inserts can actually succeed.
            except IntegrityError, e:
                print e
                continue
            else:
                instances.append(rodado)
        return len(instances)
    def no_subastados(self):
        # Goods not yet auctioned.
        return self.filter(subastado=False)
    def subastados(self):
        # Goods already auctioned.
        return self.filter(subastado=True)
class Rodado(models.Model):
    # A vehicle (or similar good) offered in an auction lot.
    lote = models.ForeignKey(Lote,
                             related_name="bienes",
                             blank=True, null=True)
    tipo = models.ForeignKey(Tipo)
    numero_inventario = models.IntegerField(unique=True)
    descripcion = models.TextField(blank=True, null=True)
    modelo = models.CharField(max_length=50)
    chasis = models.CharField(max_length=50)
    motor = models.CharField(max_length=50)
    dominio = models.CharField(max_length=50)
    marca = models.CharField(max_length=100)
    anio = models.IntegerField("Año", blank=True, null=True)
    # Reserve price vs. final sale price, both default to 0 until set.
    precio_base = models.FloatField(default=0)
    precio_venta = models.FloatField(default=0)
    subastado = models.BooleanField(default=False)
    # Custom queryset adds load_bienes() and (no_)subastados() filters.
    objects = RodadoQuerySet.as_manager()
    def __unicode__(self):
        return "%s %s %s" % (self.chasis, self.motor, self.dominio)
class SubastaManager(models.Manager):
    def get_current(self):
        """Return the latest auction that has not been closed yet."""
        return super(SubastaManager, self).get_queryset() \
            .filter(cerrado_el=None).last()
class Subasta(models.Model):
    """An auction: scheduling data, participants and lifecycle state."""
    numero = models.IntegerField()
    fecha_hora = models.DateTimeField()
    # Set by close(); None while the auction is still open (see get_current).
    cerrado_el = models.DateTimeField(blank=True, null=True)
    decreto = models.CharField(max_length=10)
    domicilio = models.ForeignKey('personas.Domicilio')
    profesionales = models.ManyToManyField('personas.Profesional')
    # NOTE(review): null=True has no effect on ManyToManyField; kept as-is.
    personas = models.ManyToManyField('personas.Persona',
                                      blank=True, null=True)
    # Fix: auto_now/auto_now_add were swapped. created_at must be stamped
    # once at insert time (auto_now_add) and updated_at refreshed on every
    # save (auto_now); the original did the opposite.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    user_updated = models.ForeignKey('users.User', blank=True, null=True)
    objects = SubastaManager()
    def __unicode__(self):
        return "%s" % self.fecha_hora
    def close(self):
        """Mark this auction as closed right now."""
        self.cerrado_el = timezone.now()
        self.save()
    @property
    def lotes(self):
        """All lots belonging to any of this auction's groups."""
        grupos = self.grupos.all()
        return Lote.objects.filter(grupo__in=grupos)
class Grupo(models.Model):
    # A numbered group of lots within an auction, optionally assigned to
    # an auctioneer (martillero).
    subasta = models.ForeignKey(Subasta,
                                related_name='grupos',
                                blank=True, null=True)
    numero = models.IntegerField()
    subastado = models.BooleanField(default=False)
    martillero = models.ForeignKey('personas.Profesional',
                                   blank=True,
                                   null=True)
    def __unicode__(self):
        return "%s" % self.numero
    class Meta:
        # A group number may not repeat within the same auction.
        unique_together = ('subasta', 'numero')
class Acta(models.Model):
    # Sale record: one per lot, linking the winning buyer, the auction and
    # the professionals that certified the sale.
    subasta = models.ForeignKey('subastas.Subasta', related_name='actas')
    lote = models.OneToOneField(Lote)
    persona = models.ForeignKey('personas.Persona')
    profesionales = models.ManyToManyField('personas.Profesional')
    descripcion = models.TextField(blank=True, null=True)
    def __unicode__(self):
        return "Lote: %s comprado por: %s" % (self.lote, self.persona)
| {
"content_hash": "ddde64e3c87c20dbe2f853ca9ddbc7cd",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 73,
"avg_line_length": 32.52413793103448,
"alnum_prop": 0.6030534351145038,
"repo_name": "diegoduncan21/subastas",
"id": "5a9899c3b803be6f61d8bf9ecb38dd8adbc72908",
"size": "4742",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "subastas_repo/subastas/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1906"
},
{
"name": "HTML",
"bytes": "51394"
},
{
"name": "JavaScript",
"bytes": "2387"
},
{
"name": "Python",
"bytes": "66230"
}
],
"symlink_target": ""
} |
__author__ = 'SmileyBarry'
from .decorators import debug
class APIException(Exception):
    """
    Base class for all API exceptions raised by this package.
    """
    pass
class AccessException(APIException):
    """
    You are attempting to query an object that you have no permission to query. (E.g.: private user,
    hidden screenshot, etc.)
    """
    pass
class APIUserError(APIException):
    """
    An API error caused by a user error, like wrong data or just empty results for a query.
    """
    pass
class UserNotFoundError(APIUserError):
    """
    The specified user was not found on the Steam Community. (Bad vanity URL? Non-existent ID?)
    """
    pass
class APIError(APIException):
    """
    An API error signifies a problem with the server, a temporary issue or some other easily-repairable
    problem. Raised by check() for 5xx responses.
    """
    pass
class APIFailure(APIException):
    """
    An API failure signifies a problem with your request (e.g.: invalid API), a problem with your data,
    or any error that resulted from improper use. Raised by check() for unclassified 4xx responses.
    """
    pass
class APIBadCall(APIFailure):
    """
    Your API call doesn't match the API's specification. Check your arguments, service name, command &
    version. (HTTP 400)
    """
    pass
class APINotFound(APIFailure):
    """
    The API you tried to call does not exist. (HTTP 404)
    """
    pass
class APIUnauthorized(APIFailure):
    """
    The API you've attempted to call either requires a key, or your key has insufficient permissions.
    If you're requesting user details, make sure their privacy level permits you to do so, or that you've
    properly authorised said user. (HTTP 401)
    """
    pass
class APIKeyRequired(APIFailure):
    """
    This API requires an API key to call and does not support anonymous requests.
    (HTTP 403 without a key in the request URL.)
    """
    pass
class APIPrivate(APIFailure):
    """
    The API you're trying to call requires a privileged API key. Your existing key is not allowed to call this.
    (HTTP 403 with a key present in the request URL.)
    """
class APIConfigurationError(APIFailure):
    """
    There's either no APIConnection defined, or the parameters given to "APIConnection" or "APIInterface" are
    invalid.
    """
    pass
def check(response):
    """
    Raise the appropriate APIException subclass for a failed HTTP response;
    return normally for any non-4xx/5xx status.

    :type response: requests.Response
    """
    status = response.status_code
    family = status // 100
    if family == 5:
        raise APIError("The API server has encountered an unknown error.")
    if family != 4:
        return
    # 4xx: map each known status onto a specific failure type.
    if status == 404:
        raise APINotFound(
            "The function or service you tried to call does not exist.")
    if status == 401:
        raise APIUnauthorized("This API is not accessible to you.")
    if status == 403:
        request_url = response.request.url
        if '?key=' in request_url or '&key=' in request_url:
            raise APIPrivate(
                "You have no permission to use this API, or your key may be invalid.")
        raise APIKeyRequired("This API requires a key to call.")
    if status == 400:
        raise APIBadCall(
            "The parameters you sent didn't match this API's requirements.")
    raise APIFailure(
        "Something is wrong with your configuration, parameters or environment.")
| {
"content_hash": "efaa51dd71eacf556aea0744a5fb2574",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 111,
"avg_line_length": 27.1900826446281,
"alnum_prop": 0.6425531914893617,
"repo_name": "smiley/steamapi",
"id": "5751a487c432cdd1e16cfa79b57492c10d8101d1",
"size": "3290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "steamapi/errors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61641"
}
],
"symlink_target": ""
} |
import json
import redis
from django.conf import settings
class PushSender(object):
    """Push sender: publishes JSON messages on a Redis pub/sub channel."""
    @property
    def r(self):
        """Init redis connection"""
        # Lazily create and cache one Redis client per sender instance.
        # NOTE(review): redis.Redis() uses library defaults (no host/port
        # taken from settings) -- confirm that is intended.
        if not hasattr(self, '_r'):
            self._r = redis.Redis()
        return self._r
    def send(self, **msg):
        """Send message to redis channel"""
        # Keyword arguments are serialized as one JSON object and published
        # on the channel named by settings.REDIS_PUSH.
        self.r.publish(settings.REDIS_PUSH, json.dumps(msg))
# Module-level singleton used by callers to publish push messages.
sender = PushSender()
| {
"content_hash": "671dc58a91b533dad9d46f5f23a75c98",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 60,
"avg_line_length": 20.38095238095238,
"alnum_prop": 0.5934579439252337,
"repo_name": "nvbn/coviolations_web",
"id": "8e867d5b3241b798c29b24683c35703fe66921e9",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "push/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6025"
},
{
"name": "CoffeeScript",
"bytes": "30912"
},
{
"name": "Puppet",
"bytes": "729"
},
{
"name": "Python",
"bytes": "330675"
},
{
"name": "Shell",
"bytes": "383"
}
],
"symlink_target": ""
} |
'''
Created on Nov 27, 2011
@author: ppa
'''
import unittest
from analyzerdam.googleDAM import GoogleDAM
class testGoogleDam(unittest.TestCase):
    # Integration tests for GoogleDAM.
    # NOTE(review): these hit live Google endpoints and therefore require
    # network access; they are integration tests, not unit tests.
    def setUp(self):
        # No shared fixtures; each test constructs its own DAM.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def testReadQuotes(self):
        # Daily quotes over a fixed date range should yield at least one row.
        dam = GoogleDAM()
        dam.setSymbol('NASDAQ:EBAY')
        data = dam.readQuotes('20131101', '20131110')
        print([str(q) for q in data])
        self.assertNotEqual(0, len(data))
    def testReadTicks(self):
        # Tick data over a fixed date range should yield at least one row.
        dam = GoogleDAM()
        dam.setSymbol('EBAY')
        data = dam.readTicks('20111120', '20111201')
        print(data)
        self.assertNotEqual(0, len(data))
    def testReadFundamental(self):
        # Fundamental data should come back as a non-empty mapping.
        dam = GoogleDAM()
        dam.setSymbol('EBAY')
        keyTimeValueDict = dam.readFundamental()
        print(keyTimeValueDict)
        self.assertNotEqual(0, len(keyTimeValueDict))
| {
"content_hash": "d52be2074d39fcb9142ef23061a010ed",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 53,
"avg_line_length": 24.7027027027027,
"alnum_prop": 0.5951859956236324,
"repo_name": "llazzaro/analyzerdam",
"id": "38d619b40a0117f71bf876ca20bd08f69fd5a4b1",
"size": "914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_google_dam.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1735"
},
{
"name": "Python",
"bytes": "49471"
}
],
"symlink_target": ""
} |
import urllib
import urllib2
import json
import string
import pprint
import random
import socket
def random_str(randomlength = 16):
    """Return a random alphabetic string of the requested length.

    Fix: the previous implementation shuffled string.ascii_letters and took
    a prefix, which silently capped the result at 52 characters and could
    never repeat a letter. Drawing each position independently supports any
    length.

    :param randomlength: number of characters to generate (default 16)
    """
    letters = string.ascii_letters
    return ''.join(random.choice(letters) for _ in range(randomlength))
def get_ip_by_domain(server_domain):
    """Resolve a domain name to its first IP address.

    :param server_domain: host name to resolve
    :return: (0, ip_address) on success, (1, error message) on DNS failure
    """
    try:
        results = socket.getaddrinfo(server_domain, None)
    except socket.error as e:
        return 1, server_domain + ': %s' % e
    # Each getaddrinfo entry is (family, type, proto, canonname, sockaddr);
    # sockaddr[0] is the address string.
    addresses = [entry[4][0] for entry in results]
    print('ips_list[0]:', addresses[0])
    return 0, addresses[0]
def send_subscribe_request(host, port, **args):
    """Send an alarm-subscribe request to the PMS access server.

    Required keyword args: SerialNumber, AuthCode, AppToken.
    Returns (0, response body) on success, (1, error message) on failure.
    """
    # NOTE: Python 2 file -- print(...) with multiple args prints a tuple.
    print(host, port, args['SerialNumber'], args['AuthCode'], args['AppToken'])
    body = {
        'AlarmCenter': {
            'Header': {
                'Version': '1.0',
                'TerminalType': 'Camera',
                'CSeq': '1',
                'MessageType': 'MSG_ALARM_SUBSCRIBE_REQ'
            },
            'Body': {
                'SerialNumber': '%s' % args['SerialNumber'],
                'AuthCode': '%s' % args['AuthCode'],
                'AppToken': '%s' % args['AppToken'],
                'AppType': 'Android',
                'AppLanguage': 'Chinese',
            }
        }
    }
    # NOTE(review): the URL hard-codes the domain while 'host' is only sent
    # as a Host header -- confirm that is intentional.
    url = 'http://access-pms.secu100.net:' + str(port)
    headers = {'Host': host, 'Port': port}
    # req = urllib2.Request(url)
    # data = urllib.urlencode(body)
    # The payload is sent as raw JSON, not form-encoded.
    data = json.dumps(body)
    req = urllib2.Request(url = url, data = data, headers = headers)
    try:
        response = urllib2.urlopen(req)
    # Python 2 except syntax (file-wide convention).
    except socket.error, e:
        return 1, 'urlopen access-pms.secu100.net failed: %s' % e
    # print(response.status)
    # print(response.read())
    return 0, response.read()
def serive_debug_pms_status():
    """Debug helper: resolve the PMS host and fire a subscribe request.

    Returns a status dict on failure; falls through (returns None) when
    every step succeeds.
    NOTE(review): the name is misspelled ("serive") but is referenced by
    the __main__ block, so it is kept as-is.
    """
    exec_ret_dicts = {
        'status': 0,
        'target': 0,
        'errors': ''
    }
    # Random throwaway credentials for the probe request.
    SerialNumber = random_str(16)
    AuthCode = SerialNumber
    AppToken = random_str(32)
    access_pms_server_port = 6602
    access_pms_server_domain= 'access-pms.secu100.net'
    # The ACL endpoints below are currently unused by this probe.
    acl_server_port = 9903
    acl_server_domain = 'auth-alc.secu100.net'
    access_acl_server_port = 6603
    access_acl_server_domain = 'access-alc.secu100.net'
    res_domain = get_ip_by_domain(access_pms_server_domain)
    if res_domain[0] != 0:
        # NOTE(review): 'target' is assigned the status code (res[0]), same
        # as 'status' -- looks suspicious; verify the intended value.
        exec_ret_dicts.update({
            'status': res_domain[0],
            'target': res_domain[0],
            'errors': res_domain[1]
        })
        return exec_ret_dicts
    res_auth = send_subscribe_request(res_domain[1], access_pms_server_port, SerialNumber = SerialNumber, AuthCode = AuthCode, AppToken = AppToken)
    if res_auth[0] != 0:
        exec_ret_dicts.update({
            'status': res_auth[0],
            'target': res_auth[0],
            'errors': res_auth[1]
        })
        return exec_ret_dicts
# Manual smoke test: run the PMS status probe and pretty-print the result.
if __name__ == '__main__':
    pprint.pprint(serive_debug_pms_status())
| {
"content_hash": "054a382da7ab23a778c27ad22a08fba5",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 147,
"avg_line_length": 32.075268817204304,
"alnum_prop": 0.542406972846128,
"repo_name": "louistin/fullstack",
"id": "50e27bd121b770f56b5144247197fc5c0ed16f86",
"size": "3002",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/urllib2/post.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "120813"
},
{
"name": "C++",
"bytes": "261575"
},
{
"name": "HTML",
"bytes": "588772"
},
{
"name": "Lua",
"bytes": "16542"
},
{
"name": "Makefile",
"bytes": "437"
},
{
"name": "OpenEdge ABL",
"bytes": "420649"
},
{
"name": "Python",
"bytes": "57963"
},
{
"name": "Shell",
"bytes": "97"
},
{
"name": "Vim script",
"bytes": "21969"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class AffinityInformation(Model):
    """A locality hint that can be used by the Batch service to select a compute
    node on which to start a task.
    :param affinity_id: An opaque string representing the location of a
    compute node or a task that has run previously. You can pass the
    affinityId of a compute node to indicate that this task needs to run on
    that compute node. Note that this is just a soft affinity. If the target
    node is busy or unavailable at the time the task is scheduled, then the
    task will be scheduled elsewhere.
    :type affinity_id: str
    """
    # msrest serialization metadata: affinity_id must be present.
    _validation = {
        'affinity_id': {'required': True},
    }
    # Maps the Python attribute to its wire (JSON) key and type.
    _attribute_map = {
        'affinity_id': {'key': 'affinityId', 'type': 'str'},
    }
    def __init__(self, affinity_id):
        super(AffinityInformation, self).__init__()
        self.affinity_id = affinity_id
| {
"content_hash": "dfd30f7b42aeaa071621cd3a6075ea57",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 80,
"avg_line_length": 34.44444444444444,
"alnum_prop": 0.6655913978494624,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "0745761d1343768d276b61c4b12ca40495420ae2",
"size": "1404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-batch/azure/batch/models/affinity_information.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
import sys
import json
import logging
import optparse
from bookdbtool.tools import bookDBTool
if __name__ == '__main__':
    # CLI for maintaining the book database: tag management, cleanup, dumps.
    # Log everything (DEBUG and up) to stdout.
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    parser = optparse.OptionParser()
    parser.add_option("-c", "--config", default="./configuration.json", dest="config_filename",
                      help="Configuration file for database access.")
    parser.add_option("-d", "--cleanup", action="store_true",dest="dup",
                      help="Remove duplicate tag entries from Tag table. Set tags to lowercase.")
    parser.add_option("-f", dest="tagattr", nargs=2,
                      help="Enter tag and field values, e.g. -f poetry Poetry. Each occurrence of" + \
                      " (field) in column Category will result in tagging the record with (tag).")
    parser.add_option("-s", "--show", action="store_true", dest="show",
                      help="Show tags and fields.", default=False)
    # NOTE(review): -t's help text duplicates -s's; presumably it should say
    # "show only misspelled tags" — confirm against bookDBTool.show_tags.
    parser.add_option("-t", "--show_only_spell", action="store_true", dest="only_spell", default=False,
                      help="Show tags and fields.")
    parser.add_option("--csv", action="store_true", dest="dumpcsv", default=False,
                      help="CSV dump from pandas")
    parser.add_option("-u", dest="current_update", nargs=2,
                      help="Enter current tag value and updated tag value, e.g. -u poetyr poetry.")
    (options, args) = parser.parse_args()
    # Load DB credentials from the JSON config file.
    with open(options.config_filename, "r") as config_file:
        c = json.load(config_file)
    logging.debug("{}".format(c))
    try:
        UN = c["username"].strip()
        PWD = c["password"].strip()
        DB = c["database"].strip()
        DBHOST = c["host"].strip()
    except KeyError as e:
        # Missing config key: report it and bail out before touching the DB.
        logging.error(e)
        sys.exit()
    bt = bookDBTool(DBHOST, UN, PWD, DB)
    # Each option is independent; several may run in one invocation.
    if (options.tagattr):
        logging.info("Adding tag " + options.tagattr[0] + " to records in category " + options.tagattr[1])
        bt.tag_from_category(options.tagattr[0], options.tagattr[1])
    if (options.current_update):
        logging.info("Updating tag " + options.current_update[0] + " to " + options.current_update[1])
        bt.update_tag_value(options.current_update[0], options.current_update[1])
    if (options.dup):
        logging.info("Updating all tags to lower case...")
        bt.lower_case_tags()
        logging.info("Removing duplicate and null tags...")
        bt.deduplicate_tags()
    if (options.show or options.only_spell):
        logging.info("Tags:")
        bt.show_tags(only_spell=options.only_spell)
        logging.info("Locations:")
        bt.show_locations()
    if (options.dumpcsv):
        df = bt.get_dataframe()
        print(df.to_csv())
    bt.close()
| {
"content_hash": "7c2b24af3f1037bdac069afc7dc39ecb",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 106,
"avg_line_length": 44.63333333333333,
"alnum_prop": 0.6194921583271098,
"repo_name": "DrSkippy/php_books_database",
"id": "54572228e104bcb2177433fcad027ef516c243fd",
"size": "2860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/bin/booktool.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "6888"
},
{
"name": "Dockerfile",
"bytes": "1022"
},
{
"name": "HTML",
"bytes": "5850"
},
{
"name": "Hack",
"bytes": "323"
},
{
"name": "JavaScript",
"bytes": "11226"
},
{
"name": "Jupyter Notebook",
"bytes": "2453617"
},
{
"name": "PHP",
"bytes": "29645"
},
{
"name": "Python",
"bytes": "52608"
},
{
"name": "Shell",
"bytes": "727"
}
],
"symlink_target": ""
} |
"""
stringarrays.py are a group of helper functions to convert string
input to vector and multivector class function to arrays of SymPy
symbols.
"""
import operator
from sympy.core.compatibility import reduce
from itertools import combinations
from sympy import S, Symbol, Function
from sympy.core.compatibility import range
def str_array(base, n=None):
    """
    Generate one dimensional (list of strings) or two dimensional (list
    of list of strings) string array.
    For one dimensional arrays: -
        base is string of variable names separated by blanks such as
        base = 'a b c' which produces the string list ['a','b','c'] or
        it is a string with no blanks than in conjunction with the
        integer n generates -
            str_array('v',n=-3) = ['v_1','v_2','v_3']
            str_array('v',n=3) = ['v__1','v__2','v__3'].
        In the case of LaTeX printing the '_' would give a subscript and
        the '__' a super script.
    For two dimensional arrays: -
        base is string where elements are separated by spaces and rows by
        commas so that -
            str_array('a b,c d') = [['a','b'],['c','d']]
    """
    if n is None:
        # No count given: split base itself (2-D when commas are present).
        if ',' in base:
            base_array = []
            base_split = base.split(',')
            for base_arg in base_split:
                base_array.append(list(filter(lambda x: x != '', base_arg.split(' '))))
            return base_array
        else:
            return base.split(' ')
    result = []
    if isinstance(n, str):
        # n is a string of explicit indices: '-1 2' -> subscripts ('_'),
        # '+1 2' -> superscripts ('__').
        if n[0] == '-':
            for index in n[1:].split(' '):
                result.append(base + '_' + index)
        if n[0] == '+':
            for index in n[1:].split(' '):
                result.append(base + '__' + index)
        # BUG FIX: must return here.  Previously execution fell through to
        # the numeric branches, and `n > 0` / `range(1, n + 1)` with a
        # string n raised TypeError.
        return result
    # Integer n: positive -> superscripts 1..n, negative -> subscripts 1..-n.
    if n > 0:
        for i in range(1, n + 1):
            result.append(base + '__' + str(i))
    if n < 0:
        for i in range(1, -n + 1):
            result.append(base + '_' + str(i))
    return result
def symbol_array(base, n=None):
    """
    Build the string array from str_array(base, n) and sympify each name
    with S(); the symbols are returned as a tuple.
    """
    return tuple(S(name) for name in str_array(base, n))
def fct_sym_array(str_lst, coords=None):
    """
    Construct list of symbols or functions with names in 'str_lst'. If
    'coords' are given (tuple of symbols) function list constructed,
    otherwise a symbol list is constructed.
    """
    if coords is None:
        return [Symbol(name) for name in str_lst]
    return [Function(name)(*coords) for name in str_lst]
def str_combinations(base, lst, rank=1, mode='_'):
    """
    Construct a list of strings of the form 'base+mode+indexes' where the
    indexes are formed by converting 'lst' to a list of strings and then
    forming the 'indexes' by concatenating combinations of elements from
    'lst' taken 'rank' at a time.
    """
    labels = [str(item) for item in lst]
    return [base + mode + ''.join(combo)
            for combo in combinations(labels, rank)]
| {
"content_hash": "5106609e24c61165bf9e7eb62b1c60ac",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 88,
"avg_line_length": 31.066037735849058,
"alnum_prop": 0.5769814758578804,
"repo_name": "Sumith1896/sympy",
"id": "34ebca298462e850f973878c2e0c43f042ac6a91",
"size": "3327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/galgebra/stringarrays.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5094"
},
{
"name": "Python",
"bytes": "13599543"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "4008"
},
{
"name": "TeX",
"bytes": "32356"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from django.db import models
class Language(models.Model):
    # Human-readable language name, e.g. "Spanish".
    name = models.CharField(max_length=50)
    # Two-letter code — presumably ISO 639-1; confirm against stored data.
    code = models.CharField(max_length=2)
    def __unicode__(self):
        # Python 2 Django: string shown in the admin and shell.
        return self.name
# Role choices for UserProfile.user_type: (stored value, human-readable label).
USER_TYPES = (
    ('translator', 'Translator'),
    ('doctor', 'Doctor'),
)
class UserProfile(models.Model):
    # One profile per auth user (unique FK — old-Django one-to-one idiom).
    user = models.ForeignKey(User, unique=True)
    user_type = models.CharField(max_length=50, choices=USER_TYPES)
    # Languages this user works in; optional.
    languages = models.ManyToManyField(Language, blank=True)
    def __unicode__(self):
        return unicode(self.user)
class PhoneNumber(models.Model):
    # Inbound phone number; each number is bound to exactly one language.
    number = models.CharField(max_length=50, unique=True)
    language = models.ForeignKey(Language)
    def __unicode__(self):
        return self.number
class Question(models.Model):
    # A recorded caller question moving through the translate -> answer ->
    # callback pipeline; the is_* flags track each stage.
    to_number = models.CharField(max_length=50)
    from_number = models.CharField(max_length=50)
    language = models.ForeignKey(Language)
    timestamp = models.DateTimeField(auto_now_add=True)
    # URL of the caller's voice recording (e.g. from the telephony provider).
    recording_url = models.CharField(max_length=250)
    translation = models.TextField(blank=True)
    answer = models.TextField(blank=True)
    # Staff assignments; nullable until someone picks the question up.
    translator = models.ForeignKey(User, related_name='translating_questions', null=True, blank=True)
    doctor = models.ForeignKey(User, related_name='doctor_questions', null=True, blank=True)
    is_translated = models.BooleanField(default=False)
    is_answered = models.BooleanField(default=False)
    is_calledback = models.BooleanField(default=False)
    def __unicode__(self):
        return "%s - %s" % (self.language, self.timestamp)
| {
"content_hash": "798746b47b791a98826b9d2bb6d13254",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 101,
"avg_line_length": 27.77777777777778,
"alnum_prop": 0.644,
"repo_name": "robboyle/asesor",
"id": "44eea14253137495ce8ce0872f2024ffd8f79670",
"size": "1750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "asesor/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "79498"
},
{
"name": "Python",
"bytes": "22652"
}
],
"symlink_target": ""
} |
import random
import sqlite3
from flask import (
Flask,
g,
render_template,
request,
)
from config import DATABASE
app = Flask(__name__)
app.config.from_object('config')
def get_db():
    """Return the per-app-context SQLite connection, opening it on first use."""
    conn = getattr(g, '_database', None)
    if conn is None:
        conn = g._database = sqlite3.connect(DATABASE)
    return conn
@app.teardown_appcontext
def close_connection(exception):
    """Close the per-context DB connection, if one was ever opened."""
    conn = getattr(g, '_database', None)
    if conn is not None:
        conn.close()
def dict_factory(cursor, row):
# factory for results from the db
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def get_random_row(cur, table):
    # returns a random row from the given table
    # NOTE(review): assumes ids are contiguous 1..COUNT(*); a gap (deleted
    # row) would make fetchone() return None — confirm the tables are static.
    # Table names are interpolated, not parameterized; all call sites pass
    # hard-coded names, so no user input reaches the SQL here.
    l = cur.execute('SELECT COUNT(*) as n FROM %s' % table).fetchone()['n']
    return cur.execute('SELECT * FROM %s WHERE id=?' % table, (random.randint(1, l),)).fetchone()
def generate_religion(cur):
    """Build the template context for the religion section from random rows."""
    template_context = {}
    template_context['religion'] = get_random_row(cur, 'Religion')
    template_context['pantheon'] = get_random_row(cur, 'Pantheon')
    template_context['founder'] = get_random_row(cur, 'Founder')
    # we don't want two of the same follower beliefs
    l = cur.execute('SELECT COUNT(*) as n FROM Follower').fetchone()['n']
    # random.sample guarantees two distinct ids; xrange makes this Python-2-only.
    followers = random.sample(xrange(1, l + 1), 2)
    template_context['follower1'] = cur.execute('SELECT * FROM Follower WHERE id=?', (followers[0],)).fetchone()
    template_context['follower2'] = cur.execute('SELECT * FROM Follower WHERE id=?', (followers[1],)).fetchone()
    template_context['enhancer'] = get_random_row(cur, 'Enhancer')
    return template_context
@app.route('/', methods=['GET'])
def homepage():
    """Render the full randomised page, or only the religion section when
    the request carries ?section=religion."""
    # connect to the db
    cur = get_db().cursor()
    cur.row_factory = dict_factory
    template_context = {}
    section = request.args.get('section', '')
    if section == 'religion':
        # return only the religion section
        template_context = generate_religion(cur)
        return render_template('religion.html', **template_context)
    else:
        # return the whole page
        # randomise all the things
        template_context['civ'] = get_random_row(cur, 'Civ')
        template_context['victory'] = get_random_row(cur, 'Victory')
        template_context.update(generate_religion(cur))
        template_context['policy_tree'] = get_random_row(cur, 'PolicyTree')
        template_context['ideology'] = get_random_row(cur, 'Ideology')
        return render_template('main.html', **template_context)
if __name__ == '__main__':
    # Development entry point; use a WSGI server in production.
    app.run()
| {
"content_hash": "330541bd53c14b34c9c89e914becfa31",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 112,
"avg_line_length": 28.68888888888889,
"alnum_prop": 0.6363284275755229,
"repo_name": "Chybby/civultimatebravery",
"id": "b5a5bca7c28e1e1e65318842d1b97a14729699f7",
"size": "2632",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "civultimatebravery.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3009"
},
{
"name": "HTML",
"bytes": "13107"
},
{
"name": "Python",
"bytes": "42267"
}
],
"symlink_target": ""
} |
import json
import os.path
import sys
import charliecloud as ch
import build_cache as bu
## Constants ##
# Internal library of manifests, e.g. for "FROM scratch" (issue #1013).
# Image references found here are resolved without contacting any registry.
manifests_internal = {
   "scratch": {  # magic empty image
      "schemaVersion": 2,
      "config": { "digest": None },
      "layers": []
   }
}
## Main ##
def main(cli):
   # Entry point for "ch-image pull": parse refs, report what we're doing,
   # then let the build cache drive the eager pull.
   # Set things up.
   src_ref = ch.Image_Ref(cli.source_ref)
   dst_ref = src_ref if cli.dest_ref is None else ch.Image_Ref(cli.dest_ref)
   if (cli.parse_only):
      # --parse-only: just show how the reference was parsed and stop.
      print(src_ref.as_verbose_str)
      ch.exit(0)
   dst_img = ch.Image(dst_ref)
   ch.INFO("pulling image: %s" % src_ref)
   if (src_ref != dst_ref):
      ch.INFO("destination: %s" % dst_ref)
   ch.INFO("requesting arch: %s" % ch.arch)
   bu.cache.pull_eager(dst_img, src_ref, cli.last_layer)
   ch.done_notify()
## Classes ##
class Image_Puller:
   """Downloads an image's fat (multi-arch) manifest, skinny manifest,
      config blob, and layer tarballs from a registry into the download
      cache, and can unpack them into an Image."""

   __slots__ = ("architectures",  # key: architecture, value: manifest digest
                "config_hash",    # digest of the config blob (None if unknown)
                "digests",        # key: architecture, value: raw hex digest
                "image",          # destination ch.Image
                "layer_hashes",   # layer digests, bottom layer first
                "registry",       # ch.Registry_HTTP session
                "sid_input",      # serialized manifest, for State_ID hashing
                "src_ref")        # source image reference

   def __init__(self, image, src_ref):
      self.architectures = None
      self.config_hash = None
      self.digests = dict()
      self.image = image
      self.layer_hashes = None
      self.registry = ch.Registry_HTTP(src_ref)
      self.sid_input = None
      self.src_ref = src_ref

   @property
   def config_path(self):
      # Cache path of the config blob, or None when no config is known.
      if (self.config_hash is None):
         return None
      else:
         return ch.storage.download_cache // (self.config_hash + ".json")

   @property
   def fatman_path(self):
      # Cache path of the fat (multi-arch) manifest.
      return ch.storage.fatman_for_download(self.image.ref)

   @property
   def manifest_path(self):
      # Cache path of the skinny manifest; internal-library images have none.
      if (str(self.image.ref) in manifests_internal):
         return "[internal library]"
      else:
         if (ch.arch == "yolo" or self.architectures is None):
            digest = None
         else:
            digest = self.architectures[ch.arch]
         return ch.storage.manifest_for_download(self.image.ref, digest)

   def done(self):
      # Release the registry HTTP session.
      self.registry.close()

   def download(self):
      "Download image metadata and layers and put them in the download cache."
      # Spec: https://docs.docker.com/registry/spec/manifest-v2-2/
      ch.VERBOSE("downloading image: %s" % self.image)
      try:
         # fat manifest
         if (ch.arch != "yolo"):
            try:
               self.fatman_load()
               if (not self.architectures.in_warn(ch.arch)):
                  ch.FATAL("requested arch unavailable: %s" % ch.arch,
                           ("available: %s"
                            % " ".join(sorted(self.architectures.keys()))))
            except ch.No_Fatman_Error:
               if (ch.arch == "amd64"):
                  # We're guessing that enough arch-unaware images are amd64 to
                  # barge ahead if requested architecture is amd64.
                  ch.arch = "yolo"
                  ch.WARNING("image is architecture-unaware")
                  ch.WARNING("requested arch is amd64; using --arch=yolo")
               else:
                  ch.FATAL("image is architecture-unaware",
                           "consider --arch=yolo")
         # manifest
         self.manifest_load()
      except ch.Image_Unavailable_Error:
         if (ch.user() == "qwofford"):
            h = "Quincy, use --auth!!"
         else:
            h = "if your registry needs authentication, use --auth"
         ch.FATAL("unauthorized or not in registry: %s" % self.registry.ref, h)
      # config (re-used from the download cache when present and enabled)
      ch.VERBOSE("config path: %s" % self.config_path)
      if (self.config_path is not None):
         if (os.path.exists(self.config_path) and ch.dlcache_p):
            ch.INFO("config: using existing file")
         else:
            self.registry.blob_to_file(self.config_hash, self.config_path,
                                       "config: downloading")
      # layers (same cache re-use policy as the config)
      for (i, lh) in enumerate(self.layer_hashes, start=1):
         path = self.layer_path(lh)
         ch.VERBOSE("layer path: %s" % path)
         msg = "layer %d/%d: %s" % (i, len(self.layer_hashes), lh[:7])
         if (os.path.exists(path) and ch.dlcache_p):
            ch.INFO("%s: using existing file" % msg)
         else:
            self.registry.blob_to_file(lh, path, "%s: downloading" % msg)
      # done
      self.registry.close()

   def error_decode(self, data):
      """Decode first error message in registry error blob and return a tuple
         (code, message)."""
      try:
         code = data["errors"][0]["code"]
         msg = data["errors"][0]["message"]
      except (IndexError, KeyError):
         ch.FATAL("malformed error data", "yes, this is ironic")
      return (code, msg)

   def fatman_load(self):
      """Download the fat manifest and load it. If the image has a fat manifest
         populate self.architectures; this may be an empty dictionary if no
         valid architectures were found.
         Raises:
           * Image_Unavailable_Error if the image does not exist or we are not
             authorized to have it.
           * No_Fatman_Error if the image exists but has no fat manifest,
             i.e., is architecture-unaware. In this case self.architectures is
             set to None."""
      self.architectures = None
      if (str(self.src_ref) in manifests_internal):
         # cheat; internal manifest library matches every architecture
         self.architectures = ch.Arch_Dict({ ch.arch_host: None })
         # Assume that image has no digest. This is a kludge, but it makes my
         # solution to issue #1365 work so ¯\_(ツ)_/¯
         self.digests[ch.arch_host] = "no digest"
         return
      # raises Image_Unavailable_Error if needed
      self.registry.fatman_to_file(self.fatman_path,
                                   "manifest list: downloading")
      fm = self.fatman_path.json_from_file("fat manifest")
      if ("layers" in fm or "fsLayers" in fm):
         # FIXME (issue #1101): If it's a v2 manifest we could use it instead
         # of re-requesting later. Maybe we could here move/copy it over to
         # the skinny manifest path.
         raise ch.No_Fatman_Error()
      if ("errors" in fm):
         # fm is an error blob.
         (code, msg) = self.error_decode(fm)
         if (code == "MANIFEST_UNKNOWN"):
            ch.INFO("manifest list: no such image")
            return
         else:
            ch.FATAL("manifest list: error: %s" % msg)
      self.architectures = ch.Arch_Dict()
      if ("manifests" not in fm):
         ch.FATAL("manifest list has no key 'manifests'")
      # Collect one digest per linux architecture (plus variant, if any).
      for m in fm["manifests"]:
         try:
            if (m["platform"]["os"] != "linux"):
               continue
            arch = m["platform"]["architecture"]
            if ("variant" in m["platform"]):
               arch = "%s/%s" % (arch, m["platform"]["variant"])
            digest = m["digest"]
         except KeyError:
            ch.FATAL("manifest lists missing a required key")
         if (arch in self.architectures):
            ch.FATAL("manifest list: duplicate architecture: %s" % arch)
         self.architectures[arch] = ch.digest_trim(digest)
         self.digests[arch] = digest.split(":")[1]
      if (len(self.architectures) == 0):
         ch.WARNING("no valid architectures found")

   def layer_path(self, layer_hash):
      "Return the path to tarball for layer layer_hash."
      return ch.storage.download_cache // (layer_hash + ".tar.gz")

   def manifest_load(self):
      """Download the manifest file, parse it, and set self.config_hash and
         self.layer_hashes. If the image does not exist,
         exit with error."""
      def bad_key(key):
         # Shared fatal-error helper for missing manifest keys.
         ch.FATAL("manifest: %s: no key: %s" % (self.manifest_path, key))
      self.config_hash = None
      self.layer_hashes = None
      # obtain the manifest
      try:
         # internal manifest library, e.g. for "FROM scratch"
         manifest = manifests_internal[str(self.src_ref)]
         ch.INFO("manifest: using internal library")
      except KeyError:
         # download the file and parse it
         if (ch.arch == "yolo" or self.architectures is None):
            digest = None
         else:
            digest = self.architectures[ch.arch]
         ch.DEBUG("manifest digest: %s" % digest)
         self.registry.manifest_to_file(self.manifest_path,
                                        "manifest: downloading",
                                        digest=digest)
         manifest = self.manifest_path.json_from_file("manifest")
      # validate schema version
      try:
         version = manifest['schemaVersion']
      except KeyError:
         bad_key("schemaVersion")
      if (version not in {1,2}):
         ch.FATAL("unsupported manifest schema version: %s" % repr(version))
      # load config hash
      #
      # FIXME: Manifest version 1 does not list a config blob. It does have
      # things (plural) that look like a config at history/v1Compatibility as
      # an embedded JSON string :P but I haven't dug into it.
      if (version == 1):
         ch.VERBOSE("no config; manifest schema version 1")
         self.config_hash = None
      else:  # version == 2
         try:
            self.config_hash = manifest["config"]["digest"]
            if (self.config_hash is not None):
               self.config_hash = ch.digest_trim(self.config_hash)
         except KeyError:
            bad_key("config/digest")
      # load layer hashes (v1 and v2 use different key names)
      if (version == 1):
         key1 = "fsLayers"
         key2 = "blobSum"
      else:  # version == 2
         key1 = "layers"
         key2 = "digest"
      if (key1 not in manifest):
         bad_key(key1)
      self.layer_hashes = list()
      for i in manifest[key1]:
         if (key2 not in i):
            bad_key("%s/%s" % (key1, key2))
         self.layer_hashes.append(ch.digest_trim(i[key2]))
      if (version == 1):
         # v1 lists layers top-first; normalize to bottom-first.
         self.layer_hashes.reverse()
      # Remember State_ID input. We can't rely on the manifest existing in
      # serialized form (e.g. for internal manifests), so re-serialize.
      self.sid_input = json.dumps(manifest, sort_keys=True)

   def manifest_digest_by_arch(self):
      "Return skinny manifest digest for target architecture."
      # NOTE(review): self.fat_manifest_path is not defined anywhere in this
      # class (the property above is fatman_path), so calling this method
      # would raise AttributeError — confirm whether it is dead code.
      fatman = self.fat_manifest_path.json_from_file()
      arch = None
      digest = None
      variant = None
      try:
         arch, variant = ch.arch.split("/", maxsplit=1)
      except ValueError:
         arch = ch.arch
      if ("manifests" not in fatman):
         ch.FATAL("manifest list has no manifests")
      for k in fatman["manifests"]:
         if (k.get('platform').get('os') != 'linux'):
            continue
         elif (    k.get('platform').get('architecture') == arch
               and (   variant is None
                    or k.get('platform').get('variant') == variant)):
            digest = k.get('digest')
      if (digest is None):
         ch.FATAL("arch not found for image: %s" % arch,
                  'try "ch-image list IMAGE_REF"')
      return digest

   def unpack(self, last_layer=None):
      # Unpack the cached layers into the image and install its metadata.
      layer_paths = [self.layer_path(h) for h in self.layer_hashes]
      self.image.unpack(layer_paths, last_layer)
      self.image.metadata_replace(self.config_path)
      # Check architecture we got. This is limited because image metadata does
      # not store the variant. Move fast and break things, I guess.
      arch_image = self.image.metadata["arch"] or "unknown"
      arch_short = ch.arch.split("/")[0]
      arch_host_short = ch.arch_host.split("/")[0]
      if (arch_image != "unknown" and arch_image != arch_host_short):
         host_mismatch = " (may not match host %s)" % ch.arch_host
      else:
         host_mismatch = ""
      ch.INFO("image arch: %s%s" % (arch_image, host_mismatch))
      if (ch.arch != "yolo" and arch_short != arch_image):
         ch.WARNING("image architecture does not match requested: %s ≠ %s"
                    % (ch.arch, arch_image))
| {
"content_hash": "fd43f7eb0d52d377105b716599a339b3",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 79,
"avg_line_length": 38.27215189873418,
"alnum_prop": 0.5657350752439226,
"repo_name": "hpc/charliecloud",
"id": "421352db49953adced8245e89a4ccc1cb251c511",
"size": "12100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/pull.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "79333"
},
{
"name": "M4",
"bytes": "63756"
},
{
"name": "Makefile",
"bytes": "8117"
},
{
"name": "Python",
"bytes": "272802"
},
{
"name": "Shell",
"bytes": "328524"
}
],
"symlink_target": ""
} |
""" Helpers for the python-quilt test suite """
import unittest
class QuiltTest(unittest.TestCase):
    """ Base class for all TestCases """

    @classmethod
    def suite(cls):
        """Return a TestSuite containing every test of this class."""
        loader = unittest.defaultTestLoader
        bundle = unittest.TestSuite()
        bundle.addTest(loader.loadTestsFromTestCase(cls))
        return bundle

    @classmethod
    def run_tests(cls):
        """Run this class's suite with a plain text runner."""
        unittest.TextTestRunner().run(cls.suite())
class tmp_mapping:
    """ Context manager for temporarily altering a mapping.

    Every key changed via set() is restored to its original value (or
    removed, if it was absent) when the context exits. """

    # Sentinel distinguishing "key was absent" from "value was None".
    _MISSING = object()

    def __init__(self, target):
        self.target = target
        self.orig = dict()

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        while self.orig:
            (key, value) = self.orig.popitem()
            if value is self._MISSING:
                # Key did not exist before; pop() tolerates external deletion.
                self.target.pop(key, None)
            else:
                self.target[key] = value

    def set(self, key, value):
        """Set target[key] = value, remembering the first original value.

        BUG FIX: previously None marked "absent", so a key legitimately
        holding None was deleted on exit instead of restored."""
        self.orig.setdefault(key, self.target.get(key, self._MISSING))
        self.target[key] = value
| {
"content_hash": "e9e5653331815ad732d8048fb66ff2a3",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 76,
"avg_line_length": 24.463414634146343,
"alnum_prop": 0.5852442671984048,
"repo_name": "vadmium/python-quilt",
"id": "52664b3b41aae14d5bd5ed4904df9fadf2da6cc5",
"size": "1255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "119197"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
# Variables ===================================================================
changelog = open('CHANGES.rst').read()
# PyPI long description: README followed by the changelog.
# NOTE(review): the open().read() handles are never closed; harmless in a
# short-lived setup.py, but `with open(...)` would be cleaner.
long_description = "\n\n".join([
    open('README.rst').read(),
    changelog
])
# Functions ===================================================================
def allSame(s):
    """Return True if all elements of *s* are equal (vacuously True when empty).

    BUG FIX: the old body relied on Python 2 `filter` returning a list;
    under Python 3 `not <filter object>` is always False. A list
    comprehension keeps the py2 semantics and works on py3 too.
    """
    return not [x for x in s if x != s[0]]
def hasDigit(s):
    """Return True if any character of *s* is a decimal digit."""
    return any(ch.isdigit() for ch in s)
def getVersion(data):
    """Return the first changelog heading that looks like a version.

    A match is a line that (a) is underlined by a same-length run of one
    repeated character, (b) contains a digit, and (c) contains a dot
    (e.g. ``1.2.3`` followed by ``-----``).

    Raises IndexError when no such heading exists (same exception type as
    the original ``filter(...)[0]`` indexing).

    BUG FIX: the old body used a tuple-parameter lambda (``lambda (x, y):``)
    and indexed the result of ``filter`` — both Python-2-only; this version
    behaves identically on Python 2 and 3.
    """
    lines = data.splitlines()
    candidates = [
        cur for cur, underline in zip(lines, lines[1:])
        if len(cur) == len(underline) and allSame(underline)
        and hasDigit(cur) and "." in cur
    ]
    return candidates[0]
# Actual setup definition =====================================================
setup(
    name='timeout_wrapper',
    # Version string is parsed out of the changelog's underlined heading.
    version=getVersion(changelog),
    description='Timeout decorator with defaults and exceptions.',
    long_description=long_description,
    url='https://github.com/Bystroushaak/timeout_wrapper',
    author='Bystroushaak',
    author_email='bystrousak@kitakitsune.org',
    classifiers=[
        'Intended Audience :: Developers',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        "License :: OSI Approved :: MIT License",
    ],
    license='MIT',
    # src-layout: code lives under src/.
    packages=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
    zip_safe=True,
    # NOTE(review): 'py.test' is not an importable unittest suite path;
    # pytest is normally wired in via pytest-runner — confirm intent.
    test_suite='py.test',
    tests_require=["pytest"],
    extras_require={
        "docs": [
            "sphinx",
            "sphinxcontrib-napoleon",
        ]
    },
)
| {
"content_hash": "6c55b44ba14303cf31f846157fbebaf3",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 79,
"avg_line_length": 25.483870967741936,
"alnum_prop": 0.5278481012658228,
"repo_name": "Bystroushaak/timeout_wrapper",
"id": "a5d13f836c7300bbddc1e4f30a06ec07fc0cdc84",
"size": "1745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "922"
},
{
"name": "Python",
"bytes": "4942"
}
],
"symlink_target": ""
} |
"""Test deprecation of RPC calls."""
from test_framework.test_framework import FujicoinTestFramework
class DeprecatedRpcTest(FujicoinTestFramework):
    """Verify behaviour of RPC methods gated behind -deprecatedrpc flags."""
    def set_test_params(self):
        # Node 0 runs with defaults; node 1 re-enables the deprecated
        # bumpfee behaviour via -deprecatedrpc.
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ['-deprecatedrpc=bumpfee']]
    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # In set_test_params:
        # self.extra_args = [[], ["-deprecatedrpc=generate"]]
        #
        # In run_test:
        # self.log.info("Test generate RPC")
        # assert_raises_rpc_error(-32, 'The wallet generate rpc method is deprecated', self.nodes[0].rpc.generate, 1)
        # self.generate(self.nodes[1], 1)
        self.log.info("No tested deprecated RPC methods")
if __name__ == '__main__':
    # Standard functional-test entry point.
    DeprecatedRpcTest().main()
| {
"content_hash": "734a7f320ce98bebb22405b1dcd54bfe",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 117,
"avg_line_length": 37.72,
"alnum_prop": 0.6341463414634146,
"repo_name": "fujicoin/fujicoin",
"id": "d15abb5338d77777f9433a491c90adaf37a90202",
"size": "1158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/rpc_deprecated.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "Batchfile",
"bytes": "13"
},
{
"name": "C",
"bytes": "1226556"
},
{
"name": "C++",
"bytes": "10236550"
},
{
"name": "CMake",
"bytes": "29182"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "1740"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "547"
},
{
"name": "M4",
"bytes": "221436"
},
{
"name": "Makefile",
"bytes": "147554"
},
{
"name": "Objective-C++",
"bytes": "5500"
},
{
"name": "Python",
"bytes": "2974091"
},
{
"name": "QMake",
"bytes": "438"
},
{
"name": "Sage",
"bytes": "58534"
},
{
"name": "Scheme",
"bytes": "26044"
},
{
"name": "Shell",
"bytes": "168383"
}
],
"symlink_target": ""
} |
import bs4
import sys, os
import json, copy, re
import ExprParser, ExprSemantics
from cgi import escape
from tatl import IR
def run_file(f):
    """Read the template file at path *f* and delegate to run_string."""
    # NOTE(review): run_string is not defined in this module chunk — confirm
    # it exists at runtime.
    # BUG FIX: use a context manager instead of leaking the file handle.
    with open(f) as fh:
        return run_string(fh.read())
def compile(s, source, out='py', warn=None, parser='html.parser'):
    """Return Python that implements the template.

    s      -- template HTML text
    source -- file name; used to derive the generated module's name
    out    -- 'py', 'js', or 'ir' (IR view, for debugging)
    warn   -- optional warning callback forwarded to the Compiler
    parser -- BeautifulSoup parser name
    """
    dom = _ensure_html_root(bs4.BeautifulSoup(s, parser))
    c = Compiler(source, warn)
    c.root(dom)
    c.module.done()
    if out == 'ir':
        return c.module.view()
    assert out in ('py', 'js')
    code = c.module.code(out, IR.CodeState())
    if out == 'js':
        # detect unicode literals in JS
        # BUG FIX: previously searched `out` (the literal string 'js'),
        # making the check a no-op; it must inspect the generated code.
        assert not re.search("""u(['"]).*?\\1""", code)
    return code
def _ensure_html_root(dom):
    """Return a DOM that is guaranteed to have a root-level <html> tag"""
    # Classify top-level nodes.  The exact `type(c) is ...` checks are
    # deliberate: bs4.Comment subclasses NavigableString, so isinstance()
    # would misclassify comments as text.
    nontag = 0; tags = []; comments = []
    for c in dom.contents:
        if type(c) is bs4.NavigableString:
            if c.strip():
                # non-whitespace text outside any tag forces wrapping below
                nontag = True
            tags.append(c)
        elif type(c) is bs4.Tag:
            tags.append(c)
        elif type(c) is bs4.Comment:
            comments.append(c)
        else:
            print "Extra stuff at front", c
    # Wrap everything except the top-level comments in a synthetic <html>
    # unless the document already has exactly one <html> root.
    if nontag or len(tags) != 1 or tags[0].name != 'html':
        new_root_tag = bs4.Tag(name='html')
        new_root_tag.contents = tags
        dom.contents = comments + [new_root_tag]
    return dom
def main():
    # CLI: tatl [-py] [-js] FILE...  Compiles each input to a sibling
    # .py/.js file; '-' reads stdin and writes to stdout.
    args = sys.argv[1:]
    # Flag scan with a side effect: the `in` test checks membership and
    # args.remove() consumes the flag so it is not treated as a filename
    # (remove() returns None, so `not ...` keeps the value truthy).
    opts = {
        t: '-'+t in args and not args.remove('-'+t)
        for t in ('py', 'js')
    }
    if not (opts['py'] or opts['js']):
        opts['py'] = opts['js'] = True
    for inp in args:
        if inp == '-':
            html = sys.stdin.read()
        elif not inp.endswith(('.html', '.tatl')):
            print "Expected .html or .tatl file:", inp
            continue
        else:
            with open(inp) as f:
                html = f.read()
            # '.html' and '.tatl' are both 5 chars incl. the dot, so this
            # keeps the trailing '.' as the extension separator.
            base = inp[:-4]
        for target in 'py', 'js':
            if opts[target]:
                try:
                    code = compile(html, inp, target)
                except:
                    # NOTE(review): debug-only flow — the bare except drops
                    # into pdb and then falls through with `code` unbound
                    # (NameError below); for stdin, `base` is unbound too.
                    # Confirm this is intentional.
                    import traceback, pdb
                    traceback.print_exc()
                    pdb.post_mortem()
                if inp == '-':
                    sys.stdout.write(code)
                else:
                    with open(base + target, 'w') as f:
                        f.write(code)
class Compiler:
def __init__(self, source, warn=None):
modname = os.path.splitext(os.path.basename(source))[0]
self.module = IR.Module(source, modname, IR.Modformat())
self.parser = ExprParser.ExprParser(
parseinfo=True,
semantics=ExprSemantics.ExprSemantics()
)
self.tags = []
self.lastfn = []
self.fn = None
if warn:
self.warn = warn
def root(self, dom):
self.firsttag = 1
self._children(self._Tagstate('[root]', None, None), dom)
    def startdef(self, funcdef):
        # Open a new template function; remember the enclosing one so
        # enddef() can restore it (functions may nest).
        self.lastfn.append(self.fn)
        self.fn = self.module.startdef(funcdef)
    def enddef(self):
        # Close the current function and pop back to its parent.
        self.fn = self.lastfn.pop()
    def tag(self, tag):
        # Compile one bs4 tag: <else> is delegated; <script>/<style> bodies
        # are copied through verbatim; everything else gets a fresh
        # _Tagstate, attribute processing, children, then finalization.
        if tag.name == 'else':
            return self._else(tag)
        ts = self._Tagstate(tag.name, tag.get('id'), self.fn and self.fn.block())
        if tag.name in ('script', 'style'):
            # just output it
            ts.EmitQText(unicode(tag))
            return
        self.tags.append(ts)
        self._process_attrs(tag.attrs, ts)
        self.firsttag = 0
        self._children(ts, tag)
        self._finalize(ts)
        self.tags.pop()
def _default(self, attr, ts, v):
if v: return v
method = getattr(self, '_default_'+attr, lambda ts: v)
return method(ts)
    def _default_for(self, ts):
        # Default loop expression: '.' (the current context).
        return '.'
    def _default_def(self, ts):
        # Default function signature: the tag's name taking '*'.
        return ts.name+'(*)'
def _default_param(self, ts):
return 'inner' if ts.name == 'do' else ts.name
def _default_set(self, ts):
return 'inner' if ts.name == 'do' else ts.name + '|contents'
    def _check_attrs(self, attrs, ts):
        # The first (root) tag implicitly becomes the template's 'html'
        # function and may not carry flow-control attributes.
        # NOTE(review): self.warn is only bound in __init__ when a callback
        # is supplied — confirm a default is defined elsewhere in the class.
        if self.firsttag:
            attrs.setdefault('def', "html(*)")
            for attr in 'if', 'for':
                if attr in attrs:
                    self.warn("Attr not allowed on root tag: "+attr)
                    del attrs[attr]
_attrs = 'def', 'param', 'set', 'use', 'for', 'if', 'process'
def _process_attrs(self, attrs, ts):
attrs = attrs.copy()
self._check_attrs(attrs, ts)
for attr in self._attrs:
if attr not in attrs: continue
v = self._default(attr, ts, attrs.pop(attr))
if attr == 'if' and not v:
self._process_elide(ts, '')
elif attr == 'process':
if v in ['raw']:
ts.process = v
else:
# ignored silently
raise SyntaxError("Invalid value for process= attribute (%r)" % v)
else:
try:
result = self.parser.parse(v, rule_name=attr+'Attr')
except:
raise SyntaxError("Syntax error on <%s %s='%s'>" % (ts.name, attr, v))
getattr(self, '_process_'+attr)(ts, result)
if ts.emit_tag:
ts.EmitQText('<'+ts.name)
for attr, val in attrs.items():
self._emit_attr(ts.for_attr(attr), attr, val)
ts.EmitQText('>')
else:
if attrs:
self.warn("Leftover attrs on <do>")
_boolable = re.compile('\s*\{[^{].*\}\s*$').match
def _emit_attr(self, ts, attr, val):
#import pdb
#pdb.set_trace()
# bool attributes have speical handling when the entire attribute is
# a substitution.
if isinstance(val, list):
val = ' '.join(val)
if ts.bool_attr:
#print 'bool:', ts.name, attr, val
if not val:
ts.EmitQText(' '+attr)
return
elif self._boolable(val):
Top = self.parser.parse(val, rule_name='top')
if Top.boolable():
ts.BoolAttr(attr, Top)
return
ts.EmitQText(' '+attr+'="')
self.parse_text(ts, val)
ts.EmitQText('"')
class _Tagstate:
ret = None
if_pending = 0
elide_pending = 0
end_if = False
bool_attr = False
process = "html"
def __init__(self, name, id, block):
self.emit_tag = name != 'do'
self.name = name
self.id = id
self.block = block
def __getattr__(self, name):
if name[:1] == '_': raise AttributeError
cls = getattr(IR, name)
fn = lambda *args: cls(*args).addto(self.block)
setattr(self, name, fn)
return fn
def copy(self, **kw):
new = copy.copy(self)
new.__dict__.update(kw)
return new
def for_attr(self, attr):
# If the attribute needs special handling,
# return a new Tagstate. Otherwise return self
if attr in ('selected', 'checked', 'disabled'):
return self.copy(bool_attr=True)
return self
def _finalize(self, ts):
if ts.emit_tag:
ts.EmitQText('</%s>' % ts.name)
self._finish_elide(ts)
ts.block.done()
if ts.ret:
self.enddef()
def _finish_elide(self, ts):
if ts.elide_pending:
ts.ElideEnd()
ts.elide_pending = False
def _process_elide(self, ts, result):
ts.ElideStart()
ts.elide_pending = True
def _process_def(self, ts, funcdef):
self.startdef(funcdef)
ts.block = self.fn.block()
ts.ret = True
def _process_set(self, ts, obj):
obj.addto(ts.block)
def _process_if(self, ts, test):
test.addto(ts.block)
ts.if_pending = 1
def _process_param(self, ts, v):
self.fn.add_params(v)
if len(v) == 1:
ts.Asgn('dot', v[0].lvar)
def _process_for(self, ts, obj):
obj.addto(ts.block)
def _process_use(self, ts, ast):
#import pdb
#pdb.set_trace()
ast.addto(ts.block)
def _else(self, tag):
tags = []
for ts in self.tags[::-1]:
if ts.emit_tag:
tags.append(ts.name)
if ts.if_pending or ts.elide_pending:
break
else:
self.warn("No if found for <else>")
return self._children(ts, tag)
if ts.end_if:
self.warn("<else> after <else> ignored")
return self._children(ts, tag)
if tags:
self.warn("<else> synthesized tags: "+str(tags))
endtags = ''.join(['</%s>' % t for t in tags])
starttags = ''.join(['<%s>' % t for t in tags])
ts.EmitQText(endtags)
self._finish_elide(ts)
attrs = tag.attrs
if 'if' in attrs:
test = self.parser.parse(attrs.pop('if'), rule_name='test')
ts.Elif(test)
else:
ts.Else()
ts.end_if = True
if attrs:
self.warn("Extraneous attributes on <else/>")
ts.EmitQText(starttags)
self._children(ts, tag)
def _children(self, ts, tag):
for c in tag.children:
typ = c.__class__
if ts.process == 'raw':
ts.EmitQText(unicode(c))
elif typ is bs4.Tag:
self.tag(c)
elif typ is bs4.NavigableString:
if ts.block is None:
if c.strip():
self.warn("Top-level content ignored: %r" % c.strip())
else:
self.parse_text(ts, c)
elif typ is bs4.Comment and c[:1] == '{':
self._front_matter(ts, c)
elif typ is bs4.Comment and c[:1] != '[':
# comments ignored
pass
elif self.fn is None:
# before first tag, we can't emit
self.warn("Can't emit %s: %s" % (c.__class__.__name__, c))
else:
self.emit_other(ts, c)
def _front_matter(self, ts, c):
# update current fn/module... ideas for keys:
# package, doc, param docs / required, sample data
try:
d = json.loads(c)
except:
self.warn("Front matter cannot be loaded in <%s>" % ts.name)
d = None
def emit_other(self, ts, c):
# Emit other kind of node (CData, PI, )
print "emit_other!", c
text = bs4.BeautifulSoup('')
text.contents.append(c)
text = unicode(text)
ts.EmitQText(text)
def parse_text(self, ts, text):
result = []
ix = text.find('{')
emit = lambda t: ts.EmitQText(escape(t.replace('}}', '}')))
while ix > -1 and ix < len(text) - 2:
if text[ix+1] == '{':
emit(text[:ix+1])
text = text[ix+2:]
ix = text.find('{')
continue
if ix > 0:
emit(text[:ix])
text = text[ix:]
Top = self.parser.parse(text, rule_name='top')
Top.addto(ts.block)
text = Top.rest
ix = text.find('{')
emit(text)
def warn(self, s):
#TODO pass line number
print>>sys.stderr, "*Warning:", s
| {
"content_hash": "53211188f8ed9ed55ec3a075677fbbfb",
"timestamp": "",
"source": "github",
"line_count": 372,
"max_line_length": 94,
"avg_line_length": 31.13978494623656,
"alnum_prop": 0.4855835635359116,
"repo_name": "tln/tatl",
"id": "8db10c9f15de0b71aa6b308909f4ca6fc6b6547d",
"size": "11584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tatl/Compiler.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "10649"
},
{
"name": "CSS",
"bytes": "848"
},
{
"name": "JavaScript",
"bytes": "9217"
},
{
"name": "Python",
"bytes": "117969"
}
],
"symlink_target": ""
} |
import pyterminalsize

# Probe the terminal size via the tput backend directly; an OSError
# (e.g. tput not installed) is reported instead of propagating.
try:
    size = pyterminalsize._from_tput()
except OSError as err:
    print('Caught OSError')
    print(err)
else:
    print(size)
| {
"content_hash": "831331e9ae063b1ede0f86dc3a6af3bf",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 38,
"avg_line_length": 16.25,
"alnum_prop": 0.6923076923076923,
"repo_name": "asottile/pyterminalsize",
"id": "088f881616ef36b0a6c68c7efb805bff56fe7604",
"size": "130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testing/from_tput_prog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4904"
},
{
"name": "Python",
"bytes": "9143"
}
],
"symlink_target": ""
} |
"""Permission and action needs for Invenio."""
from collections import namedtuple
from functools import partial
from itertools import chain
from flask_principal import ActionNeed, Identity, Need
from flask_principal import Permission as _Permission
from .models import ActionRoles, ActionSystemRoles, ActionUsers, get_action_cache_key
from .proxies import current_access
_Need = namedtuple("Need", ["method", "value", "argument"])
ParameterizedActionNeed = partial(_Need, "action")
ParameterizedActionNeed.__doc__ = """A need having the method preset to `"action"` and a parameter.
If it is called with `argument=None` then this need is equivalent
to ``ActionNeed``.
"""
SystemRoleNeed = partial(Need, "system_role")
SystemRoleNeed.__doc__ = """A need with the method preset to `"system_role"`."""
#
# Need instances
#
superuser_access = ActionNeed("superuser-access")
"""Superuser access aciton which allow access to everything."""
any_user = SystemRoleNeed("any_user")
"""Any user system role.
This role is used to assign all possible users (authenticated and guests)
to an action.
"""
authenticated_user = SystemRoleNeed("authenticated_user")
"""Authenticated user system role.
This role is used to assign all authenticated users to an action.
"""
system_process = SystemRoleNeed("system_process")
"""System role for processes initiated by Invenio itself."""
#
# Identities
#
# the primary requirement for the system user's ID is to be unique
# the IDs of users provided by Invenio-Accounts are positive integers
# the ID of an AnonymousIdentity from Flask-Principle is None
# and the documentation for Flask-Principal makes use of strings for some IDs
system_user_id = "system"
"""The user ID of the system itself, used in its Identity."""
system_identity = Identity(system_user_id)
"""Identity used by system processes."""
system_identity.provides.add(system_process)
class _P(namedtuple("Permission", ["needs", "excludes"])):
"""Helper for simple permission updates."""
def update(self, permission):
"""In-place update of permissions."""
self.needs.update(permission.needs)
self.excludes.update(permission.excludes)
class Permission(_Permission):
    """Represents a set of required needs.
    Extends Flask-Principal's :py:class:`flask_principal.Permission` with
    support for loading action grants from the database including caching
    support.
    Essentially the class works as a translation layer that expands action
    needs into a list of user/roles needs. For instance, take the following
    permission:
    .. code-block:: python
        Permission(ActionNeed('my-action'))
    Once the permission is checked with an identity, the class will fetch a
    list of all users and roles that have been granted/denied access to the
    action, and expand the permission into something similar to (depending
    on the state of the database):
    .. code-block:: python
        Permission(UserNeed('1'), RoleNeed('admin'))
    The expansion is cached until the action is modified (e.g. a user is
    granted access to the action). The alternative approach to expanding the
    action need like this class is doing, would be to load the list of allowed
    actions for a user on login and cache the result. However retrieving all
    allowed actions for a user could result in very large lists, whereas
    caching allowed users/roles for an action would usually yield smaller lists
    (especially if roles are used).
    """
    allow_by_default = False
    """If enabled, all permissions are granted when they are not assigned to
    anybody. Disabled by default.
    """
    def __init__(self, *needs):
        r"""Initialize permission.
        :param \*needs: The needs for this permission.
        """
        # NOTE(review): super().__init__ is not called; needs/excludes are
        # exposed via the properties below instead - confirm against
        # flask_principal.Permission internals.
        self._permissions = None
        self.explicit_needs = set(needs)
        # Superusers are implicitly granted every permission.
        self.explicit_needs.add(superuser_access)
        self.explicit_excludes = set()
    @staticmethod
    def _cache_key(action_need):
        """Helper method to generate cache key."""
        return get_action_cache_key(
            action_need.value,
            # ParameterizedActionNeed carries an argument; plain ActionNeed
            # does not.
            action_need.argument if hasattr(action_need, "argument") else None,
        )
    @staticmethod
    def _split_actionsneeds(needs):
        """Split needs into sets of ActionNeed and any other *Need."""
        action_needs, other_needs = set(), set()
        for need in needs:
            if need.method == "action":
                action_needs.add(need)
            else:
                other_needs.add(need)
        return action_needs, other_needs
    def _load_permissions(self):
        """Load permissions for all needs, expanding actions."""
        result = _P(needs=set(), excludes=set())
        # split ActionNeeds and any other Need into separate sets
        action_needs, explicit_needs = self._split_actionsneeds(self.explicit_needs)
        action_excludes, explicit_excludes = self._split_actionsneeds(
            self.explicit_excludes
        )
        # add all explicit needs/excludes to the result permissions
        result.needs.update(explicit_needs)
        result.excludes.update(explicit_excludes)
        # expand all ActionNeeds to get all needs/excludes and add them to the
        # result permissions
        for need in action_needs | action_excludes:
            result.update(self._expand_action(need))
        # "allow_by_default = False" means that when needs are empty,
        # then it should deny access.
        # By default, `flask_principal.Permission.allows` will allow access
        # if needs are empty!
        needs_empty = len(result.needs) == 0
        deny_access_when_empty_needs = not self.allow_by_default
        if needs_empty and deny_access_when_empty_needs:
            # Add at least one dummy need so that it will always deny access
            result.needs.update(action_needs)
        self._permissions = result
    def _expand_action(self, explicit_action):
        """Expand action to user/roles needs and excludes."""
        action = current_access.get_action_cache(self._cache_key(explicit_action))
        if action is None:
            # Cache miss: collect user/role/system-role grants from the
            # database, then cache the expanded result.
            action = _P(needs=set(), excludes=set())
            actionsusers = ActionUsers.query_by_action(explicit_action).all()
            actionsroles = (
                ActionRoles.query_by_action(explicit_action)
                .join(ActionRoles.role)
                .all()
            )
            actionssystem = ActionSystemRoles.query_by_action(explicit_action).all()
            for db_action in chain(actionsusers, actionsroles, actionssystem):
                if db_action.exclude:
                    action.excludes.add(db_action.need)
                else:
                    action.needs.add(db_action.need)
            current_access.set_action_cache(self._cache_key(explicit_action), action)
        return action
    @property
    def needs(self):
        """Return allowed permissions from database.
        :returns: A list of need instances.
        """
        # Recomputed on every access so cache invalidation is picked up.
        self._load_permissions()
        return self._permissions.needs
    @property
    def excludes(self):
        """Return denied permissions from database.
        :returns: A list of need instances.
        """
        self._load_permissions()
        return self._permissions.excludes
| {
"content_hash": "f1975262799aff133a2d89a162dc63e1",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 99,
"avg_line_length": 34.47417840375587,
"alnum_prop": 0.668936402015525,
"repo_name": "inveniosoftware/invenio-access",
"id": "efec9f1c51406653f99c10524202410672857a83",
"size": "7578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "invenio_access/permissions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135407"
},
{
"name": "Shell",
"bytes": "769"
}
],
"symlink_target": ""
} |
# Python 2 module.  Prefer the stdlib json; fall back to a bundled
# simplejson for very old interpreters that lack it.
try:
    import json as _json
except ImportError:
    import sys
    sys.path.append('simplejson-2.3.3')
    import simplejson as _json
import requests as _requests
import urlparse as _urlparse
import random as _random
import base64 as _base64
from ConfigParser import ConfigParser as _ConfigParser
import os as _os
# HTTP header name and MIME type used when inspecting error responses.
_CT = 'content-type'
_AJ = 'application/json'
# URL schemes accepted by the client constructor.
_URL_SCHEME = frozenset(['http', 'https'])
def _get_token(user_id, password,
               auth_svc='https://nexus.api.globusonline.org/goauth/token?' +
               'grant_type=client_credentials'):
    """Fetch a Globus Online access token using HTTP basic credentials.

    Bandaid helper until a full KBase python auth client is released.
    """
    encoded = _base64.encodestring(user_id + ':' + password)
    headers = {'Authorization': 'Basic ' + encoded}
    ret = _requests.get(auth_svc, headers=headers, allow_redirects=True)
    status = ret.status_code
    if 200 <= status <= 299:
        # Success: the token service returns a JSON document containing
        # the access token.
        tok = _json.loads(ret.text)
        return tok['access_token']
    if status == 403:
        raise Exception('Authentication failed: Bad user_id/password ' +
                        'combination for user %s' % (user_id))
    raise Exception(ret.text)
def _read_rcfile(file=_os.environ['HOME'] + '/.authrc'): # @ReservedAssignment
    """Read credentials from the ~/.authrc JSON file, if present.

    Returns a dict restricted to the known credential keys (missing keys
    map to None), or None when the file does not exist or cannot be read.
    """
    # Another bandaid to read in the ~/.authrc file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            with open(file) as authrc:
                rawdata = _json.load(authrc)
                # strip down whatever we read to only what is legit
                authdata = {x: rawdata.get(x) for x in (
                    'user_id', 'token', 'client_secret', 'keyfile',
                    'keyfile_passphrase', 'password')}
        except Exception, e:
            # Best effort: report and fall through with authdata = None.
            print "Error while reading authrc file %s: %s" % (file, e)
    return authdata
def _read_inifile(file=_os.environ.get( # @ReservedAssignment
        'KB_DEPLOYMENT_CONFIG', _os.environ['HOME'] +
        '/.kbase_config')):
    """Read credentials from the [authentication] section of an INI file.

    Defaults to $KB_DEPLOYMENT_CONFIG or ~/.kbase_config.  Returns a dict
    restricted to the known credential keys (absent options map to None),
    or None when the file does not exist or cannot be read.
    """
    # Another bandaid to read in the ~/.kbase_config file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            config = _ConfigParser()
            config.read(file)
            # strip down whatever we read to only what is legit
            authdata = {x: config.get('authentication', x)
                        if config.has_option('authentication', x)
                        else None for x in ('user_id', 'token',
                                            'client_secret', 'keyfile',
                                            'keyfile_passphrase', 'password')}
        except Exception, e:
            # Best effort: report and fall through with authdata = None.
            print "Error while reading INI file %s: %s" % (file, e)
    return authdata
class ServerError(Exception):
    """Error reported by the JSON-RPC server.

    ``data`` carries the JSON-RPC 2.0 error payload, ``error`` the
    JSON-RPC 1.1 equivalent; whichever is present is kept.
    """

    def __init__(self, name, code, message, data=None, error=None):
        self.name = name
        self.code = code
        if message is None:
            self.message = ''
        else:
            self.message = message
        self.data = data or error or ''

    def __str__(self):
        parts = [self.name, ': ', str(self.code), '. ', self.message,
                 '\n', self.data]
        return ''.join(parts)
class _JSONObjectEncoder(_json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
return _json.JSONEncoder.default(self, obj)
class pavel_sdk_test_python(object):
    """JSON-RPC client for the pavel_sdk_test_python service.

    The authorization header is resolved in priority order: explicit
    token, user_id/password, the KB_AUTH_TOKEN environment variable and
    finally (unless ignore_authrc) the ~/.kbase_config INI file or the
    ~/.authrc JSON file.
    """
    def __init__(self, url=None, timeout=30 * 60, user_id=None,
                 password=None, token=None, ignore_authrc=False,
                 trust_all_ssl_certificates=False):
        if url is None:
            raise ValueError('A url is required')
        scheme, _, _, _, _, _ = _urlparse.urlparse(url)
        if scheme not in _URL_SCHEME:
            raise ValueError(url + " isn't a valid http url")
        self.url = url
        self.timeout = int(timeout)
        self._headers = dict()
        self.trust_all_ssl_certificates = trust_all_ssl_certificates
        # token overrides user_id and password
        if token is not None:
            self._headers['AUTHORIZATION'] = token
        elif user_id is not None and password is not None:
            self._headers['AUTHORIZATION'] = _get_token(user_id, password)
        elif 'KB_AUTH_TOKEN' in _os.environ:
            self._headers['AUTHORIZATION'] = _os.environ.get('KB_AUTH_TOKEN')
        elif not ignore_authrc:
            # Fall back to credentials stored in local config files.
            authdata = _read_inifile()
            if authdata is None:
                authdata = _read_rcfile()
            if authdata is not None:
                if authdata.get('token') is not None:
                    self._headers['AUTHORIZATION'] = authdata['token']
                elif(authdata.get('user_id') is not None
                     and authdata.get('password') is not None):
                    self._headers['AUTHORIZATION'] = _get_token(
                        authdata['user_id'], authdata['password'])
        if self.timeout < 1:
            raise ValueError('Timeout value must be at least 1 second')
    def _call(self, method, params, json_rpc_context = None):
        """POST one JSON-RPC 1.1 request and return its 'result' value.

        Raises ServerError for server-reported failures; other non-OK
        statuses raise via requests' raise_for_status().
        """
        arg_hash = {'method': method,
                    'params': params,
                    'version': '1.1',
                    'id': str(_random.random())[2:]
                    }
        if json_rpc_context:
            arg_hash['context'] = json_rpc_context
        body = _json.dumps(arg_hash, cls=_JSONObjectEncoder)
        ret = _requests.post(self.url, data=body, headers=self._headers,
                             timeout=self.timeout,
                             verify=not self.trust_all_ssl_certificates)
        if ret.status_code == _requests.codes.server_error:
            # A 500 response with a JSON body carries a structured error.
            json_header = None  # NOTE(review): assigned but never used
            if _CT in ret.headers:
                json_header = ret.headers[_CT]
            if _CT in ret.headers and ret.headers[_CT] == _AJ:
                err = _json.loads(ret.text)
                if 'error' in err:
                    raise ServerError(**err['error'])
                else:
                    raise ServerError('Unknown', 0, ret.text)
            else:
                raise ServerError('Unknown', 0, ret.text)
        if ret.status_code != _requests.codes.OK:
            ret.raise_for_status()
        ret.encoding = 'utf-8'
        resp = _json.loads(ret.text)
        if 'result' not in resp:
            raise ServerError('Unknown', 0, 'An unknown server error occurred')
        return resp['result']
    def count_contigs(self, workspace_name, contigset_id, json_rpc_context = None):
        """Invoke the service's count_contigs method; returns its first result."""
        if json_rpc_context and type(json_rpc_context) is not dict:
            raise ValueError('Method count_contigs: argument json_rpc_context is not type dict as required.')
        resp = self._call('pavel_sdk_test_python.count_contigs',
                          [workspace_name, contigset_id], json_rpc_context)
        return resp[0]
    def get_person(self, name, json_rpc_context = None):
        """Invoke the service's get_person method; returns its first result."""
        if json_rpc_context and type(json_rpc_context) is not dict:
            raise ValueError('Method get_person: argument json_rpc_context is not type dict as required.')
        resp = self._call('pavel_sdk_test_python.get_person',
                          [name], json_rpc_context)
        return resp[0]
| {
"content_hash": "cd9530fa30eaa98d0927b1d906ee910d",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 109,
"avg_line_length": 40.6,
"alnum_prop": 0.5618500273672687,
"repo_name": "psnovichkov/pavel_sdk_test_python",
"id": "4e032a136e422f99c63d75efdd49472aa30a1bed",
"size": "7524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/pavel_sdk_test_python/pavel_sdk_test_pythonClient.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "10572"
},
{
"name": "JavaScript",
"bytes": "4611"
},
{
"name": "Makefile",
"bytes": "1845"
},
{
"name": "Perl",
"bytes": "12910"
},
{
"name": "Python",
"bytes": "37694"
},
{
"name": "Shell",
"bytes": "1472"
}
],
"symlink_target": ""
} |
"""The source file generator for configuration files."""
import glob
import io
import logging
import os
from yaldevtools.source_generators import interface
class ConfigurationFileGenerator(interface.SourceFileGenerator):
"""Configuration file generator."""
def _GenerateACIncludeM4(
self, project_configuration, template_mappings, output_writer,
output_filename):
"""Generates the acinclude.m4 configuration file.
Args:
project_configuration (ProjectConfiguration): project configuration.
template_mappings (dict[str, str]): template mappings, where the key
maps to the name of a template variable.
output_writer (OutputWriter): output writer.
output_filename (str): path of the output file.
"""
template_directory = os.path.join(self._template_directory, 'acinclude.m4')
library_name = project_configuration.library_name
tools_name = '{0:s}tools'.format(project_configuration.library_name_suffix)
m4_file = '{0:s}.m4'.format(library_name)
m4_file = os.path.join(self._data_directory, 'm4', m4_file)
if os.path.exists(m4_file):
with io.open(m4_file, 'r', encoding='utf8') as file_object:
input_lines = file_object.readlines()
with io.open(output_filename, 'w', encoding='utf8') as file_object:
# Generate the first line
input_lines.pop(0)
file_object.write('dnl Checks for required headers and functions\n')
# Copy the rest of the header
while input_lines:
line = input_lines.pop(0)
file_object.write(line)
if not line.strip():
break
# Find the line with the start of the definition of the
# AX_${library_name}_CHECK_LOCAL macro.
m4_macro_definition = 'AC_DEFUN([AX_{0:s}_CHECK_LOCAL],'.format(
library_name.upper())
macro_start_line_number = None
for line_number, line in enumerate(input_lines):
if line.startswith(m4_macro_definition):
macro_start_line_number = line_number
break
macro_start_line_number -= 1
macro_end_line_number = None
for line_number, line in enumerate(
input_lines[macro_start_line_number + 2:]):
if line.startswith('dnl ') or line.startswith('AC_DEFUN(['):
macro_end_line_number = line_number
break
macro_end_line_number += macro_start_line_number + 2
for _ in range(5):
input_lines.pop(macro_end_line_number - 3)
macro_end_line_number -= 1
# Copy the AX_${library_name}_CHECK_LOCAL macro.
for line in input_lines[macro_start_line_number:macro_end_line_number]:
file_object.write(line)
else:
template_names = ['header.m4', 'check_library.m4']
if project_configuration.HasTools():
template_names.append('check_tools.m4-start')
log_handle_path = os.path.join(tools_name, 'log_handle.c')
if os.path.exists(log_handle_path):
template_names.append('check_tools.m4-log_handle')
mount_tool_name = '{0:s}mount'.format(
project_configuration.library_name_suffix)
mount_tool_path = os.path.join(
tools_name, '{0:s}.c'.format(mount_tool_name))
if os.path.exists(mount_tool_path):
template_names.append('check_tools.m4-mount_tool')
template_names.append('check_tools.m4-end')
template_mappings['library_name'] = library_name
template_mappings['library_name_upper_case'] = library_name.upper()
template_mappings['mount_tool_name'] = mount_tool_name
template_mappings['tools_name'] = tools_name
template_mappings['tools_name_upper_case'] = tools_name.upper()
template_filenames = [
os.path.join(template_directory, template_name)
for template_name in template_names]
self._GenerateSections(
template_filenames, template_mappings, output_writer, output_filename)
del template_mappings['library_name']
del template_mappings['library_name_upper_case']
del template_mappings['mount_tool_name']
del template_mappings['tools_name']
del template_mappings['tools_name_upper_case']
template_mappings['library_name'] = library_name
template_mappings['library_name_upper_case'] = library_name.upper()
if project_configuration.HasTools():
template_filename = 'check_dll_support.m4-tools'
else:
template_filename = 'check_dll_support.m4'
template_filename = os.path.join(template_directory, template_filename)
self._GenerateSection(
template_filename, template_mappings, output_writer,
output_filename, access_mode='a')
del template_mappings['library_name']
del template_mappings['library_name_upper_case']
  def _GenerateAppVeyorYML(
      self, project_configuration, template_mappings, output_writer,
      output_filename):
    """Generates the appveyor.yml configuration file.

    The file is assembled from template snippets in this order:
    environment/matrix, install, build, test, artifacts and deploy
    sections, selected by the project's features (tools, Python module,
    NuGet/PyPI deployment, build dependencies).

    Args:
      project_configuration (ProjectConfiguration): project configuration.
      template_mappings (dict[str, str]): template mappings, where the key
          maps to the name of a template variable.
      output_writer (OutputWriter): output writer.
      output_filename (str): path of the output file.
    """
    template_directory = os.path.join(self._template_directory, 'appveyor.yml')
    # Environment and build-matrix sections.
    template_names = ['environment.yml']
    if project_configuration.deploy_to_nuget:
      template_names.append('environment-nuget.yml')
    if project_configuration.HasPythonModule():
      template_names.append('environment-pypi.yml')
    template_names.append('environment-matrix.yml')
    template_names.append('environment-macos.yml')
    if project_configuration.HasPythonModule():
      template_names.append('environment-macos-python.yml')
    else:
      template_names.append('environment-macos-pkgbuild.yml')
    if project_configuration.HasPythonModule():
      template_names.append('environment-setup_py.yml')
    template_names.append('environment-cygwin.yml')
    # if project_configuration.HasDependencyCrypto():
    # TODO: add environment-cygwin-openssl.yml
    if project_configuration.HasPythonModule():
      template_names.append('environment-cygwin-python.yml')
    if project_configuration.HasTools():
      template_names.append('environment-cygwin-static-executables.yml')
    template_names.append('environment-cygwin64.yml')
    # if project_configuration.HasDependencyCrypto():
    # TODO: add environment-cygwin64-openssl.yml
    if project_configuration.HasPythonModule():
      template_names.append('environment-cygwin64-python.yml')
    if project_configuration.HasTools():
      template_names.append('environment-cygwin64-static-executables.yml')
    template_names.append('environment-mingw-w64.yml')
    if project_configuration.HasTools():
      template_names.append('environment-mingw-w64-static-executables.yml')
    # Install sections, selected per build dependency.
    template_names.append('install-header.yml')
    # TODO: check test more generic.
    if project_configuration.library_name == 'libfsntfs':
      template_names.append('install-testdata.yml')
    if (project_configuration.HasDependencyLex() or
        project_configuration.HasDependencyYacc()):
      template_names.append('install-winflexbison.yml')
    if project_configuration.HasDependencyZlib():
      template_names.append('install-zlib.yml')
    if project_configuration.HasDependencyBzip2():
      template_names.append('install-bzip2.yml')
    if project_configuration.HasDependencyDokan():
      template_names.append('install-dokan.yml')
    template_names.append('install-macos.yml')
    if project_configuration.HasPythonModule():
      template_names.append('install-python.yml')
    template_mappings['pypi_token'] = getattr(
        project_configuration, 'pypi_token_appveyor', '')
    template_filenames = [
        os.path.join(template_directory, template_name)
        for template_name in template_names]
    self._GenerateSections(
        template_filenames, template_mappings, output_writer, output_filename)
    del template_mappings['pypi_token']
    cygwin_build_dependencies = self._GetCygwinBuildDependencies(
        project_configuration)
    if cygwin_build_dependencies:
      cygwin_build_dependencies = ' '.join([
          '-P {0:s}'.format(name) for name in cygwin_build_dependencies])
    # NOTE(review): the mapping is set even when the dependency list is
    # empty (then a non-string value) - confirm the template handles this.
    template_mappings['cygwin_build_dependencies'] = cygwin_build_dependencies
    template_filename = os.path.join(template_directory, 'install-cygwin.yml')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')
    del template_mappings['cygwin_build_dependencies']
    mingw_msys2_build_dependencies = self._GetMinGWMSYS2BuildDependencies(
        project_configuration)
    if mingw_msys2_build_dependencies:
      mingw_msys2_build_dependencies = ' '.join(mingw_msys2_build_dependencies)
    template_mappings['mingw_msys2_build_dependencies'] = (
        mingw_msys2_build_dependencies)
    template_filename = os.path.join(
        template_directory, 'install-mingw-msys2.yml')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')
    del template_mappings['mingw_msys2_build_dependencies']
    # Build, test, artifact and deploy sections.
    # TODO: refactor code above to use template_names
    template_names = []
    template_names.append('build_script-header.yml')
    if project_configuration.deploy_to_nuget:
      template_names.append('build_script-vs2017-nuget.yml')
    else:
      template_names.append('build_script-vs2017.yml')
    template_names.append('build_script-macos.yml')
    if project_configuration.HasPythonModule():
      template_names.append('build_script-python.yml')
    template_names.extend([
        'build_script-footer.yml', 'test_script.yml', 'after_test.yml'])
    if (project_configuration.deploy_to_nuget or
        project_configuration.HasPythonModule()):
      template_names.append('artifacts.yml')
    if project_configuration.deploy_to_nuget:
      template_names.append('artifacts-nuget.yml')
    if project_configuration.HasPythonModule():
      template_names.append('artifacts-pypi.yml')
    template_names.append('deploy_script-header.yml')
    if project_configuration.deploy_to_nuget:
      template_names.append('deploy_script-nuget.yml')
    if project_configuration.HasPythonModule():
      template_names.append('deploy_script-pypi.yml')
    template_names.append('deploy_script-footer.yml')
    template_filenames = [
        os.path.join(template_directory, template_name)
        for template_name in template_names]
    self._GenerateSections(
        template_filenames, template_mappings, output_writer, output_filename,
        access_mode='a')
def _GenerateCodecovYML(
self, project_configuration, template_mappings, output_writer,
output_filename):
"""Generates the .codecov.yml configuration file.
Args:
project_configuration (ProjectConfiguration): project configuration.
template_mappings (dict[str, str]): template mappings, where the key
maps to the name of a template variable.
output_writer (OutputWriter): output writer.
output_filename (str): path of the output file.
"""
template_directory = os.path.join(self._template_directory, '.codecov.yml')
makefile_am_file = self._GetMainMakefileAM(project_configuration)
ignore_paths = list(makefile_am_file.libraries)
ignore_paths.append('tests')
template_mappings['codecov_ignore'] = '\n'.join([
' - "{0:s}/*"'.format(path) for path in sorted(ignore_paths)])
template_filename = os.path.join(
template_directory, 'body.yml')
self._GenerateSection(
template_filename, template_mappings, output_writer, output_filename)
del template_mappings['codecov_ignore']
def _GenerateConfigureAC(
    self, project_configuration, template_mappings, output_writer,
    output_filename):
  """Generates the configure.ac configuration file.

  The file is assembled from per-section templates in the 'configure.ac'
  template directory; most sections are appended (access_mode='a') after
  the initial header sections are written.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    output_writer (OutputWriter): output writer.
    output_filename (str): path of the output file.
  """
  # TODO: change indentation of templates.
  include_header_file = self._GetLibraryIncludeHeaderFile(
      project_configuration)

  makefile_am_file = self._GetMainMakefileAM(project_configuration)

  libraries = list(makefile_am_file.libraries)
  library_dependencies = list(makefile_am_file.library_dependencies)

  # Determine where libcrypto-related dependencies should be inserted:
  # before the first of libcaes/libhmac if either is present, otherwise
  # at the end of the dependency list.
  libcrypto_index = len(library_dependencies)
  if 'libcaes' in library_dependencies:
    libcrypto_index = min(
        libcrypto_index, library_dependencies.index('libcaes'))

  if 'libhmac' in library_dependencies:
    libcrypto_index = min(
        libcrypto_index, library_dependencies.index('libhmac'))

  if project_configuration.HasDependencyCrypto():
    # Only add libcrypto explicitly when neither libcaes nor libhmac
    # already pulled it in (libcrypto_index was left at the list end).
    if libcrypto_index == len(library_dependencies):
      libraries.append('libcrypto')
      library_dependencies.append('libcrypto')

  if 'sgutils' in project_configuration.library_build_dependencies:
    libraries.append('sgutils2')
    library_dependencies.append('sgutils2')

  if 'bzip2' in project_configuration.library_build_dependencies:
    if libcrypto_index < len(library_dependencies):
      libraries.insert(libcrypto_index, 'bzip2')
      library_dependencies.insert(libcrypto_index, 'bzip2')
    else:
      libraries.append('bzip2')
      library_dependencies.append('bzip2')

  # Have zlib checked before libcrypto.
  if project_configuration.HasDependencyZlib():
    if libcrypto_index < len(library_dependencies):
      libraries.insert(libcrypto_index, 'zlib')
      library_dependencies.insert(libcrypto_index, 'zlib')
    else:
      libraries.append('zlib')
      library_dependencies.append('zlib')

  template_directory = os.path.join(self._template_directory, 'configure.ac')

  # Header and feature-check sections, selected by project capabilities.
  template_names = ['header.ac', 'programs.ac-start']

  if os.path.isdir('ossfuzz'):
    template_names.append('programs.ac-ossfuzz')

  template_names.extend([
      'programs.ac-end', 'compiler_language.ac', 'build_features.ac'])

  if project_configuration.HasTools():
    template_names.append('check_static_executables.ac')

  template_names.append('check_winapi.ac')

  if (include_header_file and include_header_file.have_wide_character_type or
      project_configuration.HasTools()):
    template_names.append('check_wide_character_support.ac')

  if project_configuration.HasDebugOutput():
    template_names.append('check_debug_output.ac')

  template_names.extend(['check_types_support.ac', 'check_common_support.ac'])

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename)

  # TODO: refactor code below to use template_names

  # Emit a dependency-check section per local library dependency.
  if library_dependencies:
    for name in library_dependencies:
      # libcaes itself provides the libcrypto check, skip the generic one.
      if (name == 'libcrypto' and
          project_configuration.library_name == 'libcaes'):
        continue

      if name == 'zlib':
        # TODO: make check more generic based on the source itself.
        if project_configuration.library_name == 'libewf':
          template_filename = 'check_zlib_compress.ac'

        # TODO: determine deflate function via configuration setting?
        elif project_configuration.library_name in (
            'libfsapfs', 'libfshfs', 'libfvde', 'libmodi', 'libpff',
            'libvmdk'):
          template_filename = 'check_zlib_uncompress.ac'

        else:
          template_filename = 'check_zlib_inflate.ac'

      else:
        template_filename = 'check_dependency_support.ac'

      template_mappings['local_library_name'] = name
      template_mappings['local_library_name_upper_case'] = name.upper()

      template_filename = os.path.join(template_directory, template_filename)
      self._GenerateSection(
          template_filename, template_mappings, output_writer,
          output_filename, access_mode='a')

      del template_mappings['local_library_name']
      del template_mappings['local_library_name_upper_case']

  template_names = ['check_library_support.ac']

  if project_configuration.HasPythonModule():
    template_names.append('check_python_support.ac')

  if project_configuration.HasJavaBindings():
    template_names.append('check_java_support.ac')

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: refactor code below to use template_names

  # Tools have their own dependency checks (libuuid, libfuse) in addition
  # to those declared in the main Makefile.am.
  if project_configuration.HasTools():
    tools_dependencies = list(makefile_am_file.tools_dependencies)
    if 'uuid' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libuuid')
    if 'fuse' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libfuse')

    if tools_dependencies:
      for name in tools_dependencies:
        template_mappings['local_library_name'] = name
        template_mappings['local_library_name_upper_case'] = name.upper()

        template_filename = os.path.join(
            template_directory, 'check_dependency_support.ac')
        self._GenerateSection(
            template_filename, template_mappings, output_writer,
            output_filename, access_mode='a')

        del template_mappings['local_library_name']
        del template_mappings['local_library_name_upper_case']

    template_filename = os.path.join(
        template_directory, 'check_tools_support.ac')
    self._GenerateSection(
        template_filename, template_mappings, output_writer,
        output_filename, access_mode='a')

  template_names = ['check_dll_support.ac', 'check_tests_support.ac']

  if os.path.isdir('ossfuzz'):
    template_names.append('check_ossfuzz_support.ac')

  template_names.append('compiler_flags.ac')

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: refactor code below to use template_names

  # Build the shell test expression used in the RPM spec "requires" check:
  # dependencies from Makefile.am use "= xyes" (local library present),
  # the synthesized ones use "!= xno" (system library not disabled).
  if library_dependencies:
    local_library_tests = []
    for name in library_dependencies:
      if name in makefile_am_file.library_dependencies:
        local_library_test = 'test "x$ac_cv_{0:s}" = xyes'.format(name)
      else:
        local_library_test = 'test "x$ac_cv_{0:s}" != xno'.format(name)

      local_library_tests.append(local_library_test)

    if 'libcaes' in library_dependencies or 'libhmac' in library_dependencies:
      local_library_tests.append('test "x$ac_cv_libcrypto" != xno')

    template_mappings['local_library_tests'] = ' || '.join(
        local_library_tests)

    template_filename = os.path.join(
        template_directory, 'spec_requires_library.ac')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['local_library_tests']

  if project_configuration.HasTools():
    tools_dependencies = list(makefile_am_file.tools_dependencies)
    if 'crypto' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libcrypto')
    if 'fuse' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libfuse')
    if 'uuid' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libuuid')

    if tools_dependencies:
      local_library_tests = []
      for name in tools_dependencies:
        if name in ('libcrypto', 'libfuse'):
          local_library_test = 'test "x$ac_cv_{0:s}" != xno'.format(name)
        else:
          local_library_test = 'test "x$ac_cv_{0:s}" = xyes'.format(name)

        local_library_tests.append(local_library_test)

      template_mappings['local_library_tests'] = ' || '.join(
          local_library_tests)

      template_filename = os.path.join(
          template_directory, 'spec_requires_tools.ac')
      self._GenerateSection(
          template_filename, template_mappings, output_writer,
          output_filename, access_mode='a')

      del template_mappings['local_library_tests']

  template_names = ['dates.ac', 'config_files_start.ac']

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: refactor code below to use template_names

  if makefile_am_file.library_dependencies:
    for name in makefile_am_file.library_dependencies:
      template_mappings['local_library_name'] = name

      template_filename = os.path.join(
          template_directory, 'config_files_dependency.ac')
      self._GenerateSection(
          template_filename, template_mappings, output_writer,
          output_filename, access_mode='a')

      del template_mappings['local_library_name']

  template_names = ['config_files_library.ac']

  if project_configuration.HasPythonModule():
    template_names.append('config_files_python.ac')

  if project_configuration.HasDotNetBindings():
    template_names.append('config_files_dotnet.ac')

  if project_configuration.HasJavaBindings():
    template_names.append('config_files_java.ac')

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: refactor code below to use template_names

  if project_configuration.HasTools():
    if makefile_am_file.tools_dependencies:
      for name in makefile_am_file.tools_dependencies:
        template_mappings['local_library_name'] = name

        template_filename = os.path.join(
            template_directory, 'config_files_dependency.ac')
        self._GenerateSection(
            template_filename, template_mappings, output_writer,
            output_filename, access_mode='a')

        del template_mappings['local_library_name']

    template_filename = os.path.join(
        template_directory, 'config_files_tools.ac')
    self._GenerateSection(
        template_filename, template_mappings, output_writer,
        output_filename, access_mode='a')

  # TODO: add support for Makefile in documents (libuna)

  template_names = ['config_files_common.ac']

  if os.path.isdir('ossfuzz'):
    template_names.append('config_files_ossfuzz.ac')

  template_names.append('config_files_headers.ac')

  if project_configuration.HasDotNetBindings():
    template_names.append('config_files_dotnet_rc.ac')

  template_names.append('config_files_end.ac')

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: add support for build options configuration

  # Collect (description, configure-variable) pairs for the build notice.
  build_options = []
  for name in libraries:
    if name not in ('bzip2', 'libcrypto', 'zlib'):
      build_options.append((
          '{0:s} support'.format(name), '$ac_cv_{0:s}'.format(name)))

    if name == 'bzip2':
      build_options.append(('BZIP2 compression support', '$ac_cv_bzip2'))

    if name == 'libcaes':
      # Which AES modes are reported depends on the project using libcaes.
      if project_configuration.library_name in ('libbde', 'libluksde'):
        build_options.extend([
            ('AES-CBC support', '$ac_cv_libcaes_aes_cbc'),
            ('AES-ECB support', '$ac_cv_libcaes_aes_ecb'),
            ('AES-XTS support', '$ac_cv_libcaes_aes_xts')])

      elif project_configuration.library_name == 'libewf':
        pass

      elif project_configuration.library_name in ('libfsapfs', 'libfvde'):
        build_options.extend([
            ('AES-ECB support', '$ac_cv_libcaes_aes_ecb'),
            ('AES-XTS support', '$ac_cv_libcaes_aes_xts')])

      elif project_configuration.library_name in ('libmodi', 'libqcow'):
        build_options.append(
            ('AES-CBC support', '$ac_cv_libcaes_aes_cbc'))

    elif name == 'libhmac':
      # TODO: make check more generic based on the source itself.
      if project_configuration.library_name in (
          'libewf', 'libfsapfs', 'libfsext', 'libfsfat', 'libfshfs',
          'libfsntfs', 'libfsxfs', 'libodraw', 'libsmraw'):
        build_options.append(('MD5 support', '$ac_cv_libhmac_md5'))

      if project_configuration.library_name in (
          'libewf', 'libluksde', 'libodraw', 'libsmraw'):
        build_options.append(('SHA1 support', '$ac_cv_libhmac_sha1'))

      if project_configuration.library_name in (
          'libbde', 'libewf', 'libfsapfs', 'libfvde', 'libmodi', 'libodraw',
          'libsmraw'):
        build_options.append(('SHA256 support', '$ac_cv_libhmac_sha256'))

      elif project_configuration.library_name == 'libluksde':
        build_options.extend([
            ('SHA224 support', '$ac_cv_libhmac_sha224'),
            ('SHA256 support', '$ac_cv_libhmac_sha256'),
            ('SHA512 support', '$ac_cv_libhmac_sha512')])

    elif name == 'libfcrypto':
      if project_configuration.library_name == 'libluksde':
        build_options.extend([
            ('ARC4-ECB support', '$ac_cv_libfcrypto'),
            ('Serpent-CBC support', '$ac_cv_libfcrypto'),
            ('Serpent-ECB support', '$ac_cv_libfcrypto')])

    elif name == 'zlib':
      if project_configuration.library_name == 'libewf':
        build_options.append(('ADLER32 checksum support', '$ac_cv_adler32'))

      # TODO: determine deflate function via configuration setting?
      if project_configuration.library_name in (
          'libfsapfs', 'libewf', 'libfvde', 'libmodi', 'libpff', 'libvmdk'):
        value = '$ac_cv_uncompress'
      else:
        value = '$ac_cv_inflate'

      build_options.append(('DEFLATE compression support', value))

  # The crypto libraries report their own algorithm support when they are
  # the project being generated (not a dependency).
  if project_configuration.library_name == 'libcaes':
    build_options.extend([
        ('AES-CBC support', '$ac_cv_libcaes_aes_cbc'),
        ('AES-ECB support', '$ac_cv_libcaes_aes_ecb'),
        ('AES-XTS support', '$ac_cv_libcaes_aes_xts')])

  elif project_configuration.library_name == 'libhmac':
    build_options.extend([
        ('MD5 support', '$ac_cv_libhmac_md5'),
        ('SHA1 support', '$ac_cv_libhmac_sha1'),
        ('SHA224 support', '$ac_cv_libhmac_sha224'),
        ('SHA256 support', '$ac_cv_libhmac_sha256'),
        ('SHA512 support', '$ac_cv_libhmac_sha512')])

  if 'uuid' in project_configuration.tools_build_dependencies:
    build_options.append(('GUID/UUID support', '$ac_cv_libuuid'))

  if 'fuse' in project_configuration.tools_build_dependencies:
    build_options.append(('FUSE support', '$ac_cv_libfuse'))

  # Track the longest description so notice lines can be padded to align
  # their values in one column.
  build_information = []
  maximum_description_length = 0

  for description, value in build_options:
    build_information_tuple = (description, value)
    build_information.append(build_information_tuple)

    maximum_description_length = max(
        maximum_description_length, len(description))

  features_information = []
  if (project_configuration.library_name == 'libcthreads' or
      'libcthreads' in makefile_am_file.libraries):
    description = 'Multi-threading support'
    value = '$ac_cv_libcthreads_multi_threading'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

  if (include_header_file and include_header_file.have_wide_character_type or
      project_configuration.HasTools()):
    description = 'Wide character type support'
    value = '$ac_cv_enable_wide_character_type'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

  if project_configuration.HasTools():
    description = '{0:s} are build as static executables'.format(
        project_configuration.tools_directory)
    value = '$ac_cv_enable_static_executables'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

  if project_configuration.HasPythonModule():
    description = 'Python ({0:s}) support'.format(
        project_configuration.python_module_name)
    value = '$ac_cv_enable_python'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

  if project_configuration.HasDebugOutput():
    description = 'Verbose output'
    value = '$ac_cv_enable_verbose_output'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

    description = 'Debug output'
    value = '$ac_cv_enable_debug_output'
    features_information.append((description, value))

    maximum_description_length = max(
        maximum_description_length, len(description))

  # Assemble the AC_MSG_NOTICE text shown at the end of configure.
  notice_message = []

  if build_information:
    notice_message.append('Building:')

    for description, value in build_information:
      padding_length = maximum_description_length - len(description)
      padding = ' ' * padding_length

      notice_line = ' {0:s}: {1:s}{2:s}'.format(description, padding, value)
      notice_message.append(notice_line)

    notice_message.append('')

  if features_information:
    notice_message.append('Features:')

    for description, value in features_information:
      padding_length = maximum_description_length - len(description)
      padding = ' ' * padding_length

      notice_line = ' {0:s}: {1:s}{2:s}'.format(description, padding, value)
      notice_message.append(notice_line)

  template_filename = os.path.join(template_directory, 'output.ac')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: improve this condition
  if project_configuration.library_name != 'libcerror':
    template_mappings['notice_message'] = '\n'.join(notice_message)

    template_filename = os.path.join(template_directory, 'notice.ac')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['notice_message']
def _GenerateDpkg(
    self, project_configuration, template_mappings, output_writer,
    output_directory):
  """Generates the dpkg packaging files.

  Copies the generic dpkg templates, then generates the 'control' and
  'rules' files based on the project capabilities, and finally the files
  in the dpkg 'source' sub directory.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    output_writer (OutputWriter): output writer.
    output_directory (str): path of the output directory.
  """
  # TODO: add support for projects without Python bindings.
  # TODO: fix lintian issues.

  library_name = project_configuration.library_name

  template_directory = os.path.join(self._template_directory, 'dpkg')

  template_mappings['library_name'] = library_name
  template_mappings['library_name_upper_case'] = library_name.upper()

  for directory_entry in os.listdir(template_directory):
    template_filename = os.path.join(template_directory, directory_entry)
    if not os.path.isfile(template_filename):
      continue

    # 'control' and 'rules' are generated separately below.
    if (directory_entry.startswith('control') or
        directory_entry.startswith('rules')):
      continue

    # Skip .install files for bindings/tools the project does not have.
    if directory_entry.endswith('.install'):
      if (not project_configuration.HasPythonModule() and
          '-python' in directory_entry):
        continue

      if (not project_configuration.HasTools() and
          '-tools' in directory_entry):
        continue

    # Replace the 'libyal' placeholder prefix with the project library name.
    output_filename = directory_entry
    if output_filename.startswith('libyal'):
      output_filename = '{0:s}{1:s}'.format(
          project_configuration.library_name, output_filename[6:])

    output_filename = os.path.join(output_directory, output_filename)
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename)

  dpkg_build_dependencies = self._GetDpkgBuildDependenciesDpkgControl(
      project_configuration)

  template_mappings['dpkg_build_dependencies'] = ', '.join(
      dpkg_build_dependencies)

  template_filename = os.path.join(template_directory, 'control')
  output_filename = os.path.join(output_directory, 'control')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename)

  del template_mappings['dpkg_build_dependencies']

  # Append tools and Python package stanzas to the control file.
  if project_configuration.HasTools():
    template_filename = os.path.join(template_directory, 'control-tools')
    output_filename = os.path.join(output_directory, 'control')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  if project_configuration.HasPythonModule():
    template_filename = os.path.join(template_directory, 'control-python')
    output_filename = os.path.join(output_directory, 'control')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  # Pick the rules template variant matching the project capabilities.
  if (project_configuration.HasPythonModule() and
      project_configuration.HasTools()):
    template_filename = 'rules-with-python-and-tools'
  elif project_configuration.HasPythonModule():
    template_filename = 'rules-with-python'
  elif project_configuration.HasTools():
    template_filename = 'rules-with-tools'
  else:
    template_filename = 'rules'

  template_filename = os.path.join(template_directory, template_filename)
  output_filename = os.path.join(output_directory, 'rules')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename)

  template_directory = os.path.join(
      self._template_directory, 'dpkg', 'source')
  output_directory = os.path.join(output_directory, 'source')

  for directory_entry in os.listdir(template_directory):
    template_filename = os.path.join(template_directory, directory_entry)
    if not os.path.isfile(template_filename):
      continue

    output_filename = os.path.join(output_directory, directory_entry)
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename)

  del template_mappings['library_name']
  del template_mappings['library_name_upper_case']
def _GenerateGitignore(
    self, project_configuration, template_mappings, output_writer,
    output_filename):
  """Generates the .gitignore configuration file.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    output_writer (OutputWriter): output writer.
    output_filename (str): path of the output file.
  """
  include_header_file = self._GetLibraryIncludeHeaderFile(
      project_configuration)

  template_directory = os.path.join(self._template_directory, '.gitignore')

  template_filename = os.path.join(template_directory, 'header')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename)

  template_filename = os.path.join(template_directory, 'library')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  # TODO: add support for lex yacc BUILT_SOURCES

  if project_configuration.HasPythonModule():
    template_filename = os.path.join(template_directory, 'python_module')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  if project_configuration.HasDotNetBindings():
    template_filename = os.path.join(template_directory, 'dotnet_bindings')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  if project_configuration.HasJavaBindings():
    template_filename = os.path.join(template_directory, 'java_bindings')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  # Ignore the built tool executables.
  if project_configuration.HasTools():
    tools_executables = []
    for name in sorted(project_configuration.tools_names):
      tools_executable = '/{0:s}/{1:s}'.format(
          project_configuration.tools_directory, name)
      tools_executables.append(tools_executable)

    template_mappings['tools_executables'] = '\n'.join(tools_executables)

    template_filename = os.path.join(template_directory, 'tools')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['tools_executables']

  # Ignore built test executables derived from the <prefix>_test_*.c
  # sources, excluding shared test-support sources.
  source_glob = '{0:s}_test_*.c'.format(
      project_configuration.library_name_suffix)
  source_glob = os.path.join('tests', source_glob)

  tests_files = ['/tests/tmp*']
  if os.path.exists(os.path.join('tests', 'input')):
    tests_files.append('/tests/input')

  for source_file_path in sorted(glob.glob(source_glob)):
    if (source_file_path.endswith('_functions.c') or
        source_file_path.endswith('_getopt.c') or
        source_file_path.endswith('_i18n.c') or
        source_file_path.endswith('_memory.c') or
        source_file_path.endswith('_rwlock.c')):
      continue

    # Strip the '.c' suffix to ignore the corresponding executable.
    source_file_path = '/{0:s}'.format(source_file_path[:-2])
    tests_files.append(source_file_path)

  template_mappings['tests_files'] = '\n'.join(sorted(tests_files))

  template_filename = os.path.join(template_directory, 'tests')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  del template_mappings['tests_files']

  makefile_am_file = self._GetMainMakefileAM(project_configuration)

  libraries = [
      '/{0:s}'.format(name) for name in sorted(makefile_am_file.libraries)]

  if libraries:
    template_mappings['local_libraries'] = '\n'.join(libraries)

    template_filename = os.path.join(template_directory, 'local_libraries')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['local_libraries']
def _GenerateRpmSpec(
    self, project_configuration, template_mappings, output_writer,
    output_filename):
  """Generates the RPM spec file.

  Assembles the libyal.spec.in file from per-section templates, adding
  requires/build-requires sections for library and tools dependencies.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    output_writer (OutputWriter): output writer.
    output_filename (str): path of the output file.
  """
  library_name = project_configuration.library_name

  makefile_am_file = self._GetMainMakefileAM(project_configuration)

  template_directory = os.path.join(
      self._template_directory, 'libyal.spec.in')

  library_dependencies = list(makefile_am_file.library_dependencies)
  if project_configuration.HasDependencyCrypto():
    library_dependencies.append('libcrypto')
  if project_configuration.HasDependencyZlib():
    library_dependencies.append('zlib')

  template_names = ['header.in']

  template_mappings['library_name'] = library_name
  template_mappings['library_name_upper_case'] = library_name.upper()

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename)

  if not library_dependencies:
    template_filename = os.path.join(template_directory, 'build_requires.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  else:
    # The @ax_..._spec_requires@ placeholders are expanded by configure.
    spec_requires = []
    spec_build_requires = []
    for name in sorted(library_dependencies):
      requires = '@ax_{0:s}_spec_requires@'.format(name)
      spec_requires.append(requires)

      build_requires = '@ax_{0:s}_spec_build_requires@'.format(name)
      spec_build_requires.append(build_requires)

    template_mappings['spec_requires'] = ' '.join(spec_requires)
    template_mappings['spec_build_requires'] = ' '.join(spec_build_requires)

    template_filename = os.path.join(template_directory, 'requires.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['spec_requires']
    del template_mappings['spec_build_requires']

  template_filename = os.path.join(template_directory, 'package.in')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  if project_configuration.HasPythonModule():
    template_filename = os.path.join(template_directory, 'package-python.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  if project_configuration.HasTools():
    # The tools package always requires the library package itself.
    requires_library = '{0:s} = %{{version}}-%{{release}}'.format(
        project_configuration.library_name)

    tools_dependencies = list(makefile_am_file.tools_dependencies)
    if 'crypto' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libcrypto')
    if 'fuse' in project_configuration.tools_build_dependencies:
      tools_dependencies.append('libfuse')

    spec_requires = [requires_library]
    spec_build_requires = []
    for name in sorted(tools_dependencies):
      requires = '@ax_{0:s}_spec_requires@'.format(name)
      spec_requires.append(requires)

      build_requires = '@ax_{0:s}_spec_build_requires@'.format(name)
      spec_build_requires.append(build_requires)

    template_mappings['spec_requires'] = ' '.join(spec_requires)

    template_filename = os.path.join(
        template_directory, 'package-tools-header.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

    del template_mappings['spec_requires']

    if tools_dependencies:
      template_mappings['spec_build_requires'] = ' '.join(spec_build_requires)

      template_filename = os.path.join(
          template_directory, 'package-tools-requires.in')
      self._GenerateSection(
          template_filename, template_mappings, output_writer,
          output_filename, access_mode='a')

      del template_mappings['spec_build_requires']

    template_filename = os.path.join(
        template_directory, 'package-tools-footer.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  template_filename = os.path.join(template_directory, 'prep.in')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  if project_configuration.HasPythonModule():
    template_filename = os.path.join(template_directory, 'build-python.in')
  else:
    template_filename = os.path.join(template_directory, 'build.in')

  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  template_filename = os.path.join(template_directory, 'install.in')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  template_filename = os.path.join(template_directory, 'files.in')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  if project_configuration.HasPythonModule():
    template_filename = os.path.join(template_directory, 'files-python.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  if project_configuration.HasTools():
    template_filename = os.path.join(template_directory, 'files-tools.in')
    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename,
        access_mode='a')

  template_filename = os.path.join(template_directory, 'changelog.in')
  self._GenerateSection(
      template_filename, template_mappings, output_writer, output_filename,
      access_mode='a')

  del template_mappings['library_name']
  del template_mappings['library_name_upper_case']
def _GenerateGitHubActions(
    self, project_configuration, template_mappings, include_header_file,
    output_writer):
  """Generates the .github/workflows/*.yml configuration files.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    include_header_file (LibraryIncludeHeaderFile): library include header
        file.
    output_writer (OutputWriter): output writer.
  """
  workflows_template_directory = os.path.join(
      self._template_directory, 'github_workflows')
  workflows_output_directory = os.path.join('.github', 'workflows')

  # Each regular file in the template directory maps one-to-one onto a
  # workflow file of the same name; sub directories are skipped.
  for entry_name in os.listdir(workflows_template_directory):
    entry_path = os.path.join(workflows_template_directory, entry_name)
    if os.path.isfile(entry_path):
      self._GenerateSection(
          entry_path, template_mappings, output_writer,
          os.path.join(workflows_output_directory, entry_name))
def _GenerateGitHubActionsBuildYML(
    self, project_configuration, template_mappings, include_header_file,
    output_writer, output_filename):
  """Generates the .github/workflows/build.yml configuration file.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    include_header_file (LibraryIncludeHeaderFile): library include header
        file.
    output_writer (OutputWriter): output writer.
    output_filename (str): path of the output file.
  """
  template_directory = os.path.join(
      self._template_directory, 'github_workflows', 'build.yml')

  dpkg_build_dependencies = self._GetDpkgBuildDependencies(
      project_configuration)

  # Select build job sections based on project capabilities.
  template_names = ['header.yml']

  # TODO: improve check.
  if project_configuration.library_name in ('libbfio', 'libcdata'):
    template_names.append('build_ubuntu-no_pthread.yml')

  if include_header_file.have_wide_character_type:
    template_names.append('build_ubuntu-wide_character_type.yml')

  if project_configuration.HasDependencyCrypto():
    template_names.append('build_ubuntu-openssl.yml')

  if project_configuration.HasTools():
    template_names.append('build_ubuntu-static_executables.yml')

  template_names.append('build_ubuntu-end.yml')

  if project_configuration.HasPythonModule():
    template_names.append('build_python_ubuntu.yml')

  template_names.append('coverage_ubuntu-start.yml')

  if include_header_file.have_wide_character_type:
    template_names.append('coverage_ubuntu-wide_character_type.yml')
  else:
    template_names.append('coverage_ubuntu.yml')

  template_names.append('footer.yml')

  template_filenames = [
      os.path.join(template_directory, template_name)
      for template_name in template_names]

  template_mappings['dpkg_build_dependencies'] = ' '.join(
      dpkg_build_dependencies)

  self._GenerateSections(
      template_filenames, template_mappings, output_writer, output_filename)

  del template_mappings['dpkg_build_dependencies']
def _GenerateGitHubActionsBuildSharedYML(
    self, project_configuration, template_mappings, include_header_file,
    output_writer, output_filename):
  """Generates the .github/workflows/build_shared.yml configuration file.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    template_mappings (dict[str, str]): template mappings, where the key
        maps to the name of a template variable.
    include_header_file (LibraryIncludeHeaderFile): library include header
        file.
    output_writer (OutputWriter): output writer.
    output_filename (str): path of the output file.
  """
  template_directory = os.path.join(
      self._template_directory, 'github_workflows', 'build_shared.yml')

  # The wide character type determines which optional build job variant
  # is included between the header and footer sections.
  section_names = ['header.yml']
  if include_header_file.have_wide_character_type:
    section_names.append('build_shared_ubuntu-wide_character_type.yml')
  section_names.append('footer.yml')

  section_paths = [
      os.path.join(template_directory, section_name)
      for section_name in section_names]

  template_mappings['dpkg_build_dependencies'] = ' '.join(
      self._GetDpkgBuildDependencies(project_configuration))

  self._GenerateSections(
      section_paths, template_mappings, output_writer, output_filename)

  del template_mappings['dpkg_build_dependencies']
def _GetBrewBuildDependencies(self, project_configuration):
"""Retrieves the brew build dependencies.
Args:
project_configuration (ProjectConfiguration): project configuration.
Returns:
list[str]: dpkg build dependencies.
"""
brew_build_dependencies = ['gettext', 'gnu-sed']
if 'fuse' in project_configuration.tools_build_dependencies:
brew_build_dependencies.append('osxfuse')
return brew_build_dependencies
def _GetDpkgBuildDependencies(self, project_configuration):
"""Retrieves the dpkg build dependencies.
Args:
project_configuration (ProjectConfiguration): project configuration.
Returns:
list[str]: dpkg build dependencies in alphabetical order.
"""
dpkg_build_dependencies = [
'autoconf',
'automake',
'autopoint',
'build-essential',
'git',
'libtool',
'pkg-config']
if project_configuration.HasDependencyYacc():
dpkg_build_dependencies.append('byacc')
if project_configuration.HasDependencyLex():
dpkg_build_dependencies.append('flex')
if project_configuration.HasDependencyZlib():
dpkg_build_dependencies.append('zlib1g-dev')
if ('crypto' in project_configuration.library_build_dependencies or
'crypto' in project_configuration.tools_build_dependencies):
dpkg_build_dependencies.append('libssl-dev')
if 'fuse' in project_configuration.tools_build_dependencies:
dpkg_build_dependencies.append('libfuse-dev')
if 'sgutils' in project_configuration.library_build_dependencies:
dpkg_build_dependencies.append('libsgutils2-dev')
dpkg_build_dependencies.extend(
project_configuration.dpkg_build_dependencies)
return sorted(dpkg_build_dependencies)
def _GetDpkgBuildDependenciesDpkgControl(self, project_configuration):
"""Retrieves the dpkg build dependencies for the dpkg/control file.
Args:
project_configuration (ProjectConfiguration): project configuration.
Returns:
list[str]: dpkg build dependencies.
"""
dpkg_build_dependencies = ['debhelper (>= 9)', 'dh-autoreconf']
if project_configuration.HasPythonModule():
dpkg_build_dependencies.append('dh-python')
dpkg_build_dependencies.append('pkg-config')
if project_configuration.HasDependencyZlib():
dpkg_build_dependencies.append('zlib1g-dev')
if ('crypto' in project_configuration.library_build_dependencies or
'crypto' in project_configuration.tools_build_dependencies):
dpkg_build_dependencies.append('libssl-dev')
if project_configuration.HasPythonModule():
dpkg_build_dependencies.extend(['python3-dev', 'python3-setuptools'])
if 'fuse' in project_configuration.tools_build_dependencies:
dpkg_build_dependencies.append('libfuse-dev')
if 'sgutils' in project_configuration.library_build_dependencies:
dpkg_build_dependencies.append('libsgutils2-dev')
if project_configuration.dpkg_build_dependencies:
dpkg_build_dependencies.extend(
project_configuration.dpkg_build_dependencies)
return dpkg_build_dependencies
def _GetCygwinBuildDependencies(self, project_configuration):
"""Retrieves the Cygwin build dependencies.
Args:
project_configuration (ProjectConfiguration): project configuration.
Returns:
list[str]: Cygwin build dependencies.
"""
cygwin_build_dependencies = list(
project_configuration.cygwin_build_dependencies)
cygwin_build_dependencies.append('gettext-devel')
if project_configuration.HasDependencyYacc():
cygwin_build_dependencies.append('bison')
if project_configuration.HasDependencyLex():
cygwin_build_dependencies.append('flex')
if project_configuration.HasDependencyZlib():
cygwin_build_dependencies.append('zlib-devel')
if project_configuration.HasDependencyBzip2():
cygwin_build_dependencies.append('libbz2-devel')
if ('crypto' in project_configuration.library_build_dependencies or
'crypto' in project_configuration.tools_build_dependencies):
# On Cygwin also link zlib since libcrypto relies on it.
if 'zlib' not in project_configuration.library_build_dependencies:
cygwin_build_dependencies.append('zlib-devel')
cygwin_build_dependencies.append('libssl-devel')
if project_configuration.HasPythonModule():
cygwin_build_dependencies.append('python2-devel')
cygwin_build_dependencies.append('python3-devel')
if ('uuid' in project_configuration.library_build_dependencies or
'uuid' in project_configuration.tools_build_dependencies):
cygwin_build_dependencies.append('libuuid-devel')
return cygwin_build_dependencies
def _GetMinGWMSYS2BuildDependencies(self, project_configuration):
"""Retrieves the MinGW-MSYS2 build dependencies.
Args:
project_configuration (ProjectConfiguration): project configuration.
Returns:
list[str]: MinGW-MSYS2 build dependencies.
"""
mingw_msys2_build_dependencies = list(
project_configuration.mingw_msys2_build_dependencies)
mingw_msys2_build_dependencies.extend([
'autoconf', 'automake', 'gcc', 'gettext-devel', 'libtool', 'make'])
if project_configuration.HasDependencyYacc():
mingw_msys2_build_dependencies.append('msys/bison')
if project_configuration.HasDependencyLex():
mingw_msys2_build_dependencies.append('msys/flex')
# TODO: add support for other dependencies.
if project_configuration.HasDependencyZlib():
mingw_msys2_build_dependencies.append('msys/zlib-devel')
return mingw_msys2_build_dependencies
def Generate(self, project_configuration, output_writer):
  """Generates configuration files.

  Generation is skipped entirely when either the library include header
  or the library Makefile.am cannot be retrieved.

  Args:
    project_configuration (ProjectConfiguration): project configuration.
    output_writer (OutputWriter): output writer.
  """
  # TODO: generate spec file, what about Python versus non-Python?

  include_header_file = self._GetLibraryIncludeHeaderFile(
      project_configuration)
  if not include_header_file:
    logging.warning(
        'Missing: {0:s} skipping generation of configuration files.'.format(
            self._library_include_header_path))
    return

  makefile_am_file = self._GetLibraryMakefileAM(project_configuration)
  if not makefile_am_file:
    logging.warning(
        'Missing: {0:s} skipping generation of configuration files.'.format(
            self._library_makefile_am_path))
    return

  # Build the pkg-config private libraries substitution placeholders from
  # the libraries in Makefile.am; libdl is excluded.
  pc_libs_private = []
  for library in sorted(makefile_am_file.libraries):
    if library == 'libdl':
      continue

    pc_lib_private = '@ax_{0:s}_pc_libs_private@'.format(library)
    pc_libs_private.append(pc_lib_private)

  template_mappings = self._GetTemplateMappings(
      project_configuration,
      authors_separator=',\n * ')
  template_mappings['authors'] = 'Joachim Metz <joachim.metz@gmail.com>'
  template_mappings['pc_libs_private'] = ' '.join(pc_libs_private)

  # Generate one output file per top-level template file; some templates
  # have library-name specific output filenames.
  for directory_entry in os.listdir(self._template_directory):
    template_filename = os.path.join(
        self._template_directory, directory_entry)
    if not os.path.isfile(template_filename):
      continue

    if directory_entry == 'libyal.nuspec':
      output_filename = '{0:s}.nuspec'.format(
          project_configuration.library_name)

      # Only generate the nuspec when NuGet deployment is configured.
      if not project_configuration.deploy_to_nuget:
        logging.warning('Skipping: {0:s}'.format(output_filename))
        continue

    elif directory_entry == 'libyal.pc.in':
      output_filename = '{0:s}.pc.in'.format(
          project_configuration.library_name)

    else:
      output_filename = directory_entry

    self._GenerateSection(
        template_filename, template_mappings, output_writer, output_filename)

  # pc_libs_private is only needed by the top-level templates above.
  del template_mappings['pc_libs_private']

  # Generate the remaining well-known configuration files.
  self._GenerateCodecovYML(
      project_configuration, template_mappings, output_writer, '.codecov.yml')

  self._GenerateGitignore(
      project_configuration, template_mappings, output_writer, '.gitignore')

  self._GenerateGitHubActions(
      project_configuration, template_mappings, include_header_file,
      output_writer)

  output_filename = os.path.join('.github', 'workflows', 'build.yml')
  self._GenerateGitHubActionsBuildYML(
      project_configuration, template_mappings, include_header_file,
      output_writer, output_filename)

  output_filename = os.path.join('.github', 'workflows', 'build_shared.yml')
  self._GenerateGitHubActionsBuildSharedYML(
      project_configuration, template_mappings, include_header_file,
      output_writer, output_filename)

  self._GenerateAppVeyorYML(
      project_configuration, template_mappings, output_writer, 'appveyor.yml')

  self._GenerateConfigureAC(
      project_configuration, template_mappings, output_writer, 'configure.ac')

  self._GenerateACIncludeM4(
      project_configuration, template_mappings, output_writer, 'acinclude.m4')

  self._GenerateDpkg(
      project_configuration, template_mappings, output_writer, 'dpkg')

  output_filename = '{0:s}.spec.in'.format(project_configuration.library_name)
  self._GenerateRpmSpec(
      project_configuration, template_mappings, output_writer,
      output_filename)
| {
"content_hash": "35054cda6533f2bdcde5d5138e1871f1",
"timestamp": "",
"source": "github",
"line_count": 1628,
"max_line_length": 80,
"avg_line_length": 37.08353808353808,
"alnum_prop": 0.6772510435301133,
"repo_name": "libyal/libyal",
"id": "1942188421a746947235b43219b38a98a44b9d5a",
"size": "60396",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "yaldevtools/source_generators/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1616472"
},
{
"name": "M4",
"bytes": "435083"
},
{
"name": "Makefile",
"bytes": "7291"
},
{
"name": "PowerShell",
"bytes": "37768"
},
{
"name": "Python",
"bytes": "912826"
},
{
"name": "Shell",
"bytes": "87438"
}
],
"symlink_target": ""
} |
class GeneratorAdapter:
    """No-op base implementation of the generator visitor interface.

    Subclasses override only the visit callbacks they are interested in;
    every default implementation does nothing and returns None.
    """

    def visit_selector_view(self, view):
        """Called for a selector view."""

    def visit_selector_object(self, object, property):
        """Called for an object shown in a selector."""

    def visit_selector_fk_object(self, object, property, fk_properties):
        """Called for a foreign-key object shown in a selector."""

    def visit_view(self, view):
        """Called for a view."""

    def visit_page(self, page):
        """Called for a page."""

    def visit_action_selector(self, object, actions):
        """Called for an action selector."""

    def visit_other_selector(self, name, **kwargs):
        """Called for any other kind of selector."""

    def visit_row(self, row):
        """Called for a row."""

    def visit_object(self, object):
        """Called for an object."""
class FrontendGenerator(GeneratorAdapter):
    """Visitor that generates the frontend code for a model."""

    def __init__(self, model, builtins, path):
        """Stores the model, the builtin registry and the output path."""
        self.model, self.builtins, self.path = model, builtins, path
        # Address the generated frontend uses to reach the backend.
        self.backend_base_url = "http://localhost:8080/"
class BackendGenerator(GeneratorAdapter):
    """Visitor that generates the backend code for a model."""

    def __init__(self, model, builtins, path):
        """Stores the model, the builtin registry and the output path."""
        self.model, self.builtins, self.path = model, builtins, path
        # Base address the generated backend is served from.
        self.base_url = "http://localhost:8080/"
class BColors:
    """ANSI escape sequences for colored / styled terminal output."""

    HEADER = '\x1b[95m'     # bright magenta
    OKBLUE = '\x1b[94m'     # bright blue
    OKGREEN = '\x1b[92m'    # bright green
    WARNING = '\x1b[93m'    # bright yellow
    FAIL = '\x1b[91m'       # bright red
    ENDC = '\x1b[0m'        # reset all attributes
    BOLD = '\x1b[1m'
    UNDERLINE = '\x1b[4m'
| {
"content_hash": "e2534e3c44aa84884305bb86608de861",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 72,
"avg_line_length": 22.555555555555557,
"alnum_prop": 0.5952380952380952,
"repo_name": "theshammy/GenAn",
"id": "353e26c825cb8b000a73f97b9ad5f554abea4066",
"size": "1218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2122"
},
{
"name": "HTML",
"bytes": "19036"
},
{
"name": "JavaScript",
"bytes": "15499"
},
{
"name": "Python",
"bytes": "62881"
}
],
"symlink_target": ""
} |
import GafferScene
from _GafferSceneTest import *
from SceneTestCase import SceneTestCase
from ScenePlugTest import ScenePlugTest
from GroupTest import GroupTest
from SceneTimeWarpTest import SceneTimeWarpTest
from CubeTest import CubeTest
from PlaneTest import PlaneTest
from SphereTest import SphereTest
from InstancerTest import InstancerTest
from ObjectToSceneTest import ObjectToSceneTest
from CameraTest import CameraTest
from OutputsTest import OutputsTest
from CustomOptionsTest import CustomOptionsTest
from DeleteOptionsTest import DeleteOptionsTest
from CopyOptionsTest import CopyOptionsTest
from SceneNodeTest import SceneNodeTest
from PathFilterTest import PathFilterTest
from ShaderAssignmentTest import ShaderAssignmentTest
from CustomAttributesTest import CustomAttributesTest
from DeletePrimitiveVariablesTest import DeletePrimitiveVariablesTest
from SeedsTest import SeedsTest
from SceneContextVariablesTest import SceneContextVariablesTest
from DeleteSceneContextVariablesTest import DeleteSceneContextVariablesTest
from SubTreeTest import SubTreeTest
from OpenGLAttributesTest import OpenGLAttributesTest
from StandardOptionsTest import StandardOptionsTest
from ScenePathTest import ScenePathTest
from LightTest import LightTest
from OpenGLShaderTest import OpenGLShaderTest
from OpenGLRenderTest import OpenGLRenderTest
from TransformTest import TransformTest
from AimConstraintTest import AimConstraintTest
from PruneTest import PruneTest
from ShaderTest import ShaderTest
from TextTest import TextTest
from MapProjectionTest import MapProjectionTest
from MapOffsetTest import MapOffsetTest
from PointConstraintTest import PointConstraintTest
from SceneReaderTest import SceneReaderTest
from SceneWriterTest import SceneWriterTest
from IsolateTest import IsolateTest
from DeleteAttributesTest import DeleteAttributesTest
from UnionFilterTest import UnionFilterTest
from SceneSwitchTest import SceneSwitchTest
from ShaderSwitchTest import ShaderSwitchTest
from ParentConstraintTest import ParentConstraintTest
from ParentTest import ParentTest
from StandardAttributesTest import StandardAttributesTest
from PrimitiveVariablesTest import PrimitiveVariablesTest
from DuplicateTest import DuplicateTest
from ModuleTest import ModuleTest
from GridTest import GridTest
from SetTest import SetTest
from FreezeTransformTest import FreezeTransformTest
from SetFilterTest import SetFilterTest
from FilterTest import FilterTest
from SceneAlgoTest import SceneAlgoTest
from CoordinateSystemTest import CoordinateSystemTest
from DeleteOutputsTest import DeleteOutputsTest
from ExternalProceduralTest import ExternalProceduralTest
from ClippingPlaneTest import ClippingPlaneTest
from FilterSwitchTest import FilterSwitchTest
from PointsTypeTest import PointsTypeTest
from ParametersTest import ParametersTest
from SceneFilterPathFilterTest import SceneFilterPathFilterTest
from AttributeVisualiserTest import AttributeVisualiserTest
from SceneLoopTest import SceneLoopTest
from SceneProcessorTest import SceneProcessorTest
from MeshToPointsTest import MeshToPointsTest
from InteractiveRenderTest import InteractiveRenderTest
from FilteredSceneProcessorTest import FilteredSceneProcessorTest
from ShaderBallTest import ShaderBallTest
from LightTweaksTest import LightTweaksTest
from FilterResultsTest import FilterResultsTest
from RendererAlgoTest import RendererAlgoTest
from SetAlgoTest import SetAlgoTest
from EvaluateLightLinksTest import EvaluateLightLinksTest
from MeshTangentsTest import MeshTangentsTest
from ResamplePrimitiveVariablesTest import ResamplePrimitiveVariablesTest
from DeleteFacesTest import DeleteFacesTest
from DeleteCurvesTest import DeleteCurvesTest
from DeletePointsTest import DeletePointsTest
from CollectScenesTest import CollectScenesTest
from CapsuleTest import CapsuleTest
from EncapsulateTest import EncapsulateTest
from FilterPlugTest import FilterPlugTest
from ReverseWindingTest import ReverseWindingTest
from MeshDistortionTest import MeshDistortionTest
from IECoreGLPreviewTest import *
# Allow running the full GafferSceneTest suite directly from this module.
if __name__ == "__main__":
    import unittest
    unittest.main()
| {
"content_hash": "3fa2a14e15cd8d46c41266880b561be8",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 75,
"avg_line_length": 42.2680412371134,
"alnum_prop": 0.9046341463414634,
"repo_name": "ivanimanishi/gaffer",
"id": "055a85af164976e9e48fe4fd8d07e3f3b1f6ed49",
"size": "5970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/GafferSceneTest/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "39753"
},
{
"name": "C++",
"bytes": "6086015"
},
{
"name": "CMake",
"bytes": "83446"
},
{
"name": "CSS",
"bytes": "28027"
},
{
"name": "GLSL",
"bytes": "6236"
},
{
"name": "Python",
"bytes": "6120483"
},
{
"name": "Shell",
"bytes": "13049"
},
{
"name": "Slash",
"bytes": "2870"
}
],
"symlink_target": ""
} |
from portality import models
from portality import constants
from portality.bll import exceptions
from doajtest.helpers import DoajTestCase
from portality.events.consumers.account_created_email import AccountCreatedEmail
from doajtest.fixtures import AccountFixtureFactory
import uuid
from io import StringIO
import logging
import re
# A regex string for searching the log entries; interpolated with
# (template, recipient, subject) before use.
email_log_regex = 'template.*%s.*to:\[u{0,1}\'%s.*subject:.*%s'

# A string present in each email log entry (for counting them)
email_count_string = 'Email template'
class TestAccountCreatedEmail(DoajTestCase):
    """Tests for the AccountCreatedEmail event consumer.

    Email sends are not asserted directly; they are detected by capturing
    the application's INFO log and matching it against email_log_regex.
    """

    def setUp(self):
        super(TestAccountCreatedEmail, self).setUp()

        # Capture INFO-level application log output so the tests can
        # inspect the email send log entries.
        self.info_stream = StringIO()
        self.read_info = logging.StreamHandler(self.info_stream)
        self.read_info.setLevel(logging.INFO)
        self.app_test.logger.addHandler(self.read_info)

    def tearDown(self):
        super(TestAccountCreatedEmail, self).tearDown()

        # Blank the info_stream and remove the error handler from the app
        self.info_stream.truncate(0)
        self.app_test.logger.removeHandler(self.read_info)

    def test_consumes(self):
        # An ACCOUNT_CREATED event carrying account context is consumable.
        source = AccountFixtureFactory.make_publisher_source()
        acc = models.Account(**source)
        acc.clear_password()
        reset_token = uuid.uuid4().hex
        acc.set_reset_token(reset_token, 86400)
        event = models.Event(constants.EVENT_ACCOUNT_CREATED, context={"account" : acc.data})
        assert AccountCreatedEmail.consumes(event)

        # The same event type without context must not be consumed.
        event = models.Event(constants.EVENT_ACCOUNT_CREATED)
        assert not AccountCreatedEmail.consumes(event)

        # Unrelated event types must not be consumed.
        event = models.Event("test:event", context={"application" : "2345"})
        assert not AccountCreatedEmail.consumes(event)

    def test_consume_success(self):
        self._make_and_push_test_context("/")

        source = AccountFixtureFactory.make_publisher_source()
        acc = models.Account(**source)
        acc.clear_password()
        reset_token = uuid.uuid4().hex
        acc.set_reset_token(reset_token, 86400)

        event = models.Event(constants.EVENT_ACCOUNT_CREATED, context={"account": acc.data})
        AccountCreatedEmail.consume(event)

        # Use the captured info stream to get email send logs
        info_stream_contents = self.info_stream.getvalue()

        # We expect one email sent:
        #   * to the applicant, informing them the application was received
        template = re.escape('account_created.jinja2')
        to = re.escape(acc.email)
        subject = "Directory of Open Access Journals - account created, please verify your email address"

        email_matched = re.search(email_log_regex % (template, to, subject),
                                  info_stream_contents,
                                  re.DOTALL)
        assert bool(email_matched)

        # Exactly one email entry in total, i.e. no extra sends happened.
        assert len(re.findall(email_count_string, info_stream_contents)) == 1

    def test_consume_fail(self):
        source = AccountFixtureFactory.make_publisher_source()
        acc = models.Account(**source)
        # we don't set the reset token

        event = models.Event(constants.EVENT_ACCOUNT_PASSWORD_RESET, context={"account": acc.data})

        # Without a reset token the consumer cannot build the email and
        # must raise NoSuchPropertyException.
        with self.assertRaises(exceptions.NoSuchPropertyException):
            AccountCreatedEmail.consume(event)
| {
"content_hash": "9067c77025925a60e7544dfa49e60965",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 105,
"avg_line_length": 39.27058823529412,
"alnum_prop": 0.6773517076093469,
"repo_name": "DOAJ/doaj",
"id": "0016d810be5183829599050b01aab9249a16f769",
"size": "3338",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "doajtest/unit/event_consumers/test_account_created_email.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2399"
},
{
"name": "Dockerfile",
"bytes": "59"
},
{
"name": "HTML",
"bytes": "483733"
},
{
"name": "JavaScript",
"bytes": "952971"
},
{
"name": "Jinja",
"bytes": "15292"
},
{
"name": "Python",
"bytes": "3195030"
},
{
"name": "SCSS",
"bytes": "75276"
},
{
"name": "Shell",
"bytes": "28415"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from mock import patch, Mock
from helpscout_app.decorators import signed_request
class DecoratorTest(TestCase):
    """Unit tests for the signed_request decorator."""

    def test_signed_request(self):
        """signed_request rejects unsigned requests with ('', 400) and
        passes valid ones through to the wrapped callable."""
        # Patch Flask's request, current_app and the is_helpscout_request
        # helper function. Register cleanups so the patches are always
        # stopped and do not leak into other tests (the original started
        # the patchers but never stopped them).
        request_patcher = patch('helpscout_app.decorators.request',
                                headers={'X-Helpscout-Signature': '123'})
        request_patcher.start()
        self.addCleanup(request_patcher.stop)

        app_patcher = patch('helpscout_app.decorators.current_app', config={})
        app_patcher.start()
        self.addCleanup(app_patcher.stop)

        helpscout_patcher = patch('helpscout_app.decorators.is_helpscout_request')
        mock_is_helpscout = helpscout_patcher.start()
        self.addCleanup(helpscout_patcher.stop)

        # If helper function returns False, decorator should not call the
        # callable and must answer with an empty HTTP 400 response.
        mock_is_helpscout.return_value = False
        decorated_func = signed_request(lambda x: x)
        self.assertEqual(('', 400), decorated_func(1))

        # Else, continue with the callable.
        mock_is_helpscout.return_value = True
        decorated_func = signed_request(lambda x: x)
        self.assertEqual(1, decorated_func(1))
| {
"content_hash": "ea8b6639e3b4314cd60a0e4247224972",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 89,
"avg_line_length": 45.2,
"alnum_prop": 0.6761061946902654,
"repo_name": "carousell/Helpscout-App-Template",
"id": "648b0b863da052a4ef684b21b82c51eedf9f4570",
"size": "1130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/decorator_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8264"
}
],
"symlink_target": ""
} |
import sublime
import sublime_plugin
class EventDump(sublime_plugin.EventListener):
    """Sublime Text event listener demonstrating the on_new API hook."""

    def on_new(self, view):
        """Shows a message dialog whenever a new buffer is created."""
        # Fixed typo in the user-facing message: "opned" -> "opened".
        sublime.message_dialog("new file is opened.")
| {
"content_hash": "c884f58cfe8be93f50374ca864dc9789",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 52,
"avg_line_length": 21,
"alnum_prop": 0.7261904761904762,
"repo_name": "ekazyam/study",
"id": "6b950fe4671ab81ea5d692a7ca90768fea93cd2c",
"size": "168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sublime_api/api_on_new.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "522"
},
{
"name": "JavaScript",
"bytes": "81"
},
{
"name": "PHP",
"bytes": "508"
},
{
"name": "Python",
"bytes": "32868"
}
],
"symlink_target": ""
} |
import urllib
import logging
def seedfile_load(seedfile):
    """
    seedfile_load - load crawl seed file.

    Args:
        seedfile: The seed file will be loaded.

    Returns:
        urls: All seed urls to be crawled, one per line with surrounding
        whitespace stripped; None if the file could not be read.
    """
    try:
        with open(seedfile, "r") as f:
            # Iterate the file directly instead of readlines(); blank lines
            # are kept (as empty strings) to preserve existing behavior.
            return [line.strip() for line in f]
    except IOError as e:
        logging.error("Fail to load seedfile(%s) as Exception: %s", seedfile, e)
        return None
| {
"content_hash": "6a6d1eafe4672770ee6926fb75a84a7a",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 80,
"avg_line_length": 24.272727272727273,
"alnum_prop": 0.5543071161048689,
"repo_name": "fivezh/Keepgoing",
"id": "10b8e049c7709ae8f752751c165f10228f3300df",
"size": "573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py_spider/seedfile_load.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12329"
},
{
"name": "Go",
"bytes": "10763"
},
{
"name": "PHP",
"bytes": "37648"
},
{
"name": "Python",
"bytes": "23360"
},
{
"name": "Shell",
"bytes": "85"
}
],
"symlink_target": ""
} |
'''Train a Bidirectional LSTM for MHG scansion.
Current accuracy 91.88 on validation data'''
# Authors: Christopher Hench
# ==============================================================================
from __future__ import print_function
import numpy as np
from keras.preprocessing import sequence
from keras.models import Model
from keras.layers.core import Masking
from keras.layers import TimeDistributed, Dense
from keras.layers import Dropout, Embedding, LSTM, Input, merge
from prep_nn import prep_scan
from keras.utils import np_utils, generic_utils
import itertools
from itertools import chain
from CLFL_mdf_classification import classification_report, confusion_matrix
from CLFL_mdf_classification import precision_recall_fscore_support
from sklearn.preprocessing import LabelBinarizer
import sklearn
import pandas as pd
np.random.seed(1337)  # for reproducibility

# Hyperparameters.
nb_words = 20000  # max. size of vocab
nb_classes = 10  # number of labels
# hidden = int(((953 + 10) / 2))
hidden = 500  # LSTM hidden size (per direction)
batch_size = 10  # create and update net after 10 lines
val_split = .1  # fraction of training data used for validation
epochs = 6

# input for X is multi-dimensional numpy array with syll IDs,
# one line per array. input y is multi-dimensional numpy array with
# binary arrays for each value of each label.
# maxlen is length of longest line
print('Loading data...')
(X_train, y_train), (X_test, y_test), maxlen, sylls_ids, tags_ids = prep_scan(
    nb_words=nb_words, test_len=75)

print(len(X_train), 'train sequences')
print(int(len(X_train)*val_split), 'validation sequences')
print(len(X_test), 'heldout sequences')
# this is the placeholder tensor for the input sequences
# NOTE(review): this rebinds `sequence`, shadowing the
# keras.preprocessing.sequence module imported above -- confirm the module
# is not needed afterwards.
sequence = Input(shape=(maxlen,), dtype='int32')

# this embedding layer will transform the sequences of integers
# into vectors of size `hidden`; mask_zero treats ID 0 as padding
embedded = Embedding(nb_words, output_dim=hidden,
                     input_length=maxlen, mask_zero=True)(sequence)

# apply forwards LSTM
forwards = LSTM(output_dim=hidden, return_sequences=True)(embedded)
# apply backwards LSTM
backwards = LSTM(output_dim=hidden, return_sequences=True,
                 go_backwards=True)(embedded)

# concatenate the outputs of the 2 LSTMs
merged = merge([forwards, backwards], mode='concat', concat_axis=-1)
after_dp = Dropout(0.15)(merged)

# TimeDistributed applies the dense softmax classifier at every timestep
# change activation to sigmoid?
output = TimeDistributed(
    Dense(output_dim=nb_classes,
          activation='softmax'))(after_dp)

model = Model(input=sequence, output=output)

# try using different optimizers and different optimizer configs
# loss=binary_crossentropy, optimizer=rmsprop
model.compile(loss='categorical_crossentropy',
              metrics=['accuracy'], optimizer='adam')

print('Train...')
# NOTE(review): sample_weight=0. looks suspicious -- Keras expects an
# array or None here; confirm this is intentional.
model.fit(X_train, y_train,
          batch_size=batch_size,
          nb_epoch=epochs,
          shuffle=True,
          validation_split=val_split,
          sample_weight=0.)
# held-out testing:


def bio_classification_report(y_true, y_pred):
    """
    Classification report for a list of BIO-encoded sequences.
    It computes token-level metrics and discards "O" labels.

    Note that it requires scikit-learn 0.15+ (or a version from github master)
    to calculate averages properly!
    """
    binarizer = LabelBinarizer()
    flat_true = binarizer.fit_transform(list(chain.from_iterable(y_true)))
    flat_pred = binarizer.transform(list(chain.from_iterable(y_pred)))

    # Drop the "O" (outside) tag and sort by entity then BIO prefix.
    tagset = sorted(set(binarizer.classes_) - {'O'},
                    key=lambda tag: tag.split('-', 1)[::-1])
    class_indices = {cls: idx for idx, cls in enumerate(binarizer.classes_)}
    labs = [class_indices[cls] for cls in tagset]

    scores = precision_recall_fscore_support(
        flat_true, flat_pred, labels=labs, average=None, sample_weight=None)
    report = classification_report(
        flat_true, flat_pred, labels=labs, target_names=tagset)

    return (scores, report, labs)
# first get probabilities of labels in binary arrays, then convert to classes
predicted_arrays = model.predict(X_test, batch_size=batch_size)
# return list of labels for input sequence of binary arrays
def arrays_to_labels(y_pred_arrays):
    """Converts each sequence of score arrays to a sequence of the
    arg-max class indices."""
    return [list(map(np.argmax, seq)) for seq in y_pred_arrays]
pred_classes = arrays_to_labels(predicted_arrays)
y_test_classes = arrays_to_labels(y_test)

# get list of lines of labels; class 0 is skipped (padding) and the
# remaining IDs are mapped back to tag strings via tags_ids
y_pred = []
for i, line in enumerate(pred_classes):
    line_labels = []
    for v in line:
        if v != 0:
            line_labels.append(tags_ids[v])
    y_pred.append(line_labels)

y_test = []
for i, line in enumerate(y_test_classes):
    line_labels = []
    for v in line:
        if v != 0:
            line_labels.append(tags_ids[v])
    y_test.append(line_labels)

# get stats
bioc = bio_classification_report(y_test, y_pred)
p, r, f1, s = bioc[0]

# weighted overall averages, padded with zeros to the per-label length
tot_avgs = []
for v in (np.average(p, weights=s),
          np.average(r, weights=s),
          np.average(f1, weights=s)):
    tot_avgs.append(v)
toext = [0] * (len(s) - 3)
tot_avgs.extend(toext)

all_s = [sum(s)] * len(s)

# recover the label names from the text report (labels are all uppercase)
rep = bioc[1]
all_labels = []
for word in rep.split():
    if word.isupper():
        all_labels.append(word)

# full expected label set; labels absent from this run get zero rows
ext_labels = [
    "DOPPEL",
    "EL",
    "HALB",
    "HALB_HAUPT",
    "HALB_NEBEN",
    "MORA",
    "MORA_HAUPT",
    "MORA_NEBEN"]

abs_labels = [l for l in ext_labels if l not in all_labels]

data = {
    "labels": all_labels,
    "precision": p,
    "recall": r,
    "f1": f1,
    "support": s,
    "tots": tot_avgs,
    "all_s": all_s}

df = pd.DataFrame(data)

# NOTE(review): DataFrame.ix was removed in pandas 1.0; this code requires
# an old pandas and should migrate to .iloc/.loc.
if len(abs_labels) > 0:
    if "HALB_NEBEN" in abs_labels:
        # insert a zero row for HALB_NEBEN at position 4
        line = pd.DataFrame({"labels": "HALB_NEBEN",
                             "precision": 0,
                             "recall": 0,
                             "f1": 0,
                             "support": 0,
                             "tots": 0,
                             "all_s": 0},
                            index=[4])
        df = pd.concat([df.ix[:3], line, df.ix[4:]]).reset_index(drop=True)

    if "EL" in abs_labels:
        # insert a zero row for EL at position 1
        line = pd.DataFrame({"labels": "EL",
                             "precision": 0,
                             "recall": 0,
                             "f1": 0,
                             "support": 0,
                             "tots": 0,
                             "all_s": 0},
                            index=[1])
        df = pd.concat([df.ix[0], line, df.ix[1:]]).reset_index(drop=True)

# support-weighted columns used for aggregate reporting
df["w_p"] = df.precision * df.support
df["w_r"] = df.recall * df.support
df["w_f1"] = df.f1 * df.support
df["w_tots"] = df.tots * df.all_s

df_all = df

print(df_all)
| {
"content_hash": "d83df438bfa999cac3f8acdf12b9680f",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 80,
"avg_line_length": 31.022935779816514,
"alnum_prop": 0.6047612006505988,
"repo_name": "henchc/MHG_Scansion",
"id": "f988f0f6b7dbcf880a30cdf7376ba268c5df3653",
"size": "6763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BLSTM/BLSTM_scansion_heldout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103509"
}
],
"symlink_target": ""
} |
from re import search
import rospy
from std_msgs.msg import String
from sys import exit
# Publisher used to forward control actions to the plant model.
pub = rospy.Publisher('control_action', String);#,queue_size=10);

# Most recent plant state payload received on the 'plant_state' topic.
state = ""
def callback(data):
    """Stores the latest plant state message payload in the module-level
    ``state`` variable."""
    global state
    state = data.data
def initialize_handshake(HOST, PORT): # setup socket and start the connection to the model
    """Initializes the ROS node and subscribes to the plant state topic.

    HOST and PORT appear to exist for interface compatibility with a
    socket-based client and are unused here.
    """
    rospy.init_node('controller', anonymous=True)
    rospy.Subscriber('plant_state', String, callback)
def process(HOST, PORT, GET, client_socketport=None):
    """Publishes a control action and returns the latest plant response.

    HOST, PORT and client_socketport are unused; they are kept for
    interface compatibility with the socket based client. GET is the
    encoded control request string.

    Returns the text between the last pair of square brackets of the most
    recent plant state; on any failure falls back to the request's
    ``time`` parameter value.
    """
    if rospy.is_shutdown():
        exit(0)
    try:
        pub.publish(GET + "&]")
        response = state
        m = search('\[(.+?)\]', response)
        if m:
            response = m.groups()[-1]
        # NOTE(review): result unused; presumably verifies the response is
        # splittable before returning it -- confirm before removing.
        data = response.split()
    # Fixed the Python-2-only "except Exception, err" syntax; "as" works
    # on both Python 2.6+ and Python 3.
    except Exception as err:
        rospy.logwarn("[Warning] in ROS client send and receive :%s\n " % err)
        response = (GET.split("time=")[1]).split("&")[0]
    return response
# Smoke test: run the controller node standalone and issue one request.
# Note: Python 2 print-statement syntax; this module is Python 2 only.
if __name__ == "__main__":
    initialize_handshake(None, None);
    print process(None, None, "/init");
| {
"content_hash": "92fb93df283a1b2d9254ed5f0eaa4649",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 93,
"avg_line_length": 31.18918918918919,
"alnum_prop": 0.6169844020797227,
"repo_name": "slremy/testingpubsub",
"id": "eb0766084b3afda11653a322d30f38dc45ff240f",
"size": "1166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myBallPlate/rosclient.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "32047"
},
{
"name": "HTML",
"bytes": "7708"
},
{
"name": "Java",
"bytes": "30511"
},
{
"name": "Makefile",
"bytes": "431"
},
{
"name": "Python",
"bytes": "95479"
}
],
"symlink_target": ""
} |
"""
tests.unit.payload_test
~~~~~~~~~~~~~~~~~~~~~~~
"""
import copy
import datetime
import logging
import salt.exceptions
import salt.payload
from salt.defaults import _Constant
from salt.utils import immutabletypes
from salt.utils.odict import OrderedDict
log = logging.getLogger(__name__)
def assert_no_ordered_dict(data):
    """Recursively assert that *data* contains no OrderedDict instances."""
    # Check OrderedDict first: it subclasses dict, so the order of the
    # isinstance checks matters.
    if isinstance(data, OrderedDict):
        raise AssertionError("Found an ordered dictionary")
    if isinstance(data, dict):
        children = list(data.values())
    elif isinstance(data, (list, tuple)):
        children = list(data)
    else:
        children = []
    for child in children:
        assert_no_ordered_dict(child)
def test_list_nested_odicts():
    """OrderedDicts nested inside lists must round-trip as plain dicts."""
    original = {"pillar": [OrderedDict(environment="dev")]}
    round_tripped = salt.payload.loads(salt.payload.dumps(original.copy()))
    assert_no_ordered_dict(round_tripped)
    assert original == round_tripped
def test_datetime_dump_load():
    """
    Check the custom datetime handler can understand itself
    """
    dtvalue = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
    # Datetimes are exercised both as key and value.
    idata = {dtvalue: dtvalue}
    sdata = salt.payload.dumps(idata.copy())
    odata = salt.payload.loads(sdata)
    # \x81 = msgpack fixmap with one entry; \xc7\x18N = msgpack ext8 of
    # length 0x18 with type 'N', carrying the compact timestamp string.
    assert (
        sdata
        == b"\x81\xc7\x18N20010203T04:05:06.000007\xc7\x18N20010203T04:05:06.000007"
    )
    assert idata == odata
def test_verylong_dump_load():
    """
    Test verylong encoder/decoder
    """
    original = {"jid": 20180227140750302662}
    decoded = salt.payload.loads(salt.payload.dumps(original.copy()))
    # Integers beyond msgpack's 64-bit range come back as strings.
    original["jid"] = "{}".format(original["jid"])
    assert original == decoded
def test_immutable_dict_dump_load():
    """
    Test immutable dict encoder/decoder
    """
    expected = {"dict": {"key": "value"}}
    frozen = immutabletypes.ImmutableDict(expected["dict"])
    decoded = salt.payload.loads(salt.payload.dumps({"dict": frozen}))
    # ImmutableDict serializes like, and deserializes to, a plain dict.
    assert expected == decoded
def test_immutable_list_dump_load():
    """
    Test immutable list encoder/decoder
    """
    idata = {"list": [1, 2, 3]}
    # ImmutableList must serialize like a plain list and deserialize to one.
    sdata = salt.payload.dumps({"list": immutabletypes.ImmutableList(idata["list"])})
    odata = salt.payload.loads(sdata)
    assert idata == odata
def test_immutable_set_dump_load():
    """
    Test immutable set encoder/decoder
    """
    idata = {"set": ["red", "green", "blue"]}
    # An ImmutableSet built from the list must deserialize back to a list.
    sdata = salt.payload.dumps({"set": immutabletypes.ImmutableSet(idata["set"])})
    odata = salt.payload.loads(sdata)
    assert idata == odata
def test_odict_dump_load():
    """
    Test odict just works. It wasn't until msgpack 0.2.0
    """
    data = OrderedDict()
    data["a"] = "b"
    data["y"] = "z"
    data["j"] = "k"
    data["w"] = "x"
    sdata = salt.payload.dumps({"set": data})
    odata = salt.payload.loads(sdata)
    # BUG FIX: the original line was ``assert {"set": dict(data)}, odata`` --
    # Python parses that as assert-with-message, so the (always truthy) dict
    # was the asserted expression and the test could never fail.
    assert {"set": dict(data)} == odata
def test_mixed_dump_load():
    """
    Test we can handle all exceptions at once

    Every custom-encoded type appears in one payload; ``edata`` spells out
    the builtin type each one is expected to decode to.
    """
    dtvalue = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
    od = OrderedDict()
    od["a"] = "b"
    od["y"] = "z"
    od["j"] = "k"
    od["w"] = "x"
    idata = {
        dtvalue: dtvalue,  # datetime
        "jid": 20180227140750302662,  # long int
        "dict": immutabletypes.ImmutableDict({"key": "value"}),  # immutable dict
        "list": immutabletypes.ImmutableList([1, 2, 3]),  # immutable list
        "set": immutabletypes.ImmutableSet(("red", "green", "blue")),  # immutable set
        "odict": od,  # odict
    }
    edata = {
        dtvalue: dtvalue,  # datetime, == input
        "jid": "20180227140750302662",  # string repr of long int
        "dict": {"key": "value"},  # builtin dict
        "list": [1, 2, 3],  # builtin list
        "set": ["red", "green", "blue"],  # builtin set
        "odict": dict(od),  # builtin dict
    }
    sdata = salt.payload.dumps(idata)
    odata = salt.payload.loads(sdata)
    assert edata == odata
def test_recursive_dump_load():
    """
    Test recursive payloads are (mostly) serialized
    """
    data = {"name": "roscivs"}
    data["data"] = data  # Data all the things!
    sdata = salt.payload.dumps(data)
    odata = salt.payload.loads(sdata)
    # the self-reference cannot be serialized; the assertion shows the
    # dumper substitutes a string mentioning "recursion" in its place
    assert "recursion" in odata["data"].lower()
def test_recursive_dump_load_with_identical_non_recursive_types():
    """
    If identical objects are nested anywhere, they should not be
    marked recursive unless they're one of the types we iterate
    over.
    """
    repeating = "repeating element"
    # identical *immutable* objects can legitimately be the same object
    # (interning); the recursion detector must not flag those
    data = {
        "a": "a",  # Test CPython implementation detail. Short
        "b": "a",  # strings are interned.
        "c": 13,  # So are small numbers.
        "d": 13,
        "fnord": repeating,
        # Let's go for broke and make a crazy nested structure
        "repeating": [
            [[[[{"one": repeating, "two": repeating}], repeating, 13, "a"]]],
            repeating,
            repeating,
            repeating,
        ],
    }
    # We need a nested dictionary to trigger the exception
    data["repeating"][0][0][0].append(data)
    # If we don't deepcopy the data it gets mutated
    sdata = salt.payload.dumps(copy.deepcopy(data))
    odata = salt.payload.loads(sdata)
    # Delete the recursive piece - it's served its purpose, and our
    # other test tests that it's actually marked as recursive.
    del odata["repeating"][0][0][0][-1], data["repeating"][0][0][0][-1]
    assert odata == data
def test_raw_vs_encoding_none():
    """
    Test that we handle the new raw parameter in 5.0.2 correctly based on
    encoding. When encoding is None loads should return bytes
    """
    # NOTE(review): the docstring promises bytes for encoding=None, but the
    # assertion below checks for str -- confirm which behavior is intended.
    dtvalue = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
    idata = {dtvalue: "strval"}
    sdata = salt.payload.dumps(idata.copy())
    odata = salt.payload.loads(sdata, encoding=None)
    assert isinstance(odata[dtvalue], str)
def test_raw_vs_encoding_utf8():
    """
    Test that we handle the new raw parameter in 5.0.2 correctly based on
    encoding. When encoding is utf-8 loads should return unicode
    """
    stamp = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
    packed = salt.payload.dumps({stamp: "strval"})
    unpacked = salt.payload.loads(packed, encoding="utf-8")
    assert isinstance(unpacked[stamp], str)
def test_constants():
    """
    Test that we handle encoding and decoding of constants.
    """
    constant = _Constant("Foo", "bar")
    round_tripped = salt.payload.loads(salt.payload.dumps(constant))
    assert round_tripped == constant
| {
"content_hash": "9924ca54fe18368ba5e4a9deb22becf2",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 86,
"avg_line_length": 30.09433962264151,
"alnum_prop": 0.6125391849529781,
"repo_name": "saltstack/salt",
"id": "ecd77ab4fcfbc85f7d3d3515e4e5eb4b342f58fc",
"size": "6380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/pytests/unit/test_payload.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
"""The database configuration CLI arguments helper."""
from plaso.lib import errors
from plaso.cli.helpers import interface
from plaso.cli.helpers import server_config
class DatabaseArgumentsHelper(interface.ArgumentsHelper):
    """Database configuration CLI arguments helper.

    Registers --user / --password / --db_name flags (plus the generic server
    flags) and copies the parsed values onto an output module.
    """

    NAME = u'database_config'
    DESCRIPTION = u'Argument helper for a database configuration.'

    # Fallback values used when the corresponding flag is not supplied.
    _DEFAULT_NAME = u'data'
    _DEFAULT_PASSWORD = u'toor'
    _DEFAULT_USERNAME = u'root'

    @classmethod
    def AddArguments(cls, argument_group):
        """Adds command line arguments the helper supports to an argument group.

        This function takes an argument parser or an argument group object and
        adds to it all the command line arguments this helper supports.

        Args:
          argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
              argparse group.
        """
        argument_group.add_argument(
            u'--user', dest=u'username', type=str, action=u'store',
            default=cls._DEFAULT_USERNAME, metavar=u'USERNAME', required=False,
            help=u'The username used to connect to the database.')
        argument_group.add_argument(
            u'--password', dest=u'password', type=str, action=u'store',
            default=cls._DEFAULT_PASSWORD, metavar=u'PASSWORD', help=(
                u'The password for the database user.'))
        argument_group.add_argument(
            u'--db_name', '--db-name', dest=u'db_name', action=u'store',
            type=str, default=cls._DEFAULT_NAME, required=False, help=(
                u'The name of the database to connect to.'))
        # host/port flags are shared with the other server-backed helpers
        server_config.ServerArgumentsHelper.AddArguments(argument_group)

    @classmethod
    def ParseOptions(cls, options, output_module):
        """Parses and validates options.

        Args:
          options (argparse.Namespace): parser options.
          output_module (OutputModule): output module to configure.

        Raises:
          BadConfigObject: when the output module object does not have the
              SetCredentials or SetDatabaseName methods.
        """
        # duck-type check: the helper only works with output modules that
        # accept credentials and a database name
        if not hasattr(output_module, u'SetCredentials'):
            raise errors.BadConfigObject(u'Unable to set username information.')
        if not hasattr(output_module, u'SetDatabaseName'):
            raise errors.BadConfigObject(u'Unable to set database information.')
        username = cls._ParseStringOption(
            options, u'username', default_value=cls._DEFAULT_USERNAME)
        password = cls._ParseStringOption(
            options, u'password', default_value=cls._DEFAULT_PASSWORD)
        name = cls._ParseStringOption(
            options, u'db_name', default_value=cls._DEFAULT_NAME)
        output_module.SetCredentials(username=username, password=password)
        output_module.SetDatabaseName(name)
        server_config.ServerArgumentsHelper.ParseOptions(options, output_module)
| {
"content_hash": "403aee4e26fce331f7bb6ccd6c6c5422",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 79,
"avg_line_length": 38.66197183098591,
"alnum_prop": 0.7092896174863388,
"repo_name": "dc3-plaso/plaso",
"id": "51992f763d35613d873640a055701532a238fd2d",
"size": "2769",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaso/cli/helpers/database_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1683"
},
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Python",
"bytes": "3875098"
},
{
"name": "Shell",
"bytes": "17861"
}
],
"symlink_target": ""
} |
"The primary module for maya commands and node classes"

import sys

import pymel as _pymel
# make this module reachable as pymel.core before the submodule imports below
_pymel.core = sys.modules[__name__]
import pymel.versions as _versions
import pymel.internal.startup as _startup
import pymel.internal as _internal

# will check for the presence of an initilized Maya / launch it
_startup.mayaInit()

import pymel.internal.factories as _factories
import pymel.internal.pmcmds as _pmcmds
_pmcmds.addAllWrappedCmds()

import pymel.api as _api

# re-export the command wrappers from each functional area
from general import *
from context import *
from system import *
from windows import *
from animation import *
from effects import *
from modeling import *
from rendering import *
from language import *
from other import *

# to allow lazy loading, we avoid import *
import nodetypes
import nodetypes as nt
import datatypes
import datatypes as dt
import uitypes
import uitypes as ui
import runtime

# initialize MEL
_startup.finalize()

import maya.cmds as cmds

# these modules are imported anyway so they should not be a performance hit
import pymel.util as util
import pymel.api as api

_logger = _internal.getLogger(__name__)

#: dictionary of plugins and the nodes and commands they register
_pluginData = {}

# handle on this module, used to attach plugin commands dynamically
_module = sys.modules[__name__]
def _pluginLoaded( *args ):
    """Callback run when Maya loads a plugin: registers the plugin's
    commands and depend nodes with pymel."""
    if len(args) > 1:
        # 2009 API callback, the args are ( [ pathToPlugin, pluginName ], clientData )
        pluginName = args[0][1]
    else:
        pluginName = args[0]
    if not pluginName:
        return
    _logger.debug("Plugin loaded: %s", pluginName)
    # record what this plugin registers so _pluginUnloaded can undo it
    _pluginData[pluginName] = {}
    try:
        commands = _pmcmds.pluginInfo(pluginName, query=1, command=1)
    except:
        _logger.error("Failed to get command list from %s", pluginName)
        commands = None
    # Commands
    if commands:
        _pluginData[pluginName]['commands'] = commands
        for funcName in commands:
            _logger.debug("Adding command: %s" % funcName)
            #__logger.debug("adding new command:", funcName)
            _factories.cmdlist[funcName] = _factories.cmdcache.getCmdInfoBasic( funcName )
            _pmcmds.addWrappedCmd(funcName)
            func = _factories.functionFactory( funcName )
            try:
                if func:
                    # expose the wrapper on this module (and pymel.all if
                    # it has already been imported)
                    setattr( _module, funcName, func )
                    if 'pymel.all' in sys.modules:
                        setattr( sys.modules['pymel.all'], funcName, func )
                else:
                    _logger.warning( "failed to create function" )
            except Exception, msg:
                _logger.warning("exception: %s" % str(msg) )
    # Nodes
    mayaTypes = cmds.pluginInfo(pluginName, query=1, dependNode=1)
    #apiEnums = cmds.pluginInfo(pluginName, query=1, dependNodeId=1)
    if mayaTypes :
        # PyNode creation may be deferred until the scene finishes loading
        # (see the isReadingFile branch below); this closure does the work.
        def addPluginPyNodes(*args):
            try:
                id = _pluginData[pluginName]['callbackId']
                if id is not None:
                    _api.MEventMessage.removeCallback( id )
                    if hasattr(id, 'disown'):
                        id.disown()
            except KeyError:
                _logger.warning("could not find callback id!")
            _pluginData[pluginName]['dependNodes'] = mayaTypes
            for mayaType in mayaTypes:
                _logger.debug("Adding node: %s" % mayaType)
                inheritance = _factories.getInheritance( mayaType )
                if not util.isIterable(inheritance):
                    _logger.warn( "could not get inheritance for mayaType %s" % mayaType)
                else:
                    #__logger.debug(mayaType, inheritance)
                    #__logger.debug("adding new node:", mayaType, apiEnum, inheritence)
                    # some nodes in the hierarchy for this node might not exist, so we cycle through all
                    parent = 'dependNode'
                    for node in inheritance:
                        nodeName = _factories.addPyNode( nodetypes, node, parent )
                        parent = node
                        if 'pymel.all' in sys.modules:
                            # getattr forces loading of Lazy object
                            setattr( sys.modules['pymel.all'], nodeName, getattr(nodetypes,nodeName) )
        # evidently isOpeningFile is not avaiable in maya 8.5 sp1. this could definitely cause problems
        if _api.MFileIO.isReadingFile() or ( _versions.current() >= _versions.v2008 and _api.MFileIO.isOpeningFile() ):
            #__logger.debug("pymel: Installing temporary plugin-loaded callback")
            id = _api.MEventMessage.addEventCallback( 'SceneOpened', addPluginPyNodes )
            _pluginData[pluginName]['callbackId'] = id
            # scriptJob not respected in batch mode, had to use _api
            #cmds.scriptJob( event=('SceneOpened',doSomethingElse), runOnce=1 )
        else:
            # add the callback id as None so that if we fail to get an id in addPluginPyNodes we know something is wrong
            _pluginData[pluginName]['callbackId'] = None
            addPluginPyNodes()
def _pluginUnloaded(*args):
    """Callback run when Maya unloads a plugin: removes the commands and
    PyNode classes that _pluginLoaded registered for it."""
    # 2009 API callback, the args are
    # ( [ pluginName, pathToPlugin ], clientData ) OR
    # ( [ pathToPlugin ], clientData )
    pluginName = args[0][0] if len(args) > 1 else args[0]
    _logger.debug("Plugin unloaded: %s" % pluginName)
    try:
        registered = _pluginData.pop(pluginName)
    except KeyError:
        # nothing was recorded for this plugin, so nothing to clean up
        return
    # Commands
    cmdNames = registered.pop('commands', [])
    if cmdNames:
        _logger.debug("Removing commands: %s", ', '.join(cmdNames))
        for cmdName in cmdNames:
            try:
                _pmcmds.removeWrappedCmd(cmdName)
                _module.__dict__.pop(cmdName)
            except KeyError:
                _logger.warn("Failed to remove %s from module %s" % (cmdName, _module.__name__))
    # Nodes
    nodeNames = registered.pop('dependNodes', [])
    if nodeNames:
        _logger.debug("Removing nodes: %s" % ', '.join(nodeNames))
        for nodeName in nodeNames:
            _factories.removePyNode(nodetypes, nodeName)
# Flags guarding against double-registration of the plugin load/unload
# callbacks.  NOTE: ``global`` at module scope is a no-op; the statements are
# kept for symmetry with the functions that assign these names.
global _pluginLoadedCB
global _pluginUnloadedCB
_pluginLoadedCB = None
_pluginUnloadedCB = None
def _installCallbacks():
    """install the callbacks that trigger new nodes and commands to be added to pymel when a
    plugin loads.  This is called from pymel.__init__
    """
    global _pluginLoadedCB
    if _pluginLoadedCB is None:
        _pluginLoadedCB = True
        _logger.debug("Adding pluginLoaded callback")
        #_pluginLoadedCB = pluginLoadedCallback(module)
        if _versions.current() >= _versions.v2009:
            # 2009+ provides an API callback that receives the plugin name
            id = _api.MSceneMessage.addStringArrayCallback( _api.MSceneMessage.kAfterPluginLoad, _pluginLoaded )
            if hasattr(id, 'disown'):
                id.disown()
        else:
            # BUG: this line has to be a string, because using a function causes a 'pure virtual' error every time maya shuts down
            cmds.loadPlugin( addCallback='import pymel.core; pymel.core._pluginLoaded("%s")' )
    else:
        _logger.debug("PluginLoaded callback already exists")
    global _pluginUnloadedCB
    if _pluginUnloadedCB is None:
        _pluginUnloadedCB = True
        # BUG: autodesk still has not add python callback support, and calling this as MEL is not getting the plugin name passed to it
        #mel.unloadPlugin( addCallback='''python("import pymel; pymel._pluginUnloaded('#1')")''' )
        if _versions.current() >= _versions.v2009:
            _logger.debug("Adding pluginUnloaded callback")
            id = _api.MSceneMessage.addStringArrayCallback( _api.MSceneMessage.kAfterPluginUnload, _pluginUnloaded )
            if hasattr(id, 'disown'):
                id.disown()
    else:
        _logger.debug("PluginUnloaded callback already exists")
    # add commands and nodes for plugins loaded prior to importing pymel
    preLoadedPlugins = cmds.pluginInfo( q=1, listPlugins=1 )
    if preLoadedPlugins:
        _logger.info("Updating pymel with pre-loaded plugins: %s" % ', '.join( preLoadedPlugins ))
        for plugin in preLoadedPlugins:
            _pluginLoaded( plugin )

# register the callbacks (and process already-loaded plugins) at import time
_installCallbacks()
| {
"content_hash": "f7e40caa673d0e0b3760f00eeaa034bf",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 134,
"avg_line_length": 34.63865546218487,
"alnum_prop": 0.6146288209606987,
"repo_name": "cgrebeld/pymel",
"id": "b9f9513bee47034ef3d73601c28be5c858c7ccaa",
"size": "8244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymel/core/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2384715"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: creates the ``Upload`` model.

    NOTE: migration files are part of the applied schema history; do not
    edit the operations once this migration has been deployed.
    """

    dependencies = [
        # Upload.user points at the (possibly swapped) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Upload',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('local_name', models.CharField(max_length=255)),
                ('remote_path', models.CharField(max_length=255)),
                ('type', models.CharField(max_length=255)),
                ('uploaded', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| {
"content_hash": "9705f509b40a805e1156704ecde3c96a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 114,
"avg_line_length": 32.42857142857143,
"alnum_prop": 0.5715859030837004,
"repo_name": "google/mirandum",
"id": "e36f584ae74b58d2a869a943d5deda526fb27a30",
"size": "1545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alerts/upload/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9472"
},
{
"name": "Elixir",
"bytes": "574"
},
{
"name": "HTML",
"bytes": "122101"
},
{
"name": "JavaScript",
"bytes": "19438"
},
{
"name": "Jinja",
"bytes": "4124"
},
{
"name": "Python",
"bytes": "398732"
},
{
"name": "Shell",
"bytes": "3296"
}
],
"symlink_target": ""
} |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import json
import datetime
import enum
from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from flask import current_app as app
from sqlalchemy.orm import relationship
from app.profile.models import Publisher
from app.database import db
class BitStore(object):
    """
    This model responsible for interaction with S3.

    All S3 access goes through the client and bucket configured on the
    Flask app (``app.config['S3']`` / ``app.config['S3_BUCKET_NAME']``).
    Objects live under ``metadata/<publisher>/<package>/_v/<version>/``.
    """
    prefix = 'metadata'

    def __init__(self, publisher, package, version='latest', body=None):
        self.publisher = publisher
        self.package = package
        self.version = version
        self.body = body

    def validate(self):
        """Return True if ``self.body`` is JSON carrying a non-empty name."""
        data = json.loads(self.body)
        if 'name' not in data:
            return False
        if data['name'] == '':
            return False
        return True

    def save_metadata(self, acl='public-read'):
        """
        This method put metadata object to S3

        :param acl: canned ACL applied to the stored object
        """
        bucket_name = app.config['S3_BUCKET_NAME']
        s3_client = app.config['S3']
        key = self.build_s3_key('datapackage.json')
        s3_client.put_object(Bucket=bucket_name, Key=key,
                             Body=self.body, ACL=acl)

    def get_metadata_body(self):
        """
        This method retrieve datapackage.json from s3 for specific
        publisher and package

        :return: The String value of the datapackage.json or None of not found
        """
        key = self.build_s3_key('datapackage.json')
        return self.get_s3_object(key)

    def get_s3_object(self, key):
        """
        This method retrieve any object from s3 for a given key.

        :param key: Object key to be retrieved
        :return: The String value of the object or None of not found
        """
        try:
            bucket_name = app.config['S3_BUCKET_NAME']
            s3_client = app.config['S3']
            response = s3_client.get_object(Bucket=bucket_name, Key=key)
            return response['Body'].read()
        except Exception:
            return None

    def get_readme_object_key(self):
        """
        Search the versioned prefix for an object whose key contains
        "readme" (case-insensitive).

        :return: Value of the readme key if found else None
        :rtype: None or Str
        """
        readme_key = None
        prefix = self.build_s3_key('')
        bucket_name = app.config['S3_BUCKET_NAME']
        s3_client = app.config['S3']
        response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=prefix)
        # 'Contents' is absent when nothing matches the prefix; the original
        # unguarded response['Contents'] raised KeyError in that case
        for content in response.get('Contents', []):
            if 'readme' in content['Key'].lower():
                readme_key = content['Key']
        return readme_key

    def get_all_metadata_name_for_publisher(self):
        """Return every object key stored under this publisher/package."""
        bucket_name = app.config['S3_BUCKET_NAME']
        s3_client = app.config['S3']
        keys = []
        prefix = self.build_s3_base_prefix()
        list_objects = s3_client.list_objects(Bucket=bucket_name,
                                              Prefix=prefix)
        if list_objects is not None and 'Contents' in list_objects:
            # reuse the listing we already fetched instead of re-querying S3
            for ob in list_objects['Contents']:
                keys.append(ob['Key'])
        return keys

    def build_s3_key(self, path):
        """Full object key for *path* under the versioned prefix."""
        return "{prefix}/{path}"\
            .format(prefix=self.build_s3_versioned_prefix(),
                    path=path)

    def build_s3_base_prefix(self):
        """Prefix shared by every version of this package."""
        return "{prefix}/{publisher}/{package}".\
            format(prefix=self.prefix,
                   publisher=self.publisher,
                   package=self.package)

    def build_s3_versioned_prefix(self):
        """Prefix for this package at ``self.version``."""
        return "{prefix}/_v/{version}". \
            format(prefix=self.build_s3_base_prefix(),
                   version=self.version)

    def build_s3_object_url(self, domain_name, path):
        """Public URL of *path*, served from the ``bits.`` subdomain."""
        return 'https://bits.{base_url}/{key}'.\
            format(base_url=domain_name,
                   key=self.build_s3_key(path))

    def generate_pre_signed_post_object(self, path, md5,
                                        acl='public-read',
                                        file_type='binary/octet-stream'):
        """
        This method produce required data to upload file from client side
        for uploading data at client side

        :param path: The relative path of the object
        :param md5: The md5 hash of the file to be uploaded
        :param acl: The object ACL default is public_read
        :param file_type: The type of the file, default is binary/octet-stream
        :return: dict containing S3 url and post params
        """
        bucket_name = app.config['S3_BUCKET_NAME']
        s3_client = app.config['S3']
        key = self.build_s3_key(path)
        post = s3_client.generate_presigned_post(Bucket=bucket_name,
                                                 Key=key,
                                                 Fields={
                                                     'acl': acl,
                                                     'Content-MD5': str(md5),
                                                     'Content-Type': file_type},
                                                 Conditions=[
                                                     # BUG FIX: was hard-coded
                                                     # to {"acl": "public-read"},
                                                     # which rejected any upload
                                                     # using a different *acl*
                                                     # argument; the condition
                                                     # must match the field.
                                                     {"acl": acl},
                                                     ["starts-with", "$Content-Type", ""],
                                                     ["starts-with", "$Content-MD5", ""]
                                                 ])
        return post

    def delete_data_package(self):
        """
        This method will delete all objects with the prefix
        generated by :func:`~app.mod_api.models.build_s3_prefix`.
        This method is used for Hard delete data packages.

        :return: Status True if able to delete or False if exception
        """
        try:
            bucket_name = app.config['S3_BUCKET_NAME']
            s3_client = app.config['S3']
            keys = []
            list_objects = s3_client.list_objects(Bucket=bucket_name,
                                                  Prefix=self.build_s3_base_prefix())
            if list_objects is not None and 'Contents' in list_objects:
                # reuse the listing instead of issuing a second request
                for ob in list_objects['Contents']:
                    keys.append(dict(Key=ob['Key']))
            s3_client.delete_objects(Bucket=bucket_name, Delete=dict(Objects=keys))
            return True
        except Exception as e:
            app.logger.error(e)
            return False

    def change_acl(self, acl):
        """
        This method will change access for all objects with the prefix
        generated by :func:`~app.mod_api.models.build_s3_prefix`.
        This method is used for Soft delete data packages.

        :return: Status True if able to change or False if exception
        """
        try:
            bucket_name = app.config['S3_BUCKET_NAME']
            s3_client = app.config['S3']
            keys = []
            list_objects = s3_client.list_objects(Bucket=bucket_name,
                                                  Prefix=self.build_s3_base_prefix())
            if list_objects is not None and 'Contents' in list_objects:
                # reuse the listing instead of issuing a second request
                for ob in list_objects['Contents']:
                    keys.append(ob['Key'])
            for key in keys:
                s3_client.put_object_acl(Bucket=bucket_name, Key=key,
                                         ACL=acl)
        except Exception as e:
            app.logger.error(e)
            return False
        return True

    def copy_to_new_version(self, version):
        """Copy every object of the 'latest' version under *version*."""
        try:
            bucket_name = app.config['S3_BUCKET_NAME']
            s3_client = app.config['S3']
            latest_keys = []
            list_objects = s3_client.list_objects(Bucket=bucket_name,
                                                  Prefix=self.build_s3_versioned_prefix())
            if list_objects is not None and 'Contents' in list_objects:
                # reuse the listing instead of issuing a second request
                for ob in list_objects['Contents']:
                    latest_keys.append(ob['Key'])
            for key in latest_keys:
                versioned_key = key.replace('/latest/', '/{0}/'.format(version))
                copy_source = {'Bucket': bucket_name, 'Key': key}
                s3_client.copy_object(Bucket=bucket_name,
                                      Key=versioned_key,
                                      CopySource=copy_source)
            return True
        except Exception as e:
            app.logger.error(e)
            return False
class PackageStateEnum(enum.Enum):
    # Lifecycle states persisted in Package.status (soft delete keeps the
    # row and flips the state to ``deleted``).
    active = "ACTIVE"
    deleted = "DELETED"
class Package(db.Model):
    """
    This class is DB model for storing package data
    """
    __tablename__ = "package"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    name = db.Column(db.TEXT, index=True)
    version = db.Column(db.TEXT, index=True, default='latest')
    descriptor = db.Column(db.JSON)
    status = db.Column(db.Enum(PackageStateEnum, native_enum=False),
                       index=True, default=PackageStateEnum.active)
    private = db.Column(db.BOOLEAN, default=False)
    readme = db.Column(db.TEXT)
    publisher_id = db.Column(db.Integer, ForeignKey('publisher.id'))
    publisher = relationship("Publisher", back_populates="packages",
                             cascade="save-update, merge, delete, delete-orphan",
                             single_parent=True)

    # a publisher may not have two rows with the same (name, version)
    __table_args__ = (
        UniqueConstraint("name", "version", "publisher_id"),
    )

    @staticmethod
    def create_or_update_version(publisher_name, package_name, version):
        """Snapshot the 'latest' row of a package under *version*.

        Copies the attributes of the 'latest' row onto the row pinned at
        *version*, creating that row when it does not exist yet.

        :param publisher_name: publisher name
        :param package_name: package name
        :param version: version label to write the snapshot under
        :return: True on success, False if the lookup or commit failed
        """
        try:
            data_latest = Package.query.join(Publisher). \
                filter(Publisher.name == publisher_name,
                       Package.name == package_name,
                       Package.version == 'latest').one()
            instance = Package.query.join(Publisher). \
                filter(Publisher.name == publisher_name,
                       Package.name == package_name,
                       Package.version == version).first()
            update_props = ['name', 'version', 'descriptor', 'status',
                            'private', 'readme', 'publisher_id']
            if instance is None:
                instance = Package()
            for update_prop in update_props:
                setattr(instance, update_prop, getattr(data_latest, update_prop))
            # the loop above copied version='latest'; pin it to the snapshot
            instance.version = version
            db.session.add(instance)
            db.session.commit()
            return True
        except Exception as e:
            app.logger.error(e)
            return False

    @staticmethod
    def create_or_update(name, publisher_name, **kwargs):
        """
        This method creates data package of update data package attributes

        :param name: package name
        :param publisher_name: publisher name
        :param kwargs: package attribute names
        """
        pub_id = Publisher.query.filter_by(name=publisher_name).one().id
        instance = Package.query.join(Publisher)\
            .filter(Package.name == name,
                    Publisher.name == publisher_name).first()
        if not instance:
            instance = Package(name=name)
            instance.publisher_id = pub_id
        for key, value in kwargs.items():
            setattr(instance, key, value)
        db.session.add(instance)
        db.session.commit()

    @staticmethod
    def change_status(publisher_name, package_name, status=PackageStateEnum.active):
        """
        This method changes status of the data package. This method used
        for soft delete the data package

        :param publisher_name: publisher name
        :param package_name: package name
        :param status: status of the package
        :return: If success True else False
        """
        try:
            data = Package.query.join(Publisher). \
                filter(Publisher.name == publisher_name,
                       Package.name == package_name).one()
            data.status = status
            db.session.add(data)
            db.session.commit()
            return True
        except Exception as e:
            app.logger.error(e)
            return False

    @staticmethod
    def delete_data_package(publisher_name, package_name):
        """
        This method deletes the data package. This method used
        for hard delete the data package

        :param publisher_name: publisher name
        :param package_name: package name
        :return: If success True else False
        """
        try:
            data = Package.query.join(Publisher). \
                filter(Publisher.name == publisher_name,
                       Package.name == package_name).one()
            package_id = data.id
            # NOTE(review): re-fetching by primary key looks redundant --
            # ``data`` is already the row being deleted; confirm and simplify.
            meta_data = Package.query.get(package_id)
            db.session.delete(meta_data)
            db.session.commit()
            return True
        except Exception as e:
            app.logger.error(e)
            return False

    @staticmethod
    def get_package(publisher_name, package_name):
        """
        This method returns certain data packages belongs to a publisher

        :param publisher_name: publisher name
        :param package_name: package name
        :return: data package object based on the filter, or None on error
        """
        try:
            instance = Package.query.join(Publisher) \
                .filter(Package.name == package_name,
                        Publisher.name == publisher_name).first()
            return instance
        except Exception as e:
            app.logger.error(e)
            return None
| {
"content_hash": "42d5bad9cc2e84713fd3330cabc5c1e4",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 90,
"avg_line_length": 38.978142076502735,
"alnum_prop": 0.5411467825599328,
"repo_name": "subhankarb/dpr-api",
"id": "a4ea854f2a0dc7cb6f64dd2bab44e1be40b6c094",
"size": "14290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/package/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "201797"
}
],
"symlink_target": ""
} |
import Player
import GameEngine
from BuildInfo import BuildInfo
import HostControl
import socket
# import pdb; pdb.set_trace()
# functions to broadcast gamestate changes
def broadcast_message(message):
    """Send *message* to every connected player."""
    for participant in game_engine.game_state.player_array:
        host.send_data(participant.netid[0], message)
def broadcast_resources():
    """Push each player's current resource counts to their client."""
    for participant in game_engine.game_state.player_array:
        inv = participant.inventory
        payload = 'rsrc{} {} {} {} {}'.format(
            inv.brick, inv.grain, inv.lumber, inv.ore, inv.wool)
        host.send_data(participant.netid[0], payload)
def broadcast_vps():
    """Recompute victory points and announce every player's total."""
    game_engine.update_vps()
    for participant in game_engine.game_state.player_array:
        broadcast_message('pvps{}'.format(participant.vps))
def make_build_info(request):
    """Translate a wire request (4-char kind + 3 coordinate digits) into a
    BuildInfo; 'pass' yields None and an unknown kind raises ValueError."""
    kind = request[:4]
    if kind == 'pass':
        return None
    # (is_road, is_settlement, is_devcard) flags per build kind
    flags_by_kind = {
        'road': (True, False, False),
        'sett': (False, True, False),
        'city': (True, True, False),
    }
    if kind in flags_by_kind:
        is_road, is_settlement, is_devcard = flags_by_kind[kind]
        return BuildInfo(int(request[4]), int(request[5]), int(request[6]),
                         is_road, is_settlement, is_devcard)
    if kind == 'devc':
        return BuildInfo(0, 0, 0, False, False, True)
    raise ValueError('Bad BuildInfo() request.')
# get player connections
host = HostControl.HostControl(('localhost', 8000))
player_addrs = host.get_conns()

# initialize players
player_array = []
netid = player_addrs[0]
player1 = Player.Player(netid, 'blue', 'blue')
netid = player_addrs[1]
player2 = Player.Player(netid, 'red', 'red')
player_array.append(player1)
player_array.append(player2)
# give everyone a huge starting stockpile so building is never blocked
for player in player_array:
    player.inventory.lumber += 400
    player.inventory.brick += 400
    player.inventory.wool += 200
    player.inventory.grain += 200
    player.inventory.ore += 200

# initialize game
game_engine = GameEngine.GameEngine(player_array)

# build initial 'beginner' setup
# (broadcast strings follow the pattern <kind><x><y><z><player-number>,
# consistent with the BuildInfo coordinates below)
game_engine.build(BuildInfo(0, 0, 2, True, False, False))
broadcast_message('road0020')
game_engine.build(BuildInfo(3, 2, 2, True, False, False))
broadcast_message('road3220')
game_engine.build(BuildInfo(0, 0, 3, False, True, False))
broadcast_message('sett0030')
game_engine.build(BuildInfo(3, 2, 2, False, True, False))
broadcast_message('sett3220')
game_engine.next_player()
game_engine.build(BuildInfo(1, 1, 1, True, False, False))
broadcast_message('road1111')
game_engine.build(BuildInfo(2, 2, 2, True, False, False))
broadcast_message('road2221')
game_engine.build(BuildInfo(1, 1, 2, False, True, False))
broadcast_message('sett1121')
game_engine.build(BuildInfo(2, 2, 2, False, True, False))
broadcast_message('sett2221')
game_engine.next_player()

# start first player's turn and roll first dice
game_engine.game_state.startup = False
host.send_data(game_engine.game_state.current_player.netid[0], 'strt')
roll = game_engine.dice_roll()
broadcast_message('roll{}'.format(roll))
broadcast_resources()
broadcast_vps()

# game loop
while True:
    if not host.requests.empty():
        print('<<< reading request')
        current_request = host.requests.get()
        requester_netid = current_request[0]
        print(current_request)
        # requests from anyone other than the current player are ignored
        if requester_netid == game_engine.game_state.current_player.netid:
            build_info = make_build_info(current_request[1])
            if build_info is not None:
                result = game_engine.build(build_info)
                if result is True:
                    print('>>> broadcasting success')
                    broadcast_resources()
                    broadcast_vps()
                    broadcast_message(current_request[1] + str(game_engine.game_state.current_player_number))
                elif result == 'city':
                    print('>>> broadcasting success')
                    new_message = 'city' + current_request[1][4::] + str(game_engine.game_state.current_player_number)
                    broadcast_message(new_message)
                    broadcast_vps()
                    broadcast_resources()
                else:
                    print('>>> sending error')
                    host.send_data(current_request[0][0], 'errr')
            else:
                # a 'pass' request: end the turn, advance to the next
                # player, roll for them and announce the new state
                print('>>> ending {}\'s turn'.format(game_engine.game_state.current_player.name))
                host.send_data(current_request[0][0], 'endt')
                game_engine.next_player()
                print('>>> updating resources and alerting players')
                roll = game_engine.dice_roll()
                print('>>> sending roll: {}'.format(roll))
                broadcast_resources()
                broadcast_vps()
                broadcast_message('turn{}'.format(game_engine.game_state.current_player_number))
                broadcast_message('roll{}'.format(roll))
                print('>>> starting {}\'s turn'.format(game_engine.game_state.current_player.name))
                host.send_data(game_engine.game_state.current_player.netid[0], 'strt')
| {
"content_hash": "49579403dec66b7716ab524eae0af268",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 118,
"avg_line_length": 41.275590551181104,
"alnum_prop": 0.6152231972529569,
"repo_name": "stoksc/Settlers-of-definitely-not-Katan",
"id": "1d0a0c57285a5e7c435f220716f67dd14797eee3",
"size": "5242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Implementation/Host.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39114"
}
],
"symlink_target": ""
} |
'''
Module: deinflect.py
Desc: try to discern the stem of japanese inflected words to aid in lookups.
Author: john.oneil
Email: oneil.john@gmail.com
DATE: Wed May 7th 2014
'''
import os
import argparse
import re
from transliterate import romaji2hiragana
from jpn.exceptions import NonUnicodeInputException
class VerbalTransform:
    """Strips one verbal inflection off a hiragana word, recovering its stem.

    Each instance pairs a dictionary-form ending (``root``) with four regex
    patterns matching inflected endings (polite, negative, -te, perfect).
    Each public method returns the candidate stem when the word carries that
    ending, or ``None`` when it does not.
    """
    def __init__(self, root, polite_ending, negative_ending, te_ending, perfect_ending):
        self.root = root
        self.polite = polite_ending
        self.negative = negative_ending
        self.te = te_ending
        self.perfect = perfect_ending
    def _deinflect(self, pattern, hiragana):
        # Shared helper (the four public methods were identical except for
        # the pattern attribute): swap a matching inflected ending for the
        # dictionary-form root, or signal "no match" with None.
        if re.search(pattern, hiragana):
            return re.sub(pattern, self.root, hiragana)
        return None
    def Polite(self, hiragana):
        """Strip a polite (-masu style) ending; return the stem or None."""
        return self._deinflect(self.polite, hiragana)
    def Negative(self, hiragana):
        """Strip a negative (-nai style) ending; return the stem or None."""
        return self._deinflect(self.negative, hiragana)
    def Te(self, hiragana):
        """Strip a -te ending; return the stem or None."""
        return self._deinflect(self.te, hiragana)
    def Perfect(self, hiragana):
        """Strip a perfect (-ta style) ending; return the stem or None."""
        return self._deinflect(self.perfect, hiragana)
# De-inflection table for verbs (Python 2 ``ur''`` raw-unicode literals; this
# module targets Python 2).  Each row: dictionary-form ending, then regexes
# for the polite, negative, -te and perfect (-ta) endings that should collapse
# back to that root.  Rows are tried in order by guess_stem().
VerbalTransforms = [
    VerbalTransform(u'う', ur'います$', ur'わない$', ur'って$', ur'った$'),
    VerbalTransform(u'つ', ur'ちます$', ur'たない$', ur'って$', ur'った$'),
    VerbalTransform(u'る', ur'ります$', ur'らない$', ur'って$', ur'った$'),
    VerbalTransform(u'く', ur'きます$', ur'かない$', ur'いて$', ur'いた$'),
    VerbalTransform(u'ぐ', ur'ぎます$', ur'がない$', ur'いで$', ur'いだ$'),
    VerbalTransform(u'ぬ', ur'にます$', ur'なない$', ur'んで$', ur'んだ$'),
    VerbalTransform(u'ぶ', ur'びます$', ur'ばない$', ur'んで$', ur'んだ$'),
    VerbalTransform(u'む', ur'みます$', ur'まない$', ur'んで$', ur'んだ$'),
    VerbalTransform(u'す', ur'します$', ur'さない$', ur'して$', ur'した$'),
    # Ichidan (-ru) verbs: bare ます/ない/て/た endings.
    VerbalTransform(u'る', ur'ます$', ur'ない$', ur'て$', ur'た$'),
    #VerbalTransform(u'る', ur'ます$', ur'ない$', ur'て$', ur'た$'),
    # Irregular verbs する and くる.
    # NOTE(review): the する row passes ur'しない$' in the *polite* slot —
    # looks like a copy-paste slip for ur'します$'; confirm before changing.
    VerbalTransform(u'する', ur'しない$', ur'しない$', ur'して$', ur'した$'),
    VerbalTransform(u'くる', ur'きます$', ur'こない$', ur'きて$', ur'きた$'),
]
class AdjectivalTransform(object):
    """Strips an i-adjective inflection, recovering the dictionary form.

    Pairs the dictionary-form ending (``root``) with regexes for the
    negative, past and negative-past endings.  Each public method returns
    the candidate stem when the ending matches, or ``None`` otherwise.
    """
    def __init__(self, root, negative, past, negative_past):
        self.root = root
        self.negative = negative
        self.past = past
        self.negative_past = negative_past
    def _deinflect(self, pattern, hiragana):
        # Shared helper (the three public methods were identical except for
        # the pattern attribute): replace a matching inflected ending with
        # the dictionary-form root, or return None when it does not match.
        if re.search(pattern, hiragana):
            return re.sub(pattern, self.root, hiragana)
        return None
    def Negative(self, hiragana):
        """Strip a negative (-kunai style) ending; return the stem or None."""
        return self._deinflect(self.negative, hiragana)
    def Past(self, hiragana):
        """Strip a past (-katta style) ending; return the stem or None."""
        return self._deinflect(self.past, hiragana)
    def NegativePast(self, hiragana):
        """Strip a negative-past (-kunakatta style) ending; return the stem or None."""
        return self._deinflect(self.negative_past, hiragana)
# De-inflection table for i-adjectives (Python 2 ``ur''`` literals):
# 〜くない / 〜かった / 〜くなかった all collapse back to the 〜い form.
AdjectivalTransforms = [
    AdjectivalTransform(u'い', ur'くない$', ur'かった$', ur'くなかった$'),
]
def guess_stem(word):
    """given single input word, try to discern japanese word stem

    Returns a tuple: the input converted to hiragana first, followed by
    every candidate stem produced by the adjectival and verbal
    de-inflection tables (duplicates possible; callers look all of them
    up in a dictionary and keep the best hit).
    Raises NonUnicodeInputException for non-unicode input (Python 2 API).
    """
    #ensure input is a unicode string
    if not isinstance(word, unicode):
        raise NonUnicodeInputException('Input argument {word} is not unicode.'.format(word=word))
    #1. input word should have no spaces
    word = word.strip().lower()
    #2b Convert filtered word to hiragana via romkan
    hiragana = romaji2hiragana(word)
    results = [hiragana]
    #3: We've got a simple single word in hiragana. First test against adjectival endings
    for tx in AdjectivalTransforms:
        negative = tx.Negative(hiragana)
        if negative: results.append(negative)
        past = tx.Past(hiragana)
        if past: results.append(past)
        # BUG FIX: the original called tx.Past() a second time here, so the
        # negative-past ("-kunakatta") table entry was never consulted.
        negative_past = tx.NegativePast(hiragana)
        if negative_past: results.append(negative_past)
    #4: No hits for adjetive stem, test against verbal endings
    for tx in VerbalTransforms:
        polite = tx.Polite(hiragana)
        if polite: results.append(polite)
        negative = tx.Negative(hiragana)
        if negative: results.append(negative)
        te = tx.Te(hiragana)
        if te: results.append(te)
        perfect = tx.Perfect(hiragana)
        if perfect: results.append(perfect)
    #5: Return input word and candidate stems as tuple, to do dictionary lookups on all.
    #The best hit will be the longest(?) exact match of a suggested stem
    return tuple(results)
def main():
    """CLI entry point: stem each word given on the command line.

    Prints the hiragana form plus every candidate stem, one per line.
    Python 2 only (bytes ``decode``/``encode`` round-trip and the
    ``print`` statement below).
    """
    parser = argparse.ArgumentParser(description='Guess as uninflected stem of input japanese words (verbs or adjectives).')
    parser.add_argument('words', nargs='*', help='Words to attempt stemming on')
    args = parser.parse_args()
    for word in args.words:
        #best practice: decode early, encode late...
        word = word.decode('utf-8')
        results = guess_stem(word)
        for result in results:
            print result.encode('utf-8')
if __name__ == "__main__":
    main()
| {
"content_hash": "d1ae4321eb3601cd3c8b5735a9b385c8",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 122,
"avg_line_length": 30.582781456953644,
"alnum_prop": 0.6658726721524469,
"repo_name": "johnoneil/jpn",
"id": "89ed0b53cdc933592f82bb53b4da0b95a08fde1c",
"size": "4990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jpn/deinflect.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24991"
}
],
"symlink_target": ""
} |
import abc
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class VnflcmMgmtAbstractDriver(metaclass=abc.ABCMeta):
    """Interface contract for VNF lifecycle-management (MGMT) drivers.

    Concrete drivers implement a pre-hook (``*_start``) and post-hook
    (``*_end``) for each LCM operation; most hooks receive the request
    object plus the grant / grant_request pair, and all accept extra
    keyword arguments for forward compatibility.
    """
    @abc.abstractmethod
    def get_type(self):
        """Return one of predefined type of the hosting vnf drivers."""
    @abc.abstractmethod
    def get_name(self):
        """Return a symbolic name for the service VM plugin."""
    @abc.abstractmethod
    def get_description(self):
        """Return a human-readable description of the driver."""
    @abc.abstractmethod
    def instantiate_start(self, context, vnf_instance,
                          instantiate_vnf_request, grant,
                          grant_request, **kwargs):
        """Hook invoked before VNF instantiation."""
    @abc.abstractmethod
    def instantiate_end(self, context, vnf_instance,
                        instantiate_vnf_request, grant,
                        grant_request, **kwargs):
        """Hook invoked after VNF instantiation."""
    @abc.abstractmethod
    def terminate_start(self, context, vnf_instance,
                        terminate_vnf_request, grant,
                        grant_request, **kwargs):
        """Hook invoked before VNF termination."""
    @abc.abstractmethod
    def terminate_end(self, context, vnf_instance,
                      terminate_vnf_request, grant,
                      grant_request, **kwargs):
        """Hook invoked after VNF termination."""
    @abc.abstractmethod
    def scale_start(self, context, vnf_instance,
                    scale_vnf_request, grant,
                    grant_request, **kwargs):
        """Hook invoked before a VNF scale operation."""
    @abc.abstractmethod
    def scale_end(self, context, vnf_instance,
                  scale_vnf_request, grant,
                  grant_request, **kwargs):
        """Hook invoked after a VNF scale operation."""
    @abc.abstractmethod
    def heal_start(self, context, vnf_instance,
                   heal_vnf_request, grant,
                   grant_request, **kwargs):
        """Hook invoked before a VNF heal operation."""
    @abc.abstractmethod
    def heal_end(self, context, vnf_instance,
                 heal_vnf_request, grant,
                 grant_request, **kwargs):
        """Hook invoked after a VNF heal operation."""
    @abc.abstractmethod
    def change_external_connectivity_start(
            self, context, vnf_instance,
            change_ext_conn_request, grant,
            grant_request, **kwargs):
        """Hook invoked before changing external connectivity."""
    @abc.abstractmethod
    def change_external_connectivity_end(
            self, context, vnf_instance,
            change_ext_conn_request, grant,
            grant_request, **kwargs):
        """Hook invoked after changing external connectivity."""
    @abc.abstractmethod
    def modify_information_start(self, context, vnf_instance,
                                 modify_vnf_request, **kwargs):
        """Hook invoked before modifying VNF information."""
    @abc.abstractmethod
    def modify_information_end(self, context, vnf_instance,
                               modify_vnf_request, **kwargs):
        """Hook invoked after modifying VNF information."""
| {
"content_hash": "13a8355f90d7d2a3c7e3da68d0183837",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 71,
"avg_line_length": 28.52127659574468,
"alnum_prop": 0.5609847071988064,
"repo_name": "openstack/tacker",
"id": "1077ef019778c55250bf0bf7d14c1f438bd04ad2",
"size": "3308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tacker/vnfm/mgmt_drivers/vnflcm_abstract_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "10809"
},
{
"name": "Mako",
"bytes": "1046"
},
{
"name": "Python",
"bytes": "7648075"
},
{
"name": "Ruby",
"bytes": "2841"
},
{
"name": "Shell",
"bytes": "61750"
},
{
"name": "Smarty",
"bytes": "3624"
}
],
"symlink_target": ""
} |
"""Urls for Zinnia random entries"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.random import EntryRandom
# Single URL: the blog's "random entry" redirect view.
# NOTE(review): the string-prefix ``patterns()`` helper was deprecated in
# Django 1.8 and removed in 1.10; newer Django expects ``urlpatterns`` to be
# a plain list of url()/path() entries — confirm the project's Django
# version before modernizing.
urlpatterns = patterns(
    '',
    url(r'^$',
        EntryRandom.as_view(),
        name='entry_random'),
)
| {
"content_hash": "2f330af8024ee074884eff4dd5654a3a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 43,
"avg_line_length": 20.384615384615383,
"alnum_prop": 0.6716981132075471,
"repo_name": "ZuluPro/django-blog-zinnia",
"id": "85ec2a843d674f4377f3f100b26c517cb6c7c97e",
"size": "265",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "zinnia/urls/random.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "77370"
},
{
"name": "HTML",
"bytes": "75068"
},
{
"name": "JavaScript",
"bytes": "235617"
},
{
"name": "Makefile",
"bytes": "1789"
},
{
"name": "Python",
"bytes": "506854"
}
],
"symlink_target": ""
} |
import urllib2
from HTMLParser import HTMLParser
import xml.etree.ElementTree as ET
import song
import info
import logging
import json
import re
# URL templates for xiami.com radio pages and the playlist XML feed.
radio_url_temp = "http://www.xiami.com/radio/play/id/%s"
radio_list_url_temp = "http://www.xiami.com/radio/xml/type/%s/id/%s?v=%s"
# Path fragment that identifies the Flash radio player <object> on the page.
player_path_signature = '/res/fm/xiamiRadio'
player_host = "http://www.xiami.com"
# %d is the 1-based page number of the favourites listing.
fav_radio_url_temp = "http://www.xiami.com/radio/favlist?page=%d"
# Extracts (radio_type, radio_id) from the player's dataUrl FlashVars value.
radio_data_rex = re.compile("/radio/xml/type/([0-9]+)/id/([0-9]+)")
logger = logging.getLogger('radio')
class RadioPageParser(HTMLParser):
    """Scrapes the Flash radio player out of a xiami.com radio page.

    After feed()-ing the page HTML, the following attributes hold results:
      - v_val: the 'v' query parameter found in the player SWF path
      - player_path: the SWF path, when it contains player_path_signature
      - radio_data_url: the 'dataUrl' FlashVars value (playlist endpoint)
    Any of them stays '' when the page lacks the corresponding markup.
    """
    def __init__(self):
        HTMLParser.__init__(self)
        self.v_val = ''
        self.player_path = ''
        self.in_player = False
        self.radio_data_url = ''
    def handle_starttag(self, tag, attrs):
        attrs = dict(attrs)
        # BUG FIX: use dict.get() throughout — the original indexed
        # attrs['type'] / attrs['data'] directly and raised KeyError on any
        # <object> tag missing those attributes.
        if tag == 'object' and attrs.get('type') == 'application/x-shockwave-flash':
            path = attrs.get('data', '')
            if player_path_signature in path:
                # The 'v' value lives in the SWF path's query string.
                values = path.split('?')[1]
                for pair in values.split('&'):
                    split_pair = pair.split('=')
                    if len(split_pair) == 2 and split_pair[0] == 'v':
                        self.v_val = split_pair[1]
                self.player_path = path
                self.in_player = True
        elif tag == 'param':
            if attrs.get('name') == 'FlashVars':
                flashvars = attrs.get('value', '')
                for flashvar in flashvars.split('&'):
                    # BUG FIX: skip malformed pairs instead of letting the
                    # 2-tuple unpack raise ValueError.
                    parts = flashvar.split('=')
                    if len(parts) == 2 and parts[0] == 'dataUrl':
                        self.radio_data_url = parts[1]
    def handle_endtag(self, tag):
        # Leave the player <object> scope.
        if tag == 'object' and self.in_player:
            self.in_player = False
def visit_radio(state, radio_pid):
    """Fetch a radio page and record player metadata into ``state``.

    Side effects on ``state`` (a dict): sets 'v_val' ('0' when the page
    has no v value), optionally 'player_path', 'radio_page_path',
    'radio_type' and 'radio_id'.  Performs a blocking HTTP GET.
    """
    radio_url = radio_url_temp % radio_pid
    logger.debug("radio page: %s" % radio_url)
    radio_page = urllib2.urlopen(radio_url).read()
    parser = RadioPageParser()
    parser.feed(radio_page)
    if parser.v_val == '':
        logger.warning('can"t find v value')
        state['v_val'] = '0'
    else:
        state['v_val'] = parser.v_val
    if parser.player_path:
        state['player_path'] = player_host + parser.player_path
    state['radio_page_path'] = radio_url
    # NOTE(review): when the page has no recognizable dataUrl, match()
    # returns None and the .group() calls below raise AttributeError —
    # consider failing with an explicit error instead.
    result = radio_data_rex.match(parser.radio_data_url)
    state['radio_type'] = result.group(1)
    state['radio_id'] = result.group(2)
def get_radio_list(state, radio_type, radio_id):
    """Download and parse the playlist XML for a radio.

    Requires state['v_val'] (scraped earlier by visit_radio).  Updates
    ``state`` from the feed's <config> section and returns the list of
    song.Song objects built from the <trackList> section.
    """
    # visit the radio page to get v value
    if 'v_val' not in state:
        raise Exception("visit radio page first")
    # get list of songs
    list_url = radio_list_url_temp % (radio_type, radio_id, state['v_val'])
    logger.debug("radio list: %s" % list_url)
    payload = urllib2.urlopen(list_url).read()
    # parse list
    try:
        root = ET.fromstring(payload)
    except Exception as exc:
        logger.error("fail to parse song list!")
        logger.error(payload)
        raise exc
    songs = []
    for node in root:
        if node.tag == 'config':
            # update personal info from new data
            info.update_state(state, node)
        elif node.tag == 'trackList':
            for track_node in node:
                entry = song.Song()
                for prop_node in track_node:
                    value = prop_node.text
                    if prop_node.tag == 'location':
                        # Stream locations are obfuscated in the feed.
                        value = song.decrypt_location(value)
                    setattr(entry, prop_node.tag, value)
                songs.append(entry)
    return songs
def get_fav_radio(state, page):
    """Fetch one page (1-based) of the user's favourite radios.

    Returns the inner 'data' list of the JSON payload; raises when the
    service reports a non-true status.
    """
    assert(page >= 1)
    url = fav_radio_url_temp % page
    raw_response = urllib2.urlopen(url).read()
    parsed = json.loads(raw_response)
    if not parsed['status']:
        raise Exception('fail to get favourite radios. %s' % parsed['message'])
    payload = parsed['data']
    # unknown
    prev_fav = payload['prev']
    next_fav = payload['next']
    logger.debug("prev, next: %s %s" % (prev_fav, next_fav))
    return payload['data']
if __name__ == '__main__':
    # Ad-hoc smoke test: initialize state, log in if needed, then poll the
    # radio playlist ten times, dumping each track.  Python 2 only
    # (xrange, urllib2) and depends on the local init/config/login modules.
    import init
    import config
    state = init.init()
    if not info.authenticated(state):
        import login
        login.login_console(state, config.username, config.password)
    visit_radio(state, config.radio_id)
    import time
    for x in xrange(10):
        tracks = get_radio_list(state, state['radio_type'], state['radio_id'])
        for track in tracks:
            track.dump_info()
        time.sleep(5)
| {
"content_hash": "fec6bc35d2162aa93fe4a850640301ba",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 89,
"avg_line_length": 33.68115942028985,
"alnum_prop": 0.5725043029259896,
"repo_name": "HenryHu/xmradio",
"id": "d163f788a306ddeea78fa60c09b0df9d0f4bb7d3",
"size": "4648",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "radio.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "58407"
},
{
"name": "QML",
"bytes": "14543"
},
{
"name": "Shell",
"bytes": "686"
}
],
"symlink_target": ""
} |
import io
import os
import setuptools
# Package metadata.
name = 'google-cloud-texttospeech'
description = 'Google Cloud Text-to-Speech API client library'
version = '0.2.0'
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = 'Development Status :: 3 - Alpha'
dependencies = [
    'google-api-core[grpc] < 2.0.0dev, >= 0.1.0',
]
# No optional extras for this package (empty dict kept for template symmetry
# with the other google-cloud-python packages).
extras = {
}
# Setup boilerplate below this line.
package_root = os.path.abspath(os.path.dirname(__file__))
# The PyPI long description is the repository README.
readme_filename = os.path.join(package_root, 'README.rst')
with io.open(readme_filename, encoding='utf-8') as readme_file:
    readme = readme_file.read()
# Only include packages under the 'google' namespace. Do not include tests,
# benchmarks, etc.
packages = [
    package for package in setuptools.find_packages()
    if package.startswith('google')]
# Determine which namespaces are needed.
namespaces = ['google']
if 'google.cloud' in packages:
    namespaces.append('google.cloud')
setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author='Google LLC',
    author_email='googleapis-packages@google.com',
    license='Apache 2.0',
    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
    classifiers=[
        release_status,
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: OS Independent',
        'Topic :: Internet',
    ],
    platforms='Posix; MacOS X; Windows',
    packages=packages,
    namespace_packages=namespaces,
    install_requires=dependencies,
    extras_require=extras,
    include_package_data=True,
    zip_safe=False,
)
| {
"content_hash": "cb6bffe1520e78bc86b38f670851fd4f",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 75,
"avg_line_length": 28.37837837837838,
"alnum_prop": 0.6661904761904762,
"repo_name": "tseaver/gcloud-python",
"id": "5805b91b956c4496cc1fbc0449a5be3475fcb7cb",
"size": "3250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "texttospeech/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "93642"
},
{
"name": "Python",
"bytes": "2874989"
},
{
"name": "Shell",
"bytes": "4436"
}
],
"symlink_target": ""
} |
import json
# 3rd party libs
# Own project libs
from oneview_redfish_toolkit.api.rack_chassis import RackChassis
from oneview_redfish_toolkit.tests.base_test import BaseTest
class TestRackChassis(BaseTest):
    """Tests for the RackChassis API class.

    Covers:
        - Rack chassis instantiation/validation from the OneView mockup
        - Rack chassis serialization against the known Redfish mockup
    """
    def setUp(self):
        """Load the OneView rack mockup and the expected Redfish result."""
        # Loading rack mockup value
        with open(
            'oneview_redfish_toolkit/mockups/oneview/Rack.json'
        ) as f:
            self.rack = json.load(f)
        # Loading rack_chassis_mockup mockup result
        with open(
            'oneview_redfish_toolkit/mockups/redfish/RackChassis.json'
        ) as f:
            self.rack_chassis_mockup = json.load(f)
    def _build_chassis(self):
        # Shared helper: both tests need a RackChassis instance and fail
        # with a readable message on any construction error.
        try:
            return RackChassis(self.rack)
        except Exception as e:
            self.fail("Failed to instantiate RackChassis class."
                      " Error: {}".format(e))
    def test_class_instantiation(self):
        # Tests if class is correctly instantiated and validated
        rack_chassis = self._build_chassis()
        self.assertIsInstance(rack_chassis, RackChassis)
    def test_serialize(self):
        # Tests the serialize function result against known result
        rack_chassis = self._build_chassis()
        try:
            result = json.loads(rack_chassis.serialize())
        except Exception as e:
            # BUG FIX: the original message was "Error: ".format(e) — the
            # missing '{}' placeholder silently dropped the actual error.
            self.fail("Failed to serialize. Error: {}".format(e))
        self.assertEqualMockup(self.rack_chassis_mockup, result)
| {
"content_hash": "e91ca9e5b7736c5a17c267956a1931bb",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 70,
"avg_line_length": 29.614035087719298,
"alnum_prop": 0.6095971563981043,
"repo_name": "HewlettPackard/oneview-redfish-toolkit",
"id": "a9a97406d97e50f346bbd7be38e0b7c9cd978322",
"size": "2336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oneview_redfish_toolkit/tests/api/test_rack_chassis.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "284"
},
{
"name": "Python",
"bytes": "979438"
},
{
"name": "Shell",
"bytes": "866"
}
],
"symlink_target": ""
} |
"""
Chart demos using RawQuerySet
"""
from chartit import DataPool, Chart
from django.shortcuts import render_to_response
from .decorators import add_source_code_and_doc
from .models import MonthlyWeatherByCity, MonthlyWeatherSeattle
from .models import SalesHistory, BookStore, Book
@add_source_code_and_doc
def basicline(_, title, code, doc, sidebar_items):
    """
    A Basic Line Chart
    ------------------
    This is just a simple line chart with data from 2 different columns using
    a ``RawQuerySet`` source.

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': MonthlyWeatherByCity.objects.raw(
                    "SELECT id, month, houston_temp, boston_temp "
                    "FROM demoproject_monthlyweatherbycity")
            },
            'terms': [
                'month',
                'houston_temp',
                'boston_temp'
            ]
        }]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'month': [
                    'boston_temp',
                    'houston_temp'
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Weather Data of Boston and Houston'
            },
            'xAxis': {
                'title': {
                    'text': 'Month number'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def mapf_for_x(_, title, code, doc, sidebar_items):
    """
    Mapping the x-axis
    ------------------
    This example demonstrates how to use the ``sortf_mapf_mts`` parameter to
    *map* the x-axis. The database only has month numbers (1-12) but not the
    month names. To display the month names in the graph, we create the
    ``monthname`` function and pass it to the ``Chart`` as the mapping funtion
    (``mapf``).
    Points to note:
    - ``mts`` is ``False`` because we want to sort by month numbers and map to
      the month names *after* they are sorted in order of month numbers.
      Setting it to ``True`` would sort after mapping giving an incorrect sort
      order like ``Apr``, ``Aug``, ``Dec``, ``...``.

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': MonthlyWeatherByCity.objects.raw(
                    "SELECT * FROM demoproject_monthlyweatherbycity"
                )
            },
            'terms': [
                'month',
                'houston_temp',
                'boston_temp'
            ]
        }]
    )
    def monthname(month_num):
        names = {1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr', 5: 'May', 6: 'Jun',
                 7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Oct', 11: 'Nov', 12: 'Dec'}
        return names[month_num]
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'month': [
                    'boston_temp',
                    'houston_temp'
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Weather Data of Boston and Houston'
            },
            'xAxis': {
                'title': {
                    'text': 'Month'
                }
            }
        },
        x_sortf_mapf_mts=(None, monthname, False))
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def multi_table_same_x(_, title, code, doc, sidebar_items):
    """
    Data from multiple models on same chart
    ----------------------------------------
    This example demonstrates data from two different models
    ``MonthlyWeatherByCity`` and ``MonthlyWeatherSeattle`` on the same chart
    and on the same x-axis. Notice that we've mixed ``RawQuerySet`` and
    ``QuerySet`` sources together!

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': MonthlyWeatherByCity.objects.raw(
                    "SELECT * FROM demoproject_monthlyweatherbycity"
                )
            },
            'terms': [
                'month',
                'houston_temp',
                'boston_temp'
            ]}, {
            'options': {
                'source': MonthlyWeatherSeattle.objects.all()
            },
            'terms': [
                {'month_seattle': 'month'},
                'seattle_temp'
            ]}
        ]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'month': [
                    'boston_temp',
                    'houston_temp'],
                'month_seattle': ['seattle_temp']
            }
        }],
        chart_options={
            'title': {
                'text': 'Weather by Month (from 2 different tables)'},
            'xAxis': {
                'title': {
                    'text': 'Month number'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def basicline_with_datefield(_, title, code, doc, sidebar_items):
    """
    A Basic Line Chart with DateField
    ---------------------------------
    This chart plots sales quantities per day from the first book store.
    Points to note:
    - ``sale_date`` is a DateField

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': SalesHistory.objects.raw(
                    "SELECT * FROM demoproject_saleshistory "
                    "WHERE bookstore_id=%s LIMIT 10",
                    [BookStore.objects.first().pk]
                )
            },
            'terms': [
                'sale_date',
                'sale_qty',
            ]
        }]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'sale_date': [
                    'sale_qty',
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Sales QTY per day'
            },
            'xAxis': {
                'title': {
                    'text': 'Sale date'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def datetimefield_from_related_model(_, title, code, doc, sidebar_items):
    """
    A Basic Line Chart with DateTimeField from related model
    --------------------------------------------------------
    This chart plots sales quantities from the first book store based on
    when the book was published.

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': SalesHistory.objects.raw(
                    "SELECT * FROM demoproject_saleshistory "
                    "WHERE bookstore_id=%s LIMIT 10",
                    [BookStore.objects.first().pk]
                )
            },
            'terms': [
                'book__published_at',
                'sale_qty',
            ]
        }]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'book__published_at': [
                    'sale_qty',
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Sales QTY vs. Book publish date'
            },
            'xAxis': {
                'title': {
                    'text': 'Publish date'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def extra_datefield(_, title, code, doc, sidebar_items):
    """
    A Basic Line Chart using extra DateField, not defined in the model
    ------------------------------------------------------------------
    This chart plots sales quantities per day from the first book store.
    In the ``RawQuerySet`` we select extra fields, which are not defined
    inside the model.

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                # NOTE: strftime is SQLite function.
                # For MySQL use DATE_FORMAT
                'source': SalesHistory.objects.raw(
                    "SELECT id, sale_qty, "
                    "strftime('%%Y/%%m/%%d', sale_date) as sold_at"
                    " FROM demoproject_saleshistory "
                    "WHERE bookstore_id=%s LIMIT 10",
                    [BookStore.objects.first().pk]
                )
            },
            'terms': [
                'sold_at',
                'sale_qty',
            ]
        }]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'sold_at': [
                    'sale_qty',
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Sales QTY per day'
            },
            'xAxis': {
                'title': {
                    'text': 'Sale date'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
@add_source_code_and_doc
def avg_count(_, title, code, doc, sidebar_items):
    """
    A Basic Line Chart using AVG and COUNT
    --------------------------------------
    This chart plots the average book rating in each genre
    together with the number of books in each genre.
    NOTE that we use the SQL functions for average and count!

    View wrapped by ``add_source_code_and_doc``; the decorator supplies
    ``title``/``code``/``doc``/``sidebar_items`` and the view returns the
    rendered ``chart_code.html`` demo page.
    """
    # NOTE: the decorator displays the code between the start_code/end_code
    # markers verbatim on the demo page, so the body must not be edited.
    # start_code
    ds = DataPool(
        series=[{
            'options': {
                'source': Book.objects.raw(
                    "SELECT "
                    "    demoproject_book.id, "
                    "    demoproject_genre.name as genre_name, "
                    "    avg(rating) as rating_avg, "
                    "    count(genre_id) as genre_count "
                    "FROM demoproject_book "
                    "JOIN demoproject_genre ON "
                    "    genre_id == demoproject_genre.id "
                    "GROUP BY genre_id "
                )
            },
            'terms': [
                'genre_name',
                'rating_avg',
                'genre_count'
            ]
        }]
    )
    cht = Chart(
        datasource=ds,
        series_options=[{
            'options': {
                'type': 'line',
                'stacking': False
            },
            'terms': {
                'genre_name': [
                    'rating_avg', 'genre_count'
                ]
            }
        }],
        chart_options={
            'title': {
                'text': 'Book rating and count per Genre'
            },
            'xAxis': {
                'title': {
                    'text': 'Genre'
                }
            }
        }
    )
    # end_code
    return render_to_response('chart_code.html',
                              {
                                  'chart_list': cht,
                                  'code': code,
                                  'title': title,
                                  'doc': doc,
                                  'sidebar_items': sidebar_items})
| {
"content_hash": "8b13b77ded6b2465d5189219c5832db0",
"timestamp": "",
"source": "github",
"line_count": 470,
"max_line_length": 79,
"avg_line_length": 31.8,
"alnum_prop": 0.3539408537401311,
"repo_name": "pgollakota/django-chartit",
"id": "02139717c702110fd36b713a05654ce07dd7f7b3",
"size": "14946",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "demoproject/demoproject/chartraw.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "55534"
},
{
"name": "JavaScript",
"bytes": "33930"
},
{
"name": "Prolog",
"bytes": "5596"
},
{
"name": "Python",
"bytes": "201179"
},
{
"name": "Shell",
"bytes": "5112"
}
],
"symlink_target": ""
} |
from scheduler_failover_controller.utils import date_utils
from scheduler_failover_controller.metadata.base_metadata_service import BaseMetadataService
from kazoo.client import KazooClient # documentation: https://kazoo.readthedocs.io/en/latest/basic_usage.html
import datetime
class ZookeeperMetadataService(BaseMetadataService):
    """Stores scheduler-failover metadata in ZooKeeper.

    Three buckets live under ``zookeeper_base_bucket``: the failover
    heartbeat timestamp, the active failover node, and the active
    scheduler node.
    """
    def __init__(self, zookeeper_nodes, logger, zookeeper_base_bucket="/scheduler_failover_controller"):
        logger.debug("Creating MetadataServer (type:ZookeeperMetadataService) with Args - zookeeper_nodes: {zookeeper_nodes}, logger: {logger}, zookeeper_base_bucket: {zookeeper_base_bucket}".format(**locals()))
        self.zookeeper_nodes = zookeeper_nodes
        self.zookeeper_base_bucket = zookeeper_base_bucket
        self.logger = logger
        self.failover_heartbeat_bucket = zookeeper_base_bucket + "/failover_heartbeat"
        self.active_failover_node_bucket = zookeeper_base_bucket + "/active_failover_node"
        self.active_scheduler_node_bucket = zookeeper_base_bucket + "/active_scheduler_node"
        self.zk = KazooClient(hosts=zookeeper_nodes)
        self.zk.start()
    def _set_bucket_value(self, bucket, value):
        # Shared upsert: kazoo has no single set-or-create primitive, so
        # check existence first (all three setters used this same pattern).
        if self.zk.exists(bucket):
            self.logger.debug("Bucket exists. Setting bucket " + str(bucket) + " to value " + str(value))
            self.zk.set(bucket, value)
        else:
            self.logger.debug("Bucket doesn't exist. Creating bucket " + str(bucket) + " with value " + str(value))
            self.zk.create(bucket, value)
    def _get_bucket_value(self, bucket, label):
        # Shared read used by both active-node getters: returns the raw
        # bucket payload, or None when the bucket does not exist.
        self.logger.debug("Getting " + label)
        data = None
        if self.zk.exists(bucket):
            data, stat = self.zk.get(bucket)
        else:
            self.logger.debug(label + " Bucket " + str(bucket) + " doesn't exist.")
        self.logger.debug("Returning " + str(data))
        return data
    def initialize_metadata_source(self):
        """Ensure the base bucket path exists."""
        self.zk.ensure_path(self.zookeeper_base_bucket)
    def get_failover_heartbeat(self):
        """Return the stored heartbeat as a datetime, or None when unset."""
        self.logger.debug("Getting Failover Heartbeat")
        heart_beat_date = None
        if self.zk.exists(self.failover_heartbeat_bucket):
            data, stat = self.zk.get(self.failover_heartbeat_bucket)
            heart_beat_date = date_utils.get_string_as_datetime(data)
        self.logger.debug("Returning " + str(heart_beat_date))
        return heart_beat_date
    def set_failover_heartbeat(self):
        """Record the current time as the failover heartbeat."""
        heart_beat_date_str = date_utils.get_datetime_as_str(datetime.datetime.now())
        self.logger.debug("Setting Failover Heartbeat to " + str(heart_beat_date_str))
        self._set_bucket_value(self.failover_heartbeat_bucket, heart_beat_date_str)
    def get_active_failover_node(self):
        """Return the active failover node payload, or None when unset."""
        # BUG FIX: the original "doesn't exist" debug message named the
        # *scheduler* node bucket; it now names the failover node bucket.
        return self._get_bucket_value(self.active_failover_node_bucket, "Active Failover Node")
    def set_active_failover_node(self, node):
        """Record ``node`` as the active failover node."""
        self.logger.debug("Setting Active Failover Node to " + str(node))
        self._set_bucket_value(self.active_failover_node_bucket, node)
    def get_active_scheduler_node(self):
        """Return the active scheduler node payload, or None when unset."""
        return self._get_bucket_value(self.active_scheduler_node_bucket, "Active Scheduler Node")
    def set_active_scheduler_node(self, node):
        """Record ``node`` as the active scheduler node."""
        self.logger.debug("Setting Active Scheduler Node to " + str(node))
        self._set_bucket_value(self.active_scheduler_node_bucket, node)
    def clear(self):
        """Delete the entire metadata subtree."""
        self.zk.delete(self.zookeeper_base_bucket, recursive=True)
| {
"content_hash": "a28802860b21e30b3c98f3b523f444be",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 211,
"avg_line_length": 55.75,
"alnum_prop": 0.6745675848814863,
"repo_name": "teamclairvoyant/airflow-scheduler-failover-controller",
"id": "b53879cabd8e36b5614384a81e778118e5df2783",
"size": "4683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scheduler_failover_controller/metadata/zookeeper_metadata_service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "48581"
},
{
"name": "Shell",
"bytes": "5082"
}
],
"symlink_target": ""
} |
from distutils.core import setup
# Packaging manifest for the 'repono' Twitter-extraction toolkit.
# NOTE(review): this uses distutils, which was removed from the standard
# library in Python 3.12 — migrating to setuptools' setup() is a drop-in
# change but should be verified against the project's build workflow.
# Also note 'requires' is legacy metadata; pip installs nothing from it.
setup(
    name='repono',
    version='0.1',
    packages=['data', 'data.api', 'data.api.tinydb', 'config',
              'export', 'providers', 'providers.api', 'providers.data',
              'providers.extraction', 'providers.extraction.twitter'],
    url='https://github.com/ademsha',
    license='APACHE 2.0',
    author='ademsha',
    author_email='',
    description='Extract useful insights/datasets from Twitter data stream',
    requires=['tweepy', 'tinydb']
)
| {
"content_hash": "04bfe6a194dc0d22ffc18ea6394772fd",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 76,
"avg_line_length": 33.666666666666664,
"alnum_prop": 0.6316831683168317,
"repo_name": "ademsha/repono",
"id": "25cfbe685598be4f7c43b50d4241782904a73471",
"size": "505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "25769"
},
{
"name": "Shell",
"bytes": "1553"
}
],
"symlink_target": ""
} |
from flask import Blueprint, render_template
from flask_security import login_required
# Blueprint for the public-facing ("front") pages.
front = Blueprint("front", __name__)
@front.route("/")
def index():
    # Landing page; no authentication required.
    return render_template("front/index.html")
@front.route("/secure")
@login_required
def admin():
    # Login-protected page; currently renders the same template as index.
    return render_template("front/index.html")
| {
"content_hash": "bddc24f89cb13ef36c941ed8b40be7cf",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 44,
"avg_line_length": 20.066666666666666,
"alnum_prop": 0.7308970099667774,
"repo_name": "AthelasPeru/laborapp",
"id": "711fdd352460fa4fbb7a80901a4c3371b242ca52",
"size": "301",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/blueprints/front/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "346803"
},
{
"name": "HTML",
"bytes": "25611"
},
{
"name": "JavaScript",
"bytes": "101056"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "17308"
},
{
"name": "Ruby",
"bytes": "976"
},
{
"name": "Shell",
"bytes": "260"
}
],
"symlink_target": ""
} |
""" Script to run Pythran file compilation with specified g++ like flags. """
import argparse
import logging
import os
import sys
import pythran
from distutils.errors import CompileError
logger = logging.getLogger("pythran")
def convert_arg_line_to_args(arg_line):
    """Split one line of an ``@argfile`` into individual arguments.

    ``str.split()`` with no separator already collapses runs of whitespace
    and never yields empty tokens, so every token can be forwarded as-is.
    """
    yield from arg_line.split()
def compile_flags(args):
    """Map the parsed command line onto compiler keyword arguments.

    Builds the dictionary of options (macros, include/library dirs,
    extra compile/link flags, config entries) consumed by the pythran
    compilation entry points. Optimization passes (``-p``) are only
    forwarded when explicitly given on the command line.
    """
    flags = dict(
        define_macros=args.defines,
        undef_macros=args.undefs,
        include_dirs=args.include_dirs,
        extra_compile_args=args.extra_flags,
        library_dirs=args.libraries_dir,
        extra_link_args=args.extra_flags,
        config=args.config,
    )
    # Only forward the optional passes when they are non-empty.
    opts = getattr(args, 'opts', None)
    if opts:
        flags['opts'] = opts
    return flags
def run():
    """Command-line entry point for the ``pythran`` compiler driver.

    Parses the command line, then dispatches either to
    ``pythran.compile_cxxfile`` (for a ``.cpp`` input) or to
    ``pythran.compile_pythranfile`` (for a ``.py`` input), translating
    every known failure mode into a log message and ``sys.exit(1)``.
    """
    prefix_chars = "-"
    if os.name == "nt":
        # On Windows also accept "/flag"-style options.
        prefix_chars += "/"
    parser = argparse.ArgumentParser(prog='pythran',
                                     description='pythran: a python to C++ '
                                                 'compiler',
                                     epilog="It's a megablast!",
                                     prefix_chars=prefix_chars,
                                     fromfile_prefix_chars="@")
    parser.add_argument('input_file', type=str,
                        help='the pythran module to compile, '
                             'either a .py or a .cpp file')
    parser.add_argument('-o', dest='output_file', type=str,
                        help='path to generated file. Honors %%{ext}.')
    parser.add_argument('-P', dest='optimize_only', action='store_true',
                        help='only run the high-level optimizer, '
                             'do not compile')
    parser.add_argument('-E', dest='translate_only', action='store_true',
                        help='only run the translator, do not compile')
    parser.add_argument('-e', dest='raw_translate_only', action='store_true',
                        help='similar to -E, '
                             'but does not generate python glue')
    parser.add_argument('-v', dest='verbose', action='store_true',
                        help='be more verbose')
    parser.add_argument('-w', dest='warn_off', action='store_true',
                        help='be less verbose')
    parser.add_argument('-V', '--version',
                        action='version',
                        version=pythran.version.__version__)
    parser.add_argument('-p', dest='opts', metavar='pass',
                        action='append',
                        help='any pythran optimization to apply before code '
                             'generation',
                        default=list())
    parser.add_argument('-I', dest='include_dirs', metavar='include_dir',
                        action='append',
                        help='any include dir relevant to the underlying C++ '
                             'compiler',
                        default=list())
    parser.add_argument('-L', dest='libraries_dir', metavar='ldflags',
                        action='append',
                        help='any search dir relevant to the linker',
                        default=list())
    parser.add_argument('-D', dest='defines', metavar='macro_definition',
                        action='append',
                        help='any macro definition relevant to '
                             'the underlying C++ compiler',
                        default=list())
    parser.add_argument('-U', dest='undefs', metavar='macro_definition',
                        action='append',
                        help='any macro undef relevant to '
                             'the underlying C++ compiler',
                        default=list())
    parser.add_argument('--config', dest='config', metavar='config',
                        action='append',
                        help='config additional params',
                        default=list())
    parser.add_argument('-ftime-report', dest='report_times',
                        action='store_true',
                        help='report time spent in each optimization/transformation')
    # Allow several options per line inside an @args-file.
    parser.convert_arg_line_to_args = convert_arg_line_to_args
    args, extra = parser.parse_known_args(sys.argv[1:])
    # Any unrecognised flag is forwarded verbatim to the C++ toolchain.
    args.extra_flags = extra
    if args.raw_translate_only:
        # -e implies -E, minus the CPython glue code.
        args.translate_only = True
        args.undefs.append('ENABLE_PYTHON_MODULE')
    if args.verbose and args.warn_off:
        logger.critical("Unexpected combination: -w and -v? Daoubennek?")
        sys.exit(1)
    if args.verbose:
        logger.setLevel(logging.INFO)
    if args.warn_off:
        logger.setLevel(logging.ERROR)
    if args.config:
        pythran.config.update_cfg(pythran.config.cfg, args.config)
    if args.verbose and not args.warn_off:
        pythran.config.lint_cfg(pythran.config.cfg)
    try:
        if not os.path.exists(args.input_file):
            raise ValueError("input file `{0}' not found".format(
                args.input_file))
        module_name, ext = os.path.splitext(os.path.basename(args.input_file))
        # FIXME: do we want to support other ext than .cpp?
        if ext not in ['.cpp', '.py']:
            raise SyntaxError("Unsupported file extension: '{0}'".format(ext))
        if ext == '.cpp':
            # Python-only stages make no sense on a C++ input.
            if args.optimize_only:
                raise ValueError("Do you really ask for Python-to-Python "
                                 "on this C++ input file: '{0}'?".format(
                                     args.input_file))
            if args.translate_only:
                raise ValueError("Do you really ask for Python-to-C++ "
                                 "on this C++ input file: '{0}'?".format(
                                     args.input_file))
            pythran.compile_cxxfile(module_name,
                                    args.input_file, args.output_file,
                                    **compile_flags(args))
        else:  # assume we have a .py input file here
            pythran.compile_pythranfile(args.input_file,
                                        output_file=args.output_file,
                                        cpponly=args.translate_only,
                                        pyonly=args.optimize_only,
                                        report_times=args.report_times,
                                        **compile_flags(args))
    # Each failure class gets its own user-facing message; all exit with 1.
    except IOError as e:
        logger.critical("I've got a bad feeling about this...\n"
                        "E: " + str(e))
        sys.exit(1)
    except ValueError as e:
        logger.critical("Chair to keyboard interface error\n"
                        "E: " + str(e))
        sys.exit(1)
    except pythran.types.tog.PythranTypeError as e:
        logger.critical("You shall not pass!\n"
                        "E: " + str(e))
        sys.exit(1)
    except pythran.syntax.PythranSyntaxError as e:
        logger.critical("I am in trouble. Your input file does not seem "
                        "to match Pythran's constraints...\n" + str(e))
        sys.exit(1)
    except CompileError as e:
        logger.critical("Cover me Jack. Jack? Jaaaaack!!!!\n"
                        "E: " + str(e))
        sys.exit(1)
    except NotImplementedError:
        logger.critical("MAYDAY, MAYDAY, MAYDAY; pythran compiler; "
                        "code area out of control\n"
                        "E: not implemented feature needed, "
                        "bash the developers")
        raise  # Why ? we may instead display the stacktrace and exit?
    except EnvironmentError as e:
        logger.critical("By Jove! Your environment does not seem "
                        "to provide all what we need\n"
                        "E: " + str(e))
        sys.exit(1)
if __name__ == '__main__':
    run()
| {
"content_hash": "087cc0b9f2cb139018d3e588cba8745e",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 85,
"avg_line_length": 37.23744292237443,
"alnum_prop": 0.5130594727161251,
"repo_name": "serge-sans-paille/pythran",
"id": "927b879c660e6ff0d3e97a856c33ea8029b7c7b7",
"size": "8178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pythran/run.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "2074873"
},
{
"name": "Cython",
"bytes": "1701"
},
{
"name": "Jupyter Notebook",
"bytes": "27461"
},
{
"name": "Makefile",
"bytes": "1162"
},
{
"name": "Python",
"bytes": "2025760"
}
],
"symlink_target": ""
} |
"""
sentry.web.views
~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.db.models import Q
from django.http import HttpResponse, HttpResponseBadRequest, \
HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from sentry.conf import settings
from sentry.coreapi import project_from_auth_vars, project_from_api_key_and_id, \
project_from_id, decode_and_decompress_data, safely_load_json_string, \
ensure_valid_project_id, insert_data_to_database, APIError, APIUnauthorized, \
extract_auth_vars
from sentry.models import Group, GroupBookmark, Project, View
from sentry.utils import json
from sentry.web.decorators import has_access
from sentry.web.frontend.groups import _get_group_list
from sentry.web.helpers import render_to_response, \
get_project_list, render_to_string
@csrf_exempt
@require_http_methods(['POST'])
def store(request):
    """Event ingestion endpoint (Python 2 / legacy Sentry).

    Resolves the target project by one of three auth paths (signed auth
    vars, api_key+project_id over SSL, or an authenticated session),
    decodes/validates the JSON payload and inserts it. Any APIError is
    returned as a plain HTTP response with the error's status code.
    """
    try:
        auth_vars = extract_auth_vars(request)
        data = request.raw_post_data
        if auth_vars:
            project = project_from_auth_vars(auth_vars, data)
        elif request.GET.get('api_key') and request.GET.get('project_id') and request.is_secure():
            # ssl requests dont have to have signature verification
            project = project_from_api_key_and_id(request.GET['api_key'], request.GET['project_id'])
        elif request.GET.get('project_id') and request.user.is_authenticated():
            # authenticated users are simply trusted to provide the right id
            project = project_from_id(request)
        else:
            raise APIUnauthorized()
        if not data.startswith('{'):
            # Not raw JSON -> assume an encoded/compressed body.
            data = decode_and_decompress_data(data)
        data = safely_load_json_string(data)
        ensure_valid_project_id(project, data)
        insert_data_to_database(data)
    except APIError, error:
        return HttpResponse(error.msg, status=error.http_status)
    # Empty 200 response signals successful ingestion.
    return HttpResponse('')
@csrf_exempt
@has_access
def notification(request, project):
    """Render the notification partial; query-string params become context."""
    return render_to_response('sentry/partial/_notification.html', request.GET)
@csrf_exempt
@has_access
def poll(request, project):
    """Return the current first page of event groups as JSON.

    Each entry is ``(group_pk, payload)`` where the payload carries a
    pre-rendered HTML row plus summary fields for client-side updates.
    An invalid ``view_id`` yields a 400 response.
    """
    from sentry.templatetags.sentry_helpers import as_bookmarks
    offset = 0
    limit = settings.MESSAGES_PER_PAGE
    view_id = request.GET.get('view_id')
    if view_id:
        try:
            view = View.objects.get(pk=view_id)
        except View.DoesNotExist:
            return HttpResponseBadRequest()
    else:
        view = None
    filters, event_list = _get_group_list(
        request=request,
        project=project,
        view=view,
    )
    # as_bookmarks pairs each group with its is-bookmarked flag for this user.
    data = [
        (m.pk, {
            'html': render_to_string('sentry/partial/_group.html', {
                'group': m,
                'request': request,
                'is_bookmarked': b,
            }).strip(),
            'title': m.message_top(),
            'message': m.error(),
            'level': m.get_level_display(),
            'logger': m.logger,
            'count': m.times_seen,
        }) for m, b in as_bookmarks(event_list[offset:limit], request.user)]
    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
@csrf_exempt
@has_access
def resolve(request, project):
    """Mark a single group as resolved (status=1) and return its new row.

    Returns 403 when the group id is missing, unknown, or belongs to a
    project the requesting user cannot access.
    """
    gid = request.REQUEST.get('gid')
    if not gid:
        return HttpResponseForbidden()
    try:
        group = Group.objects.get(pk=gid)
    except Group.DoesNotExist:
        return HttpResponseForbidden()
    if group.project and group.project.pk not in get_project_list(request.user):
        return HttpResponseForbidden()
    # Persist via queryset update; mirror the change on the in-memory
    # instance so the rendered row reflects the new status.
    Group.objects.filter(pk=group.pk).update(status=1)
    group.status = 1
    data = [
        (m.pk, {
            'html': render_to_string('sentry/partial/_group.html', {
                'group': m,
                'request': request,
            }).strip(),
            'count': m.times_seen,
        }) for m in [group]]
    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
@csrf_exempt
@has_access
def bookmark(request, project):
    """Toggle the requesting user's bookmark on a group.

    Creating the bookmark when absent, deleting it when present; responds
    with ``{"bookmarked": bool}``. 403 on missing/unknown group, anonymous
    user, or inaccessible project.
    """
    gid = request.REQUEST.get('gid')
    if not gid:
        return HttpResponseForbidden()
    if not request.user.is_authenticated():
        return HttpResponseForbidden()
    try:
        group = Group.objects.get(pk=gid)
    except Group.DoesNotExist:
        return HttpResponseForbidden()
    if group.project and group.project.pk not in get_project_list(request.user):
        return HttpResponseForbidden()
    gb, created = GroupBookmark.objects.get_or_create(
        project=group.project,
        user=request.user,
        group=group,
    )
    if not created:
        # Toggle off: the bookmark already existed.
        gb.delete()
    response = HttpResponse(json.dumps({'bookmarked': created}))
    response['Content-Type'] = 'application/json'
    return response
@csrf_exempt
@has_access
def clear(request, project):
    """Mark every group visible to the user as resolved (status=1).

    # NOTE(review): get_project_list apparently returns a dict keyed by
    # project pk (``.keys()`` is used here) — confirm against its helper.
    """
    projects = get_project_list(request.user)
    event_list = Group.objects.filter(Q(project__in=projects.keys()) | Q(project__isnull=True))
    event_list.update(status=1)
    # Client only needs an (empty) JSON acknowledgement.
    data = []
    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
@csrf_exempt
@has_access
def chart(request, project):
    """Return chart data as JSON, per-group when ``gid`` is given,
    otherwise for the whole project; window defaults to 90 days."""
    gid = request.REQUEST.get('gid')
    days = int(request.REQUEST.get('days', '90'))
    if gid:
        try:
            group = Group.objects.get(pk=gid)
        except Group.DoesNotExist:
            return HttpResponseForbidden()
        if group.project and group.project.pk not in get_project_list(request.user):
            return HttpResponseForbidden()
        data = Group.objects.get_chart_data(group, max_days=days)
    else:
        data = Project.objects.get_chart_data(project, max_days=days)
    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
| {
"content_hash": "caa07f801fa556186fb1deae9cba9e1a",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 100,
"avg_line_length": 29.524509803921568,
"alnum_prop": 0.6450273949858875,
"repo_name": "Kronuz/django-sentry",
"id": "34b2ecedc3f25b3551ed417c3adc631f7286a2cc",
"size": "6023",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sentry/web/api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "134924"
},
{
"name": "JavaScript",
"bytes": "77963"
},
{
"name": "Python",
"bytes": "632636"
},
{
"name": "Shell",
"bytes": "4106"
}
],
"symlink_target": ""
} |
# Odoo 8.0 addon manifest: glue module linking timesheet sheets with the
# invoice-release addon. `auto_install` makes it install itself as soon as
# both dependencies are present; it ships no data files of its own.
{
    'name': 'Timesheet invoice release - sheet addition',
    'version': '8.0.1.0.0',
    'summary': 'Release timesheet lines after invoice was cancelled',
    'author': 'Sunflower IT',
    'website': 'http://www.sunflowerweb.nl',
    'category': 'Sales Management',
    'depends': [
        'hr_timesheet_sheet',
        'hr_timesheet_invoice_release',
    ],
    'data': [],
    'auto_install': True,
    'installable': True,
}
| {
"content_hash": "85814d84330558268b83c480eeb4d662",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 69,
"avg_line_length": 28.933333333333334,
"alnum_prop": 0.5852534562211982,
"repo_name": "sunflowerit/odoo-modules",
"id": "82b54595ec5b36ef4f306fd50f83bc7fcdd77f76",
"size": "460",
"binary": false,
"copies": "1",
"ref": "refs/heads/8.0",
"path": "hr_timesheet_invoice_release_sheet/__openerp__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "409"
},
{
"name": "Python",
"bytes": "16413"
}
],
"symlink_target": ""
} |
"""
Distributions
-------------
A widget for plotting attribute distributions.
"""
from math import sqrt
import sys
import collections
from xml.sax.saxutils import escape
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QToolTip
import numpy
import pyqtgraph as pg
import Orange.data
from Orange.statistics import distribution, contingency
from Orange.widgets import widget, gui, settings
from Orange.widgets.utils import itemmodels
from Orange.widgets.widget import InputSignal
from Orange.widgets.visualize.owlinearprojection import LegendItem, ScatterPlotItem
from Orange.widgets.io import FileFormat
from .owscatterplotgraph import HelpEventDelegate
def selected_index(view):
    """Return the selected row in `view`, or -1 when nothing is selected.

    `view` must be in single selection mode (asserted below).
    """
    selection = view.selectedIndexes()
    assert len(selection) < 2, "View must be in single selection mode"
    return selection[0].row() if selection else -1
class DistributionBarItem(pg.GraphicsObject):
    """One histogram bar, drawn as vertically-stacked coloured segments.

    `dist` holds the per-segment heights (as fractions of the bar's
    geometry) and `colors` the matching fill colours. The bar is rendered
    once into a QPicture on first paint and replayed afterwards.
    """
    def __init__(self, geometry, dist, colors):
        super().__init__()
        self.geometry = geometry  # QRectF occupied by the whole bar
        self.dist = dist          # sequence of segment heights
        self.colors = colors      # one QColor per segment
        self.__picture = None     # lazily-built QPicture cache
    def paint(self, painter, options, widget):
        # Render lazily, then replay the cached picture.
        if self.__picture is None:
            self.__paint()
        painter.drawPicture(0, 0, self.__picture)
    def boundingRect(self):
        return self.geometry
    def __paint(self):
        """Record the bar's segments into the QPicture cache."""
        picture = QtGui.QPicture()
        painter = QtGui.QPainter(picture)
        pen = QtGui.QPen(QtGui.QBrush(Qt.white), 0.5)
        pen.setCosmetic(True)
        painter.setPen(pen)
        geom = self.geometry
        x, y = geom.x(), geom.y()
        w, h = geom.width(), geom.height()
        # Each segment gets an equal share of the bar's width.
        wsingle = w / len(self.dist)
        for d, c in zip(self.dist, self.colors):
            painter.setBrush(QtGui.QBrush(c))
            painter.drawRect(QtCore.QRectF(x, y, wsingle, d * h))
            x += wsingle
        painter.end()
        self.__picture = picture
class OWDistributions(widget.OWWidget):
    """Orange widget plotting the distribution of one data feature,
    optionally grouped by a discrete variable.

    Continuous features are shown as ASH density curves (or binned into
    histograms when `disc_cont` is set); discrete features as bar charts.
    With a group-by variable, per-group curves/bars and optional
    conditional probabilities (right axis) are drawn.
    """
    name = "Distributions"
    description = "Display value distributions of a data feature in a graph."
    icon = "icons/Distribution.svg"
    priority = 100
    inputs = [InputSignal("Data", Orange.data.Table, "set_data",
                          doc="Set the input data set")]
    settingsHandler = settings.DomainContextHandler(
        match_values=settings.DomainContextHandler.MATCH_VALUES_ALL)
    #: Selected variable index
    variable_idx = settings.ContextSetting(-1)
    #: Selected group variable
    groupvar_idx = settings.ContextSetting(0)
    # Show relative (per-group-normalized) frequencies instead of counts.
    relative_freq = settings.Setting(False)
    # Treat continuous variables as discrete by equal-width binning.
    disc_cont = settings.Setting(False)
    # Index into `smoothing_facs` / `bins` (controlled by the slider).
    smoothing_index = settings.Setting(5)
    # 0 = no probabilities; 1..k = one group value; k+1 = all values.
    show_prob = settings.ContextSetting(0)
    graph_name = "plot"
    # Number of shifted histograms used by the ASH estimator.
    ASH_HIST = 50
    bins = [ 2, 3, 4, 5, 8, 10, 12, 15, 20, 30, 50 ]
    smoothing_facs = list(reversed([ 0.1, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 4, 6, 10 ]))
    def __init__(self):
        super().__init__()
        self.data = None
        self.distributions = None
        self.contingencies = None
        self.var = self.cvar = None
        # --- control area: variable list -------------------------------
        varbox = gui.vBox(self.controlArea, "Variable")
        self.varmodel = itemmodels.VariableListModel()
        self.groupvarmodel = []
        self.varview = QtGui.QListView(
            selectionMode=QtGui.QListView.SingleSelection)
        self.varview.setSizePolicy(
            QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.varview.setModel(self.varmodel)
        self.varview.setSelectionModel(
            itemmodels.ListSingleSelectionModel(self.varmodel))
        self.varview.selectionModel().selectionChanged.connect(
            self._on_variable_idx_changed)
        varbox.layout().addWidget(self.varview)
        # --- control area: precision / smoothing -----------------------
        box = gui.vBox(self.controlArea, "Precision")
        gui.separator(self.controlArea, 4, 4)
        box2 = gui.hBox(box)
        self.l_smoothing_l = gui.widgetLabel(box2, "Smooth")
        gui.hSlider(box2, self, "smoothing_index",
                    minValue=0, maxValue=len(self.smoothing_facs) - 1,
                    callback=self._on_set_smoothing, createLabel=False)
        self.l_smoothing_r = gui.widgetLabel(box2, "Precise")
        self.cb_disc_cont = gui.checkBox(
            gui.indentedBox(box, sep=4),
            self, "disc_cont", "Bin continuous variables",
            callback=self._on_groupvar_idx_changed,
            tooltip="Show continuous variables as discrete.")
        # --- control area: group-by ------------------------------------
        box = gui.vBox(self.controlArea, "Group by")
        self.icons = gui.attributeIconDict
        self.groupvarview = gui.comboBox(box, self, "groupvar_idx",
            callback=self._on_groupvar_idx_changed, valueType=str,
            contentsLength=12)
        box2 = gui.indentedBox(box, sep=4)
        self.cb_rel_freq = gui.checkBox(
            box2, self, "relative_freq", "Show relative frequencies",
            callback=self._on_relative_freq_changed,
            tooltip="Normalize probabilities so that probabilities for each group-by value sum to 1.")
        gui.separator(box2)
        self.cb_prob = gui.comboBox(
            box2, self, "show_prob", label="Show probabilities:",
            orientation=Qt.Horizontal,
            callback=self._on_relative_freq_changed,
            tooltip="Show probabilities for a chosen group-by value (at each point probabilities for all group-by values sum to 1).")
        # --- main area: pyqtgraph plot with a second (probability) axis
        self.plotview = pg.PlotWidget(background=None)
        self.plotview.setRenderHint(QtGui.QPainter.Antialiasing)
        self.mainArea.layout().addWidget(self.plotview)
        w = QtGui.QLabel()
        w.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        self.mainArea.layout().addWidget(w, Qt.AlignCenter)
        self.ploti = pg.PlotItem()
        self.plot = self.ploti.vb
        self.ploti.hideButtons()
        self.plotview.setCentralItem(self.ploti)
        # Secondary ViewBox overlaid for probability curves, linked on X.
        self.plot_prob = pg.ViewBox()
        self.ploti.hideAxis('right')
        self.ploti.scene().addItem(self.plot_prob)
        self.ploti.getAxis("right").linkToView(self.plot_prob)
        self.ploti.getAxis("right").setLabel("Probability")
        self.plot_prob.setZValue(10)
        self.plot_prob.setXLink(self.ploti)
        self.update_views()
        self.ploti.vb.sigResized.connect(self.update_views)
        self.plot_prob.setRange(yRange=[0,1])
        def disable_mouse(plot):
            plot.setMouseEnabled(False, False)
            plot.setMenuEnabled(False)
        disable_mouse(self.plot)
        disable_mouse(self.plot_prob)
        # (viewbox, item) pairs consulted by help_event for tooltips.
        self.tooltip_items = []
        self.plot.scene().installEventFilter(
            HelpEventDelegate(self.help_event, self))
        pen = QtGui.QPen(self.palette().color(QtGui.QPalette.Text))
        for axis in ("left", "bottom"):
            self.ploti.getAxis(axis).setPen(pen)
        self._legend = LegendItem()
        self._legend.setParentItem(self.plot)
        self._legend.hide()
        self._legend.anchor((1, 0), (1, 0))
    def update_views(self):
        """Keep the probability overlay geometry in sync with the plot."""
        self.plot_prob.setGeometry(self.plot.sceneBoundingRect())
        self.plot_prob.linkedViewChanged(self.plot, self.plot_prob.XAxis)
    def set_data(self, data):
        """Input signal handler: (re)populate variable models and redraw."""
        self.closeContext()
        self.clear()
        self.warning(0)
        self.data = data
        if self.data is not None:
            if not self.data:
                self.warning(0, "Empty input data cannot be visualized")
                return
            domain = self.data.domain
            self.varmodel[:] = list(domain)
            # Plottable metas (continuous/discrete) are offered as well.
            # NOTE(review): reads the private `domain._metas` — confirm no
            # public accessor exists in this Orange version.
            for meta in domain._metas:
                if meta.is_continuous or meta.is_discrete:
                    self.varmodel.append(meta)
            self.groupvarview.clear()
            self.groupvarmodel = \
                ["(None)"] + [var for var in domain if var.is_discrete]
            self.groupvarview.addItem("(None)")
            for var in self.groupvarmodel[1:]:
                self.groupvarview.addItem(self.icons[var], var.name)
            # Default the group-by to the class variable when discrete.
            if domain.has_discrete_class:
                self.groupvar_idx = \
                    self.groupvarmodel[1:].index(domain.class_var) + 1
            self.openContext(domain)
            # Clamp restored context indices to the current models.
            self.variable_idx = min(max(self.variable_idx, 0),
                                    len(self.varmodel) - 1)
            self.groupvar_idx = min(max(self.groupvar_idx, 0),
                                    len(self.groupvarmodel) - 1)
            itemmodels.select_row(self.varview, self.variable_idx)
            self._setup()
    def clear(self):
        """Reset plots, models and selection state."""
        self.plot.clear()
        self.plot_prob.clear()
        self.varmodel[:] = []
        self.groupvarmodel = []
        self.variable_idx = -1
        self.groupvar_idx = 0
        self._legend.clear()
        self._legend.hide()
    def _setup_smoothing(self):
        """Relabel the precision slider for density vs. binning mode."""
        if not self.disc_cont and self.var and self.var.is_continuous:
            self.cb_disc_cont.setText("Bin continuous variables")
            self.l_smoothing_l.setText("Smooth")
            self.l_smoothing_r.setText("Precise")
        else:
            self.cb_disc_cont.setText("Bin continuous variables into {} bins".
                                      format(self.bins[self.smoothing_index]))
            self.l_smoothing_l.setText(" " + str(self.bins[0]))
            self.l_smoothing_r.setText(" " + str(self.bins[-1]))
    def _setup(self):
        """Recompute distributions/contingencies and redraw everything."""
        self.plot.clear()
        self.plot_prob.clear()
        self._legend.clear()
        self._legend.hide()
        varidx = self.variable_idx
        self.var = self.cvar = None
        if varidx >= 0:
            self.var = self.varmodel[varidx]
        if self.groupvar_idx > 0:
            self.cvar = self.groupvarmodel[self.groupvar_idx]
            # Probability combo: "(None)", each group value, "(All)".
            self.cb_prob.clear()
            self.cb_prob.addItem("(None)")
            self.cb_prob.addItems(self.cvar.values)
            self.cb_prob.addItem("(All)")
            self.show_prob = min(max(self.show_prob, 0),
                                 len(self.cvar.values) + 1)
        data = self.data
        self._setup_smoothing()
        if self.var is None:
            return
        if self.disc_cont:
            # Discretize a working copy restricted to the shown variables.
            data = self.data[:, (self.var, self.cvar) if self.cvar else self.var]
            disc = Orange.preprocess.discretize.EqualWidth(n=self.bins[self.smoothing_index])
            data = Orange.preprocess.Discretize(data, method=disc,
                                                remove_const=False)
            self.var = data.domain[0]
        self.set_left_axis_name()
        self.enable_disable_rel_freq()
        if self.cvar:
            self.contingencies = \
                contingency.get_contingency(data, self.var, self.cvar)
            self.display_contingency()
        else:
            self.distributions = \
                distribution.get_distribution(data, self.var)
            self.display_distribution()
        self.plot.autoRange()
    def help_event(self, ev):
        """Show the tooltip of whichever registered item is under `ev`."""
        in_graph_coor = self.plot.mapSceneToView(ev.scenePos())
        ctooltip = []
        for vb, item in self.tooltip_items:
            if isinstance(item, pg.PlotCurveItem) and item.mouseShape().contains(vb.mapSceneToView(ev.scenePos())):
                ctooltip.append(item.tooltip)
            elif isinstance(item, DistributionBarItem) and item.boundingRect().contains(vb.mapSceneToView(ev.scenePos())):
                ctooltip.append(item.tooltip)
        if ctooltip:
            QToolTip.showText(ev.screenPos(), "\n\n".join(ctooltip), widget=self.plotview)
            return True
        return False
    def display_distribution(self):
        """Draw the ungrouped distribution (density curve or bar chart)."""
        dist = self.distributions
        var = self.var
        assert len(dist) > 0
        self.plot.clear()
        self.plot_prob.clear()
        self.ploti.hideAxis('right')
        self.tooltip_items = []
        bottomaxis = self.ploti.getAxis("bottom")
        bottomaxis.setLabel(var.name)
        bottomaxis.resizeEvent()
        self.set_left_axis_name()
        if var and var.is_continuous:
            bottomaxis.setTicks(None)
            if not len(dist[0]):
                return
            edges, curve = ash_curve(dist, None, m=OWDistributions.ASH_HIST,
                smoothing_factor=self.smoothing_facs[self.smoothing_index])
            # Shift edges to bin centres for plotting.
            edges = edges + (edges[1] - edges[0])/2
            edges = edges[:-1]
            item = pg.PlotCurveItem()
            pen = QtGui.QPen(QtGui.QBrush(Qt.white), 3)
            pen.setCosmetic(True)
            item.setData(edges, curve, antialias=True, stepMode=False,
                         fillLevel=0, brush=QtGui.QBrush(Qt.gray), pen=pen)
            self.plot.addItem(item)
            item.tooltip = "Density"
            self.tooltip_items.append((self.plot, item))
        else:
            bottomaxis.setTicks([list(enumerate(var.values))])
            for i, w in enumerate(dist):
                geom = QtCore.QRectF(i - 0.33, 0, 0.66, w)
                item = DistributionBarItem(geom, [1.0],
                                           [QtGui.QColor(128, 128, 128)])
                self.plot.addItem(item)
                item.tooltip = "Frequency for %s: %r" % (var.values[i], w)
                self.tooltip_items.append((self.plot, item))
    def _on_relative_freq_changed(self):
        """Redraw after toggling relative frequencies / probability combo."""
        self.set_left_axis_name()
        if self.cvar and self.cvar.is_discrete:
            self.display_contingency()
        else:
            self.display_distribution()
        self.plot.autoRange()
    def display_contingency(self):
        """
        Set the contingency to display.
        """
        cont = self.contingencies
        var, cvar = self.var, self.cvar
        assert len(cont) > 0
        self.plot.clear()
        self.plot_prob.clear()
        self._legend.clear()
        self.tooltip_items = []
        if self.show_prob:
            self.ploti.showAxis('right')
        else:
            self.ploti.hideAxis('right')
        bottomaxis = self.ploti.getAxis("bottom")
        bottomaxis.setLabel(var.name)
        bottomaxis.resizeEvent()
        cvar_values = cvar.values
        colors = [QtGui.QColor(*col) for col in cvar.colors]
        if var and var.is_continuous:
            # Continuous variable: one ASH curve per (non-empty) group.
            bottomaxis.setTicks(None)
            weights, cols, cvar_values, curves = [], [], [], []
            for i, dist in enumerate(cont):
                v, W = dist
                if len(v):
                    weights.append(numpy.sum(W))
                    cols.append(colors[i])
                    cvar_values.append(cvar.values[i])
                    curves.append(ash_curve(dist, cont, m=OWDistributions.ASH_HIST,
                        smoothing_factor=self.smoothing_facs[self.smoothing_index]))
            weights = numpy.array(weights)
            sumw = numpy.sum(weights)
            weights /= sumw
            colors = cols
            curves = [(X, Y * w) for (X, Y), w in zip(curves, weights)]
            ncval = len(cvar_values)
            curvesline = [] #from histograms to lines
            for (X,Y) in curves:
                X = X + (X[1] - X[0])/2
                X = X[:-1]
                X = numpy.array(X)
                Y = numpy.array(Y)
                curvesline.append((X,Y))
            # Draw fills first, then outlines, so lines stay on top.
            for t in [ "fill", "line" ]:
                for (X, Y), color, w, cval in reversed(list(zip(curvesline, colors, weights, cvar_values))):
                    item = pg.PlotCurveItem()
                    pen = QtGui.QPen(QtGui.QBrush(color), 3)
                    pen.setCosmetic(True)
                    color = QtGui.QColor(color)
                    color.setAlphaF(0.2)
                    item.setData(X, Y/(w if self.relative_freq else 1), antialias=True, stepMode=False,
                                 fillLevel=0 if t == "fill" else None,
                                 brush=QtGui.QBrush(color), pen=pen)
                    self.plot.addItem(item)
                    if t == "line":
                        item.tooltip = ("Normalized density " if self.relative_freq else "Density ") \
                            + "\n"+ cvar.name + "=" + cval
                        self.tooltip_items.append((self.plot, item))
            if self.show_prob:
                M_EST = 5 #for M estimate
                # Resample all curves onto a common X grid, then derive
                # conditional probabilities where the total density is
                # large enough to be trustworthy.
                all_X = numpy.array(numpy.unique(numpy.hstack([X for X,_ in curvesline])))
                inter_X = numpy.array(numpy.linspace(all_X[0], all_X[-1], len(all_X)*2))
                curvesinterp = [ numpy.interp(inter_X, X, Y) for (X,Y) in curvesline ]
                sumprob = numpy.sum(curvesinterp, axis=0)
                # allcorrection = M_EST/sumw*numpy.sum(sumprob)/len(inter_X)
                legal = sumprob > 0.05 * numpy.max(sumprob)
                i = len(curvesinterp) + 1
                show_all = self.show_prob == i
                for Y, color, cval in reversed(list(zip(curvesinterp, colors, cvar_values))):
                    i -= 1
                    if show_all or self.show_prob == i:
                        item = pg.PlotCurveItem()
                        pen = QtGui.QPen(QtGui.QBrush(color), 3, style=QtCore.Qt.DotLine)
                        pen.setCosmetic(True)
                        #prob = (Y+allcorrection/ncval)/(sumprob+allcorrection)
                        prob = Y[legal] / sumprob[legal]
                        item.setData(inter_X[legal], prob, antialias=True, stepMode=False,
                                     fillLevel=None, brush=None, pen=pen)
                        self.plot_prob.addItem(item)
                        item.tooltip = "Probability that \n" + cvar.name + "=" + cval
                        self.tooltip_items.append((self.plot_prob, item))
        elif var and var.is_discrete:
            # Discrete variable: stacked/grouped bars per value.
            bottomaxis.setTicks([list(enumerate(var.values))])
            cont = numpy.array(cont)
            ncval = len(cvar_values)
            maxh = 0 #maximal column height
            maxrh = 0 #maximal relative column height
            scvar = cont.sum(axis=1)
            #a cvar with sum=0 with allways have distribution counts 0,
            #therefore we can divide it by anything
            scvar[scvar==0] = 1
            for i, (value, dist) in enumerate(zip(var.values, cont.T)):
                maxh = max(maxh, max(dist))
                maxrh = max(maxrh, max(dist/scvar))
            for i, (value, dist) in enumerate(zip(var.values, cont.T)):
                dsum = sum(dist)
                geom = QtCore.QRectF(i - 0.333, 0, 0.666, maxrh
                                     if self.relative_freq else maxh)
                if self.show_prob:
                    prob = dist / dsum
                    ci = 1.96 * numpy.sqrt(prob * (1 - prob) / dsum)
                else:
                    ci = None
                item = DistributionBarItem(geom, dist/scvar/maxrh
                                           if self.relative_freq
                                           else dist/maxh, colors)
                self.plot.addItem(item)
                tooltip = "\n".join("%s: %.*f" % (n, 3 if self.relative_freq else 1, v)
                                    for n,v in zip(cvar_values, dist/scvar if self.relative_freq else dist ))
                item.tooltip = ("Normalized frequency " if self.relative_freq else "Frequency ") \
                    + "(" + cvar.name + "=" + value + "):" \
                    + "\n" + tooltip
                self.tooltip_items.append((self.plot, item))
                if self.show_prob:
                    item.tooltip += "\n\nProbabilities:"
                    for ic, a in enumerate(dist):
                        # Only the selected group value (or all of them).
                        if self.show_prob - 1 != ic and \
                                self.show_prob - 1 != len(dist):
                            continue
                        position = -0.333 + ((ic+0.5)*0.666/len(dist))
                        if dsum < 1e-6:
                            continue
                        prob = a / dsum
                        # Skip degenerate probabilities (CI would be 0).
                        if not 1e-6 < prob < 1 - 1e-6:
                            continue
                        ci = 1.96 * sqrt(prob * (1 - prob) / dsum)
                        item.tooltip += "\n%s: %.3f ± %.3f" % (cvar_values[ic], prob, ci)
                        mark = pg.ScatterPlotItem()
                        bar = pg.ErrorBarItem()
                        pen = QtGui.QPen(QtGui.QBrush(QtGui.QColor(0)), 1)
                        pen.setCosmetic(True)
                        bar.setData(x=[i+position], y=[prob],
                                    bottom=min(numpy.array([ci]), prob),
                                    top=min(numpy.array([ci]), 1 - prob),
                                    beam=numpy.array([0.05]),
                                    brush=QtGui.QColor(1), pen=pen)
                        mark.setData([i+position], [prob], antialias=True, symbol="o",
                                     fillLevel=None, pxMode=True, size=10,
                                     brush=QtGui.QColor(colors[ic]), pen=pen)
                        self.plot_prob.addItem(bar)
                        self.plot_prob.addItem(mark)
        for color, name in zip(colors, cvar_values):
            self._legend.addItem(
                ScatterPlotItem(pen=color, brush=color, size=10, shape="s"),
                escape(name)
            )
        self._legend.show()
    def set_left_axis_name(self):
        """Pick the left-axis label based on variable type and normalization."""
        leftaxis = self.ploti.getAxis("left")
        set_label = leftaxis.setLabel
        if self.var and self.var.is_continuous:
            set_label(["Density", "Relative density"]
                      [self.cvar is not None and self.relative_freq])
        else:
            set_label(["Frequency", "Relative frequency"]
                      [self.cvar is not None and self.relative_freq])
        leftaxis.resizeEvent()
    def enable_disable_rel_freq(self):
        """Group-by-dependent controls only make sense with both vars set."""
        self.cb_prob.setDisabled(self.var is None or self.cvar is None)
        self.cb_rel_freq.setDisabled(
            self.var is None or self.cvar is None)
    def _on_variable_idx_changed(self):
        self.variable_idx = selected_index(self.varview)
        self._setup()
    def _on_groupvar_idx_changed(self):
        self._setup()
    def _on_set_smoothing(self):
        self._setup()
    def onDeleteWidget(self):
        self.plot.clear()
        super().onDeleteWidget()
    def get_widget_name_extension(self):
        """Variable name appended to the widget title (None if unset)."""
        if self.variable_idx >= 0:
            return self.varmodel[self.variable_idx]
    def send_report(self):
        """Compose the report caption describing the current view."""
        if self.variable_idx < 0:
            return
        self.report_plot()
        text = "Distribution of '{}'".format(
            self.varmodel[self.variable_idx])
        if self.groupvar_idx:
            group_var = self.groupvarmodel[self.groupvar_idx]
            prob = self.cb_prob
            indiv_probs = 0 < prob.currentIndex() < prob.count() - 1
            if not indiv_probs or self.relative_freq:
                text += " grouped by '{}'".format(group_var)
                if self.relative_freq:
                    text += " (relative frequencies)"
            if indiv_probs:
                text += "; probabilites for '{}={}'".format(
                    group_var, prob.currentText())
        self.report_caption(text)
def dist_sum(D1, D2):
    """
    A sum of two continuous distributions.

    Each distribution is a pair ``(values, weights)``; the result merges
    both supports, summing the weights of coinciding values.
    """
    x1, w1 = D1
    x2, w2 = D2
    xs = numpy.r_[x1, x2]
    ws = numpy.r_[w1, w2]
    order = numpy.argsort(xs)
    xs, ws = xs[order], ws[order]
    # First occurrence of each distinct value delimits a run of duplicates.
    uniq, first = numpy.unique(xs, return_index=True)
    bounds = numpy.r_[first, len(xs)]
    summed = numpy.array([ws[lo:hi].sum()
                          for lo, hi in zip(bounds[:-1], bounds[1:])])
    assert summed.shape[0] == uniq.shape[0]
    return uniq, summed
def ash_curve(dist, cont=None, bandwidth=None, m=3, smoothing_factor=1):
    """Average-shifted-histogram density estimate of a weighted sample.

    `dist` is a pair (values, weights). When `bandwidth` is not given it
    is derived from the weighted standard deviation with a Scott-like
    rule, 3.5 * std * size**(-1/3), with fallbacks for degenerate cases.
    Returns ``(edges, hist)`` as produced by `average_shifted_histogram`
    (note the swapped order relative to that helper's return value).
    """
    dist = numpy.asarray(dist)
    X, W = dist
    if bandwidth is None:
        std = weighted_std(X, weights=W)
        size = X.size
        # if only one sample in the class
        if std == 0 and cont is not None:
            # Fall back to the spread of the whole contingency.
            std = weighted_std(cont.values, weights=numpy.sum(cont.counts, axis=0))
            size = cont.values.size
        # if attr is constant or contingencies is None (no class variable)
        if std == 0:
            std = 0.1
            size = X.size
        bandwidth = 3.5 * std * (size ** (-1 / 3))
    hist, edges = average_shifted_histogram(X, bandwidth, m, weights=W,
                                            smoothing=smoothing_factor)
    return edges, hist
def average_shifted_histogram(a, h, m=3, weights=None, smoothing=1):
    """
    Compute a Gaussian-smoothed average shifted histogram, normalized to
    a density.

    Parameters
    ----------
    a : array-like
        Input data.
    h : float
        Base bin width.
    m : int
        Number of shifted histograms.
    weights : array-like, optional
        An array of weights of the same shape as `a`.
    smoothing : float
        Multiplier on the effective bin width (larger => smoother).

    Returns
    -------
    (hist, edges) : tuple of numpy arrays
        Density values and the corresponding histogram bin edges.
    """
    a = numpy.asarray(a)
    if weights is not None:
        weights = numpy.asarray(weights)
        if weights.shape != a.shape:
            raise ValueError("weights should have the same shape as a")
        weights = weights.ravel()
    a = a.ravel()
    amin, amax = a.min(), a.max()
    h = h * 0.5 * smoothing
    delta = h / m
    wfac = 4  # extended window for gaussian smoothing
    offset = (wfac * m - 1) * delta
    # BUG FIX: numpy.ceil returns a float, but numpy.linspace requires an
    # integral `num` argument on modern NumPy -- cast explicitly.
    nbins = int(max(numpy.ceil((amax - amin + 2 * offset) / delta),
                    2 * m * wfac - 1))
    bins = numpy.linspace(amin - offset, amax + offset, nbins + 1,
                          endpoint=True)
    hist, edges = numpy.histogram(a, bins, weights=weights, density=True)
    # Normalized Gaussian kernel over the extended window.
    kernel = gaussian_kernel(
        (numpy.arange(2 * wfac * m - 1) - (wfac * m - 1)) / (wfac * m), wfac)
    kernel = kernel / numpy.sum(kernel)
    ash = numpy.convolve(hist, kernel, mode="same")
    # Renormalize so the curve integrates to 1 over the edges.
    ash = ash / numpy.diff(edges) / ash.sum()
    return ash, edges
def triangular_kernel(x):
    """
    Triangular kernel: ``max(0, 1 - |x|)``.

    BUG FIX: the arguments to ``numpy.clip`` were in the wrong order
    (``clip(1, 0, 1 - |x|)``), which returned negative values for
    ``|x| > 1`` instead of clipping the kernel to zero outside [-1, 1].
    """
    return numpy.clip(1 - numpy.abs(x), 0, 1)
def gaussian_kernel(x, k):
    """
    Gaussian kernel evaluated at ``x``, scaled so that ``k`` standard
    deviations fit into the interval [-1, 1].
    """
    scaled = x * k
    return numpy.exp(-scaled ** 2 / 2) / numpy.sqrt(2 * numpy.pi)
def weighted_std(a, axis=None, weights=None, ddof=0):
    """
    Weighted standard deviation of ``a`` along ``axis``.

    ``ddof`` applies the usual delta-degrees-of-freedom correction to the
    weighted variance before taking the square root.
    """
    mu = numpy.average(a, axis=axis, weights=weights)
    if axis is not None:
        # Re-insert reduced axes as length-1 so the mean broadcasts back
        # against `a`.
        mu = mu.reshape(shape_reduce_keep_dims(a.shape, axis))
    deviations = (a - mu) ** 2
    variance, total_weight = numpy.average(
        deviations, axis=axis, weights=weights, returned=True
    )
    if ddof != 0:
        variance *= total_weight / (total_weight - ddof)
    return numpy.sqrt(variance)
def weighted_quantiles(a, prob=(0.25, 0.5, 0.75), alphap=0.4, betap=0.4,
                       axis=None, weights=None):
    """
    Weighted sample quantiles of ``a`` at probabilities ``prob``.

    Uses plotting positions with parameters (alphap, betap) in the style
    of ``scipy.stats.mstats.mquantiles`` and linear interpolation between
    the resulting knots; values outside the knot range clamp to the
    smallest/largest sample.

    BUG FIX: the default for ``prob`` was a mutable list; replaced with an
    equivalent tuple (same values, safe default).
    """
    a = numpy.asarray(a)
    prob = numpy.asarray(prob)
    sort_ind = numpy.argsort(a, axis)
    a = a[sort_ind]
    if weights is None:
        weights = numpy.ones_like(a)
    else:
        weights = numpy.asarray(weights)
        weights = weights[sort_ind]
    n = numpy.sum(weights)
    k = numpy.cumsum(weights, axis)
    # plotting positions for the known n knots
    pk = (k - alphap * weights) / (n + 1 - alphap * weights - betap * weights)
    return numpy.interp(prob, pk, a, left=a[0], right=a[-1])
def shape_reduce_keep_dims(shape, axis):
    """
    Return `shape` with the axis (or axes) given by `axis` replaced by 1,
    i.e. the shape of a reduction performed with ``keepdims=True``.

    Returns () when `shape` is None.
    """
    # BUG FIX: `collections.Sequence` was removed in Python 3.10; the ABC
    # lives in `collections.abc`.  Imported locally to keep this block
    # self-contained.
    from collections.abc import Sequence
    if shape is None:
        return ()
    shape = list(shape)
    if isinstance(axis, Sequence):
        for ax in axis:
            shape[ax] = 1
    else:
        shape[axis] = 1
    return tuple(shape)
def main(argv=None):
    """Stand-alone entry point: show the widget on a data set named on the
    command line (defaults to "heart_disease")."""
    import gc
    if argv is None:
        argv = sys.argv
    argv = list(argv)
    # NOTE(review): Qt5 moved QApplication to QtWidgets; confirm that the
    # QtGui module imported by this file still provides it.
    app = QtGui.QApplication(argv)
    w = OWDistributions()
    w.show()
    if len(argv) > 1:
        filename = argv[1]
    else:
        filename = "heart_disease"
    data = Orange.data.Table(filename)
    w.set_data(data)
    w.handleNewSignals()
    rval = app.exec_()
    # Tear down deterministically so widget cleanup runs before the
    # interpreter exits.
    w.set_data(None)
    w.handleNewSignals()
    w.deleteLater()
    del w
    app.processEvents()
    gc.collect()
    return rval

if __name__ == "__main__":
    sys.exit(main())
| {
"content_hash": "c716ae7b64e232c65cb446ababa2af71",
"timestamp": "",
"source": "github",
"line_count": 760,
"max_line_length": 133,
"avg_line_length": 37.098684210526315,
"alnum_prop": 0.5513743571555241,
"repo_name": "qPCR4vir/orange3",
"id": "2166b09572fa1ec558cf2324fe8ec4565e333d96",
"size": "28196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Orange/widgets/visualize/owdistributions.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "20412"
},
{
"name": "C++",
"bytes": "1992"
},
{
"name": "GLSL",
"bytes": "75"
},
{
"name": "HTML",
"bytes": "3503"
},
{
"name": "JavaScript",
"bytes": "12007"
},
{
"name": "Jupyter Notebook",
"bytes": "6662"
},
{
"name": "NSIS",
"bytes": "20281"
},
{
"name": "Python",
"bytes": "4205054"
},
{
"name": "Shell",
"bytes": "48335"
}
],
"symlink_target": ""
} |
import numpy as np
import cv2
cv2.ocl.setUseOpenCL(False)
class LazyFrames(object):
    """
    From OpenAI Baselines
    (https://github.com/openai/baselines/blob/master/baselines/common/atari_wrappers.py).

    Memory-friendly container for a stack of frames (e.g. Atari frames in
    DQN): the frames are kept in a plain list and only materialized into a
    contiguous numpy array on demand, so stacked observations can share
    the underlying per-frame storage.
    """

    def __init__(self, frames, history_length):
        assert len(frames) == history_length
        self._frames = frames

    def __array__(self, dtype=None):
        stacked = np.array(self._frames)
        return stacked if dtype is None else stacked.astype(dtype)

    def copy(self):
        # Frames are treated as immutable, so the object itself serves as
        # its own copy.
        return self

    @property
    def shape(self):
        return (len(self._frames),) + self._frames[0].shape
def preprocess_frame(obs, img_size):
    """
    Convert a frame from rgb to grayscale and resize it.

    Args:
        obs (np.ndarray): array representing an rgb frame;
        img_size (tuple): target size for images.

    Returns:
        The transformed frame as 8 bit integer array.
    """
    gray = cv2.cvtColor(obs, cv2.COLOR_RGB2GRAY)
    resized = cv2.resize(gray, img_size, interpolation=cv2.INTER_LINEAR)
    return resized.astype(np.uint8)
| {
"content_hash": "5c9354c589c71a4e3141911a2f7d633b",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 86,
"avg_line_length": 26.50943396226415,
"alnum_prop": 0.6548042704626335,
"repo_name": "carloderamo/mushroom",
"id": "2b855634b0acd7cf81e87cd557f3c506f380d50c",
"size": "1405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mushroom_rl/utils/frames.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "197211"
}
],
"symlink_target": ""
} |
import models
import config
import utils
import imp
import numpy as np
from numpy import ma
# Reload project modules so interactive (REPL/notebook) edits are picked up.
imp.reload(config)
imp.reload(models)
imp.reload(utils)
# Scene-selection parameters, all taken from the central config module.
data_dir = config.data_dir
path = config.path
row = config.row
time = config.time
ul = config.ul  # presumably the upper-left corner of the subregion -- TODO confirm
len_sq = config.len_sq  # presumably the square subregion's side length -- TODO confirm
band_option = config.band_option
b = band_option  # short alias used when composing band variable names below
resolution_global_var = config.resolution_global_var
def get_var_before_mask(var):
    """Load the raw (unmasked) data for band ``var`` from the scene's
    netCDF file, cropped to the configured subregion."""
    scene = models.NetcdfVarModel(data_dir, path, row, time, var)
    return scene.data(var, ul=ul, len_sq=len_sq)
def get_mask():
    """Build an invalid-pixel mask from the thermal band: pixels with a
    value below 200 end up as 255, all others as 0.  The 1e12/1e10 values
    are temporary sentinels used while rewriting the array in place (the
    order of the three assignments matters)."""
    mask = get_var_before_mask('BT_B10')
    mask[np.where(mask<200)] = 1e12    # sentinel for "invalid"
    mask[np.where(mask<1e10)] = 0      # everything that was >= 200
    mask[np.where(mask==1e12)] = 255   # sentinel -> final flag value
    return mask

def get_angles():
    """Return a dict of scene geometry attributes (dimensions and
    viewing/solar angles).

    NOTE(review): the keys 'theta_0 ' and 'phi_v ' contain trailing
    spaces -- almost certainly a typo, but callers may rely on the exact
    strings; confirm before fixing.
    """
    Scene = models.NetcdfVarModel(data_dir, path, row, time, 'BT_B10')
    Scene.setup_file()
    print(Scene.full_path)
    Scene.connect_to_nc(dims=True)
    scene_attributes = {}
    scene_attributes['dimensions'] = Scene.dimensions
    scene_attributes['theta_v'] = Scene.theta_v
    scene_attributes['theta_0 '] = Scene.theta_0
    scene_attributes['phi_v '] = Scene.phi_v
    scene_attributes['phi_0'] = Scene.phi_0
    return scene_attributes

# scene_attributes = get_angles()
# Module-level mask, computed once at import time.
mask = get_mask()

def get_var(var, mask=mask, resolution=2048):
    '''
    Get the data from the requested variable band, resized (when
    resolution_global_var is set) and with invalid pixels masked out.

    NOTE: the default ``mask`` is bound once at import time.
    TODO:
    Choose according to lat and lon values.
    '''
    if resolution_global_var:
        mask = utils.get_resized_array(mask, resolution)  # get_mask()
        result = get_var_before_mask(var)
        result = utils.interp_and_resize(result, resolution)
        print(result.shape)
        result = ma.masked_where(mask==255, result)
    else:
        result = get_var_before_mask(var)
        result = ma.masked_where(mask==255, result)
    return result
# Thin per-band accessors: each maps a readable band name to the variable
# built from the configured band prefix plus the band's wavelength (nm).
def get_coastal():
    return get_var(b+'443')

def get_blue():
    return get_var(b+'483')

def get_green():
    return get_var(b+'561')

def get_red():
    return get_var(b+'655')

def get_nir():
    return get_var(b+'865')

def get_swir():
    return get_var(b+'1609')

def get_swir2():
    return get_var(b+'2201')

def get_cirrus():
    # Cirrus band is always read as top-of-atmosphere reflectance,
    # regardless of the configured band_option.
    return get_var('rtoa_1373')

def get_temp():
    # Brightness temperature, thermal band 10.
    return get_var('BT_B10')

def get_bqa():
    # Band quality-assessment bitmask.
    return get_var('bqa')

# print(Scene.get_variables_list())
def calc_ndsi():
    """Normalized-difference snow index: (green - swir) / (green + swir)."""
    g = get_green()
    s = get_swir()
    return (g - s) / (g + s)
def calc_ndvi():
    """Normalized-difference vegetation index: (nir - red) / (nir + red)."""
    n = get_nir()
    r = get_red()
    return (n - r) / (n + r)
if __name__ == "__main__":
    # Quick visual sanity check: build an RGB composite of the scene.
    import views
    blue = get_blue()
    green = get_green()
    red = get_red()
    img_scaled = views.create_composite(red, green, blue)
| {
"content_hash": "442186714844b6d0a52949147ae365d1",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 70,
"avg_line_length": 23.298387096774192,
"alnum_prop": 0.6330910349601938,
"repo_name": "nicholaschris/landsatpy",
"id": "e0367340fc12c7fd68eb5b0d9601c960177882ec",
"size": "2889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bands.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2997"
},
{
"name": "Python",
"bytes": "80154"
},
{
"name": "Shell",
"bytes": "6277"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import sys
#use aptexport module from local dir
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../"))
from aptexport import PackageListApt
import unittest
import tempfile
import shutil
import subprocess
class BaseTests(unittest.TestCase):
    """basic test class

    Builds a throw-away APT root directory with a file:// test repository
    before every test, and removes it afterwards.
    """
    def __setup_apt_directory_tree(self):
        """setup directory structure for apt"""
        os.makedirs(os.path.abspath(self.rootdir + "/etc/apt"))
        # create sources.list pointing at the bundled test repository
        repository_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "repository")
        with open(os.path.abspath(self.rootdir + "/etc/apt/sources.list"),
                  "w") as s:
            s.write("""
deb [arch=amd64] file://%(repo_path)s codename1 main
deb [arch=amd64] file://%(repo_path)s codename2 component1 component2
#deb-src file://%(repo_path)s codename1 main component1 component2
#deb-src file://%(repo_path)s codename2
""" % {'repo_path': repository_path})
        # create empty apt.conf
        os.makedirs(os.path.abspath(self.rootdir + "/etc/apt/apt.conf.d"))
        with open(os.path.abspath(self.rootdir + "/etc/apt/apt.conf"),
                  "w") as s:
            s.write("")

    def setUp(self):
        """run before every testcase"""
        self.rootdir = tempfile.mkdtemp(prefix='aptexport-tests_')
        self.__setup_apt_directory_tree()
        self.pla = PackageListApt(rootdir=self.rootdir, cache_update=True)

    def tearDown(self):
        """run after every testcase"""
        if self.rootdir:
            # Only delete paths we are confident we created ourselves.
            if self.rootdir.startswith("/tmp"):
                shutil.rmtree(self.rootdir)
            else:
                sys.stdout.write(
                    "don't delete temp dir '%s' for safety" % (self.rootdir))
class PackageTests(BaseTests):
    def test_package_keys(self):
        """test that the requested keys are available for every package"""
        expected_keys = set(["name", "uri", "version", "summary", "sha256",
                             "provider", "architecture"])
        for pkg in self.pla.package_list_apt(False):
            available_keys = set(pkg.keys())
            # Symmetric difference is empty iff the key sets match exactly.
            self.assertEqual(
                len(available_keys.symmetric_difference(expected_keys)), 0)
class PackageListTests(BaseTests):
    def test_package_dummies_in_all(self):
        """test that the 2 package dummies are available in all package list"""
        # BUG FIX: under Python 3, map() returns a one-shot iterator; the
        # second assertIn resumed iteration after the first match and could
        # miss a name that occurred earlier in the stream.  Materialize the
        # names into a list instead.
        name_list_all = [pkg["name"]
                         for pkg in self.pla.package_list_apt(False)]
        self.assertIn("aptexport-unittest-dummy1-bin1", name_list_all)
        self.assertIn("aptexport-unittest-dummy1-bin2", name_list_all)

    def test_package_dummies_in_installed(self):
        """test that the 2 package dummies are not available in installed
        package list"""
        # Same fix: assertNotIn on a half-consumed iterator could wrongly
        # pass for a name that had already been consumed.
        name_list_installed = [pkg["name"]
                               for pkg in self.pla.package_list_apt(True)]
        self.assertNotIn("aptexport-unittest-dummy1-bin1", name_list_installed)
        self.assertNotIn("aptexport-unittest-dummy1-bin2", name_list_installed)
class ToolsTests(unittest.TestCase):
    """Smoke tests for the command line tools shipped under tools/."""
    def setUp(self):
        tools_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 "../tools")
        self.aptcacheexport_path = os.path.join(tools_dir, "aptcacheexport")
        if not os.path.exists(self.aptcacheexport_path):
            raise Exception("'%s' not found" % self.aptcacheexport_path)

    def test_aptcacheexport_help(self):
        """just run the help and check return value to be sure that there's no
        syntax error"""
        # if return code is != 0, check_output raises a CalledProcessError
        subprocess.check_output([self.aptcacheexport_path, "-h"])
        # now check with an invalid parameter and expect an exception
        self.assertRaises(subprocess.CalledProcessError,
                          subprocess.check_output,
                          (self.aptcacheexport_path,
                           "--invalid-parameter-foo-bar"))
| {
"content_hash": "dc9c3573f614e952d5c26074384e1f19",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 41.27,
"alnum_prop": 0.6118245699055004,
"repo_name": "TelekomCloud/aptexport",
"id": "2077fb60733ab8c5ae92685abda724a7362caa8a",
"size": "4797",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "367"
},
{
"name": "Python",
"bytes": "11302"
}
],
"symlink_target": ""
} |
__author__ = 'Sergei'
from model.contact import Contact
from random import randrange
def test_del_contact(app):
    """Deleting a random contact shrinks the list by exactly one and
    removes exactly that entry."""
    if app.contact.count() == 0:
        # Ensure there is at least one contact to delete.
        app.contact.create_c(Contact(
            first_n="first", mid_n="middle", last_n="last", nick_n="kuk",
            company="adda", address="575 oiweojdckjgsd,russia",
            home_ph="12134519827", cell_ph="120092340980",
            email="first.lastmiddle.@adda.com"))
    contacts_before = app.contact.get_contact_list()
    index = randrange(len(contacts_before))
    app.contact.contact_delete_by_index(index)
    contacts_after = app.contact.get_contact_list()
    assert len(contacts_before) - 1 == len(contacts_after)
    contacts_before[index:index + 1] = []
    assert contacts_before == contacts_after
| {
"content_hash": "22fe4b96f19e7455686a918a18b1cbd8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 174,
"avg_line_length": 43.9375,
"alnum_prop": 0.6799431009957326,
"repo_name": "serglit/python_traning",
"id": "c1def88bd752ddb4143eb05c94350dfc37286bf9",
"size": "703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_del_contact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19746"
}
],
"symlink_target": ""
} |
'''
Remove the two objects created in exercise #3 from the database.
DISCLAIMER NOTE: Solution is limited to the exercise's scope
'''
from net_system.models import NetworkDevice
import django
from termcolor import colored
def main():
    # Delete the 2 NOKIA routers added in exercise 3, then print the
    # remaining database records.  (Python 2 syntax.)
    django.setup()
    try:
        pynet_rtr3 = NetworkDevice.objects.get(device_name='pynet-rtr3')
        pynet_rtr4 = NetworkDevice.objects.get(device_name='pynet-rtr4')
        pynet_rtr3.delete()
        pynet_rtr4.delete()
        print "\n [pynet_rtr3] and [pynet_rtr4] were effectively DELETED from the database"
    except NetworkDevice.DoesNotExist:
        # .get() raises on the first missing device; treat "already gone"
        # as success for this exercise.
        print "\n [pynet_rtr3] and [pynet_rtr4] are not in the database \n"
        pass
    # Verification: list everything that is still stored.
    print "\n CURRENT DATABASE RECORDS \n"
    pylab_devices = NetworkDevice.objects.all()
    for a_device in pylab_devices:
        print "-- " + colored(a_device, 'blue') + " : " + colored(a_device.device_type, 'green') + "\n"

if __name__ == "__main__":
    main()
| {
"content_hash": "5236f160707caacc67e7fe6022be32dc",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 105,
"avg_line_length": 34.06666666666667,
"alnum_prop": 0.6673189823874756,
"repo_name": "p-montero/py-ans",
"id": "99c92424360387b28dce343dda2b8fbe95eab698",
"size": "1044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "class8/ex4_eh.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "50001"
}
],
"symlink_target": ""
} |
import logging
import time
import threading
from dataclasses import dataclass
from typing import List, Dict, Set, Callable
from chip.discovery.library_handle import _GetDiscoveryLibraryHandle
from chip.discovery.types import DiscoverSuccessCallback_t, DiscoverFailureCallback_t
@dataclass(unsafe_hash=True)
class PeerId:
    """Represents a remote peer id."""
    # Fabric the node belongs to.
    fabricId: int
    # Node identifier within that fabric.
    nodeId: int

@dataclass(unsafe_hash=True)
class NodeAddress:
    """Represents a distinct address where a node can be reached."""
    # Network interface index the address was discovered on.
    interface: int
    ip: str
    port: int

@dataclass
class AggregatedDiscoveryResults:
    """Discovery results for a node."""
    peerId: PeerId
    # All distinct addresses reported for the peer so far.
    addresses: Set[NodeAddress]

@dataclass
class PendingDiscovery:
    """Accumulator for ongoing discovery."""
    result: AggregatedDiscoveryResults
    # Invoked with the aggregated results once the wait window closes.
    callback: Callable[[AggregatedDiscoveryResults], None]
    # Absolute time (time.time()) after which the callback fires regardless.
    expireTime: int
    # Absolute time of the first received result; 0 while none received.
    firstResultTime: int

# Seconds (the original comment said "Milliseconds", but the value is added
# directly to time.time()) to wait for additional results once a single
# result has been received.
_RESULT_WAIT_TIME_SEC = 0.05
class _PendingDiscoveries:
    """Manages a list of pending discoveries and associated callbacks."""

    def __init__(self):
        # BUG FIX: activeDiscoveries used to be a *class* attribute, so every
        # instance shared the same mutable list.  Make it per-instance.
        self.activeDiscoveries = []  # list of PendingDiscovery
        self.operationCondition = threading.Condition()
        self.resolution = threading.Thread(
            target=self.ResolutionThread, daemon=True)
        self.resolution.start()

    def Start(self, peerId: PeerId, callback: Callable[[AggregatedDiscoveryResults], None], timeoutMs: int):
        """Add a new pending result item to the internal list."""
        with self.operationCondition:
            self.activeDiscoveries.append(
                PendingDiscovery(
                    AggregatedDiscoveryResults(peerId, addresses=set()),
                    callback=callback,
                    expireTime=time.time() + timeoutMs/1000.0,
                    firstResultTime=0,
                ))
            self.operationCondition.notify()

    def OnSuccess(self, peerId: PeerId, address: NodeAddress):
        """Notify of a successful address resolution."""
        with self.operationCondition:
            for item in self.activeDiscoveries:
                if item.result.peerId != peerId:
                    continue
                item.result.addresses.add(address)
                if item.firstResultTime == 0:
                    item.firstResultTime = time.time()
            self.operationCondition.notify()

    def ResolutionThread(self):
        """Worker loop: fires callbacks for items whose wait window closed."""
        while True:
            with self.operationCondition:
                self.operationCondition.wait(
                    self.ComputeNextEventTimeoutSeconds())
                updatedDiscoveries = []
                for item in self.activeDiscoveries:
                    if self.NeedsCallback(item):
                        try:
                            item.callback(item.result)
                        except Exception:
                            # BUG FIX: was a bare `except:`, which also
                            # swallowed SystemExit/KeyboardInterrupt.
                            logging.exception("Node discovery callback failed")
                    else:
                        updatedDiscoveries.append(item)
                self.activeDiscoveries = updatedDiscoveries

    def NeedsCallback(self, item: PendingDiscovery):
        """Find out if the callback needs to be called for the given item."""
        now = time.time()
        if item.expireTime <= now:
            return True
        if (item.firstResultTime > 0) and (item.firstResultTime + _RESULT_WAIT_TIME_SEC <= now):
            return True
        return False

    def ComputeNextEventTimeoutSeconds(self):
        """Compute how much a thread needs to sleep based on the active discoveries list."""
        sleepTimeSec = 10.0  # upper bound, just in case
        now = time.time()
        for item in self.activeDiscoveries:
            # figure out expiry time for an item
            expireSleep = item.expireTime - now
            if expireSleep < sleepTimeSec:
                sleepTimeSec = expireSleep
            # Allow only a short time window for 'additional results' once we
            # have one.  BUG FIX: previously this was computed even when no
            # result had arrived (firstResultTime == 0), which made the term
            # hugely negative and forced the worker into a 1 ms busy loop for
            # the whole lifetime of every discovery.
            if item.firstResultTime > 0:
                resultSleep = (item.firstResultTime + _RESULT_WAIT_TIME_SEC) - now
                if resultSleep < sleepTimeSec:
                    sleepTimeSec = resultSleep
        # in case our processing missed some expire times, set a very short
        # sleep
        if sleepTimeSec <= 0:
            sleepTimeSec = 0.001
        return sleepTimeSec
# All pending discovery operations awaiting callback results
_gPendingDiscoveries = _PendingDiscoveries()

@DiscoverSuccessCallback_t
def _DiscoverSuccess(fabric: int, node: int, interface: int, ip: str, port: int):
    # Native-library callback: record one resolved address for the peer.
    peerId = PeerId(fabric, node)
    address = NodeAddress(interface, ip, port)
    global _gPendingDiscoveries
    _gPendingDiscoveries.OnSuccess(peerId, address)

@DiscoverFailureCallback_t
def _DiscoverFailure(fabric: int, node: int, errorCode: int):
    # Many discovery errors currently do not include a useful node/fabric id
    # hence we just log and rely on discovery timeouts to return 'no data'
    logging.error("Discovery failure, error %d", errorCode)

def FindAddressAsync(fabricid: int, nodeid: int, callback, timeout_ms=1000):
    """Discovers the IP address(es) of a node.
    Args:
        fabricid: the fabric to which the node is attached
        nodeid: the node id to find
        callback: Will be called once node resolution completes.
        timeout_ms: deliver whatever was found after this many milliseconds.
    Raises:
        Exception: if the native resolver could not be started.
    """
    _GetDiscoveryLibraryHandle().pychip_discovery_set_callbacks(
        _DiscoverSuccess, _DiscoverFailure)
    global _gPendingDiscoveries
    _gPendingDiscoveries.Start(
        PeerId(fabricid, nodeid),
        callback,
        timeout_ms
    )
    res = _GetDiscoveryLibraryHandle().pychip_discovery_resolve(fabricid, nodeid)
    if res != 0:
        raise Exception("Failed to start node resolution")
class _SyncAddressFinder:
"""Performs a blocking wait for an address resolution operation."""
def __init__(self):
self.semaphore = threading.Semaphore(value=0)
self.result = None
def Callback(self, data):
self.result = data
self.semaphore.release()
def WaitForResult(self):
self.semaphore.acquire()
return self.result
def FindAddress(fabricid, nodeid, timeout_ms=1000):
    """Performs an address discovery for a node and returns the result,
    blocking until the asynchronous resolution completes."""
    waiter = _SyncAddressFinder()
    FindAddressAsync(fabricid, nodeid, waiter.Callback, timeout_ms)
    return waiter.WaitForResult()
| {
"content_hash": "8cde00679196ae71c2f38d5811f5d684",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 108,
"avg_line_length": 31.585365853658537,
"alnum_prop": 0.646023166023166,
"repo_name": "nestlabs/connectedhomeip",
"id": "e41360414bee8b154e30ab60acbf6408b3b2b4a3",
"size": "7098",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/controller/python/chip/discovery/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2249120"
},
{
"name": "C++",
"bytes": "17279144"
},
{
"name": "CMake",
"bytes": "126266"
},
{
"name": "Dockerfile",
"bytes": "39266"
},
{
"name": "Emacs Lisp",
"bytes": "1042"
},
{
"name": "Java",
"bytes": "154260"
},
{
"name": "JavaScript",
"bytes": "190569"
},
{
"name": "Jinja",
"bytes": "14915"
},
{
"name": "Kotlin",
"bytes": "177091"
},
{
"name": "Makefile",
"bytes": "7729"
},
{
"name": "Objective-C",
"bytes": "738857"
},
{
"name": "Objective-C++",
"bytes": "295149"
},
{
"name": "Python",
"bytes": "1567221"
},
{
"name": "Shell",
"bytes": "163177"
},
{
"name": "Tcl",
"bytes": "311"
},
{
"name": "ZAP",
"bytes": "582004"
}
],
"symlink_target": ""
} |
"""
Django settings for go_green project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('go_green')
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'rest_framework',
'django_filters',
'rest_framework.authtoken'
)
# Apps specific for this project go here.
LOCAL_APPS = (
'go_green.users', # custom users app
'go_green.event',
'go_green.image',
'go_green.badge',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'corsheaders.middleware.CorsMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
'sites': 'go_green.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""RS""", 'gogreen@gmail.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
'default': env.db('DATABASE_URL', default='postgis:///go_green'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'UPLOADED_FILES_USE_URL': False
}
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
# LOGIN_REDIRECT_URL = 'users:redirect'
# LOGIN_URL = 'account_login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
CORS_ORIGIN_ALLOW_ALL = True
| {
"content_hash": "1ecc7867155d0d3b509426d3d602d3ee",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 98,
"avg_line_length": 35.59574468085106,
"alnum_prop": 0.6053795576808129,
"repo_name": "TraMZzz/GoGreen",
"id": "16115f1b3a4110db6dacb308a5816a66bc02fc98",
"size": "8389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/settings/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "68647"
},
{
"name": "JavaScript",
"bytes": "169091"
},
{
"name": "Python",
"bytes": "51557"
},
{
"name": "Shell",
"bytes": "3828"
}
],
"symlink_target": ""
} |
import base64
import binascii
from functools import cached_property
from rest_framework import generics
from rest_framework.authentication import get_authorization_header
from rest_framework.exceptions import NotFound, ValidationError
from rest_framework.response import Response
from desecapi import metrics
from desecapi.authentication import (
BasicTokenAuthentication,
TokenAuthentication,
URLParamAuthentication,
)
from desecapi.exceptions import ConcurrencyException
from desecapi.models import Domain
from desecapi.pdns_change_tracker import PDNSChangeTracker
from desecapi.permissions import TokenHasDomainDynDNSPermission
from desecapi.renderers import PlainTextRenderer
from desecapi.serializers import RRsetSerializer
class DynDNS12UpdateView(generics.GenericAPIView):
authentication_classes = (
TokenAuthentication,
BasicTokenAuthentication,
URLParamAuthentication,
)
permission_classes = (TokenHasDomainDynDNSPermission,)
renderer_classes = [PlainTextRenderer]
serializer_class = RRsetSerializer
throttle_scope = "dyndns"
@property
def throttle_scope_bucket(self):
    # Rate-limit dynDNS updates per domain name rather than per user/IP.
    return self.domain.name
def _find_ip(self, params, separator):
    """Pick the IP value for one address family.

    ``separator`` distinguishes the family ("." for IPv4, ":" for IPv6).
    URL parameters in ``params`` are checked in priority order; a value
    is accepted if it contains the separator or is "" / "preserve".
    Falls back to the client's remote address, else None.
    """
    # Check URL parameters, in priority order.
    for name in params:
        if name not in self.request.query_params:
            continue
        value = self.request.query_params[name]
        if separator in value or value in ("", "preserve"):
            return value
    # Check the remote IP address (only if of the requested family).
    client_ip = self.request.META.get("REMOTE_ADDR")
    if separator in client_ip:
        return client_ip
    # give up
    return None
@cached_property
def qname(self):
    """Determine the fully qualified record name to update.

    Sources are tried in order: the ?hostname= parameter, ?host_id=,
    the user name portion of HTTP basic auth, ?username=, and finally
    the user's only domain.  The result is lower-cased.
    """
    # hostname parameter ("YES" is treated as absent -- presumably a
    # placeholder some dynDNS clients send; confirm against client docs)
    try:
        if self.request.query_params["hostname"] != "YES":
            return self.request.query_params["hostname"].lower()
    except KeyError:
        pass
    # host_id parameter
    try:
        return self.request.query_params["host_id"].lower()
    except KeyError:
        pass
    # http basic auth username (base64 of "user:password"); only usable
    # when it looks like a bare domain name (no "@")
    try:
        domain_name = (
            base64.b64decode(
                get_authorization_header(self.request)
                .decode()
                .split(" ")[1]
                .encode()
            )
            .decode()
            .split(":")[0]
        )
        if domain_name and "@" not in domain_name:
            return domain_name.lower()
    except (binascii.Error, IndexError, UnicodeDecodeError):
        pass
    # username parameter
    try:
        return self.request.query_params["username"].lower()
    except KeyError:
        pass
    # only domain associated with this user account
    try:
        return self.request.user.domains.get().name
    except Domain.MultipleObjectsReturned:
        raise ValidationError(
            detail={
                "detail": "Request does not properly specify domain for update.",
                "code": "domain-unspecified",
            }
        )
    except Domain.DoesNotExist:
        metrics.get("desecapi_dynDNS12_domain_not_found").inc()
        raise NotFound("nohost")
@cached_property
def domain(self):
try:
return Domain.objects.filter_qname(
self.qname, owner=self.request.user
).order_by("-name_length")[0]
except (IndexError, ValueError):
raise NotFound("nohost")
@property
def subname(self):
return self.qname.rpartition(f".{self.domain.name}")[0]
def get_serializer_context(self):
return {
**super().get_serializer_context(),
"domain": self.domain,
"minimum_ttl": 60,
}
def get_queryset(self):
return self.domain.rrset_set.filter(
subname=self.subname, type__in=["A", "AAAA"]
)
def get(self, request, *args, **kwargs):
instances = self.get_queryset().all()
record_params = {
"A": self._find_ip(["myip", "myipv4", "ip"], separator="."),
"AAAA": self._find_ip(["myipv6", "ipv6", "myip", "ip"], separator=":"),
}
data = [
{
"type": type_,
"subname": self.subname,
"ttl": 60,
"records": [ip_param] if ip_param else [],
}
for type_, ip_param in record_params.items()
if ip_param != "preserve"
]
serializer = self.get_serializer(instances, data=data, many=True, partial=True)
try:
serializer.is_valid(raise_exception=True)
except ValidationError as e:
if any(
any(
getattr(non_field_error, "code", "") == "unique"
for non_field_error in err.get("non_field_errors", [])
)
for err in e.detail
):
raise ConcurrencyException from e
raise e
with PDNSChangeTracker():
serializer.save()
return Response("good", content_type="text/plain")
| {
"content_hash": "4f512116db506dc96db4e51035419787",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 87,
"avg_line_length": 30.970588235294116,
"alnum_prop": 0.5650522317188984,
"repo_name": "desec-io/desec-stack",
"id": "93dec8f779f514f1131bf1be482e69f4bb067c6e",
"size": "5265",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "api/desecapi/views/dyndns.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "5786"
},
{
"name": "HTML",
"bytes": "3777"
},
{
"name": "JavaScript",
"bytes": "22126"
},
{
"name": "Python",
"bytes": "716037"
},
{
"name": "Shell",
"bytes": "10425"
},
{
"name": "Vue",
"bytes": "234220"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``census_profile_id`` field to the Agency model.

    The field is a short (16 char) identifier, blank-allowed, defaulting
    to the empty string.
    """
    dependencies = [
        ('nc', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='agency',
            name='census_profile_id',
            field=models.CharField(default='', blank=True, max_length=16),
        ),
    ]
| {
"content_hash": "9bfa03056fc2bb730c407f437425433a",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 74,
"avg_line_length": 21.944444444444443,
"alnum_prop": 0.5873417721518988,
"repo_name": "OpenDataPolicingNC/Traffic-Stops",
"id": "90f6dbd059392deb239ab75a5b282b60ff83bab4",
"size": "419",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "nc/migrations/0002_agency_census_profile_id.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14249"
},
{
"name": "Dockerfile",
"bytes": "1114"
},
{
"name": "Elixir",
"bytes": "40"
},
{
"name": "HCL",
"bytes": "2989"
},
{
"name": "HTML",
"bytes": "112505"
},
{
"name": "JavaScript",
"bytes": "99858"
},
{
"name": "Jupyter Notebook",
"bytes": "130974"
},
{
"name": "Makefile",
"bytes": "2662"
},
{
"name": "PLpgSQL",
"bytes": "11003"
},
{
"name": "Python",
"bytes": "261956"
},
{
"name": "SaltStack",
"bytes": "10013"
},
{
"name": "Scheme",
"bytes": "20526"
},
{
"name": "Shell",
"bytes": "250814"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Movie, MovieList, MovieListMembership
#from .models import Genre
# Register your models here.
class MovieListModelAdmin(admin.ModelAdmin):
    """Admin configuration for MovieList objects."""

    # Columns shown on the admin change-list page.
    list_display = ['user', 'list_name', 'date_created']
    # Fields editable on the detail form.
    fields = ('user', 'list_name', 'shared', 'shared_with')

    class Meta:
        model = MovieList
class MovieListMembershipModelAdmin(admin.ModelAdmin):
    """Admin configuration for MovieListMembership objects."""

    # Columns shown on the admin change-list page.
    list_display = ['movie', 'movie_list', 'user']

    class Meta:
        model = MovieListMembership
# Register the models with the admin site; Movie uses the default
# ModelAdmin, the list models use the customized admins above.
admin.site.register(Movie)
#admin.site.register(Genre)
admin.site.register(MovieList, MovieListModelAdmin)
admin.site.register(MovieListMembership, MovieListMembershipModelAdmin)
"content_hash": "2804c2bb2ebe6552b97926bdc6df5a18",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 28.833333333333332,
"alnum_prop": 0.7442196531791907,
"repo_name": "introini/ourlist",
"id": "a3dd8d59acca67e9fc18fa81f055374ba0da044c",
"size": "692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mylist/admin.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "461932"
},
{
"name": "HTML",
"bytes": "47577"
},
{
"name": "JavaScript",
"bytes": "1334"
},
{
"name": "Makefile",
"bytes": "63"
},
{
"name": "Python",
"bytes": "45430"
},
{
"name": "Shell",
"bytes": "4307"
}
],
"symlink_target": ""
} |
from ca.base.generations import Generations
class Life(Generations):
    """Two-state cellular automaton (presumably Conway's Life).

    NOTE(review): the transition rules apparently live in the
    Generations base class; only the state count is configured here —
    confirm against ca.base.generations.
    """
    # Cells are either dead (0) or alive (1).
    states = 2
| {
"content_hash": "a05f0ee622d27794b92f9ae18cb9cb54",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 43,
"avg_line_length": 18.8,
"alnum_prop": 0.6914893617021277,
"repo_name": "jandecaluwe/ca",
"id": "0274d7930a49d4261b3193bc360a1be38ada367e",
"size": "94",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "base/life.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Coq",
"bytes": "25498"
},
{
"name": "Python",
"bytes": "9289"
},
{
"name": "VHDL",
"bytes": "38590"
},
{
"name": "Verilog",
"bytes": "1167"
}
],
"symlink_target": ""
} |
"""
sphinx.util.tags
~~~~~~~~~~~~~~~~
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import warnings
# jinja2.sandbox imports the sets module on purpose; silence the
# resulting DeprecationWarning so it does not leak to Sphinx users.
warnings.filterwarnings('ignore', 'the sets module', DeprecationWarning,
                        module='jinja2.sandbox')
# (ab)use the Jinja parser for parsing our boolean expressions
from jinja2 import nodes
from jinja2.parser import Parser
from jinja2.environment import Environment
# Shared environment handed to every BooleanParser instance below.
env = Environment()
class BooleanParser(Parser):
    """
    Jinja parser restricted to boolean expressions: names, the constants
    true/false/none, parentheses, and the and/or/not operations handled
    by the base class. Anything else is a parse failure.
    """
    def parse_compare(self):
        token = self.stream.current
        if token.type == 'lparen':
            # Parenthesized sub-expression.
            self.stream.next()
            inner = self.parse_expression()
            self.stream.expect('rparen')
            return inner
        if token.type != 'name':
            # fail() raises, so control never returns here.
            self.fail("unexpected token '%s'" % (token,), token.lineno)
        value = token.value
        if value in ('true', 'false', 'True', 'False'):
            result = nodes.Const(value in ('true', 'True'),
                                 lineno=token.lineno)
        elif value in ('none', 'None'):
            result = nodes.Const(None, lineno=token.lineno)
        else:
            # Any other name is looked up as a tag at evaluation time.
            result = nodes.Name(value, 'load', lineno=token.lineno)
        self.stream.next()
        return result
class Tags(object):
    """Mutable set of build tags with boolean-expression evaluation."""

    def __init__(self, tags=None):
        self.tags = dict.fromkeys(tags or [], True)

    def has(self, tag):
        """Return True if *tag* is currently set."""
        return tag in self.tags

    __contains__ = has

    def __iter__(self):
        return iter(self.tags)

    def add(self, tag):
        self.tags[tag] = True

    def remove(self, tag):
        # Removing an unset tag is a no-op.
        self.tags.pop(tag, None)

    def eval_condition(self, condition):
        """Evaluate the boolean *condition* string against the tags."""
        # exceptions are handled by the caller
        parser = BooleanParser(env, condition, state='variable')
        expr = parser.parse_expression()
        if not parser.stream.eos:
            raise ValueError('chunk after expression')

        def evaluate(node):
            # Recursively reduce the Jinja AST to a boolean; unknown
            # tag names evaluate to False.
            if isinstance(node, nodes.CondExpr):
                branch = node.expr1 if evaluate(node.test) else node.expr2
                return evaluate(branch)
            if isinstance(node, nodes.And):
                return evaluate(node.left) and evaluate(node.right)
            if isinstance(node, nodes.Or):
                return evaluate(node.left) or evaluate(node.right)
            if isinstance(node, nodes.Not):
                return not evaluate(node.node)
            if isinstance(node, nodes.Name):
                return self.tags.get(node.name, False)
            raise ValueError('invalid node, check parsing')

        return evaluate(expr)
| {
"content_hash": "9619d556ed4547737452fb1e81bcf7cd",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 75,
"avg_line_length": 32.04494382022472,
"alnum_prop": 0.5725806451612904,
"repo_name": "SurfasJones/icecream-info",
"id": "a6e62140771be8f2e837da0e9d25208695d6a22b",
"size": "2876",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "icecream/lib/python2.7/site-packages/sphinx/util/tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "288937"
},
{
"name": "JavaScript",
"bytes": "589933"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "18137514"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "Shell",
"bytes": "10274"
},
{
"name": "TeX",
"bytes": "56626"
},
{
"name": "XSLT",
"bytes": "5122"
}
],
"symlink_target": ""
} |
__author__ = "mozman <mozman@gmx.at>"
import os
import re
import unittest
from dxfwrite import DXFEngine as dxf
from dxfwrite.util import is_string
class TestDrawing(unittest.TestCase):
    """Unit tests for the dxfwrite Drawing facade (dxf.drawing())."""

    def test_drawing(self):
        dwg = dxf.drawing()
        res1 = dwg.__dxf__()
        self.assertTrue(is_string(res1))

    def test_properties(self):
        dwg = dxf.drawing()
        # A fresh drawing comes pre-populated with the standard tables.
        self.assertTrue(dwg.linetypes)
        self.assertTrue(dwg.layers)
        self.assertTrue(dwg.styles)
        self.assertTrue(dwg.views)
        self.assertTrue(dwg.viewports)
        self.assertTrue(dwg.ucs)

    def test_add(self):
        dwg = dxf.drawing()
        self.assertEqual(dwg.add("TEST"), "TEST")

    def test_add_modelspace(self):
        dwg = dxf.drawing()
        # Adding to modelspace forces paper_space back to 0.
        txt = dwg.modelspace.add(dxf.text('TEST', paper_space=1))
        self.assertEqual(0, txt['paper_space'])

    def test_add_paperspace(self):
        dwg = dxf.drawing()
        # Adding to paperspace forces paper_space to 1.
        txt = dwg.paperspace.add(dxf.text('TEST', paper_space=0))
        self.assertEqual(1, txt['paper_space'])

    def test_anonymous_blockname(self):
        dwg = dxf.drawing()
        # Raw strings: "\*" and "\d" are invalid escape sequences in a
        # normal string literal (DeprecationWarning/SyntaxWarning on
        # modern Python).
        self.assertTrue(re.match(r"^\*U\d*$", dwg.anonymous_blockname('U')))

    def test_add_anonymous_block(self):
        dwg = dxf.drawing()
        blockname = dwg.add_anonymous_block("TEST")
        self.assertTrue(re.match(r"^\*U\d*$", blockname))
        block = dwg.blocks.find(blockname)
        entity = block.get_data().pop()
        self.assertEqual(entity, "TEST")

    def test_writing(self):
        filename = 'test.dxf'
        # Make sure no stale file from a previous run exists.
        try:
            os.remove(filename)
        except OSError:
            pass
        dwg = dxf.drawing()
        dwg.saveas(filename)
        # Removing the file succeeds only if saveas() actually wrote it.
        try:
            os.remove(filename)
        except OSError:
            self.fail("Drawing not saved!")

    def test_add_layer(self):
        dwg = dxf.drawing()
        element = dwg.add_layer('TEST')
        self.assertEqual(element['name'], 'TEST')

    def test_add_style(self):
        dwg = dxf.drawing()
        element = dwg.add_style('TEST')
        self.assertEqual(element['name'], 'TEST')

    def test_add_linetype(self):
        dwg = dxf.drawing()
        element = dwg.add_linetype('TEST')
        self.assertEqual(element['name'], 'TEST')

    def test_add_view(self):
        dwg = dxf.drawing()
        element = dwg.add_view('TEST')
        self.assertEqual(element['name'], 'TEST')

    def test_add_viewport(self):
        dwg = dxf.drawing()
        element = dwg.add_vport('TEST')
        self.assertEqual(element['name'], 'TEST')

    def test_add_ucs(self):
        dwg = dxf.drawing()
        element = dwg.add_ucs('TEST')
        self.assertEqual(element['name'], 'TEST')
# Allow running this test module directly (python test_drawing.py).
if __name__=='__main__':
    unittest.main()
| {
"content_hash": "1501bc8ab57b372d1b0b75df676194f7",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 75,
"avg_line_length": 28.577319587628867,
"alnum_prop": 0.5836940836940837,
"repo_name": "sbarton272/AcousticBarcodes-Explorations",
"id": "ed7f706270638a9c563763d75f8d4dbb7d396869",
"size": "2891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "barcodes/dxfwrite/tests/test_drawing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "8313"
},
{
"name": "Python",
"bytes": "725409"
},
{
"name": "Shell",
"bytes": "153"
}
],
"symlink_target": ""
} |
"""Models for test_natural.py"""
from django.db import models
class NaturalKeyAnchorManager(models.Manager):
    """Manager enabling natural-key lookups for NaturalKeyAnchor."""
    def get_by_natural_key(self, data):
        # The natural key is the unique ``data`` value alone.
        return self.get(data=data)
class NaturalKeyAnchor(models.Model):
    """Model identified by its unique ``data`` field as the natural key."""
    objects = NaturalKeyAnchorManager()
    # Unique value serving as the natural key.
    data = models.CharField(max_length=100, unique=True)
    title = models.CharField(max_length=100, null=True)
    def natural_key(self):
        # Counterpart of NaturalKeyAnchorManager.get_by_natural_key.
        return (self.data,)
class FKDataNaturalKey(models.Model):
    """Model holding a nullable FK to NaturalKeyAnchor (SET_NULL on delete)."""
    data = models.ForeignKey(NaturalKeyAnchor, models.SET_NULL, null=True)
| {
"content_hash": "c50e61e2c789180223faf42a79e2f5df",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 74,
"avg_line_length": 27.238095238095237,
"alnum_prop": 0.6940559440559441,
"repo_name": "yephper/django",
"id": "5f5016d16d22043c9cf0b003740d372ba9da389c",
"size": "572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/serializers/models/natural.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
} |
import sys
import redis
class Client():
    """Thin wrapper around a StrictRedis connection."""

    def __init__(self, host="redis"):
        """Connect to the Redis server at *host*.

        The default "redis" is the original hard-coded hostname
        (presumably a docker-compose service name — verify); it is now a
        parameter so tests can point at a different server.
        """
        self.conn = redis.StrictRedis(host)

    def set(self, key, value):
        """Store *value* under *key*."""
        self.conn.set(key, value)

    def get(self, key):
        """Return the value stored under *key*."""
        return self.conn.get(key)
if __name__ == '__main__':
    client = Client()
    # NOTE(review): the fetched value is discarded — presumably this
    # script only exercises the connection; print the result if output
    # is actually wanted.
    client.get(sys.argv[1])
| {
"content_hash": "79459f15c3a3b2c8ea182e4442e46560",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 46,
"avg_line_length": 18.58823529411765,
"alnum_prop": 0.569620253164557,
"repo_name": "leehosung/pycon-testing",
"id": "3f49a8b363603aa54dadcaa243f89925f8eb1be7",
"size": "316",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integration_test/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6483"
}
],
"symlink_target": ""
} |
from telemetry.page import page_test_results
from telemetry.page import page_measurement_value
class ValuesForSinglePage(object):
  """Collects the measurement values recorded for a single page.

  Stored values are expected to expose ``measurement_name`` and
  ``trace_name`` attributes; both are assumed unique within a page.
  """

  def __init__(self, page):
    self.page = page
    self.values = []

  def AddValue(self, value):
    """Append a measurement value to this page's results."""
    self.values.append(value)

  @property
  def measurement_names(self):
    """Names of all measurements recorded for this page."""
    return [value.measurement_name for value in self.values]

  def FindValueByMeasurementName(self, measurement_name):
    """Return the unique value with *measurement_name*, or None."""
    values = [value for value in self.values
              if value.measurement_name == measurement_name]
    assert len(values) <= 1
    return values[0] if values else None

  def __getitem__(self, trace_name):
    return self.FindValueByTraceName(trace_name)

  def __contains__(self, trace_name):
    # Idiom fix: identity comparison with None instead of "!= None".
    return self.FindValueByTraceName(trace_name) is not None

  def FindValueByTraceName(self, trace_name):
    """Return the unique value with *trace_name*, or None."""
    values = [value for value in self.values
              if value.trace_name == trace_name]
    assert len(values) <= 1
    return values[0] if values else None
class PageMeasurementResults(page_test_results.PageTestResults):
  """Accumulates per-page and overall measurement values.

  Usage: WillMeasurePage(page), then Add(...) for each value, then
  DidMeasurePage(). AddSummary records values not tied to one page.
  """

  def __init__(self, trace_tag=''):
    super(PageMeasurementResults, self).__init__()
    self._trace_tag = trace_tag
    self._page_results = []
    self._overall_results = []
    # measurement_name -> {'units': ..., 'type': ...}; enforces that a
    # measurement keeps consistent units/data type across all pages.
    self._all_measurements_that_have_been_seen = {}
    self._values_for_current_page = {}

  def __getitem__(self, i):
    """Shorthand for self.page_results[i]"""
    return self._page_results[i]

  def __len__(self):
    return len(self._page_results)

  @property
  def values_for_current_page(self):
    return self._values_for_current_page

  @property
  def page_results(self):
    return self._page_results

  def WillMeasurePage(self, page):
    """Begin collecting values for *page*; must precede Add()."""
    self._values_for_current_page = ValuesForSinglePage(page)

  @property
  def all_measurements_that_have_been_seen(self):
    return self._all_measurements_that_have_been_seen

  def Add(self, trace_name, units, value, chart_name=None, data_type='default'):
    """Record a value for the page currently being measured."""
    value = self._GetPageMeasurementValue(trace_name, units, value, chart_name,
                                          data_type)
    self._values_for_current_page.AddValue(value)

  def AddSummary(self, trace_name, units, value, chart_name=None,
                 data_type='default'):
    """Record an overall value that is not tied to a single page."""
    value = self._GetPageMeasurementValue(trace_name, units, value, chart_name,
                                          data_type)
    self._overall_results.append(value)

  def _GetPageMeasurementValue(self, trace_name, units, value, chart_name,
                               data_type):
    """Wrap the raw value and validate units/type consistency."""
    value = page_measurement_value.PageMeasurementValue(
        trace_name, units, value, chart_name, data_type)

    measurement_name = value.measurement_name

    # Sanity checks.
    assert measurement_name != 'url', 'The name url cannot be used'
    if measurement_name in self._all_measurements_that_have_been_seen:
      measurement_data = \
          self._all_measurements_that_have_been_seen[measurement_name]
      last_seen_units = measurement_data['units']
      last_seen_data_type = measurement_data['type']
      assert last_seen_units == units, \
          'Unit cannot change for a name once it has been provided'
      # Bug fix: this message previously also said "Unit", although the
      # assertion checks the data type.
      assert last_seen_data_type == data_type, \
          'Data type cannot change for a name once it has been provided'
    else:
      self._all_measurements_that_have_been_seen[measurement_name] = {
        'units': units,
        'type': data_type}
    return value

  def DidMeasurePage(self):
    """Finish the current page, storing its results if any were added."""
    assert self._values_for_current_page, 'Failed to call WillMeasurePage'

    if not self.values_for_current_page.values:
      # Do not store to page_results list if no results were added on this page.
      return

    self._page_results.append(self._values_for_current_page)
    self._values_for_current_page = None
| {
"content_hash": "6c7b0f0cc9d2419bf6d0d08ac8b6a857",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 80,
"avg_line_length": 34.2972972972973,
"alnum_prop": 0.6637772524297347,
"repo_name": "mogoweb/chromium-crosswalk",
"id": "02ebd2a03b48bfebecc360996074fedc6a0043c3",
"size": "3974",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/page/page_measurement_results.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "54831"
},
{
"name": "Awk",
"bytes": "8660"
},
{
"name": "C",
"bytes": "40940503"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "182703853"
},
{
"name": "CSS",
"bytes": "799795"
},
{
"name": "DOT",
"bytes": "1873"
},
{
"name": "Java",
"bytes": "4807735"
},
{
"name": "JavaScript",
"bytes": "20714038"
},
{
"name": "Mercury",
"bytes": "10299"
},
{
"name": "Objective-C",
"bytes": "985558"
},
{
"name": "Objective-C++",
"bytes": "6205987"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "1213389"
},
{
"name": "Python",
"bytes": "9735121"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "1305641"
},
{
"name": "Tcl",
"bytes": "277091"
},
{
"name": "TypeScript",
"bytes": "1560024"
},
{
"name": "XSLT",
"bytes": "13493"
},
{
"name": "nesC",
"bytes": "14650"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Make ``Person.inBoardSince`` nullable (auto-generated alteration)."""
    dependencies = [
        ('coopInfo', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='person',
            name='inBoardSince',
            field=models.DateTimeField(null=True),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "eef9b21ea7b9ed6e102c427cf3ee5f91",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 50,
"avg_line_length": 21.526315789473685,
"alnum_prop": 0.589242053789731,
"repo_name": "antoineclaval/ruralpowerproject",
"id": "7fea7bf34acd449da8af0faad7e6bc10e80f2a9b",
"size": "433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ruralpowerproject/coopInfo/migrations/0002_auto_20150128_1753.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2614"
},
{
"name": "HTML",
"bytes": "23995"
},
{
"name": "JavaScript",
"bytes": "3257"
},
{
"name": "PLpgSQL",
"bytes": "75440"
},
{
"name": "Python",
"bytes": "58310"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the GatherNews documentation.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_themes'))
sys.path.append(os.path.abspath("")[:-4]) # import gathernews
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GatherNews'
copyright = u'2014 Tyler Brown'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.0'
# The full version, including alpha/beta/rc tags.
release = '0.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
# 'kr' is a custom theme shipped in the local _themes directory.
html_theme = 'kr'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
html_theme_path = ['_themes']
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
html_title = 'GatherNews'
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'GatherNewsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'GatherNews.tex', u'GatherNews Documentation',
   u'Tyler Brown', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'gathernews', u'GatherNews Documentation',
     [u'Tyler Brown'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'GatherNews', u'GatherNews Documentation',
   u'Tyler Brown', 'GatherNews', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
"content_hash": "46bacf41e64b6dde6c69d1a9f3cc7f50",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 79,
"avg_line_length": 31.72983870967742,
"alnum_prop": 0.7054263565891473,
"repo_name": "tbonza/GatherNews",
"id": "47b7db9ff6facd7edc73941e25199ae37a6391b0",
"size": "8292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10268"
}
],
"symlink_target": ""
} |
"""The sensor tests for the august platform."""
from homeassistant.const import PERCENTAGE, STATE_UNAVAILABLE
from tests.components.august.mocks import (
_create_august_with_devices,
_mock_activities_from_fixture,
_mock_doorbell_from_fixture,
_mock_doorsense_enabled_august_lock_detail,
_mock_lock_from_fixture,
)
async def test_create_doorbell(hass):
    """Test creation of a doorbell."""
    doorbell = await _mock_doorbell_from_fixture(hass, "get_doorbell.json")
    await _create_august_with_devices(hass, [doorbell])

    # The fixture reports a 96% battery level.
    battery_state = hass.states.get("sensor.k98gidt45gul_name_battery")
    assert battery_state.state == "96"
    assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE
async def test_create_doorbell_offline(hass):
    """Test creation of a doorbell that is offline."""
    doorbell = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json")
    await _create_august_with_devices(hass, [doorbell])

    # The sensor is created with the last known battery level.
    battery_state = hass.states.get("sensor.tmt100_name_battery")
    assert battery_state.state == "81"
    assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE

    registry = await hass.helpers.entity_registry.async_get_registry()
    registry_entry = registry.async_get("sensor.tmt100_name_battery")
    assert registry_entry
    assert registry_entry.unique_id == "tmt100_device_battery"
async def test_create_doorbell_hardwired(hass):
    """Test creation of a doorbell that is hardwired without a battery."""
    doorbell = await _mock_doorbell_from_fixture(
        hass, "get_doorbell.nobattery.json"
    )
    await _create_august_with_devices(hass, [doorbell])

    # Battery-less devices must not create a battery sensor.
    assert hass.states.get("sensor.tmt100_name_battery") is None
async def test_create_lock_with_linked_keypad(hass):
    """Test creation of a lock with a linked keypad that both have a battery."""
    lock = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json")
    await _create_august_with_devices(hass, [lock])
    registry = await hass.helpers.entity_registry.async_get_registry()

    # Lock battery sensor.
    lock_battery = hass.states.get(
        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
    )
    assert lock_battery.state == "88"
    assert lock_battery.attributes["unit_of_measurement"] == PERCENTAGE
    lock_entry = registry.async_get(
        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
    )
    assert lock_entry
    assert lock_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery"

    # Linked keypad battery sensor.
    keypad_battery = hass.states.get("sensor.front_door_lock_keypad_battery")
    assert keypad_battery.state == "60"
    assert keypad_battery.attributes["unit_of_measurement"] == PERCENTAGE
    keypad_entry = registry.async_get("sensor.front_door_lock_keypad_battery")
    assert keypad_entry
    assert keypad_entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery"
async def test_create_lock_with_low_battery_linked_keypad(hass):
    """Verify sensors for a lock whose linked keypad battery is low."""
    lock = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json")
    await _create_august_with_devices(hass, [lock])
    registry = await hass.helpers.entity_registry.async_get_registry()

    # Lock battery sensor.
    lock_battery = hass.states.get(
        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
    )
    assert lock_battery.state == "88"
    assert lock_battery.attributes["unit_of_measurement"] == PERCENTAGE
    lock_entry = registry.async_get(
        "sensor.a6697750d607098bae8d6baa11ef8063_name_battery"
    )
    assert lock_entry
    assert lock_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery"

    # Linked keypad battery sensor (low in this fixture).
    keypad_battery = hass.states.get("sensor.front_door_lock_keypad_battery")
    assert keypad_battery.state == "10"
    assert keypad_battery.attributes["unit_of_measurement"] == PERCENTAGE
    keypad_entry = registry.async_get("sensor.front_door_lock_keypad_battery")
    assert keypad_entry
    assert keypad_entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery"

    # No activity means it will be unavailable until someone unlocks/locks it.
    operator_entry = registry.async_get(
        "sensor.a6697750d607098bae8d6baa11ef8063_name_operator"
    )
    assert (
        operator_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_lock_operator"
    )
    assert (
        hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state
        == STATE_UNAVAILABLE
    )
async def test_lock_operator_bluetooth(hass):
    """Verify the operator sensor after a lock operation over bluetooth."""
    lock = await _mock_doorsense_enabled_august_lock_detail(hass)
    activities = await _mock_activities_from_fixture(
        hass, "get_activity.lock_from_bluetooth.json"
    )
    await _create_august_with_devices(hass, [lock], activities=activities)
    registry = await hass.helpers.entity_registry.async_get_registry()
    assert registry.async_get("sensor.online_with_doorsense_name_operator")

    # A bluetooth operation reports method "mobile" and no other flags.
    operator = hass.states.get("sensor.online_with_doorsense_name_operator")
    assert operator.state == "Your favorite elven princess"
    assert operator.attributes["remote"] is False
    assert operator.attributes["keypad"] is False
    assert operator.attributes["autorelock"] is False
    assert operator.attributes["method"] == "mobile"
async def test_lock_operator_keypad(hass):
    """Verify the operator sensor after a lock operation from the keypad."""
    lock = await _mock_doorsense_enabled_august_lock_detail(hass)
    activities = await _mock_activities_from_fixture(
        hass, "get_activity.lock_from_keypad.json"
    )
    await _create_august_with_devices(hass, [lock], activities=activities)
    registry = await hass.helpers.entity_registry.async_get_registry()
    assert registry.async_get("sensor.online_with_doorsense_name_operator")

    # A keypad operation sets only the "keypad" flag and method "keypad".
    operator = hass.states.get("sensor.online_with_doorsense_name_operator")
    assert operator.state == "Your favorite elven princess"
    assert operator.attributes["remote"] is False
    assert operator.attributes["keypad"] is True
    assert operator.attributes["autorelock"] is False
    assert operator.attributes["method"] == "keypad"
async def test_lock_operator_remote(hass):
    """Verify the operator sensor after a remote lock operation."""
    lock = await _mock_doorsense_enabled_august_lock_detail(hass)
    activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json")
    await _create_august_with_devices(hass, [lock], activities=activities)
    registry = await hass.helpers.entity_registry.async_get_registry()
    assert registry.async_get("sensor.online_with_doorsense_name_operator")

    # A remote operation sets only the "remote" flag and method "remote".
    operator = hass.states.get("sensor.online_with_doorsense_name_operator")
    assert operator.state == "Your favorite elven princess"
    assert operator.attributes["remote"] is True
    assert operator.attributes["keypad"] is False
    assert operator.attributes["autorelock"] is False
    assert operator.attributes["method"] == "remote"
async def test_lock_operator_autorelock(hass):
    """Verify the operator sensor after an auto-relock operation."""
    lock = await _mock_doorsense_enabled_august_lock_detail(hass)
    activities = await _mock_activities_from_fixture(
        hass, "get_activity.lock_from_autorelock.json"
    )
    await _create_august_with_devices(hass, [lock], activities=activities)
    registry = await hass.helpers.entity_registry.async_get_registry()
    assert registry.async_get("sensor.online_with_doorsense_name_operator")

    # An auto-relock reports the lock itself as operator.
    operator = hass.states.get("sensor.online_with_doorsense_name_operator")
    assert operator.state == "Auto Relock"
    assert operator.attributes["remote"] is False
    assert operator.attributes["keypad"] is False
    assert operator.attributes["autorelock"] is True
    assert operator.attributes["method"] == "autorelock"
| {
"content_hash": "d522c2b7725bee6b8f366ad88795d676",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 88,
"avg_line_length": 34.439597315436245,
"alnum_prop": 0.667348728441976,
"repo_name": "tchellomello/home-assistant",
"id": "7e69b59da070584e7d8a5f8318e68217d8d09d33",
"size": "10263",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/august/test_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "26713364"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
} |
from os.path import abspath, dirname, join, normpath
from setuptools import setup

# Use the README as the long description shown on PyPI.
with open('README.rst') as f:
    long_description = f.read()

setup(
    # Basic package information:
    name='mdx_collapse',
    version='0.1.0',
    py_modules=('mdx_collapse',),

    # Packaging options:
    zip_safe=False,
    include_package_data=True,

    # Package dependencies:
    install_requires=['Markdown>=2.0'],

    # Metadata for PyPI:
    author='Alexandre Fonseca',
    author_email='alexandrejorgefonseca@gmail.com',
    license='Apache',
    url='https://github.com/AlexJF/mdx_collapse',
    download_url='https://github.com/AlexJF/mdx_collapse/archive/v0.1.0.zip',
    keywords='markdown extension collapse',
    description='A markdown extension for defining collapsible areas',
    long_description=long_description,
)
| {
"content_hash": "9582f6d296c45b6e3252e6ad7d4dd1d4",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 79,
"avg_line_length": 28.6,
"alnum_prop": 0.6678321678321678,
"repo_name": "AlexJF/mdx_collapse",
"id": "1b239da21d69c2148a2763d3e43c23e6323147a6",
"size": "858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5697"
}
],
"symlink_target": ""
} |
"""
This is a pure Python implementation of the merge-insertion sort algorithm
Source: https://en.wikipedia.org/wiki/Merge-insertion_sort
For doctests run following command:
python3 -m doctest -v merge_insertion_sort.py
or
python -m doctest -v merge_insertion_sort.py
For manual testing run:
python3 merge_insertion_sort.py
"""
from __future__ import annotations
def merge_insertion_sort(collection: list[int]) -> list[int]:
    """Pure implementation of merge-insertion sort algorithm in Python

    :param collection: some mutable ordered collection with heterogeneous
    comparable items inside
    :return: the same collection ordered by ascending

    Examples:
    >>> merge_insertion_sort([0, 5, 3, 2, 2])
    [0, 2, 2, 3, 5]

    >>> merge_insertion_sort([99])
    [99]

    >>> merge_insertion_sort([-2, -5, -45])
    [-45, -5, -2]

    >>> merge_insertion_sort([1, 2, 3, 4, 5])
    [1, 2, 3, 4, 5]
    """

    def binary_search_insertion(sorted_list, item):
        """Insert *item* into the ascending *sorted_list* via binary search."""
        left = 0
        right = len(sorted_list) - 1
        while left <= right:
            middle = (left + right) // 2
            if left == right:
                if sorted_list[middle] < item:
                    left = middle + 1
                break
            elif sorted_list[middle] < item:
                left = middle + 1
            else:
                right = middle - 1
        sorted_list.insert(left, item)
        return sorted_list

    def sortlist_2d(list_2d):
        """Merge sort a list of pairs by each pair's first element."""

        def merge(left, right):
            result = []
            while left and right:
                if left[0][0] < right[0][0]:
                    result.append(left.pop(0))
                else:
                    result.append(right.pop(0))
            return result + left + right

        length = len(list_2d)
        if length <= 1:
            return list_2d
        middle = length // 2
        return merge(sortlist_2d(list_2d[:middle]), sortlist_2d(list_2d[middle:]))

    if len(collection) <= 1:
        return collection

    """
    Group the items into two pairs, and leave one element if there is a last odd item.

    Example: [999, 100, 75, 40, 10000]
                 -> [999, 100], [75, 40]. Leave 10000.
    """
    two_paired_list = []
    has_last_odd_item = False
    for i in range(0, len(collection), 2):
        if i == len(collection) - 1:
            has_last_odd_item = True
        else:
            """
            Sort two-pairs in each groups.

            Example: [999, 100], [75, 40]
                         -> [100, 999], [40, 75]
            """
            if collection[i] < collection[i + 1]:
                two_paired_list.append([collection[i], collection[i + 1]])
            else:
                two_paired_list.append([collection[i + 1], collection[i]])

    """
    Sort two_paired_list.

    Example: [100, 999], [40, 75]
                 -> [40, 75], [100, 999]
    """
    sorted_list_2d = sortlist_2d(two_paired_list)

    """
    40 < 100 is sure because it has already been sorted.
    Generate the sorted_list of them so that you can avoid unnecessary comparison.

    Example:
           group0 group1
           40     100
           75     999
        ->
           group0 group1
           [40,   100]
           75     999
    """
    result = [i[0] for i in sorted_list_2d]

    """
    100 < 999 is sure because it has already been sorted.
    Put 999 in last of the sorted_list so that you can avoid unnecessary comparison.

    Example:
           group0 group1
           [40,   100]
           75     999
        ->
           group0 group1
           [40,   100,   999]
           75
    """
    result.append(sorted_list_2d[-1][1])

    """
    Insert the last odd item left if there is.

    Example:
           group0 group1
           [40,   100,   999]
           75
        ->
           group0 group1
           [40,   100,   999,   10000]
           75
    """
    if has_last_odd_item:
        pivot = collection[-1]
        result = binary_search_insertion(result, pivot)

    """
    Insert the remaining items.
    In this case, 40 < 75 is sure because it has already been sorted.
    Therefore, you only need to insert 75 into [100, 999, 10000],
    so that you can avoid unnecessary comparison.

    Example:
           group0 group1
           [40,   100,   999,   10000]
            ^ You don't need to compare with this as 40 < 75 is already sure.
           75
        ->
           [40,   75,    100,   999,   10000]
    """
    is_last_odd_item_inserted_before_this_index = False
    for i in range(len(sorted_list_2d) - 1):
        # BUG FIX: track the position of the last odd item by comparing with
        # collection[-1] (the odd item itself), and only when one exists.
        # The previous code compared against collection[-i], which for i == 0
        # is collection[0] and produced wrong insert offsets (e.g. input
        # [1, 2, 3, 4, 5] came back as [1, 3, 2, 4, 5]).
        if has_last_odd_item and result[i] == collection[-1]:
            is_last_odd_item_inserted_before_this_index = True
        pivot = sorted_list_2d[i][1]
        # If last_odd_item is inserted before the item's index,
        # you should forward index one more.
        if is_last_odd_item_inserted_before_this_index:
            result = result[: i + 2] + binary_search_insertion(result[i + 2 :], pivot)
        else:
            result = result[: i + 1] + binary_search_insertion(result[i + 1 :], pivot)

    return result
if __name__ == "__main__":
    # Manual smoke test: read a comma-separated list and print it sorted.
    raw = input("Enter numbers separated by a comma:\n").strip()
    values = list(map(int, raw.split(",")))
    print(merge_insertion_sort(values))
| {
"content_hash": "114433b0be099c7745e43a039c611b87",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 86,
"avg_line_length": 29.24581005586592,
"alnum_prop": 0.5379178605539637,
"repo_name": "wuweilin/python",
"id": "fb71d84a3c149093e0bbc3ee161d865c2d5df910",
"size": "5235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sorts/merge_insertion_sort.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from __future__ import print_function
import json
import os
import re
import subprocess
import sys
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import time
from pyversion import is_python3
if is_python3():
import urllib.request
import urllib.error
else:
import urllib2
import imp
urllib = imp.new_module('urllib')
urllib.request = urllib2
urllib.error = urllib2
from signal import SIGTERM
from error import GitError, UploadError
from trace import Trace
if is_python3():
from http.client import HTTPException
else:
from httplib import HTTPException
from git_command import GitCommand
from git_command import ssh_sock
from git_command import terminate_ssh_clients
# Common Git ref namespaces.
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'

# A full 40-hex-digit (SHA-1) object id.
ID_RE = re.compile(r'^[0-9a-f]{40}$')

# Cache of resolved review URLs, keyed by the normalized review base URL.
REVIEW_CACHE = {}


def IsId(rev):
    """Return a truthy match object when *rev* is a full lowercase SHA-1."""
    return ID_RE.match(rev)


def _key(name):
    """Normalize a config key: section and variable name are folded to
    lowercase, while any subsection (the middle parts) keeps its case.
    """
    parts = name.split('.')
    if len(parts) < 2:
        return name.lower()
    return '.'.join([parts[0].lower()] + parts[1:-1] + [parts[-1].lower()])
class GitConfig(object):
    """Read/write accessor for a git configuration file.

    Parsed values are cached in a sidecar JSON file (``self._json``); the
    cache is discarded and rebuilt from ``git config --list`` whenever it
    is not newer than the config file itself.
    """

    _ForUser = None

    @classmethod
    def ForUser(cls):
        """Return the shared instance wrapping the user's ~/.gitconfig."""
        if cls._ForUser is None:
            cls._ForUser = cls(configfile=os.path.expanduser('~/.gitconfig'))
        return cls._ForUser

    @classmethod
    def ForRepository(cls, gitdir, defaults=None):
        """Return an instance for *gitdir*'s config, layered over *defaults*."""
        return cls(configfile=os.path.join(gitdir, 'config'),
                   defaults=defaults)

    def __init__(self, configfile, defaults=None, jsonFile=None):
        """
        Args:
            configfile: path of the git config file to read/write.
            defaults: optional fallback GitConfig consulted for missing keys.
            jsonFile: optional explicit path of the JSON parse cache;
                defaults to ``.repo_<basename>.json`` next to *configfile*.
        """
        self.file = configfile
        self.defaults = defaults
        self._cache_dict = None      # lazily populated: key -> [values]
        self._section_dict = None    # lazily populated: section -> subsections
        self._remotes = {}           # Remote objects, created on demand
        self._branches = {}          # Branch objects, created on demand

        self._json = jsonFile
        if self._json is None:
            self._json = os.path.join(
                os.path.dirname(self.file),
                '.repo_' + os.path.basename(self.file) + '.json')

    def Has(self, name, include_defaults=True):
        """Return true if this configuration file has the key.
        """
        if _key(name) in self._cache:
            return True
        if include_defaults and self.defaults:
            return self.defaults.Has(name, include_defaults=True)
        return False

    def GetBoolean(self, name):
        """Returns a boolean from the configuration file.
           None : The value was not defined, or is not a boolean.
           True : The value was set to true or yes.
           False: The value was set to false or no.
        """
        v = self.GetString(name)
        if v is None:
            return None
        v = v.lower()
        if v in ('true', 'yes'):
            return True
        if v in ('false', 'no'):
            return False
        return None

    def GetString(self, name, all_keys=False):
        """Get the first value for a key, or None if it is not defined.

           This configuration file is used first, if the key is not
           defined or all_keys = True then the defaults are also searched.
        """
        try:
            v = self._cache[_key(name)]
        except KeyError:
            if self.defaults:
                return self.defaults.GetString(name, all_keys=all_keys)
            v = []

        if not all_keys:
            if v:
                return v[0]
            return None

        r = []
        r.extend(v)
        if self.defaults:
            r.extend(self.defaults.GetString(name, all_keys=True))
        return r

    def SetString(self, name, value):
        """Set the value(s) for a key.
           Only this configuration file is modified.

           The supplied value should be either a string,
           or a list of strings (to store multiple values).
        """
        key = _key(name)

        try:
            old = self._cache[key]
        except KeyError:
            old = []

        if value is None:
            # Remove the key entirely.
            if old:
                del self._cache[key]
                self._do('--unset-all', name)

        elif isinstance(value, list):
            if len(value) == 0:
                self.SetString(name, None)

            elif len(value) == 1:
                self.SetString(name, value[0])

            elif old != value:
                # Rewrite the whole multi-valued key.
                self._cache[key] = list(value)
                self._do('--replace-all', name, value[0])
                for i in range(1, len(value)):
                    self._do('--add', name, value[i])

        elif len(old) != 1 or old[0] != value:
            self._cache[key] = [value]
            self._do('--replace-all', name, value)

    def GetRemote(self, name):
        """Get the remote.$name.* configuration values as an object.
        """
        try:
            r = self._remotes[name]
        except KeyError:
            r = Remote(self, name)
            self._remotes[r.name] = r
        return r

    def GetBranch(self, name):
        """Get the branch.$name.* configuration values as an object.
        """
        try:
            b = self._branches[name]
        except KeyError:
            b = Branch(self, name)
            self._branches[b.name] = b
        return b

    def GetSubSections(self, section):
        """List all subsection names matching $section.*.*
        """
        return self._sections.get(section, set())

    def HasSection(self, section, subsection=''):
        """Does at least one key in section.subsection exist?
        """
        try:
            return subsection in self._sections[section]
        except KeyError:
            return False

    def UrlInsteadOf(self, url):
        """Resolve any url.*.insteadof references.
        """
        for new_url in self.GetSubSections('url'):
            for old_url in self.GetString('url.%s.insteadof' % new_url, True):
                if old_url is not None and url.startswith(old_url):
                    return new_url + url[len(old_url):]
        return url

    @property
    def _sections(self):
        """Map of section name -> set of subsection names (lazily built)."""
        d = self._section_dict
        if d is None:
            d = {}
            for name in self._cache.keys():
                p = name.split('.')
                if 2 == len(p):
                    section = p[0]
                    subsect = ''
                else:
                    section = p[0]
                    subsect = '.'.join(p[1:-1])
                if section not in d:
                    d[section] = set()
                d[section].add(subsect)
            self._section_dict = d
        return d

    @property
    def _cache(self):
        """The parsed key -> [values] dict, loaded on first access."""
        if self._cache_dict is None:
            self._cache_dict = self._Read()
        return self._cache_dict

    def _Read(self):
        """Load the config, preferring the JSON cache over running git."""
        d = self._ReadJson()
        if d is None:
            d = self._ReadGit()
            self._SaveJson(d)
        return d

    def _ReadJson(self):
        """Return the cached dict, or None when stale, missing or corrupt."""
        try:
            # A cache not strictly newer than the config file is stale.
            if os.path.getmtime(self._json) \
                    <= os.path.getmtime(self.file):
                os.remove(self._json)
                return None
        except OSError:
            return None
        try:
            Trace(': parsing %s', self.file)
            fd = open(self._json)
            try:
                return json.load(fd)
            finally:
                fd.close()
        except (IOError, ValueError):
            # Corrupt cache; remove it and fall back to git.
            os.remove(self._json)
            return None

    def _SaveJson(self, cache):
        """Write the parse cache; best effort, removing it on failure."""
        try:
            fd = open(self._json, 'w')
            try:
                json.dump(cache, fd, indent=2)
            finally:
                fd.close()
        except (IOError, TypeError):
            # BUG FIX: this previously tested `self.json` (no such attribute),
            # raising AttributeError instead of cleaning up the stale cache.
            if os.path.exists(self._json):
                os.remove(self._json)

    def _ReadGit(self):
        """
        Read configuration data from git.

        This internal method populates the GitConfig cache.
        """
        c = {}
        d = self._do('--null', '--list')
        if d is None:
            return c
        for line in d.decode('utf-8').rstrip('\0').split('\0'):  # pylint: disable=W1401
            # Backslash is not anomalous
            if '\n' in line:
                key, val = line.split('\n', 1)
            else:
                key = line
                val = None

            if key in c:
                c[key].append(val)
            else:
                c[key] = [val]
        return c

    def _do(self, *args):
        """Run `git config --file <file> <args>`; return stdout, or None."""
        command = ['config', '--file', self.file]
        command.extend(args)

        p = GitCommand(None,
                       command,
                       capture_stdout=True,
                       capture_stderr=True)
        if p.Wait() == 0:
            return p.stdout
        else:
            # NOTE(review): GitError is constructed but never raised here, so
            # a failing `git config` yields a None return; _ReadGit depends on
            # that None, so raising would change caller behavior — confirm
            # intent before "fixing".
            GitError('git config %s: %s' % (str(args), p.stderr))
class RefSpec(object):
    """A Git refspec line, split into its components:

      forced:  True if the line starts with '+'
      src:     Left side of the line
      dst:     Right side of the line
    """

    @classmethod
    def FromString(cls, rs):
        """Parse a 'src:dst' string (optionally '+'-prefixed) into a RefSpec."""
        lhs, rhs = rs.split(':', 2)
        forced = lhs.startswith('+')
        if forced:
            lhs = lhs[1:]
        return cls(forced, lhs, rhs)

    def __init__(self, forced, lhs, rhs):
        self.forced = forced
        self.src = lhs
        self.dst = rhs

    def SourceMatches(self, rev):
        """Does *rev* fall under the source side of this refspec?"""
        if not self.src:
            return False
        return rev == self.src or (
            self.src.endswith('/*') and rev.startswith(self.src[:-1]))

    def DestMatches(self, ref):
        """Does *ref* fall under the destination side of this refspec?"""
        if not self.dst:
            return False
        return ref == self.dst or (
            self.dst.endswith('/*') and ref.startswith(self.dst[:-1]))

    def MapSource(self, rev):
        """Translate a matching source rev into its destination ref."""
        if self.src.endswith('/*'):
            return self.dst[:-1] + rev[len(self.src) - 1:]
        return self.dst

    def __str__(self):
        parts = []
        if self.forced:
            parts.append('+')
        if self.src:
            parts.append(self.src)
        if self.dst:
            parts.append(':')
            parts.append(self.dst)
        return ''.join(parts)
# State for the ssh control-master machinery (see _open_ssh/close_ssh).
_master_processes = []      # Popen handles of masters we started
_master_keys = set()        # "host" or "host:port" keys with a live master
_ssh_master = True          # set False once starting a master has failed
_master_keys_lock = None    # guards _master_keys; created by init_ssh()


def init_ssh():
    """Should be called once at the start of repo to init ssh master handling.

    At the moment, all we do is to create our lock.
    """
    global _master_keys_lock
    assert _master_keys_lock is None, "Should only call init_ssh once"
    _master_keys_lock = _threading.Lock()
def _open_ssh(host, port=None):
    """Ensure a ssh control master is running for host[:port].

    Returns True when a master is running (already, or newly started) so
    later git-over-ssh connections can multiplex through its socket;
    False when one cannot be used (disabled, unsupported platform, or
    startup failed).
    """
    global _ssh_master

    # Acquire the lock.  This is needed to prevent opening multiple masters for
    # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
    # manifest <remote fetch="ssh://xyz"> specifies a different host from the
    # one that was passed to repo init.
    _master_keys_lock.acquire()
    try:

        # Check to see whether we already think that the master is running; if we
        # think it's already running, return right away.
        if port is not None:
            key = '%s:%s' % (host, port)
        else:
            key = host
        if key in _master_keys:
            return True

        if not _ssh_master \
        or 'GIT_SSH' in os.environ \
        or sys.platform in ('win32', 'cygwin'):
            # Failed earlier, the user supplies their own ssh wrapper, or
            # cygwin/win32 ssh can't do control masters.
            return False

        # We will make two calls to ssh; this is the common part of both calls.
        command_base = ['ssh',
                        '-o', 'ControlPath %s' % ssh_sock(),
                        host]
        if port is not None:
            command_base[1:1] = ['-p', str(port)]

        # Since the key wasn't in _master_keys, we think that master isn't running.
        # ...but before actually starting a master, we'll double-check.  This can
        # be important because we can't tell that that 'git@myhost.com' is the same
        # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
        check_command = command_base + ['-O', 'check']
        try:
            Trace(': %s', ' '.join(check_command))
            check_process = subprocess.Popen(check_command,
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
            check_process.communicate()  # read output, but ignore it...
            isnt_running = check_process.wait()

            if not isnt_running:
                # Our double-check found that the master _was_ in fact running.
                # Add it to the list of keys so we skip the check next time.
                _master_keys.add(key)
                return True
        except Exception:
            # Ignore exceptions.  We will fall back to the normal command and
            # print to the log there.
            pass

        # Start a new control master (-M) with no remote command (-N).
        command = command_base[:1] + \
            ['-M', '-N'] + \
            command_base[1:]
        try:
            Trace(': %s', ' '.join(command))
            p = subprocess.Popen(command)
        except Exception as e:
            # Remember the failure so we don't retry for every connection.
            _ssh_master = False
            print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
                  % (host, port, str(e)), file=sys.stderr)
            return False

        _master_processes.append(p)
        _master_keys.add(key)
        # Give the newly started master a moment to create its socket.
        time.sleep(1)
        return True
    finally:
        _master_keys_lock.release()
def close_ssh():
    """Terminate every ssh control master started by _open_ssh().

    Also removes the control-socket directory and resets the module state
    so init_ssh() could be called again.
    """
    global _master_keys_lock

    terminate_ssh_clients()

    # Best-effort kill of the masters we spawned; they may already be gone.
    for p in _master_processes:
        try:
            os.kill(p.pid, SIGTERM)
            p.wait()
        except OSError:
            pass
    del _master_processes[:]
    _master_keys.clear()

    # Remove the (now empty) directory holding the control sockets.
    d = ssh_sock(create=False)
    if d:
        try:
            os.rmdir(os.path.dirname(d))
        except OSError:
            pass

    # We're done with the lock, so we can delete it.
    _master_keys_lock = None
# scp-style address: "[user@]host:path" (no scheme).
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
# Full URL: "scheme://[user@]host/...".
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')


def GetSchemeFromUrl(url):
    """Return the scheme of *url* (e.g. 'ssh'), or None when it has none."""
    m = URI_ALL.match(url)
    return m.group(1) if m else None
def _preconnect(url):
    """Pre-open a ssh control master when *url* is ssh-based.

    Handles both scheme URLs (ssh://, git+ssh://, ssh+git://) and
    scp-style 'user@host:path' addresses.  Returns True when a control
    master is available, False otherwise.
    """
    m = URI_ALL.match(url)
    if m:
        scheme = m.group(1)
        host = m.group(2)
        if ':' in host:
            host, port = host.split(':')
        else:
            port = None
        if scheme in ('ssh', 'git+ssh', 'ssh+git'):
            return _open_ssh(host, port)
        return False

    m = URI_SCP.match(url)
    if m:
        return _open_ssh(m.group(1))

    return False
class Remote(object):
    """Configuration options related to a remote.

    Values are read from the 'remote.<name>.*' section of the owning
    GitConfig.
    """

    def __init__(self, config, name):
        self._config = config
        self.name = name
        self.url = self._Get('url')
        self.review = self._Get('review')
        self.projectname = self._Get('projectname')
        # Fetch lines parsed into RefSpec objects.
        self.fetch = list(map(RefSpec.FromString,
                              self._Get('fetch', all_keys=True)))
        self._review_url = None  # lazily resolved by ReviewUrl()

    def _InsteadOf(self):
        # Apply the longest matching url.<base>.insteadOf rewrite from the
        # user's global git configuration to self.url.
        globCfg = GitConfig.ForUser()
        urlList = globCfg.GetSubSections('url')
        longest = ""
        longestUrl = ""

        for url in urlList:
            key = "url." + url + ".insteadOf"
            insteadOfList = globCfg.GetString(key, all_keys=True)

            for insteadOf in insteadOfList:
                if self.url.startswith(insteadOf) \
                        and len(insteadOf) > len(longest):
                    longest = insteadOf
                    longestUrl = url

        if len(longest) == 0:
            return self.url

        return self.url.replace(longest, longestUrl, 1)

    def PreConnectFetch(self):
        """Pre-open a ssh control master for the (rewritten) fetch URL."""
        connectionUrl = self._InsteadOf()
        return _preconnect(connectionUrl)

    def ReviewUrl(self, userEmail):
        """Resolve and cache the review (Gerrit) URL for this remote.

        Returns the resolved base URL with the project name appended, or
        None when no review server is configured.  May contact the review
        server's /ssh_info endpoint to discover ssh connectivity.
        """
        if self._review_url is None:
            if self.review is None:
                return None

            # Normalize the configured review value into a base http URL.
            u = self.review
            if u.split(':')[0] not in ('http', 'https', 'sso'):
                u = 'http://%s' % u
            if u.endswith('/Gerrit'):
                u = u[:len(u) - len('/Gerrit')]
            if u.endswith('/ssh_info'):
                u = u[:len(u) - len('/ssh_info')]
            if not u.endswith('/'):
                u += '/'
            http_url = u

            if u in REVIEW_CACHE:
                self._review_url = REVIEW_CACHE[u]
            elif 'REPO_HOST_PORT_INFO' in os.environ:
                # Environment override: "host port" forces ssh review access.
                host, port = os.environ['REPO_HOST_PORT_INFO'].split()
                self._review_url = self._SshReviewUrl(userEmail, host, port)
                REVIEW_CACHE[u] = self._review_url
            elif u.startswith('sso:'):
                self._review_url = u  # Assume it's right
                REVIEW_CACHE[u] = self._review_url
            else:
                try:
                    info_url = u + 'ssh_info'
                    info = urllib.request.urlopen(info_url).read()
                    # NOTE(review): on Python 3 urlopen().read() returns
                    # bytes, so these str comparisons look suspect — confirm.
                    if info == 'NOT_AVAILABLE' or '<' in info:
                        # If `info` contains '<', we assume the server gave us some sort
                        # of HTML response back, like maybe a login page.
                        #
                        # Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
                        self._review_url = http_url
                    else:
                        host, port = info.split()
                        self._review_url = self._SshReviewUrl(userEmail, host, port)
                except urllib.error.HTTPError as e:
                    raise UploadError('%s: %s' % (self.review, str(e)))
                except urllib.error.URLError as e:
                    raise UploadError('%s: %s' % (self.review, str(e)))
                except HTTPException as e:
                    raise UploadError('%s: %s' % (self.review, e.__class__.__name__))

                REVIEW_CACHE[u] = self._review_url

        return self._review_url + self.projectname

    def _SshReviewUrl(self, userEmail, host, port):
        # Build a ssh:// review URL; the username comes from the config or
        # defaults to the local part of the user's email address.
        username = self._config.GetString('review.%s.username' % self.review)
        if username is None:
            username = userEmail.split('@')[0]
        return 'ssh://%s@%s:%s/' % (username, host, port)

    def ToLocal(self, rev):
        """Convert a remote revision string to something we have locally.
        """
        if IsId(rev):
            return rev
        if rev.startswith(R_TAGS):
            return rev

        if not rev.startswith('refs/'):
            rev = R_HEADS + rev

        for spec in self.fetch:
            if spec.SourceMatches(rev):
                return spec.MapSource(rev)
        raise GitError('remote %s does not have %s' % (self.name, rev))

    def WritesTo(self, ref):
        """True if the remote stores to the tracking ref.
        """
        for spec in self.fetch:
            if spec.DestMatches(ref):
                return True
        return False

    def ResetFetch(self, mirror=False):
        """Set the fetch refspec to its default value.
        """
        if mirror:
            dst = 'refs/heads/*'
        else:
            dst = 'refs/remotes/%s/*' % self.name
        self.fetch = [RefSpec(True, 'refs/heads/*', dst)]

    def Save(self):
        """Save this remote to the configuration.
        """
        self._Set('url', self.url)
        self._Set('review', self.review)
        self._Set('projectname', self.projectname)
        self._Set('fetch', list(map(str, self.fetch)))

    def _Set(self, key, value):
        # Write 'remote.<name>.<key>' into the owning config file.
        key = 'remote.%s.%s' % (self.name, key)
        return self._config.SetString(key, value)

    def _Get(self, key, all_keys=False):
        # Read 'remote.<name>.<key>' from the owning config file.
        key = 'remote.%s.%s' % (self.name, key)
        return self._config.GetString(key, all_keys=all_keys)
class Branch(object):
    """Configuration options related to a single branch.

    Values are read from the 'branch.<name>.*' section of the owning
    GitConfig.
    """

    def __init__(self, config, name):
        self._config = config
        self.name = name
        self.merge = self._Get('merge')

        remote_name = self._Get('remote')
        self.remote = self._config.GetRemote(remote_name) if remote_name else None

    @property
    def LocalMerge(self):
        """Convert the merge spec to a local name.
        """
        if self.remote and self.merge:
            return self.remote.ToLocal(self.merge)
        return None

    def Save(self):
        """Save this branch back into the configuration.
        """
        if self._config.HasSection('branch', self.name):
            # The section already exists; update it through git config.
            self._Set('remote', self.remote.name if self.remote else None)
            self._Set('merge', self.merge)
        else:
            # New section: append it directly to the config file.
            fd = open(self._config.file, 'a')
            try:
                fd.write('[branch "%s"]\n' % self.name)
                if self.remote:
                    fd.write('\tremote = %s\n' % self.remote.name)
                if self.merge:
                    fd.write('\tmerge = %s\n' % self.merge)
            finally:
                fd.close()

    def _Set(self, key, value):
        # Write 'branch.<name>.<key>' into the owning config file.
        return self._config.SetString('branch.%s.%s' % (self.name, key), value)

    def _Get(self, key, all_keys=False):
        # Read 'branch.<name>.<key>' from the owning config file.
        return self._config.GetString('branch.%s.%s' % (self.name, key),
                                      all_keys=all_keys)
| {
"content_hash": "9c5dfdc70e0f23f14f225077fb2fcaed",
"timestamp": "",
"source": "github",
"line_count": 701,
"max_line_length": 89,
"avg_line_length": 26.713266761768903,
"alnum_prop": 0.5827726156146534,
"repo_name": "xin3liang/git-repo",
"id": "aa07d1b757fecc805c806cfd4a34695b8a2c7b54",
"size": "19329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "git_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "350998"
},
{
"name": "Shell",
"bytes": "6043"
}
],
"symlink_target": ""
} |
import logging
import requests
from fakturo.core import exceptions, utils
LOG = logging.getLogger(__name__)
class BaseClient(object):
    """Thin wrapper around requests.Session adding base-URL joining and
    uniform error handling for the fakturo API.
    """

    def __init__(self, url=None):
        # BUG FIX: str.rstrip() returns a new string; the old code discarded
        # the result, so a trailing '/' was never actually removed.
        self.url = url.rstrip('/') if url else url
        self.requests = self.get_requests()

    def get_requests(self, headers=None, args_hooks=None,
                     pre_request_hooks=None):
        """Build a requests.Session with JSON defaults and logging hooks.

        :param headers: extra default headers (Content-Type defaults to
                        application/json when absent)
        :param args_hooks: hooks for the 'args' event
        :param pre_request_hooks: hooks for the 'pre_request' event; a
                        request-logging hook is always appended
        """
        # BUG FIX: the old signature used mutable defaults ({} / []); the
        # shared default dict was mutated below, leaking state across calls.
        headers = {} if headers is None else headers
        args_hooks = [] if args_hooks is None else args_hooks
        pre_request_hooks = [] if pre_request_hooks is None \
            else pre_request_hooks

        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'
        pre_request_hooks = pre_request_hooks + [utils.log_request]

        session = requests.Session()
        session.hooks = dict(
            args=args_hooks,
            pre_request=pre_request_hooks)
        session.headers.update(headers)
        return session

    def wrap_api_call(self, function, path, status_code=200, *args, **kw):
        """Invoke *function* on self.url + path and validate the response.

        :param function: a session method (get/post/put/delete)
        :param path: path fragment appended to the base URL
        :param status_code: expected HTTP status code
        :raises exceptions.RemoteError: on any other status code
        """
        path = path.lstrip('/') if path else ''
        url = self.url + '/' + path

        LOG.debug('Wrapping request to %s' % url)
        wrapper = kw.get('wrapper', None)

        # NOTE: If we're passed a wrapper function by the caller, pass the
        # requests function to it along with path and other args...
        if wrapper and hasattr(wrapper, '__call__'):
            return wrapper(function, url, *args, **kw)

        response = function(url, *args, **kw)

        # NOTE: Make a function that can extract errors based on content type?
        if response.status_code != status_code:
            error = None
            if response.json:
                error = response.json.get('error', None)
            if not error:
                error = 'Remote error occured. Response Body:\n%s' % \
                    response.content
            raise exceptions.RemoteError(response.status_code, error)
        return response

    def get(self, *args, **kw):
        return self.wrap_api_call(self.requests.get, *args, **kw)

    def post(self, *args, **kw):
        return self.wrap_api_call(self.requests.post, *args, **kw)

    def put(self, *args, **kw):
        return self.wrap_api_call(self.requests.put, *args, **kw)

    def delete(self, *args, **kw):
        return self.wrap_api_call(self.requests.delete, *args, **kw)
| {
"content_hash": "15e8dd25c1a95c88b8999ccca4963143",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 78,
"avg_line_length": 32.89230769230769,
"alnum_prop": 0.5893358278765201,
"repo_name": "billingstack/python-fakturo",
"id": "75d24df5bd0e6849ca2a4865bee045cc9e29cc86",
"size": "2138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fakturo/core/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "54647"
}
],
"symlink_target": ""
} |
import os
def writeANNProperties(in_scaler, out_scaler, scaler, labels):
    """Write an OpenFOAM-style ANNProperties file describing the scalers.

    :param in_scaler: input scaler; for 'Standard' it must expose ``.std``
        (a fitted StandardScaler-like object), for 'MinMax' it is a fitted
        MinMaxScaler-like object
    :param out_scaler: output scaler, same convention as ``in_scaler``
    :param scaler: normalization kind, either 'Standard' or 'MinMax'
    :param labels: iterable of label strings used to build the file name
    """
    # Ensure the output directory exists (replaces the try/assert/mkdir dance,
    # which was racy and hid unrelated errors behind a bare except).
    os.makedirs('ANNProperties', exist_ok=True)

    out_path = 'ANNProperties/ANNProperties_' + '_'.join(x for x in labels)
    # Context managers guarantee the files are closed even on error
    # (the original leaked the handle if anything raised mid-write).
    with open(out_path, 'w') as props:
        # Copy the optional header template verbatim, if present.
        try:
            with open('ANNProperties/top', encoding='utf-8') as header:
                for line in header:
                    props.write(line)
        except OSError:
            print('Include a header file named: top into ./ANNProperties!')

        # write which is the normalization scaler: MinMax or Standard
        props.write('ANN_scaler %s;\n' % scaler)

        if scaler == 'Standard':
            props.write('\nin_scale\n')
            props.write('{\n')
            if hasattr(in_scaler.std, 'mean_'):
                for i, (mean, var) in enumerate(
                        zip(in_scaler.std.mean_, in_scaler.std.scale_), start=1):
                    props.write('in_%i_mean %.16f;\n' % (i, mean))
                    props.write('in_%i_var %.16f;\n' % (i, var))
            else:
                # Unfitted input scaler: fall back to identity scaling
                # for the 3 inputs (mean 0, var 1).
                for i in range(1, 4):
                    props.write('in_%i_mean %.16f;\n' % (i, 0))
                    props.write('in_%i_var %.16f;\n' % (i, 1))
            props.write('}\n')

            props.write('\nout_scale\n')
            props.write('{\n')
            for i, (mean, var) in enumerate(
                    zip(out_scaler.std.mean_, out_scaler.std.scale_), start=1):
                props.write('out_%i_mean %.16f;\n' % (i, mean))
                props.write('out_%i_var %.16f;\n' % (i, var))
            props.write('}\n')
            # write the number of species
            props.write('nr_features %i;\n' % len(out_scaler.std.mean_))
            props.write('\ninput_layer //input_1;\n')
            props.write('output_layer //dense_2;\n')
        elif scaler == 'MinMax':
            props.write('\nin_scale\n')
            props.write('{\n')
            for i, (hi, lo) in enumerate(
                    zip(in_scaler.data_max_, in_scaler.data_min_), start=1):
                props.write('in_%i_max %.16f;\n' % (i, hi))
                props.write('in_%i_min %.16f;\n' % (i, lo))
            props.write('}\n')
            props.write('\nout_scale\n')
            props.write('{\n')
            for i, (hi, lo) in enumerate(
                    zip(out_scaler.data_max_, out_scaler.data_min_), start=1):
                props.write('out_%i_max %.16f;\n' % (i, hi))
                props.write('out_%i_min %.16f;\n' % (i, lo))
            props.write('}\n')
            props.write('\n')
            props.write('range_min %s;\n' % min(in_scaler.feature_range))
            props.write('range_max %s;\n' % max(in_scaler.feature_range))
            # write nr of species
            props.write('nr_features %i;\n' % len(out_scaler.data_max_))
            props.write('\ninput_layer //input_1;\n')
            props.write('output_layer //dense_2;\n')

        props.write('\n// ************************************************************************* //')
    print('\nANNProperties are written')
"content_hash": "15a91c478c6e7e87aad08cedeb452c7c",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 108,
"avg_line_length": 42.34567901234568,
"alnum_prop": 0.536734693877551,
"repo_name": "uqyge/combustionML",
"id": "2f3ce47d2f3269a0d71231fba3e5d9f5c9a359fe",
"size": "3430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FPV_ANN/utils/writeANNProperties.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4525"
},
{
"name": "C++",
"bytes": "144009"
},
{
"name": "Dockerfile",
"bytes": "816"
},
{
"name": "Jupyter Notebook",
"bytes": "40959474"
},
{
"name": "Makefile",
"bytes": "1310"
},
{
"name": "Python",
"bytes": "276493"
},
{
"name": "Shell",
"bytes": "285"
}
],
"symlink_target": ""
} |
import argparse
import matplotlib.pyplot as plt
def view(logpath=None):
    """Plot the Validation-accuracy values found in a training log.

    :param logpath: path of the log file; defaults to the --logpath CLI
        argument (module-level ``args``), preserving the old call style.
    """
    if logpath is None:
        logpath = args.logpath
    accuracies = []
    epochs = []
    # 'with' closes the file deterministically (the old code leaked the handle).
    with open(logpath) as logfile:
        for line in logfile:
            if 'Validation-accuracy' in line:
                # Bug fix: parse the value after '=' as a number instead of
                # handing raw strings (with trailing newline) to plt.plot.
                accuracies.append(float(line[line.index('=') + 1:]))
                epochs.append(len(epochs))
    if accuracies:
        plt.title('')
        plt.xlabel('epoch')
        plt.ylabel('res')
        plt.plot(epochs, accuracies)
        plt.show()
if __name__ == '__main__':
    # Parse the log path from the command line, then plot the accuracies.
    parser = argparse.ArgumentParser(description='visualize log',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--logpath', type=str, default='log.log',
                        help='log path')
    # NOTE: 'args' is read as a module-level global by view().
    args = parser.parse_args()
    view()
| {
"content_hash": "7208a8a16be672a1e36c79db92c8fc5f",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 92,
"avg_line_length": 27.137931034482758,
"alnum_prop": 0.542566709021601,
"repo_name": "gu-yan/mlAlgorithms",
"id": "a2e4ab85f8ec09bb1d249b93f390eee5c2073aee",
"size": "812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mxnet/tools/visualize.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "159631"
}
],
"symlink_target": ""
} |
from os.path import dirname, basename, isfile
import glob

# Expose every sibling driver module via `from package import *`.
# Bug fix: exclude this __init__ module itself, which the glob also matches
# and which would otherwise appear in __all__.
modules = glob.glob(dirname(__file__)+"/*.py")
__all__ = [basename(f)[:-3] for f in modules
           if isfile(f) and not f.endswith('__init__.py')]
| {
"content_hash": "ae9eae0c48a1eaa9357b8cf66496d71f",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 58,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.6626506024096386,
"repo_name": "sql-assurance/sql-assurance",
"id": "29f180d50abbb92369a8a29f6c549ae10a9fd6cf",
"size": "166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sql_assurance/connectors/drivers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2319"
},
{
"name": "Python",
"bytes": "24201"
}
],
"symlink_target": ""
} |
def merge_species_databases(species_prefix):
    """ Build a merged database """
    from ibeis.control import IBEISControl
    from ibeis.dev import sysres
    print('[ibsfuncs] Merging species with prefix: %r' % species_prefix)
    ut.util_parallel.ensure_pool(warn=False)
    with ut.Indenter(' '):
        # Destination database that will receive every source database.
        all_db = '__ALL_' + species_prefix + '_'
        all_dbdir = sysres.db_to_dbdir(all_db, allow_newdir=True)
        ibs_target = IBEISControl.IBEISController(all_dbdir)
        # Open a controller for each source database of this species.
        species_dbdir_list = get_species_dbs(species_prefix)
        ibs_source_list = [IBEISControl.IBEISController(dbdir)
                           for dbdir in species_dbdir_list]
        print('[ibsfuncs] Destination database: %r' % all_db)
        print('[ibsfuncs] Source databases:' +
              ut.indentjoin(species_dbdir_list, '\n * '))
        # Merge every source database into ibs_target.
        merge_databases(ibs_target, ibs_source_list)
    return ibs_target
def merge_databases(ibs_target, ibs_source_list):
    """ Merges a list of databases into a target
    This is OLD. use export_subset instead
    """
    # NOTE(review): the docstring says 'export_subset' but the error says
    # 'transfer_subset' -- confirm which replacement is the real one.
    raise AssertionError('Use transfer_subset instead')
def merge_images(ibs_target, ibs_source):
    """ merge image helper """
    # Pull everything we need out of the source database first.
    src_gids = ibs_source.get_valid_gids()
    src_uuids = ibs_source.get_image_uuids(src_gids)
    src_gpaths = ibs_source.get_image_paths(src_gids)
    src_reviewed = ibs_source.get_image_reviewed(src_gids)
    # Import the image files into the target database.
    ibs_target.add_images(src_gpaths)
    # Carry the reviewed flags over, matching images by uuid.
    dst_gids = ibs_target.get_image_gids_from_uuid(src_uuids)
    ibs_target.set_image_reviewed(dst_gids, src_reviewed)
def merge_annotations(ibs_target, ibs_source):
    """ merge annotations helper """
    src_aids = ibs_source.get_valid_aids()
    src_annot_uuids = ibs_source.get_annot_uuids(src_aids)
    # Gather the annotation properties that will be copied across.
    src_gids = ibs_source.get_annot_gids(src_aids)
    src_bboxes = ibs_source.get_annot_bboxes(src_aids)
    src_thetas = ibs_source.get_annot_thetas(src_aids)
    src_names = ibs_source.get_annot_names(src_aids)
    src_notes = ibs_source.get_annot_notes(src_aids)
    # Resolve the parent images in the target database via their uuids.
    src_image_uuids = ibs_source.get_image_uuids(src_gids)
    dst_gids = ibs_target.get_image_gids_from_uuid(src_image_uuids)
    dst_image_uuids = ibs_target.get_image_uuids(dst_gids)
    # Sanity check: image uuids must survive the merge unchanged.
    assert src_image_uuids == dst_image_uuids, 'error merging annotation image uuids'
    dst_aids = ibs_target.add_annots(dst_gids,
                                     src_bboxes,
                                     theta_list=src_thetas,
                                     name_list=src_names,
                                     notes_list=src_notes)
    dst_annot_uuids = ibs_target.get_annot_uuids(dst_aids)
    assert dst_annot_uuids == src_annot_uuids, 'error merging annotation uuids'
# Do the merging
# NOTE(review): everything below is an orphaned snippet from the module this
# "_broken" file was extracted from -- it references names (ibs_source_list,
# ibs_target, ut, parser2, params, ibsfuncs) that are not defined here, and
# the final shell command line is not valid Python at module level.
for ibs_source in ibs_source_list:
    try:
        print('Merging ' + ibs_source.get_dbname() +
              ' into ' + ibs_target.get_dbname())
        merge_images(ibs_target, ibs_source)
        merge_annotations(ibs_target, ibs_source)
    except Exception as ex:
        ut.printex(ex, 'error merging ' + ibs_source.get_dbname() +
                   ' into ' + ibs_target.get_dbname())
# from params
# CLI flag that triggered the merge for a given species prefix:
parser2.add_str(('--merge-species'), help='merges all databases of given species')
if params.args.merge_species is not None:
    ibsfuncs.merge_species_databases(params.args.merge_species)
# Merge all jaguar databases into single big database
python main.py --merge-species JAG_
| {
"content_hash": "8611f234694529b2235f17b7e73e76d8",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 91,
"avg_line_length": 44.47727272727273,
"alnum_prop": 0.6226366888094022,
"repo_name": "SU-ECE-17-7/ibeis",
"id": "bea0d73cee33f4a395d059a9ecc8b9d50c1f71f0",
"size": "3914",
"binary": false,
"copies": "1",
"ref": "refs/heads/next",
"path": "_broken/oldibsfuncs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "331"
},
{
"name": "CSS",
"bytes": "26792"
},
{
"name": "HTML",
"bytes": "33762203"
},
{
"name": "Inno Setup",
"bytes": "1585"
},
{
"name": "JavaScript",
"bytes": "227454"
},
{
"name": "Jupyter Notebook",
"bytes": "66346367"
},
{
"name": "Python",
"bytes": "6112508"
},
{
"name": "Shell",
"bytes": "58211"
}
],
"symlink_target": ""
} |
import logging
import time
from db.connection import connection
from model.usuario import usuario
# Enable logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
# Model representing a withdrawal (retiro) record in the DB
class retiro:
    def __init__(self, id_retiro: int = 0, numc_usuario: int = '',
                 monto_retiro: int = 0, fecha_inicio_retiro=time.strftime('%Y-%m-%d %H:%M:%S'), fecha_fin_retiro=None,
                 estado_retiro: str = ''):
        """Hold the fields of one row of the Retiros table.

        NOTE(review): the default for fecha_inicio_retiro is evaluated once
        at import time, not per instance -- confirm that is intended.
        NOTE(review): numc_usuario is annotated int but defaults to '';
        the code below compares it against 0 -- confirm the intended default.
        """
        self.id_retiro = id_retiro
        self.numc_usuario = numc_usuario
        # Always store the withdrawal amount as a positive number.
        self.monto_retiro = monto_retiro if monto_retiro > 0 else - (monto_retiro)
        self.fecha_inicio_retiro = fecha_inicio_retiro
        self.fecha_fin_retiro = fecha_fin_retiro
        self.estado_retiro = estado_retiro

    def realizar_retiro(self):
        """Queue a withdrawal for this user.

        Returns a (success, message) tuple; on success the message carries
        the id of the newly queued withdrawal. A user may have at most one
        pending ('E') withdrawal at a time.
        """
        retorno = False
        mensaje = ''
        if self.numc_usuario != 0:
            us = usuario(self.numc_usuario)
            if us.cargar_datos():
                con = connection()
                # Withdrawal states: E -> waiting, A -> accepted, C -> cancelled.
                # Only one row is taken because only one withdrawal may be queued.
                output = con.execute(
                    "SELECT id_retiro, numc_usuario, monto_retiro, fecha_inicio_retiro FROM Retiros WHERE numc_usuario = %s "
                    "AND estado_retiro = 'E'", [self.numc_usuario]).fetchall()
                if len(output) == 0:
                    saldo_disp = us.saldo - self.monto_retiro
                    if saldo_disp < 0:
                        # Not enough funds to cover the withdrawal.
                        mensaje = "Usted no posee suficiente dinero para realizar el Retiro"
                        logging.warning(
                            "Usuario 1 %s no tiene fondos para realizar el Retiro" % self.numc_usuario)
                    else:
                        rowcount = con.execute(
                            "INSERT INTO Retiros(numc_usuario,monto_retiro, fecha_inicio_retiro)"
                            "VALUES (%s, %s, %s)",
                            [self.numc_usuario, self.monto_retiro,
                             self.fecha_inicio_retiro],
                            True).rowcount
                        if rowcount > 0:
                            # There should be exactly one pending row now.
                            output2 = con.execute(
                                "SELECT id_retiro FROM Retiros WHERE numc_usuario = %s "
                                "AND estado_retiro = 'E'", [self.numc_usuario]).fetchall()[0]
                            retorno = True
                            mensaje = output2[0]
                            logging.info("Retiro en cola con éxito a la espera de confirmación"
                                         " por el usuario %s", self.numc_usuario)
                        else:
                            logging.info("Retiro fallido por el usuario %s",
                                         self.numc_usuario)
                            mensaje = "Algo salió mal al realizar tu Retiro :("
                else:
                    # A withdrawal is already pending; load it and report back.
                    output = output[0]
                    self.id_retiro = output[0]
                    self.numc_usuario = output[1]
                    self.monto_retiro = output[2]
                    self.fecha_inicio_retiro = output[3]
                    mensaje = "Usted tiene un Retiro pendiente - %s" % self.id_retiro
                    logging.warning("Usuario %s tiene un retiro pendiente %s" % (
                        self.numc_usuario, self.id_retiro))
            else:
                # The account number does not exist in the Usuarios table.
                mensaje = "Usuario no registrado"
                logging.error("Usuario no regirstrado %s" % self.numc_usuario)
        else:
            # The instance was built without a valid account number.
            mensaje = "Usuario inválido"
            logging.error("Usuario inválido, datos no iniciados correctamente")
        return (retorno, mensaje)

    def actualizar_retiro(self, estado_retiro):
        """Accept ('A') or cancel ('C') the withdrawal with self.id_retiro.

        On acceptance the amount is subtracted from the user's balance.
        Returns a (success, message) tuple.
        """
        retorno = False
        mensaje = ''
        if estado_retiro in ('A', 'C'):
            con = connection()
            output = con.execute("SELECT id_retiro,numc_usuario,monto_retiro FROM Retiros WHERE id_retiro = %s",
                                 [self.id_retiro]).fetchall()
            if len(output) == 1:
                output = output[0]
                us = usuario(output[1])
                us.cargar_datos()
                logging.info("Actualizando Retiro %s" % self.id_retiro)
                rowcount = con.execute(
                    "UPDATE Retiros SET estado_retiro = %s, fecha_fin_retiro = %s WHERE id_retiro = %s",
                    [estado_retiro, time.strftime('%Y-%m-%d %H:%M:%S'), self.id_retiro], True).rowcount
                # rowcount == 1 means the update went through.
                if rowcount == 1:
                    if estado_retiro == 'A':
                        # Adjust the user's balance after an accepted withdrawal.
                        logging.info(
                            "cambiando los saldos de los usuario despues del Retiro %s Aceptado" % self.id_retiro)
                        monto = output[2]
                        saldo_disp_us1 = us.saldo - monto
                        rowcount2 = con.execute("UPDATE Usuarios SET saldo = %s WHERE num_cuenta = %s",
                                                [saldo_disp_us1, us.num_cuenta], True).rowcount
                        if rowcount2 == 1:
                            retorno = True
                            mensaje = 'Retiro aceptado'
                            logging.info("Retiro aceptado")
                    else:
                        mensaje = "Retiro cancelado"
                        logging.info("Retiro %s cancelado", self.id_retiro)
                else:
                    mensaje = 'Código de Traslado erróneo\nPor favor ingrese el código correcto'
                    logging.info("Código de Retiro erróneo")
                con.close_connection()
        else:
            logging.warning("Estado de Retiro Incorrecto")
        return (retorno, mensaje)
if __name__ == '__main__':
    # Ad-hoc manual test: queue a withdrawal and then accept one by id.
    # NOTE(review): this hits the real database; it requires a reachable
    # connection() and existing rows -- confirm before running.
    re = retiro(numc_usuario=111111, monto_retiro=20)
    print(re.realizar_retiro())
    print(retiro(id_retiro=8327).actualizar_retiro('A'))
| {
"content_hash": "ea36f01505d65a052012fffd6be990a0",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 125,
"avg_line_length": 46,
"alnum_prop": 0.4966183574879227,
"repo_name": "cvem8165/KodeFest12",
"id": "e2ee3f095cff4701f9e57f1d26cf00bc2725ca26",
"size": "6220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/retiro.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52122"
}
],
"symlink_target": ""
} |
from boto.iam import IAMConnection
from boto.exception import BotoServerError
import json
import os
import re
import urllib
from prettytable import PrettyTable
from nephoria.baseops.botobaseops import BotoBaseOps
class IAMops(BotoBaseOps):
    # Eucarc attribute naming the IAM endpoint URL for this service.
    EUCARC_URL_NAME = 'iam_url'
    # Service prefix used when building the endpoint.
    SERVICE_PREFIX = 'iam'
    # boto connection class backing this ops wrapper.
    CONNECTION_CLASS = IAMConnection
def setup_resource_trackers(self):
## add test resource trackers and cleanup methods...
self.test_resources["iam_accounts"] = self.test_resources.get('iam_accounts', [])
self.test_resources_clean_methods["iam_accounts"] = None
def create_account(self, account_name, ignore_existing=True):
"""
Create an account with the given name
:param account_name: str name of account to create
"""
params = {'AccountName': account_name}
try:
res = self.get_response_items('CreateAccount', params, item_marker='account')
self.log.debug("Created account: " + account_name)
except BotoServerError as BE:
if not (BE.status == 409 and ignore_existing):
raise
res = self.get_account(account_name=account_name)
self.log.debug("create_account(). Account already exists: " + account_name)
self.test_resources["iam_accounts"].append(account_name)
return res
def delete_account(self, account_name, recursive=False):
"""
Delete an account with the given name
:param account_name: str name of account to delete
:param recursive:
"""
self.log.debug("Deleting account: " + account_name)
params = {
'AccountName': account_name,
'Recursive': recursive
}
self.connection.get_response('DeleteAccount', params)
def get_all_accounts(self, account_id=None, account_name=None, search=False):
"""
Request all accounts, return account dicts that match given criteria
:param account_id: regex string - to use for account_name
:param account_name: regex - to use for account ID
:param search: boolean - specify whether to use match or search when filtering the returned list
:return: list of account names
"""
if search:
re_meth = re.search
else:
re_meth = re.match
if account_id and not re.match("\d{12}", account_id):
if not account_name:
account_name = account_id
account_id = None
self.log.debug('Attempting to fetch all accounts matching- account_id:' +
str(account_id) + ' account_name:' + str(account_name))
response = self.get_response_items('ListAccounts', {}, item_marker='accounts',
list_marker='Accounts')
retlist = []
for account in response:
if account_name is not None:
if not search:
account_name = "^{0}$".format(account_name.strip())
if not re_meth(account_name, account['account_name']):
continue
if account_id is not None:
if not search:
account_id = "^{0}$".format(account_id .strip())
if not re_meth(account['account_id'], account_id):
continue
retlist.append(account)
return retlist
def get_account(self, account_id=None, account_name=None, search=False):
"""
Request a specific account, returns an account dict that matches the given criteria
:param account_id: regex string - to use for account_name
:param account_name: regex - to use for account ID
:param search: boolean - specify whether to use match or search when filtering the returned list
:return: account dict
"""
if not (account_id or account_name):
aliases = self.get_account_aliases()
if aliases:
account_name = aliases[0]
else:
raise ValueError('get_account(). Account id, name, or alias not found')
accounts = self.get_all_accounts(account_id=account_id, account_name=account_name,
search=search)
if accounts:
if len(accounts) > 1:
raise ValueError('get_account matched more than a single account with the '
'provided criteria: account_id="{0}", account_name="{1}". '
'Matched:{2}'
.format(account_id, account_name,
", ".join(str(x) for x in accounts)))
else:
return accounts[0]
return None
def create_user(self, user_name, path="/", delegate_account=None, ignore_existing=True):
"""
Create a user
:param user_name: str name of user
:param path: str user path
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an account to operate on
"""
if not user_name:
# Assuming this could be part of a test, allow it but warn...
self.log.warning('create_user(). Passed unsupported user_name:"{0}"'
.format(user_name))
params = {'UserName': user_name,
'Path': path }
if delegate_account:
params['DelegateAccount'] = delegate_account
try:
res = self.get_response_items('CreateUser', params, item_marker='user')
self.log.debug('Created user:"{0}"'.format(user_name))
except BotoServerError as BE:
if not (BE.status == 409 and ignore_existing):
raise
res = self.get_user(user_name=user_name, delegate_account=delegate_account)
self.log.debug("create_user(). User already exists: " + user_name)
return res
def get_user(self, user_name=None, delegate_account=None):
params = {}
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
return self.get_response_items('GetUser', params, item_marker='user')
def delete_user(self, user_name, delegate_account=None):
"""
Delete a user
:param user_name: str name of user
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an account to operate on
"""
self.log.debug("Deleting user " + user_name)
params = {'UserName': user_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('DeleteUser', params)
def get_users_from_account(self, path=None, user_name=None, user_id=None,
delegate_account=None, search=False):
"""
Returns access that match given criteria. By default will return current account.
:param path: regex - to match for path
:param user_name: str name of user
:param user_id: regex - to match for user_id
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an account to operate on
:param search: use regex search (any occurrence) rather than match (exact same strings must occur)
:return:
"""
self.log.debug('Attempting to fetch all access matching- user_id:' +
str(user_id) + ' user_name:' + str(user_name) + " acct_name:" +
str(delegate_account))
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.get_response_items('ListUsers', params, item_marker='users',
list_marker='Users')
for user in response:
if path is not None and not re_meth(path, user['path']):
continue
if user_name is not None and not re_meth(user_name, user['user_name']):
continue
if user_id is not None and not re_meth(user_id, user['user_id']):
continue
retlist.append(user)
return retlist
def show_all_accounts(self, account_name=None, account_id=None, search=False,
print_table=True):
"""
Debug Method to print an account list based on given filter criteria
:param account_name: regex - to use for account_name
:param account_id: regex - to use for account_id
:param search: boolean - specify whether to use match or search when filtering the returned list
"""
pt = PrettyTable(['ACCOUNT_NAME', 'ACCOUNT_ID'])
pt.hrules = 1
pt.align = 'l'
list = self.get_all_accounts(account_name=account_name,
account_id=account_id,
search=search)
for account in list:
pt.add_row([account['account_name'], account['account_id']])
if print_table:
self.log.info("\n" + str(pt) + "\n")
else:
return pt
def show_all_groups(self, account_name=None, account_id=None, path=None,
group_name=None, group_id=None, search=False, print_table=True):
"""
Print all groups in an account
:param account_name: regex - to use for account_name
:param account_id: regex - to use for
:param path: regex - to match for path
:param group_name: regex - to match for user_name
:param group_id: regex - to match for user_id
:param search: boolean - specify whether to use match or search when filtering the returned list
"""
pt = PrettyTable(['ACCOUNT:', 'GROUPNAME:', 'GROUP_ID:'])
pt.hrules = 1
pt.align = 'l'
list = self.get_all_groups(account_name=account_name, account_id=account_id,
path=path, group_name=group_name, group_id=group_id,
search=search)
for group in list:
pt.add_row([group['account_name'], group['group_name'], group['group_id']])
if print_table:
self.log.info("\n" + str(pt) + "\n")
else:
return pt
def show_all_users(self, account_name=None, account_id=None, path=None, user_name=None,
user_id=None, search=False, print_table=True ):
"""
Debug Method to print a user list based on given filter criteria
:param account_name: regex - to use for account_name
:param account_id: regex - to use for
:param path: regex - to match for path
:param user_name: regex - to match for user_name
:param user_id: regex - to match for user_id
:param search: boolean - specify whether to use match or search when filtering the returned list
"""
pt = PrettyTable(['ACCOUNT:', 'USERNAME:', 'USER_ID', 'ACCT_ID'])
pt.hrules = 1
pt.align = 'l'
list = self.get_all_users(account_name=account_name, account_id=account_id, path=path,
user_name=user_name, user_id=user_id, search=search)
for user in list:
pt.add_row([user['account_name'], user['user_name'],
user['user_id'], user['account_id']])
if print_table:
self.log.info("\n" + str(pt) + "\n")
else:
return pt
def get_account_aliases(self, delegate_account=None):
params = {}
if delegate_account:
params['DelegateAccount'] = delegate_account
resp = self.get_response_items('ListAccountAliases', params,
item_marker='account_aliases',
list_marker='AccountAliases') or []
return resp
def get_username_for_active_connection(self):
"""
Helper method to show the active connections username in the case that the active
context is not this IAMops class's connection/context.
"""
user_info = self.get_user_info()
return getattr(user_info, 'user_name', None)
def get_accountname_for_active_connection(self):
"""
Helper method to show the active connections account name/alias in the case that the active
context is not this IAMops class's connection/context.
"""
aliases = self.get_account_aliases()
if aliases:
return aliases[0]
return None
def get_username_eucarc(self):
if self.eucarc:
return self.eucarc.user_name
return None
def get_accountname_eucarc(self):
if self.eucarc:
return self.eucarc.account_name
return None
def get_connections_accountname(self):
"""
Get account name of current user
"""
account_info = self.get_account()
return getattr(account_info, 'account_name', None)
def get_all_users(self, account_name=None, account_id=None, path=None,
user_name=None, user_id=None, search=False ):
"""
Queries all accounts matching given account criteria, returns all access found within
these accounts which then match the given user criteria.
Account info is added to the user dicts
:param account_name: regex - to use for account name
:param account_id: regex - to use for account id
:param path: regex - to match for path
:param user_name: regex - to match for user name
:param user_id: regex - to match for user id
:param search: boolean - specify whether to use match or search when filtering the
returned list
:return: List of access with account name tuples
"""
userlist=[]
accounts = self.get_all_accounts(account_id=account_id, account_name=account_name,
search=search)
for account in accounts:
#if account['account_id'] == self.account_id:
# access =self.get_users_from_account()
#else:
if account.get('account_id') == self.eucarc.account_id:
delegate_account = None
else:
delegate_account = account['account_name']
users = self.get_users_from_account(path=path,
user_name=user_name,
user_id=user_id,
delegate_account=delegate_account,
search=search)
for user in users:
user['account_name']=account['account_name']
user['account_id']=account['account_id']
userlist.append(user)
return userlist
def get_user_policy_names(self, user_name, policy_name=None, delegate_account=None,
search=False, ignore_admin_err=True):
"""
Returns list of policy names associated with a given user, and match given criteria.
:param user_name: string - user to get policies for.
:param policy_name: regex - to match/filter returned policies
:param delegate_account: string - used for user lookup
:param search: specify whether to use match or search when filtering the returned list
:return: list of policy names
"""
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
params = {'UserName': user_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
try:
response = self.connection.get_response('ListUserPolicies', params,
list_marker='PolicyNames')
p_names = response['list_user_policies_response']['list_user_policies_result']\
['policy_names']
for name in p_names:
if policy_name is not None and not re_meth(policy_name, name):
continue
retlist.append(name)
except BotoServerError, BE:
err = 'Error fetching policy for params:\n{0}: '.format(params, BE)
if BE.status == 403 and ignore_admin_err and str(user_name).strip() == 'sys_admin':
self.log.debug('IGNORING: '+ err)
else:
self.log.critical(err)
raise
return retlist
def get_user_policies(self, user_name, policy_name=None, delegate_account=None, doc=None,
search=False, ignore_admin_err=True):
"""
Returns list of policy dicts associated with a given user, and match given criteria.
:param user_name: string - user to get policies for.
:param policy_name: regex - to match/filter returned policies
:param delegate_account: string - used for user lookup
:param doc: policy document to use as a filter
:param search: boolean - specify whether to use match or search when filtering the
returned list
:param ignore_admin_err: boolean- will ignore 403 responses if the user is 'sys_admin'
:return:
"""
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
names = self.get_user_policy_names(user_name, policy_name=policy_name,
delegate_account=delegate_account, search=search)
for p_name in names:
params = {'UserName': user_name,
'PolicyName': p_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
try:
policy = self.connection.get_response(
'GetUserPolicy',
params,
verb='POST')['get_user_policy_response']['get_user_policy_result']
except BotoServerError, BE:
err_msg = 'Error fetching policy for params:\n{0}: "{1}"'.format(params, BE)
if BE.status == 403 and ignore_admin_err and str(p_name).strip() == 'sys_admin':
self.log.debug('IGNORING:' + str(err_msg))
else:
self.log.critical(err_msg)
raise
if doc is not None and not re_meth(doc, policy['policy_document']):
continue
retlist.append(policy)
return retlist
def show_user_policy_summary(self,user_name,policy_name=None,delegate_account=None,
doc=None, search=False, print_table=True):
"""
Debug method to display policy summary applied to a given user
:param user_name: string - user to get policies for.
:param policy_name: regex - to match/filter returned policies
:param delegate_account: string - used for user lookup
:param doc: policy document to use as a filter
:param search: boolean - specify whether to use match or search when filtering the returned list
"""
title = 'POLICIES FOR USER: {0}'.format(user_name)
main_pt = PrettyTable([title])
main_pt.hrules = 1
main_pt.align = 'l'
main_pt.max_width[title] = 120
policies = self.get_user_policies(user_name, policy_name=policy_name,
delegate_account=delegate_account, doc=doc, search=search)
if not policies:
main_pt.add_row(['-- No Policies --'])
else:
for policy in policies:
main_pt.add_row(['POLICY NAME: "{0}" :'.format(policy['policy_name'])])
p_doc = urllib.unquote(policy['policy_document'])
p_json = json.loads(p_doc)
pretty_json = (json.dumps(p_json, indent=2) or "") + "\n"
main_pt.add_row([pretty_json])
if print_table:
self.log.info("\n" + str(main_pt) + "\n")
else:
return main_pt
    def show_user_summary(self,user_name, delegate_account=None, account_id=None,
                          print_table=True):
        """
        Debug method for to display euare/iam info for a specific user.
        :param user_name: string - user to get policies for.
        :param delegate_account: string - used for user lookup
        :param account_id: regex - to use for account id
        """
        user_name = user_name
        if delegate_account is None:
            # Default to this connection's own account when none was given.
            account_id=self.eucarc.account_id
            delegate_account= self.get_all_accounts(account_id=account_id)[0]['account_name']
        self.log.debug('Fetching user summary for: user_name:' + str(user_name) +
                       " account:" + str(delegate_account) + " account_id:" + str(account_id))
        title = 'USER SUMMARY: user:{0}, account:{1}'.format(user_name, delegate_account)
        pt = PrettyTable([title])
        pt.align ='l'
        # Embed the one-row user table inside the summary table.
        user_table = str(self.show_all_users(account_name=delegate_account, account_id=account_id,
                                             user_name=user_name, print_table=False)) + "\n"
        pt.add_row([user_table])
        pol_pt = self.show_user_policy_summary(user_name, delegate_account=delegate_account,
                                               print_table=False)
        # Re-wrap the policy table so its width lines up with the user table.
        # NOTE(review): relies on PrettyTable private attributes
        # (_field_names, _rows) -- fragile across PrettyTable versions.
        new_title = str(pol_pt._field_names[0]).center(len(user_table.splitlines()[0])-4)
        new_pt = PrettyTable([new_title])
        new_pt.align[new_title] = 'l'
        new_pt.hrules = 1
        new_pt._rows = pol_pt._rows
        pt.add_row([new_pt])
        if print_table:
            self.log.info("\n" + str(pt) + "\n")
        else:
            return pt
def attach_policy_user(self, user_name, policy_name, policy_json, delegate_account=None):
"""
Attach a policy string to a user
:param user_name: string - user to apply policy to
:param policy_name: Name to upload policy as
:param policy_json: Policy text
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an account to operate on
"""
self.log.debug("Attaching the following policy to " + user_name + ":" + policy_json)
params = {'UserName': user_name,
'PolicyName': policy_name,
'PolicyDocument': policy_json}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('PutUserPolicy', params, verb='POST')
def detach_policy_user(self, user_name, policy_name, delegate_account=None):
"""
Detach a policy from user
:param user_name: string - user to apply policy to
:param policy_name: Name to upload policy as
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Detaching the following policy from " + user_name + ":" + policy_name)
params = {'UserName': user_name,
'PolicyName': policy_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('DeleteUserPolicy', params, verb='POST')
def get_all_groups(self, account_name=None, account_id=None, path=None, group_name=None,
group_id=None, search=False ):
"""
Queries all accounts matching given account criteria, returns all groups found within
these accounts which then match the given user criteria.
Account info is added to the group dicts
:param account_name: regex - to use for account_name
:param account_id: regex - to use for
:param path: regex - to match for path
:param group_name: regex - to match for group_name
:param group_id: regex - to match for group_id
:param search: boolean - specify whether to use match or search when filtering the
returned list
:return:
"""
grouplist=[]
accounts = self.get_all_accounts(account_id=account_id, account_name=account_name,
search=search)
for account in accounts:
groups = self.get_groups_from_account(path=path,
group_name=group_name,
group_id=group_id,
delegate_account=account['account_name'],
search=search)
for group in groups:
group['account_name']=account['account_name']
group['account_id']=account['account_id']
grouplist.append(group)
return grouplist
def get_groups_from_account(self, path=None, group_name=None, group_id=None,
delegate_account=None, search=False):
"""
Returns groups that match given criteria. By default will return groups from
current account.
:param path: regex - to match for path
:param group_name: regex - to match for group_name
:param group_id: regex - to match for group_id
:param delegate_account: string - to use for delegating account lookup
:param search: specify whether to use match or search when filtering the returned list
:return:
"""
self.log.debug('Attempting to fetch all groups matching- group_id:' + str(group_id) +
' group_name:' + str(group_name) + " acct_name:" + str(delegate_account))
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.connection.get_response('ListGroups', params, list_marker='Groups')
for group in response['list_groups_response']['list_groups_result']['groups']:
if path is not None and not re_meth(path, group['path']):
continue
if group_name is not None and not re_meth(group_name, group['group_name']):
continue
if group_id is not None and not re_meth(group_id, group['group_id']):
continue
retlist.append(group)
return retlist
def get_users_from_group(self, group_name, delegate_account=None):
"""
:param group_name: name of the group whose access should be returned.
:param delegate_account: specific account name when method is being called from
eucalyptus sys_admin user.
:return: list of access of an IAM group.
"""
ret_list = []
params = {}
if delegate_account:
params['DelegateAccount'] = delegate_account
params['GroupName'] = group_name
response = self.connection.get_response('GetGroup', params, list_marker='Users')
for user in response['get_group_response']['get_group_result']['access']:
ret_list.append(user)
return ret_list
def get_group_policy_names(self, group_name, policy_name=None,delegate_account=None,
search=False):
"""
Returns list of policy names associated with a given group, and match given criteria.
:param group_name: string - group to get policies for.
:param policy_name: regex - to match/filter returned policies
:param delegate_account: string - used for group lookup
:param search: specify whether to use match or search when filtering the returned list
:return: list of policy names
"""
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
params = {'GroupName': group_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.connection.get_response('ListGroupPolicies',
params, list_marker='PolicyNames')
for name in response['list_group_policies_response']['list_group_policies_result']\
['policy_names']:
if policy_name is not None and not re_meth(policy_name, name):
continue
retlist.append(name)
return retlist
def get_group_policies(self, group_name, policy_name=None,delegate_account=None, doc=None,
search=False):
"""
Returns list of policy dicts associated with a given group, and match given criteria.
:param group_name: string - group to get policies for.
:param policy_name: regex - to match/filter returned policies
:param delegate_account: string - used for group lookup
:param doc: policy document to use as a filter
:param search: boolean - specify whether to use match or search when filtering the
returned list
:return:
"""
retlist = []
params = {}
if search:
re_meth = re.search
else:
re_meth = re.match
names = self.get_group_policy_names(group_name, policy_name=policy_name,
delegate_account=delegate_account, search=search)
for p_name in names:
params = {'GroupName': group_name,
'PolicyName': p_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
policy = self.connection.get_response('GetGroupPolicy', params, verb='POST')\
['get_group_policy_response']['get_group_policy_result']
if doc is not None and not re_meth(doc, policy['policy_document']):
continue
retlist.append(policy)
return retlist
def create_group(self, group_name,path="/", delegate_account=None):
"""
Create group.
:param
:param path: path for group
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Attempting to create group: " + group_name)
params = {'GroupName': group_name,
'Path': path}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('CreateGroup', params)
def delete_group(self, group_name, delegate_account=None):
"""
Delete group.
:param group_name: name of group to delete
:param delegate_account:
"""
self.log.debug("Deleting group " + group_name)
params = {'GroupName': group_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('DeleteGroup', params)
def add_user_to_group(self, group_name, user_name, delegate_account=None):
"""
Add a user to a group.
:param group_name: name of group to add user to
:param user_name: name of user to add to group
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Adding user " + user_name + " to group " + group_name)
params = {'GroupName': group_name,
'UserName': user_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('AddUserToGroup', params)
def remove_user_from_group(self, group_name, user_name, delegate_account=None):
"""
Remove a user from a group.
:param group_name: name of group to remove user from
:param user_name: name of user to remove from group
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Removing user " + user_name + " to group " + group_name)
params = {'GroupName': group_name,
'UserName': user_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('RemoveUserFromGroup', params)
def attach_policy_group(self, group_name, policy_name, policy_json, delegate_account=None):
"""
Attach a policy to a group.
:param group_name: name of group to remove user from
:param policy_name: Name to upload policy as
:param policy_json: Policy text
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Attaching the following policy to " + group_name + ":" + policy_json)
params = {'GroupName': group_name,
'PolicyName': policy_name,
'PolicyDocument': policy_json}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('PutGroupPolicy', params, verb='POST')
def detach_policy_group(self, group_name, policy_name, delegate_account=None):
"""
Remove a policy from a group.
:param group_name: name of group to remove user from
:param policy_name: Name to upload policy as
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
"""
self.log.debug("Detaching the following policy from " + group_name + ":" + policy_name)
params = {'GroupName': group_name,
'PolicyName': policy_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
self.connection.get_response('DeleteGroupPolicy', params, verb='POST')
def create_access_key(self, user_name=None, delegate_account=None):
"""
Create a new access key for the user.
:param user_name: Name of user to create access key for to
:param delegate_account: str can be used by Cloud sys_admin in Eucalyptus to choose an
account to operate on
:return: A tuple of access key and and secret key with keys: 'access_key_id' and
'secret_access_key'
"""
self.log.debug("Creating access key for " + user_name )
params = {'UserName': user_name}
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.connection.get_response('CreateAccessKey', params)
access_tuple = {}
access_tuple['access_key_id'] = response['create_access_key_response']\
['create_access_key_result']['access_key']['access_key_id']
access_tuple['secret_access_key'] = response['create_access_key_response']\
['create_access_key_result']['access_key']['secret_access_key']
return access_tuple
def get_aws_access_key(self, user_name=None, delegate_account=None):
if not user_name and not delegate_account and self.connection.aws_access_key_id:
aws_access_key = self.connection.aws_access_key_id or self.eucarc.aws_access_key
if aws_access_key:
return aws_access_key
params = {}
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.get_response_items('ListAccessKeys', params, item_marker='member')
#result = response['list_access_keys_response']['list_access_keys_result']
#return result['access_key_metadata']['member']['access_key_id']
return response
def create_signing_cert(self, user_name=None, delegate_account=None):
params = {}
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
response = self.get_response_items('CreateSigningCertificate', params,
item_marker='certificate')
def delete_signing_cert(self, cert_id, user_name=None, delegate_account=None):
params = {'CertificateId': cert_id}
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
return self.connection.get_response('DeleteSigningCertificate', params)
def delete_all_signing_certs(self, user_name=None, delegate_account=None, verbose=False):
for cert in self.get_all_signing_certs(user_name=user_name,
delegate_account=delegate_account):
certid = cert.get('certificate_id')
if certid:
if verbose:
self.log.debug('Deleting signing cert: "{0}"'.format(cert))
self.delete_signing_cert(certid, user_name=user_name,
delegate_account=delegate_account)
else:
raise ValueError('certificate_id not found for cert dict:"{0}"'.format(cert))
def get_all_signing_certs(self, marker=None, max_items=None,
user_name=None, delegate_account=None):
params = {}
if marker:
params['Marker'] = marker
if max_items:
params['MaxItems'] = max_items
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
return self.get_response_items('ListSigningCertificates',
params, item_marker='certificates',
list_marker='Certificates')
    def get_active_id_for_cert(self, certpath, machine):
        '''
        Attempt to get the cloud's active id for a certificate at 'certpath' on
        the 'machine' filesystem. Also see is_ec2_cert_active() for validating the current
        cert in use or the body (string buffer) of a cert.
        :param certpath: string representing the certificate path on the machines filesystem
        :param machine: Machine obj which certpath exists on
        :returns :str() certificate id (if cert is found to be active) else None
        '''
        if not certpath:
            raise ValueError('No ec2 certpath provided or set for eutester obj')
        self.log.debug('Verifying cert: "{0}"...'.format(certpath))
        # Read the certificate body off the remote machine (machine.sys returns the
        # command output as a list of lines) and strip surrounding whitespace so it
        # can be compared against the bodies the cloud reports.
        body = str("\n".join(machine.sys('cat {0}'.format(certpath), verbose=False)) ).strip()
        certs = []
        if body:
            certs = self.get_all_signing_certs()
        # An exact (whitespace-stripped) body match against any registered signing
        # cert means the cert at certpath is still active; return its id.
        for cert in certs:
            if str(cert.get('certificate_body')).strip() == body:
                self.log.debug('verified certificate with id "{0}" is still valid'
                               .format(cert.get('certificate_id')))
                return cert.get('certificate_id')
        self.log.debug('Cert: "{0}" is NOT active'.format(certpath or body))
        return None
    def find_active_cert_and_key_in_dir(self, machine, dir="", recursive=True):
        '''
        Attempts to find an "active" cert and the matching key files in the provided
        directory 'dir' on the provided 'machine' via ssh.
        If recursive is enabled, will attempt a recursive search from the provided directory.
        :param dir: the base dir to search in on the machine provided
        :param machine: a Machine() obj used for ssh search commands
        :param recursive: boolean, if set will attempt to search recursively from the dir provided
        :returns dict w/ values 'certpath' and 'keypath' or {} if not found.
        '''
        ret_dict = {}
        if dir and not dir.endswith("/"):
            dir += "/"
        if recursive:
            rec = "r"
        else:
            rec = ""
        # Use remote grep to list every *.pem file that contains a certificate header.
        certfiles = machine.sys('grep "{0}" -l{1} {2}*.pem'.format('^-*BEGIN CERTIFICATE', rec, dir))
        for f in certfiles:
            # Only consider certs the cloud still reports as active, then look for
            # the matching private key in the same directory as the cert.
            if self.get_active_id_for_cert(f, machine=machine):
                dir = os.path.dirname(f)
                keypath = self.get_key_for_cert(certpath=f, keydir=dir, machine=machine)
                if keypath:
                    self.log.debug('Found existing active cert and key on clc: {0}, {1}'
                                   .format(f, keypath))
                    return {'certpath':f, 'keypath':keypath}
        return ret_dict
    def get_key_for_cert(self, certpath, keydir, machine, recursive=True):
        '''
        Attempts to find a matching key for cert at 'certpath' in the provided directory 'dir'
        on the provided 'machine'.
        If recursive is enabled, will attempt a recursive search from the provided directory.
        :param dir: the base dir to search in on the machine provided
        :param machine: a Machine() obj used for ssh search commands
        :param recursive: boolean, if set will attempt to search recursively from the dir provided
        :returns string representing the path to the key found or None if not found.
        '''
        self.log.debug('Looking for key to go with cert...')
        if keydir and not keydir.endswith("/"):
            keydir += "/"
        if recursive:
            rec = "r"
        else:
            rec = ""
        # A private key matches a cert when both share the same RSA modulus;
        # compare md5sums of the moduli rather than shipping the moduli around.
        certmodmd5 = machine.sys('openssl x509 -noout -modulus -in {0} | md5sum'
                                 .format(certpath))
        if certmodmd5:
            certmodmd5 = str(certmodmd5[0]).strip()
        else:
            return None
        # Find candidate private key PEM files via remote grep (-z treats the file
        # as one record so the pattern can span lines), then check each candidate's
        # modulus md5 against the cert's.
        keyfiles = machine.sys('grep "{0}" -lz{1} {2}*.pem'
                               .format("^\-*BEGIN RSA PRIVATE KEY.*\n.*END RSA PRIVATE KEY\-*",
                                       rec, keydir))
        for kf in keyfiles:
            keymodmd5 = machine.sys('openssl rsa -noout -modulus -in {0} | md5sum'.format(kf))
            if keymodmd5:
                keymodmd5 = str(keymodmd5[0]).strip()
                if keymodmd5 == certmodmd5:
                    self.log.debug('Found key {0} for cert {1}'.format(kf, certpath))
                    return kf
        return None
def is_ec2_cert_active(self, certbody=None):
'''
Attempts to verify if the current self.ec2_cert @ self.ec2_certpath is still active.
:param certbody
:returns the cert id if found active, otherwise returns None
'''
certbody = certbody or self.ec2_cert
if not certbody:
raise ValueError('No ec2 cert body provided or set for eutester to check for active')
if isinstance(certbody, dict):
checkbody = certbody.get('certificate_body')
if not checkbody:
raise ValueError('Invalid certbody provided, did not have "certificate body" attr')
for cert in self.get_all_signing_certs():
body = str(cert.get('certificate_body')).strip()
if body and body == str(certbody).strip():
return cert.get('certificate_id')
return None
def upload_server_cert(self, cert_name, cert_body, private_key):
self.log.debug("uploading server certificate: " + cert_name)
self.upload_server_cert(cert_name=cert_name, cert_body=cert_body,
private_key=private_key)
if cert_name not in str(self.connection.get_server_certificate(cert_name)):
raise Exception("certificate " + cert_name + " not uploaded")
def update_server_cert(self, cert_name, new_cert_name=None, new_path=None):
self.log.debug("updating server certificate: " + cert_name)
self.connection.update_server_cert(cert_name=cert_name,
new_cert_name=new_cert_name,
new_path=new_path)
if (new_cert_name and new_path) not in \
str(self.connection.get_server_certificate(new_cert_name)):
raise Exception("certificate " + cert_name + " not updated.")
def get_server_cert(self, cert_name):
self.log.debug("getting server certificate: " + cert_name)
cert = self.connection.get_server_certificate(cert_name=cert_name)
self.log.debug(cert)
return cert
def delete_server_cert(self, cert_name):
self.log.debug("deleting server certificate: " + cert_name)
self.connection.delete_server_cert(cert_name)
if (cert_name) in str(self.connection.get_all_server_certs()):
raise Exception("certificate " + cert_name + " not deleted.")
def list_server_certs(self, path_prefix='/', marker=None, max_items=None):
self.log.debug("listing server certificates")
certs = self.connection.list_server_certs(path_prefix=path_prefix,
marker=marker, max_items=max_items)
self.log.debug(certs)
return certs
def create_login_profile(self, user_name, password, delegate_account=None):
self.log.debug("Creating login profile for: " + user_name + " with password: " + password)
params = {'UserName': user_name,
'Password': password}
if delegate_account:
params['DelegateAccount'] = delegate_account
return self.connection.get_response('CreateLoginProfile', params, verb='POST')
@staticmethod
def _search_dict(dictionary, marker):
if marker in dictionary.keys():
return dictionary.get(marker)
else:
for value in dictionary.itervalues():
if isinstance(value, dict):
res = IAMops._search_dict(value, marker)
if res:
return res
return {}
def get_response_items(self, action, params, item_marker, path='/', parent=None,
verb='POST', list_marker='Set'):
if list_marker is None:
list_marker = 'Set'
resp = self.connection.get_response(action=action, params=params, path=path, parent=parent,
verb=verb, list_marker=list_marker)
return IAMops._search_dict(resp, item_marker)
def get_user_info(self, user_name=None, delegate_account=None):
params = {}
if user_name:
params['UserName'] = user_name
if delegate_account:
params['DelegateAccount'] = delegate_account
return self.get_response_items(action='GetUser', params=params,
item_marker='user', list_marker='user')
class IAMResourceNotFoundException(Exception):
    """Raised when a requested IAM resource cannot be found."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
"content_hash": "b0bd78af4d6fe7301574f7e3f4a6715b",
"timestamp": "",
"source": "github",
"line_count": 1051,
"max_line_length": 116,
"avg_line_length": 45.41769743101808,
"alnum_prop": 0.5707671680563121,
"repo_name": "nephomaniac/nephoria",
"id": "62f99b8195868a391e5711b5582e709b25e39b0c",
"size": "49202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nephoria/aws/iam/iamops.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "924331"
},
{
"name": "Shell",
"bytes": "775"
}
],
"symlink_target": ""
} |
from twisted.conch.ssh import keys, factory, common
from twisted.python import log
import primes
import os
class OpenSSHFactory(factory.SSHFactory):
    # NOTE: this module is Python 2 code (`except Exception, e` syntax).
    # Base directory containing the OpenSSH host key files (ssh_host_*_key[.pub]).
    dataRoot = '/usr/local/etc'
    moduliRoot = '/usr/local/etc' # for openbsd which puts moduli in a different
                                  # directory from keys
    def getPublicKeys(self):
        """Return a dict mapping key type (NS string) -> public key blob.

        Scans dataRoot for OpenSSH-style ssh_host_*_key.pub files; unreadable
        or malformed files are logged and skipped.
        """
        ks = {}
        for file in os.listdir(self.dataRoot):
            if file[:9] == 'ssh_host_' and file[-8:]=='_key.pub':
                try:
                    k = keys.getPublicKeyString(self.dataRoot+'/'+file)
                    t = common.getNS(k)[0]
                    ks[t] = k
                except Exception, e:
                    log.msg('bad public key file %s: %s' % (file,e))
        return ks
    def getPrivateKeys(self):
        """Return a dict mapping key object type -> private key object.

        Host keys are typically only readable by root, so effective uid/gid
        are temporarily raised to 0 and restored afterwards.
        """
        ks = {}
        euid,egid = os.geteuid(), os.getegid()
        os.setegid(0) # gain privileges
        os.seteuid(0)
        for file in os.listdir(self.dataRoot):
            if file[:9] == 'ssh_host_' and file[-4:]=='_key':
                try:
                    k = keys.getPrivateKeyObject(self.dataRoot+'/'+file)
                    t = keys.objectType(k)
                    ks[t] = k
                except Exception, e:
                    log.msg('bad private key file %s: %s' % (file, e))
        os.setegid(egid) # drop them just as quickly
        os.seteuid(euid)
        return ks
    def getPrimes(self):
        """Parse and return the Diffie-Hellman moduli file, or None if absent."""
        try:
            return primes.parseModuliFile(self.moduliRoot+'/moduli')
        except IOError:
            return None
| {
"content_hash": "04d131f285d20074afe0d7829a752573",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 80,
"avg_line_length": 36.30232558139535,
"alnum_prop": 0.5150544522741832,
"repo_name": "santisiri/popego",
"id": "e14d7e280249f16b3ac622f08fa9d5fe0d77db99",
"size": "1561",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/conch/openssh_compat/factory.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
} |
import itertools
import posixpath
from urllib.parse import urlparse
from flask import current_app, g, render_template, request, session
from markupsafe import Markup
from pytz import common_timezones, common_timezones_set
from indico.core import signals
from indico.core.config import config
from indico.modules.core.settings import core_settings
from indico.modules.legal import legal_settings
from indico.util.decorators import classproperty
from indico.util.i18n import _, get_all_locales
from indico.util.signals import values_from_signal
from indico.web.flask.templating import get_template_module
from indico.web.flask.util import url_for
from indico.web.menu import build_menu_structure
from indico.web.util import jsonify_template
def _get_timezone_display(local_tz, timezone, force=False):
if force and local_tz:
return local_tz
elif timezone == 'LOCAL':
return local_tz or config.DEFAULT_TIMEZONE
else:
return timezone
def render_header(category=None, protected_object=None, local_tz=None, force_local_tz=False):
    """Render the global page header template."""
    context = {
        'category': category,
        'top_menu_items': build_menu_structure('top-menu'),
        'protected_object': protected_object,
        'local_tz': local_tz,
        'force_local_tz': force_local_tz,
    }
    return render_template('header.html', **context)
def render_session_bar(protected_object=None, local_tz=None, force_local_tz=False):
    """Render the session bar (timezone selector, protection disclaimers, languages)."""
    disclaimers = {
        'network': legal_settings.get('network_protected_disclaimer'),
        'restricted': legal_settings.get('restricted_disclaimer')
    }
    default_tz = config.DEFAULT_TIMEZONE
    user_tz = None
    if session.user:
        user_tz = session.user.settings.get('timezone', default_tz)
        if session.timezone == 'LOCAL':
            tz_mode = 'local'
        elif session.timezone == user_tz:
            tz_mode = 'user'
        else:
            tz_mode = 'custom'
    else:
        tz_mode = 'local' if session.timezone == 'LOCAL' else 'custom'
    active_tz = _get_timezone_display(local_tz, session.timezone, force_local_tz)
    # Offer the standard timezone list, extended with the active one if it is
    # not a common timezone (so it can still be shown as selected).
    tz_choices = common_timezones
    if active_tz not in common_timezones_set:
        tz_choices = list(common_timezones) + [active_tz]
    timezone_data = {
        'disabled': force_local_tz,
        'user_tz': user_tz,
        'active_tz': active_tz,
        'tz_mode': tz_mode,
        'timezones': tz_choices,
    }
    tpl = get_template_module('_session_bar.html')
    return Markup(tpl.render_session_bar(protected_object=protected_object,
                                         protection_disclaimers=disclaimers,
                                         timezone_data=timezone_data,
                                         languages=get_all_locales()))
class WPJinjaMixin:
    """Mixin for WPs backed by Jinja templates.
    This allows you to use a single WP class and its layout, CSS,
    etc. for multiple pages in a lightweight way while still being
    able to use a subclass if more.
    To avoid collisions between blueprint and application templates,
    your blueprint template folders should have a subfolder named like
    the blueprint. To avoid writing it all the time, you can store it
    as `template_prefix` (with a trailing slash) in your WP class.
    This only applies to the indico core as plugins always use a separate
    template namespace!
    """
    # you can set `ALLOW_JSON = False` to disable sending a jsonified
    # version for XHR requests. the attribute is not set here on the class
    # because the current inheritance chain would make it impossible to
    # change it on some base classes such as `WPEventBase` as this mixin
    # is used deeper down in the hierarchy
    template_prefix = ''
    render_template_func = staticmethod(render_template)
    @classmethod
    def render_template(cls, template_name_or_list=None, *wp_args, **context):
        """Render a jinja template inside the WP.
        :param template_name_or_list: the name of the template - if unset, the
                                      `_template` attribute of the class is used.
                                      can also be a list containing multiple
                                      templates (the first existing one is used)
        :param wp_args: list of arguments to be passed to the WP's constructor
        :param context: the variables that should be available in the context of
                        the template
        """
        template = cls._prefix_template(template_name_or_list or cls._template)
        # XHR requests get a jsonified version of the rendered template unless
        # the WP opts out via `ALLOW_JSON = False`.
        if getattr(cls, 'ALLOW_JSON', True) and request.is_xhr:
            return jsonify_template(template, _render_func=cls.render_template_func, **context)
        else:
            context['_jinja_template'] = template
            return cls(g.rh, *wp_args, **context).display()
    @classmethod
    def render_string(cls, html, *wp_args):
        """Render a string inside the WP.
        :param html: a string containing html
        :param wp_args: list of arguments to be passed to the WP's constructor
        """
        return cls(g.rh, *wp_args, _html=html).display()
    @classmethod
    def _prefix_template(cls, template):
        # Prepend `template_prefix` to a template name (or to each name in a
        # list). For `blueprint:name` style names the prefix is inserted after
        # the colon so the blueprint qualifier stays first.
        if cls.template_prefix and cls.template_prefix[-1] != '/':
            raise ValueError('template_prefix needs to end with a slash')
        if isinstance(template, str):
            return cls.template_prefix + template
        else:
            templates = []
            for tpl in template:
                pos = tpl.find(':') + 1
                templates.append(tpl[:pos] + cls.template_prefix + tpl[pos:])
            return templates
    def _get_page_content(self, params):
        # Either return pre-rendered html passed via `_html` (render_string) or
        # render the jinja template stored in params by `render_template`.
        html = params.pop('_html', None)
        if html is not None:
            return html
        template = params.pop('_jinja_template')
        params['bundles'] = (current_app.manifest[x] for x in self._resolve_bundles())
        return self.render_template_func(template, **params)
class WPBundleMixin:
    """Mixin collecting the webpack bundles (JS/CSS assets) a WP needs."""
    bundles = ('exports.js', 'common-runtime.js')
    print_bundles = tuple()
    @classproperty
    @classmethod
    def additional_bundles(cls):
        """Additional bundle objects that will be included."""
        return {
            'screen': (),
            'print': ()
        }
    @classmethod
    def _resolve_bundles(cls):
        """Add up all bundles, following the MRO."""
        seen_bundles = set()
        # Walk the MRO from the most generic class down so base-class bundles
        # come first; `object` (the last MRO entry) is skipped.
        for class_ in reversed(cls.mro()[:-1]):
            attr = class_.__dict__.get('bundles', ())
            # `bundles` may be a plain tuple, a classproperty or a property on
            # any given class - unwrap it accordingly.
            if isinstance(attr, classproperty):
                attr = attr.__get__(None, class_)
            elif isinstance(attr, property):
                attr = attr.fget(cls)
            for bundle in attr:
                # A duplicate bundle would be loaded twice by the browser; fail
                # hard in debug mode so it gets caught during development.
                if config.DEBUG and bundle in seen_bundles:
                    raise Exception(f"Duplicate bundle found in {class_.__name__}: '{bundle}'")
                seen_bundles.add(bundle)
                yield bundle
    @classproperty
    @classmethod
    def page_metadata(self):
        # NOTE(review): this is a classproperty, so `self` here is actually the
        # class; renaming it to `cls` would be clearer.
        site_name = core_settings.get('site_title')
        return {
            'og': {
                'site_name': (site_name + ' (Indico)') if site_name != 'Indico' else site_name,
                'image': url_for('assets.image', filename='indico_square.png', _external=True)
            }
        }
class WPBase(WPBundleMixin):
    """Base class for WPs ('web pages') rendering the full HTML document."""
    title = ''
    #: Whether the WP is used for management (adds suffix to page title)
    MANAGEMENT = False
    def __init__(self, rh, **kwargs):
        # `rh` is the request handler creating this page; extra kwargs are kept
        # for use by subclasses/templates.
        self._rh = rh
        self._kwargs = kwargs
    def get_extra_css_files(self):
        """Return CSS urls that will be included after all other CSS."""
        return []
    @classproperty
    @classmethod
    def bundles(cls):
        # Core assets every page needs; ckeditor is skipped for static sites
        # (offline copies).
        _bundles = ('common.css', 'common.js', 'react.css', 'react.js', 'semantic-ui.js', 'semantic-ui.css',
                    'jquery.css', 'jquery.js', 'main.css', 'main.js', 'module_core.js', 'module_events.creation.js',
                    'module_attachments.js', 'outdatedbrowser.js', 'outdatedbrowser.css')
        if not g.get('static_site'):
            _bundles += ('ckeditor.js',)
        return _bundles
    def _get_head_content(self):
        """Return _additional_ content between <head></head> tags.
        Please note that <title>, <meta> and standard CSS are always included.
        Override this method to add your own, page-specific loading of
        JavaScript, CSS and other legal content for HTML <head> tag.
        """
        return ''
    def _fix_path(self, path):
        # Make a relative asset path absolute by prefixing the app's base path.
        url_path = urlparse(config.BASE_URL).path or '/'
        # append base path only if not absolute already
        # and not in 'static site' mode (has to be relative)
        if path[0] != '/' and not g.get('static_site'):
            path = posixpath.join(url_path, path)
        return path
    def _display(self, params):
        # Subclasses render the page body here.
        raise NotImplementedError
    @property
    def _extra_title_parts(self):
        # Extra strings subclasses may prepend to the page title.
        return ()
    def display(self, **params):
        """Render the complete page (title, assets, body) and return the HTML."""
        from indico.modules.admin import RHAdminBase
        from indico.modules.core.settings import core_settings, social_settings
        title_parts = [*self._extra_title_parts, self.title]
        if self.MANAGEMENT:
            title_parts.insert(0, _('Management'))
        elif isinstance(self._rh, RHAdminBase):
            title_parts.insert(0, _('Administration'))
        # Plugins may inject extra bundles into any WP via this signal.
        injected_bundles = values_from_signal(signals.plugin.inject_bundle.send(self.__class__), as_list=True,
                                              multi_value_types=list)
        custom_js = list(current_app.manifest['__custom.js'])
        custom_css = list(current_app.manifest['__custom.css'])
        css_files = list(map(self._fix_path, self.get_extra_css_files() + custom_css))
        js_files = list(map(self._fix_path, custom_js))
        body = self._display(params)
        bundles = itertools.chain((current_app.manifest[x] for x in self._resolve_bundles()
                                   if x in current_app.manifest._entries),
                                  self.additional_bundles['screen'], injected_bundles)
        print_bundles = itertools.chain((current_app.manifest[x] for x in self.print_bundles),
                                        self.additional_bundles['print'])
        return render_template('indico_base.html',
                               css_files=css_files, js_files=js_files,
                               bundles=bundles, print_bundles=print_bundles,
                               site_name=core_settings.get('site_title'),
                               social=social_settings.get_all(),
                               page_metadata=self.page_metadata,
                               page_title=' - '.join(str(x) for x in title_parts if x),
                               head_content=self._get_head_content(),
                               body=body)
class WPNewBase(WPBundleMixin, WPJinjaMixin):
    """Class-method based WP: renders a template directly without creating a WP
    instance (cf. ``WPBase.display``)."""
    title = ''
    bundles = ('outdatedbrowser.js', 'outdatedbrowser.css')
    print_bundles = tuple()
    #: Whether the WP is used for management (adds suffix to page title)
    MANAGEMENT = False
    def __init__(self, rh, **kwargs):
        # `rh` is the request handler creating this page; extra kwargs are kept
        # for use by subclasses/templates.
        self._rh = rh
        self._kwargs = kwargs
    @classmethod
    def _fix_path(cls, path):
        # Make a relative asset path absolute by prefixing the app's base path.
        url_path = urlparse(config.BASE_URL).path or '/'
        # append base path only if not absolute already
        # and not in 'static site' mode (has to be relative)
        if path[0] != '/' and not g.get('static_site'):
            path = posixpath.join(url_path, path)
        return path
    @classmethod
    def display(cls, template_name, **params):
        """Render the given template with the page chrome data and return the HTML."""
        from indico.modules.admin import RHAdminBase
        from indico.modules.core.settings import core_settings, social_settings
        title_parts = [cls.title]
        if cls.MANAGEMENT:
            title_parts.insert(0, _('Management'))
        elif isinstance(g.rh, RHAdminBase):
            title_parts.insert(0, _('Administration'))
        # Plugins may inject extra bundles into any WP via this signal.
        injected_bundles = values_from_signal(signals.plugin.inject_bundle.send(cls), as_list=True,
                                              multi_value_types=list)
        custom_js = list(current_app.manifest['__custom.js'])
        custom_css = list(current_app.manifest['__custom.css'])
        css_files = list(map(cls._fix_path, custom_css))
        js_files = list(map(cls._fix_path, custom_js))
        bundles = itertools.chain((current_app.manifest[x] for x in cls._resolve_bundles()
                                   if x in current_app.manifest._entries),
                                  cls.additional_bundles['screen'], injected_bundles)
        print_bundles = itertools.chain((current_app.manifest[x] for x in cls.print_bundles),
                                        cls.additional_bundles['print'])
        template = cls._prefix_template(template_name)
        return render_template(template,
                               css_files=css_files, js_files=js_files,
                               page_metadata=cls.page_metadata,
                               bundles=bundles, print_bundles=print_bundles,
                               site_name=core_settings.get('site_title'),
                               social=social_settings.get_all(),
                               page_title=' - '.join(str(x) for x in title_parts if x),
                               **params)
class WPDecorated(WPBase):
    """WP wrapped with the standard page decoration (header, breadcrumbs, footer)."""
    sidemenu_option = None
    def _get_header(self):
        return render_header()
    def _get_footer(self):
        return render_template('footer.html')
    def _apply_decoration(self, body):
        # Wrap the body with the header, breadcrumbs and footer markup.
        breadcrumbs = self._get_breadcrumbs()
        return '<div class="header">{}</div>\n<div class="main">{}<div>{}</div></div>\n{}'.format(
            self._get_header(), breadcrumbs, body, self._get_footer())
    def _display(self, params):
        # Merge per-call params with the kwargs given at construction time
        # (construction-time kwargs win) before rendering the body.
        return self._apply_decoration(self._get_body(params | self._kwargs))
    def _get_body(self, params):
        # Subclasses return the undecorated page body here.
        raise NotImplementedError
    def _get_breadcrumbs(self):
        return ''
class WPError(WPDecorated, WPJinjaMixin):
    """Standalone error page showing a message and a longer description."""

    def __init__(self, message, description):
        WPDecorated.__init__(self, None)
        self._message = message
        self._description = description

    def _get_body(self, params):
        context = {
            '_jinja_template': 'error.html',
            'error_message': self._message,
            'error_description': self._description,
        }
        return self._get_page_content(context)

    def getHTML(self):
        # Legacy-style accessor kept for existing callers.
        return self.display()
| {
"content_hash": "3243c8a73b36581b9b5cfdc638f3b897",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 116,
"avg_line_length": 39.764227642276424,
"alnum_prop": 0.5922442581612486,
"repo_name": "DirkHoffmann/indico",
"id": "c3b9b8d808b376b2951bbc01ee0a72a342d37122",
"size": "14887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indico/web/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33249"
},
{
"name": "HTML",
"bytes": "1398354"
},
{
"name": "JavaScript",
"bytes": "2295843"
},
{
"name": "Mako",
"bytes": "1527"
},
{
"name": "Python",
"bytes": "5426206"
},
{
"name": "SCSS",
"bytes": "496904"
},
{
"name": "Shell",
"bytes": "3877"
},
{
"name": "TeX",
"bytes": "23435"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
} |
import sys, os, re
import eclim
from util import caret_position, completion_popup, \
completion_popup_with_snippet
class CompletionProposal(object):
    """A single completion item offered to the editor popup.

    Attributes:
        name: The raw completion text as reported by eclim.
        display: Text shown in the popup (same as ``name``).
        insert: Optional snippet to insert instead of ``name``.
        type: Completion kind reported by the caller (defaults to "None").
    """

    def __init__(self, name, insert=None, type="None"):
        self.name = name
        self.display = name
        self.insert = insert
        # Bug fix: this used to assign the literal string "None",
        # silently discarding the caller-supplied ``type`` argument.
        self.type = type
def call_eclim(project, file, offset, shell=True):
    """Ask the eclim daemon for Java completions at `offset` in `file`.

    The source is synced to eclim first; returns eclim's raw output in
    the 'compact' format.
    """
    eclim.update_java_src(project, file)
    complete_cmd = ("$ECLIM -command java_complete"
                    " -p %s"
                    " -f %s"
                    " -o %i"
                    " -e utf-8"
                    " -l compact" % (project, file, offset))
    return eclim.call_eclim(complete_cmd)
def to_proposals(eclim_output):
    """Parse eclim 'compact' completion output into CompletionProposal objects.

    Returns a tuple ``(proposals, with_snippets)`` where ``with_snippets``
    is True when at least one proposal carries a snippet insert.
    """
    proposals = []
    with_snippets = False
    for line in eclim_output.split("\n"):
        if not line:
            continue
        fields = line.split("|")
        if fields[1]:
            # Simple completion: the second field is the completion text.
            proposals.append(CompletionProposal(fields[1]))
        else:
            # Method overloads: variants are packed into the fourth field.
            variants = fields[3].split("<br/>")
            for variant in variants:
                arg_list = re.search(r'\((.*)\)', variant).group(1)
                # Last whitespace-separated token of each parameter is its name.
                arg_names = [arg.split(" ")[-1] for arg in arg_list.split(", ")]
                placeholders = ["${%i:%s}" % (pos, arg)
                                for pos, arg in enumerate(arg_names, 1)]
                proposals.append(CompletionProposal(variant, ", ".join(placeholders)))
                with_snippets = True
    return proposals, with_snippets
def completion_command():
    """Fetch completions for the current TextMate caret and show a popup."""
    project, source_file = eclim.get_context()
    # We cannot read the code from TM via stdin, as it would not have the
    # correct line endings when editing Windows files (it would just have \n),
    # so the file is read from disk instead.
    with open(os.environ["TM_FILEPATH"]) as fh:
        contents = fh.read()
    offset = caret_position(contents)
    raw_output = call_eclim(project, source_file, offset)
    proposals, with_snippets = to_proposals(raw_output)
    popup = completion_popup_with_snippet if with_snippets else completion_popup
    popup(proposals)
if __name__ == '__main__':
completion_command() | {
"content_hash": "eea864afee152b79da2b63b854b416cc",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 80,
"avg_line_length": 33.69565217391305,
"alnum_prop": 0.5234408602150538,
"repo_name": "JulianEberius/Eclim.tmbundle",
"id": "fcd2973970d1f8b5e4862d58cdc99628945d6793",
"size": "2347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Support/bin/completion.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18666"
}
],
"symlink_target": ""
} |
from azure.identity import DefaultAzureCredential
from azure.mgmt.maintenance import MaintenanceManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-maintenance
# USAGE
python apply_updates_create_or_update.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Run the ApplyUpdates create-or-update sample against a fixed subscription."""
    credential = DefaultAzureCredential()
    client = MaintenanceManagementClient(
        credential=credential,
        subscription_id="5b4b650e-28b9-4790-b3ab-ddbd88d727c4",
    )
    result = client.apply_updates.create_or_update(
        resource_group_name="examplerg",
        provider_name="Microsoft.Compute",
        resource_type="virtualMachineScaleSets",
        resource_name="smdtest1",
    )
    print(result)
# x-ms-original-file: specification/maintenance/resource-manager/Microsoft.Maintenance/preview/2022-07-01-preview/examples/ApplyUpdates_CreateOrUpdate.json
# Run the sample only when this file is executed directly as a script.
if __name__ == "__main__":
    main()
| {
"content_hash": "6ccb2363dbe411f02e470ae3be0d4ecf",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 155,
"avg_line_length": 35.371428571428574,
"alnum_prop": 0.7374798061389337,
"repo_name": "Azure/azure-sdk-for-python",
"id": "ab11de511ab1430946fdd2dff13dbe8bfddad10e",
"size": "1706",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/maintenance/azure-mgmt-maintenance/generated_samples/apply_updates_create_or_update.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
"""
Django settings for sw_tts project.
Generated by 'django-admin startproject' using Django 1.9.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# NOTE(review): left empty — must list the real host names before deploying
# with DEBUG = False.
ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = [
    # Django contrib apps.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Third-party: Django REST Framework and its swagger UI.
    'rest_framework_swagger',
    'rest_framework',
    # Project apps.
    'tts',
    'core',
    # python-social-auth Django integration.
    'social.apps.django_app.default',
]
# Request-processing middleware, applied in order (pre-Django-1.10
# MIDDLEWARE_CLASSES style, matching the Django 1.9 target of this project).
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# Module holding the root URL configuration.
ROOT_URLCONF = 'sw_tts.urls'
# Template engine configuration.
# https://docs.djangoproject.com/en/1.9/ref/settings/#templates
#
# Fix: the original list contained
# 'django.contrib.auth.context_processors.auth' twice, and also listed the
# deprecated 'django.core.context_processors.request' alias alongside the
# modern 'django.template.context_processors.request' (they are the same
# processor in Django 1.9). The duplicates are removed.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # python-social-auth context processors.
                'social.apps.django_app.context_processors.backends',
                'social.apps.django_app.context_processors.login_redirect',
            ],
        },
    },
]
WSGI_APPLICATION = 'sw_tts.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Where python-social-auth redirects the user after a successful login.
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
# (validators intentionally left disabled in this sample settings file)
# AUTH_PASSWORD_VALIDATORS = [
#     {
#         'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
#     },
# ]

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    ('frontend', os.path.join(BASE_DIR, 'frontend')),
)
# NOTE(review): Django language codes are conventionally lower case
# ('ru-ru'); confirm 'ru-Ru' resolves as intended.
LANGUAGE_CODE = 'ru-Ru'

# Directory where generated TTS sound files are written.
OUTPUT_DIR = os.path.join(BASE_DIR, 'generated')
MAX_SOUND_LIFE = 60*60*12  # seconds of sound file storing

# SECURITY NOTE(review): real-looking VK application credentials are committed
# below. Even in a sample settings file these should be rotated and supplied
# via environment variables rather than stored in source control.
SOCIAL_AUTH_VK_OPENAPI_ID = '5596606'
SOCIAL_AUTH_VK_APP_SECRET = 'jBx8nnH7pzevq7UA3hH0'
SOCIAL_AUTH_VK_APP_USER_MODE = 2
# The same app id/secret exposed under the various names expected by the
# different VK auth backends.
VK_APP_ID = '5596606'
VKONTAKTE_APP_ID = VK_APP_ID
VK_API_SECRET = 'jBx8nnH7pzevq7UA3hH0'
VKONTAKTE_APP_SECRET = VK_API_SECRET
SOCIAL_AUTH_VK_OAUTH2_KEY = '5596606'
SOCIAL_AUTH_VK_OAUTH2_SECRET = 'jBx8nnH7pzevq7UA3hH0'

# Authentication backends tried in order by django.contrib.auth.
AUTHENTICATION_BACKENDS = (
    'social.backends.facebook.FacebookOAuth2',
    'social.backends.facebook.FacebookAppOAuth2',
    'social.backends.google.GoogleOAuth2',
    'social.backends.twitter.TwitterOAuth',
    'social.backends.vk.VKOAuth2',
    'social.backends.vk.VKontakteOpenAPI',
    'social.backends.yandex.YaruOAuth2',
    'social.backends.yandex.YandexOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)

SOCIAL_AUTH_URL_NAMESPACE = 'social'
# SOCIAL_AUTH_STORAGE = 'social.apps.django_app.me.models.DjangoStorage'
| {
"content_hash": "44584adcbbb06d7383aab5353d5fd742",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 93,
"avg_line_length": 27.56969696969697,
"alnum_prop": 0.6871839964827434,
"repo_name": "g10k/sw_tts",
"id": "f423912a0d20990d378662dcb60208384982dc3e",
"size": "4549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sw_tts/local_settings.sample.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5549"
},
{
"name": "JavaScript",
"bytes": "2077"
},
{
"name": "Python",
"bytes": "11739"
}
],
"symlink_target": ""
} |
"""Strip static qualifier from C functions to enable unit testing."""
import os
import re
import sys
import textwrap
import gflags
import makani
gflags.DEFINE_string('autogen_root', makani.HOME,
'Root of the source tree for the output files.')
gflags.DEFINE_string('input_source', None,
'Full path to input source file.')
gflags.MarkFlagAsRequired('input_source')
gflags.DEFINE_string('output_source', None,
'Full path to output source file.')
gflags.DEFINE_string('output_header', None,
'Full path to output header file.')
gflags.DEFINE_string('static_prefix', '',
'Function prefix to prepend to static functions.')
gflags.DEFINE_string('stub', '',
'List of functions to rename.')
gflags.DEFINE_string('stub_prefix', 'Stubbed',
'Function prefix to prepend to stub functions.')
FLAGS = gflags.FLAGS
def main(argv):
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
print '{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], FLAGS)
sys.exit(1)
# Read input file.
with open(FLAGS.input_source, 'r') as input_source:
source_data = ''
if FLAGS.output_header:
header_path = os.path.relpath(FLAGS.output_header,
start=FLAGS.autogen_root)
source_data += '#include "%s"\n' % header_path
else:
header_path = ''
source_data += input_source.read()
static_funcs = []
# Rewrite stub function prototypes and definitions.
for stub_func in FLAGS.stub.split(','):
if stub_func:
stub_re = re.compile(
r'^(static\s+)?(.*\s)(%s)(\s*\([^\{;]*\))(\s*[\{;])' % stub_func,
re.MULTILINE)
static_funcs += [s for s in stub_re.findall(source_data) if s[0]]
source_data = stub_re.sub(
r'\2{0}\3\4;\n\2{0}\3\4\5'.format(FLAGS.stub_prefix), source_data)
# Rewrite static function prototypes and definitions.
static_re = re.compile(r'^(static\s+)(.*\s)([a-zA-Z][a-zA-Z_0-9]*)'
r'(\s*\([^\{;]*\))(\s*[\{;])', re.MULTILINE)
static_funcs += static_re.findall(source_data)
source_data = static_re.sub(
r'\2{0}\3\4\5'.format(FLAGS.static_prefix), source_data)
# Rewrite static function calls.
for in_groups in static_funcs:
in_func = in_groups[2]
source_data = re.sub(r'(\W)(%s)(\s*\()' % in_func,
r'\1%s\2\3' % FLAGS.static_prefix, source_data,
re.MULTILINE)
# Generate header data.
header_guard = header_path.upper().replace(os.path.sep, '_')
header_guard = header_guard.replace(os.path.extsep, '_') + '_'
header_orig = os.path.relpath(FLAGS.input_source, start=makani.HOME)
header_orig = re.sub(r'\.c.*$', '.h', header_orig)
header_data = textwrap.dedent("""
#ifndef {0}
#define {0}
#include "{1}"
""")[1:].format(header_guard, header_orig)
for in_groups in static_funcs:
out_return = in_groups[1]
out_func = FLAGS.static_prefix + in_groups[2]
out_args = in_groups[3]
header_data += '{}{}{};\n'.format(out_return, out_func, out_args)
header_data += textwrap.dedent("""
#endif // {}
""")[:-1].format(header_guard)
# Write output source file.
if FLAGS.output_source:
with open(FLAGS.output_source, 'w') as output_source:
output_source.write(source_data)
# Write output header file.
if FLAGS.output_header:
with open(FLAGS.output_header, 'w') as output_header:
output_header.write(header_data)
# Script entry point.
if __name__ == '__main__':
  main(sys.argv)
| {
"content_hash": "139b9067d7edaf572c76827e0652edf1",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 78,
"avg_line_length": 34.5607476635514,
"alnum_prop": 0.58085451595457,
"repo_name": "google/makani",
"id": "a55e69536354b0d9582c9223213550445f29ab31",
"size": "4306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python/strip_static.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "119408"
},
{
"name": "C",
"bytes": "20174258"
},
{
"name": "C++",
"bytes": "30512322"
},
{
"name": "CSS",
"bytes": "8921"
},
{
"name": "Dockerfile",
"bytes": "1381"
},
{
"name": "Emacs Lisp",
"bytes": "1134"
},
{
"name": "HTML",
"bytes": "65745"
},
{
"name": "Java",
"bytes": "1558475"
},
{
"name": "JavaScript",
"bytes": "130727"
},
{
"name": "Jupyter Notebook",
"bytes": "1154728"
},
{
"name": "MATLAB",
"bytes": "1026162"
},
{
"name": "Makefile",
"bytes": "2798"
},
{
"name": "Objective-C",
"bytes": "62972"
},
{
"name": "Perl",
"bytes": "870724"
},
{
"name": "Python",
"bytes": "5552781"
},
{
"name": "RPC",
"bytes": "195736"
},
{
"name": "Roff",
"bytes": "2567875"
},
{
"name": "SWIG",
"bytes": "8663"
},
{
"name": "Shell",
"bytes": "297941"
},
{
"name": "Starlark",
"bytes": "462998"
},
{
"name": "Vim Script",
"bytes": "2281"
},
{
"name": "XC",
"bytes": "50398"
},
{
"name": "XS",
"bytes": "49289"
}
],
"symlink_target": ""
} |
__doc__ = """
>>> from django.conf import settings
>>> settings.ROOT_URLCONF = 'yui_loader.tests.urls'
>>> from django.test import Client
>>> c = Client()
>>> print c.get('/').content.replace(settings.YUI_INCLUDE_BASE, 'YUI_INCLUDE_BASE/')
<html>
<head>
<title> test-yui-include.html </title>
<link rel="stylesheet" type="text/css" href="YUI_INCLUDE_BASE/reset/reset-min.css" />
<link rel="stylesheet" type="text/css" href="YUI_INCLUDE_BASE/fonts/fonts-min.css" />
<link rel="stylesheet" type="text/css" href="YUI_INCLUDE_BASE/grids/grids-min.css" />
<link rel="stylesheet" type="text/css" href="YUI_INCLUDE_BASE/base/base-min.css" />
<script type="text/javascript" src="YUI_INCLUDE_BASE/yahoo/yahoo-debug.js"></script>
<script type="text/javascript" src="YUI_INCLUDE_BASE/dom/dom-debug.js"></script>
<script type="text/javascript" src="YUI_INCLUDE_BASE/selector/selector-debug.js"></script>
<script type="text/javascript" src="YUI_INCLUDE_BASE/event/event-debug.js"></script>
<script type="text/javascript" src="YUI_INCLUDE_BASE/element/element-debug.js"></script>
<BLANKLINE>
</head>
<body>
<BLANKLINE>
<BLANKLINE>
Haa! Haa!
</body>
</html>
<BLANKLINE>
"""
# Running the module directly makes the package importable, points Django at
# the test settings and executes the doctest above.
if __name__ == '__main__':
    import sys, os, doctest
    from os.path import join, dirname
    # NOTE(review): `join` and `dirname` are imported but the code below uses
    # os.path.join/os.path.dirname directly — presumably leftovers.
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
    os.environ['DJANGO_SETTINGS_MODULE'] = 'yui_loader.tests.settings'
    doctest.testmod()
| {
"content_hash": "14bacd466384e3da6c6de5258eb3c510",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 94,
"avg_line_length": 42.21052631578947,
"alnum_prop": 0.6408977556109726,
"repo_name": "akaihola/django-yui-loader",
"id": "5e5c9f76d09b98e7a388ea0e4bc259561842d905",
"size": "1604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yui_loader/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "67627"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply: drop the 'arrival' and 'depature' columns from reserved_booking."""
    # 'depature' [sic] mirrors the misspelled field name being removed.
    for column in ('arrival', 'depature'):
        db.delete_column(u'reserved_booking', column)
def backwards(self, orm):
    """Revert: re-create the nullable 'arrival'/'depature' datetime columns."""
    for column in ('arrival', 'depature'):
        db.add_column(u'reserved_booking', column,
                      self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
                      keep_default=False)
models = {
u'account.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['account.UserProfile']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'middle_names': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '256', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'easy_maps.address': {
'Meta': {'object_name': 'Address'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'computed_address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'geocode_error': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'reserved.booking': {
'Meta': {'object_name': 'Booking'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['reserved.Company']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'customers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['reserved.Customer']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'})
},
u'reserved.company': {
'Meta': {'object_name': 'Company'},
'addresses': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['reserved.Location']", 'symmetrical': 'False'}),
'contact': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['account.UserProfile']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'reserved.customer': {
'Meta': {'object_name': 'Customer'},
'address': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['reserved.Location']", 'symmetrical': 'False'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'reserved.event': {
'Meta': {'object_name': 'Event'},
'bookings': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['reserved.Booking']", 'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['reserved.Company']", 'null': 'True', 'blank': 'True'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'venues': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['reserved.Venue']", 'symmetrical': 'False'})
},
u'reserved.location': {
'Meta': {'object_name': 'Location', '_ormbases': [u'easy_maps.Address']},
u'address_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['easy_maps.Address']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'reserved.telephone': {
'Meta': {'object_name': 'Telephone'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'number': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['reserved.Customer']"})
},
u'reserved.venue': {
'Meta': {'object_name': 'Venue'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['reserved.Location']"}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['reserved.Company']"}),
'contact': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['reserved.Customer']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['reserved'] | {
"content_hash": "15f1d4f43a148daed2c29cce8cb11ac8",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 195,
"avg_line_length": 72.47014925373135,
"alnum_prop": 0.5547317475028318,
"repo_name": "hellsgate1001/bookit",
"id": "5d00034f96bfd0029811e491b32628ca20c4c5f0",
"size": "9735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reserved/migrations/0012_auto__del_field_booking_arrival__del_field_booking_depature.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "429638"
},
{
"name": "C++",
"bytes": "15261"
},
{
"name": "CSS",
"bytes": "258246"
},
{
"name": "JavaScript",
"bytes": "202757"
},
{
"name": "PowerShell",
"bytes": "8104"
},
{
"name": "Python",
"bytes": "8879326"
},
{
"name": "Shell",
"bytes": "1197"
}
],
"symlink_target": ""
} |
"""Use serial protocol of Acer projector to obtain state of the projector."""
import logging
import re
import serial
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_FILENAME,
CONF_NAME,
CONF_TIMEOUT,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Configuration key for the serial write timeout.
CONF_WRITE_TIMEOUT = "write_timeout"

DEFAULT_NAME = "Acer Projector"
DEFAULT_TIMEOUT = 1
DEFAULT_WRITE_TIMEOUT = 1

# Names used both as state-attribute keys and as lookup keys into CMD_DICT.
ECO_MODE = "ECO Mode"
ICON = "mdi:projector"
INPUT_SOURCE = "Input Source"
LAMP = "Lamp"
LAMP_HOURS = "Lamp Hours"
MODEL = "Model"

# Commands known to the projector (serial protocol strings sent verbatim).
CMD_DICT = {
    LAMP: "* 0 Lamp ?\r",
    LAMP_HOURS: "* 0 Lamp\r",
    INPUT_SOURCE: "* 0 Src ?\r",
    ECO_MODE: "* 0 IR 052\r",
    MODEL: "* 0 IR 035\r",
    STATE_ON: "* 0 IR 001\r",
    STATE_OFF: "* 0 IR 002\r",
}

# Platform configuration schema: the serial device path is required,
# everything else falls back to the defaults above.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_FILENAME): cv.isdevice,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(
            CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT
        ): cv.positive_int,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Connect with serial port and return Acer Projector."""
    projector = AcerSwitch(
        config[CONF_FILENAME],
        config[CONF_NAME],
        config[CONF_TIMEOUT],
        config[CONF_WRITE_TIMEOUT],
    )
    # Second argument requests an immediate state update after adding.
    add_entities([projector], True)
class AcerSwitch(SwitchEntity):
    """Represents an Acer Projector as a switch.

    The projector is driven over a serial line using the command strings in
    CMD_DICT; on/off state is tracked as a boolean in ``self._state``.
    """

    def __init__(self, serial_port, name, timeout, write_timeout, **kwargs):
        """Init of the Acer projector."""
        self.ser = serial.Serial(
            port=serial_port, timeout=timeout, write_timeout=write_timeout, **kwargs
        )
        self._serial_port = serial_port
        self._name = name
        self._state = False
        self._available = False
        self._attributes = {
            LAMP_HOURS: STATE_UNKNOWN,
            INPUT_SOURCE: STATE_UNKNOWN,
            ECO_MODE: STATE_UNKNOWN,
        }

    def _write_read(self, msg):
        """Write to the projector and read the return."""
        ret = ""
        # Sometimes the projector won't answer for no reason or the projector
        # was disconnected during runtime.
        # This way the projector can be reconnected and will still work
        try:
            if not self.ser.is_open:
                self.ser.open()
            msg = msg.encode("utf-8")
            self.ser.write(msg)
            # Size is an experience value there is no real limit.
            # AFAIK there is no limit and no end character so we will usually
            # need to wait for timeout
            ret = self.ser.read_until(size=20).decode("utf-8")
        except serial.SerialException:
            _LOGGER.error("Problem communicating with %s", self._serial_port)
        self.ser.close()
        return ret

    def _write_read_format(self, msg):
        """Write msg, obtain answer and format output."""
        # answers are formatted as ***\answer\r***
        awns = self._write_read(msg)
        match = re.search(r"\r(.+)\r", awns)
        if match:
            return match.group(1)
        return STATE_UNKNOWN

    @property
    def available(self):
        """Return if projector is available."""
        return self._available

    @property
    def name(self):
        """Return name of the projector."""
        return self._name

    @property
    def is_on(self):
        """Return if the projector is turned on."""
        return self._state

    @property
    def state_attributes(self):
        """Return state attributes."""
        return self._attributes

    def update(self):
        """Get the latest state from the projector."""
        msg = CMD_DICT[LAMP]
        awns = self._write_read_format(msg)
        if awns == "Lamp 1":
            self._state = True
            self._available = True
        elif awns == "Lamp 0":
            self._state = False
            self._available = True
        else:
            self._available = False

        for key in self._attributes:
            msg = CMD_DICT.get(key)
            if msg:
                awns = self._write_read_format(msg)
                self._attributes[key] = awns

    def turn_on(self, **kwargs):
        """Turn the projector on."""
        msg = CMD_DICT[STATE_ON]
        self._write_read(msg)
        # Bug fix: _state is a boolean (see update()/is_on). Storing the
        # string constants STATE_ON/STATE_OFF made is_on return "on"/"off",
        # and bool("off") is True, so the switch appeared to stay on even
        # after being turned off.
        self._state = True

    def turn_off(self, **kwargs):
        """Turn the projector off."""
        msg = CMD_DICT[STATE_OFF]
        self._write_read(msg)
        self._state = False
| {
"content_hash": "2792c15e8d0344618eec55ee2d140ddc",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 84,
"avg_line_length": 28.86904761904762,
"alnum_prop": 0.5940206185567011,
"repo_name": "partofthething/home-assistant",
"id": "101f7cbd6155b0edb24e164ece4d4c17bbaed58a",
"size": "4850",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/acer_projector/switch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
:copyright: © 2012-2013 by the SaltStack Team, see AUTHORS for more details
:license: Apache 2.0, see LICENSE for more details.
tests.integration.shell.master
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import python libs
import os
import yaml
import signal
import shutil
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class MasterTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
    """Shell tests for the salt-master binary."""

    _call_binary_ = 'salt-master'

    def test_issue_7754(self):
        """A `file:` log URL must not be treated as a relative path.

        Starting the master with `log_file: file:///dev/log/...` used to
        create a literal 'file:' directory under the config dir; assert it
        no longer does.
        """
        old_cwd = os.getcwd()
        config_dir = os.path.join(integration.TMP, 'issue-7754')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir)
        os.chdir(config_dir)

        config_file_name = 'master'
        pid_path = os.path.join(config_dir, '{0}.pid'.format(config_file_name))
        # Fix: use context managers so the config file handles are closed
        # deterministically (they were previously leaked).
        with open(self.get_config_file_path(config_file_name), 'r') as fp_:
            config = yaml.load(fp_.read())
        config['root_dir'] = config_dir
        config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
        # Shift the ports to avoid colliding with the already-running master.
        config['ret_port'] = config['ret_port'] + 10
        config['publish_port'] = config['publish_port'] + 10
        with open(os.path.join(config_dir, config_file_name), 'w') as fp_:
            fp_.write(yaml.dump(config, default_flow_style=False))

        self.run_script(
            self._call_binary_,
            '--config-dir {0} --pid-file {1} -l debug'.format(
                config_dir,
                pid_path
            ),
            timeout=5,
            catch_stderr=True
        )

        # Now kill it if still running
        if os.path.exists(pid_path):
            try:
                with open(pid_path) as fp_:
                    master_pid = int(fp_.read())
                os.kill(master_pid, signal.SIGKILL)
            except OSError:
                pass
        try:
            # The buggy behavior created a directory literally named 'file:'.
            self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
        finally:
            os.chdir(old_cwd)
            if os.path.isdir(config_dir):
                shutil.rmtree(config_dir)
# Allow running this test module directly (outside the full suite runner).
if __name__ == '__main__':
    from integration import run_tests
    run_tests(MasterTest)
| {
"content_hash": "2cdb08ef5ada7182bf34c6f0be5da8bd",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 28.5974025974026,
"alnum_prop": 0.5703905540417802,
"repo_name": "victorywang80/Maintenance",
"id": "adcc4c00eadd32041ffe0bd7dc7669580fce8c49",
"size": "2227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saltstack/src/tests/integration/shell/master.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "160954"
},
{
"name": "JavaScript",
"bytes": "1"
},
{
"name": "Python",
"bytes": "4522522"
},
{
"name": "Scheme",
"bytes": "7488"
},
{
"name": "Shell",
"bytes": "14653"
}
],
"symlink_target": ""
} |
from __future__ import division, absolute_import
import time
import pygame
import events
import consts
import levels
from frames.utils import *
class MainMenuFrame(object):
    """Top-level menu frame: game title plus START / LEVEL SELECT / OPTIONS."""

    def __init__(self):
        self.name = 'main_menu'
        self.title_font = pygame.font.SysFont('Segoe UI', 72)
        self.title_surface = self.title_font.render("A M O E B A", True, (255, 255, 255))
        # All buttons share the same width/height and horizontal centre;
        # only the vertical position and target frame differ.
        centre_x = consts.SCREEN_SIZE[0]/2
        self.start_button = Button(
            (centre_x, 250), (400, 60), "START", "game", levels.level_1)
        self.levels_button = Button(
            (centre_x, 350), (400, 60), "LEVEL SELECT", "level_select")
        self.options_button = Button(
            (centre_x, 450), (400, 60), "OPTIONS", "options")

    def start(self, events_manager):
        """Register mouse handling for every button with *events_manager*."""
        self.events_manager = events_manager
        buttons = (self.start_button, self.levels_button, self.options_button)
        # Hover (MOUSEMOTION) handlers first, then click handlers, each
        # in button order.
        for event_type in (pygame.MOUSEMOTION, pygame.MOUSEBUTTONDOWN):
            for button in buttons:
                self.events_manager.attach(self.name, event_type, button)

    def update(self):
        """Nothing animates on the menu; no per-frame state."""
        pass

    def draw(self, screen):
        """Paint the title and the three buttons onto *screen*."""
        screen.fill((0, 0, 0))
        self._draw_title(screen)
        for button in (self.start_button, self.levels_button, self.options_button):
            button.render(screen)

    def suspend(self):
        """No resources to release when the frame is paused."""
        pass

    def end(self):
        """No teardown required."""
        pass

    def _draw_title(self, screen):
        """Blit the title surface horizontally centred, 50px from the top."""
        width, height = self.title_surface.get_size()
        x, y = consts.SCREEN_SIZE[0] / 2 - width/2, 50
        screen.blit(self.title_surface, (x, y))
| {
"content_hash": "7a32f42a0283d149514346d8c9f67733",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 89,
"avg_line_length": 29.293333333333333,
"alnum_prop": 0.5493855257168867,
"repo_name": "Michael0x2a/Amoeba",
"id": "04ef7868562bd61b8264c51b5d27cda6aee597a2",
"size": "2220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "amoeba/frames/main_menu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2948"
}
],
"symlink_target": ""
} |
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30_rc2 import CreatedDateV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc2 import LastModifiedDateV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.source_v30_rc2 import SourceV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.url_v30_rc2 import UrlV30Rc2 # noqa: F401,E501
class PersonExternalIdentifierV30Rc2(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute name -> swagger model/primitive type name.
    swagger_types = {
        'created_date': 'CreatedDateV30Rc2',
        'last_modified_date': 'LastModifiedDateV30Rc2',
        'source': 'SourceV30Rc2',
        'external_id_type': 'str',
        'external_id_value': 'str',
        'external_id_url': 'UrlV30Rc2',
        'external_id_relationship': 'str',
        'visibility': 'str',
        'path': 'str',
        'put_code': 'int',
        'display_index': 'int'
    }
    # Python attribute name -> JSON key used on the wire.
    attribute_map = {
        'created_date': 'created-date',
        'last_modified_date': 'last-modified-date',
        'source': 'source',
        'external_id_type': 'external-id-type',
        'external_id_value': 'external-id-value',
        'external_id_url': 'external-id-url',
        'external_id_relationship': 'external-id-relationship',
        'visibility': 'visibility',
        'path': 'path',
        'put_code': 'put-code',
        'display_index': 'display-index'
    }

    def __init__(self, created_date=None, last_modified_date=None, source=None, external_id_type=None, external_id_value=None, external_id_url=None, external_id_relationship=None, visibility=None, path=None, put_code=None, display_index=None):  # noqa: E501
        """PersonExternalIdentifierV30Rc2 - a model defined in Swagger"""  # noqa: E501
        self._created_date = None
        self._last_modified_date = None
        self._source = None
        self._external_id_type = None
        self._external_id_value = None
        self._external_id_url = None
        self._external_id_relationship = None
        self._visibility = None
        self._path = None
        self._put_code = None
        self._display_index = None
        self.discriminator = None
        # Optional fields are only assigned when provided; external_id_type
        # and external_id_value are assigned unconditionally because their
        # setters treat None as invalid (required fields).
        if created_date is not None:
            self.created_date = created_date
        if last_modified_date is not None:
            self.last_modified_date = last_modified_date
        if source is not None:
            self.source = source
        self.external_id_type = external_id_type
        self.external_id_value = external_id_value
        if external_id_url is not None:
            self.external_id_url = external_id_url
        if external_id_relationship is not None:
            self.external_id_relationship = external_id_relationship
        if visibility is not None:
            self.visibility = visibility
        if path is not None:
            self.path = path
        if put_code is not None:
            self.put_code = put_code
        if display_index is not None:
            self.display_index = display_index

    @property
    def created_date(self):
        """Gets the created_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The created_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: CreatedDateV30Rc2
        """
        return self._created_date

    @created_date.setter
    def created_date(self, created_date):
        """Sets the created_date of this PersonExternalIdentifierV30Rc2.

        :param created_date: The created_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: CreatedDateV30Rc2
        """
        self._created_date = created_date

    @property
    def last_modified_date(self):
        """Gets the last_modified_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The last_modified_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: LastModifiedDateV30Rc2
        """
        return self._last_modified_date

    @last_modified_date.setter
    def last_modified_date(self, last_modified_date):
        """Sets the last_modified_date of this PersonExternalIdentifierV30Rc2.

        :param last_modified_date: The last_modified_date of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: LastModifiedDateV30Rc2
        """
        self._last_modified_date = last_modified_date

    @property
    def source(self):
        """Gets the source of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The source of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: SourceV30Rc2
        """
        return self._source

    @source.setter
    def source(self, source):
        """Sets the source of this PersonExternalIdentifierV30Rc2.

        :param source: The source of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: SourceV30Rc2
        """
        self._source = source

    @property
    def external_id_type(self):
        """Gets the external_id_type of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The external_id_type of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: str
        """
        return self._external_id_type

    @external_id_type.setter
    def external_id_type(self, external_id_type):
        """Sets the external_id_type of this PersonExternalIdentifierV30Rc2.

        :param external_id_type: The external_id_type of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: str
        """
        # Required field: None is rejected.
        if external_id_type is None:
            raise ValueError("Invalid value for `external_id_type`, must not be `None`")  # noqa: E501
        self._external_id_type = external_id_type

    @property
    def external_id_value(self):
        """Gets the external_id_value of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The external_id_value of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: str
        """
        return self._external_id_value

    @external_id_value.setter
    def external_id_value(self, external_id_value):
        """Sets the external_id_value of this PersonExternalIdentifierV30Rc2.

        :param external_id_value: The external_id_value of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: str
        """
        # Required field: None is rejected.
        if external_id_value is None:
            raise ValueError("Invalid value for `external_id_value`, must not be `None`")  # noqa: E501
        self._external_id_value = external_id_value

    @property
    def external_id_url(self):
        """Gets the external_id_url of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The external_id_url of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: UrlV30Rc2
        """
        return self._external_id_url

    @external_id_url.setter
    def external_id_url(self, external_id_url):
        """Sets the external_id_url of this PersonExternalIdentifierV30Rc2.

        :param external_id_url: The external_id_url of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: UrlV30Rc2
        """
        self._external_id_url = external_id_url

    @property
    def external_id_relationship(self):
        """Gets the external_id_relationship of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The external_id_relationship of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: str
        """
        return self._external_id_relationship

    @external_id_relationship.setter
    def external_id_relationship(self, external_id_relationship):
        """Sets the external_id_relationship of this PersonExternalIdentifierV30Rc2.

        :param external_id_relationship: The external_id_relationship of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: str
        """
        # Enum-valued field: only the spec-defined relationships are allowed.
        allowed_values = ["PART_OF", "SELF", "VERSION_OF"]  # noqa: E501
        if external_id_relationship not in allowed_values:
            raise ValueError(
                "Invalid value for `external_id_relationship` ({0}), must be one of {1}"  # noqa: E501
                .format(external_id_relationship, allowed_values)
            )
        self._external_id_relationship = external_id_relationship

    @property
    def visibility(self):
        """Gets the visibility of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The visibility of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: str
        """
        return self._visibility

    @visibility.setter
    def visibility(self, visibility):
        """Sets the visibility of this PersonExternalIdentifierV30Rc2.

        :param visibility: The visibility of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: str
        """
        # Enum-valued field: only the spec-defined visibilities are allowed.
        allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE"]  # noqa: E501
        if visibility not in allowed_values:
            raise ValueError(
                "Invalid value for `visibility` ({0}), must be one of {1}"  # noqa: E501
                .format(visibility, allowed_values)
            )
        self._visibility = visibility

    @property
    def path(self):
        """Gets the path of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The path of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Sets the path of this PersonExternalIdentifierV30Rc2.

        :param path: The path of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: str
        """
        self._path = path

    @property
    def put_code(self):
        """Gets the put_code of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The put_code of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: int
        """
        return self._put_code

    @put_code.setter
    def put_code(self, put_code):
        """Sets the put_code of this PersonExternalIdentifierV30Rc2.

        :param put_code: The put_code of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: int
        """
        self._put_code = put_code

    @property
    def display_index(self):
        """Gets the display_index of this PersonExternalIdentifierV30Rc2.  # noqa: E501

        :return: The display_index of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :rtype: int
        """
        return self._display_index

    @display_index.setter
    def display_index(self, display_index):
        """Sets the display_index of this PersonExternalIdentifierV30Rc2.

        :param display_index: The display_index of this PersonExternalIdentifierV30Rc2.  # noqa: E501
        :type: int
        """
        self._display_index = display_index

    def to_dict(self):
        """Returns the model properties as a dict"""
        # Recurses into nested models (anything with to_dict), lists and
        # dicts of models; everything else is passed through unchanged.
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(PersonExternalIdentifierV30Rc2, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PersonExternalIdentifierV30Rc2):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| {
"content_hash": "cc68a2e38e1123c29658c14cc190f5e3",
"timestamp": "",
"source": "github",
"line_count": 387,
"max_line_length": 257,
"avg_line_length": 33.48837209302326,
"alnum_prop": 0.6198302469135802,
"repo_name": "Royal-Society-of-New-Zealand/NZ-ORCID-Hub",
"id": "771ff443fda284d6b972998f04451b012c934da9",
"size": "12977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "orcid_api_v3/models/person_external_identifier_v30_rc2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20266"
},
{
"name": "Dockerfile",
"bytes": "3303"
},
{
"name": "HTML",
"bytes": "239338"
},
{
"name": "JavaScript",
"bytes": "2240"
},
{
"name": "Makefile",
"bytes": "600"
},
{
"name": "PLpgSQL",
"bytes": "2581"
},
{
"name": "Python",
"bytes": "7935510"
},
{
"name": "Shell",
"bytes": "12088"
}
],
"symlink_target": ""
} |
from .jolokia_session import JolokiaSession
from .remote_jmx_queue import RemoteJmxQueue
class RemoteJmxBroker(object):
    """Thin JMX facade for an ActiveMQ broker reached through Jolokia."""

    @staticmethod
    def connect(host, port, broker_name):
        """Open a Jolokia session to host:port and wrap *broker_name*."""
        session = JolokiaSession.connect(host, port)
        return RemoteJmxBroker(session, broker_name)

    def __init__(self, jolokia_session, broker_name):
        self.jolokia_session = jolokia_session
        self.broker_name = broker_name

    def add_queue(self, queue_name):
        """Create *queue_name* on the broker and return a queue handle."""
        # Invoke the broker MBean's addQueue operation over Jolokia.
        request = {
            'type': 'exec',
            'mbean': 'org.apache.activemq:type=Broker,brokerName={}'.format(self.broker_name),
            'operation': 'addQueue',
            'arguments': [queue_name]
        }
        self.jolokia_session.request(request)
        return RemoteJmxQueue(self.jolokia_session, self.broker_name, queue_name)
| {
"content_hash": "fc76214e192d6e5f5ec230cd2f42cee4",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 88,
"avg_line_length": 36.041666666666664,
"alnum_prop": 0.6520231213872832,
"repo_name": "julianghionoiu/tdl-client-python",
"id": "e15485d65566877f45e9d2da3f0680025101903b",
"size": "865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/utils/jmx/broker/remote_jmx_broker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "45519"
},
{
"name": "Shell",
"bytes": "1812"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.