hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
be0bf10b31fef414b421f81e9b0dc7210fd37eed | 5,413 | py | Python | src/core/utils.py | thetartan/tartan-database | 55adcebed4c68c717fbf714d7619da95da6a70db | ["MIT"] | 7 | 2016-11-11T06:49:44.000Z | 2021-10-13T03:45:46.000Z | src/core/utils.py | kravets-levko/tartan-database | 55adcebed4c68c717fbf714d7619da95da6a70db | ["MIT"] | null | null | null | src/core/utils.py | kravets-levko/tartan-database | 55adcebed4c68c717fbf714d7619da95da6a70db | ["MIT"] | 1 | 2018-04-27T08:42:48.000Z | 2018-04-27T08:42:48.000Z |
import re
import random
import string
from datetime import datetime
from PIL import ImageColor
from HTMLParser import HTMLParser
html = HTMLParser()
BOM = '\xEF\xBB\xBF'
re_words = re.compile('[a-z]{3,}', re.IGNORECASE)
stop_words = ['the', 'for', 'and']  # Common three-letter words to drop (shorter words are already excluded by re_words)
remap_dictionary = {
'schools': 'school',
'artefact': 'artifact',
'assoc': 'association',
'regiment': 'regimental',
'univ': 'universal',
'sports': 'sport',
'weaver': 'weavers',
'restrticted': 'restricted',
'malay': 'malaysian',
'indan': 'indian',
'germany': 'german',
'distrtict': 'district',
'caanadian': 'canadian',
'commemorarive': 'commemorative',
'comemmorative': 'commemorative',
'commemmorative': 'commemorative',
'com': 'commemorative',
'comm': 'commemorative',
'commem': 'commemorative',
'coprorate': 'corporate',
'corparate': 'corporate',
'corpoarate': 'corporate',
'corpoate': 'corporate',
'corppoate': 'corporate',
'corpporate': 'corporate',
'corprate': 'corporate',
'fashin': 'fashion',
'dashion': 'fashion',
'portrair': 'portrait',
'portrat': 'portrait',
'peronal': 'personal',
'perposnal': 'personal',
'personnal': 'personal',
'pipeband': 'pipers',
'pipes': 'pipers',
'pipe': 'pipers',
'uncategorised': 'other',
'unidentfied': 'other',
'unidentieid': 'other',
'unidentified': 'other',
'unidientified': 'other',
'unknown': 'other',
'unnamed': 'other',
'misc': 'other',
'new': 'other',
'non': 'other',
'not': 'other',
}
change_case = {
'uae': 'UAE',
}
def now(fmt='%Y/%m/%d %H:%M:%S'):
return datetime.now().strftime(fmt)
def cleanup(value):
value = re.sub(
'(<!--.*?-->|<[^>]*>)', '', value,
flags=re.UNICODE | re.DOTALL
)
return re.sub('\s+', ' ',
html.unescape(value).strip(),
flags=re.UNICODE)
def remap_word(word):
while True:
new = remap_dictionary.get(word, None)
if new is None:
break
word = new
return word
def extract_words(value):
words = re_words.findall(value.lower())
words.reverse()
if (len(words) > 0) and (words[0] == 'tartan'):
del words[0]
return filter(len, [remap_word(x) for x in words if x not in stop_words])
allowed_categories = [
# Administrative
'city', 'county', 'district', 'state', 'country',
# Category
'ancient', 'artifact', 'commemorative', 'corporate', 'dance', 'design',
'dress', 'fancy', 'fashion', 'general', 'hunting', 'plaid', 'portrait',
'universal', 'gathering',
# Activity and organizations
'band', 'club', 'national', 'international', 'regimental', 'royal',
'school', 'trade', 'sport', 'university', 'weavers', 'academy',
'association',
# Person
'clan', 'family', 'name', 'personal',
]
def parse_category_from_name(name, delimiter='; '):
words = extract_words(name)
result = []
if (len(words) > 0) and (words[0] not in allowed_categories):
del words[0]
for word in words:
if word in allowed_categories:
result.append(change_case.get(word, word.title()))
else:
break
result.reverse()
result = sorted(list(set(result)))
return delimiter.join(result)
def parse_category(value, delimiter='; '):
result = map(
lambda v: change_case.get(v, v.title()),
sorted(list(set(extract_words(value))))
)
return delimiter.join(result)
def commonprefix(l):
    # Unlike os.path.commonprefix, this compares path component-wise, so it always
    # returns a prefix made of whole components: e.g. for ['a/b/c/x.txt', 'a/b/d/y.txt']
    # it returns 'a/b', whereas os.path.commonprefix would return the raw string 'a/b/'.
cp = []
ls = [p.split('/') for p in l]
ml = min( len(p) for p in ls )
for i in range(ml):
s = set( p[i] for p in ls )
if len(s) != 1:
break
cp.append(s.pop())
return '/'.join(cp)
def html_adjust(color, factor):
return '#' + ''.join(map(
lambda v: '%02x' % min(int(v * factor), 255),
ImageColor.getrgb(color)
)).upper()
def html_mix(*colors):
return '#' + ''.join(map(
lambda v: '%02x' % int(sum(v) / len(v)),
zip(*map(ImageColor.getrgb, colors))
)).upper()
def adjust_color(name, palette, default='%%'):
adjust = {'L': 1.0 + 1.0/3.0, 'D': 1.0 - 1.0/3.0}
result = palette.get(name, '')
if result != '':
return result
prefix = name[0].upper()
if prefix in ['L', 'D']:
name = name[1:]
result = palette.get(name, '')
if result == '':
result = html_mix(*[x for x in map(
lambda c: palette.get(c, ''),
name
) if x != '']) + default
if result != '':
if prefix in adjust:
return html_adjust(result, adjust[prefix])
return result
return default
def random_item(items):
return random.choice(items)
def random_string(chars, min=6, max=20):
    # Pick a random length between min and max (inclusive), then draw that many characters.
    return ''.join(random.choice(chars) for _ in range(random.randint(min, max)))
def random_letters(min=6, max=20):
return random_string(string.ascii_letters, min, max)
def random_lower(min=6, max=20):
return random_string(string.ascii_lowercase, min, max)
def random_upper(min=6, max=20):
return random_string(string.ascii_uppercase, min, max)
def random_digits(min=3, max=10):
return random_string(string.digits, min, max)
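# Illustrative self-check (not part of the original module): a minimal sketch of how the
# helpers above compose. The sample name and palette below are hypothetical.
if __name__ == '__main__':
    # The trailing recognised category word is title-cased and returned.
    print(parse_category_from_name('Canadian Centennial Commemorative'))  # -> Commemorative
    # 'LB' is not in the palette, so it falls back to 'B' lightened via html_adjust().
    print(adjust_color('LB', {'B': '#0000CC'}))  # -> #0000FF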
| 23.741228 | 77 | 0.582856 |
ffb82f516256cddf4a8400aeaba11f5337783512 | 2,792 | py | Python | pyleecan/Methods/Machine/LamSlot/plot.py | mxgnsr/pyleecan | 2b0a04e4ae67c073a91362ab42332908fef53bdd | ["Apache-2.0"] | 2 | 2019-06-08T15:04:39.000Z | 2020-09-07T13:32:22.000Z | pyleecan/Methods/Machine/LamSlot/plot.py | mxgnsr/pyleecan | 2b0a04e4ae67c073a91362ab42332908fef53bdd | ["Apache-2.0"] | null | null | null | pyleecan/Methods/Machine/LamSlot/plot.py | mxgnsr/pyleecan | 2b0a04e4ae67c073a91362ab42332908fef53bdd | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
from matplotlib.patches import Patch
from matplotlib.pyplot import axis, legend
from ....Functions.init_fig import init_fig
from ....definitions import config_dict
ROTOR_COLOR = config_dict["PLOT"]["COLOR_DICT"]["ROTOR_COLOR"]
STATOR_COLOR = config_dict["PLOT"]["COLOR_DICT"]["STATOR_COLOR"]
def plot(
self,
fig=None,
is_lam_only=False,
sym=1,
alpha=0,
delta=0,
is_edge_only=False,
is_display=True,
is_show=True,
):
"""Plot the Lamination with empty Slots in a matplotlib fig
Parameters
----------
self : LamSlot
A LamSlot object
fig :
if None, open a new fig and plot, else add to the
current one (Default value = None)
is_lam_only: bool
True to plot only the lamination (No effect for LamSlot)
sym : int
Symmetry factor (1= full machine, 2= half of the machine...)
alpha : float
Angle for rotation [rad]
delta : complex
Complex value for translation
is_edge_only: bool
To plot transparent Patches
is_display : bool
False to return the patches
is_show : bool
To call show at the end of the method
Returns
-------
patches : list
List of Patches
"""
if self.is_stator:
lam_color = STATOR_COLOR
else:
lam_color = ROTOR_COLOR
(fig, axes, patch_leg, label_leg) = init_fig(fig)
surf_list = self.build_geometry(sym=sym, alpha=alpha, delta=delta)
patches = list()
for surf in surf_list:
if "Lamination" in surf.label:
patches.extend(surf.get_patches(color=lam_color, is_edge_only=is_edge_only))
else:
patches.extend(surf.get_patches(is_edge_only=is_edge_only))
# Display the result
if is_display:
(fig, axes, patch_leg, label_leg) = init_fig(fig)
axes.set_xlabel("(m)")
axes.set_ylabel("(m)")
for patch in patches:
axes.add_patch(patch)
# Axis Setup
axis("equal")
# The Lamination is centered in the figure
Lim = self.Rext * 1.5
axes.set_xlim(-Lim, Lim)
axes.set_ylim(-Lim, Lim)
# Add the legend
if not is_edge_only:
if self.is_stator and "Stator" not in label_leg:
patch_leg.append(Patch(color=STATOR_COLOR))
label_leg.append("Stator")
axes.set_title("Stator with empty slot")
elif not self.is_stator and "Rotor" not in label_leg:
patch_leg.append(Patch(color=ROTOR_COLOR))
label_leg.append("Rotor")
axes.set_title("Rotor with empty slot")
legend(patch_leg, label_leg)
if is_show:
fig.show()
else:
return patches
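# Minimal usage sketch (illustrative, not part of the original file): assumes `lam` is a
# LamSlot instance defined elsewhere; the calls mirror the parameters documented above
# rather than a verified example from the project.
# lam.plot()                                     # draw the full lamination in a new figure
# patches = lam.plot(sym=4, is_display=False)    # quarter of the machine, return patches only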
| 28.20202 | 88 | 0.608883 |
67679af3da5395d8d373e77f6b8d7ed4304fb553 | 19,450 | py | Python | tests/test_resolver.py | timgates42/pex | bee95d65fcf7ad2beb75a923890de6b47f726b7d | ["Apache-2.0"] | null | null | null | tests/test_resolver.py | timgates42/pex | bee95d65fcf7ad2beb75a923890de6b47f726b7d | ["Apache-2.0"] | null | null | null | tests/test_resolver.py | timgates42/pex | bee95d65fcf7ad2beb75a923890de6b47f726b7d | ["Apache-2.0"] | 1 | 2021-03-08T21:42:57.000Z | 2021-03-08T21:42:57.000Z |
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import functools
import os
import subprocess
from collections import defaultdict
from textwrap import dedent
import pkginfo
import pytest
from pex.common import safe_copy, safe_mkdtemp, temporary_dir
from pex.compatibility import nested
from pex.distribution_target import DistributionTarget
from pex.interpreter import PythonInterpreter, spawn_python_job
from pex.resolver import (
InstalledDistribution,
IntegrityError,
LocalDistribution,
Unsatisfiable,
download,
install,
resolve_multi,
)
from pex.testing import (
IS_LINUX,
IS_PYPY,
PY27,
PY35,
PY36,
PY_VER,
built_wheel,
ensure_python_interpreter,
make_project,
make_source_dir,
)
from pex.third_party.pkg_resources import Requirement
from pex.typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any, List, Union
def create_sdist(**kwargs):
# type: (**Any) -> str
dist_dir = safe_mkdtemp()
with make_project(**kwargs) as project_dir:
cmd = ["setup.py", "sdist", "--dist-dir={}".format(dist_dir)]
spawn_python_job(
args=cmd,
cwd=project_dir,
expose=["setuptools"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
).communicate()
dists = os.listdir(dist_dir)
assert len(dists) == 1
return os.path.join(dist_dir, dists[0])
def build_wheel(**kwargs):
# type: (**Any) -> str
with built_wheel(**kwargs) as whl:
return whl
def local_resolve_multi(*args, **kwargs):
# type: (*Any, **Any) -> List[InstalledDistribution]
# Skip remote lookups.
kwargs["indexes"] = []
return list(resolve_multi(*args, **kwargs))
def test_empty_resolve():
# type: () -> None
empty_resolve_multi = local_resolve_multi([])
assert empty_resolve_multi == []
with temporary_dir() as td:
empty_resolve_multi = local_resolve_multi([], cache=td)
assert empty_resolve_multi == []
def test_simple_local_resolve():
# type: () -> None
project_wheel = build_wheel(name="project")
with temporary_dir() as td:
safe_copy(project_wheel, os.path.join(td, os.path.basename(project_wheel)))
resolved_dists = local_resolve_multi(["project"], find_links=[td])
assert len(resolved_dists) == 1
def test_resolve_cache():
# type: () -> None
project_wheel = build_wheel(name="project")
with nested(temporary_dir(), temporary_dir()) as (td, cache):
safe_copy(project_wheel, os.path.join(td, os.path.basename(project_wheel)))
# Without a cache, each resolve should be isolated, but otherwise identical.
resolved_dists1 = local_resolve_multi(["project"], find_links=[td])
resolved_dists2 = local_resolve_multi(["project"], find_links=[td])
assert resolved_dists1 != resolved_dists2
assert len(resolved_dists1) == 1
assert len(resolved_dists2) == 1
assert resolved_dists1[0].requirement == resolved_dists2[0].requirement
assert resolved_dists1[0].distribution.location != resolved_dists2[0].distribution.location
# With a cache, each resolve should be identical.
resolved_dists3 = local_resolve_multi(["project"], find_links=[td], cache=cache)
resolved_dists4 = local_resolve_multi(["project"], find_links=[td], cache=cache)
assert resolved_dists1 != resolved_dists3
assert resolved_dists2 != resolved_dists3
assert resolved_dists3 == resolved_dists4
def test_diamond_local_resolve_cached():
# type: () -> None
# This exercises the issue described here: https://github.com/pantsbuild/pex/issues/120
project1_wheel = build_wheel(name="project1", install_reqs=["project2<1.0.0"])
project2_wheel = build_wheel(name="project2")
with temporary_dir() as dd:
for wheel in (project1_wheel, project2_wheel):
safe_copy(wheel, os.path.join(dd, os.path.basename(wheel)))
with temporary_dir() as cd:
resolved_dists = local_resolve_multi(
["project1", "project2"], find_links=[dd], cache=cd
)
assert len(resolved_dists) == 2
def test_cached_dependency_pinned_unpinned_resolution_multi_run():
# type: () -> None
# This exercises the issue described here: https://github.com/pantsbuild/pex/issues/178
project1_0_0 = build_wheel(name="project", version="1.0.0")
project1_1_0 = build_wheel(name="project", version="1.1.0")
with temporary_dir() as td:
for wheel in (project1_0_0, project1_1_0):
safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
with temporary_dir() as cd:
# First run, pinning 1.0.0 in the cache
resolved_dists = local_resolve_multi(["project==1.0.0"], find_links=[td], cache=cd)
assert len(resolved_dists) == 1
assert resolved_dists[0].distribution.version == "1.0.0"
            # Second run: the unbounded 'project' req will find the 1.0.0 in the cache, but
            # should also return the SourcePackages found in td.
resolved_dists = local_resolve_multi(["project"], find_links=[td], cache=cd)
assert len(resolved_dists) == 1
assert resolved_dists[0].distribution.version == "1.1.0"
def test_intransitive():
# type: () -> None
foo1_0 = build_wheel(name="foo", version="1.0.0")
# The nonexistent req ensures that we are actually not acting transitively (as that would fail).
bar1_0 = build_wheel(name="bar", version="1.0.0", install_reqs=["nonexistent==1.0.0"])
with temporary_dir() as td:
for wheel in (foo1_0, bar1_0):
safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
with temporary_dir() as cd:
resolved_dists = local_resolve_multi(
["foo", "bar"], find_links=[td], cache=cd, transitive=False
)
assert len(resolved_dists) == 2
def test_resolve_prereleases():
# type: () -> None
stable_dep = build_wheel(name="dep", version="2.0.0")
prerelease_dep = build_wheel(name="dep", version="3.0.0rc3")
with temporary_dir() as td:
for wheel in (stable_dep, prerelease_dep):
safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
def assert_resolve(expected_version, **resolve_kwargs):
resolved_dists = local_resolve_multi(["dep>=1,<4"], find_links=[td], **resolve_kwargs)
assert 1 == len(resolved_dists)
resolved_dist = resolved_dists[0]
assert expected_version == resolved_dist.distribution.version
assert_resolve("2.0.0")
assert_resolve("2.0.0", allow_prereleases=False)
assert_resolve("3.0.0rc3", allow_prereleases=True)
def _parse_requirement(req):
# type: (Union[str, Requirement]) -> Requirement
if isinstance(req, Requirement):
req = str(req)
return Requirement.parse(req)
def test_resolve_extra_setup_py():
# type: () -> None
with make_source_dir(
name="project1", version="1.0.0", extras_require={"foo": ["project2"]}
) as project1_dir:
project2_wheel = build_wheel(name="project2", version="2.0.0")
with temporary_dir() as td:
safe_copy(project2_wheel, os.path.join(td, os.path.basename(project2_wheel)))
resolved_dists = local_resolve_multi(["{}[foo]".format(project1_dir)], find_links=[td])
assert {_parse_requirement(req) for req in ("project1==1.0.0", "project2==2.0.0")} == {
_parse_requirement(resolved_dist.requirement) for resolved_dist in resolved_dists
}
def test_resolve_extra_wheel():
# type: () -> None
project1_wheel = build_wheel(
name="project1", version="1.0.0", extras_require={"foo": ["project2"]}
)
project2_wheel = build_wheel(name="project2", version="2.0.0")
with temporary_dir() as td:
for wheel in (project1_wheel, project2_wheel):
safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
resolved_dists = local_resolve_multi(["project1[foo]"], find_links=[td])
assert {_parse_requirement(req) for req in ("project1==1.0.0", "project2==2.0.0")} == {
_parse_requirement(resolved_dist.requirement) for resolved_dist in resolved_dists
}
def resolve_wheel_names(**kwargs):
# type: (**Any) -> List[str]
return [
os.path.basename(resolved_distribution.distribution.location)
for resolved_distribution in resolve_multi(**kwargs)
]
def resolve_p537_wheel_names(**kwargs):
# type: (**Any) -> List[str]
return resolve_wheel_names(requirements=["p537==1.0.4"], transitive=False, **kwargs)
@pytest.fixture(scope="module")
def p537_resolve_cache():
# type: () -> str
return safe_mkdtemp()
@pytest.mark.skipif(
PY_VER < (3, 5) or IS_PYPY, reason="The p537 distribution only builds for CPython 3.5+"
)
def test_resolve_current_platform(p537_resolve_cache):
# type: (str) -> None
resolve_current = functools.partial(
resolve_p537_wheel_names, cache=p537_resolve_cache, platforms=["current"]
)
other_python_version = PY36 if PY_VER == (3, 5) else PY35
other_python = PythonInterpreter.from_binary(ensure_python_interpreter(other_python_version))
current_python = PythonInterpreter.get()
resolved_other = resolve_current(interpreters=[other_python])
resolved_current = resolve_current()
assert 1 == len(resolved_other)
assert 1 == len(resolved_current)
assert resolved_other != resolved_current
assert resolved_current == resolve_current(interpreters=[current_python])
assert resolved_current == resolve_current(interpreters=[current_python, current_python])
# Here we have 2 local interpreters satisfying current but with different platforms and thus
# different dists for 2 total dists.
assert 2 == len(resolve_current(interpreters=[current_python, other_python]))
@pytest.mark.skipif(
PY_VER < (3, 5) or IS_PYPY, reason="The p537 distribution only builds for CPython 3.5+"
)
def test_resolve_current_and_foreign_platforms(p537_resolve_cache):
# type: (str) -> None
foreign_platform = "macosx-10.13-x86_64-cp-37-m" if IS_LINUX else "manylinux1_x86_64-cp-37-m"
resolve_current_and_foreign = functools.partial(
resolve_p537_wheel_names, cache=p537_resolve_cache, platforms=["current", foreign_platform]
)
assert 2 == len(resolve_current_and_foreign())
other_python_version = PY36 if PY_VER == (3, 5) else PY35
other_python = PythonInterpreter.from_binary(ensure_python_interpreter(other_python_version))
current_python = PythonInterpreter.get()
assert 2 == len(resolve_current_and_foreign(interpreters=[current_python]))
assert 2 == len(resolve_current_and_foreign(interpreters=[other_python]))
assert 2 == len(resolve_current_and_foreign(interpreters=[current_python, current_python]))
# Here we have 2 local interpreters, satisfying current, but with different platforms and thus
# different dists and then the foreign platform for 3 total dists.
assert 3 == len(resolve_current_and_foreign(interpreters=[current_python, other_python]))
def test_resolve_foreign_abi3():
# type: () -> None
# For version 2.8, cryptography publishes the following abi3 wheels for linux and macosx:
# cryptography-2.8-cp34-abi3-macosx_10_6_intel.whl
# cryptography-2.8-cp34-abi3-manylinux1_x86_64.whl
# cryptography-2.8-cp34-abi3-manylinux2010_x86_64.whl
    cryptography_resolve_cache = safe_mkdtemp()
foreign_ver = "37" if PY_VER == (3, 6) else "36"
resolve_cryptography_wheel_names = functools.partial(
resolve_wheel_names,
requirements=["cryptography==2.8"],
platforms=[
"linux_x86_64-cp-{}-m".format(foreign_ver),
"macosx_10.11_x86_64-cp-{}-m".format(foreign_ver),
],
transitive=False,
build=False,
        cache=cryptography_resolve_cache,
)
wheel_names = resolve_cryptography_wheel_names(manylinux="manylinux2014")
assert {
"cryptography-2.8-cp34-abi3-manylinux2010_x86_64.whl",
"cryptography-2.8-cp34-abi3-macosx_10_6_intel.whl",
} == set(wheel_names)
wheel_names = resolve_cryptography_wheel_names(manylinux="manylinux2010")
assert {
"cryptography-2.8-cp34-abi3-manylinux2010_x86_64.whl",
"cryptography-2.8-cp34-abi3-macosx_10_6_intel.whl",
} == set(wheel_names)
wheel_names = resolve_cryptography_wheel_names(manylinux="manylinux1")
assert {
"cryptography-2.8-cp34-abi3-manylinux1_x86_64.whl",
"cryptography-2.8-cp34-abi3-macosx_10_6_intel.whl",
} == set(wheel_names)
def test_issues_851():
# type: () -> None
# Previously, the PY36 resolve would fail post-resolution checks for configparser, pathlib2 and
# contextlib2 which are only required for python_version<3.
def resolve_pytest(python_version, pytest_version):
interpreter = PythonInterpreter.from_binary(ensure_python_interpreter(python_version))
resolved_dists = resolve_multi(
interpreters=[interpreter], requirements=["pytest=={}".format(pytest_version)]
)
project_to_version = {rd.requirement.key: rd.distribution.version for rd in resolved_dists}
assert project_to_version["pytest"] == pytest_version
return project_to_version
resolved_project_to_version = resolve_pytest(python_version=PY36, pytest_version="5.3.4")
assert "importlib-metadata" in resolved_project_to_version
assert "configparser" not in resolved_project_to_version
assert "pathlib2" not in resolved_project_to_version
assert "contextlib2" not in resolved_project_to_version
resolved_project_to_version = resolve_pytest(python_version=PY27, pytest_version="4.6.9")
assert "importlib-metadata" in resolved_project_to_version
assert "configparser" in resolved_project_to_version
assert "pathlib2" in resolved_project_to_version
assert "contextlib2" in resolved_project_to_version
def test_issues_892():
# type: () -> None
python27 = ensure_python_interpreter(PY27)
program = dedent(
"""\
from __future__ import print_function
import os
import sys
# This puts python3.6 stdlib on PYTHONPATH.
os.environ['PYTHONPATH'] = os.pathsep.join(sys.path)
from pex import resolver
from pex.interpreter import PythonInterpreter
python27 = PythonInterpreter.from_binary({python27!r})
result = resolver.resolve(requirements=['packaging==19.2'], interpreter=python27)
print('Resolved: {{}}'.format(result))
""".format(
python27=python27
)
)
python36 = ensure_python_interpreter(PY36)
cmd, process = PythonInterpreter.from_binary(python36).open_process(
args=["-c", program], stderr=subprocess.PIPE
)
_, stderr = process.communicate()
assert process.returncode == 0, dedent(
"""
Command {cmd} failed with {returncode}.
STDERR
======
{stderr}
""".format(
cmd=cmd, returncode=process.returncode, stderr=stderr.decode("utf8")
)
)
def test_download():
# type: () -> None
project1_sdist = create_sdist(
name="project1", version="1.0.0", extras_require={"foo": ["project2"]}
)
project2_wheel = build_wheel(
name="project2",
version="2.0.0",
# This is the last version of setuptools compatible with Python 2.7.
install_reqs=["setuptools==44.1.0"],
)
downloaded_by_target = defaultdict(list)
for local_distribution in download(
requirements=["{}[foo]".format(project1_sdist)],
find_links=[os.path.dirname(project2_wheel)],
):
distribution = pkginfo.get_metadata(local_distribution.path)
downloaded_by_target[local_distribution.target].append(distribution)
assert 1 == len(downloaded_by_target)
target, distributions = downloaded_by_target.popitem()
assert DistributionTarget.current() == target
distributions_by_name = {distribution.name: distribution for distribution in distributions}
assert 3 == len(distributions_by_name)
def assert_dist(project_name, dist_type, version):
dist = distributions_by_name[project_name]
assert dist_type is type(dist)
assert version == dist.version
assert_dist("project1", pkginfo.SDist, "1.0.0")
assert_dist("project2", pkginfo.Wheel, "2.0.0")
assert_dist("setuptools", pkginfo.Wheel, "44.1.0")
def test_install():
# type: () -> None
project1_sdist = create_sdist(name="project1", version="1.0.0")
project2_wheel = build_wheel(name="project2", version="2.0.0")
installed_by_target = defaultdict(list)
for installed_distribution in install(
[LocalDistribution.create(path=dist) for dist in (project1_sdist, project2_wheel)]
):
installed_by_target[installed_distribution.target].append(
installed_distribution.distribution
)
assert 1 == len(installed_by_target)
target, distributions = installed_by_target.popitem()
assert DistributionTarget.current() == target
distributions_by_name = {distribution.key: distribution for distribution in distributions}
assert 2 == len(distributions_by_name)
assert "1.0.0" == distributions_by_name["project1"].version
assert "2.0.0" == distributions_by_name["project2"].version
assert 2 == len(
{distribution.location for distribution in distributions}
), "Expected installed distributions to have independent chroot paths."
def test_install_unsatisfiable():
# type: () -> None
project1_sdist = create_sdist(name="project1", version="1.0.0")
project2_wheel = build_wheel(name="project2", version="2.0.0", install_reqs=["project1==1.0.1"])
local_distributions = [
LocalDistribution.create(path=dist) for dist in (project1_sdist, project2_wheel)
]
assert 2 == len(install(local_distributions, ignore_errors=True))
with pytest.raises(Unsatisfiable):
install(local_distributions, ignore_errors=False)
def test_install_invalid_local_distribution():
# type: () -> None
project1_sdist = create_sdist(name="project1", version="1.0.0")
valid_local_sdist = LocalDistribution.create(project1_sdist)
assert 1 == len(install([valid_local_sdist]))
with pytest.raises(IntegrityError):
install([LocalDistribution.create(project1_sdist, fingerprint="mismatch")])
project1_wheel = build_wheel(name="project1", version="1.0.0")
with pytest.raises(IntegrityError):
install(
[LocalDistribution.create(project1_wheel, fingerprint=valid_local_sdist.fingerprint)]
)
def test_resolve_arbitrary_equality_issues_940():
# type: () -> None
dist = create_sdist(
name="foo",
version="1.0.2-fba4511",
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*",
)
resolved_distributions = local_resolve_multi(requirements=[dist])
assert len(resolved_distributions) == 1
requirement = resolved_distributions[0].requirement
assert [("===", "1.0.2-fba4511")] == requirement.specs
assert requirement.marker is None
| 37.047619 | 104 | 0.687661 |
c47f864a8fdf2d1085e6e38323ffa1e7fbdf00e7 | 4,223 | py | Python | aim/db/migration/alembic_migrations/versions/aceb1ac13668_vmm_policy.py | gaurav-dalvi/aci-integration-module | 5d31f65f4fca3e0322d6003e7736ca14aa7ec72d | ["Apache-2.0"] | 7 | 2017-11-20T12:45:36.000Z | 2021-11-14T12:52:48.000Z | aim/db/migration/alembic_migrations/versions/aceb1ac13668_vmm_policy.py | gaurav-dalvi/aci-integration-module | 5d31f65f4fca3e0322d6003e7736ca14aa7ec72d | ["Apache-2.0"] | 92 | 2016-03-30T04:48:18.000Z | 2022-01-19T21:25:27.000Z | aim/db/migration/alembic_migrations/versions/aceb1ac13668_vmm_policy.py | gaurav-dalvi/aci-integration-module | 5d31f65f4fca3e0322d6003e7736ca14aa7ec72d | ["Apache-2.0"] | 11 | 2017-12-23T15:55:24.000Z | 2021-08-03T12:59:33.000Z |
# Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Create tables for VMM Policies.
Revision ID: aceb1ac13668
Revises: 7838968744ce
Create Date: 2016-08-11 17:59:18.910872
"""
# revision identifiers, used by Alembic.
revision = 'aceb1ac13668'
down_revision = '7838968744ce'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from aim import aim_manager
from aim.api import resource
from aim import context
from aim.db import api
def upgrade():
op.create_table(
'aim_vmm_policies',
sa.Column('type', sa.String(64), nullable=False),
sa.Column('aim_id', sa.Integer, autoincrement=True),
sa.Column('display_name', sa.String(256), nullable=False, default=''),
sa.Column('monitored', sa.Boolean, nullable=False, default=False),
sa.PrimaryKeyConstraint('aim_id'))
session = api.get_session(expire_on_commit=True)
old_vmm_table = sa.Table('aim_vmm_domains', sa.MetaData(),
sa.Column('type', sa.String(64), nullable=False),
sa.Column('name', sa.String(64), nullable=False))
old_phys_table = sa.Table('aim_physical_domains', sa.MetaData(),
sa.Column('name', sa.String(64), nullable=False))
mgr = aim_manager.AimManager()
ctx = context.AimContext(db_session=session)
new_vmms = []
new_phys = []
with session.begin(subtransactions=True):
for vmm in session.query(old_vmm_table).all():
new_vmms.append(resource.VMMDomain(type=vmm.type, name=vmm.name,
monitored=True))
for phys in session.query(old_phys_table).all():
new_phys.append(resource.PhysicalDomain(name=phys.name,
monitored=True))
op.drop_table('aim_vmm_domains')
op.drop_table('aim_physical_domains')
op.create_table(
'aim_vmm_domains',
sa.Column('type', sa.String(64), nullable=False),
sa.Column('name', sa.String(64), nullable=False),
sa.Column('aim_id', sa.Integer, autoincrement=True),
sa.Column('display_name', sa.String(256), nullable=False, default=''),
sa.Column('monitored', sa.Boolean, nullable=False, default=False),
sa.Column('enforcement_pref', sa.Enum('sw', 'hw', 'unknown')),
sa.Column('mode', sa.Enum('default', 'n1kv', 'unknown', 'ovs',
'k8s')),
sa.Column('mcast_address', sa.String(64)),
sa.Column('encap_mode', sa.Enum('unknown', 'vlan', 'vxlan')),
sa.Column('pref_encap_mode', sa.Enum('unspecified', 'vlan',
'vxlan')),
sa.Column('vlan_pool_name', sa.String(64)),
sa.Column('vlan_pool_type', sa.Enum('static', 'dynamic')),
sa.Column('mcast_addr_pool_name', sa.String(64)),
sa.PrimaryKeyConstraint('aim_id'),
sa.UniqueConstraint('type', 'name',
name='uniq_aim_vmm_domains_identity'),
sa.Index('idx_aim_vmm_domains_identity', 'type', 'name'))
op.create_table(
'aim_physical_domains',
sa.Column('name', sa.String(64), nullable=False),
sa.Column('aim_id', sa.Integer, autoincrement=True),
sa.Column('display_name', sa.String(256), nullable=False, default=''),
sa.Column('monitored', sa.Boolean, nullable=False, default=False),
sa.PrimaryKeyConstraint('aim_id'))
with session.begin(subtransactions=True):
for obj in new_vmms + new_phys:
mgr.create(ctx, obj)
def downgrade():
pass
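# Illustrative note (not part of the original migration): this revision is applied through the
# standard Alembic CLI, e.g. `alembic upgrade aceb1ac13668`, and downgrade() is deliberately a no-op.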
| 38.743119 | 79 | 0.630358 |
5298f60db8615c9179ae2e42b544ffe640694805 | 1,701 | py | Python | src/graphdb_builder/databases/parsers/mutationDsParser.py | enryH/CKG | 020e444b42cbb2e3d8c7ebd55e6f4ace5468f2f0 | ["MIT"] | null | null | null | src/graphdb_builder/databases/parsers/mutationDsParser.py | enryH/CKG | 020e444b42cbb2e3d8c7ebd55e6f4ace5468f2f0 | ["MIT"] | null | null | null | src/graphdb_builder/databases/parsers/mutationDsParser.py | enryH/CKG | 020e444b42cbb2e3d8c7ebd55e6f4ace5468f2f0 | ["MIT"] | null | null | null |
import os.path
import re
from graphdb_builder import builder_utils
############################
# IntAct - MutationDs #
############################
def parser(databases_directory, download=True):
relationships = set()
config = builder_utils.get_config(config_name="mutationDsConfig.yml", data_type='databases')
header = config['header']
output_file_name = "mutation_curated_affects_interaction_with.tsv"
regex = r":(\w+)\("
url = config['mutations_url']
directory = os.path.join(databases_directory, "MutationDs")
builder_utils.checkDirectory(directory)
file_name = os.path.join(directory, url.split('/')[-1])
if download:
builder_utils.downloadDB(url, directory)
with open(file_name, 'r') as mf:
first = True
for line in mf:
if first:
first = False
continue
data = line.rstrip("\r\n").split("\t")
if len(data) > 12:
internal_id = data[0]
                pvariant = '_'.join(data[1].split(':'))
effect = data[5]
organism = data[10]
interaction = data[11]
evidence = data[12]
if organism.startswith("9606"):
matches = re.finditer(regex, interaction)
for matchNum, match in enumerate(matches, start=1):
interactor = match.group(1)
relationships.add((pvariant, interactor, "CURATED_AFFECTS_INTERACTION_WITH", effect, interaction, evidence, internal_id, "Intact-MutationDs"))
builder_utils.remove_directory(directory)
return (relationships, header, output_file_name)
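# Minimal usage sketch (illustrative, not part of the original module; the directory path is
# hypothetical):
# relationships, header, output_file_name = parser('/data/databases', download=False)
# `relationships` is a set of tuples ending with the "Intact-MutationDs" source label.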
| 37.8 | 166 | 0.582011 |
f693ca3b43336aa0bab22b3d7acc5f12408eb17c | 5,398 | py | Python | catalog/views.py | rahulblhra/django-lib | 380abb9ebb50de7a2fdc4a155ccbd1a7a19a89b5 | ["MIT"] | null | null | null | catalog/views.py | rahulblhra/django-lib | 380abb9ebb50de7a2fdc4a155ccbd1a7a19a89b5 | ["MIT"] | null | null | null | catalog/views.py | rahulblhra/django-lib | 380abb9ebb50de7a2fdc4a155ccbd1a7a19a89b5 | ["MIT"] | null | null | null |
from django.shortcuts import render
from django.views import generic
# Create your views here.
from .models import Book, Author, BookInstance, Genre, Language
from django.contrib.auth.mixins import LoginRequiredMixin
def index(request):
"""View function for home page of site."""
# Generate counts of some of the main objects
#num_authors = Author.objects.count() # The 'all()' is implied by default.
# Number of visits to this view, as counted in the session variable.
num_visits = request.session.get('num_visits', 0)
request.session['num_visits'] = num_visits + 1
num_books = Book.objects.all().count()
num_instances = BookInstance.objects.all().count()
num_genres = Genre.objects.count()
# Available books (status = 'a')
num_instances_available = BookInstance.objects.filter(status__exact='a').count()
# The 'all()' is implied by default.
num_authors = Author.objects.count()
#num_books_specific = Book.objects.filter(title__icontains='harry').count()
context = {
'num_books': num_books,
'num_instances': num_instances,
'num_instances_available': num_instances_available,
'num_authors': num_authors,
'num_genres': num_genres,
'num_visits': num_visits,
}
# Render the HTML template index.html with the data in the context variable
return render(request, 'index.html', context=context)
class BookListView(generic.ListView):
model = Book
paginate_by = 10
#context_object_name = 'my_book_list' # your own name for the list as a template variable
#queryset = Book.objects.filter(title__icontains='war')[:5] # Get 5 books containing the title war
#template_name = 'books/my_arbitrary_template_name_list.html' # Specify your own template name/location
# def get_context_data(self, **kwargs):
# # Call the base implementation first to get the context
# context = super(BookListView, self).get_context_data(**kwargs)
# # Create any data and add it to the context
# context['some_data'] = 'This is just some data'
#return context
#class MyView(LoginRequiredMixin, View):
class BookDetailView(generic.DetailView):
model = Book
# def book_detail_view(request, primary_key):
# try:
# book = Book.objects.get(pk=primary_key)
# except Book.DoesNotExist:
# raise Http404('Book does not exist')
## from django.shortcuts import get_object_or_404
# #book = get_object_or_404(Book, pk=primary_key)
#return render(request, 'catalog/book_detail.html', context={'book': book})
class AuthorListView(generic.ListView):
model = Author
paginate_by = 10
#class MyView(LoginRequiredMixin, 'author-detail'):
# login_url = '/login/'
#redirect_field_name = 'redirect_to_author-details'
class AuthorDetailView(generic.DetailView):
model = Author
#from django.contrib.auth.mixins import LoginRequiredMixin
class LoanedBooksByUserListView(LoginRequiredMixin,generic.ListView):
"""Generic class-based view listing books on loan to current user."""
model = BookInstance
template_name ='catalog/bookinstance_list_borrowed_user.html'
paginate_by = 10
def get_queryset(self):
return BookInstance.objects.filter(borrower=self.request.user).filter(status__exact='o').order_by('due_back')
import datetime
from django.contrib.auth.decorators import permission_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from django.urls import reverse
from .forms import RenewBookForm
@permission_required('catalog.can_mark_returned')
def renew_book_librarian(request, pk):
book_instance = get_object_or_404(BookInstance, pk=pk)
# If this is a POST request then process the Form data
if request.method == 'POST':
# Create a form instance and populate it with data from the request (binding):
form = RenewBookForm(request.POST)
# Check if the form is valid:
if form.is_valid():
# process the data in form.cleaned_data as required (here we just write it to the model due_back field)
book_instance.due_back = form.cleaned_data['renewal_date']
book_instance.save()
# redirect to a new URL:
            return HttpResponseRedirect(reverse('index'))
# If this is a GET (or any other method) create the default form.
else:
proposed_renewal_date = datetime.date.today() + datetime.timedelta(weeks=3)
form = RenewBookForm(initial={'renewal_date': proposed_renewal_date,})
return render(request, 'catalog/book_renew_librarian.html', {'form': form, 'book_instance':book_instance})
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from .models import Author
class AuthorCreate(CreateView):
model = Author
fields = '__all__'
initial = {'date_of_death': '05/01/2018',}
class AuthorUpdate(UpdateView):
model = Author
fields = ['first_name', 'last_name', 'date_of_birth', 'date_of_death']
class AuthorDelete(DeleteView):
model = Author
success_url = reverse_lazy('authors')
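# Illustrative URLconf sketch (hypothetical; route strings and names are assumptions, not
# taken from the original project) showing how the views above are typically wired up:
# from django.urls import path
# from . import views
# urlpatterns = [
#     path('', views.index, name='index'),
#     path('books/', views.BookListView.as_view(), name='books'),
#     path('book/<int:pk>', views.BookDetailView.as_view(), name='book-detail'),
#     path('book/<pk>/renew/', views.renew_book_librarian, name='renew-book-librarian'),
# ]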
| 31.940828 | 118 | 0.687662 |
e5b8afb697e0e5a9606c1cf44a83edadb45ccfb6 | 7,091 | py | Python | kubernetes/client/models/v1beta1_priority_class_list.py | fsduser/python | 2b20069ebc05283352fbdc95bbdca2b6133a4175 | ["Apache-2.0"] | 1 | 2021-10-15T13:05:45.000Z | 2021-10-15T13:05:45.000Z | kubernetes/client/models/v1beta1_priority_class_list.py | belajarqywok/python | b15bea16a87ad03136a4627941ac437582ea4657 | ["Apache-2.0"] | 10 | 2020-10-01T03:15:01.000Z | 2022-03-01T03:06:31.000Z | kubernetes/client/models/v1beta1_priority_class_list.py | belajarqywok/python | b15bea16a87ad03136a4627941ac437582ea4657 | ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.19
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1PriorityClassList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1beta1PriorityClass]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1beta1PriorityClassList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1beta1PriorityClassList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1beta1PriorityClassList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1beta1PriorityClassList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1beta1PriorityClassList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1beta1PriorityClassList. # noqa: E501
items is the list of PriorityClasses # noqa: E501
:return: The items of this V1beta1PriorityClassList. # noqa: E501
:rtype: list[V1beta1PriorityClass]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1beta1PriorityClassList.
items is the list of PriorityClasses # noqa: E501
:param items: The items of this V1beta1PriorityClassList. # noqa: E501
:type: list[V1beta1PriorityClass]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1beta1PriorityClassList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1beta1PriorityClassList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1beta1PriorityClassList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1beta1PriorityClassList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1beta1PriorityClassList. # noqa: E501
:return: The metadata of this V1beta1PriorityClassList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1PriorityClassList.
:param metadata: The metadata of this V1beta1PriorityClassList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1PriorityClassList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1PriorityClassList):
return True
return self.to_dict() != other.to_dict()
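# Illustrative usage sketch (not part of the generated file): `items` must not be None; an empty
# list is used here only for demonstration, and the api_version/kind strings are the conventional
# values for this resource rather than values taken from the file above.
# pcl = V1beta1PriorityClassList(api_version="scheduling.k8s.io/v1beta1",
#                                kind="PriorityClassList", items=[])
# print(pcl.to_dict())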
| 34.42233 | 312 | 0.634466 |
9f055bcf3403a3484ce832e2291600689663f619 | 261 | py | Python | example/generic/get_exchange_currencies.py | bailzx5522/huobi_Python | d87cb11b44304c32da6e57c8ada8d03ee5fdb0e7 | ["Apache-2.0"] | 611 | 2019-07-10T08:17:50.000Z | 2022-03-21T18:56:39.000Z | example/generic/get_exchange_currencies.py | bailzx5522/huobi_Python | d87cb11b44304c32da6e57c8ada8d03ee5fdb0e7 | ["Apache-2.0"] | 105 | 2019-07-12T03:43:41.000Z | 2022-03-30T10:33:06.000Z | example/generic/get_exchange_currencies.py | bailzx5522/huobi_Python | d87cb11b44304c32da6e57c8ada8d03ee5fdb0e7 | ["Apache-2.0"] | 325 | 2019-07-12T02:46:54.000Z | 2022-03-21T18:56:41.000Z |
from huobi.client.generic import GenericClient
from huobi.utils import *
generic_client = GenericClient()
list_obj = generic_client.get_exchange_currencies()
LogInfo.output("---- Supported currency ----")
for currency in list_obj:
LogInfo.output(currency)
| 29 | 51 | 0.785441 |
ad3a5ef10f3d78e8bccb941eca70cdedef814511 | 16,545 | py | Python | qa/rpc-tests/pruning.py | fujicoin/fujicoin-bitcore | bd4219c284e716c2326ba450cc3288ca691cd8b3 | ["MIT"] | null | null | null | qa/rpc-tests/pruning.py | fujicoin/fujicoin-bitcore | bd4219c284e716c2326ba450cc3288ca691cd8b3 | ["MIT"] | null | null | null | qa/rpc-tests/pruning.py | fujicoin/fujicoin-bitcore | bd4219c284e716c2326ba450cc3288ca691cd8b3 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Fujicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test pruning code
# ********
# WARNING:
# This test uses 4GB of disk space.
# This test takes 30 mins or more (up to 2 hours)
# ********
from test_framework.test_framework import FujicoinTestFramework
from test_framework.util import *
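# Combined size, in MiB, of the files directly inside the given blocks directory.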
def calc_usage(blockdir):
return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f)) / (1024. * 1024.)
class PruneTest(FujicoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
self.utxo = []
self.address = ["",""]
self.txouts = gen_return_txouts()
def setup_network(self):
self.nodes = []
self.is_network_split = False
# Create nodes 0 and 1 to mine
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=999000", "-checkblocks=5"], timewait=900))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=999000", "-checkblocks=5"], timewait=900))
# Create node 2 to test pruning
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-prune=550"], timewait=900))
self.prunedir = self.options.tmpdir+"/node2/regtest/blocks/"
self.address[0] = self.nodes[0].getnewaddress()
self.address[1] = self.nodes[1].getnewaddress()
# Determine default relay fee
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[2], 0)
sync_blocks(self.nodes[0:3])
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.nodes[1].generate(200)
sync_blocks(self.nodes[0:2])
self.nodes[0].generate(150)
# Then mine enough full blocks to create more than 550MiB of data
for i in range(645):
self.mine_full_block(self.nodes[0], self.address[0])
sync_blocks(self.nodes[0:3])
def test_height_min(self):
if not os.path.isfile(self.prunedir+"blk00000.dat"):
raise AssertionError("blk00000.dat is missing, pruning too early")
print("Success")
print("Though we're already using more than 550MiB, current usage:", calc_usage(self.prunedir))
print("Mining 25 more blocks should cause the first block file to be pruned")
# Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
for i in range(25):
self.mine_full_block(self.nodes[0],self.address[0])
waitstart = time.time()
while os.path.isfile(self.prunedir+"blk00000.dat"):
time.sleep(0.1)
if time.time() - waitstart > 30:
raise AssertionError("blk00000.dat not pruned when it should be")
print("Success")
usage = calc_usage(self.prunedir)
print("Usage should be below target:", usage)
if (usage > 550):
raise AssertionError("Pruning target not being met")
def create_chain_with_staleblocks(self):
# Create stale blocks in manageable sized chunks
print("Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
for j in range(12):
# Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
# Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
# Stopping node 0 also clears its mempool, so it doesn't have node1's transactions to accidentally mine
stop_node(self.nodes[0],0)
self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=999000", "-checkblocks=5"], timewait=900)
# Mine 24 blocks in node 1
self.utxo = self.nodes[1].listunspent()
for i in range(24):
if j == 0:
self.mine_full_block(self.nodes[1],self.address[1])
else:
self.nodes[1].generate(1) #tx's already in mempool from previous disconnects
# Reorg back with 25 block chain from node 0
self.utxo = self.nodes[0].listunspent()
for i in range(25):
self.mine_full_block(self.nodes[0],self.address[0])
# Create connections in the order so both nodes can see the reorg at the same time
connect_nodes(self.nodes[1], 0)
connect_nodes(self.nodes[2], 0)
sync_blocks(self.nodes[0:3])
print("Usage can be over target because of high stale rate:", calc_usage(self.prunedir))
def reorg_test(self):
# Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
# This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain
# Reboot node 1 to clear its mempool (hopefully make the invalidate faster)
# Lower the block max size so we don't keep mining all our big mempool transactions (from disconnected blocks)
stop_node(self.nodes[1],1)
self.nodes[1]=start_node(1, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
height = self.nodes[1].getblockcount()
print("Current block height:", height)
invalidheight = height-287
badhash = self.nodes[1].getblockhash(invalidheight)
print("Invalidating block at height:",invalidheight,badhash)
self.nodes[1].invalidateblock(badhash)
        # We've now switched to our previously mined-24 block fork on node 1, but that's not what we want
# So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
mainchainhash = self.nodes[0].getblockhash(invalidheight - 1)
curhash = self.nodes[1].getblockhash(invalidheight - 1)
while curhash != mainchainhash:
self.nodes[1].invalidateblock(curhash)
curhash = self.nodes[1].getblockhash(invalidheight - 1)
assert(self.nodes[1].getblockcount() == invalidheight - 1)
print("New best height", self.nodes[1].getblockcount())
# Reboot node1 to clear those giant tx's from mempool
stop_node(self.nodes[1],1)
self.nodes[1]=start_node(1, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
print("Generating new longer chain of 300 more blocks")
self.nodes[1].generate(300)
print("Reconnect nodes")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[2], 1)
sync_blocks(self.nodes[0:3], timeout=120)
print("Verify height on node 2:",self.nodes[2].getblockcount())
print("Usage possibly still high bc of stale blocks in block files:", calc_usage(self.prunedir))
print("Mine 220 more blocks so we have requisite history (some blocks will be big and cause pruning of previous chain)")
self.nodes[0].generate(220) #node 0 has many large tx's in its mempool from the disconnects
sync_blocks(self.nodes[0:3], timeout=300)
usage = calc_usage(self.prunedir)
print("Usage should be below target:", usage)
if (usage > 550):
raise AssertionError("Pruning target not being met")
return invalidheight,badhash
def reorg_back(self):
# Verify that a block on the old main chain fork has been pruned away
try:
self.nodes[2].getblock(self.forkhash)
raise AssertionError("Old block wasn't pruned so can't test redownload")
except JSONRPCException as e:
print("Will need to redownload block",self.forkheight)
# Verify that we have enough history to reorg back to the fork point
        # Although this is more than 288 blocks back, it is expected to still be retained
        # because this chain was written more recently and only its other 299 small and
        # 220 large blocks are in the block files after it.
self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
first_reorg_height = self.nodes[2].getblockcount()
curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
self.nodes[2].invalidateblock(curchainhash)
goalbestheight = self.mainchainheight
goalbesthash = self.mainchainhash2
        # As of 0.10 the current block download logic is not able to reorg to the original chain created in
        # create_chain_with_staleblocks because it doesn't know of any peer that's on that chain from which to
        # redownload its missing blocks.
# Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain
# because it has all the block data.
# However it must mine enough blocks to have a more work chain than the reorg_test chain in order
# to trigger node 2's block download logic.
# At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg
if self.nodes[2].getblockcount() < self.mainchainheight:
blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
print("Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed:", blocks_to_mine)
self.nodes[0].invalidateblock(curchainhash)
assert(self.nodes[0].getblockcount() == self.mainchainheight)
assert(self.nodes[0].getbestblockhash() == self.mainchainhash2)
goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
goalbestheight = first_reorg_height + 1
print("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
waitstart = time.time()
while self.nodes[2].getblockcount() < goalbestheight:
time.sleep(0.1)
if time.time() - waitstart > 900:
raise AssertionError("Node 2 didn't reorg to proper height")
assert(self.nodes[2].getbestblockhash() == goalbesthash)
# Verify we can now have the data for a block previously pruned
assert(self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight)
def mine_full_block(self, node, address):
# Want to create a full block
# We'll generate a 66k transaction below, and 14 of them is close to the 1MB block limit
for j in range(14):
if len(self.utxo) < 14:
self.utxo = node.listunspent()
inputs=[]
outputs = {}
t = self.utxo.pop()
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
remchange = t["amount"] - 100*self.relayfee # Fee must be above min relay rate for 66kb tx
outputs[address]=remchange
            # Create a basic transaction that will send change back to ourselves after accounting for a fee,
            # and then insert the 128 generated transaction outputs in the middle. rawtx[92] is where the
            # number of txouts is stored and is the only thing we overwrite from the original transaction.
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + self.txouts
newtx = newtx + rawtx[94:]
# Appears to be ever so slightly faster to sign with SIGHASH_NONE
signresult = node.signrawtransaction(newtx,None,None,"NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
# Mine a full sized block which will be these transactions we just created
node.generate(1)
def run_test(self):
print("Warning! This test requires 4GB of disk space and takes over 30 mins (up to 2 hours)")
print("Mining a big blockchain of 995 blocks")
self.create_big_chain()
# Chain diagram key:
# * blocks on main chain
# +,&,$,@ blocks on other forks
# X invalidated block
# N1 Node 1
#
# Start by mining a simple chain that all nodes have
# N0=N1=N2 **...*(995)
print("Check that we haven't started pruning yet because we're below PruneAfterHeight")
self.test_height_min()
# Extend this chain past the PruneAfterHeight
# N0=N1=N2 **...*(1020)
print("Check that we'll exceed disk space target if we have a very high stale block rate")
self.create_chain_with_staleblocks()
# Disconnect N0
# And mine a 24 block chain on N1 and a separate 25 block chain on N0
# N1=N2 **...*+...+(1044)
# N0 **...**...**(1045)
#
# reconnect nodes causing reorg on N1 and N2
# N1=N2 **...*(1020) *...**(1045)
# \
# +...+(1044)
#
# repeat this process until you have 12 stale forks hanging off the
# main chain on N1 and N2
# N0 *************************...***************************(1320)
#
# N1=N2 **...*(1020) *...**(1045) *.. ..**(1295) *...**(1320)
# \ \ \
# +...+(1044) &.. $...$(1319)
# Save some current chain state for later use
self.mainchainheight = self.nodes[2].getblockcount() #1320
self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)
print("Check that we can survive a 288 block reorg still")
(self.forkheight,self.forkhash) = self.reorg_test() #(1033, )
# Now create a 288 block reorg by mining a longer chain on N1
# First disconnect N1
# Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
# N1 **...*(1020) **...**(1032)X..
# \
# ++...+(1031)X..
#
# Now mine 300 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@(1332)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# Reconnect nodes and mine 220 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# N2 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ *...**(1320)
# \ \
# ++...++(1044) ..
#
# N0 ********************(1032) @@...@@@(1552)
# \
# *...**(1320)
print("Test that we can rerequest a block we previously pruned if needed for a reorg")
self.reorg_back()
# Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
# Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
# original main chain (*), but will require redownload of some blocks
# In order to have a peer we think we can download from, we must also perform this invalidation
# on N0 and mine a new longest chain to trigger the redownload.
# Final result:
# N0 ********************(1032) **...****(1553)
# \
# X@...@@@(1552)
#
# N2 **...*(1020) **...**(1032) **...****(1553)
# \ \
# \ X@...@@@(1552)
# \
# +..
#
# N1 doesn't change because 1033 on main chain (*) is invalid
print("Done")
if __name__ == '__main__':
PruneTest().main()
| 48.377193
| 167
| 0.585131
|
22f41111ed44f921ac6df80c28ca5352817c686e
| 7,145
|
py
|
Python
|
aiida_cp2k/tests/test_bands.py
|
dev-zero/aiida-cp2k
|
99ca8bb29570e8b7feb0abd722becc31b4b0589b
|
[
"MIT"
] | 1
|
2019-05-20T15:48:55.000Z
|
2019-05-20T15:48:55.000Z
|
aiida_cp2k/tests/test_bands.py
|
dev-zero/aiida-cp2k
|
99ca8bb29570e8b7feb0abd722becc31b4b0589b
|
[
"MIT"
] | null | null | null |
aiida_cp2k/tests/test_bands.py
|
dev-zero/aiida-cp2k
|
99ca8bb29570e8b7feb0abd722becc31b4b0589b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (c), The AiiDA-CP2K authors. #
# SPDX-License-Identifier: MIT #
# AiiDA-CP2K is hosted on GitHub at https://github.com/aiidateam/aiida-cp2k #
# For further information on the license, see the LICENSE.txt file. #
###############################################################################
"""Test simple Band Structure calculations"""
from __future__ import print_function
from __future__ import absolute_import
import pytest
from . import get_computer, get_code
@pytest.mark.process_execution
def test_band_structure_calc_Si(new_workdir):
"""Computing Band Structure of Si"""
import numpy as np
from ase.atoms import Atoms
from aiida.engine import run
from aiida.plugins import CalculationFactory
from aiida.orm import Dict, StructureData
computer = get_computer(workdir=new_workdir)
code = get_code(entry_point="cp2k", computer=computer)
# structure
positions = [
[0.0000000000, 0.0000000000, 2.6954627656],
[4.0431941484, 4.0431941484, 4.0431941484],
]
cell = [
[0.0, 2.69546276561, 2.69546276561],
[2.69546276561, 0.0, 2.69546276561],
[2.69546276561, 2.69546276561, 0.0],
]
atoms = Atoms("Si2", positions=positions, cell=cell)
structure = StructureData(ase=atoms)
# parameters
parameters = Dict(
dict={
"FORCE_EVAL": {
"METHOD": "Quickstep",
"DFT": {
"CHARGE": 0,
"KPOINTS": {
"SCHEME MONKHORST-PACK": "1 1 1",
"SYMMETRY": "OFF",
"WAVEFUNCTIONS": "REAL",
"FULL_GRID": ".TRUE.",
"PARALLEL_GROUP_SIZE": 0,
},
"MGRID": {"CUTOFF": 600, "NGRIDS": 4, "REL_CUTOFF": 50},
"UKS": False,
"BASIS_SET_FILE_NAME": "BASIS_MOLOPT",
"POTENTIAL_FILE_NAME": "GTH_POTENTIALS",
"QS": {"METHOD": "GPW", "EXTRAPOLATION": "USE_GUESS"},
"POISSON": {"PERIODIC": "XYZ"},
"SCF": {
"EPS_SCF": 1.0e-4,
"ADDED_MOS": 1,
"SMEAR": {
"METHOD": "FERMI_DIRAC",
"ELECTRONIC_TEMPERATURE": 300,
},
"DIAGONALIZATION": {"ALGORITHM": "STANDARD", "EPS_ADAPT": 0.01},
"MIXING": {
"METHOD": "BROYDEN_MIXING",
"ALPHA": 0.2,
"BETA": 1.5,
"NBROYDEN": 8,
},
},
"XC": {"XC_FUNCTIONAL": {"_": "PBE"}},
"PRINT": {
"MO_CUBES": { # this is to print the band gap
"STRIDE": "1 1 1",
"WRITE_CUBE": "F",
"NLUMO": 1,
"NHOMO": 1,
},
"BAND_STRUCTURE": {
"KPOINT_SET": [
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"GAMMA 0.0 0.0 0.0",
"X 0.5 0.0 0.5",
],
"UNITS": "B_VECTOR",
},
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"X 0.5 0.0 0.5",
"U 0.625 0.25 0.625",
],
"UNITS": "B_VECTOR",
},
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"K 0.375 0.375 0.75",
"GAMMA 0.0 0.0 0.0",
],
"UNITS": "B_VECTOR",
},
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"GAMMA 0.0 0.0 0.0",
"L 0.5 0.5 0.5",
],
"UNITS": "B_VECTOR",
},
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"L 0.5 0.5 0.5",
"W 0.5 0.25 0.75",
],
"UNITS": "B_VECTOR",
},
{
"NPOINTS": 10,
"SPECIAL_POINT": [
"W 0.5 0.25 0.75",
"X 0.5 0.0 0.5",
],
"UNITS": "B_VECTOR",
},
]
},
},
},
"SUBSYS": {
"KIND": [
{
"_": "Si",
"BASIS_SET": "DZVP-MOLOPT-SR-GTH",
"POTENTIAL": "GTH-PBE-q4",
}
]
},
"PRINT": { # this is to print forces (may be necessary for problems
# detection)
"FORCES": {"_": "ON"}
},
},
"GLOBAL": {"EXTENDED_FFT_LENGTHS": True}, # Needed for large systems
}
)
options = {
"resources": {"num_machines": 1, "num_mpiprocs_per_machine": 1},
"max_wallclock_seconds": 1 * 3 * 60,
}
inputs = {
"structure": structure,
"parameters": parameters,
"code": code,
"metadata": {"options": options},
}
result = run(CalculationFactory("cp2k"), **inputs)
bands = result["output_bands"]
# check bands
expected_gamma_kpoint = np.array(
[-5.71237757, 6.5718575, 6.5718575, 6.5718575, 8.88653953]
)
assert bands.get_kpoints().shape == (66, 3)
assert bands.get_bands().shape == (66, 5)
assert abs(max(bands.get_bands()[0] - expected_gamma_kpoint)) < 1e-7
| 38.831522
| 88
| 0.328202
|
938edce85adaaf2319afbe3acc37976ef875f8fd
| 248
|
py
|
Python
|
tests/config.py
|
socialwifi/flask-oauthres
|
232b3cccdb566fa1e01d76db752607f8d7b015f0
|
[
"BSD-3-Clause"
] | 4
|
2017-04-27T09:28:19.000Z
|
2017-09-04T11:07:51.000Z
|
tests/config.py
|
riklaunim/flask-oauthres
|
e17803d6b6f8974feb3b7ea796155456cd587e87
|
[
"BSD-3-Clause"
] | 4
|
2017-02-28T10:35:25.000Z
|
2017-10-27T12:26:53.000Z
|
tests/config.py
|
riklaunim/flask-oauthres
|
e17803d6b6f8974feb3b7ea796155456cd587e87
|
[
"BSD-3-Clause"
] | 2
|
2017-09-04T11:18:44.000Z
|
2021-04-19T13:15:32.000Z
|
DEBUG = 1
TESTING = 1
SECRET_KEY = 'development'
OAUTH2_RESOURCE_ID = 'resource_helloworld'
OAUTH2_CLIENT_ID = 'helloworld_client'
OAUTH2_CLIENT_SECRET = 'helloworld_secret'
OAUTH2_CHECK_TOKEN_ENDPOINT_URL = 'https://example.com/oauth/check_token'
| 31
| 73
| 0.822581
|
f3e5f144436ea8d4c90e60df598fd897142fe81a
| 1,509
|
py
|
Python
|
edexOsgi/com.raytheon.uf.common.dataplugin.gfe/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
edexOsgi/com.raytheon.uf.common.dataplugin.gfe/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
edexOsgi/com.raytheon.uf.common.dataplugin.gfe/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | 1
|
2021-10-30T00:03:05.000Z
|
2021-10-30T00:03:05.000Z
|
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
class ConfigureTextProductsRequest(object):
def __init__(self):
self.mode = None
self.template = None
self.site = None
self.destinationDir = None
def getMode(self):
return self.mode
def setMode(self, mode):
self.mode = mode
def getTemplate(self):
return self.template
def setTemplate(self, template):
self.template = template
def getSite(self):
return self.site
def setSite(self, site):
self.site = site
def getDestinationDir(self):
return self.destinationDir
def setDestinationDir(self, destinationDir):
self.destinationDir = destinationDir
| 27.436364
| 69
| 0.664016
|
2cd70d58da717b2917f8aedb06093e38b516da1b
| 2,118
|
py
|
Python
|
watch-site/main.py
|
Licsber/notice
|
79ac322bd342b16cad7c389639d08809f1edcfbe
|
[
"BSD-3-Clause"
] | 3
|
2020-03-19T07:24:53.000Z
|
2021-08-24T11:56:02.000Z
|
watch-site/main.py
|
BrightMoonInner/notice
|
79ac322bd342b16cad7c389639d08809f1edcfbe
|
[
"BSD-3-Clause"
] | 1
|
2021-09-02T09:30:35.000Z
|
2021-09-02T10:02:38.000Z
|
watch-site/main.py
|
BrightMoonInner/notice
|
79ac322bd342b16cad7c389639d08809f1edcfbe
|
[
"BSD-3-Clause"
] | 1
|
2021-08-24T11:56:08.000Z
|
2021-08-24T11:56:08.000Z
|
import time
import sys
sys.path.append('..')
print(len(sys.path), sys.path)
import spider
import os
os.environ['TZ'] = 'Asia/Shanghai'
now_time = time.localtime()
now_time = time.strftime('%Y-%m-%d', now_time)
def get_url_lists():
result = set()
njit = spider.get_html('http://www.njit.edu.cn')
jwc = spider.get_html('http://jwc.njit.edu.cn')
nc = spider.njit_catcher(njit)
result.update(nc.get_all_link())
time.sleep(1)
jwcc = spider.jwc_catcher(jwc)
result.update(jwcc.get_all_link())
print(len(result), result)
return result
def get_content(mail, proxies=None):
send_list = []
for url in get_url_lists():
text = spider.get_html(url, proxy=proxies)
if text is None:
print(url + ' cannot load')
continue
if '无权访问' in text:
print(url + ' can only be accessed from local')
continue
if 'jwc' in url:
parser = spider.jwc_parser(text)
elif 'xinghuo' in url:
parser = spider.xh_parser(text)
elif 'www.njit' in url:
parser = spider.njit_parser(text)
else:
print(url, 'cannot find parser')
continue
page_time = parser.get_time()
if now_time == page_time:
print(url, ' match time')
# if True:
title = parser.get_title()
body = parser.get_body()
send_list.append((page_time + " NJIT:" + title, title + '\n' + body + '\n\n' + url))
time.sleep(1)
print(len(send_list), send_list)
for send in send_list:
title = send[0]
body = send[1]
mail.send_mail_to(title, body)
time.sleep(1)
def main(mail, proxies=None):
get_content(mail, proxies)
if __name__ == '__main__':
if len(sys.argv) != 1:
proxies = None
if len(sys.argv) == 3:
proxy = sys.argv[2]
proxies = {
'http': proxy,
'https': proxy
}
password = sys.argv[1]
mail = spider.SMTP(password)
main(mail, proxies)
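# Hedged usage note (inferred from the argument handling above): the script is
# presumably invoked as `python main.py <smtp_password> [proxy_url]`, where the
# optional proxy is applied to both http and https requests.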
| 23.533333
| 96
| 0.55288
|
01059b84fb0dcaeb1ace5d20a287b28eb807131a
| 1,390
|
py
|
Python
|
computer_vision/learning-opencv-practical/image-process-100ask/Question_81_90/answers/answer_81.py
|
magic428/subjects_notes
|
6930adbb3f445c11ca9d024abb12a53d6aca19e7
|
[
"MIT"
] | 2
|
2020-03-18T17:13:00.000Z
|
2020-03-25T02:34:03.000Z
|
computer_vision/learning-opencv-practical/image-process-100ask/Question_81_90/answers/answer_81.py
|
magic428/subjects_notes
|
6930adbb3f445c11ca9d024abb12a53d6aca19e7
|
[
"MIT"
] | null | null | null |
computer_vision/learning-opencv-practical/image-process-100ask/Question_81_90/answers/answer_81.py
|
magic428/subjects_notes
|
6930adbb3f445c11ca9d024abb12a53d6aca19e7
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Read image
img = cv2.imread("thorino.jpg").astype(np.float32)
H, W, C = img.shape
## Grayscale
gray = 0.2126 * img[..., 2] + 0.7152 * img[..., 1] + 0.0722 * img[..., 0]
gray = gray.astype(np.uint8)
## Sobel
sobely = np.array(((1, 2, 1),
(0, 0, 0),
(-1, -2, -1)), dtype=np.float32)
sobelx = np.array(((1, 0, -1),
(2, 0, -2),
(1, 0, -1)), dtype=np.float32)
tmp = np.pad(gray, (1, 1), 'edge')
Ix = np.zeros_like(gray, dtype=np.float32)
Iy = np.zeros_like(gray, dtype=np.float32)
for y in range(H):
for x in range(W):
Ix[y, x] = np.mean(tmp[y:y+3, x:x+3] * sobelx)
Iy[y, x] = np.mean(tmp[y:y+3, x:x+3] * sobely)
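# Note (added for clarity): Ix and Iy hold the horizontal and vertical Sobel responses,
# averaged over each 3x3 window (np.mean) rather than summed.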
Ix2 = Ix ** 2
IxIy = Ix * Iy
Iy2 = Iy ** 2
out = np.array((gray, gray, gray))
out = np.transpose(out, (1,2,0))
## Hessian
Hes = np.zeros((H, W))
for y in range(H):
for x in range(W):
Hes[y,x] = Ix2[y,x] * Iy2[y,x] - IxIy[y,x] ** 2
## Detect Corner
for y in range(H):
for x in range(W):
if Hes[y,x] == np.max(Hes[max(y-1,0):min(y+2,H), max(x-1,0):min(x+2,W)]) and Hes[y,x] > np.max(Hes)*0.1:
out[y, x] = [0, 0, 255]
out = out.astype(np.uint8)
cv2.imwrite("out.jpg", out)
cv2.imshow("result", out)
cv2.waitKey(0)
| 24.385965
| 113
| 0.502878
|
7b17f9a8db53fe361527626f9d6ae15ee2d64422
| 6,915
|
py
|
Python
|
hw_07/binary_classification_competition_julius.py
|
coinflip112/ml_101
|
9e56ffdb99ac241ed396e25d7f7818a58ee5c4de
|
[
"MIT"
] | null | null | null |
hw_07/binary_classification_competition_julius.py
|
coinflip112/ml_101
|
9e56ffdb99ac241ed396e25d7f7818a58ee5c4de
|
[
"MIT"
] | null | null | null |
hw_07/binary_classification_competition_julius.py
|
coinflip112/ml_101
|
9e56ffdb99ac241ed396e25d7f7818a58ee5c4de
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# dcfac0e3-1ade-11e8-9de3-00505601122b
# 7d179d73-3e93-11e9-b0fd-00505601122b
import argparse
import lzma
import pickle
import os
import urllib.request
import sys
import numpy as np
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder, StandardScaler, PolynomialFeatures
import sklearn.metrics
import sklearn.model_selection
import sklearn.linear_model
from sklearn.linear_model import LogisticRegression
def is_categorical(x):
"""Function which checks whether a value a integer or string/object type suggesting a categorical variable
Arguments:
x {Variouse formats} -- Constant which is subject to the is_integer test in the homework context it is a pandas dataframe
Returns:
np.bool_ -- Boolean value indicating whether the given argument is a instance of integer or string.
"""
is_categorical = x.apply(lambda x: isinstance(x, int) or isinstance(x, str)).values
# CHANGED: testing whether a value is categorical is given by whether the column is an integer/string
return is_categorical
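# Hypothetical usage sketch (added; not part of the original homework code):
#   >>> import pandas as pd
#   >>> is_categorical(pd.Series(["a", "b", "c"])).all()
#   True
#   >>> is_categorical(pd.Series([1.5, 2.0])).any()
#   False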
# return two lists with indexes of numerical and categorical columns
# the function doesn't work as expected: integers can also be numeric
def index_finder(dataset):
"""Function which indifies the positinal index of columns of categorical and numeric variables in a pandas dataframe
Arguments:
dataset {pd.DataFrame} -- Input pandas dataframe, features of which are subject ot being classified into either numeric or catgorical
Returns:
(list, list) -- A touple of lists, with the first containing positional indexes of categorical variables in the dataframe and the others being numeric.
"""
cat_index = []
num_index = []
# CHANGED: testing whether a value is categorical is given by whether the column is an integer/string
for index in range(dataset.shape[1]): # number of columns in np array
test = is_categorical(dataset.iloc[:, index])
result = all(test) # if whole array contains True
if result:
cat_index.append(index)
# this kinda works, but think about the case where a column has mixed values:
# it would be classified as numeric because not all of its values are categorical,
# even though such a column is much more likely to be categorical or in need of additional processing
else:
num_index.append(index)
return (
cat_index,
num_index,
) # return the indexes of categorical colums and numerical columns
class Dataset:
def __init__(
self,
name="binary_classification_competition.train.csv.xz",
url="https://ufal.mff.cuni.cz/~straka/courses/npfl129/1920/datasets/",
):
if not os.path.exists(name):
print("Downloading dataset {}...".format(name), file=sys.stderr)
urllib.request.urlretrieve(url + name, filename=name)
# Load the dataset and split it into `train_target` (column Target)
# and `train_data` (all other columns).
dataset = pd.read_csv(name)
self.data, self.target = dataset.drop("Target", axis=1), dataset["Target"]
parser = argparse.ArgumentParser()
parser.add_argument(
"--model_path",
default="binary_classification_competition.model",
type=str,
help="Model path",
)
parser.add_argument("--seed", default=42, type=int, help="Random seed")
if __name__ == "__main__":
args = parser.parse_args()
# Set random seed
np.random.seed(args.seed)
# Load the dataset, downloading it if required
train = Dataset()
features = train.data
targets = train.target
# Note that `train.data` and `train.target` are a `pandas.DataFrame`.
# It is similar to a Numpy array, but columns have names and types.
# You can get column types using `train_data.dtypes`; note that
# strings are actually encoded as `object`s.
cat_index = features.select_dtypes(
include=[np.object_]
).columns # this includes all string columns in the dataframe
num_index = features.select_dtypes(
include=[np.number]
).columns # this includes all numeric columns in the dataframe
# either way it is better to select and map the types on a semi-individual basis
# the same way you would do in R
# explore a few rows of the data
# decide what is numeric, what is categorical, and anything else
# create lists of those feature categories (almost manually, just copy and paste)
preprocessing = ColumnTransformer(
[
(
"categoric",
OneHotEncoder(handle_unknown="ignore", categories="auto", sparse=False),
cat_index,
),
("numeric", sklearn.preprocessing.StandardScaler(), num_index),
]
)
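# Hedged alternative sketch (commented out; not what this script actually uses): the
# hint below suggests applying PolynomialFeatures only to the numeric columns, which
# could be done by nesting it inside the numeric branch of the ColumnTransformer:
#
# preprocessing_numeric_poly = ColumnTransformer(
#     [
#         ("categoric", OneHotEncoder(handle_unknown="ignore", sparse=False), cat_index),
#         (
#             "numeric",
#             Pipeline(
#                 [
#                     ("scale", StandardScaler()),
#                     ("poly", PolynomialFeatures(2, include_bias=False)),
#                 ]
#             ),
#             num_index,
#         ),
#     ]
# )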
# Note: an earlier revision had a redundant (double) pipeline here; a single pipeline is enough.
feat_engineering = PolynomialFeatures(2, include_bias=False)
# you can combine all 3 steps into a single pipeline object
# This object will represent an estimator which will:
# 1. preprocess the features,
# 2. create 2nd degree polynomial features
# (beware that you're also taking the second power of the one-hot encoding, and therefore your model will be overparametrized;
# hint: apply PolynomialFeatures only to the numerical features, as in the commented sketch above)
# 3. finally fit a classifier (try selecting a classifier which can handle non-linear relationships)
classifier = LogisticRegression(solver="liblinear", C=10, max_iter=10000)
estimator = Pipeline(
steps=[
("feat_preprocessing", preprocessing),
("feat_engineering", feat_engineering),
("classifier", classifier),
]
)
estimator.fit(features, targets)
# with lzma.open(args.model_path, "wb") as model_file:
# pickle.dump(estimator, model_file)
# TODO: The trained model needs to be saved. All sklearn models can
# be serialized and deserialized using the standard `pickle` module.
# Additionally, we can also compress the model.
#
# To save a model, open a target file for binary access, and use
# `pickle.dump` to save the model to the opened file:
# with lzma.open(args.model_path, "wb") as model_file:
# pickle.dump(model, model_file)
# The `recodex_predict` is called during ReCodEx evaluation (there can be
# several Python sources in the submission, but exactly one should contain
# a `recodex_predict` method).
def recodex_predict(data):
# The `data` is (hopefully) a pandas DataFrame
with lzma.open("binary_classification_competition.model", "rb") as model_file:
model = pickle.load(model_file)
predictions = model.predict(data)
return predictions
| 38.416667
| 160
| 0.699349
|
7dfbce16d05249d3948eb1548a9aa3352d0a59c3
| 91
|
py
|
Python
|
mgd/__init__.py
|
KaiyuYue/mgd
|
a5f98775963be680a18f9c46f00a116f3803c075
|
[
"MIT"
] | 52
|
2020-08-23T17:44:26.000Z
|
2022-03-03T14:57:36.000Z
|
mgd/__init__.py
|
KaiyuYue/mgd
|
a5f98775963be680a18f9c46f00a116f3803c075
|
[
"MIT"
] | 6
|
2020-08-31T16:59:32.000Z
|
2021-04-08T00:41:39.000Z
|
mgd/__init__.py
|
KaiyuYue/mgd
|
a5f98775963be680a18f9c46f00a116f3803c075
|
[
"MIT"
] | 13
|
2020-10-03T07:46:41.000Z
|
2021-12-04T11:50:57.000Z
|
from .builder import MGDistiller, SMDistiller
from .sampler import ExtraDistributedSampler
| 30.333333
| 45
| 0.868132
|
c6480bd222842cbb92437b21fd4ec30d0e1a6ea0
| 777
|
py
|
Python
|
examples/download_by_canvas.py
|
ConnectionMaster/qgis-earthengine-plugin
|
dfb7cbaeca856e03ca3560bb52454a1c17ce347a
|
[
"MIT"
] | 307
|
2018-04-28T02:50:27.000Z
|
2022-03-31T09:39:25.000Z
|
examples/download_by_canvas.py
|
ConnectionMaster/qgis-earthengine-plugin
|
dfb7cbaeca856e03ca3560bb52454a1c17ce347a
|
[
"MIT"
] | 72
|
2019-09-18T00:03:00.000Z
|
2022-01-26T17:33:55.000Z
|
examples/download_by_canvas.py
|
ConnectionMaster/qgis-earthengine-plugin
|
dfb7cbaeca856e03ca3560bb52454a1c17ce347a
|
[
"MIT"
] | 95
|
2018-05-21T04:12:53.000Z
|
2022-03-24T11:52:46.000Z
|
import json
import ee
from ee_plugin import Map
from ee_plugin.contrib import palettes
dem = ee.Image('JAXA/ALOS/AW3D30/V2_2').select('AVE_DSM')
Map.addLayer(dem, { 'min': 0, 'max': 3000 }, 'DEM', True)
# MANUAL STEP: use "Create layer from extent" tool and activate the resulting layer
# get first feature geometry from active layer
layer = iface.activeLayer()
feature = next(layer.getFeatures())
geom = feature.geometry()
geom_json = json.loads(geom.asJson())
# show geometry (double-check)
geom_ee = ee.Geometry.Polygon(geom_json['coordinates'], 'EPSG:3857', False)
Map.addLayer(geom_ee, {}, 'geom')
# download dem using given geometry as region
url = dem.getDownloadURL({
'name': 'dem',
'scale': 30,
'region': json.dumps(geom_ee.getInfo())
})
print(url)
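# Hedged follow-up (an assumption, not part of the original example): the printed URL
# points at a downloadable archive, which could be fetched with e.g.
# urllib.request.urlretrieve(url, 'dem.zip') if an actual download is wanted.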
| 25.9
| 83
| 0.715573
|
e88ef980d3519bab16964f224fdfc36c655c5477
| 33,035
|
bzl
|
Python
|
swift/internal/xcode_swift_toolchain.bzl
|
thomasvl/rules_swift
|
df90fbbfd669eab91e4bc994aaeb81239daff314
|
[
"Apache-2.0"
] | null | null | null |
swift/internal/xcode_swift_toolchain.bzl
|
thomasvl/rules_swift
|
df90fbbfd669eab91e4bc994aaeb81239daff314
|
[
"Apache-2.0"
] | null | null | null |
swift/internal/xcode_swift_toolchain.bzl
|
thomasvl/rules_swift
|
df90fbbfd669eab91e4bc994aaeb81239daff314
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BUILD rules used to provide a Swift toolchain provided by Xcode on macOS.
The rules defined in this file are not intended to be used outside of the Swift
toolchain package. If you are looking for rules to build Swift code using this
toolchain, see `swift.bzl`.
"""
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:partial.bzl", "partial")
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load(":actions.bzl", "swift_action_names")
load(":attrs.bzl", "swift_toolchain_driver_attrs")
load(":compiling.bzl", "compile_action_configs", "features_from_swiftcopts")
load(
":feature_names.bzl",
"SWIFT_FEATURE_BITCODE_EMBEDDED",
"SWIFT_FEATURE_BITCODE_EMBEDDED_MARKERS",
"SWIFT_FEATURE_BUNDLED_XCTESTS",
"SWIFT_FEATURE_COVERAGE",
"SWIFT_FEATURE_COVERAGE_PREFIX_MAP",
"SWIFT_FEATURE_DEBUG_PREFIX_MAP",
"SWIFT_FEATURE_ENABLE_BATCH_MODE",
"SWIFT_FEATURE_ENABLE_SKIP_FUNCTION_BODIES",
"SWIFT_FEATURE_MODULE_MAP_HOME_IS_CWD",
"SWIFT_FEATURE_MODULE_MAP_NO_PRIVATE_HEADERS",
"SWIFT_FEATURE_REMAP_XCODE_PATH",
"SWIFT_FEATURE_SUPPORTS_LIBRARY_EVOLUTION",
"SWIFT_FEATURE_SUPPORTS_PRIVATE_DEPS",
"SWIFT_FEATURE_SUPPORTS_SYSTEM_MODULE_FLAG",
"SWIFT_FEATURE_USE_RESPONSE_FILES",
)
load(":features.bzl", "features_for_build_modes")
load(":toolchain_config.bzl", "swift_toolchain_config")
load(
":providers.bzl",
"SwiftFeatureAllowlistInfo",
"SwiftInfo",
"SwiftToolchainInfo",
)
load(
":utils.bzl",
"collect_implicit_deps_providers",
"compact",
"get_swift_executable_for_toolchain",
"resolve_optional_tool",
)
def _swift_developer_lib_dir(platform_framework_dir):
"""Returns the directory containing extra Swift developer libraries.
Args:
platform_framework_dir: The developer platform framework directory for
the current platform.
Returns:
The directory containing extra Swift-specific development libraries and
swiftmodules.
"""
return paths.join(
paths.dirname(paths.dirname(platform_framework_dir)),
"usr",
"lib",
)
def _command_line_objc_copts(compilation_mode, objc_fragment):
"""Returns copts that should be passed to `clang` from the `objc` fragment.
Args:
compilation_mode: The current compilation mode.
objc_fragment: The `objc` configuration fragment.
Returns:
A list of `clang` copts, each of which is preceded by `-Xcc` so that
they can be passed through `swiftc` to its underlying ClangImporter
instance.
"""
# In general, every compilation mode flag from native `objc_*` rules should
# be passed, but `-g` seems to break Clang module compilation. Since this
# flag does not make much sense for module compilation and only touches
# headers, it's ok to omit.
# TODO(b/153867054): These flags were originally being set by Bazel's legacy
# hardcoded Objective-C behavior, which has been migrated to crosstool. In
# the long term, we should query crosstool for the flags we're interested in
# and pass those to ClangImporter, and do this across all platforms. As an
# immediate short-term workaround, we preserve the old behavior by passing
# the exact set of flags that Bazel was originally passing if the list we
# get back from the configuration fragment is empty.
legacy_copts = objc_fragment.copts_for_current_compilation_mode
if not legacy_copts:
if compilation_mode == "dbg":
legacy_copts = [
"-O0",
"-DDEBUG=1",
"-fstack-protector",
"-fstack-protector-all",
]
elif compilation_mode == "opt":
legacy_copts = [
"-Os",
"-DNDEBUG=1",
"-Wno-unused-variable",
"-Winit-self",
"-Wno-extra",
]
clang_copts = objc_fragment.copts + legacy_copts
return [copt for copt in clang_copts if copt != "-g"]
def _platform_developer_framework_dir(
apple_toolchain,
apple_fragment,
xcode_config):
"""Returns the Developer framework directory for the platform.
Args:
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
xcode_config: The Xcode configuration.
Returns:
The path to the Developer framework directory for the platform if one
exists, otherwise `None`.
"""
# All platforms have a `Developer/Library/Frameworks` directory in their
# platform root, except for watchOS prior to Xcode 12.5.
platform_type = apple_fragment.single_arch_platform.platform_type
if (
platform_type == apple_common.platform_type.watchos and
not _is_xcode_at_least_version(xcode_config, "12.5")
):
return None
return apple_toolchain.platform_developer_framework_dir(apple_fragment)
def _sdk_developer_framework_dir(apple_toolchain, apple_fragment, xcode_config):
"""Returns the Developer framework directory for the SDK.
Args:
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
xcode_config: The Xcode configuration.
Returns:
The path to the Developer framework directory for the SDK if one
exists, otherwise `None`.
"""
# All platforms have a `Developer/Library/Frameworks` directory in their SDK
# root except for macOS (all versions of Xcode so far), and watchOS (prior
# to Xcode 12.5).
platform_type = apple_fragment.single_arch_platform.platform_type
if (
platform_type == apple_common.platform_type.macos or
(
platform_type == apple_common.platform_type.watchos and
not _is_xcode_at_least_version(xcode_config, "12.5")
)
):
return None
return paths.join(apple_toolchain.sdk_dir(), "Developer/Library/Frameworks")
def _swift_linkopts_providers(
apple_fragment,
apple_toolchain,
platform,
target,
toolchain_label,
xcode_config):
"""Returns providers containing flags that should be passed to the linker.
The providers returned by this function will be used as implicit
dependencies of the toolchain to ensure that any binary containing Swift code
will link to the standard libraries correctly.
Args:
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
platform: The `apple_platform` value describing the target platform.
target: The target triple.
toolchain_label: The label of the Swift toolchain that will act as the
owner of the linker input propagating the flags.
xcode_config: The Xcode configuration.
Returns:
A `struct` containing the following fields:
* `cc_info`: A `CcInfo` provider that will provide linker flags to
binaries that depend on Swift targets.
* `objc_info`: An `apple_common.Objc` provider that will provide
linker flags to binaries that depend on Swift targets.
"""
platform_developer_framework_dir = _platform_developer_framework_dir(
apple_toolchain,
apple_fragment,
xcode_config,
)
sdk_developer_framework_dir = _sdk_developer_framework_dir(
apple_toolchain,
apple_fragment,
xcode_config,
)
swift_lib_dir = paths.join(
apple_toolchain.developer_dir(),
"Toolchains/XcodeDefault.xctoolchain/usr/lib/swift",
platform.name_in_plist.lower(),
)
linkopts = [
"-F{}".format(path)
for path in compact([
platform_developer_framework_dir,
sdk_developer_framework_dir,
])
] + [
"-Wl,-rpath,/usr/lib/swift",
"-L{}".format(swift_lib_dir),
"-L/usr/lib/swift",
# TODO(b/112000244): These should get added by the C++ Starlark API,
# but we're using the "c++-link-executable" action right now instead
# of "objc-executable" because the latter requires additional
# variables not provided by cc_common. Figure out how to handle this
# correctly.
"-ObjC",
"-Wl,-objc_abi_version,2",
]
# Add the linker path to the directory containing the dylib with Swift
# extensions for the XCTest module.
if platform_developer_framework_dir:
linkopts.extend([
"-L{}".format(
_swift_developer_lib_dir(platform_developer_framework_dir),
),
])
return struct(
cc_info = CcInfo(
linking_context = cc_common.create_linking_context(
linker_inputs = depset([
cc_common.create_linker_input(
owner = toolchain_label,
user_link_flags = depset(linkopts),
),
]),
),
),
objc_info = apple_common.new_objc_provider(linkopt = depset(linkopts)),
)
def _features_for_bitcode_mode(bitcode_mode):
"""Gets the list of features to enable for the selected Bitcode mode.
Args:
bitcode_mode: The `bitcode_mode` value from the C++ configuration
fragment.
Returns:
A list containing the features to enable.
"""
bitcode_mode_string = str(bitcode_mode)
if bitcode_mode_string == "embedded":
return [SWIFT_FEATURE_BITCODE_EMBEDDED]
elif bitcode_mode_string == "embedded_markers":
return [SWIFT_FEATURE_BITCODE_EMBEDDED_MARKERS]
elif bitcode_mode_string == "none":
return []
fail("Internal error: expected bitcode_mode to be one of: " +
"['embedded', 'embedded_markers', 'none'], but got '{}'".format(
bitcode_mode_string,
))
def _resource_directory_configurator(developer_dir, prerequisites, args):
"""Configures compiler flags about the toolchain's resource directory.
We must pass a resource directory explicitly if the build rules are invoked
using a custom driver executable or a partial toolchain root, so that the
compiler doesn't try to find its resources relative to that binary.
Args:
developer_dir: The path to Xcode's Developer directory. This argument is
pre-bound in the partial.
prerequisites: The value returned by
`swift_common.action_prerequisites`.
args: The `Args` object to which flags will be added.
"""
args.add(
"-resource-dir",
(
"{developer_dir}/Toolchains/{toolchain}.xctoolchain/" +
"usr/lib/swift"
).format(
developer_dir = developer_dir,
toolchain = "XcodeDefault",
),
)
def _all_action_configs(
additional_objc_copts,
additional_swiftc_copts,
apple_fragment,
apple_toolchain,
generated_header_rewriter,
needs_resource_directory,
target_triple,
xcode_config):
"""Returns the action configurations for the Swift toolchain.
Args:
additional_objc_copts: Additional Objective-C compiler flags obtained
from the `objc` configuration fragment (and legacy flags that were
previously passed directly by Bazel).
additional_swiftc_copts: Additional Swift compiler flags obtained from
the `swift` configuration fragment.
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
generated_header_rewriter: An executable that will be invoked after
compilation to rewrite the generated header, or None if this is not
desired.
needs_resource_directory: If True, the toolchain needs the resource
directory passed explicitly to the compiler.
target_triple: The target triple.
xcode_config: The Xcode configuration.
Returns:
The action configurations for the Swift toolchain.
"""
platform_developer_framework_dir = _platform_developer_framework_dir(
apple_toolchain,
apple_fragment,
xcode_config,
)
sdk_developer_framework_dir = _sdk_developer_framework_dir(
apple_toolchain,
apple_fragment,
xcode_config,
)
developer_framework_dirs = compact([
platform_developer_framework_dir,
sdk_developer_framework_dir,
])
# Basic compilation flags (target triple and toolchain search paths).
action_configs = [
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.DERIVE_FILES,
swift_action_names.PRECOMPILE_C_MODULE,
swift_action_names.DUMP_AST,
],
configurators = [
swift_toolchain_config.add_arg("-target", target_triple),
swift_toolchain_config.add_arg(
"-sdk",
apple_toolchain.sdk_dir(),
),
] + [
swift_toolchain_config.add_arg(framework_dir, format = "-F%s")
for framework_dir in developer_framework_dirs
],
),
swift_toolchain_config.action_config(
actions = [swift_action_names.PRECOMPILE_C_MODULE],
configurators = [
swift_toolchain_config.add_arg(
"-Xcc",
framework_dir,
format = "-F%s",
)
for framework_dir in developer_framework_dirs
],
),
]
# The platform developer framework directory contains XCTest.swiftmodule
# with Swift extensions to XCTest, so it needs to be added to the search
# path on platforms where it exists.
if platform_developer_framework_dir:
action_configs.append(
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.DERIVE_FILES,
swift_action_names.PRECOMPILE_C_MODULE,
swift_action_names.DUMP_AST,
],
configurators = [
swift_toolchain_config.add_arg(
_swift_developer_lib_dir(
platform_developer_framework_dir,
),
format = "-I%s",
),
],
),
)
action_configs.extend([
# Bitcode-related flags.
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.PRECOMPILE_C_MODULE,
],
configurators = [swift_toolchain_config.add_arg("-embed-bitcode")],
features = [SWIFT_FEATURE_BITCODE_EMBEDDED],
),
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.PRECOMPILE_C_MODULE,
],
configurators = [
swift_toolchain_config.add_arg("-embed-bitcode-marker"),
],
features = [SWIFT_FEATURE_BITCODE_EMBEDDED_MARKERS],
),
# Xcode path remapping
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.DERIVE_FILES,
],
configurators = [
swift_toolchain_config.add_arg(
"-debug-prefix-map",
"__BAZEL_XCODE_DEVELOPER_DIR__=DEVELOPER_DIR",
),
],
features = [
[SWIFT_FEATURE_REMAP_XCODE_PATH, SWIFT_FEATURE_DEBUG_PREFIX_MAP],
],
),
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.DERIVE_FILES,
],
configurators = [
swift_toolchain_config.add_arg(
"-coverage-prefix-map",
"__BAZEL_XCODE_DEVELOPER_DIR__=DEVELOPER_DIR",
),
],
features = [
[
SWIFT_FEATURE_REMAP_XCODE_PATH,
SWIFT_FEATURE_COVERAGE_PREFIX_MAP,
SWIFT_FEATURE_COVERAGE,
],
],
),
])
if needs_resource_directory:
# If the user is using a custom driver but not a complete custom
# toolchain, provide the original toolchain's resources as the resource
# directory so that modules are found correctly.
action_configs.append(
swift_toolchain_config.action_config(
actions = [
swift_action_names.COMPILE,
swift_action_names.DERIVE_FILES,
swift_action_names.PRECOMPILE_C_MODULE,
swift_action_names.DUMP_AST,
],
configurators = [
partial.make(
_resource_directory_configurator,
apple_toolchain.developer_dir(),
),
],
),
)
action_configs.extend(compile_action_configs(
additional_objc_copts = additional_objc_copts,
additional_swiftc_copts = additional_swiftc_copts,
generated_header_rewriter = generated_header_rewriter.executable,
))
return action_configs
def _all_tool_configs(
custom_toolchain,
env,
execution_requirements,
generated_header_rewriter,
swift_executable,
toolchain_root,
xcode_config):
"""Returns the tool configurations for the Swift toolchain.
Args:
custom_toolchain: The bundle identifier of a custom Swift toolchain, if
one was requested.
env: The environment variables to set when launching tools.
execution_requirements: The execution requirements for tools.
generated_header_rewriter: A `struct` returned by
`resolve_optional_tool` that represents an executable that will be
invoked after compilation to rewrite the generated header.
swift_executable: A custom Swift driver executable to be used during the
build, if provided.
toolchain_root: The root directory of the toolchain, if provided.
xcode_config: The `apple_common.XcodeVersionConfig` provider.
Returns:
A dictionary mapping action name to tool configuration.
"""
# Configure the environment variables that the worker needs to fill in the
# Bazel placeholders for SDK root and developer directory, along with the
# custom toolchain if requested.
if custom_toolchain:
env = dict(env)
env["TOOLCHAINS"] = custom_toolchain
tool_config = swift_toolchain_config.driver_tool_config(
driver_mode = "swiftc",
env = env,
execution_requirements = execution_requirements,
swift_executable = swift_executable,
tool_input_manifests = generated_header_rewriter.input_manifests,
tool_inputs = generated_header_rewriter.inputs,
toolchain_root = toolchain_root,
use_param_file = True,
worker_mode = "persistent",
)
tool_configs = {
swift_action_names.COMPILE: tool_config,
swift_action_names.DERIVE_FILES: tool_config,
swift_action_names.DUMP_AST: tool_config,
}
# Xcode 12.0 implies Swift 5.3.
if _is_xcode_at_least_version(xcode_config, "12.0"):
tool_configs[swift_action_names.PRECOMPILE_C_MODULE] = (
swift_toolchain_config.driver_tool_config(
driver_mode = "swiftc",
env = env,
execution_requirements = execution_requirements,
swift_executable = swift_executable,
toolchain_root = toolchain_root,
use_param_file = True,
worker_mode = "wrap",
)
)
return tool_configs
def _is_xcode_at_least_version(xcode_config, desired_version):
"""Returns True if we are building with at least the given Xcode version.
Args:
xcode_config: The `apple_common.XcodeVersionConfig` provider.
desired_version: The minimum desired Xcode version, as a dotted version
string.
Returns:
True if the current target is being built with a version of Xcode at
least as high as the given version.
"""
current_version = xcode_config.xcode_version()
if not current_version:
fail("Could not determine Xcode version at all. This likely means " +
"Xcode isn't available; if you think this is a mistake, please " +
"file an issue.")
desired_version_value = apple_common.dotted_version(desired_version)
return current_version >= desired_version_value
def _swift_apple_target_triple(cpu, platform, version):
"""Returns a target triple string for an Apple platform.
Args:
cpu: The CPU of the target.
platform: The `apple_platform` value describing the target platform.
version: The target platform version as a dotted version string.
Returns:
A target triple string describing the platform.
"""
platform_string = str(platform.platform_type)
if platform_string == "macos":
platform_string = "macosx"
environment = ""
if not platform.is_device:
environment = "-simulator"
return "{cpu}-apple-{platform}{version}{environment}".format(
cpu = cpu,
environment = environment,
platform = platform_string,
version = version,
)
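# Illustrative example (added for clarity; not used by the rules themselves): a 64-bit
# iOS simulator build targeting iOS 13.0 yields "x86_64-apple-ios13.0-simulator"; for a
# device build the "-simulator" suffix is omitted, and "macos" is rewritten to "macosx".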
def _xcode_env(xcode_config, platform):
"""Returns a dictionary containing Xcode-related environment variables.
Args:
xcode_config: The `XcodeVersionConfig` provider that contains
information about the current Xcode configuration.
platform: The `apple_platform` value describing the target platform
being built.
Returns:
A `dict` containing Xcode-related environment variables that should be
passed to Swift compile and link actions.
"""
return dicts.add(
apple_common.apple_host_system_env(xcode_config),
apple_common.target_apple_env(xcode_config, platform),
)
def _xcode_swift_toolchain_impl(ctx):
apple_fragment = ctx.fragments.apple
cpp_fragment = ctx.fragments.cpp
apple_toolchain = apple_common.apple_toolchain()
cc_toolchain = find_cpp_toolchain(ctx)
cpu = apple_fragment.single_arch_cpu
platform = apple_fragment.single_arch_platform
xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig]
target_os_version = xcode_config.minimum_os_for_platform_type(
platform.platform_type,
)
target = _swift_apple_target_triple(cpu, platform, target_os_version)
swift_linkopts_providers = _swift_linkopts_providers(
apple_fragment,
apple_toolchain,
platform,
target,
ctx.label,
xcode_config,
)
# `--define=SWIFT_USE_TOOLCHAIN_ROOT=<path>` is a rapid development feature
# that lets you build *just* a custom `swift` driver (and `swiftc`
# symlink), rather than a full toolchain, and point compilation actions at
# those. Note that the files must still be in a "toolchain-like" directory
# structure, meaning that the path passed here must contain a `bin`
# directory and that directory contains the `swift` and `swiftc` files.
#
# TODO(allevato): Retire this feature in favor of the `swift_executable`
# attribute, which supports remote builds.
#
# To use a "standard" custom toolchain built using the full Swift build
# script, use `--define=SWIFT_CUSTOM_TOOLCHAIN=<id>` as shown below.
swift_executable = get_swift_executable_for_toolchain(ctx)
toolchain_root = ctx.var.get("SWIFT_USE_TOOLCHAIN_ROOT")
custom_toolchain = ctx.var.get("SWIFT_CUSTOM_TOOLCHAIN")
if toolchain_root and custom_toolchain:
fail("Do not use SWIFT_USE_TOOLCHAIN_ROOT and SWIFT_CUSTOM_TOOLCHAIN" +
"in the same build.")
# Compute the default requested features and conditional ones based on Xcode
# version.
requested_features = features_for_build_modes(
ctx,
cpp_fragment = cpp_fragment,
) + features_from_swiftcopts(swiftcopts = ctx.fragments.swift.copts())
requested_features.extend(ctx.features)
requested_features.extend(
_features_for_bitcode_mode(cpp_fragment.apple_bitcode_mode),
)
requested_features.extend([
SWIFT_FEATURE_BUNDLED_XCTESTS,
SWIFT_FEATURE_ENABLE_BATCH_MODE,
SWIFT_FEATURE_USE_RESPONSE_FILES,
SWIFT_FEATURE_DEBUG_PREFIX_MAP,
SWIFT_FEATURE_SUPPORTS_LIBRARY_EVOLUTION,
SWIFT_FEATURE_SUPPORTS_PRIVATE_DEPS,
# TODO(b/142867898): Added to match existing Bazel Objective-C module
# map behavior; remove it when possible.
SWIFT_FEATURE_MODULE_MAP_NO_PRIVATE_HEADERS,
])
# Xcode 11.0 implies Swift 5.1.
if _is_xcode_at_least_version(xcode_config, "11.0"):
requested_features.append(SWIFT_FEATURE_SUPPORTS_LIBRARY_EVOLUTION)
requested_features.append(SWIFT_FEATURE_SUPPORTS_PRIVATE_DEPS)
# Xcode 11.4 implies Swift 5.2.
if _is_xcode_at_least_version(xcode_config, "11.4"):
requested_features.append(SWIFT_FEATURE_ENABLE_SKIP_FUNCTION_BODIES)
# Xcode 12.5 implies Swift 5.4.
if _is_xcode_at_least_version(xcode_config, "12.5"):
requested_features.append(SWIFT_FEATURE_SUPPORTS_SYSTEM_MODULE_FLAG)
env = _xcode_env(platform = platform, xcode_config = xcode_config)
execution_requirements = xcode_config.execution_info()
generated_header_rewriter = resolve_optional_tool(
ctx,
target = ctx.attr.generated_header_rewriter,
)
all_tool_configs = _all_tool_configs(
custom_toolchain = custom_toolchain,
env = env,
execution_requirements = execution_requirements,
generated_header_rewriter = generated_header_rewriter,
swift_executable = swift_executable,
toolchain_root = toolchain_root,
xcode_config = xcode_config,
)
all_action_configs = _all_action_configs(
additional_objc_copts = _command_line_objc_copts(
ctx.var["COMPILATION_MODE"],
ctx.fragments.objc,
),
additional_swiftc_copts = ctx.fragments.swift.copts(),
apple_fragment = apple_fragment,
apple_toolchain = apple_toolchain,
generated_header_rewriter = generated_header_rewriter,
needs_resource_directory = swift_executable or toolchain_root,
target_triple = target,
xcode_config = xcode_config,
)
return [
SwiftToolchainInfo(
action_configs = all_action_configs,
cc_toolchain_info = cc_toolchain,
clang_implicit_deps_providers = collect_implicit_deps_providers(
ctx.attr.clang_implicit_deps,
),
feature_allowlists = [
target[SwiftFeatureAllowlistInfo]
for target in ctx.attr.feature_allowlists
],
generated_header_module_implicit_deps_providers = (
collect_implicit_deps_providers(
ctx.attr.generated_header_module_implicit_deps,
)
),
implicit_deps_providers = collect_implicit_deps_providers(
ctx.attr.implicit_deps,
additional_cc_infos = [swift_linkopts_providers.cc_info],
additional_objc_infos = [swift_linkopts_providers.objc_info],
),
linker_supports_filelist = True,
requested_features = requested_features,
swift_worker = ctx.executable._worker,
test_configuration = struct(
env = env,
execution_requirements = execution_requirements,
),
tool_configs = all_tool_configs,
unsupported_features = ctx.disabled_features + [
SWIFT_FEATURE_MODULE_MAP_HOME_IS_CWD,
],
),
]
xcode_swift_toolchain = rule(
attrs = dicts.add(
swift_toolchain_driver_attrs(),
{
"clang_implicit_deps": attr.label_list(
doc = """\
A list of labels to library targets that should be unconditionally added as
implicit dependencies of any explicit C/Objective-C module compiled by the Swift
toolchain.
Despite being C/Objective-C modules, the targets specified by this attribute
must propagate the `SwiftInfo` provider because the Swift build rules use that
provider to look up Clang module requirements. In particular, the targets must
propagate the provider in their rule implementation themselves and not rely on
the implicit traversal performed by `swift_clang_module_aspect`; the latter is
not possible as it would create a dependency cycle between the toolchain and the
implicit dependencies.
""",
providers = [[SwiftInfo]],
),
"feature_allowlists": attr.label_list(
doc = """\
A list of `swift_feature_allowlist` targets that allow or prohibit packages from
requesting or disabling features.
""",
providers = [[SwiftFeatureAllowlistInfo]],
),
"generated_header_module_implicit_deps": attr.label_list(
doc = """\
Targets whose `SwiftInfo` providers should be treated as compile-time inputs to
actions that precompile the explicit module for the generated Objective-C header
of a Swift module.
""",
providers = [[SwiftInfo]],
),
"generated_header_rewriter": attr.label(
allow_files = True,
cfg = "exec",
doc = """\
If present, an executable that will be invoked after compilation to rewrite the
generated header.
This tool is expected to have a command line interface such that the Swift
compiler invocation is passed to it following a `"--"` argument, and any
arguments preceding the `"--"` can be defined by the tool itself (however, at
this time the worker does not support passing additional flags to the tool).
""",
executable = True,
),
"implicit_deps": attr.label_list(
allow_files = True,
doc = """\
A list of labels to library targets that should be unconditionally added as
implicit dependencies of any Swift compilation or linking target.
""",
providers = [
[CcInfo],
[SwiftInfo],
],
),
"_cc_toolchain": attr.label(
default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
doc = """\
The C++ toolchain from which linking flags and other tools needed by the Swift
toolchain (such as `clang`) will be retrieved.
""",
),
"_worker": attr.label(
cfg = "exec",
allow_files = True,
default = Label(
"@build_bazel_rules_swift//tools/worker",
),
doc = """\
An executable that wraps Swift compiler invocations and also provides support
for incremental compilation using a persistent mode.
""",
executable = True,
),
"_xcode_config": attr.label(
default = configuration_field(
name = "xcode_config_label",
fragment = "apple",
),
),
},
),
doc = "Represents a Swift compiler toolchain provided by Xcode.",
fragments = [
"apple",
"cpp",
"objc",
"swift",
],
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
incompatible_use_toolchain_transition = True,
implementation = _xcode_swift_toolchain_impl,
)
| 37.668187
| 81
| 0.649372
|
d654a950a6461927a0b7b28d0a1294305881d915
| 7,919
|
py
|
Python
|
antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
|
AntaresSimulatorTeam/antaREST
|
d686d2a86a52737c211ae67f3cee591f559909f2
|
[
"Apache-2.0"
] | 2
|
2020-09-30T11:40:22.000Z
|
2020-11-09T09:06:30.000Z
|
antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
|
AntaresSimulatorTeam/antaREST
|
d686d2a86a52737c211ae67f3cee591f559909f2
|
[
"Apache-2.0"
] | 542
|
2021-01-11T13:23:47.000Z
|
2022-03-31T15:38:10.000Z
|
antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
|
AntaresSimulatorTeam/antaREST
|
d686d2a86a52737c211ae67f3cee591f559909f2
|
[
"Apache-2.0"
] | 1
|
2020-10-01T12:18:15.000Z
|
2020-10-01T12:18:15.000Z
|
import re
from abc import ABC, abstractmethod
from typing import Tuple
import pandas as pd # type: ignore
class IDateMatrixSerializer(ABC):
"""
Abstract class to handle date index reading and writing for many time frequencies.
Used by OutputSeriesMatrix
"""
_MONTHS = {
"JAN": "01",
"FEB": "02",
"MAR": "03",
"APR": "04",
"MAY": "05",
"JUN": "06",
"JUL": "07",
"AUG": "08",
"SEP": "09",
"OCT": "10",
"NOV": "11",
"DEC": "12",
}
_R_MONTHS = {v: k for k, v in _MONTHS.items()}
UNM2 = "Unnamed: 2"
UNM3 = "Unnamed: 3"
UNM4 = "Unnamed: 4"
def __init__(self, area: str):
self.area = area
@abstractmethod
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
"""
Extract date from raw columns inside matrix file
Args:
df: raw matrix from file content
Returns: (date index, other matrix part)
"""
raise NotImplementedError()
@abstractmethod
def build_date(self, index: pd.Index) -> pd.DataFrame:
"""
Format a date index in the antares style
Args:
index: date index
Returns: raw matrix date in antares style, ready to be saved on disk
"""
raise NotImplementedError()
class HourlyMatrixSerializer(IDateMatrixSerializer):
"""
Class implementation for hourly index
"""
def build_date(self, index: pd.Index) -> pd.DataFrame:
def _map(row: str) -> Tuple[str, int, str, str, str]:
m, d, h = re.split("[\s/]", row)
return "", 1, d.strip("0"), IDateMatrixSerializer._R_MONTHS[m], h
items = index.map(_map).tolist()
matrix = pd.DataFrame(items)
matrix[1] = matrix[1].cumsum()
headers = pd.DataFrame(
[
[self.area.upper(), "hourly", "", "", ""],
["", "", "", "", ""],
["", "index", "day", "month", "hourly"],
]
)
return pd.concat([headers, matrix], axis=0)
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
# Extract left part with date
date = df.loc[
2:,
[
IDateMatrixSerializer.UNM2,
IDateMatrixSerializer.UNM3,
IDateMatrixSerializer.UNM4,
],
]
date.columns = ["day", "month", "hour"]
date["month"] = date["month"].map(IDateMatrixSerializer._MONTHS)
date = (
date["month"].astype(str)
+ "/"
+ date["day"].astype(str).str.zfill(2)
+ " "
+ date["hour"]
)
# Extract right part with data
node = df.columns[0]
body = df.drop(
[
node,
"hourly",
IDateMatrixSerializer.UNM2,
IDateMatrixSerializer.UNM3,
IDateMatrixSerializer.UNM4,
],
axis=1,
)
return pd.Index(date), body
class DailyMatrixSerializer(IDateMatrixSerializer):
"""
Class implementation for daily index
"""
def build_date(self, index: pd.Index) -> pd.DataFrame:
def _map(row: str) -> Tuple[str, int, str, str]:
m, d = row.split("/")
return "", 1, d.strip("0"), IDateMatrixSerializer._R_MONTHS[m]
items = index.map(_map).tolist()
matrix = pd.DataFrame(items)
matrix[1] = matrix[1].cumsum()
headers = pd.DataFrame(
[
[self.area.upper(), "daily", "", ""],
["", "", "", ""],
["", "index", "day", "month"],
]
)
return pd.concat([headers, matrix], axis=0)
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
# Extract left part with date
date = df.loc[
2:, [IDateMatrixSerializer.UNM2, IDateMatrixSerializer.UNM3]
]
date.columns = ["day", "month"]
date["month"] = date["month"].map(IDateMatrixSerializer._MONTHS)
date = (
date["month"].astype(str)
+ "/"
+ date["day"].astype(str).str.zfill(2)
)
# Extract right part with data
node = df.columns[0]
body = df.drop(
[
node,
"daily",
IDateMatrixSerializer.UNM2,
IDateMatrixSerializer.UNM3,
],
axis=1,
)
return pd.Index(date), body
class WeeklyMatrixSerializer(IDateMatrixSerializer):
"""
Class implementation for weekly index
"""
def build_date(self, index: pd.Index) -> pd.DataFrame:
matrix = pd.DataFrame({0: [""] * index.size, 1: index.values})
headers = pd.DataFrame(
[
[self.area.upper(), "weekly"],
["", ""],
["", "week"],
]
)
return pd.concat([headers, matrix], axis=0)
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
# Extract left part with date
date = df.loc[2:, ["weekly"]]
# Extract right part with data
node = df.columns[0]
body = df.drop([node, "weekly"], axis=1)
return pd.Index(date), body
class MonthlyMatrixSerializer(IDateMatrixSerializer):
"""
Class implementation for monthly index
"""
def build_date(self, index: pd.Index) -> pd.DataFrame:
matrix = pd.DataFrame(
{
0: [""] * index.size,
1: range(1, index.size + 1),
2: index.map(IDateMatrixSerializer._R_MONTHS),
}
)
headers = pd.DataFrame(
[
[self.area.upper(), "monthly", ""],
["", "", ""],
["", "index", "month"],
]
)
return pd.concat([headers, matrix], axis=0)
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
# Extract left part with date
date = df.loc[2:, [IDateMatrixSerializer.UNM2]]
date.columns = ["month"]
date["month"] = date["month"].map(IDateMatrixSerializer._MONTHS)
# Extract right part with data
node = df.columns[0]
body = df.drop([node, "monthly", IDateMatrixSerializer.UNM2], axis=1)
return pd.Index(date["month"]), body
class AnnualMatrixSerializer(IDateMatrixSerializer):
"""
Class implementation for annual index
"""
def build_date(self, index: pd.Index) -> pd.DataFrame:
return pd.DataFrame(
[
[self.area.upper(), "annual"],
["", ""],
["", ""],
["", "Annual"],
]
)
def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
# Extract left part with date
date = df.loc[2:, ["annual"]]
# Extract right part with data
node = df.columns[0]
body = df.drop([node, "annual"], axis=1)
return pd.Index(date["annual"]), body
class FactoryDateSerializer:
"""
Factory to choose the correct DateMatrixSerializer according to the antares time frequency
"""
@staticmethod
def create(freq: str, area: str) -> IDateMatrixSerializer:
if freq == "hourly":
return HourlyMatrixSerializer(area)
if freq == "daily":
return DailyMatrixSerializer(area)
if freq == "weekly":
return WeeklyMatrixSerializer(area)
if freq == "monthly":
return MonthlyMatrixSerializer(area)
if freq == "annual":
return AnnualMatrixSerializer(area)
raise NotImplementedError(
f"Any date serializer compatible with freq={freq}"
)
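# Hypothetical usage sketch (added; not part of the original module):
#   serializer = FactoryDateSerializer.create("daily", "some_area")
#   date_index, body = serializer.extract_date(raw_df)  # raw_df as read from disk
#   antares_style_dates = serializer.build_date(date_index)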
| 27.688811 | 84 | 0.514459 |
cd02c6b44d90afde98b5a039a4d575e330bb44dd | 1,085 | py | Python | src/python/pants/backend/codegen/targets/python_thrift_library.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | ["Apache-2.0"] | null | null | null | src/python/pants/backend/codegen/targets/python_thrift_library.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | ["Apache-2.0"] | null | null | null | src/python/pants/backend/codegen/targets/python_thrift_library.py | qma/pants | 604f58a366b66bc5cfa83e7250cb8af8130832cf | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.python.targets.python_target import PythonTarget
class PythonThriftLibrary(PythonTarget):
"""Generates a stub Python library from thrift IDL files."""
def __init__(self, **kwargs):
"""
:param sources: thrift source files (If more than one tries to use the same
namespace, beware https://issues.apache.org/jira/browse/THRIFT-515)
:type sources: ``Fileset`` or list of strings. Paths are relative to the
BUILD file's directory.
:param resources: non-Python resources, e.g. templates, keys, other data (it is
recommended that your application uses the pkgutil package to access these
resources in a .zip-module friendly way.)
"""
super(PythonThriftLibrary, self).__init__(**kwargs)
self.add_labels('codegen')
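# A hypothetical BUILD-file sketch (target name and file path are made up, not
# taken from the pants documentation): the target is declared with the usual
# `name` plus the `sources` documented above.
#
#   python_thrift_library(
#     name='my-thrift',
#     sources=['my_service.thrift'],
#   )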
| 40.185185 | 93 | 0.727189 |
e1763039b3a962a43f2fe3a22c05cb32cba596ed | 3,339 | py | Python | python/paddle/fluid/contrib/slim/tests/test_filter_pruning.py | ysh329/Paddle | 50ad9046c9a440564d104eaa354eb9df83a35678 | ["Apache-2.0"] | 3 | 2019-07-17T09:30:31.000Z | 2021-12-27T03:16:55.000Z | python/paddle/fluid/contrib/slim/tests/test_filter_pruning.py | ysh329/Paddle | 50ad9046c9a440564d104eaa354eb9df83a35678 | ["Apache-2.0"] | 1 | 2019-05-26T14:23:24.000Z | 2019-05-26T14:23:51.000Z | python/paddle/fluid/contrib/slim/tests/test_filter_pruning.py | ysh329/Paddle | 50ad9046c9a440564d104eaa354eb9df83a35678 | ["Apache-2.0"] | 1 | 2020-02-21T07:40:27.000Z | 2020-02-21T07:40:27.000Z |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import unittest
import paddle.fluid as fluid
from mobilenet import MobileNet
from paddle.fluid.contrib.slim.core import Compressor
from paddle.fluid.contrib.slim.graph import GraphWrapper
class TestFilterPruning(unittest.TestCase):
def test_compression(self):
"""
Model: mobilenet_v1
data: mnist
step1: Training one epoch
step2: pruning flops
step3: fine-tune one epoch
step4: check top1_acc.
"""
if not fluid.core.is_compiled_with_cuda():
return
class_dim = 10
image_shape = [1, 28, 28]
image = fluid.layers.data(
name='image', shape=image_shape, dtype='float32')
image.stop_gradient = False
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
out = MobileNet().net(input=image, class_dim=class_dim)
acc_top1 = fluid.layers.accuracy(input=out, label=label, k=1)
acc_top5 = fluid.layers.accuracy(input=out, label=label, k=5)
val_program = fluid.default_main_program().clone(for_test=False)
cost = fluid.layers.cross_entropy(input=out, label=label)
avg_cost = fluid.layers.mean(x=cost)
optimizer = fluid.optimizer.Momentum(
momentum=0.9,
learning_rate=0.01,
regularization=fluid.regularizer.L2Decay(4e-5))
place = fluid.CUDAPlace(0)
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
val_reader = paddle.batch(paddle.dataset.mnist.test(), batch_size=128)
val_feed_list = [('img', image.name), ('label', label.name)]
val_fetch_list = [('acc_top1', acc_top1.name), ('acc_top5',
acc_top5.name)]
train_reader = paddle.batch(
paddle.dataset.mnist.train(), batch_size=128)
train_feed_list = [('img', image.name), ('label', label.name)]
train_fetch_list = [('loss', avg_cost.name)]
com_pass = Compressor(
place,
fluid.global_scope(),
fluid.default_main_program(),
train_reader=train_reader,
train_feed_list=train_feed_list,
train_fetch_list=train_fetch_list,
eval_program=val_program,
eval_reader=val_reader,
eval_feed_list=val_feed_list,
eval_fetch_list=val_fetch_list,
train_optimizer=optimizer)
com_pass.config('./filter_pruning/compress.yaml')
eval_graph = com_pass.run()
self.assertTrue(
abs((com_pass.context.eval_results['acc_top1'][-1] - 0.969) / 0.969)
< 0.02)
if __name__ == '__main__':
unittest.main()
| 37.1 | 80 | 0.642108 |
649340015599110dc517305fe716af9244ba06fe | 28,982 | py | Python | torchmetrics/metric.py | BeyondTheProof/metrics | 8af688daff819a95f4cb3d757ffc919c86072ee9 | ["Apache-2.0"] | null | null | null | torchmetrics/metric.py | BeyondTheProof/metrics | 8af688daff819a95f4cb3d757ffc919c86072ee9 | ["Apache-2.0"] | null | null | null | torchmetrics/metric.py | BeyondTheProof/metrics | 8af688daff819a95f4cb3d757ffc919c86072ee9 | ["Apache-2.0"] | null | null | null |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import inspect
import operator as op
from abc import ABC, abstractmethod
from collections.abc import Sequence
from contextlib import contextmanager
from copy import deepcopy
from typing import Any, Callable, Dict, Generator, List, Optional, Union
import torch
from torch import Tensor, nn
from torch.nn import Module
from torchmetrics.utilities import apply_to_collection, rank_zero_warn
from torchmetrics.utilities.data import _flatten, dim_zero_cat, dim_zero_mean, dim_zero_sum
from torchmetrics.utilities.distributed import gather_all_tensors
from torchmetrics.utilities.exceptions import TorchMetricsUserError
from torchmetrics.utilities.imports import _LIGHTNING_AVAILABLE, _compare_version
def jit_distributed_available() -> bool:
return torch.distributed.is_available() and torch.distributed.is_initialized()
class Metric(nn.Module, ABC):
"""
Base class for all metrics present in the Metrics API.
Implements ``add_state()``, ``forward()``, ``reset()`` and a few other things to
handle distributed synchronization and per-step metric computation.
Override ``update()`` and ``compute()`` functions to implement your own metric. Use
``add_state()`` to register metric state variables which keep track of state on each
call of ``update()`` and are synchronized across processes when ``compute()`` is called.
Note:
Metric state variables can either be ``torch.Tensors`` or an empty list which can be used
to store ``torch.Tensors``.
Note:
Different metrics only override ``update()`` and not ``forward()``. A call to ``update()``
is valid, but it won't return the metric value at the current step. A call to ``forward()``
automatically calls ``update()`` and also returns the metric value at the current step.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False. default: True
dist_sync_on_step:
Synchronize metric state across processes at each ``forward()``
before returning the value at the step.
process_group:
Specify the process group on which synchronization is called. default: None (which selects the entire world)
dist_sync_fn:
Callback that performs the allgather operation on the metric state. When `None`, DDP
will be used to perform the allgather.
"""
__jit_ignored_attributes__ = ['is_differentiable', 'device', 'dtype']
def __init__(
self,
compute_on_step: bool = True,
dist_sync_on_step: bool = False,
process_group: Optional[Any] = None,
dist_sync_fn: Callable = None,
) -> None:
super().__init__()
# see (https://github.com/pytorch/pytorch/blob/3e6bb5233f9ca2c5aa55d9cda22a7ee85439aa6e/
# torch/nn/modules/module.py#L227)
torch._C._log_api_usage_once(f"torchmetrics.metric.{self.__class__.__name__}")
self._LIGHTNING_GREATER_EQUAL_1_3 = _compare_version("pytorch_lightning", op.ge, "1.3.0")
self.dist_sync_on_step = dist_sync_on_step
self.compute_on_step = compute_on_step
self.process_group = process_group
self.dist_sync_fn = dist_sync_fn
self._to_sync = True
self._should_unsync = True
self._update_signature = inspect.signature(self.update)
self.update: Callable = self._wrap_update(self.update) # type: ignore
self.compute: Callable = self._wrap_compute(self.compute) # type: ignore
self._computed = None
self._forward_cache = None
self._update_called = False
# initialize state
self._defaults: Dict[str, Union[List, Tensor]] = {}
self._persistent: Dict[str, bool] = {}
self._reductions: Dict[str, Union[str, Callable[[Union[List[Tensor], Tensor]], Tensor], None]] = {}
# state management
self._is_synced = False
self._cache: Optional[Dict[str, Union[List[Tensor], Tensor]]] = None
def add_state(
self,
name: str,
default: Union[list, Tensor],
dist_reduce_fx: Optional[Union[str, Callable]] = None,
persistent: bool = False
) -> None:
"""
Adds metric state variable. Only used by subclasses.
Args:
name: The name of the state variable. The variable will then be accessible at ``self.name``.
default: Default value of the state; can either be a ``torch.Tensor`` or an empty list. The state will be
reset to this value when ``self.reset()`` is called.
dist_reduce_fx (Optional): Function to reduce state across multiple processes in distributed mode.
If value is ``"sum"``, ``"mean"``, or ``"cat"``, we will use ``torch.sum``, ``torch.mean``,
and ``torch.cat`` respectively, each with argument ``dim=0``. Note that the ``"cat"`` reduction
only makes sense if the state is a list, and not a tensor. The user can also pass a custom
function in this parameter.
persistent (Optional): whether the state will be saved as part of the modules ``state_dict``.
Default is ``False``.
Note:
Setting ``dist_reduce_fx`` to None will return the metric state synchronized across different processes.
However, there won't be any reduction function applied to the synchronized metric state.
The metric states would be synced as follows
- If the metric state is ``torch.Tensor``, the synced value will be a stacked ``torch.Tensor`` across
the process dimension if the metric state was a ``torch.Tensor``. The original ``torch.Tensor`` metric
state retains dimension and hence the synchronized output will be of shape ``(num_process, ...)``.
- If the metric state is a ``list``, the synced value will be a ``list`` containing the
combined elements from all processes.
Note:
When passing a custom function to ``dist_reduce_fx``, expect the synchronized metric state to follow
the format discussed in the above note.
Raises:
ValueError:
If ``default`` is not a ``tensor`` or an ``empty list``.
ValueError:
If ``dist_reduce_fx`` is not callable or one of ``"mean"``, ``"sum"``, ``"cat"``, ``None``.
"""
if not isinstance(default, (Tensor, list)) or (isinstance(default, list) and default):
raise ValueError("state variable must be a tensor or any empty list (where you can append tensors)")
if dist_reduce_fx == "sum":
dist_reduce_fx = dim_zero_sum
elif dist_reduce_fx == "mean":
dist_reduce_fx = dim_zero_mean
elif dist_reduce_fx == "cat":
dist_reduce_fx = dim_zero_cat
elif dist_reduce_fx is not None and not callable(dist_reduce_fx):
raise ValueError("`dist_reduce_fx` must be callable or one of ['mean', 'sum', 'cat', None]")
if isinstance(default, Tensor):
default = default.contiguous()
setattr(self, name, default)
self._defaults[name] = deepcopy(default)
self._persistent[name] = persistent
self._reductions[name] = dist_reduce_fx
@torch.jit.unused
def forward(self, *args: Any, **kwargs: Any) -> Any:
"""
Automatically calls ``update()``. Returns the metric value over inputs if ``compute_on_step`` is True.
"""
# add current step
if self._is_synced:
raise TorchMetricsUserError(
"The Metric shouldn't be synced when performing ``update``. "
"HINT: Did you forget to call ``unsync`` ?."
)
with torch.no_grad():
self.update(*args, **kwargs)
if self.compute_on_step:
self._to_sync = self.dist_sync_on_step
# skip restore cache operation from compute as cache is stored below.
self._should_unsync = False
# save context before switch
cache = {attr: getattr(self, attr) for attr in self._defaults}
# call reset, update, compute, on single batch
self.reset()
self.update(*args, **kwargs)
self._forward_cache = self.compute()
# restore context
for attr, val in cache.items():
setattr(self, attr, val)
self._is_synced = False
self._should_unsync = True
self._to_sync = True
self._computed = None
return self._forward_cache
def _sync_dist(self, dist_sync_fn: Callable = gather_all_tensors, process_group: Optional[Any] = None) -> None:
input_dict = {attr: getattr(self, attr) for attr in self._reductions}
for attr, reduction_fn in self._reductions.items():
# pre-concatenate metric states that are lists to reduce number of all_gather operations
if reduction_fn == dim_zero_cat and isinstance(input_dict[attr], list) and len(input_dict[attr]) > 1:
input_dict[attr] = [dim_zero_cat(input_dict[attr])]
output_dict = apply_to_collection(
input_dict,
Tensor,
dist_sync_fn,
group=process_group or self.process_group,
)
for attr, reduction_fn in self._reductions.items():
# pre-processing ops (stack or flatten for inputs)
if isinstance(output_dict[attr][0], Tensor):
output_dict[attr] = torch.stack(output_dict[attr])
elif isinstance(output_dict[attr][0], list):
output_dict[attr] = _flatten(output_dict[attr])
if not (callable(reduction_fn) or reduction_fn is None):
raise TypeError('reduction_fn must be callable or None')
reduced = reduction_fn(output_dict[attr]) if reduction_fn is not None else output_dict[attr]
setattr(self, attr, reduced)
def _wrap_update(self, update: Callable) -> Callable:
@functools.wraps(update)
def wrapped_func(*args: Any, **kwargs: Any) -> Optional[Any]:
self._computed = None
self._update_called = True
return update(*args, **kwargs)
return wrapped_func
def sync(
self,
dist_sync_fn: Optional[Callable] = None,
process_group: Optional[Any] = None,
should_sync: bool = True,
distributed_available: Optional[Callable] = jit_distributed_available,
) -> None:
"""
Sync function for manually controlling when metrics states should be synced across processes
Args:
dist_sync_fn: Function to be used to perform states synchronization
process_group:
Specify the process group on which synchronization is called.
default: None (which selects the entire world)
should_sync: Whether to apply to state synchronization. This will have an impact
only when running in a distributed setting.
distributed_available: Function to determine if we are running inside a distributed setting
"""
if self._is_synced and should_sync:
raise TorchMetricsUserError("The Metric has already been synced.")
is_distributed = distributed_available() if callable(distributed_available) else None
if not should_sync or not is_distributed:
return
if dist_sync_fn is None:
dist_sync_fn = gather_all_tensors
# cache prior to syncing
self._cache = {attr: getattr(self, attr) for attr in self._defaults}
# sync
self._sync_dist(dist_sync_fn, process_group=process_group)
self._is_synced = True
def unsync(self, should_unsync: bool = True) -> None:
"""
Unsync function for manually controlling when metrics states should be reverted back to their local states.
Args:
should_unsync: Whether to perform unsync
"""
if not should_unsync:
return
if not self._is_synced:
raise TorchMetricsUserError("The Metric has already been un-synced.")
if self._cache is None:
raise TorchMetricsUserError("The internal cache should exist to unsync the Metric.")
# if we synced, restore to cache so that we can continue to accumulate un-synced state
for attr, val in self._cache.items():
setattr(self, attr, val)
self._is_synced = False
self._cache = None
@contextmanager
def sync_context(
self,
dist_sync_fn: Optional[Callable] = None,
process_group: Optional[Any] = None,
should_sync: bool = True,
should_unsync: bool = True,
distributed_available: Optional[Callable] = jit_distributed_available,
) -> Generator:
"""
Context manager to synchronize the states between processes when running in a distributed setting
and restore the local cache states after yielding.
Args:
dist_sync_fn: Function to be used to perform states synchronization
process_group:
Specify the process group on which synchronization is called.
default: None (which selects the entire world)
should_sync: Whether to apply to state synchronization. This will have an impact
only when running in a distributed setting.
should_unsync: Whether to restore the cache state so that the metrics can
continue to be accumulated.
distributed_available: Function to determine if we are running inside a distributed setting
"""
self.sync(
dist_sync_fn=dist_sync_fn,
process_group=process_group,
should_sync=should_sync,
distributed_available=distributed_available
)
yield
self.unsync(should_unsync=self._is_synced and should_unsync)
def _wrap_compute(self, compute: Callable) -> Callable:
@functools.wraps(compute)
def wrapped_func(*args: Any, **kwargs: Any) -> Any:
if not self._update_called:
rank_zero_warn(
f"The ``compute`` method of metric {self.__class__.__name__}"
" was called before the ``update`` method which may lead to errors,"
" as metric states have not yet been updated.", UserWarning
)
# return cached value
if self._computed is not None:
return self._computed
# compute relies on the sync context manager to gather the states across processes and apply reduction
# if synchronization happened, the current rank accumulated states will be restored to keep
# accumulation going if ``should_unsync=True``,
with self.sync_context(
dist_sync_fn=self.dist_sync_fn, should_sync=self._to_sync, should_unsync=self._should_unsync
):
self._computed = compute(*args, **kwargs)
return self._computed
return wrapped_func
@abstractmethod
def update(self, *_: Any, **__: Any) -> None:
"""
Override this method to update the state variables of your metric class.
"""
@abstractmethod
def compute(self) -> Any:
"""
Override this method to compute the final metric value from state variables
synchronized across the distributed backend.
"""
def reset(self) -> None:
"""
This method automatically resets the metric state variables to their default value.
"""
self._update_called = False
self._forward_cache = None
# lower lightning versions require this implicitly to log metric objects correctly in self.log
if not _LIGHTNING_AVAILABLE or self._LIGHTNING_GREATER_EQUAL_1_3:
self._computed = None
for attr, default in self._defaults.items():
current_val = getattr(self, attr)
if isinstance(default, Tensor):
setattr(self, attr, default.detach().clone().to(current_val.device))
else:
setattr(self, attr, [])
# reset internal states
self._cache = None
self._is_synced = False
def clone(self) -> "Metric":
""" Make a copy of the metric """
return deepcopy(self)
def __getstate__(self) -> Dict[str, Any]:
# ignore update and compute functions for pickling
return {k: v for k, v in self.__dict__.items() if k not in ["update", "compute", "_update_signature"]}
def __setstate__(self, state: Dict[str, Any]) -> None:
# manually restore update and compute functions for pickling
self.__dict__.update(state)
self._update_signature = inspect.signature(self.update)
self.update: Callable = self._wrap_update(self.update) # type: ignore
self.compute: Callable = self._wrap_compute(self.compute) # type: ignore
def _apply(self, fn: Callable) -> Module:
"""Overwrite _apply function such that we can also move metric states
to the correct device when `.to`, `.cuda`, etc methods are called
"""
this = super()._apply(fn)
# Also apply fn to metric states and defaults
for key, value in this._defaults.items():
if isinstance(value, Tensor):
this._defaults[key] = fn(value)
elif isinstance(value, Sequence):
this._defaults[key] = [fn(v) for v in value]
current_val = getattr(this, key)
if isinstance(current_val, Tensor):
setattr(this, key, fn(current_val))
elif isinstance(current_val, Sequence):
setattr(this, key, [fn(cur_v) for cur_v in current_val])
else:
raise TypeError(
"Expected metric state to be either a Tensor"
f"or a list of Tensor, but encountered {current_val}"
)
return this
def persistent(self, mode: bool = False) -> None:
"""Method for post-init to change if metric states should be saved to
its state_dict
"""
for key in self._persistent:
self._persistent[key] = mode
def state_dict(
self,
destination: Dict[str, Any] = None,
prefix: str = "",
keep_vars: bool = False,
) -> Optional[Dict[str, Any]]:
destination = super().state_dict(destination=destination, prefix=prefix, keep_vars=keep_vars)
# Register metric states to be part of the state_dict
for key in self._defaults:
if not self._persistent[key]:
continue
current_val = getattr(self, key)
if not keep_vars:
if isinstance(current_val, Tensor):
current_val = current_val.detach()
elif isinstance(current_val, list):
current_val = [cur_v.detach() if isinstance(cur_v, Tensor) else cur_v for cur_v in current_val]
destination[prefix + key] = deepcopy(current_val) # type: ignore
return destination
def _load_from_state_dict(
self,
state_dict: dict,
prefix: str,
local_metadata: dict,
strict: bool,
missing_keys: List[str],
unexpected_keys: List[str],
error_msgs: List[str],
) -> None:
""" Loads metric states from state_dict """
for key in self._defaults:
name = prefix + key
if name in state_dict:
setattr(self, key, state_dict.pop(name))
super()._load_from_state_dict(
state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs
)
def _filter_kwargs(self, **kwargs: Any) -> Dict[str, Any]:
""" filter kwargs such that they match the update signature of the metric """
# filter all parameters based on update signature except those of
# type VAR_POSITIONAL (*args) and VAR_KEYWORD (**kwargs)
_params = (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
_sign_params = self._update_signature.parameters
filtered_kwargs = {
k: v
for k, v in kwargs.items() if (k in _sign_params.keys() and _sign_params[k].kind not in _params)
}
# if no kwargs filtered, return all kwargs as default
if not filtered_kwargs:
filtered_kwargs = kwargs
return filtered_kwargs
def __hash__(self) -> int:
hash_vals = [self.__class__.__name__]
for key in self._defaults:
val = getattr(self, key)
# Special case: allow list values, so long
# as their elements are hashable
if hasattr(val, "__iter__") and not isinstance(val, Tensor):
hash_vals.extend(val)
else:
hash_vals.append(val)
return hash(tuple(hash_vals))
def __add__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.add, self, other)
def __and__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.bitwise_and, self, other)
# Fixme: this shall return bool instead of Metric
def __eq__(self, other: "Metric") -> "Metric": # type: ignore
return CompositionalMetric(torch.eq, self, other)
def __floordiv__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.floor_divide, self, other)
def __ge__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.ge, self, other)
def __gt__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.gt, self, other)
def __le__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.le, self, other)
def __lt__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.lt, self, other)
def __matmul__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.matmul, self, other)
def __mod__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.fmod, self, other)
def __mul__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.mul, self, other)
# Fixme: this shall return bool instead of Metric
def __ne__(self, other: "Metric") -> "Metric": # type: ignore
return CompositionalMetric(torch.ne, self, other)
def __or__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.bitwise_or, self, other)
def __pow__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.pow, self, other)
def __radd__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.add, other, self)
def __rand__(self, other: "Metric") -> "Metric":
# argument order is kept as (self, other): bitwise_and is commutative anyway
return CompositionalMetric(torch.bitwise_and, self, other)
def __rfloordiv__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.floor_divide, other, self)
def __rmatmul__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.matmul, other, self)
def __rmod__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.fmod, other, self)
def __rmul__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.mul, other, self)
def __ror__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.bitwise_or, other, self)
def __rpow__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.pow, other, self)
def __rsub__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.sub, other, self)
def __rtruediv__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.true_divide, other, self)
def __rxor__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.bitwise_xor, other, self)
def __sub__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.sub, self, other)
def __truediv__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.true_divide, self, other)
def __xor__(self, other: "Metric") -> "Metric":
return CompositionalMetric(torch.bitwise_xor, self, other)
def __abs__(self) -> "Metric":
return CompositionalMetric(torch.abs, self, None)
def __inv__(self) -> "Metric":
return CompositionalMetric(torch.bitwise_not, self, None)
def __invert__(self) -> "Metric":
return self.__inv__()
def __neg__(self) -> "Metric":
return CompositionalMetric(_neg, self, None)
def __pos__(self) -> "Metric":
return CompositionalMetric(torch.abs, self, None)
def __getitem__(self, idx: int) -> "Metric":
return CompositionalMetric(lambda x: x[idx], self, None)
@property
def is_differentiable(self) -> Optional[bool]:
# There is a bug in PyTorch that leads to properties being executed during scripting
# To make the metric scriptable, we add property to ignore list and switch to return None here
return None
def _neg(x: Tensor) -> Tensor:
return -torch.abs(x)
class CompositionalMetric(Metric):
"""Composition of two metrics with a specific operator which will be executed upon metrics compute """
def __init__(
self,
operator: Callable,
metric_a: Union[Metric, int, float, Tensor],
metric_b: Union[Metric, int, float, Tensor, None],
) -> None:
"""
Args:
operator: the operator taking in one (if metric_b is None)
or two arguments. Will be applied to outputs of metric_a.compute()
and (optionally if metric_b is not None) metric_b.compute()
metric_a: first metric whose compute() result is the first argument of operator
metric_b: second metric whose compute() result is the second argument of operator.
For operators taking in only one input, this should be None
"""
super().__init__()
self.op = operator
if isinstance(metric_a, Tensor):
self.register_buffer("metric_a", metric_a)
else:
self.metric_a = metric_a
if isinstance(metric_b, Tensor):
self.register_buffer("metric_b", metric_b)
else:
self.metric_b = metric_b
def _sync_dist(self, dist_sync_fn: Optional[Callable] = None, *_: Any) -> None:
# No syncing required here. syncing will be done in metric_a and metric_b
pass
def update(self, *args: Any, **kwargs: Any) -> None:
if isinstance(self.metric_a, Metric):
self.metric_a.update(*args, **self.metric_a._filter_kwargs(**kwargs))
if isinstance(self.metric_b, Metric):
self.metric_b.update(*args, **self.metric_b._filter_kwargs(**kwargs))
def compute(self) -> Any:
# also some parsing for kwargs?
if isinstance(self.metric_a, Metric):
val_a = self.metric_a.compute()
else:
val_a = self.metric_a
if isinstance(self.metric_b, Metric):
val_b = self.metric_b.compute()
else:
val_b = self.metric_b
if val_b is None:
return self.op(val_a)
return self.op(val_a, val_b)
def reset(self) -> None:
if isinstance(self.metric_a, Metric):
self.metric_a.reset()
if isinstance(self.metric_b, Metric):
self.metric_b.reset()
def persistent(self, mode: bool = False) -> None:
if isinstance(self.metric_a, Metric):
self.metric_a.persistent(mode=mode)
if isinstance(self.metric_b, Metric):
self.metric_b.persistent(mode=mode)
def __repr__(self) -> str:
_op_metrics = f"(\n {self.op.__name__}(\n {repr(self.metric_a)},\n {repr(self.metric_b)}\n )\n)"
repr_str = self.__class__.__name__ + _op_metrics
return repr_str
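# A minimal sketch of a user-defined metric built on the base class above (the
# class name and inputs are illustrative only): register states with
# ``add_state``, accumulate in ``update`` and combine in ``compute``.
#
#   class SumMetric(Metric):
#       def __init__(self):
#           super().__init__()
#           self.add_state("total", default=torch.tensor(0.0), dist_reduce_fx="sum")
#
#       def update(self, values: Tensor) -> None:
#           self.total += values.sum()
#
#       def compute(self) -> Tensor:
#           return self.total
#
#   metric = SumMetric()
#   metric(torch.tensor([1.0, 2.0]))  # forward() -> update(), returns the per-step value
#   metric.compute()                  # tensor(3.)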
| 40.141274 | 120 | 0.634049 |
44e0b9c37210dc3065329b2885e6634223b96c19 | 4,905 | py | Python | tests/integration/aws_interface/test_aws_interface_get_resources.py | Sam-Martin/cloud-wanderer | 1879f9bb150054be5bf33fd46a47414b4939529e | ["MIT"] | 1 | 2020-12-07T10:37:41.000Z | 2020-12-07T10:37:41.000Z | tests/integration/aws_interface/test_aws_interface_get_resources.py | Sam-Martin/cloud-wanderer | 1879f9bb150054be5bf33fd46a47414b4939529e | ["MIT"] | null | null | null | tests/integration/aws_interface/test_aws_interface_get_resources.py | Sam-Martin/cloud-wanderer | 1879f9bb150054be5bf33fd46a47414b4939529e | ["MIT"] | null | null | null |
from unittest.mock import ANY
import pytest
from moto import mock_ec2, mock_iam, mock_sts
from itertools import islice
from cloudwanderer import URN
from cloudwanderer.aws_interface.models import AWSResourceTypeFilter
from cloudwanderer.exceptions import UnsupportedResourceTypeError, UnsupportedServiceError
from ...pytest_helpers import compare_dict_allow_any, create_iam_policy, create_iam_role
@mock_ec2
@mock_sts
def test_get_resources_of_type_in_region_eu_west_2(aws_interface):
result = list(
aws_interface.get_resources(
service_name="ec2",
resource_type="vpc",
region="eu-west-2",
)
)[0]
compare_dict_allow_any(
dict(result),
{
"cidr_block": "172.31.0.0/16",
"cidr_block_association_set": ANY,
"cloudwanderer_metadata": {
"CidrBlock": "172.31.0.0/16",
"CidrBlockAssociationSet": [
{
"AssociationId": ANY,
"CidrBlock": "172.31.0.0/16",
"CidrBlockState": {"State": "associated"},
}
],
"DhcpOptionsId": ANY,
"EnableDnsSupport": True,
"InstanceTenancy": "default",
"Ipv6CidrBlockAssociationSet": [],
"IsDefault": True,
"OwnerId": ANY,
"State": "available",
"Tags": [],
"VpcId": ANY,
},
"dependent_resource_urns": [],
"dhcp_options_id": ANY,
"discovery_time": ANY,
"enable_dns_support": True,
"instance_tenancy": "default",
"ipv6_cidr_block_association_set": [],
"is_default": True,
"owner_id": ANY,
"parent_urn": None,
"relationships": ANY,
"state": "available",
"tags": [],
"urn": ANY,
"vpc_id": ANY,
},
)
@mock_iam
@mock_sts
def test_get_resources_of_type_in_region_us_east_1(aws_interface):
create_iam_role()
result = list(aws_interface.get_resources(service_name="iam", resource_type="role", region="us-east-1"))[0]
compare_dict_allow_any(
dict(result),
{
"urn": URN(
cloud_name="aws",
account_id="123456789012",
region="us-east-1",
service="iam",
resource_type="role_policy",
resource_id_parts=["test-role", "test-role-policy"],
),
"relationships": [],
"dependent_resource_urns": [],
"parent_urn": URN(
cloud_name="aws",
account_id="123456789012",
region="us-east-1",
service="iam",
resource_type="role",
resource_id_parts=["test-role"],
),
"cloudwanderer_metadata": {
"RoleName": "test-role",
"PolicyName": "test-role-policy",
"PolicyDocument": {
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Action": "s3:ListBucket",
"Resource": "arn:aws:s3:::example_bucket",
},
},
},
"discovery_time": ANY,
"role_name": "test-role",
"policy_name": "test-role-policy",
"policy_document": {
"Version": "2012-10-17",
"Statement": {"Effect": "Allow", "Action": "s3:ListBucket", "Resource": "arn:aws:s3:::example_bucket"},
},
},
)
def test_get_resources_unsupported_service(aws_interface):
with pytest.raises(UnsupportedServiceError):
list(aws_interface.get_resources(service_name="unicorn_stable", resource_type="instance", region="eu-west-1"))
def test_get_resources_unsupported_resource_type(aws_interface):
with pytest.raises(
UnsupportedResourceTypeError,
match="Could not find Boto3 collection for unicorn",
):
list(aws_interface.get_resources(service_name="ec2", resource_type="unicorn", region="eu-west-1"))
@mock_iam
@mock_sts
def test_jmespath_filters(aws_interface):
create_iam_policy()
result = aws_interface.get_resources(
service_name="iam",
resource_type="policy",
region="us-east-1",
service_resource_type_filters=[
AWSResourceTypeFilter(
service="iam", resource_type="policy_version", jmespath_filters=["[?IsDefaultVersion==`true`]"]
)
],
)
assert list(islice((r.is_default_version for r in result if hasattr(r, "is_default_version")), 10)) == [True] * 10
# TODO: test custom and default filters
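# For reference, a standalone illustration (not part of the test suite) of how a
# JMESPath expression like "[?IsDefaultVersion==`true`]" filters a list, using
# the `jmespath` package; the input dicts are made up for the example.
#
#   import jmespath
#   versions = [{"IsDefaultVersion": True}, {"IsDefaultVersion": False}]
#   jmespath.search("[?IsDefaultVersion==`true`]", versions)  # -> [{'IsDefaultVersion': True}]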
| 33.141892 | 119 | 0.543119 |
f63a6b9ec6cbf2b2af6ac99df17208f1acc95033 | 1,325 | py | Python | src/core/tests/test_admin.py | mho85/recipe-app-api | f08759e0fe15d11ecb18298ece6b2b458fdc68c5 | ["MIT"] | 1 | 2021-03-29T16:51:49.000Z | 2021-03-29T16:51:49.000Z | src/core/tests/test_admin.py | mho85/recipe-app-api | f08759e0fe15d11ecb18298ece6b2b458fdc68c5 | ["MIT"] | null | null | null | src/core/tests/test_admin.py | mho85/recipe-app-api | f08759e0fe15d11ecb18298ece6b2b458fdc68c5 | ["MIT"] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTest(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='admin@geeglee.net',
password='admin123'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='user@geeglee.net',
password='user123',
name='Test user'
)
def test_users_listed(self):
"""Test that users are listed on user page"""
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
"""Test that user edit page works"""
url = reverse('admin:core_user_change', args=[self.user.id])
# admin/core/user/id
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
"""Test that create user page works"""
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| 30.113636 | 68 | 0.633208 |
3b62089f5f92dc54c055e13570951fa4c6bfccf6 | 2,937 | py | Python | cvxpy/atoms/elementwise/kl_div.py | hashstat/cvxpy | 20d667ebe8614821fa38e41b1e333257512d9594 | ["ECL-2.0", "Apache-2.0"] | 1 | 2022-01-19T05:24:56.000Z | 2022-01-19T05:24:56.000Z | cvxpy/atoms/elementwise/kl_div.py | hashstat/cvxpy | 20d667ebe8614821fa38e41b1e333257512d9594 | ["ECL-2.0", "Apache-2.0"] | null | null | null | cvxpy/atoms/elementwise/kl_div.py | hashstat/cvxpy | 20d667ebe8614821fa38e41b1e333257512d9594 | ["ECL-2.0", "Apache-2.0"] | null | null | null |
"""
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division
from typing import List, Tuple, Optional
import numpy as np
from scipy.sparse import csc_matrix
from scipy.special import kl_div as kl_div_scipy
from cvxpy.atoms.elementwise.elementwise import Elementwise
class kl_div(Elementwise):
""":math:`x\\log(x/y) - x + y`
For disambiguation between kl_div and rel_entr, see https://github.com/cvxpy/cvxpy/issues/733
"""
def __init__(self, x, y) -> None:
super(kl_div, self).__init__(x, y)
@Elementwise.numpy_numeric
def numeric(self, values):
x = values[0]
y = values[1]
return kl_div_scipy(x, y)
def sign_from_args(self) -> Tuple[bool, bool]:
"""Returns sign (is positive, is negative) of the expression.
"""
# Always positive.
return (True, False)
def is_atom_convex(self) -> bool:
"""Is the atom convex?
"""
return True
def is_atom_concave(self) -> bool:
"""Is the atom concave?
"""
return False
def is_incr(self, idx) -> bool:
"""Is the composition non-decreasing in argument idx?
"""
return False
def is_decr(self, idx) -> bool:
"""Is the composition non-increasing in argument idx?
"""
return False
def _grad(self, values) -> List[Optional[csc_matrix]]:
"""Gives the (sub/super)gradient of the atom w.r.t. each argument.
Matrix expressions are vectorized, so the gradient is a matrix.
Args:
values: A list of numeric values for the arguments.
Returns:
A list of SciPy CSC sparse matrices or None.
"""
if np.min(values[0]) <= 0 or np.min(values[1]) <= 0:
# Non-differentiable.
return [None, None]
else:
div = values[0]/values[1]
grad_vals = [np.log(div), 1 - div]
grad_list = []
for idx in range(len(values)):
rows = self.args[idx].size
cols = self.size
grad_list += [kl_div.elemwise_grad_to_diag(grad_vals[idx],
rows, cols)]
return grad_list
def _domain(self):
"""Returns constraints describing the domain of the node.
"""
return [self.args[0] >= 0, self.args[1] >= 0]
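# A short numeric sketch (illustrative only) of the value and gradient formulas
# used above: for x, y > 0 the atom evaluates x*log(x/y) - x + y, with partial
# derivatives log(x/y) w.r.t. x and 1 - x/y w.r.t. y.
#
#   import numpy as np
#   from scipy.special import kl_div as kl_div_scipy
#   x, y = 2.0, 1.0
#   kl_div_scipy(x, y)         # 2*log(2) - 2 + 1 ~= 0.3863
#   np.log(x / y), 1 - x / y   # gradient entries: (log 2, -1)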
| 29.969388 | 97 | 0.609465 |
9f723f1f8c610430878b84865b699bc3bd1e3279 | 5,775 | py | Python | tests/test_utils.py | frdwhite24/frewpy | 79368c69b918404e4d1ee5c3a7b6a88cee994d2f | ["MIT"] | 3 | 2020-10-02T15:49:44.000Z | 2021-11-22T15:39:55.000Z | tests/test_utils.py | frdwhite24/frewpy | 79368c69b918404e4d1ee5c3a7b6a88cee994d2f | ["MIT"] | 1 | 2020-10-02T09:07:04.000Z | 2020-10-02T09:07:04.000Z | tests/test_utils.py | frdwhite24/frewpy | 79368c69b918404e4d1ee5c3a7b6a88cee994d2f | ["MIT"] | 2 | 2021-04-20T14:43:14.000Z | 2021-12-09T10:02:55.000Z |
import os
import pytest
from test_config import TEST_DATA
from test_fixtures import json_data, json_data_with_results
from frewpy.utils import (
_check_frew_path,
check_json_path,
load_data,
model_to_json,
get_titles,
get_file_history,
get_file_version,
get_frew_version,
get_num_stages,
get_stage_names,
get_num_nodes,
clear_results,
get_num_design_cases,
get_design_case_names,
check_results_present,
)
from frewpy.models.exceptions import FrewError, NodeError
# Re-assign the imported fixtures at module level so linters do not flag the
# fixture imports as unused; pytest still injects them by name.
json_data = json_data
json_data_with_results = json_data_with_results
def test_check_frew_path_type():
with pytest.raises(FrewError):
_check_frew_path(5)
def test_check_frew_path_exists():
with pytest.raises(FrewError):
_check_frew_path("path_does_not_exists.fwd")
def test_check_frew_path_extension():
with pytest.raises(FrewError):
_check_frew_path(os.path.join(TEST_DATA, "test_model_1.json"))
def test_check_json_path_type():
with pytest.raises(FrewError):
check_json_path(5)
def test_model_to_json():
json_path = model_to_json(
os.path.join(TEST_DATA, "convert_model_test.fwd")
)
path_exists = os.path.exists(json_path)
os.remove(json_path)
assert path_exists
def test_load_data():
loaded_data = load_data(os.path.join(TEST_DATA, "test_model_1.json"))
assert list(loaded_data.keys()) == [
"OasysHeader",
"JsonSchema",
"File history",
"Version",
"Units",
"Materials",
"Struts",
"Loads",
"Stages",
"Partial Factor Sets",
"Node Generation",
"Integral Bridge Data",
]
def test_check_json_path_exists():
with pytest.raises(FrewError):
check_json_path("path_does_not_exists.json")
def test_check_json_path_extension():
with pytest.raises(FrewError):
check_json_path(os.path.join(TEST_DATA, "test_model_1.fwd"))
def test_clear_results(json_data_with_results):
json_data_without_results = clear_results(json_data_with_results)
assert not json_data_without_results.get("Frew Results", False)
def test_titles_job_number(json_data):
assert get_titles(json_data)["JobNumber"] == "261026"
def test_titles_job_title(json_data):
assert get_titles(json_data)["JobTitle"] == "Elizabeth House "
def test_titles_initials(json_data):
assert get_titles(json_data)["Initials"] == "FW"
def test_titles_key_error():
with pytest.raises(FrewError):
get_titles({"None": 1})
def test_titles_index_error():
with pytest.raises(FrewError):
get_titles({"OasysHeader": []})
def test_get_file_history_first(json_data):
assert get_file_history(json_data)[0] == {
"Date": "10-Jun-2020",
"Time": "08:01",
"Mode": "Edit",
"User": "Fred.White",
"Comments": "New",
}
def test_get_file_history_third(json_data):
assert get_file_history(json_data)[2] == {
"Date": "10-Jun-2020",
"Time": "09:06",
"Mode": "Edit",
"User": "Fred.White",
"Comments": "Open",
}
def test_get_file_history_key_error():
with pytest.raises(FrewError):
get_file_history({"None": 1})
def test_get_file_version(json_data):
assert get_file_version(json_data) == "19.4.0.23"
def test_get_file_version_key_error():
with pytest.raises(FrewError):
get_file_version({"None": 1})
def test_get_file_version_index_error():
with pytest.raises(FrewError):
get_file_version({"OasysHeader": []})
def test_get_frew_version(json_data):
assert get_frew_version(json_data) == "19.4"
def test_get_frew_version_key_error():
with pytest.raises(FrewError):
get_frew_version({"None": 1})
def test_get_frew_version_index_error():
with pytest.raises(FrewError):
get_frew_version({"OasysHeader": []})
def test_get_num_stages(json_data):
assert get_num_stages(json_data) == 11
def test_get_num_stages_key_error():
with pytest.raises(FrewError):
get_num_stages({"None": 1})
def test_second_stage_name(json_data):
assert (
get_stage_names(json_data)[1] == " Install wall (900mm @ 1300mm C/C)"
)
def test_fifth_stage_name(json_data):
assert get_stage_names(json_data)[4] == (
" Cast B02 floor slab (350mm thk @ -6.375mOD)"
)
def test_ninth_stage_name(json_data):
assert get_stage_names(json_data)[8] == (
" Cast B03 floor slab (2000mm thk @ -12.7mOD) - prop"
)
def test_get_num_nodes(json_data):
assert get_num_nodes(json_data) == 68
def test_get_num_nodes_none():
example_dict = {"Stages": [{}]}
assert get_num_nodes(example_dict) == 0
def test_get_num_nodes_different_per_stage():
with pytest.raises(NodeError):
example_dict = {
"Stages": [
{"GeoFrewNodes": ["Node1", "Node2"]},
{"GeoFrewNodes": ["Node1", "Node2", "Node3"]},
]
}
get_num_nodes(example_dict)
def test_get_num_design_cases(json_data_with_results):
assert get_num_design_cases(json_data_with_results) == 1
def test_get_num_design_cases_none(json_data):
with pytest.raises(FrewError):
get_num_design_cases(json_data)
def test_get_design_case_names(json_data_with_results):
assert get_design_case_names(json_data_with_results) == ["SLS"]
def test_get_design_case_names_none(json_data):
with pytest.raises(FrewError):
get_design_case_names(json_data)
def test_check_results_present_none(json_data):
with pytest.raises(FrewError):
check_results_present(json_data)
def test_check_results_present(json_data_with_results):
check_results_present(json_data_with_results)
assert True
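# A minimal end-to-end sketch of the utils exercised by these tests (the .fwd
# path is a placeholder): convert a Frew model to JSON, load it, then query it.
#
#   json_path = model_to_json("path/to/model.fwd")
#   data = load_data(json_path)
#   get_titles(data)["JobTitle"], get_num_stages(data)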
| 23.962656 | 77 | 0.692814 |
95542c73265ce46eb8c06a668d11c5e4f6e763d0 | 18,588 | py | Python | ucscsdk/mometa/config/ConfigConsumerCatalogue.py | parag-may4/ucscsdk | 2ea762fa070330e3a4e2c21b46b157469555405b | ["Apache-2.0"] | 9 | 2016-12-22T08:39:25.000Z | 2019-09-10T15:36:19.000Z | ucscsdk/mometa/config/ConfigConsumerCatalogue.py | parag-may4/ucscsdk | 2ea762fa070330e3a4e2c21b46b157469555405b | ["Apache-2.0"] | 10 | 2017-01-31T06:59:56.000Z | 2021-11-09T09:14:37.000Z | ucscsdk/mometa/config/ConfigConsumerCatalogue.py | parag-may4/ucscsdk | 2ea762fa070330e3a4e2c21b46b157469555405b | ["Apache-2.0"] | 13 | 2016-11-14T07:42:58.000Z | 2022-02-10T17:32:05.000Z |
"""This module contains the general information for ConfigConsumerCatalogue ManagedObject."""
from ...ucscmo import ManagedObject
from ...ucsccoremeta import UcscVersion, MoPropertyMeta, MoMeta
from ...ucscmeta import VersionMeta
class ConfigConsumerCatalogueConsts():
ADMIN_STATE_ACTIVE = "active"
ADMIN_STATE_DELETE = "delete"
FSM_PREV_CREATE_BEGIN = "CreateBegin"
FSM_PREV_CREATE_FAIL = "CreateFail"
FSM_PREV_CREATE_MAKE_CFG_DIRS = "CreateMakeCfgDirs"
FSM_PREV_CREATE_SUCCESS = "CreateSuccess"
FSM_PREV_DELETE_BEGIN = "DeleteBegin"
FSM_PREV_DELETE_FAIL = "DeleteFail"
FSM_PREV_DELETE_LOCAL = "DeleteLocal"
FSM_PREV_DELETE_SUCCESS = "DeleteSuccess"
FSM_PREV_NOP = "nop"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_CANCELLED = "ERR-DIAG-cancelled"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_FSM_RESTARTED = "ERR-DIAG-fsm-restarted"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_TEST_FAILED = "ERR-DIAG-test-failed"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_AUTHENTICATION_FAILURE = "ERR-DNLD-authentication-failure"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_ERROR = "ERR-DNLD-error"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_HOSTKEY_MISMATCH = "ERR-DNLD-hostkey-mismatch"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_INVALID_IMAGE = "ERR-DNLD-invalid-image"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_NO_FILE = "ERR-DNLD-no-file"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_NO_SPACE = "ERR-DNLD-no-space"
FSM_RMT_INV_ERR_CODE_ERR_DNS_DELETE_ERROR = "ERR-DNS-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_DNS_GET_ERROR = "ERR-DNS-get-error"
FSM_RMT_INV_ERR_CODE_ERR_DNS_SET_ERROR = "ERR-DNS-set-error"
FSM_RMT_INV_ERR_CODE_ERR_DIGEST_VALIDATION_ERROR = "ERR-Digest-Validation-error"
FSM_RMT_INV_ERR_CODE_ERR_EXEC_GEN_CERT_ERROR = "ERR-Exec-Gen-Cert-error"
FSM_RMT_INV_ERR_CODE_ERR_EXEC_GET_CA_CERT_ERROR = "ERR-Exec-Get-CA-Cert-error"
FSM_RMT_INV_ERR_CODE_ERR_FILTER_ILLEGAL_FORMAT = "ERR-FILTER-illegal-format"
FSM_RMT_INV_ERR_CODE_ERR_FSM_NO_SUCH_STATE = "ERR-FSM-no-such-state"
FSM_RMT_INV_ERR_CODE_ERR_GET_CA_CERT_ERROR = "ERR-Get-CA-Cert-error"
FSM_RMT_INV_ERR_CODE_ERR_GET_CERT_ERROR = "ERR-Get-Cert-error"
FSM_RMT_INV_ERR_CODE_ERR_GET_OUT_DIGET_MESSAGE_ERROR = "ERR-Get-Out-Diget-Message-error"
FSM_RMT_INV_ERR_CODE_ERR_HTTP_REQUEST_ERROR = "ERR-HTTP-Request-error"
FSM_RMT_INV_ERR_CODE_ERR_HTTP_SET_ERROR = "ERR-HTTP-set-error"
FSM_RMT_INV_ERR_CODE_ERR_HTTPS_SET_ERROR = "ERR-HTTPS-set-error"
FSM_RMT_INV_ERR_CODE_ERR_IPV6_ADDR_CONFIGURED = "ERR-Ipv6-addr-configured"
FSM_RMT_INV_ERR_CODE_ERR_MO_CONFIG_CHILD_OBJECT_CANT_BE_CONFIGURED = "ERR-MO-CONFIG-child-object-cant-be-configured"
FSM_RMT_INV_ERR_CODE_ERR_MO_META_NO_SUCH_OBJECT_CLASS = "ERR-MO-META-no-such-object-class"
FSM_RMT_INV_ERR_CODE_ERR_MO_PROPERTY_NO_SUCH_PROPERTY = "ERR-MO-PROPERTY-no-such-property"
FSM_RMT_INV_ERR_CODE_ERR_MO_PROPERTY_VALUE_OUT_OF_RANGE = "ERR-MO-PROPERTY-value-out-of-range"
FSM_RMT_INV_ERR_CODE_ERR_MO_ACCESS_DENIED = "ERR-MO-access-denied"
FSM_RMT_INV_ERR_CODE_ERR_MO_DELETION_RULE_VIOLATION = "ERR-MO-deletion-rule-violation"
FSM_RMT_INV_ERR_CODE_ERR_MO_DUPLICATE_OBJECT = "ERR-MO-duplicate-object"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_CONTAINMENT = "ERR-MO-illegal-containment"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_CREATION = "ERR-MO-illegal-creation"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_ITERATOR_STATE = "ERR-MO-illegal-iterator-state"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_OBJECT_LIFECYCLE_TRANSITION = "ERR-MO-illegal-object-lifecycle-transition"
FSM_RMT_INV_ERR_CODE_ERR_MO_NAMING_RULE_VIOLATION = "ERR-MO-naming-rule-violation"
FSM_RMT_INV_ERR_CODE_ERR_MO_OBJECT_NOT_FOUND = "ERR-MO-object-not-found"
FSM_RMT_INV_ERR_CODE_ERR_MO_RESOURCE_ALLOCATION = "ERR-MO-resource-allocation"
FSM_RMT_INV_ERR_CODE_ERR_NTP_DELETE_ERROR = "ERR-NTP-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_NTP_GET_ERROR = "ERR-NTP-get-error"
FSM_RMT_INV_ERR_CODE_ERR_NTP_SET_ERROR = "ERR-NTP-set-error"
FSM_RMT_INV_ERR_CODE_ERR_POLICY_RESOLUTION_IN_PROGRESS = "ERR-Policy-resolution-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_TOKEN_REQUEST_DENIED = "ERR-TOKEN-request-denied"
FSM_RMT_INV_ERR_CODE_ERR_UPDATE_VM_IP_MASK_GATEWAY_ERROR = "ERR-Update-VM-IP-Mask-Gateway-error"
FSM_RMT_INV_ERR_CODE_ERR_AAA_CONFIG_MODIFY_ERROR = "ERR-aaa-config-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_ACCT_REALM_SET_ERROR = "ERR-acct-realm-set-error"
FSM_RMT_INV_ERR_CODE_ERR_ADMIN_PASSWD_SET = "ERR-admin-passwd-set"
FSM_RMT_INV_ERR_CODE_ERR_AUTH_REALM_SET_ERROR = "ERR-auth-realm-set-error"
FSM_RMT_INV_ERR_CODE_ERR_AUTHENTICATION = "ERR-authentication"
FSM_RMT_INV_ERR_CODE_ERR_AUTHORIZATION_REQUIRED = "ERR-authorization-required"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_CHASSISPACK_UNDER_DG = "ERR-create-chassispack-under-dg"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_HFP_UNDER_DG = "ERR-create-hfp-under-dg"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_KEYRING = "ERR-create-keyring"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_LOCALE = "ERR-create-locale"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_ROLE = "ERR-create-role"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_USER = "ERR-create-user"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_LOCALE = "ERR-delete-locale"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_ROLE = "ERR-delete-role"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_SESSION = "ERR-delete-session"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_USER = "ERR-delete-user"
FSM_RMT_INV_ERR_CODE_ERR_ESTIMATE_IMPACT_ON_RECONNECT = "ERR-estimate-impact-on-reconnect"
FSM_RMT_INV_ERR_CODE_ERR_GET_MAX_HTTP_USER_SESSIONS = "ERR-get-max-http-user-sessions"
FSM_RMT_INV_ERR_CODE_ERR_HTTP_INITIALIZING = "ERR-http-initializing"
FSM_RMT_INV_ERR_CODE_ERR_INTERNAL_ERROR = "ERR-internal-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_DELETE_ERROR = "ERR-ldap-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GET_ERROR = "ERR-ldap-get-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GROUP_MODIFY_ERROR = "ERR-ldap-group-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GROUP_SET_ERROR = "ERR-ldap-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_SET_ERROR = "ERR-ldap-set-error"
FSM_RMT_INV_ERR_CODE_ERR_LOCALE_SET_ERROR = "ERR-locale-set-error"
FSM_RMT_INV_ERR_CODE_ERR_MAX_USERID_SESSIONS_REACHED = "ERR-max-userid-sessions-reached"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_LOCALE = "ERR-modify-locale"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_ROLE = "ERR-modify-role"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER = "ERR-modify-user"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER_LOCALE = "ERR-modify-user-locale"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER_ROLE = "ERR-modify-user-role"
FSM_RMT_INV_ERR_CODE_ERR_NFS_DOWN = "ERR-nfs-down"
FSM_RMT_INV_ERR_CODE_ERR_PROVIDER_GROUP_MODIFY_ERROR = "ERR-provider-group-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_PROVIDER_GROUP_SET_ERROR = "ERR-provider-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_GLOBAL_SET_ERROR = "ERR-radius-global-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_GROUP_SET_ERROR = "ERR-radius-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_SET_ERROR = "ERR-radius-set-error"
FSM_RMT_INV_ERR_CODE_ERR_ROLE_SET_ERROR = "ERR-role-set-error"
FSM_RMT_INV_ERR_CODE_ERR_SERVICE_NOT_READY = "ERR-service-not-ready"
FSM_RMT_INV_ERR_CODE_ERR_SESSION_CACHE_FULL = "ERR-session-cache-full"
FSM_RMT_INV_ERR_CODE_ERR_SESSION_NOT_FOUND = "ERR-session-not-found"
FSM_RMT_INV_ERR_CODE_ERR_SET_PASSWORD_STRENGTH_CHECK = "ERR-set-password-strength-check"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_ENABLE_ERROR = "ERR-tacacs-enable-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_GLOBAL_SET_ERROR = "ERR-tacacs-global-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_GROUP_SET_ERROR = "ERR-tacacs-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_SET_ERROR = "ERR-tacacs-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TIMEZONE_SET_ERROR = "ERR-timezone-set-error"
FSM_RMT_INV_ERR_CODE_ERR_USER_ACCOUNT_EXPIRED = "ERR-user-account-expired"
FSM_RMT_INV_ERR_CODE_ERR_USER_SET_ERROR = "ERR-user-set-error"
FSM_RMT_INV_ERR_CODE_NONE = "none"
FSM_STAMP_NEVER = "never"
FSM_STATUS_CREATE_BEGIN = "CreateBegin"
FSM_STATUS_CREATE_FAIL = "CreateFail"
FSM_STATUS_CREATE_MAKE_CFG_DIRS = "CreateMakeCfgDirs"
FSM_STATUS_CREATE_SUCCESS = "CreateSuccess"
FSM_STATUS_DELETE_BEGIN = "DeleteBegin"
FSM_STATUS_DELETE_FAIL = "DeleteFail"
FSM_STATUS_DELETE_LOCAL = "DeleteLocal"
FSM_STATUS_DELETE_SUCCESS = "DeleteSuccess"
FSM_STATUS_NOP = "nop"
OPER_STATE_CLEANING = "cleaning"
OPER_STATE_NONE = "none"
OPER_STATE_PROVISIONING = "provisioning"
OPER_STATE_USER_CLEANING = "user-cleaning"
SYNC_STATUS_SYNCHED = "synched"
SYNC_STATUS_UNSYNCHED = "unsynched"
class ConfigConsumerCatalogue(ManagedObject):
"""This is ConfigConsumerCatalogue class."""
consts = ConfigConsumerCatalogueConsts()
naming_props = set([u'internalName'])
mo_meta = MoMeta("ConfigConsumerCatalogue", "configConsumerCatalogue", "consumer-catalogue-[internal_name]", VersionMeta.Version101a, "InputOutput", 0x3f, [], ["admin"], [u'configCatalogue', u'consumerInst'], [u'configBackup', u'configConsumerCatalogueFsm', u'configConsumerCatalogueFsmTask', u'eventInst', u'faultInst'], ["Get", "Set"])
prop_meta = {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["active", "delete"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"consumer_dn": MoPropertyMeta("consumer_dn", "consumerDn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"flt_aggr": MoPropertyMeta("flt_aggr", "fltAggr", "ulong", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"fsm_descr": MoPropertyMeta("fsm_descr", "fsmDescr", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"fsm_prev": MoPropertyMeta("fsm_prev", "fsmPrev", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["CreateBegin", "CreateFail", "CreateMakeCfgDirs", "CreateSuccess", "DeleteBegin", "DeleteFail", "DeleteLocal", "DeleteSuccess", "nop"], []),
"fsm_progr": MoPropertyMeta("fsm_progr", "fsmProgr", "byte", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, [], ["0-100"]),
"fsm_rmt_inv_err_code": MoPropertyMeta("fsm_rmt_inv_err_code", "fsmRmtInvErrCode", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["ERR-DIAG-cancelled", "ERR-DIAG-fsm-restarted", "ERR-DIAG-test-failed", "ERR-DNLD-authentication-failure", "ERR-DNLD-error", "ERR-DNLD-hostkey-mismatch", "ERR-DNLD-invalid-image", "ERR-DNLD-no-file", "ERR-DNLD-no-space", "ERR-DNS-delete-error", "ERR-DNS-get-error", "ERR-DNS-set-error", "ERR-Digest-Validation-error", "ERR-Exec-Gen-Cert-error", "ERR-Exec-Get-CA-Cert-error", "ERR-FILTER-illegal-format", "ERR-FSM-no-such-state", "ERR-Get-CA-Cert-error", "ERR-Get-Cert-error", "ERR-Get-Out-Diget-Message-error", "ERR-HTTP-Request-error", "ERR-HTTP-set-error", "ERR-HTTPS-set-error", "ERR-Ipv6-addr-configured", "ERR-MO-CONFIG-child-object-cant-be-configured", "ERR-MO-META-no-such-object-class", "ERR-MO-PROPERTY-no-such-property", "ERR-MO-PROPERTY-value-out-of-range", "ERR-MO-access-denied", "ERR-MO-deletion-rule-violation", "ERR-MO-duplicate-object", "ERR-MO-illegal-containment", "ERR-MO-illegal-creation", "ERR-MO-illegal-iterator-state", "ERR-MO-illegal-object-lifecycle-transition", "ERR-MO-naming-rule-violation", "ERR-MO-object-not-found", "ERR-MO-resource-allocation", "ERR-NTP-delete-error", "ERR-NTP-get-error", "ERR-NTP-set-error", "ERR-Policy-resolution-in-progress", "ERR-TOKEN-request-denied", "ERR-Update-VM-IP-Mask-Gateway-error", "ERR-aaa-config-modify-error", "ERR-acct-realm-set-error", "ERR-admin-passwd-set", "ERR-auth-realm-set-error", "ERR-authentication", "ERR-authorization-required", "ERR-create-chassispack-under-dg", "ERR-create-hfp-under-dg", "ERR-create-keyring", "ERR-create-locale", "ERR-create-role", "ERR-create-user", "ERR-delete-locale", "ERR-delete-role", "ERR-delete-session", "ERR-delete-user", "ERR-estimate-impact-on-reconnect", "ERR-get-max-http-user-sessions", "ERR-http-initializing", "ERR-internal-error", "ERR-ldap-delete-error", "ERR-ldap-get-error", "ERR-ldap-group-modify-error", "ERR-ldap-group-set-error", "ERR-ldap-set-error", "ERR-locale-set-error", "ERR-max-userid-sessions-reached", "ERR-modify-locale", "ERR-modify-role", "ERR-modify-user", "ERR-modify-user-locale", "ERR-modify-user-role", "ERR-nfs-down", "ERR-provider-group-modify-error", "ERR-provider-group-set-error", "ERR-radius-global-set-error", "ERR-radius-group-set-error", "ERR-radius-set-error", "ERR-role-set-error", "ERR-service-not-ready", "ERR-session-cache-full", "ERR-session-not-found", "ERR-set-password-strength-check", "ERR-tacacs-enable-error", "ERR-tacacs-global-set-error", "ERR-tacacs-group-set-error", "ERR-tacacs-set-error", "ERR-timezone-set-error", "ERR-user-account-expired", "ERR-user-set-error", "none"], ["0-4294967295"]),
"fsm_rmt_inv_err_descr": MoPropertyMeta("fsm_rmt_inv_err_descr", "fsmRmtInvErrDescr", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, 0, 510, None, [], []),
"fsm_rmt_inv_rslt": MoPropertyMeta("fsm_rmt_inv_rslt", "fsmRmtInvRslt", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""((defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout),){0,32}(defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout){0,1}""", [], []),
"fsm_stage_descr": MoPropertyMeta("fsm_stage_descr", "fsmStageDescr", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"fsm_stamp": MoPropertyMeta("fsm_stamp", "fsmStamp", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", ["never"], []),
"fsm_status": MoPropertyMeta("fsm_status", "fsmStatus", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["CreateBegin", "CreateFail", "CreateMakeCfgDirs", "CreateSuccess", "DeleteBegin", "DeleteFail", "DeleteLocal", "DeleteSuccess", "nop"], []),
"fsm_try": MoPropertyMeta("fsm_try", "fsmTry", "byte", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"internal_name": MoPropertyMeta("internal_name", "internalName", "string", VersionMeta.Version101a, MoPropertyMeta.NAMING, 0x8, 1, 510, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["cleaning", "none", "provisioning", "user-cleaning"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"sync_status": MoPropertyMeta("sync_status", "syncStatus", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["synched", "unsynched"], []),
}
prop_map = {
"adminState": "admin_state",
"childAction": "child_action",
"consumerDn": "consumer_dn",
"dn": "dn",
"fltAggr": "flt_aggr",
"fsmDescr": "fsm_descr",
"fsmPrev": "fsm_prev",
"fsmProgr": "fsm_progr",
"fsmRmtInvErrCode": "fsm_rmt_inv_err_code",
"fsmRmtInvErrDescr": "fsm_rmt_inv_err_descr",
"fsmRmtInvRslt": "fsm_rmt_inv_rslt",
"fsmStageDescr": "fsm_stage_descr",
"fsmStamp": "fsm_stamp",
"fsmStatus": "fsm_status",
"fsmTry": "fsm_try",
"internalName": "internal_name",
"name": "name",
"operState": "oper_state",
"rn": "rn",
"status": "status",
"syncStatus": "sync_status",
}
def __init__(self, parent_mo_or_dn, internal_name, **kwargs):
self._dirty_mask = 0
self.internal_name = internal_name
self.admin_state = None
self.child_action = None
self.consumer_dn = None
self.flt_aggr = None
self.fsm_descr = None
self.fsm_prev = None
self.fsm_progr = None
self.fsm_rmt_inv_err_code = None
self.fsm_rmt_inv_err_descr = None
self.fsm_rmt_inv_rslt = None
self.fsm_stage_descr = None
self.fsm_stamp = None
self.fsm_status = None
self.fsm_try = None
self.name = None
self.oper_state = None
self.status = None
self.sync_status = None
ManagedObject.__init__(self, "ConfigConsumerCatalogue", parent_mo_or_dn, **kwargs)
| 87.267606
| 2,742
| 0.756563
|
9faa2f2d2bdcc850e68efa29a07ca56be8a55572
| 9,338
|
py
|
Python
|
generate_sync_yaml.py
|
fat8701/sync_image
|
05feb9919917f2ec1c5e810a89ec627fc5c0f500
|
[
"MIT"
] | null | null | null |
generate_sync_yaml.py
|
fat8701/sync_image
|
05feb9919917f2ec1c5e810a89ec627fc5c0f500
|
[
"MIT"
] | null | null | null |
generate_sync_yaml.py
|
fat8701/sync_image
|
05feb9919917f2ec1c5e810a89ec627fc5c0f500
|
[
"MIT"
] | null | null | null |
import os
import yaml
import requests
from distutils.version import LooseVersion
# Basic configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
CONFIG_FILE = os.path.join(BASE_DIR, 'config.yaml')
SYNC_FILE = os.path.join(BASE_DIR, 'sync.yaml')
CUSTOM_SYNC_FILE = os.path.join(BASE_DIR, 'custom_sync.yaml')
def is_exclude_tag(tag):
"""
排除tag
:param tag:
:return:
"""
excludes = ['alpha', 'beta', 'rc', 'amd64', 'ppc64le', 'arm64', 'arm', 's390x', 'SNAPSHOT', '-', 'master', 'latest', 'main']
for e in excludes:
if e.lower() in tag.lower():
return True
if str.isalpha(tag):
return True
if len(tag) >= 40:
return True
return False
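# Illustrative behaviour of the filter above (example tags, not taken from any config):
#   is_exclude_tag('v1.21.0')      -> False  (kept)
#   is_exclude_tag('v1.21.0-rc.1') -> True   (contains '-' and 'rc')
#   is_exclude_tag('latest')       -> True   (explicitly excluded)
#   is_exclude_tag('<40+ character digest>') -> True (too long to be a version tag)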
def get_repo_aliyun_tags(image):
"""
获取 aliyuncs repo 最新的 tag
:param image:
:return:
"""
image_name = image.split('/')[-1]
tags = []
hearders = {
'User-Agent': 'docker/19.03.12 go/go1.13.10 git-commit/48a66213fe kernel/5.8.0-1.el7.elrepo.x86_64 os/linux arch/amd64 UpstreamClient(Docker-Client/19.03.12 \(linux\))'
}
token_url = "https://dockerauth.cn-hangzhou.aliyuncs.com/auth?scope=repository:kainstall/{image}:pull&service=registry.aliyuncs.com:cn-hangzhou:26842".format(
image=image_name)
try:
token_res = requests.get(url=token_url, headers=hearders)
token_data = token_res.json()
access_token = token_data['token']
except Exception as e:
print('[Get repo token]', e)
return tags
tag_url = "https://registry.cn-hangzhou.aliyuncs.com/v2/kainstall/{image}/tags/list".format(image=image_name)
hearders['Authorization'] = 'Bearer ' + access_token
try:
tag_res = requests.get(url=tag_url, headers=hearders)
tag_data = tag_res.json()
print('[aliyun tag]: ', tag_data)
except Exception as e:
print('[Get tag Error]', e)
return tags
tags = tag_data.get('tags', [])
return tags
def get_repo_gcr_tags(image, limit=5, host="k8s.gcr.io"):
"""
获取 gcr.io repo 最新的 tag
:param host:
:param image:
:param limit:
:return:
"""
tag_url = "https://{host}/v2/{image}/tags/list".format(host=host, image=image)
tags = []
tags_data = []
manifest_data = []
try:
tag_rep = requests.get(url=tag_url)
tag_req_json = tag_rep.json()
manifest_data = tag_req_json['manifest']
except Exception as e:
print('[Get tag Error]', e)
return tags
for manifest in manifest_data:
sha256_data = manifest_data[manifest]
sha256_tag = sha256_data.get('tag', [])
if len(sha256_tag) > 0:
            # skip excluded tags
if is_exclude_tag(sha256_tag[0]):
continue
tags_data.append({
'tag': sha256_tag[0],
'timeUploadedMs': sha256_data.get('timeUploadedMs')
})
tags_sort_data = sorted(tags_data, key=lambda i: i['timeUploadedMs'], reverse=True)
    # keep only the newest `limit` tags
tags_limit_data = tags_sort_data[:limit]
image_aliyun_tags = get_repo_aliyun_tags(image)
for t in tags_limit_data:
        # skip tags that have already been synced
if t['tag'] in image_aliyun_tags:
continue
tags.append(t['tag'])
print('[repo tag]', tags)
return tags
def get_repo_quay_tags(image, limit=5):
"""
获取 quay.io repo 最新的 tag
:param image:
:param limit:
:return:
"""
tag_url = "https://quay.io/api/v1/repository/{image}/tag/?onlyActiveTags=true&limit=100".format(image=image)
tags = []
tags_data = []
manifest_data = []
try:
tag_rep = requests.get(url=tag_url)
tag_req_json = tag_rep.json()
manifest_data = tag_req_json['tags']
except Exception as e:
print('[Get tag Error]', e)
return tags
for manifest in manifest_data:
name = manifest.get('name', '')
        # skip excluded tags
if is_exclude_tag(name):
continue
tags_data.append({
'tag': name,
'start_ts': manifest.get('start_ts')
})
tags_sort_data = sorted(tags_data, key=lambda i: i['start_ts'], reverse=True)
    # keep only the newest `limit` tags
tags_limit_data = tags_sort_data[:limit]
image_aliyun_tags = get_repo_aliyun_tags(image)
for t in tags_limit_data:
        # skip tags that have already been synced
if t['tag'] in image_aliyun_tags:
continue
tags.append(t['tag'])
print('[repo tag]', tags)
return tags
def get_repo_elastic_tags(image, limit=5):
"""
获取 elastic.io repo 最新的 tag
:param image:
:param limit:
:return:
"""
token_url = "https://docker-auth.elastic.co/auth?service=token-service&scope=repository:{image}:pull".format(
image=image)
tag_url = "https://docker.elastic.co/v2/{image}/tags/list".format(image=image)
tags = []
tags_data = []
manifest_data = []
hearders = {
'User-Agent': 'docker/19.03.12 go/go1.13.10 git-commit/48a66213fe kernel/5.8.0-1.el7.elrepo.x86_64 os/linux arch/amd64 UpstreamClient(Docker-Client/19.03.12 \(linux\))'
}
try:
token_res = requests.get(url=token_url, headers=hearders)
token_data = token_res.json()
access_token = token_data['token']
except Exception as e:
print('[Get repo token]', e)
return tags
hearders['Authorization'] = 'Bearer ' + access_token
try:
tag_rep = requests.get(url=tag_url, headers=hearders)
tag_req_json = tag_rep.json()
manifest_data = tag_req_json['tags']
except Exception as e:
print('[Get tag Error]', e)
return tags
for tag in manifest_data:
        # skip excluded tags
if is_exclude_tag(tag):
continue
tags_data.append(tag)
tags_sort_data = sorted(tags_data, key=LooseVersion, reverse=True)
    # keep only the newest `limit` tags
tags_limit_data = tags_sort_data[:limit]
image_aliyun_tags = get_repo_aliyun_tags(image)
for t in tags_limit_data:
        # skip tags that have already been synced
if t in image_aliyun_tags:
continue
tags.append(t)
print('[repo tag]', tags)
return tags
def get_repo_tags(repo, image, limit=5):
"""
获取 repo 最新的 tag
:param repo:
:param image:
:param limit:
:return:
"""
tags_data = []
if repo == 'gcr.io':
tags_data = get_repo_gcr_tags(image, limit, "gcr.io")
elif repo == 'k8s.gcr.io':
tags_data = get_repo_gcr_tags(image, limit, "k8s.gcr.io")
elif repo == 'quay.io':
tags_data = get_repo_quay_tags(image, limit)
elif repo == 'docker.elastic.co':
tags_data = get_repo_elastic_tags(image, limit)
return tags_data
def generate_dynamic_conf():
"""
    Generate the dynamic sync configuration (sync.yaml) from config.yaml.
:return:
"""
print('[generate_dynamic_conf] start.')
config = None
with open(CONFIG_FILE, 'r') as stream:
try:
config = yaml.safe_load(stream)
except yaml.YAMLError as e:
print('[Get Config]', e)
exit(1)
print('[config]', config)
skopeo_sync_data = {}
for repo in config['images']:
if repo not in skopeo_sync_data:
skopeo_sync_data[repo] = {'images': {}}
if config['images'][repo] is None:
continue
for image in config['images'][repo]:
print("[image] {image}".format(image=image))
sync_tags = get_repo_tags(repo, image, config['last'])
if len(sync_tags) > 0:
skopeo_sync_data[repo]['images'][image] = sync_tags
skopeo_sync_data[repo]['images'][image].append('latest')
else:
print('[{image}] no sync tag.'.format(image=image))
print('[sync data]', skopeo_sync_data)
with open(SYNC_FILE, 'w+') as f:
yaml.safe_dump(skopeo_sync_data, f, default_flow_style=False)
print('[generate_dynamic_conf] done.', end='\n\n')
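# A sketch of the sync.yaml written above (repo/image/tag names are illustrative):
#
#   k8s.gcr.io:
#     images:
#       <project>/<image>:
#       - v1.2.3
#       - latest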
def generate_custom_conf():
"""
    Regenerate the custom sync configuration, dropping tags already present in the aliyuncs repo.
:return:
"""
print('[generate_custom_conf] start.')
custom_sync_config = None
with open(CUSTOM_SYNC_FILE, 'r') as stream:
try:
custom_sync_config = yaml.safe_load(stream)
except yaml.YAMLError as e:
print('[Get Config]', e)
exit(1)
print('[custom_sync config]', custom_sync_config)
custom_skopeo_sync_data = {}
for repo in custom_sync_config:
if repo not in custom_skopeo_sync_data:
custom_skopeo_sync_data[repo] = {'images': {}}
if custom_sync_config[repo]['images'] is None:
continue
for image in custom_sync_config[repo]['images']:
image_aliyun_tags = get_repo_aliyun_tags(image)
for tag in custom_sync_config[repo]['images'][image]:
if tag in image_aliyun_tags:
continue
if image not in custom_skopeo_sync_data[repo]['images']:
custom_skopeo_sync_data[repo]['images'][image] = [tag]
else:
custom_skopeo_sync_data[repo]['images'][image].append(tag)
print('[custom_sync data]', custom_skopeo_sync_data)
with open(CUSTOM_SYNC_FILE, 'w+') as f:
yaml.safe_dump(custom_skopeo_sync_data, f, default_flow_style=False)
print('[generate_custom_conf] done.', end='\n\n')
generate_dynamic_conf()
generate_custom_conf()
| 28.042042
| 176
| 0.604519
|
53ef379f390891534c3f8ed3cb6b926f6a8b65bc
| 4,732
|
py
|
Python
|
purity_fb/purity_fb_1dot10/models/smtp.py
|
mabdelhafez/purity_fb_python_client
|
a9856875b3df43b4302a2e4addd1a6b71f51f5ce
|
[
"Apache-2.0"
] | null | null | null |
purity_fb/purity_fb_1dot10/models/smtp.py
|
mabdelhafez/purity_fb_python_client
|
a9856875b3df43b4302a2e4addd1a6b71f51f5ce
|
[
"Apache-2.0"
] | null | null | null |
purity_fb/purity_fb_1dot10/models/smtp.py
|
mabdelhafez/purity_fb_python_client
|
a9856875b3df43b4302a2e4addd1a6b71f51f5ce
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Pure Storage FlashBlade REST 1.10 Python SDK
Pure Storage FlashBlade REST 1.10 Python SDK, developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.10
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Smtp(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'relay_host': 'str',
'sender_domain': 'str'
}
attribute_map = {
'name': 'name',
'relay_host': 'relay_host',
'sender_domain': 'sender_domain'
}
def __init__(self, name=None, relay_host=None, sender_domain=None):
"""
Smtp - a model defined in Swagger
"""
self._name = None
self._relay_host = None
self._sender_domain = None
if name is not None:
self.name = name
if relay_host is not None:
self.relay_host = relay_host
if sender_domain is not None:
self.sender_domain = sender_domain
@property
def name(self):
"""
Gets the name of this Smtp.
name of the object (e.g., a file system or snapshot)
:return: The name of this Smtp.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this Smtp.
name of the object (e.g., a file system or snapshot)
:param name: The name of this Smtp.
:type: str
"""
self._name = name
@property
def relay_host(self):
"""
Gets the relay_host of this Smtp.
Relay server used as a forwarding point for email sent from the array
:return: The relay_host of this Smtp.
:rtype: str
"""
return self._relay_host
@relay_host.setter
def relay_host(self, relay_host):
"""
Sets the relay_host of this Smtp.
Relay server used as a forwarding point for email sent from the array
:param relay_host: The relay_host of this Smtp.
:type: str
"""
self._relay_host = relay_host
@property
def sender_domain(self):
"""
Gets the sender_domain of this Smtp.
Domain name appended to alert email messages
:return: The sender_domain of this Smtp.
:rtype: str
"""
return self._sender_domain
@sender_domain.setter
def sender_domain(self, sender_domain):
"""
Sets the sender_domain of this Smtp.
Domain name appended to alert email messages
:param sender_domain: The sender_domain of this Smtp.
:type: str
"""
self._sender_domain = sender_domain
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, Smtp):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
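# Minimal usage sketch (values are illustrative, not a documented example):
#   smtp = Smtp(name='smtp', relay_host='mail.example.com', sender_domain='example.com')
#   smtp.to_dict()  # -> {'name': 'smtp', 'relay_host': 'mail.example.com', 'sender_domain': 'example.com'}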
| 26
| 205
| 0.556847
|
fabe1b135cd247fba6d1e0652050f1af1a1975c2
| 52,839
|
py
|
Python
|
PV_ICE/main.py
|
NREL/PV_ICE
|
8ed90857199ede4d66d155f0a193236a5fe07e0b
|
[
"BSD-3-Clause"
] | 12
|
2021-01-28T13:44:16.000Z
|
2022-03-20T07:28:48.000Z
|
PV_ICE/main.py
|
NREL/PV_ICE
|
8ed90857199ede4d66d155f0a193236a5fe07e0b
|
[
"BSD-3-Clause"
] | 6
|
2020-12-22T21:12:54.000Z
|
2021-09-24T19:50:15.000Z
|
PV_ICE/main.py
|
NREL/PV_ICE
|
8ed90857199ede4d66d155f0a193236a5fe07e0b
|
[
"BSD-3-Clause"
] | 1
|
2021-03-24T14:34:01.000Z
|
2021-03-24T14:34:01.000Z
|
# -*- coding: utf-8 -*-
"""
Main.py contains the functions to calculate the different quantities of materials
in each step of the process. Refer to the diagram in the Package Overview for the
steps considered.
Support functions include Weibull functions for reliability and failure; also,
functions to modify baseline values and evaluate sensitivity to the parameters.
"""
import numpy as np
import pandas as pd
import datetime
import os
import matplotlib.pyplot as plt
def read_baseline_material(scenario, material='None', file=None):
if file is None:
try:
file = _interactive_load('Select baseline file')
except:
raise Exception('Interactive load failed. Tkinter not supported'+
'on this system. Try installing X-Quartz and reloading')
def _interactive_load(title=None):
# Tkinter file picker
import tkinter
from tkinter import filedialog
root = tkinter.Tk()
root.withdraw() #Start interactive file input
root.attributes("-topmost", True) #Bring window into foreground
return filedialog.askopenfilename(parent=root, title=title) #initialdir = data_dir
def _unitReferences(keyword):
'''
Specify units for variable in scenario or materials
Parameters
----------
keyword : str
String of scenario or material column label
Returns
-------
yunits : str
Unit specific to the keyword provided
'''
moduleDictionary = {'year': {'unit': 'Years', 'source': 'input'},
'new_Installed_Capacity_[MW]': {'unit': 'Power [MW]', 'source':'input'},
'mod_eff': {'unit': 'Efficiency $\eta$ [%]', 'source':'input'},
'mod_reliability_t50': {'unit': 'Years' , 'source':'input'},
'mod_reliability_t90': {'unit': 'Years', 'source':'input'},
'mod_degradation': {'unit': 'Percentage [%]', 'source':'input'},
'mod_lifetime': {'unit': 'Years', 'source':'input'},
'mod_MFG_eff': {'unit': 'Efficiency $\eta$ [%]', 'source':'input'},
'mod_EOL_collection_eff': {'unit': 'Efficiency $\eta$ [%]', 'source':'input'},
'mod_EOL_collected_recycled': {'unit': 'Percentage [%]', 'source':'input'},
'mod_Repair': {'unit': 'Percentage [%]', 'source':'input'},
'mod_MerchantTail': {'unit': 'Percentage [%]', 'source':'input'},
'mod_Reuse': {'unit': 'Percentage [%]', 'source':'input'},
'Area': {'unit': 'm$^2$', 'source': 'generated'},
'Cumulative_Area_disposedby_Failure': {'unit': 'm$^2$', 'source': 'generated'},
'Cumulative_Area_disposedby_ProjectLifetime': {'unit': 'm$^2$', 'source': 'generated'},
'Cumulative_Area_disposed': {'unit': 'm$^2$', 'source': 'generated'},
'Cumulative_Active_Area': {'unit': 'm$^2$', 'source': 'generated'},
'Installed_Capacity_[W]': {'unit': 'Power [W]', 'source': 'generated'},
'EOL_on_Year_0': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_1': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_2': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_3': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_4': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_5': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_6': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_7': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_8': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_9': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_10': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_11': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_12': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_13': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_14': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_15': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_16': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_17': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_18': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_19': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_20': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_21': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_22': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_23': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_24': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_25': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_26': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_27': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_28': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_29': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_30': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_31': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_32': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_33': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_34': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_35': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_36': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_37': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_38': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_39': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_40': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_41': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_42': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_43': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_44': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_45': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_46': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_47': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_48': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_49': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_50': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_51': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_52': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_53': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_54': {'unit': 'm$^2$', 'source': 'generated'},
'EOL_on_Year_55': {'unit': 'm$^2$', 'source': 'generated'},
'EoL_Collected': {'unit': 'm$^2$', 'source': 'generated'},
'EoL_NotCollected': {'unit': 'm$^2$', 'source': 'generated'},
'EoL_Recycled': {'unit': 'm$^2$', 'source': 'generated'},
'EoL_NotRecycled_Landfilled': {'unit': 'm$^2$', 'source': 'generated'}
}
materialDictionary={'year': {'unit': 'Years', 'source': 'input'},
'mat_virgin_eff': {'unit': 'Efficiency $\eta$ [%]', 'source': 'input'},
'mat_massperm2': {'unit': 'Mass [g]', 'source': 'input'},
'mat_MFG_eff': {'unit': 'Efficiency $\eta$ [%]', 'source': 'input'},
'mat_MFG_scrap_recycled': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_MFG_scrap_Recycled': {'unit': 'Efficiency $\eta$ [%]', 'source': 'input'},
'mat_MFG_scrap_Recycled_into_HQ': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_MFG_scrap_Recycled_into_HQ_Reused4MFG': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_EOL_collected_Recycled': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_EOL_Recycling_eff': {'unit': 'Efficiency $\eta$ [%]', 'source': 'input'},
'mat_EOL_Recycled_into_HQ': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_EOL_RecycledHQ_Reused4MFG': {'unit': 'Percentage [%]', 'source': 'input'},
'mat_modules_NotRecycled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_modules_NotCollected': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_sento_Recycling': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_NotRecycled_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_Recycled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_Recycled_Losses_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_Recycled_2_HQ': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_Recycled_2_OQ': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EoL_Recycled_HQ_into_MFG': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_EOL_Recycled_HQ_into_OU': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_UsedinManufacturing': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Manufacturing_Input': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Scrap': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Scrap_Sentto_Recycling': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Scrap_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Scrap_Recycled_Successfully': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Scrap_Recycled_Losses_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Recycled_into_HQ': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Recycled_into_OQ': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Recycled_HQ_into_MFG': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_MFG_Recycled_HQ_into_OU': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Virgin_Stock': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Total_EOL_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Total_MFG_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Total_Landfilled': {'unit': 'Mass [g]', 'source': 'generated'},
'mat_Total_Recycled_OU': {'unit': 'Mass [g]', 'source': 'generated'}
}
if keyword in moduleDictionary.keys():
yunits = moduleDictionary[keyword]['unit']
elif keyword in materialDictionary.keys():
yunits = materialDictionary[keyword]['unit']
else:
print("Warning: Keyword / Units not Found")
yunits = 'UNITS'
return yunits
def distance(s_lat, s_lng, e_lat, e_lng):
"""
# Haversine formula for numpy arrays
# Author: MalyutinS
# imported from comment on: https://gist.github.com/rochacbruno/2883505
# Example:
# s_lat = 45; s_lng = -110; e_lat=[33, 44]; e_lng = [-115, -140]
# Returns distance from the source point to the two ending points:
# r = distance(s_lat, s_lng, e_lat, e_lng)
# r = array([1402.24996689, 2369.0150434 ])
#
"""
# approximate radius of earth in km
R = 6373.0
# s_lat = s_lat*np.pi/180.0
s_lat = np.deg2rad(s_lat)
s_lng = np.deg2rad(s_lng)
e_lat = np.deg2rad(e_lat)
e_lng = np.deg2rad(e_lng)
d = np.sin((e_lat - s_lat)/2)**2 + np.cos(s_lat)*np.cos(e_lat) * np.sin((e_lng - s_lng)/2)**2
distance = 2 * R * np.arcsin(np.sqrt(d))
return distance
def drivingdistance(origin, destination, APIkey):
"""
    Creates a call to the Google Maps API to get driving directions between two points.
Input
-----
origin: array
[lat, lon] expected
destination: array
[lat, lon] expected
    APIkey: str
        Google Maps API key
"""
lat1, lon1 = origin
lat2, lon2 = destination
gm_url = ('https://maps.googleapis.com/maps/api/directions/xml?'+
'origin='+str(lat1)+','+str(lon1)+
'&destination='+str(lat2)+','+str(lon2)+
'&key='+APIkey)
return gm_url
class Simulation:
"""
    The Simulation top-level class is used to work on Circular Economy scenario objects,
keep track of filenames, data for module and materials, operations modifying
the baselines, etc.
Parameters
----------
name : text to append to output files
nowstr : current date/time string
path : working directory with circular economy results
Methods
-------
__init__ : initialize the object
_setPath : change the working directory
"""
def __init__(self, name=None, path=None):
'''
        initialize Simulation with the path of the Scenario's baseline of module and materials
as well as a basename to append to
Parameters
----------
name: string, append temporary and output files with this value
        path: working directory for baseline files and results
Returns
-------
none
'''
self.path = "" # path of working directory
self.name = "" # basename to append
now = datetime.datetime.now()
self.nowstr = str(now.date())+'_'+str(now.hour)+str(now.minute)+str(now.second)
if path is None:
self._setPath(os.getcwd())
else:
self._setPath(path)
if name is None:
self.name = self.nowstr # set default filename for output files
else:
self.name = name
self.scenario={}
def _setPath(self, path):
"""
setPath - move path and working directory
"""
self.path = os.path.abspath(path)
print('path = '+ path)
try:
os.chdir(self.path)
except OSError as exc:
            print("Path doesn't exist: %s" % (path))
raise(exc)
        # check for the path and create it if needed:
def _checkPath(path): # create the file structure if it doesn't exist
if not os.path.exists(path):
os.makedirs(path)
print('Making path: '+path)
def createScenario(self, name, file=None):
self.scenario[name] = Scenario(name, file)
def calculateMassFlow(self, weibullInputParams = None,
bifacialityfactors = None, reducecapacity = True, debugflag=False):
'''
Function takes as input a baseline dataframe already imported,
with the right number of columns and content.
It returns the dataframe with all the added calculation columns.
Parameters
------------
weibullInputParams : None
Dictionary with 'alpha' and 'beta' value for shaping the weibull
curve. beta is sometimes exchanged with lifetime, for example on
Irena 2016 values beta = 30. If weibullInputParams = None,
            alpha and beta are calculated from the t50 and t90 columns on the
module baseline.
bifacialityfactors : str
            File with bifaciality factors for each year under consideration
Returns
--------
df: dataframe
            input dataframe with added columns for the calculations of recycled,
collected, waste, installed area, etc.
'''
for scen in self.scenario:
print("Working on Scenario: ", scen)
print("********************")
df = self.scenario[scen].data
# Constant
if bifacialityfactors is not None:
bf = pd.read_csv(bifacialityfactors)
df['irradiance_stc'] = 1000.0 + bf['bifi']*100.0 # W/m^2 (min. Bifacial STC Increase)
else:
df['irradiance_stc'] = 1000.0 # W/m^2
# Renaming and re-scaling
df['new_Installed_Capacity_[W]'] = df['new_Installed_Capacity_[MW]']*1e6
df['t50'] = df['mod_reliability_t50']
df['t90'] = df['mod_reliability_t90']
# Calculating Area and Mass
if reducecapacity:
df['Area'] = df['new_Installed_Capacity_[W]']/(df['mod_eff']*0.01)/df['irradiance_stc'] # m^2
else:
df['Area'] = df['new_Installed_Capacity_[W]']/(df['mod_eff']*0.01)/1000.0 # m^2
            df['Area'] = df['Area'].fillna(0) # Change NaNs to 0s.
            # Calculating Waste by Generation by Year, and Cumulative Waste by Year.
Generation_Disposed_byYear = []
Generation_Active_byYear= []
Generation_Power_byYear = []
weibullParamList = []
df['Cumulative_Area_disposedby_Failure'] = 0
df['Cumulative_Area_disposedby_ProjectLifetime'] = 0
df['Cumulative_Area_disposed'] = 0
df['Repaired_[W]'] = 0
df['Repaired_Area'] = 0
df['Cumulative_Active_Area'] = 0
df['Installed_Capacity_[W]'] = 0
for generation, row in df.iterrows():
#generation is an int 0,1,2,.... etc.
#generation=4
#row=df.iloc[generation]
if weibullInputParams:
weibullIParams = weibullInputParams
elif 'weibull_alpha' in row:
# "Weibull Input Params passed internally as a column"
weibullIParams = {'alpha': row['weibull_alpha'], 'beta': row['weibull_beta']}
else:
# "Calculating Weibull Params from Modules t50 and T90"
t50, t90 = row['t50'], row['t90']
weibullIParams = weibull_params({t50: 0.50, t90: 0.90})
f = weibull_cdf(weibullIParams['alpha'], weibullIParams['beta'])
weibullParamList.append(weibullIParams)
x = np.clip(df.index - generation, 0, np.inf)
cdf = list(map(f, x))
# pdf = [0] + [j - i for i, j in zip(cdf[: -1], cdf[1 :])]
activearea = row['Area']
if np.isnan(activearea):
activearea=0
activeareacount = []
areadisposed_failure = []
areadisposed_projectlifetime = []
arearepaired = []
arearepaired_powergen = []
areapowergen = []
active=0
disposed_projectlifetime=0
for age in range(len(cdf)):
disposed_projectlifetime=0
if x[age] == 0.0:
activeareacount.append(0)
areadisposed_failure.append(0)
areadisposed_projectlifetime.append(0)
areapowergen.append(0)
arearepaired.append(0)
arearepaired_powergen.append(0)
else:
active += 1
activeareaprev = activearea
activearea = activearea*(1-cdf[age]*(1-df.iloc[age]['mod_Repair']*0.01))
arearepaired_failure = activearea*cdf[age]*df.iloc[age]['mod_Repair']*0.01
arearepaired.append(arearepaired_failure)
arearepaired_powergen.append(arearepaired_failure*row['mod_eff']*0.01*row['irradiance_stc']*(1-row['mod_degradation']*0.01)**active)
areadisposed_failure.append(activeareaprev-activearea)
if age == int(row['mod_lifetime']+generation):
activearea_temp = activearea
activearea = 0+activearea*(df.iloc[age]['mod_MerchantTail']*0.01)
disposed_projectlifetime = activearea_temp-activearea
activearea = 0+disposed_projectlifetime*(df.iloc[age]['mod_Reuse']*0.01)
disposed_projectlifetime = activearea_temp-activearea
areadisposed_projectlifetime.append(disposed_projectlifetime)
activeareacount.append(activearea)
areapowergen.append(activearea*row['mod_eff']*0.01*row['irradiance_stc']*(1-row['mod_degradation']*0.01)**active)
try:
                # because the clip starts with 0 for the installation year, identifying installation year
# and adding initial area
fixinitialareacount = next((i for i, e in enumerate(x) if e), None) - 1
activeareacount[fixinitialareacount] = activeareacount[fixinitialareacount]+row['Area']
areapowergen[fixinitialareacount] = (areapowergen[fixinitialareacount] +
row['Area'] * row['mod_eff'] *0.01 * row['irradiance_stc'])
except:
                # The last value does not have a nonzero x-clip value, so it falls
                # through to except. It also means the loop finished the lifetime
                # calculations.
fixinitialareacount = len(cdf)-1
activeareacount[fixinitialareacount] = activeareacount[fixinitialareacount]+row['Area']
areapowergen[fixinitialareacount] = (areapowergen[fixinitialareacount] +
row['Area'] * row['mod_eff'] *0.01 * row['irradiance_stc'])
print("Finished Area+Power Generation Calculations")
# area_disposed_of_generation_by_year = [element*row['Area'] for element in pdf]
df['Cumulative_Area_disposedby_Failure'] += areadisposed_failure
df['Cumulative_Area_disposedby_ProjectLifetime'] += areadisposed_projectlifetime
df['Cumulative_Area_disposed'] += areadisposed_failure
df['Cumulative_Area_disposed'] += areadisposed_projectlifetime
df['Repaired_[W]'] += arearepaired_powergen
df['Repaired_Area'] += arearepaired
df['Cumulative_Active_Area'] += activeareacount
df['Installed_Capacity_[W]'] += areapowergen
Generation_Disposed_byYear.append([x + y for x, y in zip(areadisposed_failure, areadisposed_projectlifetime)])
Generation_Active_byYear.append(activeareacount)
Generation_Power_byYear.append(areapowergen)
df['WeibullParams'] = weibullParamList
MatrixDisposalbyYear = pd.DataFrame(Generation_Disposed_byYear, columns = df.index, index = df.index)
MatrixDisposalbyYear = MatrixDisposalbyYear.add_prefix("EOL_on_Year_")
try:
df = df[df.columns.drop(list(df.filter(regex='EOL_on_Year_')))]
except:
print("Warning: Issue dropping EOL columns generated by " \
"calculateMFC routine to overwrite")
df = df.join(MatrixDisposalbyYear)
## Start to do EOL Processes
############################
filter_col = [col for col in df if col.startswith('EOL_on_Year_')]
EOL = df[filter_col]
# This Multiplication pattern goes through Module and then material.
# It is for processes that depend on each year as they improve, i.e.
# Collection Efficiency,
#
# [ G1_1 G1_2 G1_3 G2_4 ...] [N1
# [ 0 G2_1 G2_2 G2_3 ...] X N2
# [ 0 0 G3_1 G3_2 ...] N3
# N4]
#
# EQUAL
# EOL_Collected =
# [ G1_1*N1 G1_2 *N2 G1_3 *N3 G2_4 *N4 ...]
# [ 0 G2_1 *N2 G2_2 *N3 G2_3 *N4 ...]
# [ 0 0 G3_1 *N3 G3_2 *N4 ...]
#
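            # Toy numeric check of the pattern above (illustrative values only):
            # two generations disposing [[4, 6], [0, 10]] m2 in years 1-2 with
            # collection efficiencies N = [50%, 80%] give
            # EOL_Collected = [[2, 4.8], [0, 8]] and EoL_Collected = [2, 12.8].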
EOL_Collected = EOL.mul(df['mod_EOL_collection_eff'].values*0.01)
df['EoL_Collected'] = list(EOL_Collected.sum())
landfill_Collection = EOL.mul(1-(df['mod_EOL_collection_eff'].values*0.01))
df['EoL_NotCollected'] = list(landfill_Collection.sum())
EOL_Recycled = EOL_Collected.mul(df['mod_EOL_collected_recycled'].values*0.01)
df['EoL_Recycled'] = list(EOL_Recycled.sum())
EOL_NotRecycled_Landfilled = EOL_Collected.mul((1-df['mod_EOL_collected_recycled'].values*0.01))
df['EoL_NotRecycled_Landfilled'] = list(EOL_NotRecycled_Landfilled.sum())
# Cleanup of internal renaming and internal use columns
df.drop(['new_Installed_Capacity_[W]', 't50', 't90'], axis = 1, inplace=True)
df['ModuleTotal_MFG']=df['Area']*100/df['mod_MFG_eff']
self.scenario[scen].data = df
# collection losses here
# Recyle % here
################
# Material Loop#
################
for mat in self.scenario[scen].material:
print("==> Working on Material : ", mat)
dm = self.scenario[scen].material[mat].materialdata
                # SWITCH TO MASS UNITS FOR THE MATERIAL NOW:
# THIS IS DIFFERENT MULTIPLICATION THAN THE REST
# BECAUSE IT DEPENDS TO THE ORIGINAL MASS OF EACH MODULE WHEN INSTALLED
# [M1 * [ G1_1 G1_2 G1_3 G2_4 ...]
# M2 [ 0 G2_1 G2_2 G2_3 ...]
# M3] [ 0 0 G3_1 G3_2 ...]
#
# EQUAL
# mat_EOL_sentoRecycling =
# [ G1_1*M1 G1_2*M1 G1_3*M1 G2_4*M1 ...]
# [ 0 G2_1*M2 G2_2*M2 G2_3*M2 ...]
# [ 0 0 G3_1*M3 G3_2*M3 ...]
#
mat_modules_EOL_sentoRecycling = EOL_Recycled.multiply(dm['mat_massperm2'], axis=0)
dm['mat_modules_Collected'] = list(EOL_Collected.multiply(dm['mat_massperm2'], axis=0).sum())
dm['mat_modules_NotCollected'] = list(landfill_Collection.multiply(dm['mat_massperm2'], axis=0).sum())
dm['mat_modules_Recycled'] = list(EOL_Recycled.multiply(dm['mat_massperm2'], axis=0).sum())
dm['mat_modules_NotRecycled'] = list(EOL_NotRecycled_Landfilled.multiply(dm['mat_massperm2'], axis=0).sum())
# mat_EOL_collected_Recycled CHANGE NAME
                # change also landfill_material_EOL_NotRecycled_Landfilled
mat_EOL_sento_Recycling = mat_modules_EOL_sentoRecycling.mul(dm['mat_EOL_collected_Recycled'].values*0.01)
dm['mat_EOL_sento_Recycling'] = list(mat_EOL_sento_Recycling.sum())
landfill_material_EOL_NotRecycled_Landfilled = mat_modules_EOL_sentoRecycling.mul(1-(dm['mat_EOL_collected_Recycled'].values*0.01))
dm['mat_EOL_NotRecycled_Landfilled'] = list(landfill_material_EOL_NotRecycled_Landfilled.sum())
mat_EOL_Recycled_Succesfully = mat_EOL_sento_Recycling.mul(dm['mat_EOL_Recycling_eff'].values*0.01)
dm['mat_EOL_Recycled'] = list(mat_EOL_Recycled_Succesfully.sum())
landfill_material_EOL_Recyled_Losses_Landfilled = mat_EOL_sento_Recycling.mul(1-(dm['mat_EOL_Recycling_eff'].values*0.01))
dm['mat_EOL_Recycled_Losses_Landfilled'] = list(landfill_material_EOL_Recyled_Losses_Landfilled.sum())
mat_EOL_Recycled_HQ = mat_EOL_Recycled_Succesfully.mul(dm['mat_EOL_Recycled_into_HQ'].values*0.01)
dm['mat_EOL_Recycled_2_HQ'] = list(mat_EOL_Recycled_HQ.sum())
mat_EOL_Recycled_OQ = mat_EOL_Recycled_Succesfully.mul(1-(dm['mat_EOL_Recycled_into_HQ'].values*0.01))
dm['mat_EOL_Recycled_2_OQ'] = list(mat_EOL_Recycled_OQ.sum())
mat_EOL_Recycled_HQ_into_MFG = mat_EOL_Recycled_HQ.mul(dm['mat_EOL_RecycledHQ_Reused4MFG'].values*0.01)
dm['mat_EoL_Recycled_HQ_into_MFG'] = list(mat_EOL_Recycled_HQ_into_MFG.sum())
mat_EOL_Recycled_HQ_into_OU = mat_EOL_Recycled_HQ.mul(1-(dm['mat_EOL_RecycledHQ_Reused4MFG'].values*0.01))
dm['mat_EOL_Recycled_HQ_into_OU'] = list(mat_EOL_Recycled_HQ_into_OU.sum())
# BULK Calculations Now
dm['mat_UsedSuccessfullyinModuleManufacturing'] = (df['Area'] * dm['mat_massperm2'])
dm['mat_EnteringModuleManufacturing'] = (df['Area'] * dm['mat_massperm2']*100/df['mod_MFG_eff'])
dm['mat_LostinModuleManufacturing'] = dm['mat_EnteringModuleManufacturing'] - dm['mat_UsedSuccessfullyinModuleManufacturing']
dm['mat_Manufacturing_Input'] = dm['mat_EnteringModuleManufacturing'] / (dm['mat_MFG_eff'] * 0.01)
# Scrap = Lost to Material manufacturing losses + Module manufacturing losses
dm['mat_MFG_Scrap'] = (dm['mat_Manufacturing_Input'] - dm['mat_EnteringModuleManufacturing'] +
dm['mat_LostinModuleManufacturing'])
dm['mat_MFG_Scrap_Sentto_Recycling'] = dm['mat_MFG_Scrap'] * dm['mat_MFG_scrap_Recycled'] * 0.01
dm['mat_MFG_Scrap_Landfilled'] = dm['mat_MFG_Scrap'] - dm['mat_MFG_Scrap_Sentto_Recycling']
dm['mat_MFG_Scrap_Recycled_Successfully'] = (dm['mat_MFG_Scrap_Sentto_Recycling'] *
dm['mat_MFG_scrap_Recycling_eff'] * 0.01)
dm['mat_MFG_Scrap_Recycled_Losses_Landfilled'] = (dm['mat_MFG_Scrap_Sentto_Recycling'] -
dm['mat_MFG_Scrap_Recycled_Successfully'])
dm['mat_MFG_Recycled_into_HQ'] = (dm['mat_MFG_Scrap_Recycled_Successfully'] *
dm['mat_MFG_scrap_Recycled_into_HQ'] * 0.01)
dm['mat_MFG_Recycled_into_OQ'] = dm['mat_MFG_Scrap_Recycled_Successfully'] - dm['mat_MFG_Recycled_into_HQ']
dm['mat_MFG_Recycled_HQ_into_MFG'] = (dm['mat_MFG_Recycled_into_HQ'] *
dm['mat_MFG_scrap_Recycled_into_HQ_Reused4MFG'] * 0.01)
dm['mat_MFG_Recycled_HQ_into_OU'] = dm['mat_MFG_Recycled_into_HQ'] - dm['mat_MFG_Recycled_HQ_into_MFG']
dm['mat_Virgin_Stock'] = dm['mat_Manufacturing_Input'] - dm['mat_EoL_Recycled_HQ_into_MFG'] - dm['mat_MFG_Recycled_HQ_into_MFG']
# Calculate raw virgin needs before mining and refining efficiency losses
dm['mat_Virgin_Stock_Raw'] = (dm['mat_Virgin_Stock'] * 100 / dm['mat_virgin_eff'])
# Add Wastes
dm['mat_Total_EOL_Landfilled'] = (dm['mat_modules_NotCollected'] +
dm['mat_modules_NotRecycled'] +
dm['mat_EOL_NotRecycled_Landfilled'] +
dm['mat_EOL_Recycled_Losses_Landfilled'])
dm['mat_Total_MFG_Landfilled'] = (dm['mat_MFG_Scrap_Landfilled'] +
dm['mat_MFG_Scrap_Recycled_Losses_Landfilled'])
dm['mat_Total_Landfilled'] = (dm['mat_Total_EOL_Landfilled'] +
dm['mat_Total_MFG_Landfilled'])
dm['mat_Total_Recycled_OU'] = (dm['mat_EOL_Recycled_2_OQ'] +
dm['mat_EOL_Recycled_HQ_into_OU'] +
dm['mat_MFG_Recycled_into_OQ'] +
dm['mat_MFG_Recycled_HQ_into_OU'])
self.scenario[scen].material[mat].materialdata = dm
def scenMod_IRENIFY(self, scens=None, ELorRL='RL'):
if ELorRL == 'RL':
weibullInputParams = {'alpha': 5.3759, 'beta': 30} # Regular-loss scenario IRENA
if ELorRL == 'EL':
            weibullInputParams = {'alpha': 2.49, 'beta': 30} # Early-loss scenario IRENA
if scens is None:
scens = list(self.scenario.keys())
for scen in scens:
self.scenario[scen].data['weibull_alpha'] = weibullInputParams['alpha']
self.scenario[scen].data['weibull_beta'] = weibullInputParams['beta']
self.scenario[scen].data['mod_lifetime'] = 40
self.scenario[scen].data['mod_MFG_eff'] = 100.0
for mat in self.scenario[scen].material:
self.scenario[scen].material[mat].materialdata['mat_MFG_eff'] = 100.0
self.scenario[scen].material[mat].materialdata['mat_MFG_scrap_Recycled'] = 0.0
return
def plotScenariosComparison(self, keyword=None):
if keyword is None:
scens = list(self.scenario.keys())[0]
print("Choose one of the keywords: ", list(self.scenario[scens].data.keys()))
return
yunits = _unitReferences(keyword)
plt.figure()
for scen in self.scenario:
plt.plot(self.scenario[scen].data['year'],self.scenario[scen].data[keyword], label=scen)
plt.legend()
plt.xlabel('Year')
plt.title(keyword.replace('_', " "))
plt.ylabel(yunits)
def plotMaterialComparisonAcrossScenarios(self, material = None, keyword=None):
if keyword is None:
scens = list(self.scenario.keys())[0]
mats = list(self.scenario[scens].material.keys())[0]
print("Choose one of the keywords: ", list(self.scenario[scens].material[mats].materialdata.keys()))
return
if material is None:
scens = list(self.scenario.keys())[0]
mats = list(self.scenario[scens].material.keys())
print("Choose one of the Materials: ", mats)
return
yunits = _unitReferences(keyword)
plt.figure()
for scen in self.scenario:
plt.plot(self.scenario[scen].data['year'], self.scenario[scen].material[material].materialdata[keyword], label=scen)
plt.legend()
plt.xlabel('Year')
plt.title((material + ' ' + keyword.replace('_', " ")))
plt.ylabel(yunits)
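# Minimal workflow sketch for the Simulation class above (CSV file names are hypothetical):
#   r1 = Simulation(name='demo')
#   r1.createScenario('standard', file='baseline_modules_US.csv')
#   r1.scenario['standard'].addMaterial('glass', file='baseline_material_glass.csv')
#   r1.calculateMassFlow()
#   r1.plotScenariosComparison('Installed_Capacity_[W]')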
class Scenario(Simulation):
def __init__(self, name, file=None):
self.name = name
self.material = {}
if file is None:
try:
file = _interactive_load('Select module baseline file')
except:
raise Exception('Interactive load failed. Tkinter not supported'+
'on this system. Try installing X-Quartz and reloading')
csvdata = open(str(file), 'r', encoding="UTF-8")
csvdata = open(str(file), 'r', encoding="UTF-8-sig")
firstline = csvdata.readline()
secondline = csvdata.readline()
head = firstline.rstrip('\n').split(",")
meta = dict(zip(head, secondline.rstrip('\n').split(",")))
data = pd.read_csv(csvdata, names=head)
data.loc[:, data.columns != 'year'] = data.loc[:, data.columns != 'year'].astype(float)
self.baselinefile = file
        self.metdata = meta
self.data = data
def addMaterial(self, materialname, file=None):
self.material[materialname] = Material(materialname, file)
def __getitem__(self, key):
return getattr(self, key)
    def __setitem__(self, key, value):
        return setattr(self, key, value)
class Material:
def __init__(self, materialname, file):
self.materialname = materialname
if file is None:
try:
file = _interactive_load('Select material baseline file')
except:
raise Exception('Interactive load failed. Tkinter not supported'+
'on this system. Try installing X-Quartz and reloading')
csvdata = open(str(file), 'r', encoding="UTF-8")
csvdata = open(str(file), 'r', encoding="UTF-8-sig")
firstline = csvdata.readline()
secondline = csvdata.readline()
head = firstline.rstrip('\n').split(",")
meta = dict(zip(head, secondline.rstrip('\n').split(",")))
data = pd.read_csv(csvdata, names=head)
data.loc[:, data.columns != 'year'] = data.loc[:, data.columns != 'year'].astype(float)
self.materialfile = file
self.materialmetdata = meta
self.materialdata = data
def weibull_params(keypoints):
r'''Returns shape parameter `alpha` and scale parameter `beta`
for a Weibull distribution whose CDF passes through the
two time: value pairs in `keypoints`
Parameters
----------
    keypoints : dict
        Dictionary of two time: CDF-value pairs, e.g. {t50: 0.50, t90: 0.90},
        where t50 is the year since deployment by which the cohort has lost 50%
        of the originally installed modules, and t90 is the year since deployment
        by which it has lost 90%. These values are used to calculate the shape
        and scale parameters for the Weibull distribution.
Returns
-------
alpha : float
Shape parameter `alpha` for weibull distribution.
beta : float
Scale parameter `beta` for weibull distribution. Often exchanged with ``lifetime``
like in Irena 2016, beta = 30.
'''
t1, t2 = tuple(keypoints.keys())
cdf1, cdf2 = tuple(keypoints.values())
alpha = np.ndarray.item(np.real_if_close(
(np.log(np.log(1 - cdf1)+0j) - np.log(np.log(1 - cdf2)+0j))/(np.log(t1) - np.log(t2))
))
beta = np.abs(np.exp(
(
np.log(t2)*((0+1j)*np.pi + np.log(np.log(1 - cdf1)+0j))
+ np.log(t1)*(((0-1j))*np.pi - np.log(np.log(1 - cdf2)+0j))
)/(
np.log(np.log(1 - cdf1)+0j) - np.log(np.log(1 - cdf2)+0j)
)
))
return {'alpha': alpha, 'beta': beta}
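# Illustrative use (numbers are hypothetical): a cohort losing 50% of its modules
# by year 28 and 90% by year 38 calls weibull_params({28: 0.50, 38: 0.90}) and
# passes the returned alpha/beta to weibull_cdf / weibull_pdf defined below.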
def weibull_cdf(alpha, beta):
'''Return the CDF for a Weibull distribution having:
shape parameter `alpha`
scale parameter `beta`
Parameters
----------
alpha : float
Shape parameter `alpha` for weibull distribution.
beta : float
Scale parameter `beta` for weibull distribution. Often exchanged with ``lifetime``
like in Irena 2016, beta = 30.
'''
def cdf(x):
return 1 - np.exp(-(np.array(x)/beta)**alpha)
return cdf
def weibull_pdf(alpha, beta):
r'''Return the PDF for a Weibull distribution having:
shape parameter `alpha`
scale parameter `beta`
Parameters
----------
alpha : float
Shape parameter `alpha` for weibull distribution.
beta : float
Scale parameter `beta` for weibull distribution. Often exchanged with ``lifetime``
like in Irena 2016, beta = 30.
'''
def pdf(x):
return (alpha/np.array(x)) * ((np.array(x)/beta)**alpha) * (np.exp(-(np.array(x)/beta)**alpha))
return pdf
def weibull_cdf_vis(alpha, beta, xlim=56):
r''' Returns the CDF for a weibull distribution of 1 generation
so it can be plotted.
Parameters
----------
alpha : float
Shape parameter `alpha` for weibull distribution.
beta : float
Scale parameter `beta` for weibull distribution. Often exchanged with ``lifetime``
like in Irena 2016, beta = 30.
xlim : int
Number of years to calculate the distribution for. i.e. x-axis limit.
Returns
-------
idf : list
List of weibull cumulative distribution values for year 0 until xlim.
'''
dfindex = pd.RangeIndex(0,xlim,1)
x = np.clip(dfindex - 0, 0, np.inf)
if alpha and beta:
i = weibull_cdf(alpha, beta)
idf = list(map(i, x))
return idf
def sens_StageImprovement(df, stage, improvement=1.3, start_year=None):
'''
Modifies baseline scenario for evaluating sensitivity of lifetime parameter.
    t50 and t90 reliability years get increased by the `improvement` parameter
    starting at the specified `start_year`.
Parameters
----------
df : dataframe
dataframe to be modified
stage : str
Stage that wants to be modified. This can be any of the module or
material specified values, for example:'MFG_Material_eff',
'mat_MFG_scrap_recycled', 'mat_MFG_scrap_Recycled',
'mat_MFG_scrap_Recycled_into_HQ', 'mat_MFG_scrap_Recycled_into_HQ_Reused4MFG'
'mod_EOL_collection_losses', 'mod_EOL_collected_recycled',
'mat_EOL_Recycling_eff', 'mat_EOL_Recycled_into_HQ',
'mat_EOL_RecycledHQ_Reused4MFG', 'mod_Repair',
'mod_MerchantTail', 'mod_Reuse', 'mod_eff', etc.
    improvement : decimal
        Multiplier applied to the stage values from `start_year` on
        (e.g. 1.3 for a 30% increase, 0.7 for a 30% decrease).
    start_year : int
        The year at which the improvement starts; defaults to the current year.
Returns
--------
df : dataframe
dataframe of expected module lifetime increased or decreased at specified year
'''
if start_year is None:
start_year = int(datetime.datetime.now().year)
#df[df.index > 2000]['mod_reliability_t50'].apply(lambda x: x*1.3)
df[stage] = df[stage].astype(float)
df.loc[df.index > start_year, stage] = df[df.index > start_year][stage].apply(lambda x: x*improvement)
return df
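# Illustrative call (column name and values are examples): raise module efficiency
# by 10% for all years after 2025 in a module baseline dataframe `df`:
#   df = sens_StageImprovement(df, stage='mod_eff', improvement=1.1, start_year=2025)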
def sens_StageEfficiency(df, stage, target_eff = 95.0, start_year = None,
goal_year = 2030, plotflag = False):
'''
Modifies baseline scenario for evaluating sensitivity to increasing a stage in the
lifetime of the module's efficiency. It either increases or decreases from the
start year until the goal year the value to the target efficiency by interpolation.
Parameters
----------
df : dataframe
dataframe to be modified
stage : str
Stage that wants to be modified. This can be any of the module or
material specified efficiencies, for example:'MFG_Material_eff',
'mat_MFG_scrap_recycled', 'mat_MFG_scrap_Recycled',
'mat_MFG_scrap_Recycled_into_HQ', 'mat_MFG_scrap_Recycled_into_HQ_Reused4MFG'
'mod_EOL_collection_losses', 'mod_EOL_collected_recycled',
'mat_EOL_Recycling_eff', 'mat_EOL_Recycled_into_HQ',
'mat_EOL_RecycledHQ_Reused4MFG', 'mod_Repair',
'mod_MerchantTail', 'mod_Reuse', 'mod_eff', etc.
start_year: int
Year to start modifying the value. This specifies the initial efficiency
value that is going to be modified. If None is passed, current year is used.
    target_eff: float
target eff value in percentage to be reached. i.e. 95.0 %.
goal_year : int
year by which target efficiency will be reached. i.e. 2030. Must be higher than current year.
Returns
-------
df : dataframe
modified dataframe
'''
if start_year is None:
start_year = int(datetime.datetime.now().year)
if start_year > goal_year:
print("Error. Goal Year is before start year")
return
if 0 < abs(target_eff) < 1: # checking it is not 0.95 but 95% i.e.
print("Warning: target_eff value is between 0 and 1; it has been"
"multiplied by 100% assuming it was a percentage in decimal form.")
target_eff = target_eff*100
if target_eff > 100 or target_eff < 0:
print("Warning: target_eff is out of range. Input value between"
"0 and 100")
return
if stage in df.columns:
df2 = df.copy()
df2[stage]=df2[stage].astype(float)
df2.loc[(df2.index < goal_year) & (df2.index > start_year), stage] = np.nan
df2.loc[df2.index >= goal_year , stage] = target_eff
df2[stage] = df2[stage].interpolate()
if plotflag:
plt.plot(df[stage], label='Original')
plt.plot(df2[stage], label='Modified')
plt.title('Updated values for '+stage)
plt.legend()
return df2
else:
print("Stage name incorrect.")
def _modDict(originaldict, moddict):
'''
Compares keys in originaldict with moddict and updates values of
originaldict to moddict if existing.
Parameters
----------
originaldict : dictionary
Original dictionary calculated, for example frontscan or backscan dictionaries.
moddict : dictionary
        Modified dictionary, for example modscan['x'] = 0 to change the position of x.
Returns
-------
originaldict : dictionary
Updated original dictionary with values from moddict.
'''
for key in moddict:
try:
originaldict[key] = moddict[key]
except:
print("Wrong key in modified dictionary")
return originaldict
def calculateLCA(PVarea, modified_impacts=None, printflag = False):
    '''
    Scale TRACI 2.1 life cycle impact results to the given PV area (in m2).
    Pass `modified_impacts` to override the default impact factors.
    '''
if printflag:
print("Doing calculations of LCA analysis for Silicon Photovoltaic Panels")
impacts = {'Acidification':{'UUID': '75d0c8a2-e466-3bd7-813b-5beef2209330',
'Result': 1.29374135667815,
'Unit': 'kg SO2' },
'Carcinogenics':{'UUID': 'a6e5e5d8-a1e5-3c77-8170-586c4fe37514',
'Result': 0.0000231966690476102,
'Unit': 'CTUh' },
'Ecotoxicity':{'UUID': '338e9370-ceb0-3d18-9d87-5f91feb7829c',
'Result': 5933.77859696668,
'Unit': 'CTUe' },
'Eutrophication':{'UUID': '45b8cd56-498a-3c6f-9488-134e951d8c02',
'Result': 1.34026194777363,
'Unit': 'kg N eq' },
'Fossil fuel depletion':{'UUID': '0e45786f-67fa-3b8a-b8a3-73a7c316434c',
'Result': 249.642261689385,
'Unit': 'MJ surplus' },
'Global warming':{'UUID': '31967441-d687-313d-9910-13da3a584ab7',
'Result': 268.548841324818,
'Unit': 'kg CO2 eq' },
'Non carcinogenics':{'UUID': 'd4827ae3-c873-3ea4-85fb-860b7f3f2dee',
'Result': 0.000135331806321799,
'Unit': 'CTUh' },
'Ozone depletion':{'UUID': '6c05dad1-6661-35f2-82aa-6e8e6a498aec',
'Result': 0.0000310937628622019,
'Unit': 'kg CFC-11 eq' },
'Respiratory effects':{'UUID': 'e0916d62-7fbd-3d0a-a4a5-52659b0ac9c1',
'Result': 0.373415542664206,
'Unit': 'kg PM2.5 eq' },
'Smog':{'UUID': '7a149078-e2fd-3e07-a5a3-79035c60e7c3',
'Result': 15.35483065,
'Unit': 'kg O3 eq' },
}
if modified_impacts is not None:
impacts = _modDict(impacts, modified_impacts)
if printflag:
print("Following Modified impacts provided instead of TRACI 2.1 default")
print(impacts)
print("")
else:
if printflag:
print("Following TRACI 2.1")
acidification = impacts['Acidification']['Result']*PVarea
carcinogenics = impacts['Carcinogenics']['Result']*PVarea
ecotoxicity = impacts['Ecotoxicity']['Result']*PVarea
eutrophication = impacts['Eutrophication']['Result']*PVarea
fossil_fuel_depletion = impacts['Fossil fuel depletion']['Result']*PVarea
global_warming = impacts['Global warming']['Result']*PVarea
non_carcinogenics = impacts['Non carcinogenics']['Result']*PVarea
ozone_depletion = impacts['Ozone depletion']['Result']*PVarea
respiratory_effects = impacts['Respiratory effects']['Result']*PVarea
smog = impacts['Smog']['Result']*PVarea
if printflag:
print("RESULTS FOR PV AREA ", PVarea, " m2 ")
print("****************************************")
print('Acidification: ', round(impacts['Acidification']['Result']*PVarea, 2), ' ', impacts['Acidification']['Unit'])
print('Carcinogenics: ', round(impacts['Carcinogenics']['Result']*PVarea, 2), ' ', impacts['Carcinogenics']['Unit'])
print('Ecotoxicity: ', round(impacts['Ecotoxicity']['Result']*PVarea, 2), ' ', impacts['Ecotoxicity']['Unit'])
print('Eutrophication: ', round(impacts['Eutrophication']['Result']*PVarea, 2), ' ', impacts['Eutrophication']['Unit'])
print('Fossil fuel depletion: ', round(impacts['Fossil fuel depletion']['Result']*PVarea, 2), ' ', impacts['Fossil fuel depletion']['Unit'])
print('Global warming: ', round(impacts['Global warming']['Result']*PVarea, 2), ' ', impacts['Global warming']['Unit'])
print('Non carcinogenics: ', round(impacts['Non carcinogenics']['Result']*PVarea, 2), ' ', impacts['Non carcinogenics']['Unit'])
print('Ozone depletion: ', round(impacts['Ozone depletion']['Result']*PVarea, 2), ' ', impacts['Ozone depletion']['Unit'])
print('Respiratory effects: ', round(impacts['Respiratory effects']['Result']*PVarea, 2), ' ', impacts['Respiratory effects']['Unit'])
print('Smog: ', round(impacts['Smog']['Result']*PVarea, 2), ' ', impacts['Smog']['Unit'])
return (acidification, carcinogenics, ecotoxicity, eutrophication,
fossil_fuel_depletion, global_warming,
non_carcinogenics, ozone_depletion, respiratory_effects, smog)
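# Illustrative call (area value is hypothetical): the ten TRACI 2.1 impact totals
# scaled to 1000 m2 of PV area, printed and returned as a tuple:
#   results = calculateLCA(PVarea=1000, printflag=True)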
| 47.389238
| 184
| 0.539147
|
483f39a3038df58bb4eb40c95d9192d8d2038e45
| 1,918
|
py
|
Python
|
option.py
|
hoya012/captum-tutorials-pytorch
|
ab8cace9a1457a74cadfe6671598237a4d389cc0
|
[
"MIT"
] | 22
|
2020-10-21T08:03:35.000Z
|
2022-03-08T16:07:27.000Z
|
option.py
|
hoya012/captum-tutorials-pytorch
|
ab8cace9a1457a74cadfe6671598237a4d389cc0
|
[
"MIT"
] | 2
|
2021-01-29T11:09:47.000Z
|
2022-01-06T14:40:41.000Z
|
option.py
|
hoya012/captum-tutorials-pytorch
|
ab8cace9a1457a74cadfe6671598237a4d389cc0
|
[
"MIT"
] | 5
|
2020-10-22T04:32:07.000Z
|
2022-01-14T17:09:46.000Z
|
import argparse
def get_args():
parser = argparse.ArgumentParser()
# model architecture & checkpoint
parser.add_argument('--model', default='ResNet18', choices=('ResNet18', 'ResNet50'),
help='optimizer to use (ResNet18 | ResNet50)')
parser.add_argument('--norm', default='batchnorm')
parser.add_argument('--num_classes', type=int, default=6)
parser.add_argument('--pretrained', type=int, default=1)
parser.add_argument('--pretrained_path', type=str, default=None)
parser.add_argument('--checkpoint_dir', type=str, default='checkpoint')
parser.add_argument('--checkpoint_name', type=str, default='')
# data loading
parser.add_argument('--num_workers', type=int, default=16)
parser.add_argument('--seed', type=int, default=42, help='random seed')
# training hyper parameters
parser.add_argument('--batch_size', type=int, default=256)
parser.add_argument('--epochs', type=int, default=120)
parser.add_argument('--log_interval', type=int, default=20)
parser.add_argument('--evaluate', action='store_true', default=False)
parser.add_argument('--amp', action='store_true', default=False)
# optimzier & learning rate scheduler
parser.add_argument('--learning_rate', type=float, default=0.0001)
parser.add_argument('--weight_decay', type=float, default=0.0001)
parser.add_argument('--optimizer', default='ADAM', choices=('SGD', 'ADAM'),
help='optimizer to use (SGD | ADAM)')
parser.add_argument('--decay_type', default='cosine_warmup', choices=('step', 'step_warmup', 'cosine_warmup', 'swa'),
help='optimizer to use (step | step_warmup | cosine_warmup | stochastic weight averaging)')
parser.add_argument('--swa_start', type=int, default=90)
parser.add_argument('--swa_lr', type=float, default=0.05)
args = parser.parse_args()
return args
| 47.95
| 121
| 0.679353
|
287e4996fe6eedecc512558093fb96c80de68774
| 5,519
|
py
|
Python
|
pretrain/pretrain_tape.py
|
IC-hub/ProteinLM
|
58fbf1f674569cf814becf32f71dd0d8f0c592fa
|
[
"Apache-2.0"
] | 59
|
2021-05-11T03:41:30.000Z
|
2022-03-02T19:16:18.000Z
|
pretrain/pretrain_tape.py
|
YijiaShaw/ProteinLM
|
fda4f381b4b974721b187cece968dd7bc96a81f4
|
[
"Apache-2.0"
] | 7
|
2021-05-22T11:17:39.000Z
|
2022-02-26T01:07:40.000Z
|
pretrain/pretrain_tape.py
|
YijiaShaw/ProteinLM
|
fda4f381b4b974721b187cece968dd7bc96a81f4
|
[
"Apache-2.0"
] | 13
|
2021-05-06T01:27:17.000Z
|
2021-09-09T02:23:06.000Z
|
# coding=utf-8
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2021, Knowledge Engineering Group (KEG), Tsinghua University
# Modified by Jiezhong Qiu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pretrain TAPE"""
import torch
import torch.nn.functional as F
from megatron import get_args, get_tokenizer
from megatron import print_rank_0
from megatron import get_timers
from megatron import mpu
from megatron.data.tape_dataset import build_train_valid_test_datasets
from megatron.model import BertModel, BertModelFirstStage, BertModelIntermediateStage, BertModelLastStage
from megatron.training import pretrain
from megatron.utils import average_losses_across_data_parallel_group
from megatron.utils import get_tape_masks_and_position_ids
from megatron.model.bert_model import bert_extended_attention_mask
def model_provider():
"""Build the model."""
print_rank_0('building TAPE model ...')
args = get_args()
if mpu.get_pipeline_model_parallel_world_size() > 1:
# Determine model based on position of stage in pipeline.
if mpu.is_pipeline_first_stage():
model = BertModelFirstStage(
num_tokentypes=0)
elif mpu.is_pipeline_last_stage():
model = BertModelLastStage(
num_tokentypes=0,
add_binary_head=False,
parallel_output=True)
else:
model = BertModelIntermediateStage(
num_tokentypes=0)
else:
model = BertModel(
num_tokentypes=0,
add_binary_head=False,
parallel_output=True)
return model
def get_batch(data_iterator):
"""Build the batch."""
tokenizer = get_tokenizer()
# Items and their type.
keys = ['text', 'labels', 'loss_mask', 'padding_mask']
datatype = torch.int64
# Broadcast data.
if data_iterator is not None:
data = next(data_iterator)
else:
data = None
data_b = mpu.broadcast_data(keys, data, datatype)
# Unpack.
tokens = data_b['text'].long()
loss_mask = data_b['loss_mask'].float()
lm_labels = data_b['labels'].long()
padding_mask = data_b['padding_mask'].long()
    # Get the masks and position ids.
attention_mask, position_ids = get_tape_masks_and_position_ids(
tokens,
tokenizer.cls,
reset_position_ids=True,
reset_attention_mask=True)
return tokens, loss_mask, lm_labels, padding_mask, attention_mask, position_ids
def forward_step(data_iterator, model, input_tensor):
"""Forward step."""
args = get_args()
timers = get_timers()
# Get the batch.
timers('batch-generator').start()
tokens, loss_mask, lm_labels, padding_mask, attention_mask, position_ids \
= get_batch(data_iterator)
timers('batch-generator').stop()
extended_attention_mask = bert_extended_attention_mask(padding_mask) + attention_mask
# Forward pass through the model.
if mpu.is_pipeline_first_stage():
assert input_tensor is None
if mpu.is_pipeline_last_stage():
output_tensor = model(tokens, extended_attention_mask, tokentype_ids=None,
lm_labels=lm_labels, position_ids=position_ids)
else:
output_tensor = model(tokens, extended_attention_mask, tokentype_ids=None)
elif mpu.is_pipeline_last_stage():
assert input_tensor is not None
output_tensor = model(input_tensor, extended_attention_mask, lm_labels=lm_labels)
else:
assert input_tensor is not None
output_tensor = model(input_tensor, extended_attention_mask, position_ids=position_ids)
if mpu.is_pipeline_last_stage():
lm_loss_, _ = output_tensor
lm_loss_ = lm_loss_.float()
loss_mask = loss_mask.float()
lm_loss = torch.sum(
lm_loss_.view(-1) * loss_mask.reshape(-1)) / loss_mask.sum()
loss = lm_loss
averaged_losses = average_losses_across_data_parallel_group([lm_loss,])
return loss, {'lm loss': averaged_losses[0]}
return output_tensor
def train_valid_test_datasets_provider(train_val_test_num_samples):
"""Build train, valid, and test datasets."""
args = get_args()
print_rank_0('> building train, validation, and test datasets '
'for TAPE ...')
train_ds, valid_ds, test_ds = build_train_valid_test_datasets(
data_prefix=args.data_path,
data_impl=args.data_impl,
splits_string=args.split,
train_valid_test_num_samples=train_val_test_num_samples,
seq_length=args.seq_length,
masked_lm_prob=args.mask_prob,
seed=args.seed,
skip_warmup=(not args.mmap_warmup))
print_rank_0("> finished creating TAPE datasets ...")
return train_ds, valid_ds, test_ds
if __name__ == "__main__":
pretrain(train_valid_test_datasets_provider, model_provider, forward_step,
args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'})
| 33.858896
| 105
| 0.69904
|
fd93fe0901886d30c237997e726a37a9e4ef7a8c
| 3,343
|
py
|
Python
|
src/olympia/conf/prod/settings.py
|
petercpg/addons-server
|
892e442ee61fe2592f19969357ed5d40a9e9fb2e
|
[
"BSD-3-Clause"
] | 3
|
2020-03-05T18:17:14.000Z
|
2020-03-09T01:24:38.000Z
|
src/olympia/conf/prod/settings.py
|
petercpg/addons-server
|
892e442ee61fe2592f19969357ed5d40a9e9fb2e
|
[
"BSD-3-Clause"
] | null | null | null |
src/olympia/conf/prod/settings.py
|
petercpg/addons-server
|
892e442ee61fe2592f19969357ed5d40a9e9fb2e
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from olympia.lib.settings_base import * # noqa
ENGAGE_ROBOTS = True
EMAIL_URL = env.email_url('EMAIL_URL')
EMAIL_HOST = EMAIL_URL['EMAIL_HOST']
EMAIL_PORT = EMAIL_URL['EMAIL_PORT']
EMAIL_BACKEND = EMAIL_URL['EMAIL_BACKEND']
EMAIL_HOST_USER = EMAIL_URL['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = EMAIL_URL['EMAIL_HOST_PASSWORD']
SEND_REAL_EMAIL = True
ENV = env('ENV')
API_THROTTLING = True
CDN_HOST = 'https://addons.cdn.mozilla.net'
DOMAIN = env('DOMAIN', default='addons.mozilla.org')
SERVER_EMAIL = 'zprod@addons.mozilla.org'
SITE_URL = 'https://' + DOMAIN
EXTERNAL_SITE_URL = env('EXTERNAL_SITE_URL',
default='https://addons.mozilla.org')
SERVICES_URL = env('SERVICES_URL',
default='https://services.addons.mozilla.org')
CODE_MANAGER_URL = env('CODE_MANAGER_URL',
default='https://code.addons.mozilla.org')
STATIC_URL = '%s/static/' % CDN_HOST
MEDIA_URL = '%s/user-media/' % CDN_HOST
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# Domain emails should be sent to.
INBOUND_EMAIL_DOMAIN = env('INBOUND_EMAIL_DOMAIN',
default='addons.mozilla.org')
DATABASES = {
'default': get_db_config('DATABASES_DEFAULT_URL'),
'replica': get_db_config('DATABASES_REPLICA_URL', atomic_requests=False),
}
SERVICES_DATABASE = get_db_config('SERVICES_DATABASE_URL')
REPLICA_DATABASES = ['replica']
CACHES = {}
CACHES['default'] = env.cache('CACHES_DEFAULT')
CACHES['default']['TIMEOUT'] = 500
CACHES['default']['BACKEND'] = 'django.core.cache.backends.memcached.MemcachedCache' # noqa
CACHES['default']['KEY_PREFIX'] = CACHE_KEY_PREFIX
# Celery
CELERY_BROKER_CONNECTION_TIMEOUT = 0.5
LOGGING['loggers'].update({
'adi.updatecounts': {'level': logging.INFO},
'amqp': {'level': logging.WARNING},
'raven': {'level': logging.WARNING},
'requests': {'level': logging.WARNING},
'z.addons': {'level': logging.INFO},
'z.task': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
})
ES_TIMEOUT = 60
ES_HOSTS = env('ES_HOSTS')
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_%s' % (v, ENV)) for k, v in ES_INDEXES.items())
CEF_PRODUCT = STATSD_PREFIX
NEW_FEATURES = True
ADDONS_LINTER_BIN = 'node_modules/.bin/addons-linter'
NEWRELIC_ENABLE = env.bool('NEWRELIC_ENABLE', default=False)
if NEWRELIC_ENABLE:
NEWRELIC_INI = '/etc/newrelic.d/%s.ini' % DOMAIN
FXA_CONFIG = {
'default': {
'client_id': env('FXA_CLIENT_ID'),
'client_secret': env('FXA_CLIENT_SECRET'),
# fxa redirects to https://%s/api/auth/authenticate-callback/ % DOMAIN
},
}
DEFAULT_FXA_CONFIG_NAME = 'default'
ALLOWED_FXA_CONFIGS = ['default']
ES_DEFAULT_NUM_SHARDS = 10
RECOMMENDATION_ENGINE_URL = env(
'RECOMMENDATION_ENGINE_URL',
default='https://taar.prod.mozaws.net/v1/api/recommendations/')
TAAR_LITE_RECOMMENDATION_ENGINE_URL = env(
'TAAR_LITE_RECOMMENDATION_ENGINE_URL',
default=('https://taarlite.prod.mozaws.net/taarlite/api/v1/'
'addon_recommendations/'))
FXA_SQS_AWS_QUEUE_URL = (
'https://sqs.us-west-2.amazonaws.com/361527076523/'
'amo-account-change-prod')
EXTENSION_WORKSHOP_URL = env(
'EXTENSION_WORKSHOP_URL', default='https://extensionworkshop.com')
KINTO_API_URL = 'https://settings-writer.prod.mozaws.net/v1/'
| 29.584071
| 92
| 0.700867
|
b4ebf73a51caa7bd394a79d2a4acda8884cfa14c
| 433
|
py
|
Python
|
server/support/multithreadSupport.py
|
BernardoGO/PUCPyWS
|
8daecc5205ee00260b7320104e95b84e38ce9fdf
|
[
"Unlicense",
"MIT"
] | 12
|
2015-03-03T00:34:33.000Z
|
2018-10-03T18:56:43.000Z
|
server/support/multithreadSupport.py
|
BernardoGO/PUCPyWS
|
8daecc5205ee00260b7320104e95b84e38ce9fdf
|
[
"Unlicense",
"MIT"
] | null | null | null |
server/support/multithreadSupport.py
|
BernardoGO/PUCPyWS
|
8daecc5205ee00260b7320104e95b84e38ce9fdf
|
[
"Unlicense",
"MIT"
] | null | null | null |
__author__ = 'bernardogo'
import sys
if sys.version_info >= (3, 0):
from socketserver import ThreadingMixIn
from http.server import HTTPServer as http
else:
from SocketServer import ThreadingMixIn
from BaseHTTPServer import HTTPServer as http
class ThreadedHTTPServer(ThreadingMixIn, http):
""" This class allows to handle requests in separated threads.
No further content needed, don't touch this. """
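# Editor's note (hedged usage sketch; the handler class below is illustrative, not from this repo):
#   from http.server import BaseHTTPRequestHandler
#   server = ThreadedHTTPServer(('0.0.0.0', 8080), BaseHTTPRequestHandler)
#   server.serve_forever()   # ThreadingMixIn dispatches each incoming request to its own thread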
| 30.928571
| 66
| 0.750577
|
986e3de5d3c075dfd552bfec824e5f119844d0e5
| 2,333
|
py
|
Python
|
tools/SDKTool/libs/customDialog.py
|
BernhardRiemann/GameAISDK
|
da24c600e1cdc890739ee274032a17fb9ce75c5c
|
[
"Apache-2.0"
] | 3
|
2021-03-15T13:53:37.000Z
|
2021-11-17T10:34:29.000Z
|
tools/SDKTool/libs/customDialog.py
|
VenmoTools/GameAISDK
|
208320760440400d369aa8ab2f2439494195e6bd
|
[
"Apache-2.0"
] | null | null | null |
tools/SDKTool/libs/customDialog.py
|
VenmoTools/GameAISDK
|
208320760440400d369aa8ab2f2439494195e6bd
|
[
"Apache-2.0"
] | 1
|
2021-02-19T12:04:05.000Z
|
2021-02-19T12:04:05.000Z
|
'''
Custom confirmation dialog; code adapted from labelImg.
Source: https://github.com/tzutalin/labelImg/tree/master/libs
'''
try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from libs.utils import newIcon, labelValidator
BB = QDialogButtonBox
class customDialog(QDialog):
def __init__(self, text="输入版本号", parent=None):
super(customDialog, self).__init__(parent)
self.setWindowTitle(text)
self.edit = QLineEdit()
self.edit.setText(text)
self.edit.setValidator(labelValidator())
self.edit.editingFinished.connect(self.postProcess)
layout = QVBoxLayout()
layout.addWidget(self.edit)
self.buttonBox = bb = BB(BB.Ok | BB.Cancel, Qt.Horizontal, self)
bb.button(BB.Ok).setIcon(newIcon('done'))
bb.button(BB.Cancel).setIcon(newIcon('undo'))
bb.accepted.connect(self.validate)
bb.rejected.connect(self.reject)
layout.addWidget(bb)
self.setLayout(layout)
def validate(self):
try:
if self.edit.text().trimmed():
self.accept()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
if self.edit.text().strip():
self.accept()
def postProcess(self):
try:
self.edit.setText(self.edit.text().trimmed())
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
self.edit.setText(self.edit.text())
def popUp(self, text='', move=True):
self.edit.setText(text)
self.edit.setSelection(0, len(text))
self.edit.setFocus(Qt.PopupFocusReason)
if move:
self.move(QCursor.pos())
return self.edit.text() if self.exec() else None
def listItemClick(self, tQListWidgetItem):
try:
text = tQListWidgetItem.text().trimmed()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
text = tQListWidgetItem.text().strip()
self.edit.setText(text)
def listItemDoubleClick(self, tQListWidgetItem):
self.listItemClick(tQListWidgetItem)
self.validate()
| 30.697368
| 76
| 0.625804
|
ff2cfb7fed92baaed3ef382ee1a5c9ba4c7c6e08
| 1,973
|
py
|
Python
|
sys/2/common.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | 1
|
2017-12-12T13:58:08.000Z
|
2017-12-12T13:58:08.000Z
|
sys/2/common.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | null | null | null |
sys/2/common.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | 1
|
2019-11-03T10:16:35.000Z
|
2019-11-03T10:16:35.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import os.path
def is_non_zero_file(fpath):
return os.path.isfile(fpath) and os.path.getsize(fpath) > 0
def jsloifex(filename, default={}): # JSON load if exists
return json.load(open(filename, encoding='utf-8')) if is_non_zero_file(filename) else default
def jsstfacp(obj, path, indent=None, separators=None, sort_keys=False, odpl=None):
""" JSON store fix and create path
:param obj:
:param path:
:param basename:
:param indent:
:param separators:
:param sort_keys:
:param odpl:
:return: """
head, tail = os.path.split(path)
root, extension = os.path.splitext(tail)
head += "/" if not (head.endswith("/") or head.endswith("\\")) else ""
mkdir(head)
extension += '.json' if not extension.endswith('.json') else ''
path = os.path.join(head, root + extension)
if odpl: # One Dictionary Per Line
with open(path, 'w', encoding="utf-8") as f:
dump = json.dumps(obj, sort_keys=sort_keys)
if type(obj) == list:
f.write("[%s]" % ",\n ".join(map(json.dumps, obj)))
elif type(obj) == dict:
dump = dump.replace("}, ", "},\n")
fili = dump[:dump.find("}")] # First line
sbac = fili.count("{") # Start brackets count "{"
sfux = [" " * findnthstr(fili, "{", i) for i in range(-1, sbac)]
for line in dump.split("\n"):
stag = line.count("{") # start tags/brackets
f.write(sfux[sbac - stag] + line + "\n")
else:
print('nothing to do here')
else:
json.dump(obj, open(path, 'w'), indent=indent, separators=separators, sort_keys=sort_keys)
def findnthstr(s, c, n):
p = 0
for i in range(n):
p = s.find(c, p + 1)
return p
def mkdir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
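# Editor's note (hedged usage sketch; file names are made up for illustration):
#   cfg = jsloifex('config.json', default={})      # returns {} if the file is missing or empty
#   cfg['runs'] = cfg.get('runs', 0) + 1
#   jsstfacp(cfg, 'out/config', indent=2)          # writes out/config.json, creating out/ if needed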
| 34.614035
| 98
| 0.563102
|
04b12243d95ede2c445de8404269efe36067e1b3
| 1,091
|
py
|
Python
|
Course 01 - Getting Started with Python/Extra Studies/Basics/ex011.py
|
marcoshsq/python_practical_exercises
|
77136cd4bc0f34acde3380ffdc5af74f7a960670
|
[
"MIT"
] | 9
|
2022-03-22T16:45:17.000Z
|
2022-03-25T20:22:35.000Z
|
Course 01 - Getting Started with Python/Extra Studies/Basics/ex011.py
|
marcoshsq/python_practical_exercises
|
77136cd4bc0f34acde3380ffdc5af74f7a960670
|
[
"MIT"
] | null | null | null |
Course 01 - Getting Started with Python/Extra Studies/Basics/ex011.py
|
marcoshsq/python_practical_exercises
|
77136cd4bc0f34acde3380ffdc5af74f7a960670
|
[
"MIT"
] | 3
|
2022-03-22T17:03:38.000Z
|
2022-03-29T17:20:55.000Z
|
# Extra Exercise 010
"""Make a program that asks how much you earn per hour and the number of hours worked in the month.
Calculate and show the total of your salary in that month, knowing that 11% is deducted for Income Tax,
8% for the INSS and 5% for the union, make a program that gives us:
a) gross salary.
b) how much you paid to the INSS.
c) how much he paid to the union.
d) the net salary.
e) calculate the discounts and the net salary, according to the table below:
+ Gross Salary: R$
- Income Tax (11%): BRL
- INSS (8%): BRL
- Union (5%): BRL
= Net Salary: R$
Obs.: Gross Salary - Discounts = Net Salary."""
hour_pay = float(input("Earning per hour: "))
worked_hour = float(input("Hours of work: "))
gross_salary = hour_pay * worked_hour
ir = gross_salary * 0.11
inss = gross_salary * 0.08
sindicate = gross_salary * 0.05
net_salary = gross_salary - (ir + inss + sindicate)
print(f"O seu salário bruto é: R${gross_salary}")
print(
f"Foram descontados R${ir} de IR(11%), R${inss} de INSS(8%) e R${sindicate} do sindicato"
)
print(f"O seu salário liquido é R${net_salary}")
| 32.088235
| 103
| 0.706691
|
b7f60eba7ebdbee027fb13c875a97229f228cb47
| 1,189
|
py
|
Python
|
molecule/resources/tests/test_docker_engine.py
|
Penbase/ansible-dockerswarm
|
579f802ec3b97a6a57f1362fdea04006d13b4ee2
|
[
"MIT"
] | 269
|
2016-07-07T05:03:17.000Z
|
2022-03-14T13:36:49.000Z
|
molecule/resources/tests/test_docker_engine.py
|
Penbase/ansible-dockerswarm
|
579f802ec3b97a6a57f1362fdea04006d13b4ee2
|
[
"MIT"
] | 77
|
2016-09-01T17:30:14.000Z
|
2022-02-21T16:31:26.000Z
|
molecule/resources/tests/test_docker_engine.py
|
Penbase/ansible-dockerswarm
|
579f802ec3b97a6a57f1362fdea04006d13b4ee2
|
[
"MIT"
] | 148
|
2016-09-07T15:40:13.000Z
|
2022-03-19T21:49:14.000Z
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_docker_ce_config(host):
d = host.file('/etc/docker')
assert d.exists
assert d.user == 'root'
assert d.group == 'root'
assert d.mode == 0o755
f = host.file('/etc/docker/daemon.json')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o640
def test_docker_ce_stable_repository_exists(host):
f = None
if host.system_info.distribution.lower() in debian_os:
f = host.file('/etc/apt/sources.list.d/docker_ce_stable.list')
if host.system_info.distribution.lower() in rhel_os:
f = host.file('/etc/yum.repos.d/docker_ce_stable.repo')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
def test_docker_ce_installed(host):
assert host.package('docker-ce').is_installed
def test_docker_ce_service(host):
s = host.service('docker')
assert s.is_running
assert s.is_enabled
| 23.78
| 70
| 0.682927
|
8bebbadb1ff46fbfe8717dedeb3bc62229e18510
| 1,530
|
py
|
Python
|
Game24/Game24.py
|
ttkaixin1998/pikachupythongames
|
609a3a5a2be3f5a187c332c7980bb5bb14548f02
|
[
"MIT"
] | 4,013
|
2018-06-16T08:00:02.000Z
|
2022-03-30T11:48:14.000Z
|
Game24/Game24.py
|
pigbearcat/Games
|
b8c47ef1bcce9a9db3f3730c162e6e8e08b508a2
|
[
"MIT"
] | 22
|
2018-10-18T00:15:50.000Z
|
2022-01-13T08:16:15.000Z
|
Game24/Game24.py
|
pigbearcat/Games
|
b8c47ef1bcce9a9db3f3730c162e6e8e08b508a2
|
[
"MIT"
] | 2,172
|
2018-07-20T04:03:14.000Z
|
2022-03-31T14:18:29.000Z
|
'''
Function:
    Greedy Snake mini game
Author:
    Charles
WeChat official account:
    Charles的皮卡丘
'''
import cfg
import sys
import pygame
from modules import *
'''Main function'''
def main(cfg):
    # Initialize the game
pygame.init()
screen = pygame.display.set_mode(cfg.SCREENSIZE)
pygame.display.set_caption('Greedy Snake —— Charles的皮卡丘')
clock = pygame.time.Clock()
    # Play the background music
pygame.mixer.music.load(cfg.BGMPATH)
pygame.mixer.music.play(-1)
    # Main game loop
snake = Snake(cfg)
apple = Apple(cfg, snake.coords)
score = 0
while True:
screen.fill(cfg.BLACK)
        # --Key press detection
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.KEYDOWN:
if event.key in [pygame.K_UP, pygame.K_DOWN, pygame.K_LEFT, pygame.K_RIGHT]:
snake.setDirection({pygame.K_UP: 'up', pygame.K_DOWN: 'down', pygame.K_LEFT: 'left', pygame.K_RIGHT: 'right'}[event.key])
        # --Update the snake and the food
if snake.update(apple):
apple = Apple(cfg, snake.coords)
score += 1
        # --Check whether the game is over
if snake.isgameover: break
        # --Draw the necessary game elements
drawGameGrid(cfg, screen)
snake.draw(screen)
apple.draw(screen)
showScore(cfg, score, screen)
        # --Refresh the screen
pygame.display.update()
clock.tick(cfg.FPS)
return endInterface(screen, cfg)
'''run'''
if __name__ == '__main__':
while True:
if not main(cfg):
break
| 25.5
| 141
| 0.572549
|
07a63f4272f84ac5fe8af26fd69874c7fef5f393
| 1,631
|
py
|
Python
|
books/admin.py
|
tas09009/django-back
|
7c3f181a6658c771e155393736909f176db878c4
|
[
"MIT"
] | null | null | null |
books/admin.py
|
tas09009/django-back
|
7c3f181a6658c771e155393736909f176db878c4
|
[
"MIT"
] | null | null | null |
books/admin.py
|
tas09009/django-back
|
7c3f181a6658c771e155393736909f176db878c4
|
[
"MIT"
] | 3
|
2021-01-27T04:24:51.000Z
|
2021-03-11T18:56:22.000Z
|
from django.contrib import admin
from django import forms
from .models import Book, Category, User, BookCategory, UserHistory
class BookAdminForm(forms.ModelForm):
class Meta:
model = Book
fields = '__all__'
class CategoryAdminForm(forms.ModelForm):
class Meta:
model = Category
fields = '__all__'
class UserAdminForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
class BookCategoryAdminForm(forms.ModelForm):
class Meta:
model = BookCategory
fields = '__all__'
class UserHistoryAdminForm(forms.ModelForm):
class Meta:
model = UserHistory
fields = '__all__'
class BookAdmin(admin.ModelAdmin):
form = BookAdminForm
list_display = ['title', 'url', 'author']
readonly_fields = []
admin.site.register(Book, BookAdmin)
class CategoryAdmin(admin.ModelAdmin):
form = CategoryAdminForm
list_display = ['name']
readonly_fields = []
admin.site.register(Category, CategoryAdmin)
class UserAdmin(admin.ModelAdmin):
form = UserAdminForm
list_display = ['name', 'email', 'password_digest', 'intro']
readonly_fields = []
admin.site.register(User, UserAdmin)
class BookCategoryAdmin(admin.ModelAdmin):
form = BookCategoryAdminForm
list_display = ['book', 'category']
readonly_fields = []
admin.site.register(BookCategory, BookCategoryAdmin)
class UserHistoryAdmin(admin.ModelAdmin):
form = UserHistoryAdminForm
list_display = ['progress', 'book', 'user']
readonly_fields = []
admin.site.register(UserHistory, UserHistoryAdmin)
| 22.342466
| 67
| 0.688535
|
efc226c9378c9caaecd82c5f06ec7863ac94a5ba
| 515
|
py
|
Python
|
NoSQL/query_dynamodb.py
|
JSJeong-me/KOSA_BIGDATA_DEEPLEARNONG
|
5126befee247c33afedcba4ce72d1a000e25a218
|
[
"MIT"
] | null | null | null |
NoSQL/query_dynamodb.py
|
JSJeong-me/KOSA_BIGDATA_DEEPLEARNONG
|
5126befee247c33afedcba4ce72d1a000e25a218
|
[
"MIT"
] | null | null | null |
NoSQL/query_dynamodb.py
|
JSJeong-me/KOSA_BIGDATA_DEEPLEARNONG
|
5126befee247c33afedcba4ce72d1a000e25a218
|
[
"MIT"
] | 1
|
2022-02-15T04:41:38.000Z
|
2022-02-15T04:41:38.000Z
|
import boto3
from boto3.dynamodb.conditions import Key
ID = ''
ID_KEY = ''
tableName = 'users'
dynamodb = boto3.resource('dynamodb', region_name='ap-northeast-1',aws_access_key_id=ID, aws_secret_access_key=ID_KEY)
table = dynamodb.Table(tableName)
#Get all users with last name of Johnson
johnsons = table.scan(
FilterExpression=Key('last_name').eq("Johnson")
)
print(johnsons)
#Get all users over the age of 30
overThirty = table.scan(
FilterExpression=Key('age').gt(30)
)
print(overThirty['Items'])
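# Editor's note (hedged): scan() reads the whole table and filters client-side, which is fine for
# small tables. If 'last_name' were a key attribute (not shown here), query() with a key condition
# would avoid the full scan, e.g.:
#   table.query(KeyConditionExpression=Key('last_name').eq('Johnson'))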
| 21.458333
| 118
| 0.743689
|
1726bd36c4b69ba6d860e101ba10423b487067c5
| 358
|
py
|
Python
|
balutils/CatalogFeature/Mcal.py
|
mattkwiecien/balutils
|
9dcd5419a796e23df671fc4dc22c78cf7e2c61fc
|
[
"MIT"
] | null | null | null |
balutils/CatalogFeature/Mcal.py
|
mattkwiecien/balutils
|
9dcd5419a796e23df671fc4dc22c78cf7e2c61fc
|
[
"MIT"
] | null | null | null |
balutils/CatalogFeature/Mcal.py
|
mattkwiecien/balutils
|
9dcd5419a796e23df671fc4dc22c78cf7e2c61fc
|
[
"MIT"
] | null | null | null |
from balutils.CatalogFeature import SimpleCatalog
# TODO
class Mcal(SimpleCatalog):
"""
Adds Mcal catalog functionality to the catalog.
"""
    def applyTo(self, catalog: "Catalog") -> None:  # string annotation: Catalog is not imported in this module
# First, call the parent feature method
self.parent.applyTo(catalog)
# TODO implement this decorators logic
return
| 25.571429
| 51
| 0.659218
|
fa355f675d1a55b6059fe41eeef13cd72d252397
| 11,102
|
py
|
Python
|
log_stacker/log_stacker.py
|
Ron-Chang/LogStacker
|
abd92842969773949d2b36eeb4313843081339eb
|
[
"MIT"
] | null | null | null |
log_stacker/log_stacker.py
|
Ron-Chang/LogStacker
|
abd92842969773949d2b36eeb4313843081339eb
|
[
"MIT"
] | null | null | null |
log_stacker/log_stacker.py
|
Ron-Chang/LogStacker
|
abd92842969773949d2b36eeb4313843081339eb
|
[
"MIT"
] | null | null | null |
import logging
import os
import sys
import traceback
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler
class Dyer:
_RESETALL = '\x1b[0m'
_STYLE = '\x1b[{style}'
_FG = '3{fg}'
_BG = '4{bg}'
class Style:
NORMAL = 0
BOLD = 1
DARK = 2
ITALIC = 3
UNDERSCORE = 4
BLINK_SLOW = 5
BLINK_FAST = 6
REVERSE = 7
HIDE = 8
STRIKE_THROUGH = 9
class Color:
BLACK = 0
RED = 1
GREEN = 2
YELLOW = 3
BLUE = 4
PURPLE = 5
CYAN = 6
GRAY = 7
@classmethod
def _validate(cls, fg, bg):
        if fg is None and bg is None:
            raise ValueError('fg and bg: at least one of them is required.')
        if fg is not None and fg not in cls.Color.__dict__.values():
            raise ValueError('fg color code is out of range.')
        if bg is not None and bg not in cls.Color.__dict__.values():
            raise ValueError('bg color code is out of range.')
@classmethod
def dye(cls, fg=None, bg=None, style=None):
cls._validate(fg=fg, bg=bg)
style_tag = f'\x1b[{cls.Style.NORMAL};' if style is None else f'\x1b[{style};'
fg_tag = f'30' if fg is None else f'3{fg}'
bg_tag = '' if bg is None else f';4{bg}'
return f'{style_tag}{fg_tag}{bg_tag}m'
@classmethod
def reset(cls):
return cls._RESETALL
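# Editor's note (hedged examples, derived from the format strings above):
#   Dyer.dye(fg=Dyer.Color.RED)                            -> '\x1b[0;31m'
#   Dyer.dye(bg=Dyer.Color.GREEN, style=Dyer.Style.BOLD)   -> '\x1b[1;30;42m'
#   Dyer.reset()                                           -> '\x1b[0m'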
class LoggerFormatter(logging.Formatter):
LOGGING_STYLE = logging.PercentStyle
STREAM = 1
FILE = 2
_RESET = Dyer.reset()
_FG_CYAN = Dyer.dye(fg=Dyer.Color.CYAN)
_BG_CYAN = Dyer.dye(bg=Dyer.Color.CYAN)
_FG_GREEN = Dyer.dye(fg=Dyer.Color.GREEN)
_BG_GREEN = Dyer.dye(bg=Dyer.Color.GREEN)
_FG_YELLOW = Dyer.dye(fg=Dyer.Color.YELLOW)
_BG_YELLOW = Dyer.dye(bg=Dyer.Color.YELLOW)
_FG_PURPLE = Dyer.dye(fg=Dyer.Color.PURPLE)
_BG_PURPLE = Dyer.dye(bg=Dyer.Color.PURPLE)
_FG_RED_HIGHLIGHT = Dyer.dye(fg=Dyer.Color.RED, style=Dyer.Style.BOLD)
_BG_RED_HIGHLIGHT = Dyer.dye(bg=Dyer.Color.RED, style=Dyer.Style.BLINK_SLOW)
_STREAM_FMT = '[%(asctime)s] {badge_color}[%(levelname)-8s]{reset} {text_color}[%(message)s]{reset}'
_DEFAULT_FMT = '[%(asctime)s] [%(levelname)-10s] [%(message)s]'
_STREAM_INFO_FORMAT = _STREAM_FMT.format(
badge_color=_BG_GREEN,
text_color=_FG_GREEN,
reset=_RESET
)
_STREAM_DEBUG_FORMAT = _STREAM_FMT.format(
badge_color=_BG_CYAN,
text_color=_FG_CYAN,
reset=_RESET
)
_STREAM_WARNING_FORMAT = _STREAM_FMT.format(
badge_color=_BG_YELLOW,
text_color=_FG_YELLOW,
reset=_RESET
)
_STREAM_ERROR_FORMAT = _STREAM_FMT.format(
badge_color=_BG_PURPLE,
text_color=_FG_PURPLE,
reset=_RESET
)
_STREAM_CRITICAL_FORMAT = _STREAM_FMT.format(
badge_color=_BG_RED_HIGHLIGHT,
text_color=_FG_RED_HIGHLIGHT,
reset=_RESET
)
_STREAM_FORMATS = {
logging.INFO: LOGGING_STYLE(_STREAM_INFO_FORMAT),
logging.DEBUG: LOGGING_STYLE(_STREAM_DEBUG_FORMAT),
logging.WARNING: LOGGING_STYLE(_STREAM_WARNING_FORMAT),
logging.ERROR: LOGGING_STYLE(_STREAM_ERROR_FORMAT),
logging.CRITICAL: LOGGING_STYLE(_STREAM_CRITICAL_FORMAT),
}
_FILE_FORMATS = {
logging.INFO: LOGGING_STYLE(_DEFAULT_FMT),
logging.DEBUG: LOGGING_STYLE(_DEFAULT_FMT),
logging.WARNING: LOGGING_STYLE(_DEFAULT_FMT),
logging.ERROR: LOGGING_STYLE(_DEFAULT_FMT),
logging.CRITICAL: LOGGING_STYLE(_DEFAULT_FMT),
}
def __init__(self, type_, fmt=None):
"""
Cannot recognized by instance() method
logging.FileHandler is inherit from logging.StreamHandler
"""
fmt = fmt or self._DEFAULT_FMT
super().__init__(fmt=fmt)
self.type_ = type_
def format(self, record):
self._style = self.LOGGING_STYLE(self._DEFAULT_FMT)
if self.type_ == self.STREAM:
self._style = self._STREAM_FORMATS.get(record.levelno, self._style)
elif self.type_ == self.FILE:
self._style = self._FILE_FORMATS.get(record.levelno, self._style)
return logging.Formatter.format(self, record)
class StreamLogger:
    @staticmethod
    def get_handler(level=logging.DEBUG):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(level)
formatter = LoggerFormatter(type_=LoggerFormatter.STREAM)
handler.setFormatter(formatter)
return handler
class FileLogger:
_TITLE = os.environ.get('APP_NAME', 'default_log').replace(' ', '_').lower()
_ROOT = os.path.abspath('.')
_LEVEL_MAPS = {
logging.DEBUG: 'debug',
logging.INFO: 'info',
logging.WARNING: 'warning',
logging.ERROR: 'error',
logging.CRITICAL: 'critical',
}
@staticmethod
def _get_rotating_file_handler(filename, level=logging.DEBUG):
handler = TimedRotatingFileHandler(
filename=filename,
when='H',
interval=1,
backupCount=10000,
encoding=None,
delay=False,
utc=False,
)
handler.setLevel(level)
formatter = LoggerFormatter(type_=LoggerFormatter.FILE)
handler.setFormatter(formatter)
return handler
@classmethod
def get_handlers(cls, entry_point, level=logging.DEBUG):
handlers = list()
for levelno, level_name in cls._LEVEL_MAPS.items():
if levelno < level:
continue
path = f'{cls._ROOT}/log/{level_name}_log'
os.makedirs(path, exist_ok=True)
filename = os.path.join(f'{path}/{cls._TITLE}.{entry_point}.{level_name}.log')
handler = cls._get_rotating_file_handler(filename=filename, level=levelno)
handlers.append(handler)
return handlers
class LogStacker:
"""
- How to use:
        Import LogStacker at the entry point and customize optional settings before LogStacker.logging(__file__)
from log_stacker import LogStacker
# -------------- optional settings start -------------- #
LogStacker.STREAM_OUTPUT = True
# default: True
LogStacker.LOCAL_OUTPUT = True
# default: True
LogStacker.ROOT_LEVEL = LogStacker.DEBUG
# default: LogStacker.DEBUG
LogStacker.STREAM_LEVEL = LogStacker.WARNING
# default: LogStacker.DEBUG
    LogStacker.FILE_LEVEL = LogStacker.DEBUG
# default: LogStacker.DEBUG
LogStacker.TRACEBACK_LEVEL.add(LogStacker.INFO)
# default: {LogStacker.WARNING, LogStacker.ERROR, LogStacker.CRITICAL}
LogStacker.IGNORE_PACKAGES.add('package_name_str')
# default: {}
# In progress attributes
# LogStacker.REMOTE_OUTPUT = False
# LogStacker.REMOTE_LEVEL = LogStacker.DEBUG
# -------------- optional settings end -------------- #
LogStacker.logging(__file__)
"""
CRITICAL = logging.CRITICAL
ERROR = logging.ERROR
WARNING = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
ROOT_LOGGER = None
STREAM_OUTPUT = True
LOCAL_OUTPUT = True
ROOT_LEVEL = logging.DEBUG
STREAM_LEVEL = logging.DEBUG
FILE_LEVEL = logging.DEBUG
# REMOTE_OUTPUT = True # in progress
# REMOTE_LEVEL = logging.DEBUG # in progress
TRACEBACK_LEVEL = {
logging.WARNING,
logging.ERROR,
logging.CRITICAL,
}
    IGNORE_PACKAGES = set()  # a set, so IGNORE_PACKAGES.add(...) works as shown in the docstring
@classmethod
def _update_root_logger(cls, handlers):
cls.ROOT_LOGGER = logging.getLogger()
cls.ROOT_LOGGER.setLevel(cls.ROOT_LEVEL)
for handler in handlers:
cls.ROOT_LOGGER.addHandler(handler)
@staticmethod
def _resist_packages(packages):
for package in packages:
logging.getLogger(package).setLevel(logging.WARNING)
logging.captureWarnings(True)
@classmethod
def logging(cls, entry_point, stream_level=None, file_level=None, remote_level=None):
"""
:params entry_point: entry point location
:type entry_point: str
:params stream_level: LogStacker.CRITICAL, LogStacker.ERROR, LogStacker.WARNING, LogStacker.INFO, LogStacker.DEBUG
:type stream_level: int
:params file_level: LogStacker.CRITICAL, LogStacker.ERROR, LogStacker.WARNING, LogStacker.INFO, LogStacker.DEBUG
:type file_level: int
:params remote_level: LogStacker.CRITICAL, LogStacker.ERROR, LogStacker.WARNING, LogStacker.INFO, LogStacker.DEBUG
:type remote_level: int
TODO
if cls.REMOTE_OUTPUT:
add fluent
"""
entry_point = os.path.basename(entry_point)
handlers = list()
if cls.STREAM_OUTPUT:
stream_handler = StreamLogger.get_handler(level=cls.STREAM_LEVEL)
handlers.append(stream_handler)
if cls.LOCAL_OUTPUT:
file_handlers = FileLogger.get_handlers(entry_point=entry_point, level=cls.FILE_LEVEL)
handlers.extend(file_handlers)
cls._update_root_logger(handlers=handlers)
cls._resist_packages(packages=cls.IGNORE_PACKAGES)
@classmethod
def _validate(cls):
if cls.ROOT_LOGGER is None:
raise Exception(f'LogStacker Error: Initialization is required. \n{cls.__doc__}')
@classmethod
def _get_traceback(cls, level):
if level not in cls.TRACEBACK_LEVEL:
return str()
result = traceback.format_exc()
if 'NoneType: None' in result:
return str()
return result
@classmethod
def _get_msg(cls, level, msg=None, exception=None):
message = msg or str()
exception = exception or str()
exception_traceback = cls._get_traceback(level=level)
output = (
f'\n\t<MESSAGE>: {message}'
f'\n\t<EXCEPTION>: {exception}'
f'\n\t<TRACEBACK>: \n{exception_traceback}'
)
return output
@classmethod
def debug(cls, exception=None, msg=None):
cls._validate()
msg = cls._get_msg(level=logging.DEBUG, msg=msg, exception=exception)
cls.ROOT_LOGGER.debug(msg=msg)
@classmethod
def info(cls, exception=None, msg=None):
cls._validate()
msg = cls._get_msg(level=logging.INFO, msg=msg, exception=exception)
cls.ROOT_LOGGER.info(msg=msg)
@classmethod
def warning(cls, exception=None, msg=None):
cls._validate()
msg = cls._get_msg(level=logging.WARNING, msg=msg, exception=exception)
cls.ROOT_LOGGER.warning(msg=msg)
@classmethod
def error(cls, exception=None, msg=None):
cls._validate()
msg = cls._get_msg(level=logging.ERROR, msg=msg, exception=exception)
cls.ROOT_LOGGER.error(msg=msg)
@classmethod
def critical(cls, exception=None, msg=None):
cls._validate()
msg = cls._get_msg(level=logging.CRITICAL, msg=msg, exception=exception)
cls.ROOT_LOGGER.critical(msg=msg)
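# Editor's note (hedged usage sketch, following the class docstring above):
#   LogStacker.logging(__file__)
#   try:
#       1 / 0
#   except ZeroDivisionError as exc:
#       LogStacker.error(exception=exc, msg='division failed')   # WARNING and above also record the traceback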
| 30.924791
| 122
| 0.635111
|
dba119143164a9548ef911a7c26c94512bd82578
| 746
|
py
|
Python
|
cflearn/constants.py
|
carefree0910/carefree-learn
|
2043812afbe9c56f01ec1639961736313ee062ba
|
[
"MIT"
] | 400
|
2020-07-05T18:55:49.000Z
|
2022-02-21T02:33:08.000Z
|
cflearn/constants.py
|
carefree0910/carefree-learn
|
2043812afbe9c56f01ec1639961736313ee062ba
|
[
"MIT"
] | 82
|
2020-08-01T13:29:38.000Z
|
2021-10-09T07:13:44.000Z
|
cflearn/constants.py
|
carefree0910/carefree-learn
|
2043812afbe9c56f01ec1639961736313ee062ba
|
[
"MIT"
] | 34
|
2020-07-05T21:15:34.000Z
|
2021-12-20T08:45:17.000Z
|
import os
from cftool.misc import LoggingMixin
INFO_PREFIX = LoggingMixin.info_prefix
ERROR_PREFIX = LoggingMixin.error_prefix
WARNING_PREFIX = LoggingMixin.warning_prefix
TIME_FORMAT = "%Y-%m-%d_%H-%M-%S-%f"
LOSS_KEY = "loss"
INPUT_KEY = "input"
LATENT_KEY = "latent"
PREDICTIONS_KEY = "predictions"
LABEL_KEY = "labels"
ORIGINAL_LABEL_KEY = "original_labels"
BATCH_INDICES_KEY = "batch_indices"
PT_PREFIX = "model_"
SCORES_FILE = "scores.json"
CHECKPOINTS_FOLDER = "checkpoints"
META_CONFIG_NAME = "__meta__"
DATA_CONFIG_FILE = "__data__.json"
ML_PIPELINE_SAVE_NAME = "ml_pipeline"
DEFAULT_ZOO_TAG = "default"
CACHE_DIR = os.path.join(os.path.expanduser("~"), ".cache", "carefree-learn")
DATA_CACHE_DIR = os.path.join(CACHE_DIR, "data")
| 24.866667
| 77
| 0.772118
|
cf2fc8a8040abcd62dfc1f824197900b6c56eadd
| 12,942
|
py
|
Python
|
play.py
|
wq1977/garamaker
|
7240dd13687eaf2eced9f12f41f73325e8aceb14
|
[
"MIT"
] | 1
|
2020-04-03T00:55:15.000Z
|
2020-04-03T00:55:15.000Z
|
play.py
|
wq1977/garamaker
|
7240dd13687eaf2eced9f12f41f73325e8aceb14
|
[
"MIT"
] | null | null | null |
play.py
|
wq1977/garamaker
|
7240dd13687eaf2eced9f12f41f73325e8aceb14
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import requests,sys,argparse
bpm=90
hexuan={}
play="吉他"
target=""
beats=0
record=False
totalbeats = 0
debug=False
checkGrammar=True
hexianSequences = []
needFix={}
puhao=1
gangqinDiao=0
format='lily'
def parse(path):
global bpm,hexuan,play,target,beats,totalbeats,checkGrammar,needFix
inPlay=False
lines = open(path).readlines()
result=""
for idx,line in enumerate(lines):
defines = line.split("#")
desc=""
if len(defines)>1:
desc = "#".join(defines[1:]).strip()
line = defines[0].strip()
if line.startswith("bpm="):
bpm = int(line.split("=")[1])
if line.startswith("beats="):
beats = int(line.split("=")[1])
if len(line)>2 and line[0] in [chr(ch+ord('A')) for ch in range(26)] and line[1]=='=':
hexuan[line[:1]]={"value":[ord(ch.strip()[0])-ord('0') for ch in line[2:].split(",")], "name":desc.split(" ")[0]}
if inPlay and len(line)>0:
if (beats > 0):
blocks = line.split(",")
if len(blocks) % beats != 1 and checkGrammar:
print "line(%d): %s 节拍数不对!" % (idx+1, line)
for i in range(0, len(blocks)/3):
print "%d:%s\t%d:%s\t%d:%s" % (i*3+1, blocks[i*3], i*3+2, blocks[i*3+1], i*3+3, blocks[i*3+2])
sys.exit()
else:
for i in range(len(blocks) / beats / 2):
foundTarget = ("".join(blocks[i*beats*2:i*beats*2+beats*2])).strip()
hexianinBeat = []
currentBeat = totalbeats + i
for idx, ch in enumerate(foundTarget):
if idx % 2 == 1 and ch in hexuan:
hexianname = hexuan[ch]["name"]
if len(hexianinBeat)<=0 or hexianname != hexianinBeat[len(hexianinBeat)-1]:
hexianinBeat.append(hexianname)
if len(hexianname)>2 and hexianname[len(hexianname)-2] == "+":
if not currentBeat in needFix:
needFix[currentBeat]=set()
needFix[currentBeat].add(ch)
hexianSequences.append(" ".join(hexianinBeat))
totalbeats += len(blocks) / beats / 2
result += line
if (line in ["[吉他]", "[贝斯]", "[架子鼓]", "[Solo]", "[小提琴]", "[钢琴]"]):
if (target == "" or target == line.strip('[]')):
if not inPlay:
inPlay=True
play=line.strip('[]')
if play=='Solo':
play="吉他"
else:
inPlay = False
else:
if inPlay:
inPlay=False
return result
def standname(xuan, pin):
base = [41,35,31,27,22,17]
return (base[xuan]+pin,0)
def mapA2B_jita(yin):
xpos = [65, 205, 335, 455, 570, 680, 760, 875, 975];
ypos = [380, 435, 495, 560, 625, 685]
if yin[0] == '0':
return (0,0)
if yin[1] in hexuan:
value = hexuan[yin[1]]["value"]
yin = yin[0] + "%d" % (value[ord(yin[0])-ord('1')])
if format == 'dd':
return standname(ord(yin[0]) - ord('1'),ord(yin[1]) - ord('0'))
return (xpos[ord(yin[1]) - ord('0')],ypos[ord(yin[0]) - ord('1')])
def mapA2B_xiaotiqin(yin):
xpos = [80, 200, 320, 440, 560, 620, 750, 860, 1000];
ypos = [400,480,560,640]
if yin[0] == '0':
return (0,0)
if yin[1] in hexuan:
value = hexuan[yin[1]]["value"]
yin = yin[0] + "%d" % (value[ord(yin[0])-ord('1')])
return (xpos[ord(yin[1]) - ord('0')],ypos[ord(yin[0]) - ord('1')])
def mapA2B_beisi(yin):
xpos = [60, 195, 323, 453, 560, 671, 781, 867, 973];
ypos = [382,494,576,666]
if yin[0] == '0':
return (0,0)
return (xpos[ord(yin[1]) - ord('0')],ypos[ord(yin[0]) - ord('1')])
def mapA2B_gangqin(yin):
global puhao,gangqinDiao
startpos={
1: -13,
4: -1
}
poses = [
(50,715),(74,601),(104,704),(143,618),(158,713),(197,617),(215,705),
(271,701),(292,607),(317,704),(358,606),(375,700),(434,706),(460,597),(488,712),(509,603),(545,723),(576,601),(596,721),
(645,713),(671,607),(707,707),(736,595),(754,698),(813,709),(841,604),(866,709),(897,609),(922,716),(956,599),(974,702),
(46,384),(63,300),(101,384),(124,298),(149,391),(206,409),(222,310),(261,398),(274,289),(299,401),(356,299),(368,393),
(425,417),(453,292),(478,409),(516,301),(548,412),(592,422),(617,300),(652,412),(674,306),(706,410),(734,300),(755,415),
(807,425),(842,299),(869,410),(893,306),(929,402),(967,419),(995,299),
]
index=0
if yin[0] in ['1','a','A']:
if yin[1] in [chr(ord('a')+x) for x in range(0,10)]:
index = -1 * (ord(yin[1]) - ord('a') + 1)
if yin[1] in [chr(ord('0')+x) for x in range(0,10)]:
index = ord(yin[1]) - ord('1')
if yin[1] in [chr(ord('A')+x) for x in range(0,10)]:
index = 9 + ord(yin[1]) - ord('A')
index_in_pos = 0
index_i = startpos[puhao]
while index_i != index:
index_i += 1
index_in_pos += 1
if (index_in_pos % 12) in [1,3,5,8,10]:
index_in_pos += 1
if yin[0] == 'A':
index_in_pos+=1
if yin[0] == 'a':
index_in_pos-=1
return poses[index_in_pos+gangqinDiao]
return (0,0)
def mapA2B_gu(yin):
points={
"A" : [112,200], #镲拔
"B" : [112,350], #踏板踏拔
"C" : [80,400], #开音踏拔
"D" : [200,400], #合音踏拔
"E" : [100,600], #鼓皮边缘
"F" : [200,600], #响弦鼓
"G" : [200,750], #鼓边
"H" : [400,400], #高音桶鼓
"I" : [600,400], #中音桶鼓
"J" : [500,650], #低音鼓
"K" : [1000,200], #钟铃
"L" : [900,200], #骑拔
"M" : [900,600] #低音桶鼓
}
if yin[1] in points:
return points[yin[1]]
return (0,0)
def mapA2B(yin):
global play
if play == "吉他":
return mapA2B_jita(yin)
elif play == "小提琴":
return mapA2B_xiaotiqin(yin)
elif play == "架子鼓":
return mapA2B_gu(yin)
elif play == "钢琴":
return mapA2B_gangqin(yin)
elif play == "贝斯":
return mapA2B_beisi(yin)
print '不能识别的乐器'
def appendCmd(yin, cmds, startat, duration):
if yin[0] in ['1','2','3','4','5','6']:
x,y=mapA2B(yin)
cmds.append("%d,%d,%d,%d" % (startat,x,y,duration - 5))
elif yin[0] in ['U','D']:
for i in range(1,7):
x,y=mapA2B(chr(ord('0')+i)+yin[1])
subduration = 60000 / 75 / 2 / 30 #fix this to 75
if yin[0]=='D':
cmds.append("%d,%d,%d,%d" % (startat + (6 - i) * subduration,x,y,duration - 50))
else:
cmds.append("%d,%d,%d,%d" % (startat + (i - 1) * subduration,x,y,duration - 50))
elif yin[0] in ['u','d']:
for i in range(1,4):
x,y=mapA2B(chr(ord('0')+i)+yin[1])
subduration = 60000 / 75 / 2 / 30 #fix this to 75
if yin[0]=='d':
cmds.append("%d,%d,%d,%d" % (startat + (3 - i) * subduration,x,y,duration - 50))
else:
cmds.append("%d,%d,%d,%d" % (startat + (i - 1) * subduration,x,y,duration - 50))
def convert(jianpu):
global bpm,puhao,gangqinDiao
lines = [line for line in jianpu.strip().strip(",").split("\n") if not line.startswith('#')]
jianpu = "".join(lines)
pieces = jianpu.strip().split(',')
cmds=[]
for idx in range(len(pieces)):
piece = pieces[idx].strip();
yins = [piece[i:i+2] for i in range(0, len(piece), 2)]
if len(yins) <=0 :
print "参数错误,长度为0的音", piece, idx
sys.exit(1)
if yins[0]=="@2":
startat1 = idx * 60000 / bpm / 2
startat2 = startat1 + 60000 / bpm / 4
appendCmd(yins[1],cmds, startat1, 60000 / bpm / 4)
appendCmd(yins[2],cmds, startat2, 60000 / bpm / 4)
continue
if yins[0]=="@3":
startat1 = idx * 60000 / bpm / 2
startat2 = startat1 + 60000 / bpm / 6
startat3 = startat2 + 60000 / bpm / 6
appendCmd(yins[1],cmds, startat1, 60000 / bpm / 6)
appendCmd(yins[2],cmds, startat2, 60000 / bpm / 6)
appendCmd(yins[3],cmds, startat3, 60000 / bpm / 6)
continue
if yins[0]=="@4":
startat = idx * 60000 / bpm / 2
duration = 60000 / bpm / 8
for i in range(4):
appendCmd(yins[i],cmds, startat + i*duration, duration)
continue
if yins[0]=="@p":
cmdidx = len(cmds)-1
prevstart=0
while True:
if cmdidx < 0:
break
lastcmd = cmds[cmdidx].split(',')
if prevstart == 0 or prevstart == lastcmd[0]:
prevstart = lastcmd[0]
lastcmd[len(lastcmd)-1] = "%d" % (int(lastcmd[len(lastcmd)-1]) + 60000 / bpm / 4 )
cmds[cmdidx] = ",".join(lastcmd)
cmdidx-=1
else:
break
startat = idx * 60000 / bpm / 2 + 60000 / bpm / 4
duration = 60000 / bpm / 4
appendCmd(yins[1], cmds, startat, duration)
appendCmd(yins[2], cmds, startat + duration / 2 - 10, 50)
#cmds.append("%d,%d,%d,%d" % (startat,x1,y1,duration));
#cmds.append("%d,%d,%d,%d" % (startat + duration / 2 - 10,x2,y2,20));
continue
if yins[0]=="@'": #这个音长度减半,上个音延长半个长度
cmdidx = len(cmds)-1
prevstart=0
while True:
if cmdidx < 0:
break
lastcmd = cmds[cmdidx].split(',')
if prevstart == 0 or prevstart == lastcmd[0]:
prevstart = lastcmd[0]
lastcmd[len(lastcmd)-1] = "%d" % (int(lastcmd[len(lastcmd)-1]) + 60000 / bpm / 4 )
cmds[cmdidx] = ",".join(lastcmd)
cmdidx-=1
else:
break
startat = idx * 60000 / bpm / 2 + 60000 / bpm / 4
appendCmd(yins[1], cmds, startat, 60000 / bpm / 4)
continue
if yins[0]=="&4":
puhao=4
yins = yins[1:]
if yins[0]=="&a":
puhao=1
gangqinDiao=-1
yins = yins[1:]
if yins[0]=="&d":
puhao=4
gangqinDiao=-1
yins = yins[1:]
if yins[0]=="@s":
x1,y1=mapA2B(yins[1]);
startat1 = idx * 60000 / bpm / 2
duration1 = 60000 / bpm / 2 / 2;
x2,y2=mapA2B(yins[2]);
startat2 = idx * 60000 / bpm / 2 + duration1
duration2 = 60000 / bpm / 2 / 2;
cmds.append("%d,%d,%d,%d,%d,%d,%d" % (startat1,x1,y1,duration1,x2,y2,duration2));
continue
if yins[0]=="@-": #上一个音停留 2/3 时长,然后用 2/3 时长滑到下一个音 然后停留 2/3
duration = 60000 / bpm * 3 / 8
duration_slide = 60000 / bpm * 2 / 8
lastcmd = cmds[len(cmds)-1].split(',')
lastcmd[len(lastcmd)-1] = "%d" % (duration)
x1,y1=mapA2B(yins[1]);
lastcmd.append(lastcmd[1])
lastcmd.append(lastcmd[2])
lastcmd.append("%d" % (duration_slide))
lastcmd.append("%d" % (x1))
lastcmd.append("%d" % (y1))
lastcmd.append("%d" % (duration))
cmds[len(cmds)-1] = ",".join(lastcmd)
continue
for yin in yins:
            if yin == '--': # Extend the previous note by one eighth note
cmdidx = len(cmds)-1
prevstart=0
while True:
if cmdidx < 0:
break
lastcmd = cmds[cmdidx].split(',')
if prevstart == 0 or prevstart == lastcmd[0]:
prevstart = lastcmd[0]
lastcmd[len(lastcmd)-1] = "%d" % (int(lastcmd[len(lastcmd)-1]) + 60000 / bpm / 4 )
cmds[cmdidx] = ",".join(lastcmd)
cmdidx-=1
else:
break
continue
startat = idx * 60000 / bpm / 2
duration = 1;
appendCmd(yin, cmds, startat,duration * 60000 / bpm / 2)
return cmds
parser = argparse.ArgumentParser(description='用 GarageBand 播放 lily 格式的歌曲.')
parser.add_argument('path', metavar='path', type=str, help='要播放的歌曲路径')
parser.add_argument('-r', dest='record', action='store_true', help='是否自动启动录音')
parser.add_argument('-g', dest='gateway', action='store', help='指定iPad地址')
parser.add_argument('-d', dest='debug', action='store_true', help='是否打印命令')
parser.add_argument('-s', dest='skip_grammar', action='store_true', help='是否跳过语法检查')
parser.add_argument('-y', dest='confirm', action='store_false', help='是否询问执行')
parser.add_argument('-f', dest='format', default='lily', action='store', help='指定输出格式')
parser.add_argument('-t', dest='target', default="", action='store', help='指定播放段落(吉他,贝斯等)')
args = parser.parse_args()
target = args.target
record = args.record
format = args.format
gateway = "http://127.0.0.1:8088"
if args.gateway:
gateway = args.gateway
checkGrammar = not args.skip_grammar
jianpu=parse(args.path)
if args.debug:
print jianpu
if format == 'dd':
song = "\n".join(convert(jianpu))
print song
sys.exit()
else:
song = "|".join(convert(jianpu))
if record:
delta={3:57,4:61, 2:51}
song = ('R,%d,572,48,%d,453,48|' % (60000 * beats / bpm + delta[beats], 60000 * beats / bpm)) + song
for i in range(len(hexianSequences) / beats / 2 + 1):
hexuanhelp = "%3d:\t" % (i)
for j in range(beats * 2):
idx = i*beats*2+j
if idx >= len(hexianSequences):
break
hexuanhelp += "%s\t" % (hexianSequences[idx])
print hexuanhelp
if len(needFix) > 0:
print "\n录音以后需要手动调整以下和弦:"
for k,v in needFix.items():
print "\t第 %d 小节,%s" % (k+1, " ".join([hexuan[x]["name"] for x in v]))
print "\n总共 %d 小节" % (totalbeats)
run=True
if args.confirm:
run=False
answer = raw_input('输入 Y 确认开始演奏 [%s] ...\n' % (play))
if len(answer)>0 and (answer[0] == 'Y' or answer[0] == 'y'):
run=True
if run:
if args.debug:
print song
r = requests.post(gateway, data="play="+song)
print(r.text)
else:
print '放弃演奏,退出'
| 33.968504
| 124
| 0.54922
|
a2054f9358237c5c3b8b6bf5d9bc9d160c3de369
| 13,272
|
py
|
Python
|
src/capture/realsense/cameras.py
|
hobbitsyfeet/3DMeasure
|
829dbc4e9a1974064ed7baa221c765c3c9123834
|
[
"MIT"
] | 6
|
2020-01-14T14:37:31.000Z
|
2021-12-16T19:45:29.000Z
|
src/capture/realsense/cameras.py
|
hobbitsyfeet/3DMeasure
|
829dbc4e9a1974064ed7baa221c765c3c9123834
|
[
"MIT"
] | null | null | null |
src/capture/realsense/cameras.py
|
hobbitsyfeet/3DMeasure
|
829dbc4e9a1974064ed7baa221c765c3c9123834
|
[
"MIT"
] | null | null | null |
import pyrealsense2 as rs
import numpy as np
import cv2
import open3d as o3d
import argparse
import os
from time import strftime
#from cv_track import track
import zipfile
import json  # used by the advanced-mode configuration loader below
parser = argparse.ArgumentParser()
parser.add_argument('--all_filters', type=bool, default=False, help='Enables all post-processing filters to enhance quality and reduce noise. Spatial, Temporal and Disparity Transform')
parser.add_argument('--spatial', '-s', type=bool, default=True, help='Enables smoothing that preserves edges.')
parser.add_argument('--temporal', '-t', type=bool, default=True, help='Smooths/improves depth data by sampling previous frames. Best used with static scene due to blurring')
parser.add_argument('--disparity', type=bool, default=False, help="Only if you're dispair_ate.")
parser.add_argument('--decimation' , '-d', type=int, default=2, help="Reduces resolution, and averages out depth of downsampled data.")
parser.add_argument('--output', '-o', type=str, default='C:/Users/Justin/Documents/Github/3DMeasure/', help="Where to write the data")
parser.add_argument('--config', type=str, default='')
FLAGS = parser.parse_args()
ALL_FILTERS = FLAGS.all_filters
SPATIAL = FLAGS.spatial
TEMPORAL = FLAGS.temporal
DISPARITY = FLAGS.disparity
DECIMATION = FLAGS.decimation
# The original lines here referenced `self` at module level; the enclosing class header
# was missing, so they are wrapped in a minimal placeholder class (name is assumed).
class AdvancedModeConfig:
    def __init__(self, jsonFile, cfg):
        self.jsonFile, self.cfg = jsonFile, cfg
        jsonObj = json.load(open(self.jsonFile))
        self.json_string = str(jsonObj).replace("'", '\"')
    def loadConfiguration(self):
        self.dev = self.cfg.get_device()
        self.advnc_mode = rs.rs400_advanced_mode(self.dev)
        print("Advanced mode is", "enabled" if self.advnc_mode.is_enabled() else "disabled")
        self.advnc_mode.load_json(self.json_string)
#tracker = track()
save_path = "./data/"
#zipfile name is the Year/Month/Day-Hour/Minute started.
zip_dir_name = save_path + strftime("%Y-%m-%d_%H-%M-%S")
#code to zip files
# Declare the function to return all file paths of the particular directory
def retrieve_file_paths(dirName):
# setup file paths variable
file_paths = []
# Read all directory, subdirectories and file lists
for file in os.listdir(save_path):
if file.endswith(".ply"):
# # Create the full filepath by using os module.
file_path = os.path.join(save_path, file)
if file_path[file_path.find("."):] != ".zip":
file_paths.append(file_path)
# return all paths
return file_paths
# import pcl
# from pcl import pcl_visualization
CV2_LBUTTON_FLAG = False
# Configure depth and color streams...
# ...from Camera 1
pipelines = []
configs = []
profiles = []
pipeline_1 = rs.pipeline()
config_1 = rs.config()
config_1.enable_device('816612061111')
config_1.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config_1.enable_stream(rs.stream.color, 640, 480, rs.format.rgb8, 30)
# ...from Camera 2
pipeline_2 = rs.pipeline()
config_2 = rs.config()
config_2.enable_device('816612061344')
config_2.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config_2.enable_stream(rs.stream.color, 640, 480, rs.format.rgb8, 30)
# Start streaming from both cameras
pipeline_1.start(config_1)
pipeline_2.start(config_2)
profile_1 = pipeline_1.get_active_profile()
profile_2 = pipeline_2.get_active_profile()
# depth_sensor_1 = profile_1.get_device().first_depth_sensor()
# depth_sensor_2 = profile_2.get_device().first_depth_sensor()
# depth_scale_1 = depth_sensor_1.get_depth_scale()
# depth_scale_2 = depth_sensor_2.get_depth_scale()
depth_profile_1 = rs.video_stream_profile(profile_1.get_stream(rs.stream.depth))
depth_profile_2 = rs.video_stream_profile(profile_2.get_stream(rs.stream.depth))
depth_intrinsics_1 = depth_profile_1.get_intrinsics()
depth_intrinsics_2 = depth_profile_2.get_intrinsics()
w1, h1 = depth_intrinsics_1.width, depth_intrinsics_1.height
w2, h2 = depth_intrinsics_2.width, depth_intrinsics_2.height
pc_1 = rs.pointcloud()
pc_2 = rs.pointcloud()
decimate1 = rs.decimation_filter()
decimate2 = rs.decimation_filter()
decimate1.set_option(rs.option.filter_magnitude, DECIMATION)
decimate2.set_option(rs.option.filter_magnitude, DECIMATION)
global save_index
save_index = 0
colorizer_1 = rs.colorizer()
colorizer_2 = rs.colorizer()
filters = [
#rs.disparity_transform(),
rs.spatial_filter(),
#rs.temporal_filter(),
#rs.disparity_transform(False)
]
# if DISPARITY:
# filters.append(rs.disparity_transform())
# if SPATIAL:
# rs.spatial_filter()
def nothing(x):
pass
def set_CVLBUTTON_FLAG(event):
global CV2_LBUTTON_FLAG
if event != 0 and event != cv2.EVENT_LBUTTONDBLCLK:
if event == cv2.EVENT_LBUTTONDOWN:
print("SETTING CV_LBUTTON_FLAG TRUE")
CV2_LBUTTON_FLAG = True
if event == cv2.EVENT_LBUTTONUP:
print("SETTING CV_LBUTTON_FLAG FALSE")
CV2_LBUTTON_FLAG = False
def o3d_view_pointcloud(path_1, path_2):
"""
    View two point clouds from their paths in one Open3D visualization.
"""
o3d_cloud1 = o3d.io.read_point_cloud(path_1, format="ply")
o3d_cloud2 = o3d.io.read_point_cloud(path_2, format="ply")
o3d.visualization.draw_geometries([o3d_cloud1, o3d_cloud2])
# def view_pointcloud(path_1, path_2):
# pcl_cloud1 = pcl.load_XYZRGB(path_1, format="ply")
# pcl_cloud2 = pcl.load_XYZRGB(path_2, format="ply")
# viewer = pcl_visualization.CloudViewing()
# viewer.ShowColorCloud(pcl_cloud1)
# viewer.ShowColorCloud(pcl_cloud2)
# v = True
# while v:
# v = not(viewer.WasStopped())
def get_depth_data(frame_1, frame_2, color_frame_1, color_frame_2):
"""
Returns depth data ready to export.
This depth data is processed and has the respective colour images mapped onto them.
"""
# frames_1 = pipeline_1.wait_for_frames()
depth_frame_1 = frame_1.get_depth_frame()
depth_frame_2 = frame_2.get_depth_frame()
# depth_image_1 = np.asanyarray(depth_frame_1.get_data())
# depth_image_2 = np.asanyarray(depth_frame_2.get_data())
color_image_1 = np.asanyarray(color_frame_1.get_data())
color_image_2 = np.asanyarray(color_frame_2.get_data())
# Apply colormap on depth image (image must be converted to 8-bit per pixel first)
#NOTE This is what reduces the pointcloud density.
depth_frame_1 = decimate1.process(depth_frame_1)
depth_frame_2 = decimate2.process(depth_frame_2)
for f in filters:
depth_frame_1 = f.process(depth_frame_1)
depth_frame_2 = f.process(depth_frame_2)
# Convert images to numpy arrays
colorized_depth_1 = colorizer_1.colorize(depth_frame_1)
colorized_depth_2 = colorizer_2.colorize(depth_frame_2)
# Apply colormap on depth image (image must be converted to 8-bit per pixel first)
depth_colormap_1 = np.asanyarray(colorized_depth_1.get_data())
depth_colormap_2 = np.asanyarray(colorized_depth_2.get_data())
# Stack all images horizontally
mapped_frame_1, color_source_1 = color_frame_1, color_image_1
mapped_frame_2, color_source_2 = color_frame_2, color_image_2
points_1 = pc_1.calculate(depth_frame_1)
points_2 = pc_2.calculate(depth_frame_2)
pc_1.map_to(mapped_frame_1)
pc_2.map_to(mapped_frame_2)
return points_1, points_2, mapped_frame_1, mapped_frame_2
def save(event,x,y,flags,param):
"""
This function is designed to work with CV2 callback.
Double left click saves and displays the pointcloud.
    Left click only saves. If held, it should continuously save data.
"""
set_CVLBUTTON_FLAG(event)
global save_index
# check if it was double click first, if so, save and display.
if event == cv2.EVENT_LBUTTONDBLCLK:
save_index+=1
points_1, points_2, mapped_frame_1, mapped_frame_2 = get_depth_data(param[0],param[1], param[2], param[3])
print((save_path + "816612061111_no"+str(save_index)+ ".ply"))
print((save_path + "816612061344_no"+str(save_index)+ ".ply"))
points_1.export_to_ply((save_path + "816612061111_no"+str(save_index)+".ply"),mapped_frame_1)
points_2.export_to_ply((save_path + "816612061344_no"+str(save_index)+".ply"),mapped_frame_2)
o3d_view_pointcloud((save_path + "816612061111_no"+str(save_index)+".ply"),
(save_path + "816612061344_no"+str(save_index)+".ply"))
print("Saved")
# view_pointcloud((save_path + "816612061111_no"+str(save_index)+".ply"),
# (save_path + "816612061344_no"+str(save_index)+".ply"))
# # Otherwise check and see if left button is down (no double click) and simply save.
# # If left click is held down, it shoud record continuously
# elif CV2_LBUTTON_FLAG:
# save_index+=1
# points_1, points_2, mapped_frame_1, mapped_frame_2 = get_depth_data(param[0],param[1], param[2], param[3])
# print((save_path + "816612061111_no"+str(save_index)+ ".ply"))
# print((save_path + "816612061344_no"+str(save_index)+ ".ply"))
# points_1.export_to_ply((save_path + "816612061111_no"+str(save_index)+".ply"),mapped_frame_1)
# points_2.export_to_ply((save_path + "816612061344_no"+str(save_index)+".ply"),mapped_frame_2)
# print("Saved")
# name for the trackbar. This also acts as the toggle variable.
switch = '0 : OFF \n1 : ON'
#create the cv2 window for display, then resize it
cv2.namedWindow('RealSense', cv2.WINDOW_NORMAL)
# cv2.setWindowProperty('Realsense', cv2.WINDOW_NORMAL, cv2.WINDOW_FULLSCREEN)
cv2.resizeWindow('RealSense', 2560 , 1440)
cv2.createTrackbar(switch, 'RealSense',0,1, nothing)
try:
#This initialization loads the first frames into all the global values.
#This is done because the first processing pass does not map the colour data onto the point data.
#I don't know why.
print("Performing initial check...")
frames_1 = pipeline_1.wait_for_frames()
frames_2 = pipeline_2.wait_for_frames()
color_frame_1 = frames_1.get_color_frame()
color_frame_2 = frames_2.get_color_frame()
get_depth_data(frames_1, frames_2, color_frame_1, color_frame_2)
print("Initial test complete...")
# Continuously display the colour frames from the RGB camera on the D415 cameras.
while cv2.getWindowProperty('RealSense', 1) >= 0 :
#collect and process only the colour frames for viewing
frames_1 = pipeline_1.wait_for_frames()
color_frame_1 = frames_1.get_color_frame()
color_image_1 = np.asanyarray(color_frame_1.get_data())
frames_2 = pipeline_2.wait_for_frames()
color_frame_2 = frames_2.get_color_frame()
color_image_2 = np.asanyarray(color_frame_2.get_data())
#waitKey gives the GUI a brief pause so the window can refresh
cv2.waitKey(1)
# try:
# color_image_1 = tracker.detect_model(color_image_1)
# color_image_2 = tracker.detect_model(color_image_2)
# except:
# print("could not detect on image 1")
# try:
# output = tracker.detect_model(color_image_2)
# color_image_1, tracker_dimentions = tracker.show_detection(output, color_image_2)
# print(color_image_1)
# except:
# print("could not detect on image 2")
#colour images prepped for display through CV2
color_image_1 = cv2.cvtColor(color_image_1, cv2.COLOR_BGR2RGB)
color_image_2 = cv2.cvtColor(color_image_2, cv2.COLOR_BGR2RGB)
images = np.hstack((color_image_1,color_image_2))
#Use the trackbar's existence to check if the window's X has been selected. Quits the program.
if(cv2.getTrackbarPos(switch,'RealSense')) == -1:
break
#display the images
cv2.imshow('RealSense', images)
#using CV2 callback, save the images
cv2.setMouseCallback('RealSense', save, [frames_1, frames_2, color_frame_1, color_frame_2])
s = cv2.getTrackbarPos(switch,'RealSense')
# Save images and depth maps from both cameras by turning on the switch
if s==1 or CV2_LBUTTON_FLAG:
save_index += 1
print((save_path + "816612061111_no"+str(save_index)+ ".ply"))
print((save_path + "816612061344_no"+str(save_index)+ ".ply"))
points_1, points_2, mapped_frame_1, mapped_frame_2 = get_depth_data(frames_1, frames_2, color_frame_1, color_frame_2)
points_1.export_to_ply((save_path + "816612061111_no"+str(save_index)+ ".ply"), mapped_frame_1)
points_2.export_to_ply((save_path + "816612061344_no"+str(save_index)+ ".ply"), mapped_frame_2)
print("Saved")
finally:
# Stop streaming
pipeline_1.stop()
pipeline_2.stop()
print("Exporting into Zip")
filePaths = retrieve_file_paths(save_path)
# writing files to a zipfile
zip_file = zipfile.ZipFile(zip_dir_name +'.zip', 'w', zipfile.ZIP_DEFLATED)
with zip_file:
# writing each file one by one
for file in filePaths:
arcname = file[file.rfind('/')+1:]
print("Writing " + file, end="...")
zip_file.write(file,arcname=arcname)
os.remove(file)
print("Removed from dir.")
print(zip_dir_name+'.zip file is created successfully!')
| 37.597734 | 184 | 0.699367 |
a4402c0e32a9316b9476f6b4bd1488c2733e1a94 | 1,557 | py | Python
| tests/test_commands.py | adamchainz/sentinelhub-py | d7ad283cf9d4bd4c8c1a8b169cdbe37c5bc8208a | ["MIT"] | 573 | 2018-01-15T21:44:59.000Z | 2022-03-27T22:03:37.000Z
| tests/test_commands.py | adamchainz/sentinelhub-py | d7ad283cf9d4bd4c8c1a8b169cdbe37c5bc8208a | ["MIT"] | 189 | 2018-01-23T14:35:34.000Z | 2022-03-24T15:44:04.000Z
| tests/test_commands.py | adamchainz/sentinelhub-py | d7ad283cf9d4bd4c8c1a8b169cdbe37c5bc8208a | ["MIT"] | 193 | 2018-01-16T09:31:47.000Z | 2022-03-29T07:11:42.000Z |
import subprocess
import os
import pytest
from sentinelhub.testing_utils import get_output_folder
OUTPUT_FOLDER = get_output_folder(__file__)
COMPACT_PRODUCT_ID = 'S2A_MSIL1C_20170414T003551_N0204_R016_T54HVH_20170414T003551'
OLD_PRODUCT_ID = 'S2A_OPER_PRD_MSIL1C_PDMC_20160121T043931_R069_V20160103T171947_20160103T171947'
L2A_PRODUCT_ID = 'S2A_MSIL2A_20180402T151801_N0207_R068_T33XWJ_20180402T202222'
L1C_TILE = 'T38TML 2015-12-19'
L2A_TILE = 'T33XWJ 2018-04-02'
URL = 'http://roda.sentinel-hub.com/sentinel-s2-l1c/tiles/54/H/VH/2017/4/14/0/metadata.xml'
@pytest.mark.parametrize('command', [
pytest.param(f'sentinelhub.aws --product {COMPACT_PRODUCT_ID} -ri -b B8A', marks=pytest.mark.aws_integration),
pytest.param(f'sentinelhub.aws --product {OLD_PRODUCT_ID} -i', marks=pytest.mark.aws_integration),
pytest.param(f'sentinelhub.aws --product {L2A_PRODUCT_ID} -i', marks=pytest.mark.aws_integration),
pytest.param(f'sentinelhub.aws --tile {L1C_TILE} -rei --bands B01,B10', marks=pytest.mark.aws_integration),
pytest.param(f'sentinelhub.aws --tile {L2A_TILE} --l2a -f {OUTPUT_FOLDER}', marks=pytest.mark.aws_integration),
pytest.param(f'sentinelhub.download {URL} {os.path.join(OUTPUT_FOLDER, "example.xml")} -r',
marks=pytest.mark.sh_integration),
'sentinelhub.config --show',
'sentinelhub --help',
'sentinelhub.aws --help',
'sentinelhub.config --help',
'sentinelhub.download --help',
])
def test_return_type(output_folder, command):
assert subprocess.call(command, shell=True) == 0
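# Illustrative invocation (the marker name comes from the params above; the path is local):
#   pytest -m aws_integration tests/test_commands.py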
| 47.181818 | 115 | 0.763648 |
8b4aaa573e7cab675b2cce4597f210de697666a3 | 405 | py | Python
| cogs/example.py | St0pcha/discord-bot-skeleton-py | d983eac8185c01bc91b4cf4834d57193c83de199 | ["Unlicense"] | 5 | 2021-04-03T06:59:14.000Z | 2021-07-01T15:39:45.000Z
| cogs/example.py | St0pcha/discord-bot-skeleton-py | d983eac8185c01bc91b4cf4834d57193c83de199 | ["Unlicense"] | null | null | null
| cogs/example.py | St0pcha/discord-bot-skeleton-py | d983eac8185c01bc91b4cf4834d57193c83de199 | ["Unlicense"] | null | null | null |
import config
import discord
from discord.ext import commands
class CogName(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener() # bot event
async def on_ready(self):
print("Bot is ready!")
@commands.command() # bot command
async def test(self, ctx):
await ctx.send("Test")
def setup(bot):
bot.add_cog(CogName(bot))
| 19.285714 | 40 | 0.634568 |
b0f4213bcd86a70f39d63214aa55381440796b68 | 8,184 | py | Python
| generate.py | abodacs/hardware-aware-transformers | 35d12828fb9e8e7a184adaa4caa187bef3a94be2 | ["MIT"] | 1 | 2020-11-22T12:33:58.000Z | 2020-11-22T12:33:58.000Z
| generate.py | abodacs/hardware-aware-transformers | 35d12828fb9e8e7a184adaa4caa187bef3a94be2 | ["MIT"] | null | null | null
| generate.py | abodacs/hardware-aware-transformers | 35d12828fb9e8e7a184adaa4caa187bef3a94be2 | ["MIT"] | null | null | null |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Translate pre-processed data with a trained model.
"""
import torch
from fairseq import bleu, checkpoint_utils, options, progress_bar, tasks, utils
from fairseq.meters import StopwatchMeter, TimeMeter
import sys
import pdb
import numpy as np
def main(args):
assert args.path is not None, '--path required for generation!'
assert not args.sampling or args.nbest == args.beam, \
'--sampling requires --nbest to be equal to --beam'
assert args.replace_unk is None or args.raw_text, \
'--replace-unk requires a raw text dataset (--raw-text)'
utils.import_user_module(args)
if args.max_tokens is None and args.max_sentences is None:
args.max_tokens = 12000
print(args)
use_cuda = torch.cuda.is_available() and not args.cpu
# Load dataset splits
task = tasks.setup_task(args)
task.load_dataset(args.gen_subset)
# Set dictionaries
try:
src_dict = getattr(task, 'source_dictionary', None)
except NotImplementedError:
src_dict = None
tgt_dict = task.target_dictionary
# Load ensemble
print('| loading model(s) from {}'.format(args.path))
models, _model_args = checkpoint_utils.load_model_ensemble(
args.path.split(':'),
arg_overrides=eval(args.model_overrides),
task=task,
)
torch.manual_seed(args.seed)
# Optimize ensemble for generation
for model in models:
if use_cuda:
model.cuda()
config = utils.get_subtransformer_config(args)
model.set_sample_config(config)
model.make_generation_fast_(
beamable_mm_beam_size=None if args.no_beamable_mm else args.beam,
need_attn=args.print_alignment,
)
if args.fp16:
model.half()
if use_cuda:
model.cuda()
print(model, file=sys.stderr)
print(args.path, file=sys.stderr)
# Load alignment dictionary for unknown word replacement
# (None if no unknown word replacement, empty if no path to align dictionary)
align_dict = utils.load_align_dict(args.replace_unk)
# Load dataset (possibly sharded)
itr = task.get_batch_iterator(
dataset=task.dataset(args.gen_subset),
max_tokens=args.max_tokens,
max_sentences=args.max_sentences,
max_positions=utils.resolve_max_positions(
task.max_positions(),
*[model.max_positions() for model in models]
),
ignore_invalid_inputs=args.skip_invalid_size_inputs_valid_test,
required_batch_size_multiple=args.required_batch_size_multiple,
num_shards=args.num_shards,
shard_id=args.shard_id,
num_workers=args.num_workers,
).next_epoch_itr(shuffle=False)
# Initialize generator
gen_timer = StopwatchMeter()
generator = task.build_generator(args)
num_sentences = 0
has_target = True
decoder_times_all = []
input_len_all = []
with progress_bar.build_progress_bar(args, itr) as t:
wps_meter = TimeMeter()
for sample in t:
sample = utils.move_to_cuda(sample) if use_cuda else sample
if 'net_input' not in sample:
continue
prefix_tokens = None
if args.prefix_size > 0:
prefix_tokens = sample['target'][:, :args.prefix_size]
gen_timer.start()
hypos, decoder_times = task.inference_step(generator, models, sample, prefix_tokens)
input_len_all.append(np.mean(sample['net_input']['src_lengths'].cpu().numpy()))
print(decoder_times)
decoder_times_all.append(decoder_times)
num_generated_tokens = sum(len(h[0]['tokens']) for h in hypos)
gen_timer.stop(num_generated_tokens)
for i, sample_id in enumerate(sample['id'].tolist()):
has_target = sample['target'] is not None
# Remove padding
src_tokens = utils.strip_pad(sample['net_input']['src_tokens'][i, :], tgt_dict.pad())
target_tokens = None
if has_target:
target_tokens = utils.strip_pad(sample['target'][i, :], tgt_dict.pad()).int().cpu()
# Either retrieve the original sentences or regenerate them from tokens.
if align_dict is not None:
src_str = task.dataset(args.gen_subset).src.get_original_text(sample_id)
target_str = task.dataset(args.gen_subset).tgt.get_original_text(sample_id)
else:
if src_dict is not None:
src_str = src_dict.string(src_tokens, args.remove_bpe)
else:
src_str = ""
if has_target:
target_str = tgt_dict.string(target_tokens, args.remove_bpe, escape_unk=True)
if not args.quiet:
if src_dict is not None:
print('S-{}\t{}'.format(sample_id, src_str))
if has_target:
print('T-{}\t{}'.format(sample_id, target_str))
# Process top predictions
for j, hypo in enumerate(hypos[i][:args.nbest]):
hypo_tokens, hypo_str, alignment = utils.post_process_prediction(
hypo_tokens=hypo['tokens'].int().cpu(),
src_str=src_str,
alignment=hypo['alignment'].int().cpu() if hypo['alignment'] is not None else None,
align_dict=align_dict,
tgt_dict=tgt_dict,
remove_bpe=args.remove_bpe,
)
if not args.quiet:
print('H-{}\t{}\t{}'.format(sample_id, hypo['score'], hypo_str))
print('P-{}\t{}'.format(
sample_id,
' '.join(map(
lambda x: '{:.4f}'.format(x),
hypo['positional_scores'].tolist(),
))
))
if args.print_alignment:
print('A-{}\t{}'.format(
sample_id,
' '.join(map(lambda x: str(utils.item(x)), alignment))
))
wps_meter.update(num_generated_tokens)
t.log({'wps': round(wps_meter.avg)})
num_sentences += sample['nsentences']
def cli_main():
parser = options.get_generation_parser()
parser.add_argument('--encoder-embed-dim-subtransformer', type=int, help='subtransformer encoder embedding dimension',
default=None)
parser.add_argument('--decoder-embed-dim-subtransformer', type=int, help='subtransformer decoder embedding dimension',
default=None)
parser.add_argument('--encoder-ffn-embed-dim-all-subtransformer', nargs='+', default=None, type=int)
parser.add_argument('--decoder-ffn-embed-dim-all-subtransformer', nargs='+', default=None, type=int)
parser.add_argument('--encoder-layer-num-subtransformer', type=int, help='subtransformer num encoder layers')
parser.add_argument('--decoder-layer-num-subtransformer', type=int, help='subtransformer num decoder layers')
parser.add_argument('--encoder-self-attention-heads-all-subtransformer', nargs='+', default=None, type=int)
parser.add_argument('--decoder-self-attention-heads-all-subtransformer', nargs='+', default=None, type=int)
parser.add_argument('--decoder-ende-attention-heads-all-subtransformer', nargs='+', default=None, type=int)
parser.add_argument('--decoder-arbitrary-ende-attn-all-subtransformer', nargs='+', default=None, type=int)
args = options.parse_args_and_arch(parser)
if args.pdb:
pdb.set_trace()
main(args)
if __name__ == '__main__':
cli_main()
| 38.603774 | 122 | 0.601051 |
2bd90dadd16534e6a4478d69f3f43025ac7beee2 | 4,675 | py | Python
| infra/bots/recipes/perf_canvaskit.py | jonhpark7966/skia | 39d16248f5a7d1692cec49847052cf3a5b420ff1 | ["BSD-3-Clause"] | 1 | 2019-02-12T19:10:24.000Z | 2019-02-12T19:10:24.000Z
| infra/bots/recipes/perf_canvaskit.py | jonhpark7966/skia | 39d16248f5a7d1692cec49847052cf3a5b420ff1 | ["BSD-3-Clause"] | null | null | null
| infra/bots/recipes/perf_canvaskit.py | jonhpark7966/skia | 39d16248f5a7d1692cec49847052cf3a5b420ff1 | ["BSD-3-Clause"] | 1 | 2019-02-14T01:00:16.000Z | 2019-02-14T01:00:16.000Z |
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe which runs the CanvasKit perf tests using docker
DEPS = [
'checkout',
'infra',
'recipe_engine/file',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
'run',
'vars',
]
DOCKER_IMAGE = 'gcr.io/skia-public/perf-karma-chrome-tests:68.0.3440.106_v6'
INNER_KARMA_SCRIPT = '/SRC/skia/infra/canvaskit/perf_canvaskit.sh'
def RunSteps(api):
api.vars.setup()
checkout_root = api.checkout.default_checkout_root
out_dir = api.vars.swarming_out_dir
api.checkout.bot_update(checkout_root=checkout_root)
# Make sure this exists, otherwise Docker will make it with root permissions.
api.file.ensure_directory('mkdirs out_dir', out_dir, mode=0777)
# The karma script is configured to look in ./canvaskit/bin/ for
# the test files to load, so we must copy them there (see Set up for docker).
copy_dest = checkout_root.join('skia', 'experimental', 'canvaskit',
'canvaskit', 'bin')
base_dir = api.vars.build_dir
bundle_name = 'canvaskit.wasm'
api.python.inline(
name='Set up for docker',
program='''import errno
import os
import shutil
import sys
copy_dest = sys.argv[1]
base_dir = sys.argv[2]
bundle_name = sys.argv[3]
out_dir = sys.argv[4]
# Clean out old binaries (if any)
try:
shutil.rmtree(copy_dest)
except OSError as e:
if e.errno != errno.ENOENT:
raise
# Make folder
try:
os.makedirs(copy_dest)
except OSError as e:
if e.errno != errno.EEXIST:
raise
# Copy binaries (canvaskit.js and canvaskit.wasm) to where the karma tests
# expect them ($SKIA_ROOT/experimental/canvaskit/canvaskit/bin/)
dest = os.path.join(copy_dest, 'canvaskit.js')
shutil.copyfile(os.path.join(base_dir, 'canvaskit.js'), dest)
os.chmod(dest, 0o644) # important, otherwise non-privileged docker can't read.
if bundle_name:
dest = os.path.join(copy_dest, bundle_name)
shutil.copyfile(os.path.join(base_dir, bundle_name), dest)
os.chmod(dest, 0o644) # important, otherwise non-privileged docker can't read.
# Prepare output folder, api.file.ensure_directory doesn't touch
# the permissions of the out directory if it already exists.
os.chmod(out_dir, 0o777) # important, otherwise non-privileged docker can't write.
''',
args=[copy_dest, base_dir, bundle_name, out_dir],
infra_step=True)
cmd = ['docker', 'run', '--shm-size=2gb', '--rm',
'--volume', '%s:/SRC' % checkout_root,
'--volume', '%s:/OUT' % out_dir]
cmd.extend([
DOCKER_IMAGE, INNER_KARMA_SCRIPT,
'--builder', api.vars.builder_name,
'--git_hash', api.properties['revision'],
'--buildbucket_build_id', api.properties.get('buildbucket_build_id',
''),
'--bot_id', api.vars.swarming_bot_id,
'--task_id', api.vars.swarming_task_id,
'--browser', 'Chrome',
'--config', api.vars.configuration,
'--source_type', 'canvaskit',
])
if api.vars.is_trybot:
cmd.extend([
'--issue', api.vars.issue,
'--patchset', api.vars.patchset,
'--patch_storage', api.vars.patch_storage,
])
api.run(
api.step,
'Performance tests of canvaskit with Docker',
cmd=cmd)
def GenTests(api):
yield (
api.test('Perf-Debian9-EMCC-GCE-CPU-AVX2-wasm-Release-All-CanvasKit') +
api.properties(buildername=('Perf-Debian9-EMCC-GCE-CPU-AVX2'
'-wasm-Release-All-CanvasKit'),
repository='https://skia.googlesource.com/skia.git',
revision='abc123',
path_config='kitchen',
swarm_out_dir='[SWARM_OUT_DIR]')
)
yield (
api.test('pathkit_trybot') +
api.properties(buildername=('Perf-Debian9-EMCC-GCE-GPU-AVX2'
'-wasm-Release-All-CanvasKit'),
repository='https://skia.googlesource.com/skia.git',
revision='abc123',
path_config='kitchen',
swarm_out_dir='[SWARM_OUT_DIR]',
patch_ref='89/456789/12',
patch_repo='https://skia.googlesource.com/skia.git',
patch_storage='gerrit',
patch_set=7,
patch_issue=1234,
gerrit_project='skia',
gerrit_url='https://skia-review.googlesource.com/')
)
| 32.692308 | 82 | 0.620321 |
6496c8b7f561e604062dd8b44bc0473d4c271d02 | 10,704 | py | Python
| tools/dependency/cve_scan.py | terorie/envoy | 7e3b483bbb004bf2b8234e3ec2be6ceef254a2f9 | ["Apache-2.0"] | 1 | 2020-12-28T15:03:17.000Z | 2020-12-28T15:03:17.000Z
| tools/dependency/cve_scan.py | terorie/envoy | 7e3b483bbb004bf2b8234e3ec2be6ceef254a2f9 | ["Apache-2.0"] | 8 | 2020-12-28T15:16:39.000Z | 2021-06-02T03:45:06.000Z
| tools/dependency/cve_scan.py | terorie/envoy | 7e3b483bbb004bf2b8234e3ec2be6ceef254a2f9 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python3
# Scan for any external dependencies that were last updated before known CVEs
# (and near relatives). We also try a fuzzy match on version information.
from collections import defaultdict, namedtuple
import datetime as dt
import gzip
import json
import re
import sys
import textwrap
import urllib.request
import utils as dep_utils
# These CVEs are false positives for the match heuristics. An explanation is
# required when adding a new entry to this list as a comment.
IGNORES_CVES = set([
# Node.js issue unrelated to http-parser (napi_ API implementation).
'CVE-2020-8174',
# Node.js HTTP desync attack. Request smuggling due to CR and hyphen
# conflation in llhttp
# (https://github.com/nodejs/llhttp/commit/9d9da1d0f18599ceddd8f484df5a5ad694d23361).
# This was a result of using llparse's toLowerUnsafe() for header keys.
# http-parser uses a TOKEN method that doesn't have the same issue for
# header fields.
'CVE-2020-8201',
# Node.js issue unrelated to http-parser. This is a DoS due to a lack of
# request/connection timeouts, see
# https://github.com/nodejs/node/commit/753f3b247a.
'CVE-2020-8251',
# Node.js issue unrelated to http-parser (libuv).
'CVE-2020-8252',
# Fixed via the nghttp2 1.41.0 bump in Envoy 8b6ea4.
'CVE-2020-11080',
# Node.js issue rooted in a c-ares bug. Does not appear to affect
# http-parser or our use of c-ares, c-ares has been bumped regardless.
'CVE-2020-8277',
# gRPC issue that only affects Javascript bindings.
'CVE-2020-7768',
# Node.js issue unrelated to http-parser, see
# https://github.com/mhart/StringStream/issues/7.
'CVE-2018-21270',
])
# Subset of CVE fields that are useful below.
Cve = namedtuple(
'Cve',
['id', 'description', 'cpes', 'score', 'severity', 'published_date', 'last_modified_date'])
class Cpe(namedtuple('CPE', ['part', 'vendor', 'product', 'version'])):
'''Model a subset of CPE fields that are used in CPE matching.'''
@classmethod
def FromString(cls, cpe_str):
assert (cpe_str.startswith('cpe:2.3:'))
components = cpe_str.split(':')
assert (len(components) >= 6)
return cls(*components[2:6])
def __str__(self):
return f'cpe:2.3:{self.part}:{self.vendor}:{self.product}:{self.version}'
def VendorNormalized(self):
'''Return a normalized CPE where only part and vendor are significant.'''
return Cpe(self.part, self.vendor, '*', '*')
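# Illustrative example (not from the source): parsing and normalizing a CPE string.
#   Cpe.FromString('cpe:2.3:a:nghttp2:nghttp2:1.41.0')  -> Cpe('a', 'nghttp2', 'nghttp2', '1.41.0')
#   str(Cpe.FromString('cpe:2.3:a:nghttp2:nghttp2:1.41.0').VendorNormalized())  -> 'cpe:2.3:a:nghttp2:*:*'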
def ParseCveJson(cve_json, cves, cpe_revmap):
'''Parse CVE JSON dictionary.
Args:
cve_json: a NIST CVE JSON dictionary.
cves: dictionary mapping CVE ID string to Cve object (output).
cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string.
'''
# This provides an over-approximation of possible CPEs affected by CVE nodes
# metadata; it traverses the entire AND-OR tree and just gathers every CPE
# observed. Generally we expect that most of Envoy's CVE-CPE matches to be
# simple, plus it's interesting to consumers of this data to understand when a
# CPE pops up, even in a conditional setting.
def GatherCpes(nodes, cpe_set):
for node in nodes:
for cpe_match in node.get('cpe_match', []):
cpe_set.add(Cpe.FromString(cpe_match['cpe23Uri']))
GatherCpes(node.get('children', []), cpe_set)
for cve in cve_json['CVE_Items']:
cve_id = cve['cve']['CVE_data_meta']['ID']
description = cve['cve']['description']['description_data'][0]['value']
cpe_set = set()
GatherCpes(cve['configurations']['nodes'], cpe_set)
if len(cpe_set) == 0:
continue
cvss_v3_score = cve['impact']['baseMetricV3']['cvssV3']['baseScore']
cvss_v3_severity = cve['impact']['baseMetricV3']['cvssV3']['baseSeverity']
def ParseCveDate(date_str):
assert (date_str.endswith('Z'))
return dt.date.fromisoformat(date_str.split('T')[0])
published_date = ParseCveDate(cve['publishedDate'])
last_modified_date = ParseCveDate(cve['lastModifiedDate'])
cves[cve_id] = Cve(cve_id, description, cpe_set, cvss_v3_score, cvss_v3_severity,
published_date, last_modified_date)
for cpe in cpe_set:
cpe_revmap[str(cpe.VendorNormalized())].add(cve_id)
return cves, cpe_revmap
def DownloadCveData(urls):
'''Download NIST CVE JSON databases from given URLs and parse.
Args:
urls: a list of URLs.
Returns:
cves: dictionary mapping CVE ID string to Cve object (output).
cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string.
'''
cves = {}
cpe_revmap = defaultdict(set)
for url in urls:
print(f'Loading NIST CVE database from {url}...')
with urllib.request.urlopen(url) as request:
with gzip.GzipFile(fileobj=request) as json_data:
ParseCveJson(json.loads(json_data.read()), cves, cpe_revmap)
return cves, cpe_revmap
def FormatCveDetails(cve, deps):
formatted_deps = ', '.join(sorted(deps))
wrapped_description = '\n '.join(textwrap.wrap(cve.description))
return f'''
CVE ID: {cve.id}
CVSS v3 score: {cve.score}
Severity: {cve.severity}
Published date: {cve.published_date}
Last modified date: {cve.last_modified_date}
Dependencies: {formatted_deps}
Description: {wrapped_description}
Affected CPEs:
''' + '\n '.join(f'- {cpe}' for cpe in cve.cpes)
FUZZY_DATE_RE = re.compile(r'(\d{4}).?(\d{2}).?(\d{2})')
FUZZY_SEMVER_RE = re.compile(r'(\d+)[:\.\-_](\d+)[:\.\-_](\d+)')
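# Illustrative matches for the fuzzy patterns above (not exhaustive):
#   FUZZY_DATE_RE on 'fips-20190304' and on '2019.03.04' both yield the groups ('2019', '03', '04')
#   FUZZY_SEMVER_RE on '2.1.0-beta3' and on '2_1_0' both yield the groups ('2', '1', '0')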
def RegexGroupsMatch(regex, lhs, rhs):
'''Do two strings match modulo a regular expression?
Args:
regex: regular expression
lhs: LHS string
rhs: RHS string
Returns:
A boolean indicating match.
'''
lhs_match = regex.search(lhs)
if lhs_match:
rhs_match = regex.search(rhs)
if rhs_match and lhs_match.groups() == rhs_match.groups():
return True
return False
def CpeMatch(cpe, dep_metadata):
'''Heuristically match dependency metadata against CPE.
We have a number of rules below that are easy to compute without having
to look at the dependency metadata. In the future, with additional access to
repository information we could do the following:
- For dependencies at a non-release version, walk back through git history to
the last known release version and attempt a match with this.
- For dependencies at a non-release version, use the commit date to look for a
version match where version is YYYY-MM-DD.
Args:
cpe: Cpe object to match against.
dep_metadata: dependency metadata dictionary.
Returns:
A boolean indicating a match.
'''
dep_cpe = Cpe.FromString(dep_metadata['cpe'])
dep_version = dep_metadata['version']
# The 'part' and 'vendor' must be an exact match.
if cpe.part != dep_cpe.part:
return False
if cpe.vendor != dep_cpe.vendor:
return False
# We allow Envoy dependency CPEs to wildcard the 'product', this is useful for
# LLVM where multiple products need to be covered.
if dep_cpe.product != '*' and cpe.product != dep_cpe.product:
return False
# Wildcard versions always match.
if cpe.version == '*':
return True
# An exact version match is a hit.
if cpe.version == dep_version:
return True
# Allow the 'release_date' dependency metadata to substitute for date.
# TODO(htuch): Consider fuzzier date ranges.
if cpe.version == dep_metadata['release_date']:
return True
# Try a fuzzy date match to deal with versions like fips-20190304 in dependency version.
if RegexGroupsMatch(FUZZY_DATE_RE, dep_version, cpe.version):
return True
# Try a fuzzy semver match to deal with things like 2.1.0-beta3.
if RegexGroupsMatch(FUZZY_SEMVER_RE, dep_version, cpe.version):
return True
# Fall-thru.
return False
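# A hypothetical walk-through of CpeMatch (values are illustrative, not from the source):
#   cve_cpe = Cpe('a', 'nghttp2', 'nghttp2', '*')
#   dep_metadata = {'cpe': 'cpe:2.3:a:nghttp2:nghttp2:*', 'version': '1.41.0',
#                   'release_date': '2020-06-02'}
#   CpeMatch(cve_cpe, dep_metadata)  # -> True, via the wildcard version rule
# CveMatch below then additionally compares the CVE publication date against the
# dependency's release_date before reporting a wildcard match.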
def CveMatch(cve, dep_metadata):
'''Heuristically match dependency metadata against CVE.
In general, we allow false positives but want to keep the noise low, to avoid
the toil around having to populate IGNORES_CVES.
Args:
cve: Cve object to match against.
dep_metadata: dependency metadata dictionary.
Returns:
A boolean indicating a match.
'''
wildcard_version_match = False
# Consider each CPE attached to the CVE for a match against the dependency CPE.
for cpe in cve.cpes:
if CpeMatch(cpe, dep_metadata):
# Wildcard version matches need additional heuristics unrelated to CPE to
# qualify, e.g. last updated date.
if cpe.version == '*':
wildcard_version_match = True
else:
return True
if wildcard_version_match:
# If the CVE was published after the dependency was last updated, it's a
# potential match.
last_dep_update = dt.date.fromisoformat(dep_metadata['release_date'])
if last_dep_update <= cve.published_date:
return True
return False
def CveScan(cves, cpe_revmap, cve_allowlist, repository_locations):
'''Scan for CVEs in a parsed NIST CVE database.
Args:
cves: CVE dictionary as provided by DownloadCveData().
cpe_revmap: CPE-CVE reverse map as provided by DownloadCveData().
cve_allowlist: an allowlist of CVE IDs to ignore.
repository_locations: a dictionary of dependency metadata in the format
described in api/bazel/external_deps.bzl.
Returns:
possible_cves: a dictionary mapping CVE IDs to Cve objects.
cve_deps: a dictionary mapping CVE IDs to dependency names.
'''
possible_cves = {}
cve_deps = defaultdict(list)
for dep, metadata in repository_locations.items():
cpe = metadata.get('cpe', 'N/A')
if cpe == 'N/A':
continue
candidate_cve_ids = cpe_revmap.get(str(Cpe.FromString(cpe).VendorNormalized()), [])
for cve_id in candidate_cve_ids:
cve = cves[cve_id]
if cve.id in cve_allowlist:
continue
if CveMatch(cve, metadata):
possible_cves[cve_id] = cve
cve_deps[cve_id].append(dep)
return possible_cves, cve_deps
if __name__ == '__main__':
# Allow local overrides for NIST CVE database URLs via args.
urls = sys.argv[1:]
if not urls:
# We only look back a few years, since we shouldn't have any ancient deps.
current_year = dt.datetime.now().year
scan_years = range(2018, current_year + 1)
urls = [
f'https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-{year}.json.gz' for year in scan_years
]
cves, cpe_revmap = DownloadCveData(urls)
possible_cves, cve_deps = CveScan(cves, cpe_revmap, IGNORES_CVES, dep_utils.RepositoryLocations())
if possible_cves:
print('\nBased on heuristic matching with the NIST CVE database, Envoy may be vulnerable to:')
for cve_id in sorted(possible_cves):
print(f'{FormatCveDetails(possible_cves[cve_id], cve_deps[cve_id])}')
sys.exit(1)
| 36.162162 | 100 | 0.703008 |
bd98f5bc93198dbd9e86cf297b078bc0f005d6ae | 5,709 | py | Python
| train.py | nvvaulin/medical_imaging | ff00fc43ac0edcfb2151478f89e6c82be40af433 | ["Apache-2.0"] | null | null | null
| train.py | nvvaulin/medical_imaging | ff00fc43ac0edcfb2151478f89e6c82be40af433 | ["Apache-2.0"] | null | null | null
| train.py | nvvaulin/medical_imaging | ff00fc43ac0edcfb2151478f89e6c82be40af433 | ["Apache-2.0"] | null | null | null |
from config import ex
import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping
import models
from utils.existed_checkpoint import ExistedModelCheckpoint
import datasets
from pytorch_lightning import loggers as pl_loggers
from torchvision import transforms
from torch.utils.data import DataLoader
import os
from utils import load_from_config,config_name
import torch
from utils import samplers
import numpy as np
def load_dataset(dataset, mode, sampler=None,batch_size=64,input_size=(224,224),num_workers=8,reduce_size=None,label_names=None):
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
input_size = tuple(input_size)
if mode == 'train':
transform = transforms.Compose([
transforms.RandomResizedCrop(input_size, (0.8, 1.2)),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(mean, std)
])
else:
transform = transforms.Compose([
transforms.Resize(input_size),
transforms.ToTensor(),
transforms.Normalize(mean, std)
])
if not isinstance(dataset,list):
dataset = [dataset]
dataset = [load_from_config(i,datasets)(mode=mode,transform=transform,reduce_size=reduce_size) for i in dataset]
dataset = datasets.JoinDatasets(dataset,use_names=label_names)
if not (sampler is None):
sampler = load_from_config(sampler, samplers)(labels=dataset.labels,
label_names=dataset.label_names)
if mode=='train' and (sampler is None):
loader = DataLoader(dataset, batch_size=batch_size, num_workers=num_workers,shuffle=True)
else:
loader = DataLoader(dataset, batch_size=batch_size, num_workers=num_workers,sampler=sampler)
return loader,dataset
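# Illustrative call (the dataset spec and argument values are placeholders, not from the real config):
#   loader, dataset = load_dataset(some_dataset_config, mode='train',
#                                  batch_size=32, input_size=(224, 224))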
@ex.capture
def load_train_val_test(dataset=None,train_dataset=None,val_dataset=None,test_dataset=None,sampler=None,
batch_size=64, input_size=(224, 224), num_workers=8, label_names=None,reduce_size=None):
train = load_dataset(train_dataset or dataset,mode='train',sampler=sampler,batch_size=batch_size,input_size=input_size,num_workers=num_workers,label_names=label_names,reduce_size=reduce_size)
if label_names is None:
label_names = train[1].label_names
val = load_dataset(val_dataset or dataset,mode='val',batch_size=batch_size,input_size=input_size,num_workers=num_workers,label_names=label_names,reduce_size=reduce_size)
test_dataset = test_dataset or dataset
if not isinstance(test_dataset,list):
test_dataset = [test_dataset]
test = [load_dataset(i,mode='test',batch_size=batch_size,input_size=input_size,num_workers=num_workers,label_names=label_names,reduce_size=reduce_size) for i in test_dataset]
number_of_samples = np.concatenate([(i[1].labels==1).sum(0)[:,None] for i in [train,val,*test]],1)
print('\n'.join(['{:15s} | '.format(n)+' | '.join(['{:6d}'.format(i) for i in c]) for n,c in zip(label_names,number_of_samples)]))
return label_names, train[0],val[0],[i[0] for i in test]
@ex.capture
def load_model(label_names,optimizer,scheduler,backbone,unfreeze_epoch=0,pretrained_backbone=None):
backbone = load_from_config(backbone,models)()
if not (pretrained_backbone is None):
backbone.load_state_dict(torch.load(pretrained_backbone)['state_dict'],strict=True)
optimizer = load_from_config(optimizer,torch.optim)
lr_scheduler = load_from_config(scheduler,torch.optim.lr_scheduler)
model = models.BasicClassifierModel(backbone, label_names, optimizer, lr_scheduler,unfreeze_epoch=unfreeze_epoch)
return model
@ex.capture
def load_trainer(exp_root,exp_name,version,_config,load_epoch=None):
tb_logger = pl_loggers.TensorBoardLogger(exp_root,exp_name,version)
checkpointer = ExistedModelCheckpoint(monitor='val_loss',
mode='min',
save_top_k=5,
dirpath = os.path.join(exp_root,exp_name,version,'checkpoints'),
filename=config_name(_config['backbone'])+'-{epoch}-{val_loss:.3f}-{train_loss:.3f}')
callbacks = [checkpointer,EarlyStopping(monitor='val_loss',patience=10)]
trainer = pl.Trainer(logger=tb_logger,
resume_from_checkpoint=checkpointer.get_checkpoint_path(load_epoch),
callbacks=callbacks,**_config.get('trainer',{}))
return trainer,checkpointer
@ex.capture
def write_results(path,results,exp_root,exp_name,version):
open(os.path.join(exp_root,exp_name,version,'%s.csv'%(path.split('/')[-1])),'a').write('\n'.join(['%s,%s'%(k,str(v)) for k,v in results[0].items()])+'\n')
@ex.command
def test(load_epoch):
label_names, _, _, test_loaders = load_train_val_test()
model = load_model(label_names)
trainer,checkpointer = load_trainer()
for test_loader in test_loaders:
results = trainer.test(model=model,test_dataloaders=test_loader)
write_results(checkpointer.get_checkpoint_path(load_epoch),results)
@ex.automain
def main(load_epoch):
label_names,train_loader, val_loader, test_loaders = load_train_val_test()
model=load_model(label_names)
trainer,checkpointer = load_trainer()
try:
trainer.fit(model, train_loader, val_loader)
finally:
if checkpointer.get_checkpoint_path(load_epoch) is not None:
for test_loader in test_loaders:
results = trainer.test(model=model, test_dataloaders=test_loader)
write_results(checkpointer.get_checkpoint_path(load_epoch), results)
| 47.181818 | 195 | 0.705728 |
171cef4a7b916f79ebb8f28a5288a314da176bae | 938 | py | Python
| 501.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | ["MIT"] | null | null | null
| 501.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | ["MIT"] | null | null | null
| 501.py | wilbertgeng/LeetCode_exercise | f00c08e0d28ffa88d61d4262c6d1f49f1fa91ebc | ["MIT"] | null | null | null |
"""501. Find Mode in Binary Search Tree"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution(object):
def findMode(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
nums = []
if not root:
return None
self.dfs(root, nums)
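# nums now holds the node values in sorted (in-order) order.
# Example (illustrative): for the BST [1, null, 2, 2], nums == [1, 2, 2] and the result is [2].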
d = {}
for num in nums:
if num not in d:
d[num] = 1
else:
d[num] += 1
res = []
mode = max(d.values())
for key in d:
if d[key] == mode:
res.append(key)
return res
def dfs(self, node, nums):
if not node:
return
self.dfs(node.left, nums)
nums.append(node.val)
self.dfs(node.right, nums)
| 22.333333 | 55 | 0.464819 |
2d41625d6b606d3887aa36d96dda77bf937aa3d7 | 6,590 | py | Python
| docs/source/conf.py | stanwood/alexa-skill | cb28475d25c0ce2b78f0569eb7e94b4759cbb1b1 | ["MIT"] | 5 | 2018-09-28T08:19:48.000Z | 2018-10-09T11:46:41.000Z
| docs/source/conf.py | stanwood/alexa-skill | cb28475d25c0ce2b78f0569eb7e94b4759cbb1b1 | ["MIT"] | 1 | 2018-12-27T15:21:55.000Z | 2021-05-29T19:47:51.000Z
| docs/source/conf.py | stanwood/alexa-skill | cb28475d25c0ce2b78f0569eb7e94b4759cbb1b1 | ["MIT"] | null | null | null |
# The MIT License (MIT)
#
# Copyright (c) 2018 stanwood GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
# -- Project information -----------------------------------------------------
import sphinx_rtd_theme
project = u'alexa-skill'
copyright = u'2018, Piotr Rogulski, stanwood GmbH'
author = u'Piotr Rogulski, stanwood GmbH'
# The short X.Y version
version = u''
# The full version, including alpha/beta/rc tags
release = u''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), '_static']
html_logo = '_static/social.png'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'alexa-skilldoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'alexa-skill.tex', u'alexa-skill Documentation',
u'Piotr Rogulski, stanwood GmbH', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'alexa-skill', u'alexa-skill Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'alexa-skill', u'alexa-skill Documentation',
author, 'alexa-skill', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| 32.146341 | 79 | 0.673141 |
f4b5ef14789a6dfd6876207867fb5770ddbb3848 | 2,239 | py | Python
| backend/app/model/questionnaires/chain_questionnaire.py | sartography/star-drive | c0f33378d42913c3e677e07f74eb46d7b2b82a0a | ["MIT"] | null | null | null
| backend/app/model/questionnaires/chain_questionnaire.py | sartography/star-drive | c0f33378d42913c3e677e07f74eb46d7b2b82a0a | ["MIT"] | 368 | 2018-12-18T14:43:20.000Z | 2022-03-02T02:54:18.000Z
| backend/app/model/questionnaires/chain_questionnaire.py | sartography/star-drive | c0f33378d42913c3e677e07f74eb46d7b2b82a0a | ["MIT"] | 2 | 2019-10-02T03:06:06.000Z | 2020-10-05T16:53:48.000Z |
from marshmallow import pre_load
from sqlalchemy import func
from sqlalchemy.ext.declarative import declared_attr
from app import db, ma
from app.export_service import ExportService
from app.model.questionnaires.chain_session import ChainSessionSchema, ChainSession
from app.schema.model_schema import ModelSchema
class ChainQuestionnaire(db.Model):
__tablename__ = "chain_questionnaire"
__label__ = "SkillSTAR Chain Questionnaire"
__question_type__ = ExportService.TYPE_UNRESTRICTED
__estimated_duration_minutes__ = 5
id = db.Column(db.Integer, primary_key=True)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
time_on_task_ms = db.Column(db.BigInteger, default=0)
@declared_attr
def participant_id(cls):
return db.Column("participant_id", db.Integer, db.ForeignKey("stardrive_participant.id"))
@declared_attr
def user_id(cls):
return db.Column("user_id", db.Integer, db.ForeignKey("stardrive_user.id"))
@declared_attr
def sessions(cls):
return db.relationship(
"ChainSession",
backref=db.backref(cls.__tablename__, lazy=True),
cascade="all, delete-orphan",
passive_deletes=True
)
def get_field_groups(self):
field_groups = {
"sessions": {
"type": "repeat",
"display_order": 3,
"wrappers": ["card"],
"repeat_class": ChainSession,
"template_options": {
"label": "Chain Session",
"description": "Add a session",
},
"expression_properties": {},
},
}
return field_groups
class ChainQuestionnaireSchema(ModelSchema):
@pre_load
def set_field_session(self, data, **kwargs):
self.fields['sessions'].schema.session = self.session
return data
class Meta(ModelSchema.Meta):
model = ChainQuestionnaire
fields = (
"id",
"last_updated",
"participant_id",
"user_id",
"time_on_task_ms",
"sessions",
)
sessions = ma.Nested(ChainSessionSchema, many=True)
| 30.671233 | 97 | 0.623493 |
47ef5220745668ee9dc31fa589c0eb23da7c3bb8 | 6,157 | py | Python
| qiskit/circuit/library/standard_gates/y.py | SpinQTech/SpinQKit | 2e24826688b2b26cf7efa66fd47f0e7ef883a96c | ["Apache-2.0"] | 2 | 2021-12-20T05:19:44.000Z | 2021-12-20T05:21:48.000Z
| qiskit/circuit/library/standard_gates/y.py | SpinQTech/SpinQKit | 2e24826688b2b26cf7efa66fd47f0e7ef883a96c | ["Apache-2.0"] | null | null | null
| qiskit/circuit/library/standard_gates/y.py | SpinQTech/SpinQKit | 2e24826688b2b26cf7efa66fd47f0e7ef883a96c | ["Apache-2.0"] | 1 | 2021-12-20T05:20:35.000Z | 2021-12-20T05:20:35.000Z |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Y and CY gates."""
import numpy
from numpy import pi
# pylint: disable=cyclic-import
from qiskit.circuit.controlledgate import ControlledGate
from qiskit.circuit.gate import Gate
from qiskit.circuit.quantumregister import QuantumRegister
class YGate(Gate):
r"""The single-qubit Pauli-Y gate (:math:`\sigma_y`).
**Matrix Representation:**
.. math::
Y = \begin{pmatrix}
0 & -i \\
i & 0
\end{pmatrix}
**Circuit symbol:**
.. parsed-literal::
┌───┐
q_0: ┤ Y ├
└───┘
Equivalent to a :math:`\pi` radian rotation about the Y axis.
.. note::
A global phase difference exists between the definitions of
:math:`RY(\pi)` and :math:`Y`.
.. math::
RY(\pi) = \begin{pmatrix}
0 & -1 \\
1 & 0
\end{pmatrix}
= -i Y
The gate is equivalent to a bit and phase flip.
.. math::
|0\rangle \rightarrow i|1\rangle \\
|1\rangle \rightarrow -i|0\rangle
"""
def __init__(self, label=None):
"""Create new Y gate."""
super().__init__("y", 1, [], label=label)
def _define(self):
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .u3 import U3Gate
q = QuantumRegister(1, "q")
qc = QuantumCircuit(q, name=self.name)
rules = [(U3Gate(pi, pi / 2, pi / 2), [q[0]], [])]
for instr, qargs, cargs in rules:
qc._append(instr, qargs, cargs)
self.definition = qc
def control(self, num_ctrl_qubits=1, label=None, ctrl_state=None):
"""Return a (multi-)controlled-Y gate.
One control returns a CY gate.
Args:
num_ctrl_qubits (int): number of control qubits.
label (str or None): An optional label for the gate [Default: None]
ctrl_state (int or str or None): control state expressed as integer,
string (e.g. '110'), or None. If None, use all 1s.
Returns:
ControlledGate: controlled version of this gate.
"""
if num_ctrl_qubits == 1:
gate = CYGate(label=label, ctrl_state=ctrl_state)
gate.base_gate.label = self.label
return gate
return super().control(num_ctrl_qubits=num_ctrl_qubits, label=label, ctrl_state=ctrl_state)
def inverse(self):
r"""Return inverted Y gate (:math:`Y{\dagger} = Y`)"""
return YGate() # self-inverse
def __array__(self, dtype=None):
"""Return a numpy.array for the Y gate."""
return numpy.array([[0, -1j], [1j, 0]], dtype=dtype)
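# Illustrative usage (not part of this file): a YGate is typically applied through the
# QuantumCircuit API rather than instantiated directly, e.g.
#   from qiskit import QuantumCircuit
#   qc = QuantumCircuit(1)
#   qc.y(0)   # appends a YGate to qubit 0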
class CYGate(ControlledGate):
r"""Controlled-Y gate.
**Circuit symbol:**
.. parsed-literal::
q_0: ──■──
┌─┴─┐
q_1: ┤ Y ├
└───┘
**Matrix representation:**
.. math::
CY\ q_0, q_1 =
I \otimes |0 \rangle\langle 0| + Y \otimes |1 \rangle\langle 1| =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & 0 & 0 & -i \\
0 & 0 & 1 & 0 \\
0 & i & 0 & 0
\end{pmatrix}
.. note::
In Qiskit's convention, higher qubit indices are more significant
(little endian convention). In many textbooks, controlled gates are
presented with the assumption of more significant qubits as control,
which in our case would be q_1. Thus a textbook matrix for this
gate will be:
.. parsed-literal::
┌───┐
q_0: ┤ Y ├
└─┬─┘
q_1: ──■──
.. math::
CY\ q_1, q_0 =
|0 \rangle\langle 0| \otimes I + |1 \rangle\langle 1| \otimes Y =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 0 & -i \\
0 & 0 & i & 0
\end{pmatrix}
"""
# Define class constants. This saves future allocation time.
_matrix1 = numpy.array([[1, 0, 0, 0], [0, 0, 0, -1j], [0, 0, 1, 0], [0, 1j, 0, 0]])
_matrix0 = numpy.array([[0, 0, -1j, 0], [0, 1, 0, 0], [1j, 0, 0, 0], [0, 0, 0, 1]])
def __init__(self, label=None, ctrl_state=None):
"""Create new CY gate."""
super().__init__(
"cy", 2, [], num_ctrl_qubits=1, label=label, ctrl_state=ctrl_state, base_gate=YGate()
)
def _define(self):
"""
gate cy a,b { sdg b; cx a,b; s b; }
"""
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .s import SGate, SdgGate
from .x import CXGate
q = QuantumRegister(2, "q")
qc = QuantumCircuit(q, name=self.name)
rules = [(SdgGate(), [q[1]], []), (CXGate(), [q[0], q[1]], []), (SGate(), [q[1]], [])]
for instr, qargs, cargs in rules:
qc._append(instr, qargs, cargs)
self.definition = qc
def inverse(self):
"""Return inverted CY gate (itself)."""
return CYGate(ctrl_state=self.ctrl_state) # self-inverse
def __array__(self, dtype=None):
"""Return a numpy.array for the CY gate."""
mat = self._matrix1 if self.ctrl_state else self._matrix0
if dtype:
return numpy.asarray(mat, dtype=dtype)
return mat
| 30.480198 | 100 | 0.520058 |
6b70333081fc5f7c5e18def4b6e8b0e409c28738 | 2,639 | py | Python
| src/main/python/parse/tree_sitter/aw_tree_sitter/ast.py | sarveshbhatnagar/astminer | e240162942aaf6fcd0c54a2396d4a179e85d4a4b | ["MIT"] | 36 | 2019-03-27T16:05:33.000Z | 2019-11-21T19:58:58.000Z
| src/main/python/parse/tree_sitter/aw_tree_sitter/ast.py | vovak/pathminer | 5692fa522863b76b0fe2260a075cdba402fe1122 | ["MIT"] | 26 | 2019-03-11T13:46:18.000Z | 2019-11-22T13:58:02.000Z
| src/main/python/parse/tree_sitter/aw_tree_sitter/ast.py | vovak/pathminer | 5692fa522863b76b0fe2260a075cdba402fe1122 | ["MIT"] | 17 | 2019-03-12T20:07:55.000Z | 2019-11-23T02:50:47.000Z |
from tree_sitter import TreeCursor
from typing import Optional, TypedDict, List
Position = TypedDict("Position", {"l": int, "c": int})
NodeRange = TypedDict("NodeRange", {"start": Position, "end": Position})
NodeAsDict = TypedDict(
"NodeAsDict", {"token": Optional[str], "nodeType": str, "range": NodeRange, "children": List[int]}
)
TreeAsDict = TypedDict("TreeAsDict", {"tree": List[NodeAsDict]})
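# Shape of one serialized node (illustrative values, following the TypedDicts above):
#   {"token": "foo", "nodeType": "identifier",
#    "range": {"start": {"l": 1, "c": 5}, "end": {"l": 1, "c": 8}},
#    "children": []}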
class TreeBuilder:
_cursor: TreeCursor
_file_bytes: bytes
def __init__(self, cursor: TreeCursor, file_bytes: bytes):
self._cursor = cursor
self._file_bytes = file_bytes
def _get_current_node_range(self) -> NodeRange:
node = self._cursor.node
start = node.start_point
end = node.end_point
return {
"start": {"l": start[0] + 1, "c": start[1] + 1},
"end": {"l": end[0] + 1, "c": end[1] + 1}
}
def _get_current_node_as_dict(self) -> NodeAsDict:
node_type = self._cursor.node.type
node_range = self._get_current_node_range()
if len(self._cursor.node.children) == 0:
node_value_bytes = self._file_bytes[self._cursor.node.start_byte : self._cursor.node.end_byte]
node_value: Optional[str] = node_value_bytes.decode("utf-8")
else:
node_value = None
return {"token": node_value, "nodeType": node_type, "range": node_range, "children": []}
def get_tree_as_dict(self) -> TreeAsDict:
depth = 0
tree = []
last_node_by_depth = {}
index = 0
while True:
# creating new node
node = self._get_current_node_as_dict()
last_node_by_depth[depth] = node
if depth > 0:
last_node_by_depth[depth - 1]["children"].append(index)
tree.append(node)
index += 1
# going deeper if we can
if self._cursor.goto_first_child():
depth += 1
continue
# trying to go right
if self._cursor.goto_next_sibling():
continue
# if we are at the deepest, right-most node,
# traverse up to find a node with a right sibling
found_right_sibling = False
while self._cursor.goto_parent():
depth -= 1
if self._cursor.goto_next_sibling():
found_right_sibling = True
break
if found_right_sibling:
continue
# if we couldn't find any new node to traverse
# end while loop
break
return {"tree": tree}
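# Minimal usage sketch (assumes py-tree-sitter with a pre-built language library;
# the .so path and file name below are placeholders, not part of this module):
#   from tree_sitter import Language, Parser
#   parser = Parser()
#   parser.set_language(Language('build/my-languages.so', 'python'))
#   file_bytes = open('example.py', 'rb').read()
#   tree = parser.parse(file_bytes)
#   print(TreeBuilder(tree.walk(), file_bytes).get_tree_as_dict())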
| 35.186667 | 106 | 0.571808 |
71a7a53a444487ab61b8c7e61e67400244403af0 | 11,331 | py | Python
| src/foolscap/appserver/client.py | jaraco/foolscap | 845bea550447991b194ef884713a7b3be4b4a6c2 | ["MIT"] | 29 | 2015-01-05T19:37:27.000Z | 2021-03-03T21:59:13.000Z
| src/foolscap/appserver/client.py | jaraco/foolscap | 845bea550447991b194ef884713a7b3be4b4a6c2 | ["MIT"] | 65 | 2015-03-01T03:18:03.000Z | 2022-03-24T16:00:48.000Z
| src/foolscap/appserver/client.py | jaraco/foolscap | 845bea550447991b194ef884713a7b3be4b4a6c2 | ["MIT"] | 22 | 2015-01-28T10:51:46.000Z | 2022-01-26T07:56:25.000Z |
from __future__ import print_function, unicode_literals
import six
import os, sys
from io import BytesIO
from twisted.python import usage
from twisted.internet import defer
# does "flappserver start" need us to refrain from importing the reactor here?
import foolscap
from foolscap.api import Tub, Referenceable, fireEventually
class BaseOptions(usage.Options):
def opt_h(self):
return self.opt_help()
class UploadFileOptions(BaseOptions):
def getSynopsis(self):
return "Usage: flappclient [--furl=|--furlfile] upload-file SOURCEFILES.."
def parseArgs(self, *sourcefiles):
self.sourcefiles = sourcefiles
longdesc = """This client sends one or more files to the upload-file
service waiting at the given FURL. All files will be placed in the
pre-configured target directory, using the basename of each SOURCEFILE
argument."""
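# Illustrative invocation (the furlfile path and file names are placeholders):
#   flappclient --furlfile=~/.upload.furl upload-file notes.txt report.pdf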
class Uploader(Referenceable):
def run(self, rref, sourcefile, name):
self.f = open(os.path.expanduser(sourcefile), "rb")
return rref.callRemote("putfile", six.ensure_binary(name), self)
def remote_read(self, size):
return self.f.read(size)
class UploadFile(Referenceable):
def run(self, rref, options):
d = defer.succeed(None)
for sf in options.sourcefiles:
name = os.path.basename(sf)
d.addCallback(self._upload, rref, sf, name)
d.addCallback(self._done, options, name)
d.addCallback(lambda _ign: 0)
return d
def _upload(self, _ignored, rref, sf, name):
return Uploader().run(rref, sf, name)
def _done(self, _ignored, options, name):
options.stdout.write(six.ensure_binary("%s: uploaded\n" % name))
class RunCommandOptions(BaseOptions):
def getSynopsis(self):
return "Usage: flappclient [--furl=|--furlfile] run-command"
longdesc = """This client triggers a prearranged command to be executed
by the run-command service waiting at the given FURL. The executable, its
working directory, and all arguments are configured by the server. Unless
the server has overridden the defaults, this client will emit the
command's stdout and stderr as it runs, and will exit with the same
result code as the remote command. If the server desires it, this client
will read data from stdin and send everything (plus a close-stdin event)
to the server.
This client has no control over the command being run or its
arguments."""
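# Illustrative invocation (the furlfile path is a placeholder):
#   flappclient --furlfile=~/.run.furl run-command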
from twisted.internet.stdio import StandardIO as TwitchyStandardIO
class StandardIO(TwitchyStandardIO):
def childConnectionLost(self, fd, reason):
# the usual StandardIO class doesn't seem to handle half-closed stdio
# well, specifically when our stdout is closed, then some data is
# written to our stdin. The class responds to stdout's closure by
# shutting down everything. I think this is related to
# ProcessWriter.doRead returning CONNECTION_LOST instead of
# CONNECTION_DONE (which ProcessWriter.connectionLost sends along to
# StandardIO.childConnectionLost). There is code in
# StandardIO.childConnectionLost to treat CONNECTION_DONE as a
# half-close, but not CONNECTION_LOST.
#
# so this hack is to make it look more like a half-close
#print >>sys.stderr, "my StandardIO.childConnectionLost", fd, reason.value
from twisted.internet import error, main
from twisted.python import failure
if reason.check(error.ConnectionLost) and fd == "write":
#print >>sys.stderr, " fixing"
reason = failure.Failure(main.CONNECTION_DONE)
return TwitchyStandardIO.childConnectionLost(self, fd, reason)
from twisted.internet.protocol import Protocol
#from zope.interface import implements
#from twisted.internet.interfaces import IHalfCloseableProtocol
def wrap_in_binary_mode(f):
if hasattr(f, "buffer"):
# py3 "text file", as returned by open(), or sys.std(in|out|err)
return f.buffer # _io.BufferedWriter
assert isinstance(f, BytesIO)
return f
class RunCommand(Referenceable, Protocol):
#implements(IHalfCloseableProtocol)
def run(self, rref, options):
self.done = False
self.d = defer.Deferred()
rref.notifyOnDisconnect(self._done, 3)
self.stdin_writer = None
self.stdio = options.stdio
self.stdout = options.stdout
self.stderr = options.stderr
d = rref.callRemote("execute", self)
d.addCallback(self._started)
d.addErrback(self._err)
return self.d
def dataReceived(self, data):
if not isinstance(data, bytes):
raise TypeError("stdin can accept only strings of bytes, not '%s'"
% (type(data),))
# this is from stdin. It shouldn't be called until after _started
# sets up stdio and self.stdin_writer
self.stdin_writer.callRemoteOnly("feed_stdin", data)
def connectionLost(self, reason):
# likewise, this won't be called unless _started wanted stdin
self.stdin_writer.callRemoteOnly("close_stdin")
def _started(self, stdin_writer):
if stdin_writer:
self.stdin_writer = stdin_writer # rref
self.stdio(self) # start accepting stdin
# otherwise they don't want our stdin, so leave stdin_writer=None
def remote_stdout(self, data):
#print(b"remote_stdout", type(data))
assert isinstance(data, bytes)
#print(data)
self.stdout.write(data)
self.stdout.flush()
#print(b"flushed stdout")
def remote_stderr(self, data):
assert isinstance(data, bytes)
self.stderr.write(data)
self.stderr.flush()
def remote_done(self, signal, exitcode):
if signal:
self._done(127)
else:
self._done(exitcode)
def _err(self, f):
self._done(f)
def _done(self, res):
if not self.done:
self.done = True
self.d.callback(res)
class ClientOptions(usage.Options):
synopsis = "Usage: flappclient [--furl=|--furlfile=] COMMAND"
optParameters = [
("furl", None, None, "FURL of the service to contact"),
("furlfile", "f", None, "file containing the FURL of the service"),
]
longdesc = """This client invokes a remote service that is running as
part of a 'flappserver'. Each service lives at a specific secret FURL,
which starts with 'pb://'. This FURL can be passed on the command line
with --furl=FURL, or it can be stored in a file (along with comment lines
that start with '#') and passed with --furlfile=FILE.
Each service has a specific COMMAND type, and the client invocation must
match the service. For more details on a specific command, run
'flappclient COMMAND --help', e.g. 'flappclient upload-file --help'.
"""
subCommands = [
("upload-file", None, UploadFileOptions, "upload a file"),
("run-command", None, RunCommandOptions, "cause a command to be run"),
]
def read_furlfile(self):
ff = os.path.expanduser(self["furlfile"])
for line in open(ff).readlines():
line = line.strip()
if line.startswith("pb://"):
return line
return None
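    # A hypothetical example of what a --furlfile might contain; read_furlfile()
    # above returns only the first line that starts with "pb://" and ignores the
    # '#' comment lines:
    #
    #     # FURL of the build host's upload-file service
    #     pb://TUBID@tcp:buildhost.example.org:12345/SWISSNUM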
def postOptions(self):
self.furl = self["furl"]
if self["furlfile"]:
self.furl = self.read_furlfile()
if not self.furl:
raise usage.UsageError("must provide --furl or --furlfile")
if not hasattr(self, 'subOptions'):
raise usage.UsageError("must specify a command")
def opt_help(self):
self.stdout.write(six.ensure_binary("%s\n" % (self.synopsis,)))
sys.exit(0)
def opt_version(self):
from twisted import copyright
self.stdout.write(six.ensure_binary("Foolscap version: %s\n" % foolscap.__version__))
self.stdout.write(six.ensure_binary("Twisted version: %s\n" % copyright.version))
sys.exit(0)
dispatch_table = {
"upload-file": UploadFile,
"run-command": RunCommand,
}
def parse_options(command_name, argv, stdio, stdout, stderr):
try:
config = ClientOptions()
config.stdout = stdout
config.stderr = stderr
config.parseOptions(argv)
config.subOptions.stdio = stdio # for streaming input
config.subOptions.stdout = stdout
config.subOptions.stderr = stderr
except usage.error as e:
stderr.write(six.ensure_binary("%s: %s\n" % (command_name, e)))
stderr.write(b"\n")
c = getattr(config, 'subOptions', config)
stderr.write(six.ensure_binary("%s\n" % (c,)))
sys.exit(1)
return config
def run_command(config):
c = dispatch_table[config.subCommand]()
tub = Tub()
try:
from twisted.internet import reactor
from twisted.internet.endpoints import clientFromString
from foolscap.connections import tor
CONTROL = os.environ.get("FOOLSCAP_TOR_CONTROL_PORT", "")
SOCKS = os.environ.get("FOOLSCAP_TOR_SOCKS_PORT", "")
if CONTROL:
h = tor.control_endpoint(clientFromString(reactor, CONTROL))
tub.addConnectionHintHandler("tor", h)
elif SOCKS:
h = tor.socks_endpoint(clientFromString(reactor, SOCKS))
tub.addConnectionHintHandler("tor", h)
#else:
# h = tor.default_socks()
# tub.addConnectionHintHandler("tor", h)
except ImportError:
pass
d = defer.succeed(None)
d.addCallback(lambda _ign: tub.startService())
d.addCallback(lambda _ign: tub.getReference(config.furl))
d.addCallback(c.run, config.subOptions) # might provide tub here
d.addBoth(lambda res: tub.stopService().addCallback(lambda _ign: res))
return d
def run_flappclient(argv=None, run_by_human=True, stdio=StandardIO):
if run_by_human:
stdout = wrap_in_binary_mode(sys.stdout)
stderr = wrap_in_binary_mode(sys.stderr)
else:
stdout = BytesIO()
stderr = BytesIO()
if argv:
command_name,argv = argv[0],argv[1:]
else:
command_name = sys.argv[0]
d = fireEventually()
d.addCallback(lambda _ign: parse_options(command_name, argv,
stdio, stdout, stderr))
d.addCallback(run_command)
if run_by_human:
# we need to spin up our own reactor
from twisted.internet import reactor
stash_rc = []
def good(rc):
stash_rc.append(rc)
reactor.stop()
def oops(f):
if f.check(SystemExit):
stash_rc.append(f.value.args[0])
else:
stderr.write(b"flappclient command failed:\n")
stderr.write(six.ensure_binary("%s\n" % (f,)))
stash_rc.append(-1)
reactor.stop()
d.addCallbacks(good, oops)
reactor.run()
sys.exit(stash_rc[0])
else:
def _convert_system_exit(f):
f.trap(SystemExit)
return f.value.args[0]
d.addErrback(_convert_system_exit)
def done(rc):
return (rc, stdout.getvalue(), stderr.getvalue())
d.addCallback(done)
return d
avg_line_length: 37.39604 | max_line_length: 93 | alphanum_fraction: 0.64628

hexsha: f3ba33898891601186655df4f65210004ddb2d04 | size: 3,308 | ext: py | lang: Python
max_stars_repo_path: server/greenlite/settings.py | max_stars_repo_name: aoswalt/greenlite-hardware | max_stars_repo_head_hexsha: 056ed78829519f49adab60dbcf67878243fe764e
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: server/greenlite/settings.py | max_issues_repo_name: aoswalt/greenlite-hardware | max_issues_repo_head_hexsha: 056ed78829519f49adab60dbcf67878243fe764e
max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2016-11-01T23:55:07.000Z | max_issues_repo_issues_event_max_datetime: 2016-11-01T23:55:07.000Z
max_forks_repo_path: server/greenlite/settings.py | max_forks_repo_name: aoswalt/greenlite-hardware | max_forks_repo_head_hexsha: 056ed78829519f49adab60dbcf67878243fe764e
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
"""
Django settings for greenlite project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'bsd3ra-omkxl=16)lczo+u8#0&_wvs9q#4anz$&umob&p+4lf0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'scheduler',
'corsheaders',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ORIGIN_ALLOW_ALL = True
CORS_ORIGIN_WHITELIST = (
'localhost:8000',
'127.0.0.1:8000',
)
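# Note: with CORS_ORIGIN_ALLOW_ALL set to True above, django-cors-headers
# effectively allows every origin and this whitelist is not consulted; the
# whitelist only takes effect when the allow-all flag is False, e.g. (sketch only):
#
#     CORS_ORIGIN_ALLOW_ALL = False
#     CORS_ORIGIN_WHITELIST = ('localhost:8000', '127.0.0.1:8000')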
ROOT_URLCONF = 'greenlite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'greenlite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
avg_line_length: 25.251908 | max_line_length: 91 | alphanum_fraction: 0.696493

hexsha: 9ef11a7b275548648ca1e7d9d7451ca9d6f3ffd0 | size: 809 | ext: py | lang: Python
max_stars_repo_path: django/urls.py | max_stars_repo_name: Sravan996/django | max_stars_repo_head_hexsha: 3a982382d5cfe9bfb498534f1effcf58a3771539
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: django/urls.py | max_issues_repo_name: Sravan996/django | max_issues_repo_head_hexsha: 3a982382d5cfe9bfb498534f1effcf58a3771539
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: django/urls.py | max_forks_repo_name: Sravan996/django | max_forks_repo_head_hexsha: 3a982382d5cfe9bfb498534f1effcf58a3771539
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from new import views
urlpatterns = [
path('admin/', admin.site.urls),
path('',views.index,name='index'),
]
avg_line_length: 33.708333 | max_line_length: 77 | alphanum_fraction: 0.70581

hexsha: 6f9b6d22aa9971a64955c32c64852367eb09e181 | size: 1,110 | ext: py | lang: Python
max_stars_repo_path: blog/migrations/0006_auto_20191214_1618.py | max_stars_repo_name: vijay-chauhan07/Xtension | max_stars_repo_head_hexsha: 288c4d4f57428d07ca06874e9ff1c1844fdb915a
max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2020-01-12T13:11:19.000Z | max_stars_repo_stars_event_max_datetime: 2020-01-12T13:11:19.000Z
max_issues_repo_path: blog/migrations/0006_auto_20191214_1618.py | max_issues_repo_name: vijay-chauhan07/Xtension | max_issues_repo_head_hexsha: 288c4d4f57428d07ca06874e9ff1c1844fdb915a
max_issues_repo_licenses: ["MIT"] | max_issues_count: 11 | max_issues_repo_issues_event_min_datetime: 2020-06-05T20:40:43.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-12T00:17:09.000Z
max_forks_repo_path: blog/migrations/0006_auto_20191214_1618.py | max_forks_repo_name: vijay-chauhan07/Xtension | max_forks_repo_head_hexsha: 288c4d4f57428d07ca06874e9ff1c1844fdb915a
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# Generated by Django 3.0 on 2019-12-14 10:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0005_auto_20191213_1722'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(auto_now=True, db_index=True)),
('user_from', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rel_from_set', to=settings.AUTH_USER_MODEL)),
('user_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rel_to_set', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-date',),
},
),
migrations.DeleteModel(
name='Follow',
),
]
avg_line_length: 34.6875 | max_line_length: 152 | alphanum_fraction: 0.620721

hexsha: b0861a770ea55ce870e351f90f978959e4b0f5d2 | size: 5,744 | ext: py | lang: Python
max_stars_repo_path: utils.py | max_stars_repo_name: deadrobots/BoVot-17 | max_stars_repo_head_hexsha: ac9795f92207dd6b6689cce9a5d588b89985a652
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: utils.py | max_issues_repo_name: deadrobots/BoVot-17 | max_issues_repo_head_hexsha: ac9795f92207dd6b6689cce9a5d588b89985a652
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: utils.py | max_forks_repo_name: deadrobots/BoVot-17 | max_forks_repo_head_hexsha: ac9795f92207dd6b6689cce9a5d588b89985a652
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
'''
Created on Jan 3, 2016
@author: graysonelias
'''
'''
This module provides some of our standard methods.
'''
import constants as c
from wallaby import ao
from wallaby import msleep
from wallaby import digital
from wallaby import seconds
from wallaby import freeze
from wallaby import set_servo_position
from wallaby import get_servo_position
from wallaby import analog
from wallaby import enable_servos
from motorsPlusPlus import rotate
from motorsPlusPlus import pivot_left
from motorsPlusPlus import drive_condition
from motorsPlusPlus import drive_speed
# Servo Constants
DELAY = 10
# Loop break timers #
time = 0 # This represents how long to wait before breaking a loop.
#Causes the robot to stop until the right button is pressed
def waitForButton():
print "Press Button..."
while not digital(c.RIGHT_BUTTON):
pass
msleep(1)
print "Pressed"
msleep(1000)
#Causes the robot to stop
def DEBUG():
freeze(c.LMOTOR)
freeze(c.RMOTOR)
ao()
print 'Program stop for DEBUG\nSeconds: ', seconds() - c.startTime
exit(0)
#Causes the robot to stop and hold its position for 5 seconds
def DEBUGwithWait():
freeze(c.LMOTOR)
freeze(c.RMOTOR)
ao()
msleep(5000)
print 'Program stop for DEBUG\nSeconds: ', seconds() - c.startTime
exit(0)
#Checks if there is a black line under the left tophat
def seeLineOne():
return analog(0) < 2000
#Checks if there is a black line under the right tophat
def seeLineTwo():
return analog(1) > 2000
#Checks to see if all of the servos, motors, and sensors are working properly
def start_up_test():
set_servo_position(c.servoCowArm, c.cowArmDown)
enable_servos()
move_servo(c.servoCowArm, c.cowArmUp, 500)
pivot_left(45, 25)
msleep(250)
pivot_left(-45, 25)
msleep(250)
move_servo(c.servoArm, c.armUp, 500)
move_servo(c.servoClaw, c.clawOpen, 500)
move_servo(c.servoCowClaw, c.cowClawOpen, 500)
print "i need to see something"
while analog(c.ET) < 1000:
pass
print "now i dont"
while analog(c.ET) > 1000:
pass
print "show me again"
while analog(c.ET) < 1000:
pass
msleep(250)
drive_condition(100, 100, seeLineOne, True)
msleep(250)
drive_condition(-100, -100, seeLineTwo, True)
# Servo Control #
# Moves a servo with increment "speed".
def move_servo(servo, endPos, speed=10):
# speed of 1 is slow
# speed of 2000 is fast
# speed of 10 is the default
now = get_servo_position(servo)
if speed == 0:
speed = 2047
if endPos >= 2048:
print "Programmer Error"
exit(0)
if endPos < 0:
print "Programmer Error"
exit(0)
if now > endPos:
speed = -speed
for i in range(int(now), int(endPos), int(speed)):
set_servo_position(servo, i)
msleep(DELAY)
set_servo_position(servo, endPos)
msleep(DELAY)
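# A hypothetical usage sketch for move_servo(): the port and position constants
# come from this project's constants module (c), and speed is the step size
# applied every DELAY milliseconds, so smaller values give a slower sweep.
def _example_servo_moves():  # hypothetical helper; never called by this module
    move_servo(c.servoClaw, c.clawOpen, 5)    # small step -> slow, gentle open
    move_servo(c.servoArm, c.armUp, 2000)     # huge step  -> effectively instant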
# Moves a servo with increment "speed".
def move_servo_on_white(servo, endPos, speed=10):
# speed of 1 is slow
# speed of 2000 is fast
# speed of 10 is the default
now = get_servo_position(servo)
if speed == 0:
speed = 2047
if endPos >= 2048:
print "Programmer Error"
exit(0)
if endPos < 0:
print "Programmer Error"
exit(0)
if now > endPos:
speed = -speed
for i in range(int(now), int(endPos), int(speed)):
set_servo_position(servo, i)
msleep(DELAY)
if seeBlackLeft():
rotate(20, 25)
set_servo_position(servo, endPos)
msleep(DELAY)
#Gets the robot to the correct start position
def position():
if c.isClone:
drive_speed(-2, 25)
drive_speed(2.15, 50)
pivot_left(48, 25)
drive_speed(.03, 25)
else:
#drive_speed(-1, 15)
drive_speed(3.6, 50)
rotate(-40, 25) #use to be 49
#drive_speed(.03, 25)
def seeBlackLeft():
return analog(c.LTOPHAT) > 1500
def seeBlackRight():
return analog(c.RTOPHAT) > 1500
def seeBlackRightTime():
return analog(c.RTOPHAT) > 1500 and getWait()
# Moves a servo over a specific time.
def move_servo_timed(servo, endPos, time):
if time == 0:
speed = 2047
else:
speed = abs((DELAY * (get_servo_position(servo) - endPos)) / time)
move_servo(servo, endPos, speed)
# Sets wait time in seconds before breaking a loop.
def setWait(DELAY):
global time
time = seconds() + DELAY
# Used to break a loop after using "setWait". An example would be: setWait(10); while True and getWait(): do_something() (see the sketch below).
def getWait():
return seconds() < time
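# A hypothetical sketch of the setWait/getWait pattern described above: poll a
# sensor until it trips or until 10 seconds have passed, whichever comes first.
def _example_timed_poll():  # hypothetical helper; never called by this module
    setWait(10)
    while getWait() and not seeBlackLeft():
        msleep(10)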
def wait4light():
while not calibrate(c.STARTLIGHT):
pass
wait4(c.STARTLIGHT)
from wallaby import left_button, right_button
def calibrate(port):
print "Press LEFT button with light on"
while not left_button():
pass
while left_button():
pass
lightOn = analog(port)
print "On value =", lightOn
if lightOn > 200:
print "Bad calibration"
return False
msleep(1000)
print "Press RIGHT button with light off"
while not right_button():
pass
while right_button():
pass
lightOff = analog(port)
print "Off value =", lightOff
if lightOff < 3000:
print "Bad calibration"
return False
if (lightOff - lightOn) < 2000:
print "Bad calibration"
return False
c.startLightThresh = (lightOff - lightOn) / 2
print "Good calibration! ", c.startLightThresh
return True
def wait4(port):
print "waiting for light!! "
if c.seeding:
print("SEEDING")
else:
print("HEAD TO HEAD")
while analog(port) > c.startLightThresh:
pass
avg_line_length: 24.236287 | max_line_length: 122 | alphanum_fraction: 0.649547

hexsha: 0f6241bea5e71933f241c8b019263601035236d5 | size: 55,829 | ext: py | lang: Python
max_stars_repo_path: sanic/app.py | max_stars_repo_name: Lin0818/sanic | max_stars_repo_head_hexsha: 0cb342aef4c8cfd8a7287f800dc9a3487b1360ca
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: sanic/app.py | max_issues_repo_name: Lin0818/sanic | max_issues_repo_head_hexsha: 0cb342aef4c8cfd8a7287f800dc9a3487b1360ca
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sanic/app.py | max_forks_repo_name: Lin0818/sanic | max_forks_repo_head_hexsha: 0cb342aef4c8cfd8a7287f800dc9a3487b1360ca
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
from __future__ import annotations
import asyncio
import logging
import logging.config
import re
import sys
from asyncio import (
AbstractEventLoop,
CancelledError,
Task,
ensure_future,
get_event_loop,
get_running_loop,
wait_for,
)
from asyncio.futures import Future
from collections import defaultdict, deque
from contextlib import suppress
from functools import partial
from inspect import isawaitable
from socket import socket
from traceback import format_exc
from types import SimpleNamespace
from typing import (
TYPE_CHECKING,
Any,
AnyStr,
Awaitable,
Callable,
Coroutine,
Deque,
Dict,
Iterable,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import urlencode, urlunparse
from warnings import filterwarnings
from sanic_routing.exceptions import ( # type: ignore
FinalizationError,
NotFound,
)
from sanic_routing.route import Route # type: ignore
from sanic.application.ext import setup_ext
from sanic.application.state import ApplicationState, Mode, ServerStage
from sanic.asgi import ASGIApp
from sanic.base.root import BaseSanic
from sanic.blueprint_group import BlueprintGroup
from sanic.blueprints import Blueprint
from sanic.compat import OS_IS_WINDOWS, enable_windows_color_support
from sanic.config import SANIC_PREFIX, Config
from sanic.exceptions import (
InvalidUsage,
SanicException,
ServerError,
URLBuildError,
)
from sanic.handlers import ErrorHandler
from sanic.http import Stage
from sanic.log import (
LOGGING_CONFIG_DEFAULTS,
deprecation,
error_logger,
logger,
)
from sanic.mixins.listeners import ListenerEvent
from sanic.mixins.runner import RunnerMixin
from sanic.models.futures import (
FutureException,
FutureListener,
FutureMiddleware,
FutureRegistry,
FutureRoute,
FutureSignal,
FutureStatic,
)
from sanic.models.handler_types import ListenerType, MiddlewareType
from sanic.models.handler_types import Sanic as SanicVar
from sanic.request import Request
from sanic.response import BaseHTTPResponse, HTTPResponse, ResponseStream
from sanic.router import Router
from sanic.server.websockets.impl import ConnectionClosed
from sanic.signals import Signal, SignalRouter
from sanic.touchup import TouchUp, TouchUpMeta
if TYPE_CHECKING: # no cov
try:
from sanic_ext import Extend # type: ignore
from sanic_ext.extensions.base import Extension # type: ignore
except ImportError:
Extend = TypeVar("Extend") # type: ignore
if OS_IS_WINDOWS: # no cov
enable_windows_color_support()
filterwarnings("once", category=DeprecationWarning)
class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
"""
The main application instance
"""
__touchup__ = (
"handle_request",
"handle_exception",
"_run_response_middleware",
"_run_request_middleware",
)
__slots__ = (
"_asgi_app",
"_asgi_client",
"_blueprint_order",
"_delayed_tasks",
"_ext",
"_future_exceptions",
"_future_listeners",
"_future_middleware",
"_future_registry",
"_future_routes",
"_future_signals",
"_future_statics",
"_state",
"_task_registry",
"_test_client",
"_test_manager",
"blueprints",
"config",
"configure_logging",
"ctx",
"error_handler",
"go_fast",
"listeners",
"named_request_middleware",
"named_response_middleware",
"request_class",
"request_middleware",
"response_middleware",
"router",
"signal_router",
"sock",
"strict_slashes",
"websocket_enabled",
"websocket_tasks",
)
_app_registry: Dict[str, "Sanic"] = {}
_uvloop_setting = None # TODO: Remove in v22.6
test_mode = False
def __init__(
self,
name: str = None,
config: Optional[Config] = None,
ctx: Optional[Any] = None,
router: Optional[Router] = None,
signal_router: Optional[SignalRouter] = None,
error_handler: Optional[ErrorHandler] = None,
env_prefix: Optional[str] = SANIC_PREFIX,
request_class: Optional[Type[Request]] = None,
strict_slashes: bool = False,
log_config: Optional[Dict[str, Any]] = None,
configure_logging: bool = True,
register: Optional[bool] = None,
dumps: Optional[Callable[..., AnyStr]] = None,
) -> None:
super().__init__(name=name)
# logging
if configure_logging:
dict_config = log_config or LOGGING_CONFIG_DEFAULTS
logging.config.dictConfig(dict_config) # type: ignore
if config and env_prefix != SANIC_PREFIX:
raise SanicException(
"When instantiating Sanic with config, you cannot also pass "
"env_prefix"
)
# First setup config
self.config: Config = config or Config(env_prefix=env_prefix)
# Then we can do the rest
self._asgi_client: Any = None
self._blueprint_order: List[Blueprint] = []
self._delayed_tasks: List[str] = []
self._future_registry: FutureRegistry = FutureRegistry()
self._state: ApplicationState = ApplicationState(app=self)
self._task_registry: Dict[str, Task] = {}
self._test_client: Any = None
self._test_manager: Any = None
self.asgi = False
self.auto_reload = False
self.blueprints: Dict[str, Blueprint] = {}
self.configure_logging: bool = configure_logging
self.ctx: Any = ctx or SimpleNamespace()
self.error_handler: ErrorHandler = error_handler or ErrorHandler()
self.listeners: Dict[str, List[ListenerType[Any]]] = defaultdict(list)
self.named_request_middleware: Dict[str, Deque[MiddlewareType]] = {}
self.named_response_middleware: Dict[str, Deque[MiddlewareType]] = {}
self.request_class: Type[Request] = request_class or Request
self.request_middleware: Deque[MiddlewareType] = deque()
self.response_middleware: Deque[MiddlewareType] = deque()
self.router: Router = router or Router()
self.signal_router: SignalRouter = signal_router or SignalRouter()
self.sock: Optional[socket] = None
self.strict_slashes: bool = strict_slashes
self.websocket_enabled: bool = False
self.websocket_tasks: Set[Future[Any]] = set()
# Register alternative method names
self.go_fast = self.run
if register is not None:
deprecation(
"The register argument is deprecated and will stop working "
"in v22.6. After v22.6 all apps will be added to the Sanic "
"app registry.",
22.6,
)
self.config.REGISTER = register
if self.config.REGISTER:
self.__class__.register_app(self)
self.router.ctx.app = self
self.signal_router.ctx.app = self
if dumps:
BaseHTTPResponse._dumps = dumps # type: ignore
@property
def loop(self):
"""
Synonymous with asyncio.get_event_loop().
.. note::
Only supported when using the `app.run` method.
"""
if self.state.stage is ServerStage.STOPPED and self.asgi is False:
raise SanicException(
"Loop can only be retrieved after the app has started "
"running. Not supported with `create_server` function"
)
return get_event_loop()
# -------------------------------------------------------------------- #
# Registration
# -------------------------------------------------------------------- #
def register_listener(
self, listener: ListenerType[SanicVar], event: str
) -> ListenerType[SanicVar]:
"""
Register the listener for a given event.
:param listener: callable i.e. setup_db(app, loop)
:param event: when to register listener i.e. 'before_server_start'
:return: listener
"""
try:
_event = ListenerEvent[event.upper()]
except (ValueError, AttributeError):
valid = ", ".join(
map(lambda x: x.lower(), ListenerEvent.__members__.keys())
)
raise InvalidUsage(f"Invalid event: {event}. Use one of: {valid}")
if "." in _event:
self.signal(_event.value)(
partial(self._listener, listener=listener)
)
else:
self.listeners[_event.value].append(listener)
return listener
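    # A minimal sketch of the decorator form that funnels into register_listener;
    # the app name, handler name, and connect_db() below are placeholders:
    #
    #     app = Sanic("MyApp")
    #
    #     @app.listener("before_server_start")
    #     async def setup_db(app, loop):
    #         app.ctx.db = await connect_db()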
def register_middleware(
self, middleware: MiddlewareType, attach_to: str = "request"
) -> MiddlewareType:
"""
Register an application level middleware that will be attached
to all the API URLs registered under this application.
This method is internally invoked by the :func:`middleware`
decorator provided at the app level.
:param middleware: Callback method to be attached to the
middleware
:param attach_to: The state at which the middleware needs to be
invoked in the lifecycle of an *HTTP Request*.
**request** - Invoke before the request is processed
**response** - Invoke before the response is returned back
:return: decorated method
"""
if attach_to == "request":
if middleware not in self.request_middleware:
self.request_middleware.append(middleware)
if attach_to == "response":
if middleware not in self.response_middleware:
self.response_middleware.appendleft(middleware)
return middleware
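    # A minimal sketch of the middleware decorator that funnels into
    # register_middleware; handler names and the header value are placeholders:
    #
    #     @app.middleware("request")
    #     async def tag_request(request):
    #         request.ctx.request_id = "placeholder-id"
    #
    #     @app.middleware("response")
    #     async def stamp_response(request, response):
    #         response.headers["X-Served-By"] = "example"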
def register_named_middleware(
self,
middleware: MiddlewareType,
route_names: Iterable[str],
attach_to: str = "request",
):
"""
Method for attaching middleware to specific routes. This is mainly an
internal tool for use by Blueprints to attach middleware to only its
specific routes. But, it could be used in a more generalized fashion.
:param middleware: the middleware to execute
:param route_names: a list of the names of the endpoints
:type route_names: Iterable[str]
:param attach_to: whether to attach to request or response,
defaults to "request"
:type attach_to: str, optional
"""
if attach_to == "request":
for _rn in route_names:
if _rn not in self.named_request_middleware:
self.named_request_middleware[_rn] = deque()
if middleware not in self.named_request_middleware[_rn]:
self.named_request_middleware[_rn].append(middleware)
if attach_to == "response":
for _rn in route_names:
if _rn not in self.named_response_middleware:
self.named_response_middleware[_rn] = deque()
if middleware not in self.named_response_middleware[_rn]:
self.named_response_middleware[_rn].appendleft(middleware)
return middleware
def _apply_exception_handler(
self,
handler: FutureException,
route_names: Optional[List[str]] = None,
):
"""Decorate a function to be registered as a handler for exceptions
:param exceptions: exceptions
:return: decorated function
"""
for exception in handler.exceptions:
if isinstance(exception, (tuple, list)):
for e in exception:
self.error_handler.add(e, handler.handler, route_names)
else:
self.error_handler.add(exception, handler.handler, route_names)
return handler.handler
def _apply_listener(self, listener: FutureListener):
return self.register_listener(listener.listener, listener.event)
def _apply_route(self, route: FutureRoute) -> List[Route]:
params = route._asdict()
websocket = params.pop("websocket", False)
subprotocols = params.pop("subprotocols", None)
if websocket:
self.enable_websocket()
websocket_handler = partial(
self._websocket_handler,
route.handler,
subprotocols=subprotocols,
)
websocket_handler.__name__ = route.handler.__name__ # type: ignore
websocket_handler.is_websocket = True # type: ignore
params["handler"] = websocket_handler
ctx = params.pop("route_context")
routes = self.router.add(**params)
if isinstance(routes, Route):
routes = [routes]
for r in routes:
r.ctx.websocket = websocket
r.ctx.static = params.get("static", False)
r.ctx.__dict__.update(ctx)
return routes
def _apply_static(self, static: FutureStatic) -> Route:
return self._register_static(static)
def _apply_middleware(
self,
middleware: FutureMiddleware,
route_names: Optional[List[str]] = None,
):
if route_names:
return self.register_named_middleware(
middleware.middleware, route_names, middleware.attach_to
)
else:
return self.register_middleware(
middleware.middleware, middleware.attach_to
)
def _apply_signal(self, signal: FutureSignal) -> Signal:
return self.signal_router.add(*signal)
def dispatch(
self,
event: str,
*,
condition: Optional[Dict[str, str]] = None,
context: Optional[Dict[str, Any]] = None,
fail_not_found: bool = True,
inline: bool = False,
reverse: bool = False,
) -> Coroutine[Any, Any, Awaitable[Any]]:
return self.signal_router.dispatch(
event,
context=context,
condition=condition,
inline=inline,
reverse=reverse,
fail_not_found=fail_not_found,
)
async def event(
self, event: str, timeout: Optional[Union[int, float]] = None
):
signal = self.signal_router.name_index.get(event)
if not signal:
if self.config.EVENT_AUTOREGISTER:
self.signal_router.reset()
self.add_signal(None, event)
signal = self.signal_router.name_index[event]
self.signal_router.finalize()
else:
raise NotFound("Could not find signal %s" % event)
return await wait_for(signal.ctx.event.wait(), timeout=timeout)
def enable_websocket(self, enable=True):
"""Enable or disable the support for websocket.
Websocket is enabled automatically if websocket routes are
added to the application.
"""
if not self.websocket_enabled:
# if the server is stopped, we want to cancel any ongoing
# websocket tasks, to allow the server to exit promptly
self.listener("before_server_stop")(self._cancel_websocket_tasks)
self.websocket_enabled = enable
def blueprint(
self,
blueprint: Union[
Blueprint, List[Blueprint], Tuple[Blueprint], BlueprintGroup
],
**options: Any,
):
"""Register a blueprint on the application.
:param blueprint: Blueprint object or (list, tuple) thereof
:param options: option dictionary with blueprint defaults
:return: Nothing
"""
if isinstance(blueprint, (list, tuple, BlueprintGroup)):
for item in blueprint:
params = {**options}
if isinstance(blueprint, BlueprintGroup):
if blueprint.url_prefix:
merge_from = [
options.get("url_prefix", ""),
blueprint.url_prefix,
]
if not isinstance(item, BlueprintGroup):
merge_from.append(item.url_prefix or "")
merged_prefix = "/".join(
u.strip("/") for u in merge_from
).rstrip("/")
params["url_prefix"] = f"/{merged_prefix}"
for _attr in ["version", "strict_slashes"]:
if getattr(item, _attr) is None:
params[_attr] = getattr(
blueprint, _attr
) or options.get(_attr)
if item.version_prefix == "/v":
if blueprint.version_prefix == "/v":
params["version_prefix"] = options.get(
"version_prefix"
)
else:
params["version_prefix"] = blueprint.version_prefix
self.blueprint(item, **params)
return
if blueprint.name in self.blueprints:
assert self.blueprints[blueprint.name] is blueprint, (
'A blueprint with the name "%s" is already registered. '
"Blueprint names must be unique." % (blueprint.name,)
)
else:
self.blueprints[blueprint.name] = blueprint
self._blueprint_order.append(blueprint)
if (
self.strict_slashes is not None
and blueprint.strict_slashes is None
):
blueprint.strict_slashes = self.strict_slashes
blueprint.register(self, options)
def url_for(self, view_name: str, **kwargs):
"""Build a URL based on a view name and the values provided.
In order to build a URL, all request parameters must be supplied as
keyword arguments, and each parameter must pass the test for the
specified parameter type. If these conditions are not met, a
`URLBuildError` will be thrown.
Keyword arguments that are not request parameters will be included in
the output URL's query string.
There are several _special_ keyword arguments that will alter how the
URL will be returned:
1. **_anchor**: ``str`` - Adds an ``#anchor`` to the end
2. **_scheme**: ``str`` - Should be either ``"http"`` or ``"https"``,
default is ``"http"``
3. **_external**: ``bool`` - Whether to return the path or a full URL
with scheme and host
4. **_host**: ``str`` - Used when one or more hosts are defined for a
route to tell Sanic which to use
(only applies with ``_external=True``)
5. **_server**: ``str`` - If not using ``_host``, this will be used
for defining the hostname of the URL
(only applies with ``_external=True``),
defaults to ``app.config.SERVER_NAME``
If you want the PORT to appear in your URL, you should set it in:
.. code-block::
app.config.SERVER_NAME = "myserver:7777"
`See user guide re: routing
<https://sanicframework.org/guide/basics/routing.html#generating-a-url>`__
:param view_name: string referencing the view name
:param kwargs: keys and values that are used to build request
parameters and query string arguments.
:return: the built URL
Raises:
URLBuildError
"""
# find the route by the supplied view name
kw: Dict[str, str] = {}
# special static files url_for
if "." not in view_name:
view_name = f"{self.name}.{view_name}"
if view_name.endswith(".static"):
name = kwargs.pop("name", None)
if name:
view_name = view_name.replace("static", name)
kw.update(name=view_name)
route = self.router.find_route_by_view_name(view_name, **kw)
if not route:
raise URLBuildError(
f"Endpoint with name `{view_name}` was not found"
)
uri = route.path
if getattr(route.ctx, "static", None):
filename = kwargs.pop("filename", "")
# it's static folder
if "__file_uri__" in uri:
folder_ = uri.split("<__file_uri__:", 1)[0]
if folder_.endswith("/"):
folder_ = folder_[:-1]
if filename.startswith("/"):
filename = filename[1:]
kwargs["__file_uri__"] = filename
if (
uri != "/"
and uri.endswith("/")
and not route.strict
and not route.raw_path[:-1]
):
uri = uri[:-1]
if not uri.startswith("/"):
uri = f"/{uri}"
out = uri
# _method is only a placeholder now, don't know how to support it
kwargs.pop("_method", None)
anchor = kwargs.pop("_anchor", "")
# _external need SERVER_NAME in config or pass _server arg
host = kwargs.pop("_host", None)
external = kwargs.pop("_external", False) or bool(host)
scheme = kwargs.pop("_scheme", "")
if route.ctx.hosts and external:
if not host and len(route.ctx.hosts) > 1:
raise ValueError(
f"Host is ambiguous: {', '.join(route.ctx.hosts)}"
)
elif host and host not in route.ctx.hosts:
raise ValueError(
f"Requested host ({host}) is not available for this "
f"route: {route.ctx.hosts}"
)
elif not host:
host = list(route.ctx.hosts)[0]
if scheme and not external:
raise ValueError("When specifying _scheme, _external must be True")
netloc = kwargs.pop("_server", None)
if netloc is None and external:
netloc = host or self.config.get("SERVER_NAME", "")
if external:
if not scheme:
if ":" in netloc[:8]:
scheme = netloc[:8].split(":", 1)[0]
else:
scheme = "http"
if "://" in netloc[:8]:
netloc = netloc.split("://", 1)[-1]
# find all the parameters we will need to build in the URL
# matched_params = re.findall(self.router.parameter_pattern, uri)
route.finalize()
for param_info in route.params.values():
# name, _type, pattern = self.router.parse_parameter_string(match)
# we only want to match against each individual parameter
try:
supplied_param = str(kwargs.pop(param_info.name))
except KeyError:
raise URLBuildError(
f"Required parameter `{param_info.name}` was not "
"passed to url_for"
)
# determine if the parameter supplied by the caller
# passes the test in the URL
if param_info.pattern:
pattern = (
param_info.pattern[1]
if isinstance(param_info.pattern, tuple)
else param_info.pattern
)
passes_pattern = pattern.match(supplied_param)
if not passes_pattern:
if param_info.cast != str:
msg = (
f'Value "{supplied_param}" '
f"for parameter `{param_info.name}` does "
"not match pattern for type "
f"`{param_info.cast.__name__}`: "
f"{pattern.pattern}"
)
else:
msg = (
f'Value "{supplied_param}" for parameter '
f"`{param_info.name}` does not satisfy "
f"pattern {pattern.pattern}"
)
raise URLBuildError(msg)
# replace the parameter in the URL with the supplied value
replacement_regex = f"(<{param_info.name}.*?>)"
out = re.sub(replacement_regex, supplied_param, out)
# parse the remainder of the keyword arguments into a querystring
query_string = urlencode(kwargs, doseq=True) if kwargs else ""
# scheme://netloc/path;parameters?query#fragment
out = urlunparse((scheme, netloc, out, "", query_string, anchor))
return out
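    # A hypothetical url_for sketch using the special keyword arguments from the
    # docstring above; the view name and its route pattern are placeholders:
    #
    #     app.url_for("user_handler", user_id=5, page=2)
    #     # -> "/users/5?page=2"   (extra kwargs become the query string)
    #
    #     app.url_for("user_handler", user_id=5, _external=True,
    #                 _scheme="https", _server="example.com:7777")
    #     # -> "https://example.com:7777/users/5"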
# -------------------------------------------------------------------- #
# Request Handling
# -------------------------------------------------------------------- #
async def handle_exception(
self, request: Request, exception: BaseException
): # no cov
"""
A handler that catches specific exceptions and outputs a response.
:param request: The current request object
:param exception: The exception that was raised
:raises ServerError: response 500
"""
await self.dispatch(
"http.lifecycle.exception",
inline=True,
context={"request": request, "exception": exception},
)
if (
request.stream is not None
and request.stream.stage is not Stage.HANDLER
):
error_logger.exception(exception, exc_info=True)
logger.error(
"The error response will not be sent to the client for "
f'the following exception:"{exception}". A previous response '
"has at least partially been sent."
)
# ----------------- deprecated -----------------
handler = self.error_handler._lookup(
exception, request.name if request else None
)
if handler:
deprecation(
"An error occurred while handling the request after at "
"least some part of the response was sent to the client. "
"Therefore, the response from your custom exception "
f"handler {handler.__name__} will not be sent to the "
"client. Beginning in v22.6, Sanic will stop executing "
"custom exception handlers in this scenario. Exception "
"handlers should only be used to generate the exception "
"responses. If you would like to perform any other "
"action on a raised exception, please consider using a "
"signal handler like "
'`@app.signal("http.lifecycle.exception")`\n'
"For further information, please see the docs: "
"https://sanicframework.org/en/guide/advanced/"
"signals.html",
22.6,
)
try:
response = self.error_handler.response(request, exception)
if isawaitable(response):
response = await response
except BaseException as e:
logger.error("An error occurred in the exception handler.")
error_logger.exception(e)
# ----------------------------------------------
return
# -------------------------------------------- #
# Request Middleware
# -------------------------------------------- #
response = await self._run_request_middleware(
request, request_name=None
)
# No middleware results
if not response:
try:
response = self.error_handler.response(request, exception)
if isawaitable(response):
response = await response
except Exception as e:
if isinstance(e, SanicException):
response = self.error_handler.default(request, e)
elif self.debug:
response = HTTPResponse(
(
f"Error while handling error: {e}\n"
f"Stack: {format_exc()}"
),
status=500,
)
else:
response = HTTPResponse(
"An error occurred while handling an error", status=500
)
if response is not None:
try:
request.reset_response()
response = await request.respond(response)
except BaseException:
# Skip response middleware
if request.stream:
request.stream.respond(response)
await response.send(end_stream=True)
raise
else:
if request.stream:
response = request.stream.response
# Marked for cleanup and DRY with handle_request/handle_exception
        # when ResponseStream is no longer supported
if isinstance(response, BaseHTTPResponse):
await self.dispatch(
"http.lifecycle.response",
inline=True,
context={
"request": request,
"response": response,
},
)
await response.send(end_stream=True)
elif isinstance(response, ResponseStream):
resp = await response(request)
await self.dispatch(
"http.lifecycle.response",
inline=True,
context={
"request": request,
"response": resp,
},
)
await response.eof()
else:
raise ServerError(
f"Invalid response type {response!r} (need HTTPResponse)"
)
async def handle_request(self, request: Request): # no cov
"""Take a request from the HTTP Server and return a response object
        to be sent back. The HTTP Server only expects a response object, so
exception handling must be done here
:param request: HTTP Request object
:return: Nothing
"""
await self.dispatch(
"http.lifecycle.handle",
inline=True,
context={"request": request},
)
# Define `response` var here to remove warnings about
# allocation before assignment below.
response = None
try:
await self.dispatch(
"http.routing.before",
inline=True,
context={"request": request},
)
# Fetch handler from router
route, handler, kwargs = self.router.get(
request.path,
request.method,
request.headers.getone("host", None),
)
request._match_info = {**kwargs}
request.route = route
await self.dispatch(
"http.routing.after",
inline=True,
context={
"request": request,
"route": route,
"kwargs": kwargs,
"handler": handler,
},
)
if (
request.stream
and request.stream.request_body
and not route.ctx.ignore_body
):
if hasattr(handler, "is_stream"):
# Streaming handler: lift the size limit
request.stream.request_max_size = float("inf")
else:
# Non-streaming handler: preload body
await request.receive_body()
# -------------------------------------------- #
# Request Middleware
# -------------------------------------------- #
response = await self._run_request_middleware(
request, request_name=route.name
)
# No middleware results
if not response:
# -------------------------------------------- #
# Execute Handler
# -------------------------------------------- #
if handler is None:
raise ServerError(
(
"'None' was returned while requesting a "
"handler from the router"
)
)
# Run response handler
response = handler(request, **request.match_info)
if isawaitable(response):
response = await response
if request.responded:
if response is not None:
error_logger.error(
"The response object returned by the route handler "
"will not be sent to client. The request has already "
"been responded to."
)
if request.stream is not None:
response = request.stream.response
elif response is not None:
response = await request.respond(response)
elif not hasattr(handler, "is_websocket"):
response = request.stream.response # type: ignore
# Marked for cleanup and DRY with handle_request/handle_exception
            # when ResponseStream is no longer supported
if isinstance(response, BaseHTTPResponse):
await self.dispatch(
"http.lifecycle.response",
inline=True,
context={
"request": request,
"response": response,
},
)
await response.send(end_stream=True)
elif isinstance(response, ResponseStream):
resp = await response(request)
await self.dispatch(
"http.lifecycle.response",
inline=True,
context={
"request": request,
"response": resp,
},
)
await response.eof()
else:
if not hasattr(handler, "is_websocket"):
raise ServerError(
f"Invalid response type {response!r} "
"(need HTTPResponse)"
)
except CancelledError:
raise
except Exception as e:
# Response Generation Failed
await self.handle_exception(request, e)
async def _websocket_handler(
self, handler, request, *args, subprotocols=None, **kwargs
):
if self.asgi:
ws = request.transport.get_websocket_connection()
await ws.accept(subprotocols)
else:
protocol = request.transport.get_protocol()
ws = await protocol.websocket_handshake(request, subprotocols)
# schedule the application handler
# its future is kept in self.websocket_tasks in case it
# needs to be cancelled due to the server being stopped
fut = ensure_future(handler(request, ws, *args, **kwargs))
self.websocket_tasks.add(fut)
cancelled = False
try:
await fut
except Exception as e:
self.error_handler.log(request, e)
except (CancelledError, ConnectionClosed):
cancelled = True
finally:
self.websocket_tasks.remove(fut)
if cancelled:
ws.end_connection(1000)
else:
await ws.close()
# -------------------------------------------------------------------- #
# Testing
# -------------------------------------------------------------------- #
@property
def test_client(self): # noqa
if self._test_client:
return self._test_client
elif self._test_manager:
return self._test_manager.test_client
from sanic_testing.testing import SanicTestClient # type: ignore
self._test_client = SanicTestClient(self)
return self._test_client
@property
def asgi_client(self): # noqa
"""
A testing client that uses ASGI to reach into the application to
        execute handlers.
:return: testing client
:rtype: :class:`SanicASGITestClient`
"""
if self._asgi_client:
return self._asgi_client
elif self._test_manager:
return self._test_manager.asgi_client
from sanic_testing.testing import SanicASGITestClient # type: ignore
self._asgi_client = SanicASGITestClient(self)
return self._asgi_client
# -------------------------------------------------------------------- #
# Execution
# -------------------------------------------------------------------- #
async def _run_request_middleware(
self, request, request_name=None
): # no cov
# The if improves speed. I don't know why
named_middleware = self.named_request_middleware.get(
request_name, deque()
)
applicable_middleware = self.request_middleware + named_middleware
# request.request_middleware_started is meant as a stop-gap solution
# until RFC 1630 is adopted
if applicable_middleware and not request.request_middleware_started:
request.request_middleware_started = True
for middleware in applicable_middleware:
await self.dispatch(
"http.middleware.before",
inline=True,
context={
"request": request,
"response": None,
},
condition={"attach_to": "request"},
)
response = middleware(request)
if isawaitable(response):
response = await response
await self.dispatch(
"http.middleware.after",
inline=True,
context={
"request": request,
"response": None,
},
condition={"attach_to": "request"},
)
if response:
return response
return None
async def _run_response_middleware(
self, request, response, request_name=None
): # no cov
named_middleware = self.named_response_middleware.get(
request_name, deque()
)
applicable_middleware = self.response_middleware + named_middleware
if applicable_middleware:
for middleware in applicable_middleware:
await self.dispatch(
"http.middleware.before",
inline=True,
context={
"request": request,
"response": response,
},
condition={"attach_to": "response"},
)
_response = middleware(request, response)
if isawaitable(_response):
_response = await _response
await self.dispatch(
"http.middleware.after",
inline=True,
context={
"request": request,
"response": _response if _response else response,
},
condition={"attach_to": "response"},
)
if _response:
response = _response
if isinstance(response, BaseHTTPResponse):
response = request.stream.respond(response)
break
return response
def _build_endpoint_name(self, *parts):
parts = [self.name, *parts]
return ".".join(parts)
@classmethod
def _cancel_websocket_tasks(cls, app, loop):
for task in app.websocket_tasks:
task.cancel()
@staticmethod
async def _listener(
app: Sanic, loop: AbstractEventLoop, listener: ListenerType
):
try:
maybe_coro = listener(app) # type: ignore
except TypeError:
maybe_coro = listener(app, loop) # type: ignore
if maybe_coro and isawaitable(maybe_coro):
await maybe_coro
# -------------------------------------------------------------------- #
# Task management
# -------------------------------------------------------------------- #
@classmethod
def _prep_task(
cls,
task,
app,
loop,
):
if callable(task):
try:
task = task(app)
except TypeError:
task = task()
return task
@classmethod
def _loop_add_task(
cls,
task,
app,
loop,
*,
name: Optional[str] = None,
register: bool = True,
) -> Task:
if not isinstance(task, Future):
prepped = cls._prep_task(task, app, loop)
if sys.version_info < (3, 8): # no cov
task = loop.create_task(prepped)
if name:
error_logger.warning(
"Cannot set a name for a task when using Python 3.7. "
"Your task will be created without a name."
)
task.get_name = lambda: name
else:
task = loop.create_task(prepped, name=name)
if name and register and sys.version_info > (3, 7):
app._task_registry[name] = task
return task
@staticmethod
async def dispatch_delayed_tasks(app, loop):
for name in app._delayed_tasks:
await app.dispatch(name, context={"app": app, "loop": loop})
app._delayed_tasks.clear()
@staticmethod
async def run_delayed_task(app, loop, task):
prepped = app._prep_task(task, app, loop)
await prepped
def add_task(
self,
task: Union[Future[Any], Coroutine[Any, Any, Any], Awaitable[Any]],
*,
name: Optional[str] = None,
register: bool = True,
) -> Optional[Task]:
"""
Schedule a task to run later, after the loop has started.
Different from asyncio.ensure_future in that it does not
also return a future, and the actual ensure_future call
is delayed until before server start.
`See user guide re: background tasks
<https://sanicframework.org/guide/basics/tasks.html#background-tasks>`__
        :param task: future, coroutine or awaitable
"""
try:
loop = self.loop # Will raise SanicError if loop is not started
return self._loop_add_task(
task, self, loop, name=name, register=register
)
except SanicException:
task_name = f"sanic.delayed_task.{hash(task)}"
if not self._delayed_tasks:
self.after_server_start(partial(self.dispatch_delayed_tasks))
if name:
raise RuntimeError(
"Cannot name task outside of a running application"
)
self.signal(task_name)(partial(self.run_delayed_task, task=task))
self._delayed_tasks.append(task_name)
return None
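    # A minimal add_task sketch: a coroutine function taking the app is also
    # accepted (via _prep_task), and scheduling is deferred until the loop
    # exists. The task body below is a placeholder:
    #
    #     async def heartbeat(app):
    #         while True:
    #             await asyncio.sleep(30)
    #
    #     app.add_task(heartbeat)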
def get_task(
self, name: str, *, raise_exception: bool = True
) -> Optional[Task]:
try:
return self._task_registry[name]
except KeyError:
if raise_exception:
raise SanicException(
f'Registered task named "{name}" not found.'
)
return None
async def cancel_task(
self,
name: str,
msg: Optional[str] = None,
*,
raise_exception: bool = True,
) -> None:
task = self.get_task(name, raise_exception=raise_exception)
if task and not task.cancelled():
args: Tuple[str, ...] = ()
if msg:
if sys.version_info >= (3, 9):
args = (msg,)
else: # no cov
raise RuntimeError(
"Cancelling a task with a message is only supported "
"on Python 3.9+."
)
task.cancel(*args)
try:
await task
except CancelledError:
...
def purge_tasks(self):
for key, task in self._task_registry.items():
if task.done() or task.cancelled():
self._task_registry[key] = None
self._task_registry = {
k: v for k, v in self._task_registry.items() if v is not None
}
def shutdown_tasks(
self, timeout: Optional[float] = None, increment: float = 0.1
):
for task in self.tasks:
if task.get_name() != "RunServer":
task.cancel()
if timeout is None:
timeout = self.config.GRACEFUL_SHUTDOWN_TIMEOUT
while len(self._task_registry) and timeout:
with suppress(RuntimeError):
running_loop = get_running_loop()
running_loop.run_until_complete(asyncio.sleep(increment))
self.purge_tasks()
timeout -= increment
@property
def tasks(self):
return iter(self._task_registry.values())
# -------------------------------------------------------------------- #
# ASGI
# -------------------------------------------------------------------- #
async def __call__(self, scope, receive, send):
"""
To be ASGI compliant, our instance must be a callable that accepts
three arguments: scope, receive, send. See the ASGI reference for more
details: https://asgi.readthedocs.io/en/latest
"""
self.asgi = True
if scope["type"] == "lifespan":
self.motd("")
self._asgi_app = await ASGIApp.create(self, scope, receive, send)
asgi_app = self._asgi_app
await asgi_app()
_asgi_single_callable = True # We conform to ASGI 3.0 single-callable
# -------------------------------------------------------------------- #
# Configuration
# -------------------------------------------------------------------- #
def update_config(self, config: Union[bytes, str, dict, Any]):
"""
Update app.config. Full implementation can be found in the user guide.
`See user guide re: configuration
<https://sanicframework.org/guide/deployment/configuration.html#basics>`__
"""
self.config.update_config(config)
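    # A minimal update_config sketch; the keys shown are placeholders for any
    # valid Sanic configuration values:
    #
    #     app.update_config({"REQUEST_TIMEOUT": 120, "KEEP_ALIVE": False})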
@property
def asgi(self):
return self.state.asgi
@asgi.setter
def asgi(self, value: bool):
self.state.asgi = value
@property
def debug(self):
return self.state.is_debug
@debug.setter
def debug(self, value: bool):
deprecation(
"Setting the value of a Sanic application's debug value directly "
"is deprecated and will be removed in v22.9. Please set it using "
"the CLI, app.run, app.prepare, or directly set "
"app.state.mode to Mode.DEBUG.",
22.9,
)
mode = Mode.DEBUG if value else Mode.PRODUCTION
self.state.mode = mode
@property
def auto_reload(self):
return self.config.AUTO_RELOAD
@auto_reload.setter
def auto_reload(self, value: bool):
self.config.AUTO_RELOAD = value
@property
def state(self):
return self._state
@property
def is_running(self):
deprecation(
"Use of the is_running property is no longer used by Sanic "
"internally. The property is now deprecated and will be removed "
"in version 22.9. You may continue to set the property for your "
"own needs until that time. If you would like to check whether "
"the application is operational, please use app.state.stage. More "
"information is available at ___.",
22.9,
)
return self.state.is_running
@is_running.setter
def is_running(self, value: bool):
deprecation(
"Use of the is_running property is no longer used by Sanic "
"internally. The property is now deprecated and will be removed "
"in version 22.9. You may continue to set the property for your "
"own needs until that time. If you would like to check whether "
"the application is operational, please use app.state.stage. More "
"information is available at ___.",
22.9,
)
self.state.is_running = value
@property
def is_stopping(self):
deprecation(
"Use of the is_stopping property is no longer used by Sanic "
"internally. The property is now deprecated and will be removed "
"in version 22.9. You may continue to set the property for your "
"own needs until that time. If you would like to check whether "
"the application is operational, please use app.state.stage. More "
"information is available at ___.",
22.9,
)
return self.state.is_stopping
@is_stopping.setter
def is_stopping(self, value: bool):
deprecation(
"Use of the is_stopping property is no longer used by Sanic "
"internally. The property is now deprecated and will be removed "
"in version 22.9. You may continue to set the property for your "
"own needs until that time. If you would like to check whether "
"the application is operational, please use app.state.stage. More "
"information is available at ___.",
22.9,
)
self.state.is_stopping = value
@property
def reload_dirs(self):
return self.state.reload_dirs
@property
def ext(self) -> Extend:
if not hasattr(self, "_ext"):
setup_ext(self, fail=True)
if not hasattr(self, "_ext"):
raise RuntimeError(
"Sanic Extensions is not installed. You can add it to your "
"environment using:\n$ pip install sanic[ext]\nor\n$ pip "
"install sanic-ext"
)
return self._ext # type: ignore
def extend(
self,
*,
extensions: Optional[List[Type[Extension]]] = None,
built_in_extensions: bool = True,
config: Optional[Union[Config, Dict[str, Any]]] = None,
**kwargs,
) -> Extend:
if hasattr(self, "_ext"):
raise RuntimeError(
"Cannot extend Sanic after Sanic Extensions has been setup."
)
setup_ext(
self,
extensions=extensions,
built_in_extensions=built_in_extensions,
config=config,
fail=True,
**kwargs,
)
return self.ext
# -------------------------------------------------------------------- #
# Class methods
# -------------------------------------------------------------------- #
@classmethod
def register_app(cls, app: "Sanic") -> None:
"""
Register a Sanic instance
"""
if not isinstance(app, cls):
raise SanicException("Registered app must be an instance of Sanic")
name = app.name
if name in cls._app_registry and not cls.test_mode:
raise SanicException(f'Sanic app name "{name}" already in use.')
cls._app_registry[name] = app
@classmethod
def get_app(
cls, name: Optional[str] = None, *, force_create: bool = False
) -> "Sanic":
"""
Retrieve an instantiated Sanic instance
"""
if name is None:
if len(cls._app_registry) > 1:
raise SanicException(
'Multiple Sanic apps found, use Sanic.get_app("app_name")'
)
elif len(cls._app_registry) == 0:
raise SanicException("No Sanic apps have been registered.")
else:
return list(cls._app_registry.values())[0]
try:
return cls._app_registry[name]
except KeyError:
if force_create:
return cls(name)
raise SanicException(f'Sanic app name "{name}" not found.')
# -------------------------------------------------------------------- #
# Lifecycle
# -------------------------------------------------------------------- #
def finalize(self):
try:
self.router.finalize()
except FinalizationError as e:
if not Sanic.test_mode:
raise e
def signalize(self, allow_fail_builtin=True):
self.signal_router.allow_fail_builtin = allow_fail_builtin
try:
self.signal_router.finalize()
except FinalizationError as e:
if not Sanic.test_mode:
raise e
async def _startup(self):
self._future_registry.clear()
if not hasattr(self, "_ext"):
setup_ext(self)
if hasattr(self, "_ext"):
self.ext._display()
if self.state.is_debug:
self.config.TOUCHUP = False
# Setup routers
self.signalize(self.config.TOUCHUP)
self.finalize()
# TODO: Replace in v22.6 to check against apps in app registry
if (
self.__class__._uvloop_setting is not None
and self.__class__._uvloop_setting != self.config.USE_UVLOOP
):
error_logger.warning(
"It looks like you're running several apps with different "
"uvloop settings. This is not supported and may lead to "
"unintended behaviour."
)
self.__class__._uvloop_setting = self.config.USE_UVLOOP
# Startup time optimizations
if self.state.primary:
# TODO:
# - Raise warning if secondary apps have error handler config
ErrorHandler.finalize(self.error_handler, config=self.config)
if self.config.TOUCHUP:
TouchUp.run(self)
self.state.is_started = True
async def _server_event(
self,
concern: str,
action: str,
loop: Optional[AbstractEventLoop] = None,
) -> None:
event = f"server.{concern}.{action}"
if action not in ("before", "after") or concern not in (
"init",
"shutdown",
):
raise SanicException(f"Invalid server event: {event}")
if self.state.verbosity >= 1:
logger.debug(f"Triggering server events: {event}")
reverse = concern == "shutdown"
if loop is None:
loop = self.loop
await self.dispatch(
event,
fail_not_found=False,
reverse=reverse,
inline=True,
context={
"app": self,
"loop": loop,
},
)
| avg_line_length: 35.201135 | max_line_length: 82 | alphanum_fraction: 0.537946 |

hexsha: 7b8130579c4b47314b47945a7086d14797f89307 | size: 3,745 | ext: py | lang: Python
repo_path: shop/settings2.py | repo_name: bifnavent/tiendaroxy | repo_head_hexsha: 232264a3dff710bca2847af167b2ed950c01c183 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
"""
Django settings for shop project.
Generated by 'django-admin startproject' using Django 3.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
import dj_database_url
from decouple import config
import cloudinary
import cloudinary.uploader
import cloudinary.api
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-ye)ravcmni-yfnff7(ij=o9u^lx)gn3bfc4u!@llz0_6z3_y(z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'tienda',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cloudinary',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'shop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'shop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(
default=config('DATABASE_URL')
)
}
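# Illustrative note (not part of the original file): the DATABASES setting above
# is built from the DATABASE_URL environment variable via dj_database_url. A
# minimal sketch of the expected value (credentials and host are placeholders):
#
#     DATABASE_URL=postgres://user:password@localhost:5432/shopdb
#
# dj_database_url.config() parses that URL into the usual Django dict with
# ENGINE/NAME/USER/PASSWORD/HOST/PORT keys.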
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'es-es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
cloudinary.config(
cloud_name = "solinsoft",
api_key = "599129267623282",
api_secret = "AmZ-QOj_bbnviDwa3UBJBY-HloY"
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
| avg_line_length: 24.966667 | max_line_length: 91 | alphanum_fraction: 0.71482 |

hexsha: e01b7630e7623dc547ffd1b22eb4b55063f8f489 | size: 10,630 | ext: py | lang: Python
repo_path: mmdet/exp/yolox/yolox_base_coco.py | repo_name: jie311/miemiedetection | repo_head_hexsha: b0e7a45717fe6c9cf9bf3c0f47d47a2e6c68b1b6 | licenses: ["Apache-2.0"]
max_stars_count: 65 (2021-12-30T03:30:52.000Z to 2022-03-25T01:44:32.000Z)
max_issues_count: 1 (2021-12-31T01:51:35.000Z to 2022-01-01T14:42:37.000Z)
max_forks_count: 7 (2021-12-31T09:25:06.000Z to 2022-03-10T01:25:09.000Z)
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
import os
import sys
import random
import torch
import torch.distributed as dist
import torch.nn as nn
from mmdet.exp.datasets.coco_base import COCOBaseExp
class YOLOXExp(COCOBaseExp):
def __init__(self):
super().__init__()
        # ---------------- architecture name ---------------- #
self.archi_name = 'YOLOX'
# ---------------- model config ---------------- #
self.depth = 1.00
self.width = 1.00
self.act = 'silu'
# ---------------- dataloader config ---------------- #
        # Defaults to 4. If you hit "OSError: [WinError 1455] The paging file is too small
        # for this operation to complete", set it to 2 or 0 to work around it.
self.data_num_workers = 2
self.input_size = (640, 640) # (height, width)
# Actual multiscale ranges: [640-5*32, 640+5*32].
# To disable multiscale training, set the
# self.multiscale_range to 0.
self.multiscale_range = 5
# You can uncomment this line to specify a multiscale range
# self.random_size = (14, 26)
self.output_dir = "YOLOX_outputs"
# --------------- transform config ----------------- #
self.mosaic_prob = 1.0
self.mixup_prob = 1.0
self.hsv_prob = 1.0
self.flip_prob = 0.5
self.degrees = 10.0
self.translate = 0.1
self.mosaic_scale = (0.1, 2)
self.mixup_scale = (0.5, 1.5)
self.shear = 2.0
self.enable_mixup = True
# -------------- training config --------------------- #
self.warmup_epochs = 5
self.max_epoch = 300
self.warmup_lr = 0
self.basic_lr_per_img = 0.01 / 64.0
self.scheduler = "yoloxwarmcos"
self.no_aug_epochs = 15
self.min_lr_ratio = 0.05
self.ema = True
self.ema_decay = 0.9998
self.freeze_at = 0
self.weight_decay = 5e-4
self.momentum = 0.9
self.print_interval = 10
self.eval_interval = 10
self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
# ----------------- testing config ------------------ #
self.test_size = (640, 640)
self.test_conf = 0.01
self.nmsthre = 0.65
        # Detect whether we are running under a debugger
isDebug = True if sys.gettrace() else False
if isDebug:
print('Debug Mode.')
self.data_dir = '../' + self.data_dir
self.cls_names = '../' + self.cls_names
self.output_dir = '../' + self.output_dir
def get_model(self):
from mmdet.models import YOLOX, YOLOPAFPN, YOLOXHead
def init_yolo(M):
for m in M.modules():
if isinstance(m, nn.BatchNorm2d):
m.eps = 1e-3
m.momentum = 0.03
if getattr(self, "model", None) is None:
in_channels = [256, 512, 1024]
backbone = YOLOPAFPN(self.depth, self.width, in_channels=in_channels, act=self.act, freeze_at=self.freeze_at)
head = YOLOXHead(self.num_classes, self.width, in_channels=in_channels, act=self.act)
self.model = YOLOX(backbone, head)
self.model.apply(init_yolo)
self.model.head.initialize_biases(1e-2)
return self.model
def get_data_loader(
self, batch_size, is_distributed, no_aug=False, cache_img=False
):
from mmdet.data import (
COCODataset,
TrainTransform,
YoloBatchSampler,
DataLoader,
InfiniteSampler,
MosaicDetection,
worker_init_reset_seed,
)
from mmdet.utils import (
wait_for_the_master,
get_local_rank,
)
local_rank = get_local_rank()
with wait_for_the_master(local_rank):
dataset = COCODataset(
data_dir=self.data_dir,
json_file=self.train_ann,
ann_folder=self.ann_folder,
name=self.train_image_folder,
img_size=self.input_size,
preproc=TrainTransform(
max_labels=50,
flip_prob=self.flip_prob,
hsv_prob=self.hsv_prob),
cache=cache_img,
)
dataset = MosaicDetection(
dataset,
mosaic=not no_aug,
img_size=self.input_size,
preproc=TrainTransform(
max_labels=120,
flip_prob=self.flip_prob,
hsv_prob=self.hsv_prob),
degrees=self.degrees,
translate=self.translate,
mosaic_scale=self.mosaic_scale,
mixup_scale=self.mixup_scale,
shear=self.shear,
enable_mixup=self.enable_mixup,
mosaic_prob=self.mosaic_prob,
mixup_prob=self.mixup_prob,
)
self.dataset = dataset
if is_distributed:
batch_size = batch_size // dist.get_world_size()
sampler = InfiniteSampler(len(self.dataset), seed=self.seed if self.seed else 0)
batch_sampler = YoloBatchSampler(
sampler=sampler,
batch_size=batch_size,
drop_last=False,
mosaic=not no_aug,
)
dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True}
dataloader_kwargs["batch_sampler"] = batch_sampler
# Make sure each process has different random seed, especially for 'fork' method.
# Check https://github.com/pytorch/pytorch/issues/63311 for more details.
dataloader_kwargs["worker_init_fn"] = worker_init_reset_seed
train_loader = DataLoader(self.dataset, **dataloader_kwargs)
return train_loader
def random_resize(self, data_loader, epoch, rank, is_distributed):
tensor = torch.LongTensor(2).cuda()
if rank == 0:
size_factor = self.input_size[1] * 1.0 / self.input_size[0]
if not hasattr(self, 'random_size'):
min_size = int(self.input_size[0] / 32) - self.multiscale_range
max_size = int(self.input_size[0] / 32) + self.multiscale_range
self.random_size = (min_size, max_size)
size = random.randint(*self.random_size)
size = (int(32 * size), 32 * int(size * size_factor))
tensor[0] = size[0]
tensor[1] = size[1]
if is_distributed:
dist.barrier()
dist.broadcast(tensor, 0)
input_size = (tensor[0].item(), tensor[1].item())
return input_size
def preprocess(self, inputs, targets, tsize):
scale_y = tsize[0] / self.input_size[0]
scale_x = tsize[1] / self.input_size[1]
if scale_x != 1 or scale_y != 1:
inputs = nn.functional.interpolate(
inputs, size=tsize, mode="bilinear", align_corners=False
)
targets[..., 1::2] = targets[..., 1::2] * scale_x
targets[..., 2::2] = targets[..., 2::2] * scale_y
return inputs, targets
def get_optimizer(self, batch_size):
if "optimizer" not in self.__dict__:
if self.warmup_epochs > 0:
lr = self.warmup_lr
else:
lr = self.basic_lr_per_img * batch_size
pg0, pg1, pg2 = [], [], [] # optimizer parameter groups
for k, v in self.model.named_modules():
if hasattr(v, "bias") and isinstance(v.bias, nn.Parameter):
if v.bias.requires_grad:
pg2.append(v.bias) # biases
if isinstance(v, nn.BatchNorm2d) or "bn" in k:
if v.weight.requires_grad:
pg0.append(v.weight) # no decay
elif hasattr(v, "weight") and isinstance(v.weight, nn.Parameter):
if v.weight.requires_grad:
pg1.append(v.weight) # apply decay
optimizer = torch.optim.SGD(
pg0, lr=lr, momentum=self.momentum, nesterov=True
)
optimizer.add_param_group(
{"params": pg1, "weight_decay": self.weight_decay}
) # add pg1 with weight_decay
optimizer.add_param_group({"params": pg2})
self.optimizer = optimizer
return self.optimizer
def get_lr_scheduler(self, lr, iters_per_epoch):
from mmdet.utils import LRScheduler
scheduler = LRScheduler(
self.scheduler,
lr,
iters_per_epoch,
self.max_epoch,
warmup_epochs=self.warmup_epochs,
warmup_lr_start=self.warmup_lr,
no_aug_epochs=self.no_aug_epochs,
min_lr_ratio=self.min_lr_ratio,
)
return scheduler
def get_eval_loader(self, batch_size, is_distributed, testdev=False, legacy=False):
from mmdet.data import COCODataset, ValTransform
valdataset = COCODataset(
data_dir=self.data_dir,
json_file=self.val_ann if not testdev else "image_info_test-dev2017.json",
ann_folder=self.ann_folder,
name=self.val_image_folder if not testdev else "test2017",
img_size=self.test_size,
preproc=ValTransform(legacy=legacy),
)
if is_distributed:
batch_size = batch_size // dist.get_world_size()
sampler = torch.utils.data.distributed.DistributedSampler(
valdataset, shuffle=False
)
else:
sampler = torch.utils.data.SequentialSampler(valdataset)
dataloader_kwargs = {
"num_workers": self.data_num_workers,
"pin_memory": True,
"sampler": sampler,
}
dataloader_kwargs["batch_size"] = batch_size
val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs)
return val_loader
def get_evaluator(self, batch_size, is_distributed, testdev=False, legacy=False):
from mmdet.evaluators import COCOEvaluator
val_loader = self.get_eval_loader(batch_size, is_distributed, testdev, legacy)
evaluator = COCOEvaluator(
dataloader=val_loader,
img_size=self.test_size,
confthre=self.test_conf,
nmsthre=self.nmsthre,
num_classes=self.num_classes,
archi_name=self.archi_name,
testdev=testdev,
)
return evaluator
def eval(self, model, evaluator, is_distributed, half=False):
return evaluator.evaluate_yolox(model, is_distributed, half)
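# --------------------------------------------------------------------------- #
# Illustrative sketch (not part of the original file): a torch-free helper that
# mirrors the size arithmetic used in YOLOXExp.random_resize() above. The
# function name is hypothetical and is not referenced elsewhere.
def _example_pick_multiscale_size(input_size=(640, 640), multiscale_range=5):
    import random
    # Aspect ratio (width / height) of the configured input size.
    size_factor = input_size[1] * 1.0 / input_size[0]
    # Pick a random stride-32 multiple within [base/32 - range, base/32 + range].
    min_size = int(input_size[0] / 32) - multiscale_range
    max_size = int(input_size[0] / 32) + multiscale_range
    size = random.randint(min_size, max_size)
    # Height is a multiple of 32; width follows the aspect ratio.
    return (int(32 * size), 32 * int(size * size_factor))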
| avg_line_length: 35.198675 | max_line_length: 121 | alphanum_fraction: 0.567262 |

hexsha: 6ce12143a54647c9f0a9f518fdb15a04046c1d39 | size: 62,471 | ext: py | lang: Python
repo_path: Tests/test_bytes.py | repo_name: elfscript/ironpython3 | repo_head_hexsha: fde41d69ff178e60b33b68752e0ea8ba8f5e45e7 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import sys
import unittest
from iptest import IronPythonTestCase, ip_supported_encodings, is_cli, is_mono, is_osx, run_test, skipUnlessIronPython
if not is_cli:
long = int
types = [bytearray, bytes]
class IndexableOC:
def __init__(self, value):
self.value = value
def __index__(self):
return self.value
class Indexable(object):
def __init__(self, value):
self.value = value
def __index__(self):
return self.value
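# Illustrative sketch (not part of the original test file): the helper classes
# above exist because bytearray item assignment accepts any object implementing
# __index__, which several tests below rely on. The function name is
# hypothetical and is not called by the test suite.
def _indexable_assignment_demo():
    ba = bytearray(b'abc')
    ba[0] = Indexable(ord('x'))     # __index__ supplies the byte value
    ba[1] = IndexableOC(ord('y'))
    return ba                       # bytearray(b'xyc')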
class BytesTest(IronPythonTestCase):
def test_capitalize(self):
tests = [(b'foo', b'Foo'),
(b' foo', b' foo'),
(b'fOO', b'Foo'),
(b' fOO BAR', b' foo bar'),
(b'fOO BAR', b'Foo bar'),
]
for testType in types:
for data, result in tests:
self.assertEqual(testType(data).capitalize(), result)
y = b''
x = y.capitalize()
self.assertEqual(id(x), id(y))
y = bytearray(b'')
x = y.capitalize()
self.assertTrue(id(x) != id(y), "bytearray.capitalize returned self")
def test_center(self):
for testType in types:
self.assertEqual(testType(b'aa').center(4), b' aa ')
self.assertEqual(testType(b'aa').center(4, b'*'), b'*aa*')
self.assertEqual(testType(b'aa').center(2), b'aa')
self.assertEqual(testType(b'aa').center(2, b'*'), b'aa')
self.assertRaises(TypeError, testType(b'abc').center, 3, [2, ])
self.assertRaises(TypeError, testType(b'abc').center, 3, ' ')
x = b'aa'
self.assertEqual(id(x.center(2, b'*')), id(x))
x = bytearray(b'aa')
self.assertTrue(id(x.center(2, b'*')) != id(x))
def test_count(self):
for testType in types:
self.assertEqual(testType(b"adadad").count(b"d"), 3)
self.assertEqual(testType(b"adbaddads").count(b"ad"), 3)
self.assertEqual(testType(b"adbaddads").count(b"ad", 1, 8), 2)
self.assertEqual(testType(b"adbaddads").count(b"ad", -1, -1), 0)
self.assertEqual(testType(b"adbaddads").count(b"ad", 0, -1), 3)
self.assertEqual(testType(b"adbaddads").count(b"", 0, -1), 9)
self.assertEqual(testType(b"adbaddads").count(b"", 27), 0)
self.assertRaises(TypeError, testType(b"adbaddads").count, [2,])
self.assertRaises(TypeError, testType(b"adbaddads").count, [2,], 0)
self.assertRaises(TypeError, testType(b"adbaddads").count, [2,], 0, 1)
def test_decode(self):
for testType in types:
self.assertEqual(testType(b'\xff\xfea\x00b\x00c\x00').decode('utf-16'), 'abc')
def test_endswith(self):
for testType in types:
self.assertRaises(TypeError, testType(b'abcdef').endswith, ([], ))
self.assertRaises(TypeError, testType(b'abcdef').endswith, [])
self.assertRaises(TypeError, testType(b'abcdef').endswith, [], 0)
self.assertRaises(TypeError, testType(b'abcdef').endswith, [], 0, 1)
self.assertEqual(testType(b'abcdef').endswith(b'def'), True)
self.assertEqual(testType(b'abcdef').endswith(b'def', -1, -2), False)
self.assertEqual(testType(b'abcdef').endswith(b'def', 0, 42), True)
self.assertEqual(testType(b'abcdef').endswith(b'def', 0, -7), False)
self.assertEqual(testType(b'abcdef').endswith(b'def', 42, -7), False)
self.assertEqual(testType(b'abcdef').endswith(b'def', 42), False)
self.assertEqual(testType(b'abcdef').endswith(b'bar'), False)
self.assertEqual(testType(b'abcdef').endswith((b'def', )), True)
self.assertEqual(testType(b'abcdef').endswith((b'baz', )), False)
self.assertEqual(testType(b'abcdef').endswith((b'baz', ), 0, 42), False)
self.assertEqual(testType(b'abcdef').endswith((b'baz', ), 0, -42), False)
for x in (0, 1, 2, 3, -10, -3, -4):
self.assertEqual(testType(b"abcdef").endswith(b"def", x), True)
self.assertEqual(testType(b"abcdef").endswith(b"de", x, 5), True)
self.assertEqual(testType(b"abcdef").endswith(b"de", x, -1), True)
self.assertEqual(testType(b"abcdef").endswith((b"def", ), x), True)
self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, 5), True)
self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, -1), True)
for x in (4, 5, 6, 10, -1, -2):
self.assertEqual(testType(b"abcdef").endswith((b"def", ), x), False)
self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, 5), False)
self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, -1), False)
def test_expandtabs(self):
for testType in types:
self.assertTrue(testType(b"\ttext\t").expandtabs(0) == b"text")
self.assertTrue(testType(b"\ttext\t").expandtabs(-10) == b"text")
self.assertEqual(testType(b"\r\ntext\t").expandtabs(-10), b"\r\ntext")
self.assertEqual(len(testType(b"aaa\taaa\taaa").expandtabs()), 19)
self.assertEqual(testType(b"aaa\taaa\taaa").expandtabs(), b"aaa aaa aaa")
self.assertRaises(OverflowError, bytearray(b'\t\t').expandtabs, sys.maxsize)
def test_extend(self):
b = bytearray(b'abc')
b.extend(b'def')
self.assertEqual(b, b'abcdef')
b.extend(bytearray(b'ghi'))
self.assertEqual(b, b'abcdefghi')
b = bytearray(b'abc')
b.extend([2,3,4])
self.assertEqual(b, b'abc' + b'\x02\x03\x04')
b = bytearray(b'abc')
b.extend(memoryview(b"def"))
self.assertEqual(b, b'abcdef')
def test_find(self):
for testType in types:
self.assertEqual(testType(b"abcdbcda").find(b"cd", 1), 2)
self.assertEqual(testType(b"abcdbcda").find(b"cd", 3), 5)
self.assertEqual(testType(b"abcdbcda").find(b"cd", 7), -1)
self.assertEqual(testType(b'abc').find(b'abc', -1, 1), -1)
self.assertEqual(testType(b'abc').find(b'abc', 25), -1)
self.assertEqual(testType(b'abc').find(b'add', 0, 3), -1)
if testType == bytes:
self.assertEqual(testType(b'abc').find(b'add', 0, None), -1)
self.assertEqual(testType(b'abc').find(b'add', None, None), -1)
self.assertEqual(testType(b'abc').find(b'', None, 0), 0)
self.assertEqual(testType(b'x').find(b'x', None, 0), -1)
self.assertEqual(testType(b'abc').find(b'', 0, 0), 0)
self.assertEqual(testType(b'abc').find(b'', 0, 1), 0)
self.assertEqual(testType(b'abc').find(b'', 0, 2), 0)
self.assertEqual(testType(b'abc').find(b'', 0, 3), 0)
self.assertEqual(testType(b'abc').find(b'', 0, 4), 0)
self.assertEqual(testType(b'').find(b'', 0, 4), 0)
self.assertEqual(testType(b'x').find(b'x', 0, 0), -1)
self.assertEqual(testType(b'x').find(b'x', 3, 0), -1)
self.assertEqual(testType(b'x').find(b'', 3, 0), -1)
self.assertRaises(TypeError, testType(b'x').find, [1])
self.assertRaises(TypeError, testType(b'x').find, [1], 0)
self.assertRaises(TypeError, testType(b'x').find, [1], 0, 1)
def test_fromhex(self):
for testType in types:
if testType != str:
self.assertRaises(ValueError, testType.fromhex, '0')
self.assertRaises(ValueError, testType.fromhex, 'A')
self.assertRaises(ValueError, testType.fromhex, 'a')
self.assertRaises(ValueError, testType.fromhex, 'aG')
self.assertRaises(ValueError, testType.fromhex, 'Ga')
self.assertEqual(testType.fromhex('00'), b'\x00')
self.assertEqual(testType.fromhex('00 '), b'\x00')
self.assertEqual(testType.fromhex('00 '), b'\x00')
self.assertEqual(testType.fromhex('00 01'), b'\x00\x01')
self.assertEqual(testType.fromhex('00 01 0a'), b'\x00\x01\x0a')
self.assertEqual(testType.fromhex('00 01 0a 0B'), b'\x00\x01\x0a\x0B')
self.assertEqual(testType.fromhex('00 a1 Aa 0B'), b'\x00\xA1\xAa\x0B')
def test_index(self):
for testType in types:
self.assertRaises(ValueError, testType(b'abc').index, 257)
self.assertEqual(testType(b'abc').index(b'a'), 0)
self.assertEqual(testType(b'abc').index(b'a', 0, -1), 0)
self.assertRaises(ValueError, testType(b'abc').index, b'c', 0, -1)
self.assertRaises(ValueError, testType(b'abc').index, b'a', -1)
self.assertEqual(testType(b'abc').index(b'ab'), 0)
self.assertEqual(testType(b'abc').index(b'bc'), 1)
self.assertRaises(ValueError, testType(b'abc').index, b'abcd')
self.assertRaises(ValueError, testType(b'abc').index, b'e')
self.assertRaises(TypeError, testType(b'x').index, [1])
self.assertRaises(TypeError, testType(b'x').index, [1], 0)
self.assertRaises(TypeError, testType(b'x').index, [1], 0, 1)
def test_insert(self):
b = bytearray(b'abc')
b.insert(0, ord('d'))
self.assertEqual(b, b'dabc')
b.insert(1000, ord('d'))
self.assertEqual(b, b'dabcd')
b.insert(-1, ord('d'))
self.assertEqual(b, b'dabcdd')
self.assertRaises(ValueError, b.insert, 0, 256)
def check_is_method(self, methodName, result):
for testType in types:
self.assertEqual(getattr(testType(b''), methodName)(), False)
for i in range(256):
data = bytearray()
data.append(i)
self.assertTrue(getattr(testType(data), methodName)() == result(i), chr(i) + " (" + str(i) + ") should be " + str(result(i)))
def test_isalnum(self):
self.check_is_method('isalnum', lambda i : i >= ord('a') and i <= ord('z') or i >= ord('A') and i <= ord('Z') or i >= ord('0') and i <= ord('9'))
def test_isalpha(self):
self.check_is_method('isalpha', lambda i : i >= ord('a') and i <= ord('z') or i >= ord('A') and i <= ord('Z'))
def test_isdigit(self):
self.check_is_method('isdigit', lambda i : (i >= ord('0') and i <= ord('9')))
def test_islower(self):
self.check_is_method('islower', lambda i : i >= ord('a') and i <= ord('z'))
for testType in types:
for i in range(256):
if not chr(i).isupper():
self.assertEqual((testType(b'a') + testType([i])).islower(), True)
def test_isspace(self):
self.check_is_method('isspace', lambda i : i in [ord(' '), ord('\t'), ord('\f'), ord('\n'), ord('\r'), 11])
for testType in types:
for i in range(256):
if not chr(i).islower():
self.assertEqual((testType(b'A') + testType([i])).isupper(), True)
def test_istitle(self):
for testType in types:
self.assertEqual(testType(b'').istitle(), False)
self.assertEqual(testType(b'Foo').istitle(), True)
self.assertEqual(testType(b'Foo Bar').istitle(), True)
self.assertEqual(testType(b'FooBar').istitle(), False)
self.assertEqual(testType(b'foo').istitle(), False)
def test_isupper(self):
self.check_is_method('isupper', lambda i : i >= ord('A') and i <= ord('Z'))
def test_join(self):
x = b''
self.assertEqual(id(x.join(b'')), id(x))
x = bytearray(x)
self.assertTrue(id(x.join(b'')) != id(x))
x = b'abc'
self.assertEqual(id(b'foo'.join([x])), id(x))
self.assertRaises(TypeError, b'foo'.join, [42])
x = bytearray(b'foo')
self.assertTrue(id(bytearray(b'foo').join([x])) != id(x), "got back same object on single arg join w/ bytearray")
for testType in types:
self.assertEqual(testType(b'x').join([b'd', b'e', b'f']), b'dxexf')
self.assertEqual(testType(b'x').join([b'd', b'e', b'f']), b'dxexf')
self.assertEqual(type(testType(b'x').join([b'd', b'e', b'f'])), testType)
if str != bytes:
# works in Py3k/Ipy, not in Py2.6
self.assertEqual(b'x'.join([testType(b'd'), testType(b'e'), testType(b'f')]), b'dxexf')
self.assertEqual(bytearray(b'x').join([testType(b'd'), testType(b'e'), testType(b'f')]), b'dxexf')
self.assertEqual(testType(b'').join([]), b'')
self.assertEqual(testType(b'').join((b'abc', )), b'abc')
self.assertEqual(testType(b'').join((b'abc', b'def')), b'abcdef')
self.assertRaises(TypeError, testType(b'').join, (42, ))
def test_ljust(self):
for testType in types:
self.assertRaises(TypeError, testType(b'').ljust, 42, ' ')
self.assertRaises(TypeError, testType(b'').ljust, 42, ' ')
self.assertRaises(TypeError, testType(b'').ljust, 42, b' ')
self.assertRaises(TypeError, testType(b'').ljust, 42, '\u0100')
self.assertEqual(testType(b'abc').ljust(4), b'abc ')
self.assertEqual(testType(b'abc').ljust(4, b'x'), b'abcx')
x = b'abc'
self.assertEqual(id(x.ljust(2)), id(x))
x = bytearray(x)
self.assertTrue(id(x.ljust(2)) != id(x))
def test_lower(self):
expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \
b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \
b'&\'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`' \
b'abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \
b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \
b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \
b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \
b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \
b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \
b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \
b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'
data = bytearray()
for i in range(256):
data.append(i)
for testType in types:
self.assertEqual(testType(data).lower(), expected)
def test_lstrip(self):
for testType in types:
self.assertEqual(testType(b' abc').lstrip(), b'abc')
self.assertEqual(testType(b' abc ').lstrip(), b'abc ')
self.assertEqual(testType(b' ').lstrip(), b'')
x = b'abc'
self.assertEqual(id(x.lstrip()), id(x))
x = bytearray(x)
self.assertTrue(id(x.lstrip()) != id(x))
def test_partition(self):
for testType in types:
self.assertRaises(TypeError, testType(b'').partition, None)
self.assertRaises(ValueError, testType(b'').partition, b'')
self.assertRaises(ValueError, testType(b'').partition, b'')
if testType == bytearray:
self.assertEqual(testType(b'a\x01c').partition([1]), (b'a', b'\x01', b'c'))
else:
self.assertRaises(TypeError, testType(b'a\x01c').partition, [1])
self.assertEqual(testType(b'abc').partition(b'b'), (b'a', b'b', b'c'))
self.assertEqual(testType(b'abc').partition(b'd'), (b'abc', b'', b''))
x = testType(b'abc')
one, two, three = x.partition(b'd')
if testType == bytearray:
self.assertTrue(id(one) != id(x))
else:
self.assertEqual(id(one), id(x))
one, two, three = b''.partition(b'abc')
self.assertEqual(id(one), id(two))
self.assertEqual(id(two), id(three))
one, two, three = bytearray().partition(b'abc')
self.assertTrue(id(one) != id(two))
self.assertTrue(id(two) != id(three))
self.assertTrue(id(three) != id(one))
def test_pop(self):
b = bytearray()
self.assertRaises(IndexError, b.pop)
self.assertRaises(IndexError, b.pop, 0)
b = bytearray(b'abc')
self.assertEqual(b.pop(), ord('c'))
self.assertEqual(b, b'ab')
b = bytearray(b'abc')
b.pop(1)
self.assertEqual(b, b'ac')
b = bytearray(b'abc')
b.pop(-1)
self.assertEqual(b, b'ab')
def test_replace(self):
for testType in types:
self.assertRaises(TypeError, testType(b'abc').replace, None, b'abc')
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None)
self.assertRaises(TypeError, testType(b'abc').replace, None, b'abc', 1)
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None, 1)
self.assertRaises(TypeError, testType(b'abc').replace, [1], b'abc')
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', [1])
self.assertRaises(TypeError, testType(b'abc').replace, [1], b'abc', 1)
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', [1], 1)
self.assertEqual(testType(b'abc').replace(b'b', b'foo'), b'afooc')
self.assertEqual(testType(b'abc').replace(b'b', b''), b'ac')
self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 1), b'afoocb')
self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 2), b'afoocfoo')
self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 3), b'afoocfoo')
self.assertEqual(testType(b'abcb').replace(b'b', b'foo', -1), b'afoocfoo')
self.assertEqual(testType(b'abcb').replace(b'', b'foo', 100), b'fooafoobfoocfoobfoo')
self.assertEqual(testType(b'abcb').replace(b'', b'foo', 0), b'abcb')
self.assertEqual(testType(b'abcb').replace(b'', b'foo', 1), b'fooabcb')
self.assertEqual(testType(b'ooooooo').replace(b'o', b'u'), b'uuuuuuu')
x = b'abc'
self.assertEqual(id(x.replace(b'foo', b'bar', 0)), id(x))
if is_cli:
# CPython bug in 2.6 - http://bugs.python.org/issue4348
x = bytearray(b'abc')
self.assertTrue(id(x.replace(b'foo', b'bar', 0)) != id(x))
def test_remove(self):
for toremove in (ord('a'), b'a', Indexable(ord('a')), IndexableOC(ord('a'))):
b = bytearray(b'abc')
b.remove(ord('a'))
self.assertEqual(b, b'bc')
self.assertRaises(ValueError, b.remove, ord('x'))
b = bytearray(b'abc')
self.assertRaises(TypeError, b.remove, bytearray(b'a'))
def test_reverse(self):
b = bytearray(b'abc')
b.reverse()
self.assertEqual(b, b'cba')
# CoreCLR bug xxxx found in build 30324 from silverlight_w2
def test_rfind(self):
for testType in types:
self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 1), 5)
self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 3), 5)
self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 7), -1)
self.assertEqual(testType(b"abcdbcda").rfind(b"cd", -1, -2), -1)
self.assertEqual(testType(b"abc").rfind(b"add", 3, 0), -1)
self.assertEqual(testType(b'abc').rfind(b'bd'), -1)
self.assertRaises(TypeError, testType(b'abc').rfind, [1])
self.assertRaises(TypeError, testType(b'abc').rfind, [1], 1)
self.assertRaises(TypeError, testType(b'abc').rfind, [1], 1, 2)
if testType == bytes:
self.assertEqual(testType(b"abc").rfind(b"add", None, 0), -1)
self.assertEqual(testType(b"abc").rfind(b"add", 3, None), -1)
self.assertEqual(testType(b"abc").rfind(b"add", None, None), -1)
self.assertEqual(testType(b'abc').rfind(b'', 0, 0), 0)
self.assertEqual(testType(b'abc').rfind(b'', 0, 1), 1)
self.assertEqual(testType(b'abc').rfind(b'', 0, 2), 2)
self.assertEqual(testType(b'abc').rfind(b'', 0, 3), 3)
self.assertEqual(testType(b'abc').rfind(b'', 0, 4), 3)
self.assertEqual(testType(b'x').rfind(b'x', 0, 0), -1)
self.assertEqual(testType(b'x').rfind(b'x', 3, 0), -1)
self.assertEqual(testType(b'x').rfind(b'', 3, 0), -1)
def test_rindex(self):
for testType in types:
self.assertRaises(ValueError, testType(b'abc').rindex, 257)
self.assertEqual(testType(b'abc').rindex(b'a'), 0)
self.assertEqual(testType(b'abc').rindex(b'a', 0, -1), 0)
self.assertRaises(TypeError, testType(b'abc').rindex, [1])
self.assertRaises(TypeError, testType(b'abc').rindex, [1], 1)
self.assertRaises(TypeError, testType(b'abc').rindex, [1], 1, 2)
self.assertRaises(ValueError, testType(b'abc').rindex, b'c', 0, -1)
self.assertRaises(ValueError, testType(b'abc').rindex, b'a', -1)
def test_rjust(self):
for testType in types:
self.assertRaises(TypeError, testType(b'').rjust, 42, ' ')
self.assertRaises(TypeError, testType(b'').rjust, 42, ' ')
self.assertRaises(TypeError, testType(b'').rjust, 42, b' ')
self.assertRaises(TypeError, testType(b'').rjust, 42, '\u0100')
self.assertRaises(TypeError, testType(b'').rjust, 42, [2])
self.assertEqual(testType(b'abc').rjust(4), b' abc')
self.assertEqual(testType(b'abc').rjust(4, b'x'), b'xabc')
x = b'abc'
self.assertEqual(id(x.rjust(2)), id(x))
x = bytearray(x)
self.assertTrue(id(x.rjust(2)) != id(x))
def test_rpartition(self):
for testType in types:
self.assertRaises(TypeError, testType(b'').rpartition, None)
self.assertRaises(ValueError, testType(b'').rpartition, b'')
if testType == bytearray:
self.assertEqual(testType(b'a\x01c').rpartition([1]), (b'a', b'\x01', b'c'))
else:
self.assertRaises(TypeError, testType(b'a\x01c').rpartition, [1])
self.assertEqual(testType(b'abc').rpartition(b'b'), (b'a', b'b', b'c'))
self.assertEqual(testType(b'abc').rpartition(b'd'), (b'', b'', b'abc'))
x = testType(b'abc')
one, two, three = x.rpartition(b'd')
if testType == bytearray:
self.assertTrue(id(three) != id(x))
else:
self.assertEqual(id(three), id(x))
b = testType(b'mississippi')
self.assertEqual(b.rpartition(b'i'), (b'mississipp', b'i', b''))
self.assertEqual(type(b.rpartition(b'i')[0]), testType)
self.assertEqual(type(b.rpartition(b'i')[1]), testType)
self.assertEqual(type(b.rpartition(b'i')[2]), testType)
b = testType(b'abcdefgh')
self.assertEqual(b.rpartition(b'a'), (b'', b'a', b'bcdefgh'))
one, two, three = b''.rpartition(b'abc')
self.assertEqual(id(one), id(two))
self.assertEqual(id(two), id(three))
one, two, three = bytearray().rpartition(b'abc')
self.assertTrue(id(one) != id(two))
self.assertTrue(id(two) != id(three))
self.assertTrue(id(three) != id(one))
def test_rsplit(self):
for testType in types:
x=testType(b"Hello Worllds")
self.assertEqual(x.rsplit(), [b'Hello', b'Worllds'])
s = x.rsplit(b"ll")
self.assertTrue(s[0] == b"He")
self.assertTrue(s[1] == b"o Wor")
self.assertTrue(s[2] == b"ds")
self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").rsplit(b"--", 2) == [b'1--2--3--4--5--6--7--8', b'9', b'0'])
for temp_string in [b"", b" ", b" ", b"\t", b" \t", b"\t ", b"\t\t", b"\n", b"\n\n", b"\n \n"]:
self.assertEqual(temp_string.rsplit(None), [])
self.assertEqual(testType(b"ab").rsplit(None), [b"ab"])
self.assertEqual(testType(b"a b").rsplit(None), [b"a", b"b"])
self.assertRaises(TypeError, testType(b'').rsplit, [2])
self.assertRaises(TypeError, testType(b'').rsplit, [2], 2)
def test_rstrip(self):
for testType in types:
self.assertEqual(testType(b'abc ').rstrip(), b'abc')
self.assertEqual(testType(b' abc ').rstrip(), b' abc')
self.assertEqual(testType(b' ').rstrip(), b'')
self.assertEqual(testType(b'abcx').rstrip(b'x'), b'abc')
self.assertEqual(testType(b'xabc').rstrip(b'x'), b'xabc')
self.assertEqual(testType(b'x').rstrip(b'x'), b'')
self.assertRaises(TypeError, testType(b'').rstrip, [2])
x = b'abc'
self.assertEqual(id(x.rstrip()), id(x))
x = bytearray(x)
self.assertTrue(id(x.rstrip()) != id(x))
def test_split(self):
for testType in types:
x=testType(b"Hello Worllds")
self.assertRaises(ValueError, x.split, b'')
self.assertEqual(x.split(None, 0), [b'Hello Worllds'])
self.assertEqual(x.split(None, -1), [b'Hello', b'Worllds'])
self.assertEqual(x.split(None, 2), [b'Hello', b'Worllds'])
self.assertEqual(x.split(), [b'Hello', b'Worllds'])
self.assertEqual(testType(b'abc').split(b'c'), [b'ab', b''])
self.assertEqual(testType(b'abcd').split(b'c'), [b'ab', b'd'])
self.assertEqual(testType(b'abccdef').split(b'c'), [b'ab', b'', b'def'])
s = x.split(b"ll")
self.assertTrue(s[0] == b"He")
self.assertTrue(s[1] == b"o Wor")
self.assertTrue(s[2] == b"ds")
self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",") == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0'])
self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",", -1) == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0'])
self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",", 2) == [b'1',b'2',b'3,4,5,6,7,8,9,0'])
self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--") == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0'])
self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--", -1) == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0'])
self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--", 2) == [b'1', b'2', b'3--4--5--6--7--8--9--0'])
self.assertEqual(testType(b"").split(None), [])
self.assertEqual(testType(b"ab").split(None), [b"ab"])
self.assertEqual(testType(b"a b").split(None), [b"a", b"b"])
self.assertEqual(bytearray(b' a bb c ').split(None, 1), [bytearray(b'a'), bytearray(b'bb c ')])
self.assertEqual(testType(b' ').split(), [])
self.assertRaises(TypeError, testType(b'').split, [2])
self.assertRaises(TypeError, testType(b'').split, [2], 2)
def test_splitlines(self):
for testType in types:
self.assertEqual(testType(b'foo\nbar\n').splitlines(), [b'foo', b'bar'])
self.assertEqual(testType(b'foo\nbar\n').splitlines(True), [b'foo\n', b'bar\n'])
self.assertEqual(testType(b'foo\r\nbar\r\n').splitlines(True), [b'foo\r\n', b'bar\r\n'])
self.assertEqual(testType(b'foo\r\nbar\r\n').splitlines(), [b'foo', b'bar'])
self.assertEqual(testType(b'foo\rbar\r').splitlines(True), [b'foo\r', b'bar\r'])
self.assertEqual(testType(b'foo\nbar\nbaz').splitlines(), [b'foo', b'bar', b'baz'])
self.assertEqual(testType(b'foo\nbar\nbaz').splitlines(True), [b'foo\n', b'bar\n', b'baz'])
self.assertEqual(testType(b'foo\r\nbar\r\nbaz').splitlines(True), [b'foo\r\n', b'bar\r\n', b'baz'])
self.assertEqual(testType(b'foo\rbar\rbaz').splitlines(True), [b'foo\r', b'bar\r', b'baz'])
def test_startswith(self):
for testType in types:
self.assertRaises(TypeError, testType(b'abcdef').startswith, [])
self.assertRaises(TypeError, testType(b'abcdef').startswith, [], 0)
self.assertRaises(TypeError, testType(b'abcdef').startswith, [], 0, 1)
self.assertEqual(testType(b"abcde").startswith(b'c', 2, 6), True)
self.assertEqual(testType(b"abc").startswith(b'c', 4, 6), False)
self.assertEqual(testType(b"abcde").startswith(b'cde', 2, 9), True)
self.assertEqual(testType(b'abc').startswith(b'abcd', 4), False)
self.assertEqual(testType(b'abc').startswith(b'abc', -3), True)
self.assertEqual(testType(b'abc').startswith(b'abc', -10), True)
self.assertEqual(testType(b'abc').startswith(b'abc', -3, 0), False)
self.assertEqual(testType(b'abc').startswith(b'abc', -10, 0), False)
self.assertEqual(testType(b'abc').startswith(b'abc', -10, -10), False)
self.assertEqual(testType(b'abc').startswith(b'ab', 0, -1), True)
self.assertEqual(testType(b'abc').startswith((b'abc', ), -10), True)
self.assertEqual(testType(b'abc').startswith((b'abc', ), 10), False)
self.assertEqual(testType(b'abc').startswith((b'abc', ), -10, 0), False)
self.assertEqual(testType(b'abc').startswith((b'abc', ), 10, 0), False)
self.assertEqual(testType(b'abc').startswith((b'abc', ), 1, -10), False)
self.assertEqual(testType(b'abc').startswith((b'abc', ), 1, -1), False)
self.assertEqual(testType(b'abc').startswith((b'abc', ), -1, -2), False)
self.assertEqual(testType(b'abc').startswith((b'abc', b'def')), True)
self.assertEqual(testType(b'abc').startswith((b'qrt', b'def')), False)
self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), -3), True)
self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), -3), False)
self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), 0), True)
self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), 0), False)
self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), -3, 3), True)
self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), -3, 3), False)
self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), 0, 3), True)
self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), 0, 3), False)
hw = testType(b"hello world")
self.assertTrue(hw.startswith(b"hello"))
self.assertTrue(not hw.startswith(b"heloo"))
self.assertTrue(hw.startswith(b"llo", 2))
self.assertTrue(not hw.startswith(b"lno", 2))
self.assertTrue(hw.startswith(b"wor", 6, 9))
self.assertTrue(not hw.startswith(b"wor", 6, 7))
self.assertTrue(not hw.startswith(b"wox", 6, 10))
self.assertTrue(not hw.startswith(b"wor", 6, 2))
def test_strip(self):
for testType in types:
self.assertEqual(testType(b'abc ').strip(), b'abc')
self.assertEqual(testType(b' abc').strip(), b'abc')
self.assertEqual(testType(b' abc ').strip(), b'abc')
self.assertEqual(testType(b' ').strip(), b'')
self.assertEqual(testType(b'abcx').strip(b'x'), b'abc')
self.assertEqual(testType(b'xabc').strip(b'x'), b'abc')
self.assertEqual(testType(b'xabcx').strip(b'x'), b'abc')
self.assertEqual(testType(b'x').strip(b'x'), b'')
x = b'abc'
self.assertEqual(id(x.strip()), id(x))
x = bytearray(x)
self.assertTrue(id(x.strip()) != id(x))
def test_swapcase(self):
expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \
b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \
b'&\'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`' \
b'ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \
b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \
b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \
b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \
b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \
b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \
b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \
b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'
data = bytearray()
for i in range(256):
data.append(i)
for testType in types:
self.assertEqual(testType(b'123').swapcase(), b'123')
b = testType(b'123')
self.assertTrue(id(b.swapcase()) != id(b))
self.assertEqual(testType(b'abc').swapcase(), b'ABC')
self.assertEqual(testType(b'ABC').swapcase(), b'abc')
self.assertEqual(testType(b'ABc').swapcase(), b'abC')
x = testType(data).swapcase()
self.assertEqual(testType(data).swapcase(), expected)
def test_title(self):
for testType in types:
self.assertEqual(testType(b'').title(), b'')
self.assertEqual(testType(b'foo').title(), b'Foo')
self.assertEqual(testType(b'Foo').title(), b'Foo')
self.assertEqual(testType(b'foo bar baz').title(), b'Foo Bar Baz')
for i in range(256):
b = bytearray()
b.append(i)
if (b >= b'a' and b <= b'z') or (b >= b'A' and b <= b'Z'):
continue
inp = testType(b.join([b'foo', b'bar', b'baz']))
exp = b.join([b'Foo', b'Bar', b'Baz'])
self.assertEqual(inp.title(), exp)
x = b''
self.assertEqual(id(x.title()), id(x))
x = bytearray(b'')
self.assertTrue(id(x.title()) != id(x))
def test_translate(self):
identTable = bytearray()
for i in range(256):
identTable.append(i)
repAtable = bytearray(identTable)
repAtable[ord('A')] = ord('B')
for testType in types:
self.assertRaises(TypeError, testType(b'').translate, {})
self.assertRaises(ValueError, testType(b'foo').translate, b'')
self.assertRaises(ValueError, testType(b'').translate, b'')
self.assertEqual(testType(b'AAA').translate(repAtable), b'BBB')
self.assertEqual(testType(b'AAA').translate(repAtable, b'A'), b'')
self.assertRaises(TypeError, b''.translate, identTable, None)
self.assertEqual(b'AAA'.translate(None, b'A'), b'')
self.assertEqual(b'AAABBB'.translate(None, b'A'), b'BBB')
self.assertEqual(b'AAA'.translate(None), b'AAA')
self.assertEqual(bytearray(b'AAA').translate(None, b'A'),
b'')
self.assertEqual(bytearray(b'AAA').translate(None),
b'AAA')
b = b'abc'
self.assertEqual(id(b.translate(None)), id(b))
b = b''
self.assertEqual(id(b.translate(identTable)), id(b))
b = b''
self.assertEqual(id(b.translate(identTable, b'')), id(b))
b = b''
self.assertEqual(id(b.translate(identTable, b'')), id(b))
if is_cli:
# CPython bug 4348 - http://bugs.python.org/issue4348
b = bytearray(b'')
self.assertTrue(id(b.translate(identTable)) != id(b))
self.assertRaises(TypeError, testType(b'').translate, [])
self.assertRaises(TypeError, testType(b'').translate, [], [])
def test_upper(self):
expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \
b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \
b'&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`' \
b'ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \
b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \
b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \
b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \
b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \
b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \
b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \
b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'
data = bytearray()
for i in range(256):
data.append(i)
for testType in types:
self.assertEqual(testType(data).upper(), expected)
def test_zfill(self):
for testType in types:
self.assertEqual(testType(b'abc').zfill(0), b'abc')
self.assertEqual(testType(b'abc').zfill(4), b'0abc')
self.assertEqual(testType(b'+abc').zfill(5), b'+0abc')
self.assertEqual(testType(b'-abc').zfill(5), b'-0abc')
self.assertEqual(testType(b'').zfill(2), b'00')
self.assertEqual(testType(b'+').zfill(2), b'+0')
self.assertEqual(testType(b'-').zfill(2), b'-0')
b = b'abc'
self.assertEqual(id(b.zfill(0)), id(b))
b = bytearray(b)
self.assertTrue(id(b.zfill(0)) != id(b))
def test_none(self):
for testType in types:
self.assertRaises(TypeError, testType(b'abc').replace, b"new")
self.assertRaises(TypeError, testType(b'abc').replace, b"new", 2)
self.assertRaises(TypeError, testType(b'abc').center, 0, None)
if str != bytes:
self.assertRaises(TypeError, testType(b'abc').fromhex, None)
self.assertRaises(TypeError, testType(b'abc').decode, 'ascii', None)
for fn in ['find', 'index', 'rfind', 'count', 'startswith', 'endswith']:
f = getattr(testType(b'abc'), fn)
self.assertRaises(TypeError, f, None)
self.assertRaises(TypeError, f, None, 0)
self.assertRaises(TypeError, f, None, 0, 2)
self.assertRaises(TypeError, testType(b'abc').replace, None, b'ef')
self.assertRaises(TypeError, testType(b'abc').replace, None, b'ef', 1)
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None)
self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None, 1)
def test_add_mul(self):
for testType in types:
self.assertRaises(TypeError, lambda: testType(b"a") + 3)
self.assertRaises(TypeError, lambda: 3 + testType(b"a"))
self.assertRaises(TypeError, lambda: "a" * "3")
self.assertRaises(OverflowError, lambda: "a" * (sys.maxsize + 1))
self.assertRaises(OverflowError, lambda: (sys.maxsize + 1) * "a")
class mylong(long): pass
# multiply
self.assertEqual("aaaa", "a" * 4)
self.assertEqual("aaaa", "a" * mylong(4))
self.assertEqual("aaa", "a" * 3)
self.assertEqual("a", "a" * True)
self.assertEqual("", "a" * False)
self.assertEqual("aaaa", 4 * "a")
self.assertEqual("aaaa", mylong(4) * "a")
self.assertEqual("aaa", 3 * "a")
self.assertEqual("a", True * "a")
self.assertEqual("", False * "a" )
# zero-length string
def test_empty_bytes(self):
for testType in types:
self.assertEqual(testType(b'').title(), b'')
self.assertEqual(testType(b'').capitalize(), b'')
self.assertEqual(testType(b'').count(b'a'), 0)
table = testType(b'10') * 128
self.assertEqual(testType(b'').translate(table), b'')
self.assertEqual(testType(b'').replace(b'a', b'ef'), b'')
self.assertEqual(testType(b'').replace(b'bc', b'ef'), b'')
self.assertEqual(testType(b'').split(), [])
self.assertEqual(testType(b'').split(b' '), [b''])
self.assertEqual(testType(b'').split(b'a'), [b''])
def test_encode_decode(self):
for testType in types:
self.assertEqual(testType(b'abc').decode(), 'abc')
def test_encode_decode_error(self):
for testType in types:
self.assertRaises(TypeError, testType(b'abc').decode, None)
def test_bytes_subclass(self):
for testType in types:
class customstring(testType):
def __str__(self): return 'xyz'
def __repr__(self): return 'foo'
def __hash__(self): return 42
def __mul__(self, count): return b'multiplied'
def __add__(self, other): return 23
def __len__(self): return 2300
def __contains__(self, value): return False
o = customstring(b'abc')
self.assertEqual(str(o), "xyz")
self.assertEqual(repr(o), "foo")
self.assertEqual(hash(o), 42)
self.assertEqual(o * 3, b'multiplied')
self.assertEqual(o + b'abc', 23)
self.assertEqual(len(o), 2300)
self.assertEqual(b'a' in o, False)
class custombytearray(bytearray):
def __init__(self, value):
bytearray.__init__(self)
self.assertEqual(custombytearray(42), bytearray())
class custombytearray(bytearray):
def __init__(self, value, **args):
bytearray.__init__(self)
self.assertEqual(custombytearray(42, x=42), bytearray())
def test_bytes_equals(self):
for testType in types:
x = testType(b'abc') == testType(b'abc')
y = testType(b'def') == testType(b'def')
self.assertEqual(id(x), id(y))
self.assertEqual(id(x), id(True))
x = testType(b'abc') != testType(b'abc')
y = testType(b'def') != testType(b'def')
self.assertEqual(id(x), id(y))
self.assertEqual(id(x), id(False))
x = testType(b'abcx') == testType(b'abc')
y = testType(b'defx') == testType(b'def')
self.assertEqual(id(x), id(y))
self.assertEqual(id(x), id(False))
x = testType(b'abcx') != testType(b'abc')
y = testType(b'defx') != testType(b'def')
self.assertEqual(id(x), id(y))
self.assertEqual(id(x), id(True))
def test_bytes_dict(self):
self.assertTrue('__init__' not in list(bytes.__dict__.keys()))
self.assertTrue('__init__' in list(bytearray.__dict__.keys()))
for testType in types:
extra_str_dict_keys = [ "__cmp__", "isdecimal", "isnumeric", "isunicode"] # "__radd__",
#It's OK that __getattribute__ does not show up in the __dict__. It is
#implemented.
self.assertTrue(hasattr(testType, "__getattribute__"), str(testType) + " has no __getattribute__ method")
for temp_key in extra_str_dict_keys:
self.assertTrue(not temp_key in list(testType.__dict__.keys()))
def test_bytes_to_numeric(self):
for testType in types:
class substring(testType):
def __int__(self): return 1
def __complex__(self): return 1j
def __float__(self): return 1.0
def __long__(self): return 1
class myfloat(float): pass
class mylong(long): pass
class myint(int): pass
class mycomplex(complex): pass
v = substring(b"123")
self.assertEqual(float(v), 1.0)
self.assertEqual(myfloat(v), 1.0)
self.assertEqual(type(myfloat(v)), myfloat)
self.assertEqual(int(v), 1)
self.assertEqual(mylong(v), 1)
self.assertEqual(type(mylong(v)), mylong)
self.assertEqual(int(v), 1)
self.assertEqual(myint(v), 1)
self.assertEqual(type(myint(v)), myint)
self.assertEqual(complex(v), 1j)
self.assertEqual(mycomplex(v), 1j)
class substring(testType): pass
v = substring(b"123")
self.assertEqual(int(v), 123)
self.assertEqual(int(v), 123)
self.assertEqual(float(v), 123.0)
self.assertEqual(mylong(v), 123)
self.assertEqual(type(mylong(v)), mylong)
self.assertEqual(myint(v), 123)
self.assertEqual(type(myint(v)), myint)
if testType == str:
# 2.6 allows this, 3.0 disallows this.
self.assertEqual(complex(v), 123+0j)
self.assertEqual(mycomplex(v), 123+0j)
else:
self.assertRaises(TypeError, complex, v)
self.assertRaises(TypeError, mycomplex, v)
def test_compares(self):
a = b'A'
b = b'B'
bb = b'BB'
aa = b'AA'
ab = b'AB'
ba = b'BA'
for testType in types:
for otherType in types:
self.assertEqual(testType(a) > otherType(b), False)
self.assertEqual(testType(a) < otherType(b), True)
self.assertEqual(testType(a) <= otherType(b), True)
self.assertEqual(testType(a) >= otherType(b), False)
self.assertEqual(testType(a) == otherType(b), False)
self.assertEqual(testType(a) != otherType(b), True)
self.assertEqual(testType(b) > otherType(a), True)
self.assertEqual(testType(b) < otherType(a), False)
self.assertEqual(testType(b) <= otherType(a), False)
self.assertEqual(testType(b) >= otherType(a), True)
self.assertEqual(testType(b) == otherType(a), False)
self.assertEqual(testType(b) != otherType(a), True)
self.assertEqual(testType(a) > otherType(a), False)
self.assertEqual(testType(a) < otherType(a), False)
self.assertEqual(testType(a) <= otherType(a), True)
self.assertEqual(testType(a) >= otherType(a), True)
self.assertEqual(testType(a) == otherType(a), True)
self.assertEqual(testType(a) != otherType(a), False)
self.assertEqual(testType(aa) > otherType(b), False)
self.assertEqual(testType(aa) < otherType(b), True)
self.assertEqual(testType(aa) <= otherType(b), True)
self.assertEqual(testType(aa) >= otherType(b), False)
self.assertEqual(testType(aa) == otherType(b), False)
self.assertEqual(testType(aa) != otherType(b), True)
self.assertEqual(testType(bb) > otherType(a), True)
self.assertEqual(testType(bb) < otherType(a), False)
self.assertEqual(testType(bb) <= otherType(a), False)
self.assertEqual(testType(bb) >= otherType(a), True)
self.assertEqual(testType(bb) == otherType(a), False)
self.assertEqual(testType(bb) != otherType(a), True)
self.assertEqual(testType(ba) > otherType(b), True)
self.assertEqual(testType(ba) < otherType(b), False)
self.assertEqual(testType(ba) <= otherType(b), False)
self.assertEqual(testType(ba) >= otherType(b), True)
self.assertEqual(testType(ba) == otherType(b), False)
self.assertEqual(testType(ba) != otherType(b), True)
self.assertEqual(testType(ab) > otherType(a), True)
self.assertEqual(testType(ab) < otherType(a), False)
self.assertEqual(testType(ab) <= otherType(a), False)
self.assertEqual(testType(ab) >= otherType(a), True)
self.assertEqual(testType(ab) == otherType(a), False)
self.assertEqual(testType(ab) != otherType(a), True)
self.assertEqual(testType(ab) == [], False)
self.assertRaises(TypeError, lambda: testType(a) > None)
self.assertRaises(TypeError, lambda: testType(a) < None)
self.assertRaises(TypeError, lambda: testType(a) <= None)
self.assertRaises(TypeError, lambda: testType(a) >= None)
self.assertRaises(TypeError, lambda: None > testType(a))
self.assertRaises(TypeError, lambda: None < testType(a))
self.assertRaises(TypeError, lambda: None <= testType(a))
self.assertRaises(TypeError, lambda: None >= testType(a))
def test_bytearray(self):
self.assertRaises(TypeError, hash, bytearray(b'abc'))
self.assertRaises(TypeError, bytearray(b'').__setitem__, None, b'abc')
self.assertRaises(TypeError, bytearray(b'').__delitem__, None)
x = bytearray(b'abc')
del x[-1]
self.assertEqual(x, b'ab')
def f():
x = bytearray(b'abc')
x[0:2] = [1j]
self.assertRaises(TypeError, f)
x = bytearray(b'abc')
x[0:1] = [ord('d')]
self.assertEqual(x, b'dbc')
x = bytearray(b'abc')
x[0:3] = x
self.assertEqual(x, b'abc')
x = bytearray(b'abc')
del x[0]
self.assertEqual(x, b'bc')
x = bytearray(b'abc')
x += b'foo'
self.assertEqual(x, b'abcfoo')
b = bytearray(b"abc")
b1 = b
b += b"def"
self.assertEqual(b1, b)
x = bytearray(b'abc')
x += bytearray(b'foo')
self.assertEqual(x, b'abcfoo')
x = bytearray(b'abc')
x *= 2
self.assertEqual(x, b'abcabc')
x = bytearray(b'abcdefghijklmnopqrstuvwxyz')
x[25:1] = b'x' * 24
self.assertEqual(x, b'abcdefghijklmnopqrstuvwxyxxxxxxxxxxxxxxxxxxxxxxxxz')
x = bytearray(b'abcdefghijklmnopqrstuvwxyz')
x[25:0] = b'x' * 25
self.assertEqual(x, b'abcdefghijklmnopqrstuvwxyxxxxxxxxxxxxxxxxxxxxxxxxxz')
tests = ( ((0, 3, None), b'abc', b''),
((0, 2, None), b'abc', b'c'),
((4, 0, 2), b'abc', b'abc'),
((3, 0, 2), b'abc', b'abc'),
((3, 0, -2), b'abc', b'ab'),
((0, 3, 1), b'abc', b''),
((0, 2, 1), b'abc', b'c'),
((0, 3, 2), b'abc', b'b'),
((0, 2, 2), b'abc', b'bc'),
((0, 3, -1), b'abc', b'abc'),
((0, 2, -1), b'abc', b'abc'),
((3, 0, -1), b'abc', b'a'),
((2, 0, -1), b'abc', b'a'),
((4, 2, -1), b'abcdef', b'abcf'),
)
for indexes, input, result in tests:
x = bytearray(input)
if indexes[2] == None:
del x[indexes[0] : indexes[1]]
self.assertEqual(x, result)
else:
del x[indexes[0] : indexes[1] : indexes[2]]
self.assertEqual(x, result)
class myint(int): pass
class intobj(object):
def __int__(self):
return 42
x = bytearray(b'abe')
x[-1] = ord('a')
self.assertEqual(x, b'aba')
x[-1] = IndexableOC(ord('r'))
self.assertEqual(x, b'abr')
x[-1] = Indexable(ord('s'))
self.assertEqual(x, b'abs')
def f(): x[-1] = IndexableOC(256)
self.assertRaises(ValueError, f)
def f(): x[-1] = Indexable(256)
self.assertRaises(ValueError, f)
x[-1] = ord(b'b')
self.assertEqual(x, b'abb')
x[-1] = myint(ord('c'))
self.assertEqual(x, b'abc')
with self.assertRaises(TypeError):
x[0:1] = 2
x = bytearray(b'abc')
x[0:1] = [2]*2
self.assertEqual(x, b'\x02\x02bc')
x[0:2] = b'a'
self.assertEqual(x, b'abc')
x[0:1] = b'd'
self.assertEqual(x, b'dbc')
x[0:1] = [myint(3)]*3
self.assertEqual(x, b'\x03\x03\x03bc')
x[0:3] = [ord('a'), ord('b'), ord('c')]
self.assertEqual(x, b'abcbc')
with self.assertRaises(TypeError):
x[0:1] = [intobj()]
for setval in [[b'b', b'a', b'r'], (b'b', b'a', b'r'), (98, b'a', b'r'), (Indexable(98), b'a', b'r'), (IndexableOC(98), b'a', b'r')]:
with self.assertRaises(TypeError):
x[0:3] = setval
for setval in [b'bar', bytearray(b'bar'), [98, 97, 114], (98, 97, 114), (Indexable(98), 97, 114), (IndexableOC(98), 97, 114)]:
x = bytearray(b'abc')
x[0:3] = setval
self.assertEqual(x, b'bar')
x = bytearray(b'abc')
x[1:4] = setval
self.assertEqual(x, b'abar')
x = bytearray(b'abc')
x[0:2] = setval
self.assertEqual(x, b'barc')
x = bytearray(b'abc')
x[4:0:2] = setval[-1:-1]
self.assertEqual(x, b'abc')
x = bytearray(b'abc')
x[3:0:2] = setval[-1:-1]
self.assertEqual(x, b'abc')
x = bytearray(b'abc')
x[3:0:-2] = setval[-1:-1]
self.assertEqual(x, b'ab')
x = bytearray(b'abc')
x[3:0:-2] = setval[0:-2]
self.assertEqual(x, b'abb')
x = bytearray(b'abc')
x[0:3:1] = setval
self.assertEqual(x, b'bar')
x = bytearray(b'abc')
x[0:2:1] = setval
self.assertEqual(x, b'barc')
x = bytearray(b'abc')
x[0:3:2] = setval[0:-1]
self.assertEqual(x, b'bba')
x = bytearray(b'abc')
x[0:2:2] = setval[0:-2]
self.assertEqual(x, b'bbc')
x = bytearray(b'abc')
x[0:3:-1] = setval[-1:-1]
self.assertEqual(x, b'abc')
x = bytearray(b'abc')
x[0:2:-1] = setval[-1:-1]
self.assertEqual(x, b'abc')
x = bytearray(b'abc')
x[3:0:-1] = setval[0:-1]
self.assertEqual(x, b'aab')
x = bytearray(b'abc')
x[2:0:-1] = setval[0:-1]
self.assertEqual(x, b'aab')
x = bytearray(b'abcdef')
def f():x[0:6:2] = b'a'
self.assertRaises(ValueError, f)
self.assertEqual(bytearray(source=b'abc'), bytearray(b'abc'))
self.assertEqual(bytearray(source=2), bytearray(b'\x00\x00'))
self.assertEqual(bytearray(b'abc').__alloc__(), 4)
self.assertEqual(bytearray().__alloc__(), 0)
def test_bytes(self):
self.assertEqual(hash(b'abc'), hash(b'abc'))
self.assertEqual(b'abc', B'abc')
def test_operators(self):
for testType in types:
self.assertRaises(TypeError, lambda : testType(b'abc') * None)
self.assertRaises(TypeError, lambda : testType(b'abc') + None)
self.assertRaises(TypeError, lambda : None * testType(b'abc'))
self.assertRaises(TypeError, lambda : None + testType(b'abc'))
self.assertEqual(testType(b'abc') * 2, b'abcabc')
self.assertEqual(testType(b'abc')[0], ord('a'))
self.assertEqual(testType(b'abc')[-1], ord('c'))
for otherType in types:
self.assertEqual(testType(b'abc') + otherType(b'def'), b'abcdef')
resType = type(testType(b'abc') + otherType(b'def'))
if testType == bytearray:
self.assertEqual(resType, bytearray)
else:
self.assertEqual(resType, bytes)
self.assertEqual(b'ab' in testType(b'abcd'), True)
# 2.6 doesn't allow this for testType=bytes, so test for 3.0 in this case
if testType is not bytes or hasattr(bytes, '__iter__'):
self.assertEqual(ord(b'a') in testType(b'abcd'), True)
self.assertRaises(ValueError, lambda : 256 in testType(b'abcd'))
x = b'abc'
self.assertEqual(x * 1, x)
self.assertEqual(1 * x, x)
self.assertEqual(id(x), id(x * 1))
self.assertEqual(id(x), id(1 * x))
x = bytearray(b'abc')
self.assertEqual(x * 1, x)
self.assertEqual(1 * x, x)
self.assertTrue(id(x) != id(x * 1))
self.assertTrue(id(x) != id(1 * x))
def test_init(self):
for testType in types:
if testType != str: # skip on Cpy 2.6 for str type
self.assertRaises(TypeError, testType, None, 'ascii')
self.assertRaises(TypeError, testType, 'abc', None)
self.assertRaises(TypeError, testType, [None])
self.assertEqual(testType('abc', 'ascii'), b'abc')
self.assertEqual(testType(0), b'')
self.assertEqual(testType(5), b'\x00\x00\x00\x00\x00')
self.assertRaises(ValueError, testType, [256])
self.assertRaises(ValueError, testType, [257])
testType(list(range(256)))
def f():
yield 42
self.assertEqual(bytearray(f()), b'*')
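        # bytearray() accepts any iterable of ints in range(0, 256), including a
        # generator; 42 == ord('*'), hence the b'*' result above.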
def test_slicing(self):
for testType in types:
self.assertEqual(testType(b'abc')[0:3], b'abc')
self.assertEqual(testType(b'abc')[0:2], b'ab')
self.assertEqual(testType(b'abc')[3:0:2], b'')
self.assertEqual(testType(b'abc')[3:0:2], b'')
self.assertEqual(testType(b'abc')[3:0:-2], b'c')
self.assertEqual(testType(b'abc')[3:0:-2], b'c')
self.assertEqual(testType(b'abc')[0:3:1], b'abc')
self.assertEqual(testType(b'abc')[0:2:1], b'ab')
self.assertEqual(testType(b'abc')[0:3:2], b'ac')
self.assertEqual(testType(b'abc')[0:2:2], b'a')
self.assertEqual(testType(b'abc')[0:3:-1], b'')
self.assertEqual(testType(b'abc')[0:2:-1], b'')
self.assertEqual(testType(b'abc')[3:0:-1], b'cb')
self.assertEqual(testType(b'abc')[2:0:-1], b'cb')
self.assertRaises(TypeError, testType(b'abc').__getitem__, None)
def test_ord(self):
for testType in types:
self.assertEqual(ord(testType(b'a')), 97)
self.assertRaisesPartialMessage(TypeError, "expected a character, but string of length 2 found", ord, testType(b'aa'))
def test_pickle(self):
import pickle
for testType in types:
self.assertEqual(pickle.loads(pickle.dumps(testType(list(range(256))))), testType(list(range(256))))
@skipUnlessIronPython()
def test_zzz_cli_features(self):
import System
import clr
clr.AddReference('Microsoft.Dynamic')
import Microsoft
for testType in types:
self.assertEqual(testType(b'abc').Count, 3)
self.assertEqual(bytearray(b'abc').Contains(ord('a')), True)
self.assertEqual(list(System.Collections.IEnumerable.GetEnumerator(bytearray(b'abc'))), [ord('a'), ord('b'), ord('c')])
self.assertEqual(testType(b'abc').IndexOf(ord('a')), 0)
self.assertEqual(testType(b'abc').IndexOf(ord('d')), -1)
myList = System.Collections.Generic.List[System.Byte]()
myList.Add(ord('a'))
myList.Add(ord('b'))
myList.Add(ord('c'))
self.assertEqual(testType(b'').join([myList]), b'abc')
# bytearray
'''
self.assertEqual(bytearray(b'abc') == 'abc', False)
if not is_net40:
self.assertEqual(Microsoft.Scripting.IValueEquality.ValueEquals(bytearray(b'abc'), 'abc'), False)
'''
self.assertEqual(bytearray(b'abc') == b'abc', True)
self.assertEqual(b'abc'.IsReadOnly, True)
self.assertEqual(bytearray(b'abc').IsReadOnly, False)
self.assertEqual(bytearray(b'abc').Remove(ord('a')), True)
self.assertEqual(bytearray(b'abc').Remove(ord('d')), False)
x = bytearray(b'abc')
x.Clear()
self.assertEqual(x, b'')
x.Add(ord('a'))
self.assertEqual(x, b'a')
self.assertEqual(x.IndexOf(ord('a')), 0)
self.assertEqual(x.IndexOf(ord('b')), -1)
x.Insert(0, ord('b'))
self.assertEqual(x, b'ba')
x.RemoveAt(0)
self.assertEqual(x, b'a')
System.Collections.Generic.IList[System.Byte].__setitem__(x, 0, ord('b'))
self.assertEqual(x, b'b')
# bytes
self.assertRaises(System.InvalidOperationException, b'abc'.Remove, ord('a'))
self.assertRaises(System.InvalidOperationException, b'abc'.Remove, ord('d'))
self.assertRaises(System.InvalidOperationException, b'abc'.Clear)
self.assertRaises(System.InvalidOperationException, b'abc'.Add, ord('a'))
self.assertRaises(System.InvalidOperationException, b'abc'.Insert, 0, ord('b'))
self.assertRaises(System.InvalidOperationException, b'abc'.RemoveAt, 0)
self.assertRaises(System.InvalidOperationException, System.Collections.Generic.IList[System.Byte].__setitem__, b'abc', 0, ord('b'))
lst = System.Collections.Generic.List[System.Byte]()
lst.Add(42)
self.assertEqual(ord(lst), 42)
lst.Add(42)
self.assertRaisesMessage(TypeError, "expected a character, but string of length 2 found", ord, lst)
def test_bytes_hashing(self):
"""test interaction of bytes w/ hashing modules"""
import hashlib
for hashLib in (hashlib.sha1, hashlib.sha256, hashlib.sha512, hashlib.sha384, hashlib.md5):
x = hashLib(b'abc')
x.update(b'abc')
#For now just make sure this doesn't throw
temp = hashLib(bytearray(b'abc'))
x.update(bytearray(b'abc'))
def test_add(self):
self.assertEqual(bytearray(b"abc") + memoryview(b"def"), b"abcdef")
run_test(__name__)
| 43.625
| 153
| 0.553585
|
870195ede500f56ef184fb76a06c2b31374b2ea0
| 291
|
py
|
Python
|
config.py
|
azukacchi/twitter_autobase
|
5a6317f611388efa2668f680fe368b48cfaeb18a
|
[
"MIT"
] | 10
|
2020-11-02T04:19:34.000Z
|
2022-03-16T08:09:06.000Z
|
config.py
|
azukacchi/twitter_autobase
|
5a6317f611388efa2668f680fe368b48cfaeb18a
|
[
"MIT"
] | null | null | null |
config.py
|
azukacchi/twitter_autobase
|
5a6317f611388efa2668f680fe368b48cfaeb18a
|
[
"MIT"
] | 7
|
2020-10-01T14:26:44.000Z
|
2022-01-15T16:17:34.000Z
|
consumer_key = "YOUR API KEY HERE"
consumer_secret = "YOUR API KEY SECRET HERE"
access_token = "YOUR ACCESS TOKEN HERE"
access_token_secret = "YOUR ACCESS TOKEN SECRET HERE"
trigger = "triggerword"
timezone = 7 # change this to your timezone; for example, if your timezone is GMT+7, put 7
| 48.5
| 93
| 0.776632
|
3124e6cf9c542547ded61787be5441d1d53f604a
| 5,793
|
py
|
Python
|
src/tests/catwalk_tests/test_utils.py
|
adunmore/triage
|
51f4e5bb73740378d22de16de4b15c78a1feea7b
|
[
"MIT"
] | null | null | null |
src/tests/catwalk_tests/test_utils.py
|
adunmore/triage
|
51f4e5bb73740378d22de16de4b15c78a1feea7b
|
[
"MIT"
] | null | null | null |
src/tests/catwalk_tests/test_utils.py
|
adunmore/triage
|
51f4e5bb73740378d22de16de4b15c78a1feea7b
|
[
"MIT"
] | null | null | null |
from triage.component.catwalk.utils import (
filename_friendly_hash,
save_experiment_and_get_hash,
associate_models_with_experiment,
associate_matrices_with_experiment,
missing_model_hashes,
missing_matrix_uuids,
sort_predictions_and_labels,
)
from triage.component.results_schema.schema import Matrix, Model
from triage.component.catwalk.db import ensure_db
from sqlalchemy import create_engine
import testing.postgresql
import datetime
import re
import numpy
from numpy.testing import assert_array_equal
import pytest
def test_filename_friendly_hash():
data = {
"stuff": "stuff",
"other_stuff": "more_stuff",
"a_datetime": datetime.datetime(2015, 1, 1),
"a_date": datetime.date(2016, 1, 1),
"a_number": 5.0,
}
output = filename_friendly_hash(data)
assert isinstance(output, str)
assert re.match(r"^[\w]+$", output) is not None
# make sure ordering keys differently doesn't change the hash
new_output = filename_friendly_hash(
{
"other_stuff": "more_stuff",
"stuff": "stuff",
"a_datetime": datetime.datetime(2015, 1, 1),
"a_date": datetime.date(2016, 1, 1),
"a_number": 5.0,
}
)
assert new_output == output
# make sure new data hashes to something different
new_output = filename_friendly_hash({"stuff": "stuff", "a_number": 5.0})
assert new_output != output
def test_filename_friendly_hash_stability():
nested_data = {"one": "two", "three": {"four": "five", "six": "seven"}}
output = filename_friendly_hash(nested_data)
# 1. we want to make sure this is stable across different runs
# so hardcode an expected value
assert output == "9a844a7ebbfd821010b1c2c13f7391e6"
other_nested_data = {"one": "two", "three": {"six": "seven", "four": "five"}}
new_output = filename_friendly_hash(other_nested_data)
assert output == new_output
def test_save_experiment_and_get_hash():
# no reason to make assertions on the config itself, use a basic dict
experiment_config = {"one": "two"}
with testing.postgresql.Postgresql() as postgresql:
engine = create_engine(postgresql.url())
ensure_db(engine)
exp_hash = save_experiment_and_get_hash(experiment_config, engine)
assert isinstance(exp_hash, str)
new_hash = save_experiment_and_get_hash(experiment_config, engine)
assert new_hash == exp_hash
def test_missing_model_hashes():
with testing.postgresql.Postgresql() as postgresql:
db_engine = create_engine(postgresql.url())
ensure_db(db_engine)
experiment_hash = save_experiment_and_get_hash({}, db_engine)
model_hashes = ['abcd', 'bcde', 'cdef']
# if we associate model hashes with an experiment but don't actually train the models
# they should show up as missing
associate_models_with_experiment(experiment_hash, model_hashes, db_engine)
assert missing_model_hashes(experiment_hash, db_engine) == model_hashes
# if we insert a model row they should no longer be considered missing
db_engine.execute(
f"insert into {Model.__table__.fullname} (model_hash) values (%s)",
model_hashes[0]
)
assert missing_model_hashes(experiment_hash, db_engine) == model_hashes[1:]
def test_missing_matrix_uuids():
with testing.postgresql.Postgresql() as postgresql:
db_engine = create_engine(postgresql.url())
ensure_db(db_engine)
experiment_hash = save_experiment_and_get_hash({}, db_engine)
matrix_uuids = ['abcd', 'bcde', 'cdef']
# if we associate matrix uuids with an experiment but don't actually build the matrices
# they should show up as missing
associate_matrices_with_experiment(experiment_hash, matrix_uuids, db_engine)
assert missing_matrix_uuids(experiment_hash, db_engine) == matrix_uuids
# if we insert a matrix row they should no longer be considered missing
db_engine.execute(
f"insert into {Matrix.__table__.fullname} (matrix_uuid) values (%s)",
matrix_uuids[0]
)
assert missing_matrix_uuids(experiment_hash, db_engine) == matrix_uuids[1:]
def test_sort_predictions_and_labels():
predictions = numpy.array([0.5, 0.4, 0.6, 0.5])
labels = numpy.array([0, 0, 1, 1])
# best sort
sorted_predictions, sorted_labels = sort_predictions_and_labels(
predictions, labels, tiebreaker='best'
)
assert_array_equal(sorted_predictions, numpy.array([0.6, 0.5, 0.5, 0.4]))
assert_array_equal(sorted_labels, numpy.array([1, 1, 0, 0]))
    # worst sort
sorted_predictions, sorted_labels = sort_predictions_and_labels(
predictions, labels, tiebreaker='worst'
)
assert_array_equal(sorted_predictions, numpy.array([0.6, 0.5, 0.5, 0.4]))
assert_array_equal(sorted_labels, numpy.array([1, 0, 1, 0]))
# random tiebreaker needs a seed
with pytest.raises(ValueError):
sort_predictions_and_labels(predictions, labels, tiebreaker='random')
# random tiebreaker respects the seed
sorted_predictions, sorted_labels = sort_predictions_and_labels(
predictions,
labels,
tiebreaker='random',
sort_seed=1234
)
assert_array_equal(sorted_predictions, numpy.array([0.6, 0.5, 0.5, 0.4]))
assert_array_equal(sorted_labels, numpy.array([1, 1, 0, 0]))
sorted_predictions, sorted_labels = sort_predictions_and_labels(
predictions,
labels,
tiebreaker='random',
sort_seed=24376234
)
assert_array_equal(sorted_predictions, numpy.array([0.6, 0.5, 0.5, 0.4]))
assert_array_equal(sorted_labels, numpy.array([1, 0, 1, 0]))
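# In short: with predictions [0.5, 0.4, 0.6, 0.5] and labels [0, 0, 1, 1], the two
# tied 0.5 scores are ordered positive-label-first under tiebreaker='best'
# (labels [1, 1, 0, 0]) and positive-label-last under 'worst' ([1, 0, 1, 0]);
# the 'random' tiebreaker requires an explicit sort_seed and breaks ties according to it.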
| 37.134615
| 95
| 0.689625
|
ef0caff0ae6ff653566417238cad39f602dc2a8d
| 2,250
|
py
|
Python
|
tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.py
|
devinlife/tensorflow
|
1445444c15a396410f25ae91b7d1c19d724e2afc
|
[
"Apache-2.0"
] | 2
|
2020-09-18T17:14:16.000Z
|
2021-03-24T11:39:13.000Z
|
tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.py
|
devinlife/tensorflow
|
1445444c15a396410f25ae91b7d1c19d724e2afc
|
[
"Apache-2.0"
] | 5
|
2021-08-25T16:16:44.000Z
|
2022-02-10T02:04:51.000Z
|
tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.py
|
devinlife/tensorflow
|
1445444c15a396410f25ae91b7d1c19d724e2afc
|
[
"Apache-2.0"
] | 1
|
2018-09-05T14:48:36.000Z
|
2018-09-05T14:48:36.000Z
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""XLA LocalClient interface for interacting with TPUs via the TPU driver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
from tensorflow.compiler.xla.python.tpu_driver.client import tpu_client_extension as _tpu_client
class TpuBackend(object):
"""XLA backend implemented using the Tpu driver API."""
# Cache the backends to prevent double driver initializations.
_local_backend = None
@staticmethod
def create(worker=None, force=False):
"""Constructs a Cloud TPU backend."""
# `force` == True will skip caching any backends (if applicable) and will
# always try to create a new client.
if worker is None:
raise ValueError(
'Failed to create TpuBackend. The `worker` parameter must not be '
'`None`. Use `local` to connect to a local TPU or '
'`grpc://host:port` to connect to a remote TPU.')
if worker == 'local' or 'local://' in worker:
# We usually want to cache for local backends to prevent double
# initialization, except where `force` == True.
if worker == 'local':
worker = 'local://'
if force:
return _tpu_client.TpuClient.Get(worker)
if TpuBackend._local_backend is None:
logging.info('Starting the local TPU driver.')
TpuBackend._local_backend = _tpu_client.TpuClient.Get(worker)
return TpuBackend._local_backend
else:
# We do not cache for non-local backends.
return _tpu_client.TpuClient.Get(worker)
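# A typical call, following the checks above: TpuBackend.create(worker='local') for a
# locally attached TPU, or TpuBackend.create(worker='grpc://host:port') for a remote one;
# only the local backend is cached between calls, and force=True bypasses that cache.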
| 39.473684
| 96
| 0.692
|
26b98da149e26ec4f1e7f6bc829cd5d9580f2b4d
| 2,793
|
py
|
Python
|
HW2/train_linear_euclidean.py
|
a0919610611/NCTU-ML-2016
|
ba5e77868cb942026c56c4e48718feb36022a1e8
|
[
"MIT"
] | null | null | null |
HW2/train_linear_euclidean.py
|
a0919610611/NCTU-ML-2016
|
ba5e77868cb942026c56c4e48718feb36022a1e8
|
[
"MIT"
] | null | null | null |
HW2/train_linear_euclidean.py
|
a0919610611/NCTU-ML-2016
|
ba5e77868cb942026c56c4e48718feb36022a1e8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from parse_data import get_data
from draw_confusion_matrix import plot_confusion_matrix
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import confusion_matrix, accuracy_score
import time
from sklearn.neighbors import NearestNeighbors, KNeighborsClassifier, DistanceMetric
from sklearn.model_selection import KFold
if __name__ == '__main__':
data = get_data()
X = data['feature_matrix']
Y = data['target_vector']
KNN_rs = KNeighborsClassifier(n_neighbors=5, algorithm='brute', metric='euclidean')
start_time = time.time()
KNN_rs.fit(X, Y)
used_time = time.time() - start_time
print("Linear Search using euclidean distance with resubstitution method training time is %s seconds" % used_time)
start_time = time.time()
predicted_rs = KNN_rs.predict(X)
used_time = time.time() - start_time
print("Linear Search using euclidean distance with resubstitution method querying time is %s seconds" % used_time)
cfm_rs = confusion_matrix(Y, predicted_rs, labels=range(1, 11))
print("Linear Search using euclidean distance with resubstitution method confusion matrix is ")
print(cfm_rs)
ac_rate = accuracy_score(Y, predicted_rs)
    print("Linear Search using euclidean distance with resubstitution method accuracy score is %s" % ac_rate)
plt.figure()
plot_confusion_matrix(cfm_rs, classes=range(1, 11),
title='Linear Search using euclidean distance with resubstitution method')
k = 20
kf = KFold(n_splits=k, shuffle=True)
kf.get_n_splits(X)
cfm_kfold = np.zeros(shape=(10, 10,), dtype=np.int64)
i = 1
for train_index, test_index in kf.split(X):
X_train, X_test = X[train_index], X[test_index]
Y_train, Y_test = Y[train_index], Y[test_index]
clf_KFold = KNeighborsClassifier(n_neighbors=5, algorithm='brute', metric='euclidean')
start_time = time.time()
clf_KFold.fit(X_train, Y_train)
used_time = time.time() - start_time
        print("%d fold training time is %s" % (i, used_time))
start_time = time.time()
predicted_thisFold = clf_KFold.predict(X_test)
used_time = time.time() - start_time
print("%d fold query time is %s" % (i, used_time))
cfm_thisFold = confusion_matrix(Y_test, predicted_thisFold, labels=range(1, 11))
print("%d fold confusion matrix is" % i)
print(cfm_thisFold)
print("%d fold ac rate is %s" % (i, accuracy_score(Y_test, predicted_thisFold)))
cfm_kfold += cfm_thisFold
i += 1
plt.figure()
plot_confusion_matrix(cfm_kfold, classes=range(1, 11),
title='Linear Search using euclidean distance with KFold method')
plt.show()
| 45.786885
| 118
| 0.693161
|
4da26c83f6007868d7084e38ba476002fcc9045d
| 1,830
|
py
|
Python
|
ARIMA2 - Temperature Difference.py
|
mcvenkat/Python-Programs
|
2ff66bbd5b07c8e093b11360e1dcac06740a5024
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
ARIMA2 - Temperature Difference.py
|
mcvenkat/Python-Programs
|
2ff66bbd5b07c8e093b11360e1dcac06740a5024
|
[
"CC0-1.0",
"MIT"
] | 5
|
2020-05-22T14:10:02.000Z
|
2022-03-25T19:13:05.000Z
|
ARIMA2 - Temperature Difference.py
|
mcvenkat/Python-Programs
|
2ff66bbd5b07c8e093b11360e1dcac06740a5024
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 29 11:24:49 2020
@author: 766810
"""
# line plot of time series
from pandas import read_csv
from matplotlib import pyplot
# load dataset
series = read_csv('daily-min-temperatures.csv', header=0, index_col=0)
# display first few rows
print(series.head(20))
# line plot of dataset
series.plot()
pyplot.show()
# split the dataset
from pandas import read_csv
series = read_csv('daily-min-temperatures.csv', header=0, index_col=0)
split_point = len(series) - 7
dataset, validation = series[0:split_point], series[split_point:]
print('Dataset %d, Validation %d' % (len(dataset), len(validation)))
dataset.to_csv('dataset.csv', index=False)
validation.to_csv('validation.csv', index=False)
#Develop Model
# create a differenced series
from pandas import read_csv
from statsmodels.tsa.arima_model import ARIMA
import numpy
# create a differenced series
def difference(dataset, interval=1):
diff = list()
for i in range(interval, len(dataset)):
value = dataset[i] - dataset[i - interval]
diff.append(value)
return numpy.array(diff)
# invert differenced value
def inverse_difference(history, yhat, interval=1):
return yhat + history[-interval]
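# Small worked example of the two helpers above (using interval=2 rather than the 365
# used below): difference([1, 2, 4, 7], 2) -> [4-1, 7-2] == [3, 5], and a forecast yhat
# is mapped back to the original scale via inverse_difference(history, yhat, 2),
# i.e. yhat + history[-2].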
# load dataset
series = read_csv('dataset.csv', header=None)
# seasonal difference
X = series.values
days_in_year = 365
differenced = difference(X, days_in_year)
# fit model
model = ARIMA(differenced, order=(7,0,1))
model_fit = model.fit(disp=0)
# multi-step out-of-sample forecast
forecast = model_fit.forecast(steps=7)[0]
# invert the differenced forecast to something usable
history = [x for x in X]
day = 1
for yhat in forecast:
inverted = inverse_difference(history, yhat, days_in_year)
print('Day %d: %f' % (day, inverted))
history.append(inverted)
day += 1
| 27.727273
| 71
| 0.718579
|
ad20698d92e0e6d8f450749e1e10f802c17ab1bb
| 4,026
|
py
|
Python
|
cripts/services/analysis_result.py
|
lakiw/cripts
|
43f62891a3724e1ec60629887d97c421fb302163
|
[
"MIT"
] | 2
|
2017-04-06T12:26:11.000Z
|
2018-11-05T19:17:15.000Z
|
cripts/services/analysis_result.py
|
lakiw/cripts
|
43f62891a3724e1ec60629887d97c421fb302163
|
[
"MIT"
] | 9
|
2016-09-28T10:19:10.000Z
|
2017-02-24T17:58:43.000Z
|
cripts/services/analysis_result.py
|
lakiw/cripts
|
43f62891a3724e1ec60629887d97c421fb302163
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from mongoengine import Document, StringField, ListField, EmbeddedDocument
from mongoengine import DynamicEmbeddedDocument, DynamicField, UUIDField
from mongoengine import DictField, EmbeddedDocumentField, BooleanField
from cripts.core.cripts_mongoengine import CriptsDocument, CriptsSchemaDocument
from cripts.core.cripts_mongoengine import CriptsDocumentFormatter
# Embedded Documents common to most classes
class AnalysisConfig(DynamicEmbeddedDocument, CriptsDocumentFormatter):
"""
Embedded Analysis Configuration dictionary.
"""
meta = {}
class EmbeddedAnalysisResultLog(EmbeddedDocument, CriptsDocumentFormatter):
"""
Log entry for a service run.
"""
message = StringField()
#TODO: this should be a datetime object
datetime = StringField()
level = StringField()
class AnalysisResult(CriptsDocument, CriptsSchemaDocument, CriptsDocumentFormatter,
Document):
"""
Analysis Result from running an analytic service.
"""
meta = {
"cripts_type": "AnalysisResult",
"collection": settings.COL_ANALYSIS_RESULTS,
"latest_schema_version": 1,
"schema_doc": {
'analyst': 'Analyst who ran the service.',
'analysis_id': 'Unique ID for this service execution.',
'analysis_type': 'Type of analysis this is.',
'config': 'Configuration options used for this execution.',
'distributed': 'Distributed for this execution.',
'finish_date': 'Date execution finished.',
'log': 'Log entries for this execution.',
'object_type': 'Type of TLO this is for.',
'object_id': 'ObjectId of the TLO.',
'results': 'Analysis results.',
'service_name': 'Name of the service.',
'source': 'Source of the service.',
'start_date': 'Date execution started.',
'status': 'Status of the execution.',
'template': 'Custom template to render results.',
'version': 'Version of the service used.',
},
"jtable_opts": {
'details_url': 'cripts.services.views.analysis_result',
'details_url_key': 'id',
'default_sort': "start_date DESC",
'searchurl': 'cripts.services.views.analysis_results_listing',
'fields': [ "object_type", "service_name", "version",
"start_date", "finish_date", "results",
"object_id", "id"],
'jtopts_fields': [ "details",
"object_type",
"service_name",
"version",
"start_date",
"finish_date",
"results",
"id"],
'hidden_fields': ["object_id", "id"],
'linked_fields': [ "object_type", "service_name" ],
'details_link': 'details',
'no_sort': ['details']
}
}
#TODO: these should be datetime objects, not strings
analyst = StringField()
analysis_id = UUIDField(binary=False)
analysis_type = StringField(db_field="type")
config = EmbeddedDocumentField(AnalysisConfig)
distributed = BooleanField()
finish_date = StringField()
log = ListField(EmbeddedDocumentField(EmbeddedAnalysisResultLog))
object_type = StringField(required=True)
object_id = StringField(required=True)
results = ListField(DynamicField(DictField))
service_name = StringField()
source = StringField()
start_date = StringField()
status = StringField()
template = StringField()
version = StringField()
| 41.081633
| 87
| 0.568058
|
8db1382bcd397928ccc70745510e86baeda6501f
| 3,692
|
py
|
Python
|
examples/basics/visuals/visual_filters.py
|
lcampagn/vispy
|
28c25d6904d697cde9bb4c37909bc3f934621134
|
[
"BSD-3-Clause"
] | 1
|
2015-12-03T02:03:50.000Z
|
2015-12-03T02:03:50.000Z
|
examples/basics/visuals/visual_filters.py
|
lcampagn/vispy
|
28c25d6904d697cde9bb4c37909bc3f934621134
|
[
"BSD-3-Clause"
] | 19
|
2015-06-16T14:33:22.000Z
|
2015-07-27T21:18:15.000Z
|
examples/basics/visuals/visual_filters.py
|
astrofrog/vispy
|
fa5e2eab9bb3d956f87ae68a56e342913e58a305
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# vispy: gallery 1
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Demonstration of Line visual with arbitrary transforms.
Several Line visuals are displayed that all have the same vertex position
information, but different transformations.
"""
import numpy as np
from vispy import app, gloo, visuals
from vispy.visuals.transforms import STTransform
from vispy.visuals.components import Clipper, Alpha, ColorFilter
from vispy.visuals.shaders import Function
from vispy.geometry import Rect
# vertex positions of data to draw
N = 400
pos = np.zeros((N, 2), dtype=np.float32)
pos[:, 0] = np.linspace(0, 350, N)
pos[:, 1] = np.random.normal(size=N, scale=20, loc=0)
class Canvas(app.Canvas):
def __init__(self):
# Define several Line visuals that use the same position data
self.lines = [visuals.LineVisual(pos=pos)
for i in range(6)]
self.lines[0].transform = STTransform(translate=(0, 50))
# Clipping filter (requires update when window is resized)
self.lines[1].transform = STTransform(translate=(400, 50))
self.clipper = Clipper([500, 725, 200, 50])
self.lines[1].attach(self.clipper)
# Opacity filter
self.lines[2].transform = STTransform(translate=(0, 150))
self.lines[2].attach(Alpha(0.4))
# Color filter (for anaglyph stereo)
self.lines[3].transform = STTransform(translate=(400, 150))
self.lines[3].attach(ColorFilter([1, 0, 0, 1]))
# A custom filter
class Hatching(object):
def __init__(self):
self.shader = Function("""
void screen_filter() {
float f = gl_FragCoord.x * 0.4 + gl_FragCoord.y;
f = mod(f, 20);
if( f < 5.0 ) {
discard;
}
if( f < 20.0 ) {
gl_FragColor.g = gl_FragColor.g + 0.05 * (20-f);
}
}
""")
def _attach(self, visual):
visual._get_hook('frag', 'post').add(self.shader())
self.lines[4].transform = STTransform(translate=(0, 250))
self.lines[4].attach(Hatching())
# Mixing filters
self.lines[5].transform = STTransform(translate=(400, 250))
self.lines[5].attach(ColorFilter([1, 0, 0, 1]))
self.lines[5].attach(Hatching())
app.Canvas.__init__(self, keys='interactive', size=(800, 800))
for line in self.lines:
tr_sys = visuals.transforms.TransformSystem(self)
tr_sys.visual_to_document = line.transform
line.tr_sys = tr_sys
self.show(True)
def on_draw(self, ev):
gloo.clear('black', depth=True)
gloo.set_viewport(0, 0, *self.physical_size)
for line in self.lines:
line.draw(line.tr_sys)
def on_resize(self, event):
for line in self.lines:
# let the transform systems know that the window has resized
line.tr_sys.auto_configure()
# Need to update clipping boundaries if the window resizes.
trs = self.lines[1].tr_sys
tr = trs.document_to_framebuffer * trs.visual_to_document
self.clipper.bounds = tr.map(Rect(50, -15, 250, 30))
if __name__ == '__main__':
win = Canvas()
import sys
if sys.flags.interactive != 1:
app.run()
| 33.87156
| 76
| 0.567443
|
bf85b76d0115ade4102970675205dbcf9f25aef0
| 208
|
py
|
Python
|
python_practice/9.7test5.py
|
ccom33/python_practice
|
9a3551610c46b0bae15542575033e8ed7e967289
|
[
"MIT"
] | null | null | null |
python_practice/9.7test5.py
|
ccom33/python_practice
|
9a3551610c46b0bae15542575033e8ed7e967289
|
[
"MIT"
] | null | null | null |
python_practice/9.7test5.py
|
ccom33/python_practice
|
9a3551610c46b0bae15542575033e8ed7e967289
|
[
"MIT"
] | null | null | null |
a=5
b= a==5
print(b)
member=['아빠투르', '뭐이런놈', 'Haze', 'JorOz']
print(member)
정보=['김선재','수성구 범물동','가리는건없음']
for m in 정보:
print(m, '입니다')
season = ['봄','여름','가을','겨울']
for m in season:
print(m, '계절')
| 13.866667
| 40
| 0.538462
|
b5bed87824c9c55b4ed9f9dfed97ca0908efafc9
| 3,245
|
py
|
Python
|
Website/farmacy_django/farmacy_django/settings.py
|
BedoEbied/Farmacy
|
4c75b83edeb12389f18f2f10f5dffa7f7c5bcbe9
|
[
"MIT"
] | 1
|
2021-04-09T08:39:29.000Z
|
2021-04-09T08:39:29.000Z
|
Website/farmacy_django/farmacy_django/settings.py
|
BedoEbied/Farmacy
|
4c75b83edeb12389f18f2f10f5dffa7f7c5bcbe9
|
[
"MIT"
] | null | null | null |
Website/farmacy_django/farmacy_django/settings.py
|
BedoEbied/Farmacy
|
4c75b83edeb12389f18f2f10f5dffa7f7c5bcbe9
|
[
"MIT"
] | 1
|
2020-01-15T09:58:50.000Z
|
2020-01-15T09:58:50.000Z
|
"""
Django settings for farmacy_django project.
Generated by 'django-admin startproject' using Django 2.2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')04+o*sd6^9@bw&u$x72h$i!!!7o5y7s@oa*#0ek(!abbayd@p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main.apps.MainConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'farmacy_django.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'farmacy_django.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'assets'),
)
| 25.753968
| 91
| 0.694299
|
0ab99ecc434d8da90d47d27b80e13d441cef6b73
| 2,030
|
py
|
Python
|
test/test_utils.py
|
meuns/galgebra
|
3fea69ff4c4ca8f8afea083b697ef9d5112824b9
|
[
"BSD-3-Clause"
] | 1
|
2016-05-08T08:13:10.000Z
|
2016-05-08T08:13:10.000Z
|
test/test_utils.py
|
meuns/galgebra
|
3fea69ff4c4ca8f8afea083b697ef9d5112824b9
|
[
"BSD-3-Clause"
] | 1
|
2019-11-21T18:59:22.000Z
|
2019-11-26T08:37:26.000Z
|
test/test_utils.py
|
meuns/galgebra
|
3fea69ff4c4ca8f8afea083b697ef9d5112824b9
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from sympy import expand, S, simplify
from galgebra.ga import Ga
from galgebra.mv import Mv
from galgebra.metric import Simp
def com(A, B):
"""
I like free functions...
"""
return Ga.com(A, B)
class TestCase(unittest.TestCase):
def assertEqual(self, first, second):
"""
        Check that two expressions are equal.
"""
if isinstance(first, Mv):
first = first.obj
if isinstance(second, Mv):
second = second.obj
# We need to help sympy a little...
first = Simp.apply(expand(first))
second = Simp.apply(expand(second))
# Check
diff = simplify(first - second)
self.assertTrue(diff == 0, "\n%s\n==\n%s\n%s" % (first, second, diff))
def assertProjEqual(self, first, second):
"""
Compare two points, two planes or two lines up to a scalar.
"""
assert isinstance(first, Mv)
assert isinstance(second, Mv)
# TODO: this should use Mv methods and not the derived test case methods...
first /= self.norm(first)
second /= self.norm(second)
# We need to help sympy a little...
X = Simp.apply(expand(first.obj))
Y = Simp.apply(expand(second.obj))
# We can't easily retrieve the sign, so we test both
diff = simplify(X.obj - Y.obj)
if diff != S.Zero:
diff = simplify(X.obj + Y.obj)
self.assertTrue(diff == S.Zero, "\n%s\n==\n%s" % (X, Y))
def assertNotEqual(self, first, second):
"""
        Check that two expressions are not equal.
"""
if isinstance(first, Mv):
first = first.obj
if isinstance(second, Mv):
second = second.obj
# We need to help sympy a little...
first = Simp.apply(expand(first))
second = Simp.apply(expand(second))
# Check
diff = simplify(first - second)
self.assertTrue(diff != 0, "\n%s\n!=\n%s\n%s" % (first, second, diff))
| 26.363636
| 83
| 0.564532
|
c910a58901976bc19cc5f2c9173bf89a7dcca98a
| 1,980
|
py
|
Python
|
src/deep_rlsp/envs/gridworlds/apples_spec.py
|
HumanCompatibleAI/deep-rlsp
|
81941693aba2aa9157ca96e96567f4e3cb95fbc3
|
[
"MIT"
] | 24
|
2021-04-17T21:32:43.000Z
|
2021-08-07T17:20:15.000Z
|
src/deep_rlsp/envs/gridworlds/apples_spec.py
|
HumanCompatibleAI/deep-rlsp
|
81941693aba2aa9157ca96e96567f4e3cb95fbc3
|
[
"MIT"
] | null | null | null |
src/deep_rlsp/envs/gridworlds/apples_spec.py
|
HumanCompatibleAI/deep-rlsp
|
81941693aba2aa9157ca96e96567f4e3cb95fbc3
|
[
"MIT"
] | 7
|
2021-04-17T21:32:48.000Z
|
2022-02-09T04:18:39.000Z
|
import numpy as np
from deep_rlsp.envs.gridworlds.apples import ApplesState
from deep_rlsp.envs.gridworlds.env import Direction
class ApplesSpec(object):
def __init__(
self,
height,
width,
init_state,
apple_regen_probability,
bucket_capacity,
include_location_features,
):
"""See ApplesEnv.__init__ in apples.py for details."""
self.height = height
self.width = width
self.init_state = init_state
self.apple_regen_probability = apple_regen_probability
self.bucket_capacity = bucket_capacity
self.include_location_features = include_location_features
# In the diagrams below, T is a tree, B is a bucket, C is a carpet, A is the
# agent. Each tuple is of the form (spec, current state, task R, true R).
APPLES_PROBLEMS = {
# -----
# |T T|
# | |
# | B |
# | |
# |A T|
# -----
# After 11 actions (riuiruuildi), it looks like this:
# -----
# |T T|
# | A |
# | B |
# | |
# | T|
# -----
# Where the agent has picked the right trees once and put the fruit in the
# basket.
"default": (
ApplesSpec(
5,
3,
ApplesState(
agent_pos=(0, 0, 2),
tree_states={(0, 0): True, (2, 0): True, (2, 4): True},
bucket_states={(1, 2): 0},
carrying_apple=False,
),
apple_regen_probability=0.1,
bucket_capacity=10,
include_location_features=True,
),
ApplesState(
agent_pos=(Direction.get_number_from_direction(Direction.SOUTH), 1, 1),
tree_states={(0, 0): True, (2, 0): False, (2, 4): True},
bucket_states={(1, 2): 2},
carrying_apple=False,
),
np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
)
}
| 28.285714
| 83
| 0.529293
|
2e603b833aa6dbc15dca560203459423f234c7c0
| 7,695
|
py
|
Python
|
traod/traod/generator.py
|
illenseer/lpod
|
d473ada69f60aefbfd9ec70da8f63c56e46a83ea
|
[
"MIT"
] | 1
|
2020-11-30T00:11:22.000Z
|
2020-11-30T00:11:22.000Z
|
traod/traod/generator.py
|
illenseer/lpod
|
d473ada69f60aefbfd9ec70da8f63c56e46a83ea
|
[
"MIT"
] | null | null | null |
traod/traod/generator.py
|
illenseer/lpod
|
d473ada69f60aefbfd9ec70da8f63c56e46a83ea
|
[
"MIT"
] | null | null | null |
from __future__ import (print_function, division, absolute_import,
unicode_literals)
def generate_prog(ast, backend, generator, strategy):
"""
Generate logic program from AST corresponding to backend, generator and
strategy.
"""
if generator == 'split':
return generate_split(ast, backend, strategy)
elif generator == 'cabalar':
return generate_cabalar(ast, backend, strategy)
def generate_split(ast, backend, strategy):
"""
Generate program with with split programs via choice rules.
"""
stmts = []
od_count = 0
max_deg_count = 0
for statement in ast.statements:
additional_rules = []
if statement.body:
body = ''.join(statement.body)
else:
body = ''
if statement.head:
if statement.head.ordered_disjunction:
od_count += 1
deg_count = 0
ods = ''.join(statement.head.ordered_disjunction).split(';;')
choice = ''
ld_el = ''
for od in ods:
deg_count += 1
if deg_count > max_deg_count:
max_deg_count = deg_count
choice_el = 'od_atoms({r},{d})'.format(
r=od_count,
d=deg_count
)
if deg_count != 1:
choice += ';'
choice += choice_el
ar = '{od}:-{ch}{ld}{body}'.format(
od=od,
ch=choice_el,
ld=ld_el,
body=',' + body + '.' if statement.body else '.'
)
additional_rules.append(ar)
arc = ':-not {ch},{od}{ld}{body}'.format(
ch=choice_el,
od=od,
ld=ld_el,
body=',' + body + '.' if statement.body else '.'
)
additional_rules.append(arc)
ld_el += ',not {}'.format(od)
head = '{{{choice}}}=1'.format(choice=choice)
ar = 'od_body({nr}){body}.'.format(
nr=od_count,
body=':-' + body if statement.body else ''
)
additional_rules.append(ar)
ar = 'satisfied({nr},1):-not od_body({nr}).'.format(nr=od_count)
additional_rules.append(ar)
elif statement.head.atom:
head = ''.join(statement.head.atom)
elif statement.head.choice:
head = ''.join(statement.head.choice)
elif statement.head.aggregate:
head = ''.join(statement.head.aggregate)
else:
head=''
if head or body:
stmt = '{head}{body}.'.format(
head=head,
body=':-' + body if statement.body else ''
)
stmts.append(stmt)
for rule in additional_rules:
stmts.append(rule)
generic_satisfied = 'satisfied(R,D):-od_atoms(R,D).'
stmts.append(generic_satisfied)
if backend == 'metalpod':
stmts.append('optimize({}).'.format(strategy))
elif backend == 'asprin':
asprin_stmts = generate_asprin_preference_spec(
strategy,
od_count,
max_deg_count
)
stmts += asprin_stmts
return '\n'.join(stmts)
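# Roughly, generate_split (above) turns each ordered-disjunction rule r with degrees
# 1..n into a choice rule "{od_atoms(r,1);...;od_atoms(r,n)}=1" plus, per degree, a rule
# deriving the original head atom and a constraint tying od_atoms(r,d) back to it,
# together with the od_body/satisfied bookkeeping consumed by the chosen backend.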
def generate_cabalar(ast, backend, strategy):
"""
Generate program with with Cabalar translation for ordered disjunctions.
"""
stmts = []
od_count = 0
max_deg_count = 0
for statement in ast.statements:
additional_rules = []
if statement.body:
body = ''.join(statement.body)
else:
body = ''
if statement.head:
if statement.head.ordered_disjunction:
od_count += 1
deg_count = 0
ods = ''.join(statement.head.ordered_disjunction).split(';;')
head = ''
ld_el = ''
ar = 'od_body({nr}){body}.'.format(
nr=od_count,
body=':-' + body if statement.body else ''
)
additional_rules.append(ar)
ar = 'satisfied({nr},1):-not od_body({nr}).'.format(nr=od_count)
additional_rules.append(ar)
for od in ods:
deg_count += 1
if deg_count > max_deg_count:
max_deg_count = deg_count
od_atom = 'satisfied({r},{d})'.format(
r=od_count,
d=deg_count
)
ar = '{od}:-not not {od}{ld}{body}'.format(
od=od,
ld=ld_el,
body=',' + body + '.' if statement.body else '.'
)
additional_rules.append(ar)
ar = '{od_atom}:-not not {od}{ld}{body}'.format(
od_atom=od_atom,
od=od,
ld=ld_el,
body=',' + body + '.' if statement.body else '.'
)
additional_rules.append(ar)
ld_el += ',not {}'.format(od)
if not body:
ld_el = ld_el[1:]
ar = ':-{body}{ld}.'.format(body=body,ld=ld_el)
additional_rules.append(ar)
body = ''
elif statement.head.atom:
head = ''.join(statement.head.atom)
elif statement.head.choice:
head = ''.join(statement.head.choice)
elif statement.head.aggregate:
head = ''.join(statement.head.aggregate)
else:
head=''
if head or body:
stmt = '{head}{body}.'.format(
head=head,
body=':-' + body if statement.body else ''
)
stmts.append(stmt)
for rule in additional_rules:
stmts.append(rule)
if backend == 'metalpod':
stmts.append('optimize({}).'.format(strategy))
elif backend == 'asprin':
asprin_stmts = generate_asprin_preference_spec(
strategy,
od_count,
max_deg_count
)
stmts += asprin_stmts
return '\n'.join(stmts)
def generate_asprin_preference_spec(strategy, od_count, deg_count):
"""
Generate preference rules for asprin.
"""
stmts = []
stmts.append('deg(1..{}).'.format(deg_count))
stmts.append('rule(1..{}).'.format(od_count))
if strategy == 'pareto':
pref = (
'#preference(od(R),less(weight))'
'{D,R::satisfied(R,D):deg(D)}:rule(R).\n'
'#preference(all,pareto){name(od(R)):rule(R)}.\n'
'#optimize(all).'
)
elif strategy == 'incl':
pref = (
'#preference(od(D),superset)'
'{{satisfied(R,D):rule(R)}}:deg(D).\n'
'#preference(all,lexico){{O::name(od(D)):deg(D),O={md}-D}}.\n'
'#optimize(all).'
).format(md=deg_count+1)
elif strategy == 'card':
pref = (
'#preference(od(D),more(cardinality))'
'{{satisfied(R,D):rule(R)}}:deg(D).\n'
'#preference(all,lexico){{O::name(od(D)):deg(D),O={md}-D}}.\n'
'#optimize(all).'
).format(md=deg_count+1)
stmts.append(pref)
return stmts
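# Sketch of the result, assuming od_count=2, deg_count=3 and strategy='pareto': the
# returned list holds 'deg(1..3).', 'rule(1..2).' and the '#preference'/'#optimize'
# block built in the 'pareto' branch above.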
| 32.605932
| 80
| 0.460559
|
991817c1566c57fa119f8190e6456a711e5cacdb
| 1,392
|
py
|
Python
|
salt/modules/logmod.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 19
|
2016-01-29T14:37:52.000Z
|
2022-03-30T18:08:01.000Z
|
salt/modules/logmod.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 223
|
2016-03-02T16:39:41.000Z
|
2022-03-03T12:26:35.000Z
|
salt/modules/logmod.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 64
|
2016-02-04T19:45:26.000Z
|
2021-12-15T02:02:31.000Z
|
# -*- coding: utf-8 -*-
"""
On-demand logging
=================
.. versionadded:: 2017.7.0
The sole purpose of this module is logging messages in the (proxy) minion.
It comes in very handy when debugging complex Jinja templates, for example:
.. code-block:: jinja
{%- for var in range(10) %}
{%- do salt.log.info(var) -%}
{%- endfor %}
CLI Example:
.. code-block:: bash
salt '*' log.error "Please don't do that, this module is not for CLI use!"
"""
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
log = logging.getLogger(__name__)
__virtualname__ = "log"
__proxyenabled__ = ["*"]
def __virtual__():
return __virtualname__
def debug(message):
"""
Log message at level DEBUG.
"""
log.debug(message)
return True
def info(message):
"""
Log message at level INFO.
"""
log.info(message)
return True
def warning(message):
"""
Log message at level WARNING.
"""
log.warning(message)
return True
def error(message):
"""
Log message at level ERROR.
"""
log.error(message)
return True
def critical(message):
"""
Log message at level CRITICAL.
"""
log.critical(message)
return True
def exception(message):
"""
Log message at level EXCEPTION.
"""
log.exception(message)
return True
| 16.571429
| 78
| 0.627874
|
dba796b98c9310b1e69b650c1f5e09cff86efa7f
| 1,580
|
py
|
Python
|
nomadgram/images/models.py
|
nanobeauty/nomadgram
|
e8792405ec2620d98bef980fd3d5cdfe358d2210
|
[
"MIT"
] | null | null | null |
nomadgram/images/models.py
|
nanobeauty/nomadgram
|
e8792405ec2620d98bef980fd3d5cdfe358d2210
|
[
"MIT"
] | 5
|
2020-06-05T19:24:18.000Z
|
2021-09-08T00:38:57.000Z
|
nomadgram/images/models.py
|
nanobeauty/nomadgram
|
e8792405ec2620d98bef980fd3d5cdfe358d2210
|
[
"MIT"
] | null | null | null |
from django.db import models
from nomadgram.users import models as user_models
# Create your models here.
class TimeStampedModel(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Image(TimeStampedModel):
""" Image Model """
file = models.ImageField()
location = models.CharField(max_length=140)
caption = models.TextField()
creator = models.ForeignKey(user_models.User, on_delete=models.PROTECT, null=True, related_name='images')
@property
def comment_count(self):
return self.comments.all().count()
@property
def like_count(self):
return self.likes.all().count()
def __str__(self):
return '{} - {}'.format(self.location, self.caption)
class Meta:
ordering = ['-created_at']
class Comment(TimeStampedModel):
""" Comment Model """
message = models.TextField()
creator = models.ForeignKey(user_models.User, on_delete=models.PROTECT, null=True)
image = models.ForeignKey(Image, on_delete=models.PROTECT, null=True, related_name='comments')
def __str__(self):
return self.message
class Like(TimeStampedModel):
""" Like Model """
creator = models.ForeignKey(user_models.User, on_delete=models.PROTECT, null=True)
image = models.ForeignKey(Image, on_delete=models.PROTECT, null=True, related_name='likes')
def __str__(self):
return 'User: {} - Image Caption: {}'.format(self.creator.username, self.image.caption)
| 27.241379
| 109
| 0.691772
|
92260ace28a6f880c3173480d92b689fc8220f4f
| 73,487
|
py
|
Python
|
PythonVirtEnv/Lib/site-packages/plotly/graph_objs/barpolar/marker/_colorbar.py
|
zuhorski/EPL_Project
|
2d2417652879cfbe33c44c003ad77b7222590849
|
[
"MIT"
] | 2
|
2021-07-18T11:39:56.000Z
|
2021-11-06T17:13:05.000Z
|
venv/Lib/site-packages/plotly/graph_objs/barpolar/marker/_colorbar.py
|
wakisalvador/constructed-misdirection
|
74779e9ec640a11bc08d5d1967c85ac4fa44ea5e
|
[
"Unlicense"
] | null | null | null |
venv/Lib/site-packages/plotly/graph_objs/barpolar/marker/_colorbar.py
|
wakisalvador/constructed-misdirection
|
74779e9ec640a11bc08d5d1967c85ac4fa44ea5e
|
[
"Unlicense"
] | null | null | null |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class ColorBar(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "barpolar.marker"
_path_str = "barpolar.marker.colorbar"
_valid_props = {
"bgcolor",
"bordercolor",
"borderwidth",
"dtick",
"exponentformat",
"len",
"lenmode",
"minexponent",
"nticks",
"outlinecolor",
"outlinewidth",
"separatethousands",
"showexponent",
"showticklabels",
"showtickprefix",
"showticksuffix",
"thickness",
"thicknessmode",
"tick0",
"tickangle",
"tickcolor",
"tickfont",
"tickformat",
"tickformatstopdefaults",
"tickformatstops",
"ticklabeloverflow",
"ticklabelposition",
"ticklen",
"tickmode",
"tickprefix",
"ticks",
"ticksuffix",
"ticktext",
"ticktextsrc",
"tickvals",
"tickvalssrc",
"tickwidth",
"title",
"titlefont",
"titleside",
"x",
"xanchor",
"xpad",
"y",
"yanchor",
"ypad",
}
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# borderwidth
# -----------
@property
def borderwidth(self):
"""
Sets the width (in px) or the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["borderwidth"]
@borderwidth.setter
def borderwidth(self, val):
self["borderwidth"] = val
# dtick
# -----
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
`dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To
show powers of 10 plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and
"D2". If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval between
ticks to one day, set `dtick` to 86400000.0. "date" also has
special values "M<n>" gives ticks spaced by a number of months.
`n` must be a positive integer. To set ticks on the 15th of
every third month, set `tick0` to "2000-01-15" and `dtick` to
"M3". To set ticks every 4 years, set `dtick` to "M48"
The 'dtick' property accepts values of any type
Returns
-------
Any
"""
return self["dtick"]
@dtick.setter
def dtick(self, val):
self["dtick"] = val
# exponentformat
# --------------
@property
def exponentformat(self):
"""
Determines a formatting rule for the tick exponents. For
example, consider the number 1,000,000,000. If "none", it
appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If "SI", 1G. If
"B", 1B.
The 'exponentformat' property is an enumeration that may be specified as:
- One of the following enumeration values:
['none', 'e', 'E', 'power', 'SI', 'B']
Returns
-------
Any
"""
return self["exponentformat"]
@exponentformat.setter
def exponentformat(self, val):
self["exponentformat"] = val
# len
# ---
@property
def len(self):
"""
        Sets the length of the color bar. This measure excludes the
padding of both ends. That is, the color bar length is this
length minus the padding on both ends.
The 'len' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["len"]
@len.setter
def len(self, val):
self["len"] = val
# lenmode
# -------
@property
def lenmode(self):
"""
Determines whether this color bar's length (i.e. the measure in
the color variation direction) is set in units of plot
"fraction" or in *pixels. Use `len` to set the value.
The 'lenmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["lenmode"]
@lenmode.setter
def lenmode(self, val):
self["lenmode"] = val
# minexponent
# -----------
@property
def minexponent(self):
"""
Hide SI prefix for 10^n if |n| is below this number. This only
has an effect when `tickformat` is "SI" or "B".
The 'minexponent' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["minexponent"]
@minexponent.setter
def minexponent(self, val):
self["minexponent"] = val
# nticks
# ------
@property
def nticks(self):
"""
Specifies the maximum number of ticks for the particular axis.
The actual number of ticks will be chosen automatically to be
less than or equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
The 'nticks' property is a integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self["nticks"]
@nticks.setter
def nticks(self, val):
self["nticks"] = val
# outlinecolor
# ------------
@property
def outlinecolor(self):
"""
Sets the axis line color.
The 'outlinecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["outlinecolor"]
@outlinecolor.setter
def outlinecolor(self, val):
self["outlinecolor"] = val
# outlinewidth
# ------------
@property
def outlinewidth(self):
"""
Sets the width (in px) of the axis line.
The 'outlinewidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["outlinewidth"]
@outlinewidth.setter
def outlinewidth(self, val):
self["outlinewidth"] = val
# separatethousands
# -----------------
@property
def separatethousands(self):
"""
If "true", even 4-digit integers are separated
The 'separatethousands' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["separatethousands"]
@separatethousands.setter
def separatethousands(self, val):
self["separatethousands"] = val
# showexponent
# ------------
@property
def showexponent(self):
"""
If "all", all exponents are shown besides their significands.
If "first", only the exponent of the first tick is shown. If
"last", only the exponent of the last tick is shown. If "none",
no exponents appear.
The 'showexponent' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showexponent"]
@showexponent.setter
def showexponent(self, val):
self["showexponent"] = val
# showticklabels
# --------------
@property
def showticklabels(self):
"""
Determines whether or not the tick labels are drawn.
The 'showticklabels' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showticklabels"]
@showticklabels.setter
def showticklabels(self, val):
self["showticklabels"] = val
# showtickprefix
# --------------
@property
def showtickprefix(self):
"""
If "all", all tick labels are displayed with a prefix. If
"first", only the first tick is displayed with a prefix. If
"last", only the last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
The 'showtickprefix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showtickprefix"]
@showtickprefix.setter
def showtickprefix(self, val):
self["showtickprefix"] = val
# showticksuffix
# --------------
@property
def showticksuffix(self):
"""
Same as `showtickprefix` but for tick suffixes.
The 'showticksuffix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showticksuffix"]
@showticksuffix.setter
def showticksuffix(self, val):
self["showticksuffix"] = val
# thickness
# ---------
@property
def thickness(self):
"""
        Sets the thickness of the color bar. This measure excludes the
size of the padding, ticks and labels.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["thickness"]
@thickness.setter
def thickness(self, val):
self["thickness"] = val
# thicknessmode
# -------------
@property
def thicknessmode(self):
"""
Determines whether this color bar's thickness (i.e. the measure
in the constant color direction) is set in units of plot
"fraction" or in "pixels". Use `thickness` to set the value.
The 'thicknessmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["thicknessmode"]
@thicknessmode.setter
def thicknessmode(self, val):
self["thicknessmode"] = val
# tick0
# -----
@property
def tick0(self):
"""
Sets the placement of the first tick on this axis. Use with
`dtick`. If the axis `type` is "log", then you must take the
log of your starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when `dtick`=*L<f>* (see
`dtick` for more info). If the axis `type` is "date", it should
be a date string, like date data. If the axis `type` is
"category", it should be a number, using the scale where each
category is assigned a serial number from zero in the order it
appears.
The 'tick0' property accepts values of any type
Returns
-------
Any
"""
return self["tick0"]
@tick0.setter
def tick0(self, val):
self["tick0"] = val
# tickangle
# ---------
@property
def tickangle(self):
"""
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the tick
labels vertically.
The 'tickangle' property is a angle (in degrees) that may be
specified as a number between -180 and 180. Numeric values outside this
range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self["tickangle"]
@tickangle.setter
def tickangle(self, val):
self["tickangle"] = val
# tickcolor
# ---------
@property
def tickcolor(self):
"""
Sets the tick color.
The 'tickcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["tickcolor"]
@tickcolor.setter
def tickcolor(self, val):
self["tickcolor"] = val
# tickfont
# --------
@property
def tickfont(self):
"""
Sets the color bar's tick label font
The 'tickfont' property is an instance of Tickfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.barpolar.marker.colorbar.Tickfont`
- A dict of string/value properties that will be passed
to the Tickfont constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
                    available on the system on which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.barpolar.marker.colorbar.Tickfont
"""
return self["tickfont"]
@tickfont.setter
def tickfont(self, val):
self["tickfont"] = val
# tickformat
# ----------
@property
def tickformat(self):
"""
Sets the tick label formatting rule using d3 formatting mini-
languages which are very similar to those in Python. For
numbers, see: https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format. And for dates
see: https://github.com/d3/d3-time-format#locale_format. We add
two items to d3's date formatter: "%h" for half of the year as
a decimal number as well as "%{n}f" for fractional seconds with
n digits. For example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display "09~15~23.46"
The 'tickformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickformat"]
@tickformat.setter
def tickformat(self, val):
self["tickformat"] = val
# tickformatstops
# ---------------
@property
def tickformatstops(self):
"""
The 'tickformatstops' property is a tuple of instances of
Tickformatstop that may be specified as:
- A list or tuple of instances of plotly.graph_objs.barpolar.marker.colorbar.Tickformatstop
- A list or tuple of dicts of string/value properties that
will be passed to the Tickformatstop constructor
Supported dict properties:
dtickrange
range [*min*, *max*], where "min", "max" -
dtick values which describe some zoom level, it
is possible to omit "min" or "max" value by
passing "null"
enabled
Determines whether or not this stop is used. If
`false`, this stop is ignored even within its
`dtickrange`.
name
When used in a template, named items are
created in the output figure in addition to any
items the figure already has in this array. You
can modify these items in the output figure by
making your own item with `templateitemname`
matching this `name` alongside your
modifications (including `visible: false` or
`enabled: false` to hide it). Has no effect
outside of a template.
templateitemname
Used to refer to a named item in this array in
the template. Named items from the template
will be created even without a matching item in
the input figure, but you can modify one by
making an item with `templateitemname` matching
its `name`, alongside your modifications
(including `visible: false` or `enabled: false`
to hide it). If there is no template or no
matching item, this item will be hidden unless
you explicitly show it with `visible: true`.
value
string - dtickformat for described zoom level,
the same as "tickformat"
Returns
-------
tuple[plotly.graph_objs.barpolar.marker.colorbar.Tickformatstop]
"""
return self["tickformatstops"]
@tickformatstops.setter
def tickformatstops(self, val):
self["tickformatstops"] = val
# tickformatstopdefaults
# ----------------------
@property
def tickformatstopdefaults(self):
"""
When used in a template (as layout.template.data.barpolar.marke
r.colorbar.tickformatstopdefaults), sets the default property
values to use for elements of
barpolar.marker.colorbar.tickformatstops
The 'tickformatstopdefaults' property is an instance of Tickformatstop
that may be specified as:
- An instance of :class:`plotly.graph_objs.barpolar.marker.colorbar.Tickformatstop`
- A dict of string/value properties that will be passed
to the Tickformatstop constructor
Supported dict properties:
Returns
-------
plotly.graph_objs.barpolar.marker.colorbar.Tickformatstop
"""
return self["tickformatstopdefaults"]
@tickformatstopdefaults.setter
def tickformatstopdefaults(self, val):
self["tickformatstopdefaults"] = val
# ticklabeloverflow
# -----------------
@property
def ticklabeloverflow(self):
"""
Determines how we handle tick labels that would overflow either
the graph div or the domain of the axis. The default value for
inside tick labels is *hide past domain*. In other cases the
default is *hide past div*.
The 'ticklabeloverflow' property is an enumeration that may be specified as:
- One of the following enumeration values:
['allow', 'hide past div', 'hide past domain']
Returns
-------
Any
"""
return self["ticklabeloverflow"]
@ticklabeloverflow.setter
def ticklabeloverflow(self, val):
self["ticklabeloverflow"] = val
# ticklabelposition
# -----------------
@property
def ticklabelposition(self):
"""
Determines where tick labels are drawn.
The 'ticklabelposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', 'outside top', 'inside top',
'outside bottom', 'inside bottom']
Returns
-------
Any
"""
return self["ticklabelposition"]
@ticklabelposition.setter
def ticklabelposition(self, val):
self["ticklabelposition"] = val
# ticklen
# -------
@property
def ticklen(self):
"""
Sets the tick length (in px).
The 'ticklen' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ticklen"]
@ticklen.setter
def ticklen(self, val):
self["ticklen"] = val
# tickmode
# --------
@property
def tickmode(self):
"""
Sets the tick mode for this axis. If "auto", the number of
ticks is set via `nticks`. If "linear", the placement of the
ticks is determined by a starting position `tick0` and a tick
step `dtick` ("linear" is the default value if `tick0` and
`dtick` are provided). If "array", the placement of the ticks
is set via `tickvals` and the tick text is `ticktext`. ("array"
is the default value if `tickvals` is provided).
The 'tickmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['auto', 'linear', 'array']
Returns
-------
Any
"""
return self["tickmode"]
@tickmode.setter
def tickmode(self, val):
self["tickmode"] = val
# tickprefix
# ----------
@property
def tickprefix(self):
"""
Sets a tick label prefix.
The 'tickprefix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickprefix"]
@tickprefix.setter
def tickprefix(self, val):
self["tickprefix"] = val
# ticks
# -----
@property
def ticks(self):
"""
Determines whether ticks are drawn or not. If "", this axis'
        ticks are not drawn. If "outside" ("inside"), this axis' ticks are
        drawn outside (inside) the axis lines.
The 'ticks' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', '']
Returns
-------
Any
"""
return self["ticks"]
@ticks.setter
def ticks(self, val):
self["ticks"] = val
# ticksuffix
# ----------
@property
def ticksuffix(self):
"""
Sets a tick label suffix.
The 'ticksuffix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["ticksuffix"]
@ticksuffix.setter
def ticksuffix(self, val):
self["ticksuffix"] = val
# ticktext
# --------
@property
def ticktext(self):
"""
Sets the text displayed at the ticks position via `tickvals`.
Only has an effect if `tickmode` is set to "array". Used with
`tickvals`.
The 'ticktext' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ticktext"]
@ticktext.setter
def ticktext(self, val):
self["ticktext"] = val
# ticktextsrc
# -----------
@property
def ticktextsrc(self):
"""
Sets the source reference on Chart Studio Cloud for ticktext .
The 'ticktextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ticktextsrc"]
@ticktextsrc.setter
def ticktextsrc(self, val):
self["ticktextsrc"] = val
# tickvals
# --------
@property
def tickvals(self):
"""
Sets the values at which ticks on this axis appear. Only has an
effect if `tickmode` is set to "array". Used with `ticktext`.
The 'tickvals' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["tickvals"]
@tickvals.setter
def tickvals(self, val):
self["tickvals"] = val
# tickvalssrc
# -----------
@property
def tickvalssrc(self):
"""
Sets the source reference on Chart Studio Cloud for tickvals .
The 'tickvalssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["tickvalssrc"]
@tickvalssrc.setter
def tickvalssrc(self, val):
self["tickvalssrc"] = val
# tickwidth
# ---------
@property
def tickwidth(self):
"""
Sets the tick width (in px).
The 'tickwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["tickwidth"]
@tickwidth.setter
def tickwidth(self, val):
self["tickwidth"] = val
# title
# -----
@property
def title(self):
"""
The 'title' property is an instance of Title
that may be specified as:
- An instance of :class:`plotly.graph_objs.barpolar.marker.colorbar.Title`
- A dict of string/value properties that will be passed
to the Title constructor
Supported dict properties:
font
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
side
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
text
Sets the title of the color bar. Note that
before the existence of `title.text`, the
title's contents used to be defined as the
`title` attribute itself. This behavior has
been deprecated.
Returns
-------
plotly.graph_objs.barpolar.marker.colorbar.Title
"""
return self["title"]
@title.setter
def title(self, val):
self["title"] = val
# titlefont
# ---------
@property
def titlefont(self):
"""
Deprecated: Please use barpolar.marker.colorbar.title.font
instead. Sets this color bar's title font. Note that the
title's font used to be set by the now deprecated `titlefont`
attribute.
The 'font' property is an instance of Font
that may be specified as:
- An instance of :class:`plotly.graph_objs.barpolar.marker.colorbar.title.Font`
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
                    available on the system on which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
"""
return self["titlefont"]
@titlefont.setter
def titlefont(self, val):
self["titlefont"] = val
# titleside
# ---------
@property
def titleside(self):
"""
Deprecated: Please use barpolar.marker.colorbar.title.side
instead. Determines the location of color bar's title with
respect to the color bar. Note that the title's location used
to be set by the now deprecated `titleside` attribute.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['right', 'top', 'bottom']
Returns
-------
"""
return self["titleside"]
@titleside.setter
def titleside(self, val):
self["titleside"] = val
# x
# -
@property
def x(self):
"""
Sets the x position of the color bar (in plot fraction).
The 'x' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# xanchor
# -------
@property
def xanchor(self):
"""
Sets this color bar's horizontal position anchor. This anchor
binds the `x` position to the "left", "center" or "right" of
the color bar.
The 'xanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'center', 'right']
Returns
-------
Any
"""
return self["xanchor"]
@xanchor.setter
def xanchor(self, val):
self["xanchor"] = val
# xpad
# ----
@property
def xpad(self):
"""
Sets the amount of padding (in px) along the x direction.
The 'xpad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["xpad"]
@xpad.setter
def xpad(self, val):
self["xpad"] = val
# y
# -
@property
def y(self):
"""
Sets the y position of the color bar (in plot fraction).
The 'y' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# yanchor
# -------
@property
def yanchor(self):
"""
        Sets this color bar's vertical position anchor. This anchor
binds the `y` position to the "top", "middle" or "bottom" of
the color bar.
The 'yanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'middle', 'bottom']
Returns
-------
Any
"""
return self["yanchor"]
@yanchor.setter
def yanchor(self, val):
self["yanchor"] = val
# ypad
# ----
@property
def ypad(self):
"""
Sets the amount of padding (in px) along the y direction.
The 'ypad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ypad"]
@ypad.setter
def ypad(self, val):
self["ypad"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing this
color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B.
len
            Sets the length of the color bar. This measure excludes
            the padding at both ends; that is, the color bar length
is this length minus the padding on both ends.
lenmode
Determines whether this color bar's length (i.e. the
measure in the color variation direction) is set in
units of plot "fraction" or in *pixels. Use `len` to
set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this number.
This only has an effect when `tickformat` is "SI" or
"B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
            Sets the thickness of the color bar. This measure
excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format. And for
dates see: https://github.com/d3/d3-time-
format#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal
number as well as "%{n}f" for fractional seconds with n
digits. For example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.barpolar.marker
.colorbar.Tickformatstop` instances or dicts with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.data.barpol
ar.marker.colorbar.tickformatstopdefaults), sets the
default property values to use for elements of
barpolar.marker.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of the
axis. The default value for inside tick labels is *hide
past domain*. In other cases the default is *hide past
div*.
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
            this axis' ticks are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud for
ticktext .
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud for
tickvals .
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.barpolar.marker.colorbar.T
itle` instance or dict with compatible properties
titlefont
Deprecated: Please use
barpolar.marker.colorbar.title.font instead. Sets this
color bar's title font. Note that the title's font used
to be set by the now deprecated `titlefont` attribute.
titleside
Deprecated: Please use
barpolar.marker.colorbar.title.side instead. Determines
the location of color bar's title with respect to the
color bar. Note that the title's location used to be
set by the now deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
            Sets this color bar's vertical position anchor. This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
"""
_mapped_properties = {
"titlefont": ("title", "font"),
"titleside": ("title", "side"),
}
def __init__(
self,
arg=None,
bgcolor=None,
bordercolor=None,
borderwidth=None,
dtick=None,
exponentformat=None,
len=None,
lenmode=None,
minexponent=None,
nticks=None,
outlinecolor=None,
outlinewidth=None,
separatethousands=None,
showexponent=None,
showticklabels=None,
showtickprefix=None,
showticksuffix=None,
thickness=None,
thicknessmode=None,
tick0=None,
tickangle=None,
tickcolor=None,
tickfont=None,
tickformat=None,
tickformatstops=None,
tickformatstopdefaults=None,
ticklabeloverflow=None,
ticklabelposition=None,
ticklen=None,
tickmode=None,
tickprefix=None,
ticks=None,
ticksuffix=None,
ticktext=None,
ticktextsrc=None,
tickvals=None,
tickvalssrc=None,
tickwidth=None,
title=None,
titlefont=None,
titleside=None,
x=None,
xanchor=None,
xpad=None,
y=None,
yanchor=None,
ypad=None,
**kwargs
):
"""
Construct a new ColorBar object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.barpolar.marker.ColorBar`
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing this
color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B.
len
            Sets the length of the color bar. This measure excludes
            the padding at both ends; that is, the color bar length
is this length minus the padding on both ends.
lenmode
Determines whether this color bar's length (i.e. the
measure in the color variation direction) is set in
units of plot "fraction" or in *pixels. Use `len` to
set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this number.
This only has an effect when `tickformat` is "SI" or
"B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
            Sets the thickness of the color bar. This measure
excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format. And for
dates see: https://github.com/d3/d3-time-
format#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal
number as well as "%{n}f" for fractional seconds with n
digits. For example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.barpolar.marker
.colorbar.Tickformatstop` instances or dicts with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.data.barpol
ar.marker.colorbar.tickformatstopdefaults), sets the
default property values to use for elements of
barpolar.marker.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of the
axis. The default value for inside tick labels is *hide
past domain*. In other cases the default is *hide past
div*.
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
            this axis' ticks are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud for
ticktext .
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud for
tickvals .
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.barpolar.marker.colorbar.T
itle` instance or dict with compatible properties
titlefont
Deprecated: Please use
barpolar.marker.colorbar.title.font instead. Sets this
color bar's title font. Note that the title's font used
to be set by the now deprecated `titlefont` attribute.
titleside
Deprecated: Please use
barpolar.marker.colorbar.title.side instead. Determines
the location of color bar's title with respect to the
color bar. Note that the title's location used to be
set by the now deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
            Sets this color bar's vertical position anchor. This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
ColorBar
"""
super(ColorBar, self).__init__("colorbar")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.barpolar.marker.ColorBar
constructor must be a dict or
an instance of :class:`plotly.graph_objs.barpolar.marker.ColorBar`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("bgcolor", None)
_v = bgcolor if bgcolor is not None else _v
if _v is not None:
self["bgcolor"] = _v
_v = arg.pop("bordercolor", None)
_v = bordercolor if bordercolor is not None else _v
if _v is not None:
self["bordercolor"] = _v
_v = arg.pop("borderwidth", None)
_v = borderwidth if borderwidth is not None else _v
if _v is not None:
self["borderwidth"] = _v
_v = arg.pop("dtick", None)
_v = dtick if dtick is not None else _v
if _v is not None:
self["dtick"] = _v
_v = arg.pop("exponentformat", None)
_v = exponentformat if exponentformat is not None else _v
if _v is not None:
self["exponentformat"] = _v
_v = arg.pop("len", None)
_v = len if len is not None else _v
if _v is not None:
self["len"] = _v
_v = arg.pop("lenmode", None)
_v = lenmode if lenmode is not None else _v
if _v is not None:
self["lenmode"] = _v
_v = arg.pop("minexponent", None)
_v = minexponent if minexponent is not None else _v
if _v is not None:
self["minexponent"] = _v
_v = arg.pop("nticks", None)
_v = nticks if nticks is not None else _v
if _v is not None:
self["nticks"] = _v
_v = arg.pop("outlinecolor", None)
_v = outlinecolor if outlinecolor is not None else _v
if _v is not None:
self["outlinecolor"] = _v
_v = arg.pop("outlinewidth", None)
_v = outlinewidth if outlinewidth is not None else _v
if _v is not None:
self["outlinewidth"] = _v
_v = arg.pop("separatethousands", None)
_v = separatethousands if separatethousands is not None else _v
if _v is not None:
self["separatethousands"] = _v
_v = arg.pop("showexponent", None)
_v = showexponent if showexponent is not None else _v
if _v is not None:
self["showexponent"] = _v
_v = arg.pop("showticklabels", None)
_v = showticklabels if showticklabels is not None else _v
if _v is not None:
self["showticklabels"] = _v
_v = arg.pop("showtickprefix", None)
_v = showtickprefix if showtickprefix is not None else _v
if _v is not None:
self["showtickprefix"] = _v
_v = arg.pop("showticksuffix", None)
_v = showticksuffix if showticksuffix is not None else _v
if _v is not None:
self["showticksuffix"] = _v
_v = arg.pop("thickness", None)
_v = thickness if thickness is not None else _v
if _v is not None:
self["thickness"] = _v
_v = arg.pop("thicknessmode", None)
_v = thicknessmode if thicknessmode is not None else _v
if _v is not None:
self["thicknessmode"] = _v
_v = arg.pop("tick0", None)
_v = tick0 if tick0 is not None else _v
if _v is not None:
self["tick0"] = _v
_v = arg.pop("tickangle", None)
_v = tickangle if tickangle is not None else _v
if _v is not None:
self["tickangle"] = _v
_v = arg.pop("tickcolor", None)
_v = tickcolor if tickcolor is not None else _v
if _v is not None:
self["tickcolor"] = _v
_v = arg.pop("tickfont", None)
_v = tickfont if tickfont is not None else _v
if _v is not None:
self["tickfont"] = _v
_v = arg.pop("tickformat", None)
_v = tickformat if tickformat is not None else _v
if _v is not None:
self["tickformat"] = _v
_v = arg.pop("tickformatstops", None)
_v = tickformatstops if tickformatstops is not None else _v
if _v is not None:
self["tickformatstops"] = _v
_v = arg.pop("tickformatstopdefaults", None)
_v = tickformatstopdefaults if tickformatstopdefaults is not None else _v
if _v is not None:
self["tickformatstopdefaults"] = _v
_v = arg.pop("ticklabeloverflow", None)
_v = ticklabeloverflow if ticklabeloverflow is not None else _v
if _v is not None:
self["ticklabeloverflow"] = _v
_v = arg.pop("ticklabelposition", None)
_v = ticklabelposition if ticklabelposition is not None else _v
if _v is not None:
self["ticklabelposition"] = _v
_v = arg.pop("ticklen", None)
_v = ticklen if ticklen is not None else _v
if _v is not None:
self["ticklen"] = _v
_v = arg.pop("tickmode", None)
_v = tickmode if tickmode is not None else _v
if _v is not None:
self["tickmode"] = _v
_v = arg.pop("tickprefix", None)
_v = tickprefix if tickprefix is not None else _v
if _v is not None:
self["tickprefix"] = _v
_v = arg.pop("ticks", None)
_v = ticks if ticks is not None else _v
if _v is not None:
self["ticks"] = _v
_v = arg.pop("ticksuffix", None)
_v = ticksuffix if ticksuffix is not None else _v
if _v is not None:
self["ticksuffix"] = _v
_v = arg.pop("ticktext", None)
_v = ticktext if ticktext is not None else _v
if _v is not None:
self["ticktext"] = _v
_v = arg.pop("ticktextsrc", None)
_v = ticktextsrc if ticktextsrc is not None else _v
if _v is not None:
self["ticktextsrc"] = _v
_v = arg.pop("tickvals", None)
_v = tickvals if tickvals is not None else _v
if _v is not None:
self["tickvals"] = _v
_v = arg.pop("tickvalssrc", None)
_v = tickvalssrc if tickvalssrc is not None else _v
if _v is not None:
self["tickvalssrc"] = _v
_v = arg.pop("tickwidth", None)
_v = tickwidth if tickwidth is not None else _v
if _v is not None:
self["tickwidth"] = _v
_v = arg.pop("title", None)
_v = title if title is not None else _v
if _v is not None:
self["title"] = _v
_v = arg.pop("titlefont", None)
_v = titlefont if titlefont is not None else _v
if _v is not None:
self["titlefont"] = _v
_v = arg.pop("titleside", None)
_v = titleside if titleside is not None else _v
if _v is not None:
self["titleside"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("xanchor", None)
_v = xanchor if xanchor is not None else _v
if _v is not None:
self["xanchor"] = _v
_v = arg.pop("xpad", None)
_v = xpad if xpad is not None else _v
if _v is not None:
self["xpad"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("yanchor", None)
_v = yanchor if yanchor is not None else _v
if _v is not None:
self["yanchor"] = _v
_v = arg.pop("ypad", None)
_v = ypad if ypad is not None else _v
if _v is not None:
self["ypad"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 35.812378
| 101
| 0.559378
|
9e50304d7b012f2b992a20b2ce2dcbebf500c1a5
| 55,755
|
py
|
Python
|
Multiple_Tic_Tac.py
|
jberkow713/Multiple-Tic-Tac-Toe
|
f5dd3630adfd038f472f2752a262b0c276760985
|
[
"MIT"
] | null | null | null |
Multiple_Tic_Tac.py
|
jberkow713/Multiple-Tic-Tac-Toe
|
f5dd3630adfd038f472f2752a262b0c276760985
|
[
"MIT"
] | null | null | null |
Multiple_Tic_Tac.py
|
jberkow713/Multiple-Tic-Tac-Toe
|
f5dd3630adfd038f472f2752a262b0c276760985
|
[
"MIT"
] | null | null | null |
import turtle
import os
import math
import random
from copy import deepcopy
import numpy as np
global Move_list
Move_list = []
# Use https://trinket.io/features/pygame to convert this to an online embedded game, when the
# Website is up
#Webflow
#Setting up initial gameplay variables
# sc = turtle.Screen()
# sc.setup(600, 600)
# #TODO ---make so if player hits square that is already hit, it will prompt to hit new square, and not give error
# Variable8= ""
# Choice3 = False
# while Choice3 == False:
# list_words = ["yes", "no"]
# Variable8 = input("Play vs computer? Enter yes, or no: \n")
# # Variable8 = turtle.textinput("Play vs computer?", "Enter yes, or no")
# if Variable8 in list_words:
# Choice3 = True
# Variable8 = Variable8
# if Variable8 == "no":
# Variable1 = 580
# Var2 = 0
# Choice = False
# while Choice == False:
# list_nums = str([i for i in range(3,21)])
# # Variable2 = turtle.textinput("Dimensions of Board ", "Enter Rows from 3-20 inclusive")
# Variable2 = input("Dimensions of Board? Enter Rows from 3-20 inclusive: \n")
# # Variable2 = (input("Please enter how many rows and columns you wish the board to have. Input needs to be an integer between 3 and 25, inclusive: \n"))
# #Force user input to be in list of values, while not breaking program if they type a string
# if Variable2 != '':
# if Variable2 in list_nums:
# Var2 = int(Variable2)
# Choice = True
# Variable2 = Var2
# Variable3 = int(Variable2) ** 2
# Choice2 = False
# while Choice2 == False:
# if Var2 == 3:
# list_nums2 = str([3])
# # print("I am a 3")
# elif Var2 == 4:
# list_nums2 = str([4])
# elif Var2 == 5:
# list_nums2 = str([5])
# # print("I am a 5")
# elif Var2 == 6:
# list_nums2 = str([6])
# elif Var2 == 7:
# list_nums2 = str([5, 6, 7])
# elif Var2 > 7:
# list_nums2 = str([i for i in range(3, (Var2 + 1))])
# # Variable4 = turtle.textinput("Required number of tiles to win ", "Enter consecutive squares needed to win")
# choice3 = False
# while choice3 == False:
# if Variable2 <=6:
# print(f"You must choose {Variable2} tiles needed to win")
# elif Variable2 == 7:
# print(f"You can choose 5,6, or 7 tiles needed to win")
# else:
# print(f"You must choose between {math.ceil(Variable2/2)} and {Variable2} tiles needed to win, inclusive.")
# Variable4 = input("Required number of tiles to win ? Enter consecutive squares needed to win: \n")
# list_nums2 = str([i for i in range(3,21)])
# if Variable4 != '':
# if Variable4 in list_nums2:
# Variable4 = int(Variable4)
# choice3 = True
# Variable4 = int(Variable4)
# #Force user input to be in list of values, while not breaking program if they type a string
# if Var2 > 7:
# if Variable4 <= Var2 and Variable4 > (.5* Var2):
# Choice2 = True
# if Var2 <=6:
# if Variable4 == Var2:
# Choice2 = True
# if Var2 == 7:
# if Variable4 >=5 and Variable4 <=7:
# Choice2 = True
# if Variable8 == "yes":
# Variable1 = 580
# Var2 = 0
# Choice = False
# while Choice == False:
# list_nums = str([i for i in range(3,21,2)])
# # Variable2 = turtle.textinput("Dimensions of Board ", "Enter Rows from 3-20 inclusive, Odd #s Only")
# Variable2 = input("Dimensions of Board ? Enter Rows from 3-20 inclusive, Odd #s Only: \n")
# # Variable2 = (input("Please enter how many rows and columns you wish the board to have. Input needs to be an integer between 3 and 25, inclusive: \n"))
# #Force user input to be in list of values, while not breaking program if they type a string
# if Variable2 != '':
# if Variable2 in list_nums:
# Var2 = int(Variable2)
# Choice = True
# Variable2 = Var2
# Variable3 = int(Variable2) ** 2
# #if Variable4 <= Variable2 and Variable4 > (.5* Variable2):
# Choice2 = False
# while Choice2 == False:
# if Var2 == 3:
# list_nums2 = str([3])
# # print("I am a 3")
# elif Var2 == 4:
# list_nums2 = str([4])
# elif Var2 == 5:
# list_nums2 = str([5])
# # print("I am a 5")
# elif Var2 == 6:
# list_nums2 = str([6])
# elif Var2 == 7:
# list_nums2 = str([5, 6, 7])
# elif Var2 > 7:
# list_nums2 = str([i for i in range(3, (Var2 + 1))])
# # Variable4 = turtle.textinput("Required number of tiles to win ", "Enter consecutive squares needed to win")
# choice3 = False
# while choice3 == False:
# if Variable2 <=6:
# print(f"You must choose {Variable2} tiles needed to win")
# elif Variable2 == 7:
# print(f"You can choose 5,6, or 7 tiles needed to win")
# else:
# print(f"You must choose between {math.ceil(Variable2/2)} and {Variable2} tiles needed to win, inclusive.")
# Variable4 = input("Required number of tiles to win ? Enter consecutive squares needed to win: \n")
# list_nums = str([i for i in range(3,21)])
# if Variable4 != '':
# if Variable4 in list_nums2:
# Variable4 = int(Variable4)
# choice3 = True
# Variable4 = int(Variable4)
# #Force user input to be in list of values, while not breaking program if they type a string
# if Var2 > 7:
# if Variable4 <= Var2 and Variable4 > (.5* Var2):
# Choice2 = True
# if Var2 <=6:
# if Variable4 == Var2:
# Choice2 = True
# if Var2 == 7:
# if Variable4 >=5 and Variable4 <=7:
# Choice2 = True
# Variable10a= ""
# Choice4 = False
# while Choice4 == False:
# list_words = ["yes", "no"]
# Variable10 = input("Play game till end? Enter yes, or no: \n")
# # Variable10 = turtle.textinput("Play game till end?", "Enter yes, or no")
# if Variable10 in list_words:
# Choice4 = True
# Variable10a = Variable10
# computer = turtle.Turtle()
# computer.color("blue")
# computer.shape("square")
# computer.penup()
# computer.speed(0)
# computer.setposition(0, 0)
# computer.hideturtle()
def Create_Player_Custom_Commands(Boardsize, Squares):
'''
    Sets up board-sized movement steps and X/O drawing commands so a player can
    play against the computer.
    Input: Boardsize (in px) and the total number of Squares on the board
    Output: custom keyboard controls for the specific board
'''
global is_done_with_player_move
is_done_with_player_move = False
Square_Length = round((Boardsize / np.sqrt(Squares)))
#speed is used for how far player moves with keystroke
speed = Square_Length
player = turtle.Turtle()
player.color("red")
player.shape("triangle")
player.penup()
player.speed(0)
Starting_pos_x = -(Boardsize/2) + .5*(Square_Length) + (((np.sqrt(Squares)-1)/2) * Square_Length)
Starting_pos_y = (Boardsize/2) - .5*(Square_Length) - (((np.sqrt(Squares)-1)/2) * Square_Length)
player.setposition(Starting_pos_x , Starting_pos_y )
player.setheading(90)
movement = (Boardsize/Squares)*1.5
def move_left():
x = player.xcor()
x -= speed
if x < -(Boardsize/2)+ .5*(Square_Length):
x = -(Boardsize/2)+ .5*(Square_Length)
player.setx(x)
player.setpos(x, player.ycor())
def move_right():
x = player.xcor()
x += speed
if x > (Boardsize/2) - .5*(Square_Length):
x = (Boardsize/2) - .5*(Square_Length)
player.setx(x)
player.setpos(x, player.ycor())
def move_up():
y = player.ycor()
y += speed
if y > (Boardsize/2) - .5*(Square_Length):
y = (Boardsize/2) - .5*(Square_Length)
player.sety(y)
player.setpos(player.xcor(), y)
def move_down():
y = player.ycor()
y -= speed
if y < -(Boardsize/2)+ .5*(Square_Length):
y = -(Boardsize/2)+ .5*(Square_Length)
player.sety(y)
player.setpos(player.xcor(), y)
def draw_circle():
turtle.pensize(2.5)
a = player.xcor()
b = player.ycor()
coord_value = [a, b]
turtle.hideturtle()
turtle.penup()
turtle.setpos(a, (b-movement))
turtle.pendown()
turtle.circle(movement)
turtle.hideturtle()
def draw_x():
turtle.pensize(2.5)
a = player.xcor()
b = player.ycor()
turtle.hideturtle()
turtle.penup()
coord_value = [a, b]
turtle.setposition(a-movement,b+movement)
turtle.pendown()
turtle.setposition(a+movement,b-movement)
turtle.penup()
turtle.setposition(a-movement,b-movement)
turtle.pendown()
turtle.setposition(a+movement,b+ movement)
# print(coord_value)
global Player_COORD
Player_COORD = coord_value
        global is_done_with_player_move
        is_done_with_player_move = True
turtle.listen()
turtle.onkey(move_left, "Left")
turtle.onkey(move_right, "Right")
turtle.onkey(move_up, "Up")
turtle.onkey(move_down, "Down")
# turtle.onkey(draw_circle, "o")
turtle.onkey(draw_x, "x")
def Create_Board(Boardsize, Squares, Screen_Color, Screen_Title, Line_Color, Line_Size):
'''
    Draws the game board grid for the given board size, number of squares,
    screen color, window title, line color, and line width.
'''
Total_Horizontal_Lines = (math.sqrt(Squares)-1)
Total_Vertical_Lines = (math.sqrt(Squares)-1)
Total_Vertical_Distance = Boardsize
Total_Horizontal_Distance = Boardsize
Distance_in_Between_Lines = (Boardsize / math.sqrt(Squares))
#Have to implement Coordinate system
First_Vertical_Line_X_Coords = -(Boardsize/2) + Distance_in_Between_Lines
First_Vertical_Line_Y_Coords = (Boardsize/2)
First_Horizontal_Line_X_Coords = -(Boardsize/2)
First_Horizontal_Line_Y_Coords = (Boardsize/2) - Distance_in_Between_Lines
screen = turtle.Screen()
screen.screensize(Boardsize,Boardsize)
screen.bgcolor(Screen_Color)
screen.title(Screen_Title)
Remaining_lines = Total_Vertical_Lines
Current_X = First_Vertical_Line_X_Coords
Current_Y = First_Vertical_Line_Y_Coords
while Remaining_lines > 0:
border_pen = turtle.Turtle()
border_pen.speed(0)
border_pen.color(Line_Color)
border_pen.penup()
border_pen.setposition(Current_X, Current_Y)
border_pen.pendown()
border_pen.pensize(Line_Size)
border_pen.setheading(270)
border_pen.fd(Boardsize)
        border_pen.hideturtle()
Current_X += Distance_in_Between_Lines
Remaining_lines -=1
Remaining_lines = Total_Horizontal_Lines
Current_X = First_Horizontal_Line_X_Coords
Current_Y = First_Horizontal_Line_Y_Coords
while Remaining_lines > 0:
border_pen = turtle.Turtle()
border_pen.speed(0)
border_pen.color(Line_Color)
border_pen.penup()
border_pen.setposition(Current_X, Current_Y)
border_pen.pendown()
border_pen.pensize(Line_Size)
border_pen.setheading(0)
border_pen.fd(Boardsize)
border_pen.hideturtle()
Current_Y -= Distance_in_Between_Lines
Remaining_lines -=1
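# Hedged usage sketch (assumed values): draw a 580-px, 3x3 board with white
# 3-px grid lines on a black background and a custom window title:
#   Create_Board(580, 9, "black", "Multiple Tic Tac Toe", "white", 3)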
def create_key_dict_and_coords(Boardsize, Squares):
'''
    Takes the Boardsize and the number of Squares, uses each square index as a key,
    and maps it to that square's [X, Y] centre coordinates; returns the dictionary.
'''
Name_of_Spots = list(range(0, Squares))
Square_Length = round((Boardsize / np.sqrt(Squares)))
Mid_Square_Length = (Square_Length / 2)
Max_X_Coordinate = (Boardsize/2) - Mid_Square_Length
Min_Y_Coordinate = -(Boardsize/2) + Mid_Square_Length
Coordinate_list = []
Len_Coordinate_list = len(Name_of_Spots)
Starting_X_Coordinate = -(Boardsize/2) + Mid_Square_Length
Starting_Y_Coordinate = (Boardsize/2) - Mid_Square_Length
X_coord = (Starting_X_Coordinate)
Y_coord = (Starting_Y_Coordinate)
while Len_Coordinate_list > 0:
Individ_coord = []
Individ_coord.append(X_coord)
Individ_coord.append(Y_coord)
Len_Coordinate_list -=1
Coordinate_list.append(Individ_coord)
if Len_Coordinate_list % int(math.sqrt(Squares)) == 0:
X_coord = Starting_X_Coordinate
Y_coord -= Square_Length
elif Len_Coordinate_list % int(math.sqrt(Squares)) != 0:
X_coord += Square_Length
Key_Dict = dict(zip(Name_of_Spots, Coordinate_list))
# print(Key_Dict)
return Key_Dict
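# Illustrative sketch (assumed inputs: a 300-unit board with 9 squares; values worked
# out by hand, not part of the original file):
#   keys = create_key_dict_and_coords(300, 9)
#   keys[0] == [-100.0, 100.0]; keys[4] == [0.0, 0.0]; keys[8] == [100.0, -100.0]
# Squares are numbered left-to-right, top-to-bottom; each value is that square's centre.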
def create_remaining_dict(Squares, Squares_to_win):
'''
Takes in total squares on the board and squares needed to win
Returns a dictionary of all potential winning lines as keys, and how many consecutive squares
are needed to win as their values
'''
Matrix = []
list_size = int(np.sqrt(Squares))
starting = 0
ending = int(np.sqrt(Squares))
len_matrix = Squares
while len_matrix > 0:
list_to_add = []
for i in range(starting, ending):
list_to_add.append(i)
Matrix.append(list_to_add)
starting += list_size
ending += list_size
len_matrix -= list_size
#Winning_Lines represents all possible winning lines in the grid
Winning_Lines = []
Horizontal_lines_in_Matrix = int(math.sqrt(Squares))
Total_Squares = 0
smaller_list = []
Row_index = 0
Column_Idx = 0
Total_Rows = Horizontal_lines_in_Matrix
Total_Squares_per_iteration = int(Squares_to_win)
Iterations_per_row = int(math.sqrt(Squares)-Squares_to_win)+1
Starting_Column = 0
Starting_Row = 0
while Horizontal_lines_in_Matrix > 0:
while Total_Squares < (Total_Squares_per_iteration * Iterations_per_row * Horizontal_lines_in_Matrix):
smaller_list.append(Matrix[Row_index][Column_Idx])
Column_Idx +=1
Total_Squares +=1
#When we come back to this loop, we start off where we last were, only 1 row lower
if Total_Squares == (Total_Squares_per_iteration * Iterations_per_row) :
a = smaller_list[:]
# print(a)
Winning_Lines.append(a)
smaller_list.clear()
Starting_Row +=1
Row_index = Starting_Row
Total_Squares = 0
Horizontal_lines_in_Matrix -=1
Starting_Column = 0
Column_Idx = Starting_Column
break
if Total_Squares % Total_Squares_per_iteration == 0:
b = smaller_list[:]
Winning_Lines.append(b)
smaller_list.clear()
Starting_Column +=1
Column_Idx = Starting_Column
break
Vertical_lines_in_Matrix = math.sqrt(Squares)
Total_Squares = 0
smaller_list = []
Row_index = 0
Column_Idx = 0
Total_Columns = Vertical_lines_in_Matrix
Total_Squares_per_iteration = int(Squares_to_win)
Iterations_per_row = int(math.sqrt(Squares)-Squares_to_win)+1
Starting_Column = 0
Starting_Row = 0
while Vertical_lines_in_Matrix > 0:
while Total_Squares < (Total_Squares_per_iteration * Iterations_per_row * Vertical_lines_in_Matrix):
smaller_list.append(Matrix[Row_index][Column_Idx])
Row_index +=1
Total_Squares +=1
if Total_Squares == (Total_Squares_per_iteration * Iterations_per_row) :
a = smaller_list[:]
Winning_Lines.append(a)
smaller_list.clear()
Starting_Column +=1
Column_Idx = Starting_Column
Total_Squares = 0
Vertical_lines_in_Matrix -=1
Starting_Row = 0
Row_index = Starting_Row
break
if Total_Squares % Total_Squares_per_iteration == 0:
b = smaller_list[:]
Winning_Lines.append(b)
smaller_list.clear()
Starting_Row +=1
Row_index = Starting_Row
break
Rows_to_iterate_using_Diagonals = int(1 + (math.sqrt(Squares)-Squares_to_win))
Iterations_per_row_using_Diagonals = 1 + (math.sqrt(Squares)-Squares_to_win)
Total_Squares_per_iteration = int(Squares_to_win)
Total_Rows = int(Rows_to_iterate_using_Diagonals)
smaller_list = []
Total_Squares = 0
Row_index = 0
Column_Idx = 0
Starting_Column = 0
Starting_Row = 0
while Rows_to_iterate_using_Diagonals > 0:
while Total_Squares < (Total_Squares_per_iteration * Iterations_per_row_using_Diagonals* Rows_to_iterate_using_Diagonals):
smaller_list.append(Matrix[Row_index][Column_Idx])
Row_index +=1
Column_Idx +=1
Total_Squares +=1
if Total_Squares == (Total_Squares_per_iteration * Iterations_per_row_using_Diagonals) :
a = smaller_list[:]
Winning_Lines.append(a)
smaller_list.clear()
Starting_Row +=1
Row_index = Starting_Row
Starting_Column = 0
Column_Idx = Starting_Column
Total_Squares = 0
Rows_to_iterate_using_Diagonals -=1
break
if Total_Squares % Total_Squares_per_iteration == 0:
b = smaller_list[:]
Winning_Lines.append(b)
smaller_list.clear()
Starting_Column +=1
Column_Idx = Starting_Column
Row_index = Starting_Row
break
Rows_to_iterate_using_Diagonals = int(1 + (math.sqrt(Squares)-Squares_to_win))
Iterations_per_row_using_Diagonals = 1 + (math.sqrt(Squares)-Squares_to_win)
Total_Squares_per_iteration = int(Squares_to_win)
Total_Rows = int(Rows_to_iterate_using_Diagonals)
smaller_list = []
Total_Squares = 0
Row_index = int(math.sqrt(Squares)-1)
Column_Idx = 0
Starting_Column = 0
Starting_Row = int(math.sqrt(Squares)-1)
while Rows_to_iterate_using_Diagonals > 0:
while Total_Squares < (Total_Squares_per_iteration * Iterations_per_row_using_Diagonals* Rows_to_iterate_using_Diagonals):
smaller_list.append(Matrix[Row_index][Column_Idx])
Row_index -=1
Column_Idx +=1
Total_Squares +=1
if Total_Squares == (Total_Squares_per_iteration * Iterations_per_row_using_Diagonals) :
a = smaller_list[:]
Winning_Lines.append(a)
smaller_list.clear()
Starting_Row -=1
Row_index = Starting_Row
Starting_Column = 0
Column_Idx = Starting_Column
Total_Squares = 0
Rows_to_iterate_using_Diagonals -=1
break
if Total_Squares % Total_Squares_per_iteration == 0:
b = smaller_list[:]
Winning_Lines.append(b)
smaller_list.clear()
Starting_Column +=1
Column_Idx = Starting_Column
Row_index = Starting_Row
break
Count_Lists = [Squares_to_win] * len(Winning_Lines)
Winning_Lines_Tuples = [tuple(x) for x in Winning_Lines]
Remaining_dict = dict(zip(Winning_Lines_Tuples, Count_Lists))
return Remaining_dict
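# Illustrative sketch (assumes a standard 3x3 game: 9 squares, 3 in a row to win):
#   lines = create_remaining_dict(9, 3)
#   len(lines) == 8          # 3 rows, 3 columns, 2 diagonals
#   lines[(0, 1, 2)] == 3    # each line still needs 3 consecutive marks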
def create_Dictionary_List(Squares, Squares_to_win, Number_of_Computer_Players):
#Create a dictionary of per-player winning-line dictionaries. Each player looks up
#their own name for their own dictionary; every other key in the outer dictionary
#forms that player's opponent dictionaries.
Individual_Dict = create_remaining_dict(Squares**2, Squares_to_win)
Computer_player_count = Number_of_Computer_Players
Computer_Names = ['red square', 'blue square', 'green square', 'red circle', 'blue circle', 'green circle']
comp_list = []
for i in range (0,Computer_player_count):
x = Computer_Names[i]
comp_list.append(x)
#This represents each player and their corresponding dictionary
List = []
Nested_dict_list = []
for i in comp_list:
List.append(dict(Individual_Dict))
Nested_dict_list = dict(zip(comp_list, List))
return Nested_dict_list
#Have to add this to a list
def neighbors(Matrix, row, column):
'''
Helper function for the adjacency function. Finds all neighbors of each spot on the board,
for a given spot. Sorts their values and returns the list of neighbors.
'''
Neighbors = []
len_matrix = len(Matrix)-1
if column < len_matrix:
e = Matrix[row][column+1]
Neighbors.append(e)
if column < len_matrix and row > 0:
c = Matrix[row-1][column+1]
Neighbors.append(c)
if column > 0:
d = Matrix[row][column-1]
Neighbors.append(d)
if column > 0 and row > 0:
a = Matrix[row-1][column-1]
Neighbors.append(a)
if row > 0:
b = Matrix[row-1][column]
Neighbors.append(b)
if row < len_matrix:
g = Matrix[row+1][column]
Neighbors.append(g)
if row < len_matrix and column < len_matrix:
h = Matrix[row+1][column+1]
Neighbors.append(h)
if row < len_matrix and column > 0:
f = Matrix[row+1][column-1]
Neighbors.append(f)
Neighbors.sort()
return Neighbors
def Adjacency_Dict(Squares):
'''
Returns dictionary of individual squares as keys, and their adjacent spots on the board
as values.
'''
#Create the Matrix from the Squares
Matrix = []
list_size = int(np.sqrt(Squares))
starting = 0
ending = int(np.sqrt(Squares))
len_matrix = Squares
while len_matrix > 0:
list_to_add = []
for i in range(starting, ending):
list_to_add.append(i)
Matrix.append(list_to_add)
starting += list_size
ending += list_size
len_matrix -= list_size
a = int(len(Matrix))
x= 0
y= 0
Adjacency_list = []
while a > 0:
output = neighbors(Matrix,x,y)
Adjacency_list.append(output)
y+=1
if y == len(Matrix):
y = 0
x+=1
a-=1
Name_of_Squares = list(range(0, Squares))
Adjacency_dict = dict(zip(Name_of_Squares, Adjacency_list))
return(Adjacency_dict)
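# Illustrative sketch (assumes a 3x3 board, i.e. Squares=9; values worked out by hand):
#   adj = Adjacency_Dict(9)
#   adj[0] == [1, 3, 4]                  # a corner square touches 3 neighbours
#   adj[4] == [0, 1, 2, 3, 5, 6, 7, 8]   # the centre square touches all 8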
def comp_draw_customized_x(boardsize, squares):
'''
Creates customized shape drawing of x for the computer based on the boardsize and squares
on the board.
'''
turtle.pensize(2.5)
turtle.color("blue")
a = computer.xcor()
b = computer.ycor()
turtle.hideturtle()
turtle.penup()
coord_value = [a, b]
movement = (boardsize/squares)*1.5
turtle.setposition(a-movement,b+movement)
turtle.pendown()
turtle.setposition(a+movement,b-movement)
turtle.penup()
turtle.setposition(a-movement,b-movement)
turtle.pendown()
turtle.setposition(a+movement,b+ movement)
return coord_value
def computer_draw_customized_circle(boardsize, squares):
'''
Creates customized shape drawing of circle for the computer based on the boardsize and squares
on the board.
'''
movement = (boardsize/squares)*1.5
turtle.pensize(2.5)
turtle.color("red")
a = computer.xcor()
b = computer.ycor()
coord_value = [a, b]
turtle.hideturtle()
turtle.penup()
turtle.setpos(a, (b-movement))
turtle.pendown()
turtle.circle(movement)
turtle.hideturtle()
return coord_value
def create_updated_dictionary(Remaining_dict, Squares_to_win):
'''
Creates an updated dictionary
'''
X_list = ["winning_lines", "opponent_winning_lines", "sum_of_remaining_lines", "Can_increase_winning_lines", "Can_lower_opponent_lines"]
X_list2 = [0, 0, (len(Remaining_dict) * Squares_to_win), True, True]
Updated_Dict = dict(zip(X_list, X_list2))
return Updated_Dict
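# Illustrative sketch (assumes the 8-line dictionary of a 3x3, 3-to-win game):
#   stats = create_updated_dictionary(create_remaining_dict(9, 3), 3)
#   stats["sum_of_remaining_lines"] == 24   # 8 lines * 3 squares still needed each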
def key_name(dictionary, coordinate):
'''
Finds particular key based on the given coordinate.
'''
for key, value in dictionary.items():
if value == coordinate:
position = key
return position
def decrease_values(dictionary, key_name, updated_dictionary):
'''
Takes in a key, and reduces all winning line values that contain that key.
'''
list_of_keys = []
for key, value in dictionary.items():
for keys in key:
if key_name == keys:
list_of_keys.append(key)
for winning_line in list_of_keys:
for key in dictionary.keys():
if winning_line == key:
dictionary[winning_line] = dictionary[winning_line]-1
Count = 0
for values in dictionary.values():
Count +=values
updated_dictionary["sum_of_remaining_lines"] = Count
Values = []
for value in dictionary.values():
Values.append(value)
if min(Values) == 0:
return 0
else:
return dictionary
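# Illustrative sketch (hypothetical first move of a 3x3, 3-to-win game):
#   lines = create_remaining_dict(9, 3)
#   stats = create_updated_dictionary(lines, 3)
#   decrease_values(lines, 4, stats)   # every line containing square 4 drops by one
# A return value of 0 means some line reached a count of 0, i.e. it has been completed.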
def decrease_values_multiple_players(dictionary, key_name):
'''
Takes in a key and reduces all winning lines in multiple player game
'''
for value in dictionary.values():
for key_lists in value.keys():
if key_name in key_lists:
value[key_lists] = value[key_lists]-1
Values = []
for value in dictionary.values():
for valuez in value.values():
Values.append(valuez)
if min(Values) == 0:
return 0
else:
return dictionary
def remove_dict(Key_Dictionary, coordinate):
'''
Removes the key from the dictionary if the coordinate corresponds to the key.
'''
for key,value in Key_Dictionary.items():
if value == coordinate:
storedvalue = key
Key_Dictionary.pop(storedvalue)
return Key_Dictionary
# move_list = []
# for key,value in Key_Dictionary.items():
# if value == coordinate:
# move_list.append(key)
# We need to define a way to reference all opponent dictionaries, we will reference the key dictionary and the pieces removed
#from the key dictionary will go into the list of moves, which will be one general list
#So all dictionaries will be referenced through a for loop, so we need a way to get all opponent dictionaries into one, we
#Can nest them
def Divide_Dictionary(Dictionary, Player):
Player_Name = []
Player_Values = []
Opponent_Names = []
Opponent_Values = []
for k,v in Dictionary.items():
if k == Player:
Player_Name.append(k)
Player_Values.append(v)
elif k != Player:
Opponent_Names.append(k)
Opponent_Values.append(v)
Player_Dict = dict(zip(Player_Name, Player_Values))
Opponent_Dict = dict(zip(Opponent_Names, Opponent_Values))
return Player_Dict, Opponent_Dict
def Multiple_Terminator_Move(Player_Piece, Dictionary_List, Key_Dictionary, Starting_count, \
Adjacency_Dict, List_of_Moves):
'''
This is the brain of the game. It is the algorithm behind the computer's moves and logic.
This is meant for multiple AI players
'''
Player_Dict = Divide_Dictionary(Dictionary_List, Player_Piece)[0]
Opponent_Dict = Divide_Dictionary(Dictionary_List, Player_Piece)[1]
for v in Player_Dict.values():
Your_Dictionary = v
Opponent_Dictionary = []
for v in Opponent_Dict.values():
Opponent_Dictionary.append(v)
#Your Dictionary = Dictionary with Winning_Lines as keys, and counts as values
#Opponent Dictionary = List of Dictionaries, in Each Dictionary, Winning Lines as keys, counts as values
#Remaining Keys represents possible spots to move to in any given move
Remaining_Keys = []
for key in Key_Dictionary.keys():
Remaining_Keys.append(key)
# puts blocking keys in Keys_to_Remove against multiple opponents
Keys_to_Remove = []
for Dictionary in Opponent_Dictionary:
for winning_line, count in Dictionary.items():
for Line, Count in Your_Dictionary.items():
if winning_line == Line:
if count == 1 and Count == Starting_count:
for keys in Line:
if keys in Remaining_Keys:
Keys_to_Remove.append(keys)
#Setting up winning move if possible against multiple opponents:
Keys_to_win = []
for Dictionary in Opponent_Dictionary:
for winning_line, count in Dictionary.items():
for Line, Count in Your_Dictionary.items():
if winning_line == Line:
if Count == 1 and count == Starting_count:
for keys in Line:
if keys in Remaining_Keys:
Keys_to_win.append(keys)
#Check if you can win
if len(Keys_to_win)> 0:
for position, coord in Key_Dictionary.items():
if Keys_to_win[0] == position:
coordinates = coord
computer.setpos(coordinates[0],coordinates[1])
List_of_Moves.append(Keys_to_win[0])
return
#Check if you MUST block winning move of opponent
if len(Keys_to_Remove)> 0:
for position, coord in Key_Dictionary.items():
if Keys_to_Remove[0] == position:
coordinates = coord
computer.setpos(coordinates[0],coordinates[1])
List_of_Moves.append(Keys_to_Remove[0])
return
Keys_Remaining = len(Remaining_Keys)
Winning_Line_Count = []
Count = 0
index = 0
List_of_Opponent_Moves = []
List_of_your_moves = []
Opp_Adjacency_list = []
Your_Adjacency_list = []
while Keys_Remaining > 0:
key = Remaining_Keys[index]
#value winning lines highly to stop opponent, value count highly, super highly in the beginning, this is only
#to block winning lines
for Winning_lines, valuess in Your_Dictionary.items():
for Dictionary in Opponent_Dictionary:
for Winning_linez, valuezz in Dictionary.items():
if Winning_lines == Winning_linez:
if valuezz < Starting_count and valuess == Starting_count:
if key in Winning_lines:
List_of_Opponent_Moves.append(key)
# if key not in Opp_winning_lines:
# Opp_winning_lines.append(key)
if valuess - valuezz <= .5 * Starting_count:
#Make incentive on smaller board to block quicker, as one slip up early can cost game
#Notice it is to the 8th power here, again bigger than continuing winning line, which is
# only raised to the 6th power
if Starting_count <= 6:
Count += (valuess - valuezz)**8
#Disincentivize blocking early on big boards, no need, better to expand
elif Starting_count > 6:
Count += (valuess - valuezz)**2
elif valuess - valuezz > .5 * Starting_count:
#Not as important to block once it's too late in small games, by that point, it should have
#already blocked, but still more value than adjacency moves
if Starting_count < 6:
Count += (valuess - valuezz)**7
#Make the incentive on a bigger board, to wait until blocks are >half the amount to win
#Before you force the block, allow for more expansion...notice its to the 7th power here,
# which makes it more important than continuing your original line, which is only raised to 6th
elif Starting_count > 6:
Count += (valuess - valuezz)**7
#improve your winning line if possible, value count highly, this is only to improve existing winning lines
for Dictionary in Opponent_Dictionary:
for Winning_linez, valuezz in Dictionary.items():
for Winning_lines, values in Your_Dictionary.items():
if Winning_lines == Winning_linez:
if values < Starting_count and valuezz == Starting_count:
if key in Winning_lines:
List_of_your_moves.append(key)
#incentivizing continuing the winning lines in progress on any board size,
#This incentivizes expanding on early winning lines
if valuezz - values < .5 * Starting_count:
Count += (valuezz - values)**6
#After blocking opponents early lines in small games, or late lines in big games,
# raising to the power of 4 here trumps the 3rd and 2nd power in the other games,
# meaning this will take precedence over blocking in those situations
elif valuezz - values >= .5 * Starting_count:
Count += (valuezz - values)**4
#We need something to implement finding new winning lines
for Dictionary in Opponent_Dictionary:
for Winning_lines, values in Dictionary.items():
for Winning_linez, valuez in Your_Dictionary.items():
if Winning_lines == Winning_linez:
if values == Starting_count and valuez == Starting_count:
if key in Winning_lines:
Count += Starting_count
#Value 2nd highly if spot is in adjacency Dict of opponent moves
for keys, adjacency_list in Adjacency_Dict.items():
for keyz in List_of_Opponent_Moves:
if keyz == keys:
if key in adjacency_list:
Opp_Adjacency_list.append(key)
Count +=len(adjacency_list)
#Value 2nd highly if spot is in adjacency Dict of your moves
for ky, adj_list in Adjacency_Dict.items():
for kyz in List_of_your_moves:
if kyz == ky:
if key in adj_list:
Your_Adjacency_list.append(key)
Count += len(adj_list)
#Append the count, clear all the containers, repeat
Winning_Line_Count.append(Count)
Count = 0
List_of_Opponent_Moves.clear()
List_of_your_moves.clear()
Opp_Adjacency_list.clear()
Your_Adjacency_list.clear()
index +=1
Keys_Remaining -=1
Best_Choice = dict(zip(Remaining_Keys, Winning_Line_Count))
Random_Final_Choiz = []
Random_Best_Choice = []
Random_Key = []
for key, value in Best_Choice.items():
Random_Best_Choice.append(value)
max_val = max(Random_Best_Choice)
for key, value in Best_Choice.items():
if value == max_val:
Random_Key.append(key)
if len(Random_Key) > 1:
random_guy = random.randint(0, (len(Random_Key)-1))
Random_Final_Choiz.append(Random_Key[random_guy])
for position, coord in Key_Dictionary.items():
if Random_Final_Choiz[0] == position:
coordinates = coord
computer.setpos(coordinates[0],coordinates[1])
List_of_Moves.append(Random_Final_Choiz[0])
return
Best_Key = max(Best_Choice, key=Best_Choice.get)
for position, coord in Key_Dictionary.items():
if Best_Key == position:
coordinates = coord
computer.setpos(coordinates[0],coordinates[1])
List_of_Moves.append(Best_Key)
return
global PLAYER_TURN
PLAYER_TURN = True
def Play_Game(Boardsize, Squares, Squares_to_win, Computer_Players, Player=False):
'''
Function to play entire game using all other functions. One ring, to rule them all!
'''
Create_Board(Boardsize, Squares, "white", "Tic-Tac-Toe", "black", 2.5)
Key_Dictionary = create_key_dict_and_coords(Boardsize, Squares)
Dictionary_List = create_Dictionary_List(Squares, Squares_to_win, Computer_Players)
Adjacency_Dict1 = Adjacency_Dict(Squares)
List_of_X_moves = []
List_of_O_moves = []
Updated_Dict = create_updated_dictionary(Remaining_Dict_O, Squares_to_win)
#If the computers are playing versus themselves:
if Player==False:
Count = 0
random_start = random.randint(0,1)
if random_start == 0:
Variable = 1
else:
Variable = -1
Game_over = False
while Count <Squares and Game_over == False:
while Variable == 1:
Terminator_Move(Remaining_Dict_O, Remaining_Dict_X, Key_Dictionary, Squares_to_win, List_of_X_moves, List_of_O_moves,\
Adjacency_Dict1)
Coordinat = (computer_draw_customized_circle(Boardsize, Squares))
key = (key_name(Key_Dictionary, Coordinat))
if decrease_values(Remaining_Dict_O, key, Updated_Dict) == 0:
print("O WINS!!!")
Game_over = True
break
remove_dict(Key_Dictionary, Coordinat)
if Variable10a == "no":
Winning_line_O_counts= []
Winning_line_X_counts = []
for value in Remaining_Dict_O.values():
Winning_line_O_counts.append(value)
for value in Remaining_Dict_X.values():
Winning_line_X_counts.append(value)
max_val_O = max(Winning_line_O_counts)
max_val_X = max(Winning_line_X_counts)
if max_val_O < Squares_to_win and max_val_X < Squares_to_win:
print("The game can no longer be won!")
turtle.textinput("The game can no longer be won! ", "Press Stop to restart the game")
turtle.bye()
Variable *= -1
Count +=1
if Count == Squares:
break
while Variable == -1:
if Count == Squares:
break
Terminator_Move(Remaining_Dict_X, Remaining_Dict_O, Key_Dictionary, Squares_to_win, List_of_O_moves, List_of_X_moves,\
Adjacency_Dict1)
Coordinat = (comp_draw_customized_x(Boardsize, Squares))
key = (key_name(Key_Dictionary, Coordinat))
if decrease_values(Remaining_Dict_X, key, Updated_Dict) == 0:
print("X WINS!!!")
Game_over = True
break
remove_dict(Key_Dictionary, Coordinat)
if Variable10a == "no":
Winning_line_O_counts= []
Winning_line_X_counts = []
for value in Remaining_Dict_O.values():
Winning_line_O_counts.append(value)
for value in Remaining_Dict_X.values():
Winning_line_X_counts.append(value)
max_val_O = max(Winning_line_O_counts)
max_val_X = max(Winning_line_X_counts)
if max_val_O < Squares_to_win and max_val_X < Squares_to_win:
print("The game can no longer be won!")
turtle.textinput("The game can no longer be won! ", "Press Stop to restart the game")
turtle.bye()
Variable *=-1
Count +=1
if Count == Squares:
break
#If the player is interacting with the computer:
if Player==True:
random_start = random.randint(0,1)
if random_start == 0:
Player=False
else:
Player=True
Game_over = False
while Game_over == False:
while Player==False:
Terminator_Move(Remaining_Dict_O, Remaining_Dict_X, Key_Dictionary, Squares_to_win, List_of_X_moves, List_of_O_moves,\
Adjacency_Dict1)
Coordinat = (computer_draw_customized_circle(Boardsize, Squares))
key = (key_name(Key_Dictionary, Coordinat))
if decrease_values(Remaining_Dict_O, key, Updated_Dict) == 0:
print("O WINS!!!")
Game_over = True
remove_dict(Key_Dictionary, Coordinat)
Player=True
while Player==True:
def switch_players():
'''
This function allows the player to use the prompt to finish their loop,
go through the computer's loop and end up back at their own loop again.
This allows us to run the turtle commands while keeping the game open
with minimum interruption.
'''
key = (key_name(Key_Dictionary, Player_COORD))
List_of_X_moves.append(key)
if decrease_values(Remaining_Dict_X, key, Updated_Dict) == 0:
print("X WINS!!!")
turtle.textinput("X has won!", "Press Stop to restart the game")
turtle.bye()
remove_dict(Key_Dictionary, Player_COORD)
#Check to see if game can no longer be won by either player
if Variable10a == "no":
Winning_line_O_counts= []
Winning_line_X_counts = []
for value in Remaining_Dict_O.values():
Winning_line_O_counts.append(value)
for value in Remaining_Dict_X.values():
Winning_line_X_counts.append(value)
max_val_O = max(Winning_line_O_counts)
max_val_X = max(Winning_line_X_counts)
if max_val_O < Squares_to_win and max_val_X < Squares_to_win:
print("The game can no longer be won!")
turtle.textinput("The game can no longer be won!", "Press Stop to restart the game")
turtle.bye()
Len_list = []
for key in Key_Dictionary.keys():
Len_list.append(key)
if len(Len_list) == 0:
print("Thanks for playing!")
turtle.textinput("Thanks for playing!", "Press Stop to restart the game")
turtle.bye()
Terminator_Move(Remaining_Dict_O, Remaining_Dict_X, Key_Dictionary, Squares_to_win, List_of_X_moves, List_of_O_moves,\
Adjacency_Dict1)
Coordinat = (computer_draw_customized_circle(Boardsize, Squares))
key = (key_name(Key_Dictionary, Coordinat))
if decrease_values(Remaining_Dict_O, key, Updated_Dict) == 0:
print("O WINS!!!")
turtle.textinput("O has won! ", "Press Stop to restart the game")
turtle.bye()
list_o_keys = []
remove_dict(Key_Dictionary, Coordinat)
#Check to see if game can no longer be won by either player
if Variable10a == "no":
Winning_line_O_counts= []
Winning_line_X_counts = []
for value in Remaining_Dict_O.values():
Winning_line_O_counts.append(value)
for value in Remaining_Dict_X.values():
Winning_line_X_counts.append(value)
max_val_O = max(Winning_line_O_counts)
max_val_X = max(Winning_line_X_counts)
if max_val_O < Squares_to_win and max_val_X < Squares_to_win:
print("The game can no longer be won!")
turtle.textinput("The game can no longer be won! ", "Press Stop to restart the game")
turtle.bye()
for key in Key_Dictionary.keys():
list_o_keys.append(key)
if (len(list_o_keys)) == 0:
print("Thanks for playing!")
turtle.textinput("Thanks for playing! ", "Press Stop to restart the game")
turtle.bye()
global PLAYER_TURN
PLAYER_TURN = True
Square_Length = round((Boardsize / np.sqrt(Squares)))
speed = Square_Length
player = turtle.Turtle()
player.color("green")
player.shape("triangle")
player.penup()
player.speed(0)
player.shapesize(.8, .8, .8)
Starting_pos_x = -(Boardsize/2) + .5*(Square_Length) + (((np.sqrt(Squares)-1)/2) * Square_Length)
Starting_pos_y = (Boardsize/2) - .5*(Square_Length) - (((np.sqrt(Squares)-1)/2) * Square_Length)
player.setposition(Starting_pos_x , Starting_pos_y )
player.setheading(90)
movement = (Boardsize/Squares)*1.5
def draw_x():
'''
This is the player interaction function. Will allow the player to mark an x
when it is his or her turn to move. Will force the player to be in an unmarked
spot before allowing the X to be marked. Will then loop into the switch_players
function.
'''
turtle.pensize(2.5)
a = player.xcor()
b = player.ycor()
turtle.hideturtle()
turtle.penup()
turtle.color("blue")
coord_value = [a, b]
list_of_Coords = []
for key, coordinate in Key_Dictionary.items():
list_of_Coords.append(coordinate)
if coord_value in list_of_Coords:
turtle.setposition(a-movement,b+movement)
turtle.pendown()
turtle.setposition(a+movement,b-movement)
turtle.penup()
turtle.setposition(a-movement,b-movement)
turtle.pendown()
turtle.setposition(a+movement,b+ movement)
global Player_COORD
Player_COORD = coord_value
global PLAYER_TURN
PLAYER_TURN = False
switch_players()
def move_left():
'''
Left command if it is the player's turn
'''
if PLAYER_TURN == True:
x = player.xcor()
x -= speed
if x < -(Boardsize/2)+ .5*(Square_Length):
x = -(Boardsize/2)+ .5*(Square_Length)
player.setx(x)
player.setpos(x, player.ycor())
def move_right():
'''
Right command if it is the player's turn
'''
if PLAYER_TURN == True:
x = player.xcor()
x += speed
if x > (Boardsize/2) - .5*(Square_Length):
x = -(Boardsize/2) + .5*(Square_Length) + ((np.sqrt(Squares)-1) * Square_Length)
player.setx(x)
player.setpos(x, player.ycor())
def move_up():
'''
Up command if it is the player's turn
'''
if PLAYER_TURN == True:
y = player.ycor()
y += speed
if y > (Boardsize/2) - .5*(Square_Length):
y = (Boardsize/2) - .5*(Square_Length)
player.sety(y)
player.setpos(player.xcor(), y)
def move_down():
'''
Down command if it is the player's turn
'''
if PLAYER_TURN == True:
y = player.ycor()
y -= speed
if y < -(Boardsize/2)+ .5*(Square_Length):
y = (Boardsize/2) - .5*(Square_Length) - ((np.sqrt(Squares)-1) * Square_Length)
player.sety(y)
player.setpos(player.xcor(), y)
def quit_game():
'''
Allows player to quit game by pressing Esc at any time.
'''
turtle.textinput("See you later friend! ", "Press Enter to quit")
turtle.bye()
turtle.listen()
turtle.onkey(move_left, "Left")
turtle.onkey(move_right, "Right")
turtle.onkey(move_up, "Up")
turtle.onkey(move_down, "Down")
# turtle.onkey(draw_circle, "o")
turtle.onkey(draw_x, "x")
turtle.onkey(quit_game, "Escape")
turtle.mainloop()
#This sets the main game loop using the prompts from the beginning
# If player chooses to play computer, use this loop
# if Variable8 == "yes":
# print("Place mouse cursor over the board and click on the board to enable player controls. \n Move with the arrow keys, and mark your square with 'x' key ")
# print("To restart the game, click the stop button, and then click run again.")
# Play_Game(Variable1, Variable3, Variable4, Player=True)
# # If player chooses not to play computer, use this loop
# if Variable8 == "no":
# print("To restart the game, click the stop button, and then click run again.")
# Play_Game(Variable1, Variable3, Variable4, Player=False)
A = create_Dictionary_List(3, 3, 6)
B = Divide_Dictionary(A, 'red square')
print(B[0])
print(B[1])
| 37.294314
| 166
| 0.530284
|
41dfbe3956584c9a6771ab964d2b8eb3651cb011
| 314
|
py
|
Python
|
tests/integration/test_toggle_mal.py
|
Cyniikal/labelbox-python
|
526fb8235c245a3c6161af57c354a47d68385bab
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_toggle_mal.py
|
Cyniikal/labelbox-python
|
526fb8235c245a3c6161af57c354a47d68385bab
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_toggle_mal.py
|
Cyniikal/labelbox-python
|
526fb8235c245a3c6161af57c354a47d68385bab
|
[
"Apache-2.0"
] | null | null | null |
def test_enable_model_assisted_labeling(project):
response = project.enable_model_assisted_labeling()
assert response == True
response = project.enable_model_assisted_labeling(True)
assert response == True
response = project.enable_model_assisted_labeling(False)
assert response == False
| 31.4
| 60
| 0.77707
|
499390dbb2bec4d151e4f2dd3e68482afb7a6098
| 2,505
|
py
|
Python
|
data/p4VQE/R1/benchmark/startQiskit_QC70.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p4VQE/R1/benchmark/startQiskit_QC70.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p4VQE/R1/benchmark/startQiskit_QC70.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=3
# total number=9
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.cx(input_qubit[1],input_qubit[0]) # number=5
prog.cx(input_qubit[1],input_qubit[0]) # number=6
prog.y(input_qubit[2]) # number=7
prog.y(input_qubit[2]) # number=8
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
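# Note: the grid search above scans gamma and beta over [0, pi) in steps of 0.1 and
# keeps the pair that maximises F1; those angles parameterise the QAOA-style circuit
# built by make_circuit() below.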
prog = make_circuit(4)
sample_shot =5200
writefile = open("../data/startQiskit_QC70.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_5_yorktown")
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| 27.228261
| 118
| 0.633932
|
24eb5708cdce7f1dd54f60f0bf7ad51a10095a14
| 1,018
|
py
|
Python
|
generator/group.py
|
nikor1337/ironpython_training
|
022e2c48193fa81c3c3d7530ae0b9d939e50687e
|
[
"Apache-2.0"
] | null | null | null |
generator/group.py
|
nikor1337/ironpython_training
|
022e2c48193fa81c3c3d7530ae0b9d939e50687e
|
[
"Apache-2.0"
] | null | null | null |
generator/group.py
|
nikor1337/ironpython_training
|
022e2c48193fa81c3c3d7530ae0b9d939e50687e
|
[
"Apache-2.0"
] | null | null | null |
from model.group import Group
import random
import string
import os.path
import getopt
import sys
import time
import clr
clr.AddReferenceByName('Microsoft.Office.Interop.Excel, Version=15.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
from Microsoft.Office.Interop import Excel
try:
opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of groups", "file"])
except getopt.GetoptError as err:
print(err)
sys.exit(2)
n = 5
f = "data/groups.xlsx"
for o, a in opts:
if o == "-n":
n = int(a)
elif o == "-f":
f = a
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "*10
return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
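# Illustrative example: random_string("name", 10) returns "name" followed by
# 0 to 9 random characters drawn from letters, digits, punctuation and spaces.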
testdata = [Group(name="")] + [
Group(name=random_string("name", 10))
for i in range(n)
]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
excel = Excel.ApplicationClass()
excel.Visible = True
time.sleep(10)
| 22.622222
| 124
| 0.683694
|
a688b11506e538d6f7785057e584b826bfae76c2
| 3,864
|
py
|
Python
|
Proyecto1/venv/Lib/site-packages/telegram/inline/inlinequeryresultcachedgif.py
|
RicardoVinicioJara/LugaresTusisticos
|
d92264231c471cecbc17a2279bedb3779424147c
|
[
"CC-BY-3.0"
] | 1
|
2021-12-27T21:37:04.000Z
|
2021-12-27T21:37:04.000Z
|
Proyecto1/venv/Lib/site-packages/telegram/inline/inlinequeryresultcachedgif.py
|
RicardoVinicioJara/LugaresTusisticos
|
d92264231c471cecbc17a2279bedb3779424147c
|
[
"CC-BY-3.0"
] | null | null | null |
Proyecto1/venv/Lib/site-packages/telegram/inline/inlinequeryresultcachedgif.py
|
RicardoVinicioJara/LugaresTusisticos
|
d92264231c471cecbc17a2279bedb3779424147c
|
[
"CC-BY-3.0"
] | 1
|
2021-05-23T18:37:37.000Z
|
2021-05-23T18:37:37.000Z
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2020
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the classes that represent Telegram InlineQueryResultCachedGif."""
from telegram import InlineQueryResult
from telegram.utils.helpers import DEFAULT_NONE
class InlineQueryResultCachedGif(InlineQueryResult):
"""
Represents a link to an animated GIF file stored on the Telegram servers. By default, this
animated GIF file will be sent by the user with an optional caption. Alternatively, you can
use :attr:`input_message_content` to send a message with specified content instead of
the animation.
Attributes:
type (:obj:`str`): 'gif'.
id (:obj:`str`): Unique identifier for this result, 1-64 bytes.
gif_file_id (:obj:`str`): A valid file identifier for the GIF file.
title (:obj:`str`): Optional. Title for the result.
caption (:obj:`str`): Optional. Caption of the GIF file to be sent, 0-1024 characters
after entities parsing.
parse_mode (:obj:`str`): Optional. Send Markdown or HTML, if you want Telegram apps to show
bold, italic, fixed-width text or inline URLs in the media caption. See the constants
in :class:`telegram.ParseMode` for the available modes.
reply_markup (:class:`telegram.InlineKeyboardMarkup`): Optional. Inline keyboard attached
to the message.
input_message_content (:class:`telegram.InputMessageContent`): Optional. Content of the
message to be sent instead of the gif.
Args:
id (:obj:`str`): Unique identifier for this result, 1-64 bytes.
gif_file_id (:obj:`str`): A valid file identifier for the GIF file.
title (:obj:`str`, optional): Title for the result.
caption (:obj:`str`, optional): Caption of the GIF file to be sent, 0-1024 characters
after entities parsing.
parse_mode (:obj:`str`, optional): Send Markdown or HTML, if you want Telegram apps to show
bold, italic, fixed-width text or inline URLs in the media caption. See the constants
in :class:`telegram.ParseMode` for the available modes.
reply_markup (:class:`telegram.InlineKeyboardMarkup`, optional): Inline keyboard attached
to the message.
input_message_content (:class:`telegram.InputMessageContent`, optional): Content of the
message to be sent instead of the gif.
**kwargs (:obj:`dict`): Arbitrary keyword arguments.
"""
def __init__(self,
id,
gif_file_id,
title=None,
caption=None,
reply_markup=None,
input_message_content=None,
parse_mode=DEFAULT_NONE,
**kwargs):
# Required
super().__init__('gif', id)
self.gif_file_id = gif_file_id
# Optionals
self.title = title
self.caption = caption
self.parse_mode = parse_mode
self.reply_markup = reply_markup
self.input_message_content = input_message_content
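# A minimal usage sketch (the id and file id below are placeholders):
#   result = InlineQueryResultCachedGif(id='1', gif_file_id='<telegram file id>',
#                                       caption='cached gif')
#   bot.answer_inline_query(inline_query_id, [result])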
| 46.554217
| 99
| 0.675207
|
0e7dee919e933524f7e86bde069ad2480a19b446
| 8,787
|
py
|
Python
|
parse.py
|
ericsims/LTSpiceJobServer
|
6a88ffb84aa05c6d61ccd072045bfc6f4b44d39d
|
[
"MIT"
] | null | null | null |
parse.py
|
ericsims/LTSpiceJobServer
|
6a88ffb84aa05c6d61ccd072045bfc6f4b44d39d
|
[
"MIT"
] | null | null | null |
parse.py
|
ericsims/LTSpiceJobServer
|
6a88ffb84aa05c6d61ccd072045bfc6f4b44d39d
|
[
"MIT"
] | null | null | null |
import subprocess
import re
from queue import Queue
from threading import Thread
from time import time
import itertools
import math
import string
import random
LTSpicePath = r'C:\Program Files\LTC\LTspiceXVII\XVIIx64.exe'
file = 'example LTSpice sims/BuckBoost'
##file = 'example LTSpice sims/MonteCarlo'
returned_value = subprocess.run([LTSpicePath, '-netlist', file+'.asc'], shell=True, check=True)
print('returned value:', returned_value)
lines = []
stepvals = []
stepvars = []
class LTSpiceWorker(Thread):
def __init__(self, queue):
Thread.__init__(self)
self.queue = queue
def run(self):
while True:
# Get the work from the queue and expand the tuple
LTSpicePath_, job = self.queue.get()
try:
returned_value = subprocess.run([LTSpicePath_, '-b', '{}.net'.format(job)], shell=True, check=True)
## print('job:{} returned:{}'.format(job,returned_value))
finally:
self.queue.task_done()
def parseStepDirec(stepdirective):
## print("step: ", stepdirective)
str_split = stepdirective.split()
mode = None
var = None
if str_split[1] == 'param':
var = str_split[2]
if str_split[3] == 'list':
mode = 'list'
params = str_split[4:]
else:
mode = 'lin'
params = str_split[3:]
elif str_split[1] == 'oct':
mode = 'oct'
var = str_split[3]
params = str_split[4:]
elif str_split[1] == 'dec':
mode = 'dec'
var = str_split[3]
params = str_split[4:]
if mode == 'list':
values = params
elif mode == 'lin':
values = []
if not('.' in params[0] or '.' in params[1] or '.' in params[2]):
start = int(params[0])
end = int(params[1])
inc = int(params[2])
else:
start = float(params[0])
end = float(params[1])
inc = float(params[2])
x=start
while x < end:
values.append(x)
x += inc
values.append(end)
# TODO parse dec
# TODO parse oct
else:
raise NameError('not a valid step command???')
## print("mode: {}, var: {}, params:{}".format(mode,var,params))
## print("values:{}".format(values))
stepvars.append(var)
stepvals.append(tuple(values[:]))
return ''
## paramStrList = []
## for value in values:
## paramStrList.append('.param {} {}'.format(var, value))
## print('.param {} {}'.format(var, value))
## return paramStrList
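# Illustrative example (hypothetical directives): parseStepDirec('.step param R1 1 5 1')
# appends 'R1' to stepvars and (1, 2, 3, 4, 5) to stepvals, i.e. a linear sweep from
# 1 to 5 in steps of 1; parseStepDirec('.step param X list 1k 2k') stores the raw
# list values ('1k', '2k') unchanged.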
specialdirectives=[\
{'str':'.lib', 'keepcase':True, 'function':None},\
{'str':'.wave', 'keepcase':True, 'function':None},\
{'str':'.step', 'keepcase':False, 'function':parseStepDirec}]
def evalLines(parent, bucket, start, end):
for i in range(start,end):
if isinstance(lines[i], list):
a = lines[i][:]
a0 = a.pop(0)
while a:
newbuck = bucket[:]
newbuck.append(a.pop(0))
evalLines(parent, newbuck, i+1, end)
parent.append(newbuck)
bucket.append(a0)
else:
bucket.append(lines[i])
with open(file+".net", 'r') as fp:
line = fp.readline()
cnt = 0
while line:
# replace any double spaces
line = re.sub(re.compile(' +'), ' ', line)
isaspecialdirec = False
for direc in specialdirectives:
line_templower = line.lower().strip()
if line_templower.startswith(direc['str']):
isaspecialdirec = True
if not direc['keepcase']:
line=line_templower
else:
line=line.strip()
if not direc['function'] is None:
line = direc['function'](line)
lines.append(line.strip())
print("Line {:>4}: {:<8} {}".format(cnt, direc['str'], line))
if not isaspecialdirec:
lines.append(line.strip())
print("Line {:>4}: {:<8} {}".format(cnt, '', line.strip()))
line = fp.readline()
cnt += 1
print()
def execRuns(workerCount_):
runList_ = []
combinations = [list(sub) for sub in list(itertools.product(*stepvals))[:]]
numberofsteps = len(combinations)
indexlist = []
if numberofsteps > 1:
lookuptable = []
j = 0
s = 0
for vi in range(numberofsteps):
indexlist.append((j,s))
print('j{}, s{}, vi{}, v{}'.format(j,s,vi,combinations[vi]))
if not (len(lookuptable) > j):
lookuptable.append([])
lookuptable[j].append(combinations[vi])
j += 1
if j >= workerCount_:
j = 0
s += 1
for j in range(len(lookuptable)):
out = open('j{}.net'.format(j),'w')
runList_.append('j{}'.format(j))
newlines = lines[:]
## print()
## print('j{}'.format(j))
dummylength = len(lookuptable[j])
if dummylength > 1:
## dummyvar = ''.join(random.choice(string.ascii_lowercase) for i in range(16))
dummyvar = 'temporarystepvarx'
## print('.step param {} 0 {} 1'.format(dummyvar,dummylength-1))
newlines.insert(len(newlines)-2,'.step param {} 0 {} 1'.format(dummyvar,dummylength-1))
for x in range(len(stepvars)):
cmd = '.param {} table({}'.format(stepvars[x],dummyvar)
for y in range(len(lookuptable[j])):
cmd+=(',{},{}'.format(y,lookuptable[j][y][x]))
cmd+=(')')
newlines.insert(len(newlines)-2,cmd)
## print(cmd)
else:
for x in range(len(stepvars)):
cmd = '.param {} {}'.format(stepvars[x],lookuptable[j][0][x])
newlines.insert(len(newlines)-2,cmd)
## print(cmd)
for line in newlines:
out.write('{}\n'.format(line))
out.close()
else:
indexlist.append((0,0))
out = open('j0.net','w')
runList_.append('j0')
for line in lines:
out.write('{}\n'.format(line))
out.close()
print()
print('doing {} steps in {} runs with {} workers'.format(len(combinations), len(runList_), workerCount_))
start = time()
queue = Queue()
for x in range(workerCount_):
worker = LTSpiceWorker(queue)
worker.daemon = True
worker.start()
for run in runList_:
queue.put((LTSpicePath, run))
queue.join()
end = time()
elapsed = end - start
print('elapsed: ', elapsed)
return (runList_,indexlist)
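# Illustrative example (hypothetical step setup): with two stepped parameters of three
# values each (9 combinations) and 4 workers, the combinations are dealt round-robin
# into jobs j0..j3 (j0 gets 3, the others 2); any job holding more than one combination
# is rewritten with a temporary '.step param temporarystepvarx ...' plus table() lookups
# so that each LTSpice run replays its share of the original sweep.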
(runs, indexlist) = execRuns(16)
import ltspice
import struct
outdata = [None] * len(runs)
for r in range(len(runs)):
outdata[r] = ltspice.Ltspice('{}.raw'.format(runs[r]))
outdata[r].parse()
totalpoints = 0
for dat in outdata:
totalpoints += dat._point_num
##totalpoints = outdata[0]._point_num
# TODO: this only works for TRANS...
out = open('outdata.raw','w', encoding='UTF16', newline='')
out.write('Title: * {}\n'.format('i forgot what the input file was'))
out.write('Date: {}\n'.format(outdata[0].date.strip()))
out.write('Plotname: {}\n'.format(outdata[0].plot_name.strip())) # TODO: copying this name might not always work
out.write('Flags: {}\n'.format(outdata[0].flags.strip())) # TODO: copying this name might not always work
out.write('No. Variables: {}\n'.format(outdata[0]._variable_num))
out.write('No. Points: {}\n'.format(totalpoints))
out.write('Offset: {}\n'.format('0.0000000000000000e+000')) # not sure how to deal with this yet
out.write('Command: {}\n'.format('idk does this even matter')) # does this matter?
out.write('Variables:\n')
for i in range(outdata[0].getVariableNumber()):
out.write('\t{}\t{}\t{}\n'.format(i, outdata[0].getVariableNames()[i], outdata[0].getVariableTypes()[i]))
out.write('Binary:\n') # again, this might now always work
out.close()
out = open('outdata.raw','ab')
##indexlist=[(14,0),(15,0)]
for (j,s) in indexlist:
print('j:{} s{} {} - {}'.format(j,s,outdata[j]._case_split_point[s],outdata[j]._case_split_point[s+1]))
for l in range(outdata[j]._case_split_point[s],outdata[j]._case_split_point[s+1]):
out.write(struct.pack('d',outdata[j].time_raw[l]))
out.write(outdata[j].data_raw[l][2:].tobytes())
## print('{} {} {}'.format(l,struct.pack('d',outdata[0].time_raw[l]),outdata[0].data_raw[l][2:].tobytes()))
out.flush()
out.close()
##if steps > 1:
# stepped sim
| 33.033835
| 115
| 0.546375
|
8266960e3ae63b7d7e5bde658ac258b6f46975ad
| 3,186
|
py
|
Python
|
predict.py
|
mayyasy/Image-Classifier-AIPND
|
01a1d24183ea4cab3b673588c2fd39b93f41b2b6
|
[
"MIT"
] | null | null | null |
predict.py
|
mayyasy/Image-Classifier-AIPND
|
01a1d24183ea4cab3b673588c2fd39b93f41b2b6
|
[
"MIT"
] | null | null | null |
predict.py
|
mayyasy/Image-Classifier-AIPND
|
01a1d24183ea4cab3b673588c2fd39b93f41b2b6
|
[
"MIT"
] | null | null | null |
'''
change directory to ImageClassifier
sample run 1: python predict.py (filepath 'flowers/test/10/image_07090.jpg' included as default sample test)
sample run 2: python predict.py --filepath 'flowers/1/image_06743.jpg'
'''
import argparse
import torch
import torch.nn.functional as F
import numpy as np
import json
import os
import random
from torch.autograd import Variable
from torchvision import transforms, models
from PIL import Image
from utils import load_checkpoint, load_cat_names
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--checkpoint', action='store', default='checkpoint.pth')
parser.add_argument('--top_k', dest='top_k', default='3')
parser.add_argument('--filepath', dest='filepath', default='flowers/test/10/image_07090.jpg') # default sample test
parser.add_argument('--category_names', dest='category_names', default='cat_to_name.json')
parser.add_argument('--gpu', action='store', default='gpu')
return parser.parse_args()
def process_image(image):
''' Scales, crops, and normalizes a PIL image for a PyTorch model,
returns an Numpy array
'''
# TODO: Process a PIL image for use in a PyTorch model
img_pil = Image.open(image) # use Image
adjustments = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
image = adjustments(img_pil)
return image
def predict(image_path, model, topk=3, gpu='gpu'):
''' Get probability values (indeces) and respective flower classes.
'''
# TODO: Implement the code to predict the class from an image file
if gpu == 'gpu':
model = model.cuda()
else:
model = model.cpu()
img_torch = process_image(image_path)
img_torch = img_torch.unsqueeze_(0)
img_torch = img_torch.float()
if gpu == 'gpu':
with torch.no_grad():
output = model.forward(img_torch.cuda())
else:
with torch.no_grad():
output=model.forward(img_torch)
probability = F.softmax(output.data,dim=1) # use F
probs = np.array(probability.topk(topk)[0][0])
index_to_class = {val: key for key, val in model.class_to_idx.items()} # from reviewer advice
top_classes = [np.int(index_to_class[each]) for each in np.array(probability.topk(topk)[1][0])]
return probs, top_classes
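# A minimal usage sketch (file names are the script defaults / placeholders, not
# guaranteed to exist):
#   model = load_checkpoint('checkpoint.pth')
#   probs, classes = predict('flowers/test/10/image_07090.jpg', model, topk=3, gpu='gpu')
#   probs   -> top-3 probabilities, highest first
#   classes -> the matching class indices, mapped to flower names via cat_to_name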
def main():
args = parse_args()
gpu = args.gpu
model = load_checkpoint(args.checkpoint)
cat_to_name = load_cat_names(args.category_names)
img_path = args.filepath
probs, classes = predict(img_path, model, int(args.top_k), gpu)
labels = [cat_to_name[str(index)] for index in classes]
probability = probs
print('File selected: ' + img_path)
print(labels)
print(probability)
i=0 # prints out top_k classes and probabilities according to user
while i < len(labels):
print("{} with a probability of {:1f}".format(labels[i], probability[i]))
i += 1 # cycle through
if __name__ == "__main__":
main()
| 31.544554
| 119
| 0.670433
|
69e1daa6493501dbe407a71f15e936c72ecf29cb
| 3,463
|
py
|
Python
|
test/test_parser.py
|
poletaevvlad/CubeLang
|
06f34e7204f4656589eb33c4afe660383704861f
|
[
"MIT"
] | 1
|
2019-12-04T04:02:11.000Z
|
2019-12-04T04:02:11.000Z
|
test/test_parser.py
|
poletaevvlad/CubeLang
|
06f34e7204f4656589eb33c4afe660383704861f
|
[
"MIT"
] | null | null | null |
test/test_parser.py
|
poletaevvlad/CubeLang
|
06f34e7204f4656589eb33c4afe660383704861f
|
[
"MIT"
] | null | null | null |
import pytest
from cubelang.actions import Turn, Action, Rotate, TurningType
from cubelang.orientation import Side
from cubelang.parser import ParsingError, parse_actions
def test_valid_parsing():
actions = list(parse_actions("R L2 U' RU L"))
expected_sides = [TurningType.VERTICAL, TurningType.VERTICAL,
TurningType.HORIZONTAL, TurningType.VERTICAL,
TurningType.HORIZONTAL, TurningType.VERTICAL]
expected_turns = [1, 2, 1, 1, 3, 3]
assert len(actions) == len(expected_sides)
for action, type, turns in zip(actions, expected_sides, expected_turns):
assert isinstance(action, Turn)
assert action.type == type
assert action.turns == turns
@pytest.mark.parametrize("text, type, indices, turns", [
("F", TurningType.SLICE, [1], 1),
("F'", TurningType.SLICE, [1], 3),
("B", TurningType.SLICE, [-1], 3),
("B'", TurningType.SLICE, [-1], 1),
("R", TurningType.VERTICAL, [-1], 1),
("R'", TurningType.VERTICAL, [-1], 3),
("L", TurningType.VERTICAL, [1], 3),
("L'", TurningType.VERTICAL, [1], 1),
("U", TurningType.HORIZONTAL, [1], 3),
("U'", TurningType.HORIZONTAL, [1], 1),
("D", TurningType.HORIZONTAL, [-1], 1),
("D'", TurningType.HORIZONTAL, [-1], 3),
("L[2]", TurningType.VERTICAL, [2], 3),
("L[:2]", TurningType.VERTICAL, [..., 2], 3),
("L2[1:2]", TurningType.VERTICAL, [1, ..., 2], 2),
("L[2:]", TurningType.VERTICAL, [2, ...], 3),
("L[1,3]", TurningType.VERTICAL, [1, 3], 3),
("L'[1:2,3]", TurningType.VERTICAL, [1, ..., 2, 3], 1),
("L[1,2:3]", TurningType.VERTICAL, [1, 2, ..., 3], 3)
])
def test_turn_single(text: str, type: TurningType, indices: int, turns: int):
actions = list(parse_actions(text))
assert len(actions) == 1
action = actions[0]
assert isinstance(action, Turn)
assert action.indices == indices
assert action.turns == turns
assert action.type == type
assert text == str(action)
def test_rotation():
actions = list(parse_actions("X X' Y' Z2"))
expected_sides = [Side.RIGHT, Side.LEFT, Side.BOTTOM, Side.FRONT]
expected_twice = [False, False, False, True]
assert len(actions) == len(expected_sides)
for action, side, twice in zip(actions, expected_sides, expected_twice):
assert isinstance(action, Rotate)
assert action.twice == twice
assert action.axis_side == side
@pytest.mark.parametrize("action, expected", [
(Turn(Side.FRONT, 1, 1), "F"),
(Turn(Side.FRONT, 1, 2), "F2"),
(Turn(Side.FRONT, 1, 3), "F'"),
(Turn(Side.LEFT, 1, 1), "L"),
(Turn(Side.RIGHT, 1, 2), "R2"),
(Turn(Side.BACK, 1, 3), "B'"),
(Turn(Side.TOP, 1, 1), "U"),
(Turn(Side.BOTTOM, 1, 2), "D2"),
(Rotate(Side.FRONT, False), "Z"),
(Rotate(Side.BACK, False), "Z'"),
(Rotate(Side.BACK, True), "Z2"),
(Rotate(Side.FRONT, True), "Z2"),
(Rotate(Side.RIGHT, False), "X"),
(Rotate(Side.BOTTOM, False), "Y'")
])
def test_representation(action: Action, expected: str):
actual = str(action)
assert expected == actual
@pytest.mark.parametrize("text, column", [
("Q", 0), ("X[1]", 1), ("R[", 2), ("R2'", 2), ("R'2", 2), ("R:", 1), ("R[]", 2),
("R[1,]", 4), ("R[,1]", 2), ("R[1,:]", 4), ("R[1:, 1]", 4)
])
def test_illegal_parsing(text: str, column: int):
with pytest.raises(ParsingError) as e:
parse_actions(text)
assert e.value.column == column
| 34.63
| 84
| 0.591684
|
3b5856b56447b12b9626b1dfc887dd74c4dba87c
| 1,842
|
py
|
Python
|
main.py
|
Infosecurity-LLC/thehive_incidents_pusher
|
72120e51bb85776e008485b3849f64732fde0553
|
[
"Apache-2.0"
] | 1
|
2022-02-04T10:01:02.000Z
|
2022-02-04T10:01:02.000Z
|
main.py
|
Infosecurity-LLC/thehive_incidents_pusher
|
72120e51bb85776e008485b3849f64732fde0553
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
Infosecurity-LLC/thehive_incidents_pusher
|
72120e51bb85776e008485b3849f64732fde0553
|
[
"Apache-2.0"
] | 1
|
2022-02-04T10:01:03.000Z
|
2022-02-04T10:01:03.000Z
|
import logging
import os
import sys
import threading
from appmetrics import metrics
from socutils import get_settings
from modules.app_metrics import register_app_metrics
from modules.logging import prepare_logging
logger = logging.getLogger('thehive_incidents_pusher')
def main(settings_file_path: str = 'data/settings.yaml'):
settings_file_path = os.getenv("APP_CONFIG_PATH", settings_file_path)
settings = get_settings(settings_file_path)
prepare_logging(settings)
register_app_metrics()
logger.info("Application start")
logger.info("Load config from %s", settings_file_path)
from modules.pusher import TheHivePusher
pusher = TheHivePusher(settings['thehive'], settings['hbase_event_loader'])
from modules.kafka_consumer import prepare_consumer
consumer = prepare_consumer(settings)
consumer.create_consumer()
from modules.app_metrics import run_metrics_webserver
metrics_thread = threading.Thread(target=run_metrics_webserver, daemon=True)
metrics_thread.start()
try:
for message in consumer.read_topic():
logger.info("Read message from topic %s: %s", message.topic, str(message.value))
metrics.notify('received_kafka_messages', 1)
pusher.push(message.value)
logger.info("Successfully processed message")
consumer.consumer.commit()
except (KeyboardInterrupt, StopIteration) as err:
logger.warning("Unexpected processing interruption: %s", str(err))
sys.exit(0)
except Exception as err:
logger.error("Exception of type %s detected while consuming messages: %s", type(err), str(err))
sys.exit(1)
except BaseException as e:
logger.error("Unexpected low-level error: %s", str(e))
sys.exit(42)
if __name__ == '__main__':
main()
| 34.111111
| 116
| 0.722041
|
b0014f9945929b63e9e3c3c16723979e55b274b7
| 7,080
|
py
|
Python
|
ainnovation_dcim/service/workflow/workflow_transition_service.py
|
ltxwanzl/ainnovation_dcim
|
b065489e2aa69729c0fd5142cf75d8caa7788b31
|
[
"Apache-2.0"
] | null | null | null |
ainnovation_dcim/service/workflow/workflow_transition_service.py
|
ltxwanzl/ainnovation_dcim
|
b065489e2aa69729c0fd5142cf75d8caa7788b31
|
[
"Apache-2.0"
] | null | null | null |
ainnovation_dcim/service/workflow/workflow_transition_service.py
|
ltxwanzl/ainnovation_dcim
|
b065489e2aa69729c0fd5142cf75d8caa7788b31
|
[
"Apache-2.0"
] | null | null | null |
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.db.models import Q
from workflow.models import Transition
from service.base_service import BaseService
from service.common.log_service import auto_log
class WorkflowTransitionService(BaseService):
def __init__(self):
pass
@classmethod
@auto_log
def get_state_transition_queryset(cls, state_id: int)->tuple:
"""
获取状态可以执行的操作
get state can do transitions queryset
:param state_id:
:return:
"""
return True, Transition.objects.filter(is_deleted=0, source_state_id=state_id).all()
@classmethod
@auto_log
def get_workflow_transition_by_id(cls, transition_id: int)->tuple:
"""
获取transition
get transition by id
:param transition_id:
:return:
"""
return True, Transition.objects.filter(is_deleted=0, id=transition_id).first()
@classmethod
@auto_log
def get_transition_by_args(cls, arg_dict: dict)->tuple:
"""
获取流转
get transtion list by params
:param arg_dict: 条件字典
:return:
"""
arg_dict.update(is_deleted=0)
return True, Transition.objects.filter(**arg_dict).all()
@classmethod
@auto_log
def get_transitions_serialize_by_workflow_id(cls, workflow_id: int, per_page: int=10, page: int=1,
query_value: str='')->tuple:
"""
根据workflow id获取工作流的流转记录
get transition serialize record by workflow and params
:param workflow_id:
:param per_page:
:param page:
:param query_value:
:return:
"""
if not workflow_id:
return False, 'except workflow_id but not provided'
query_params = Q(workflow_id=workflow_id, is_deleted=False)
if query_value:
query_params &= Q(name__contains=query_value)
workflow_transitions = Transition.objects.filter(query_params)
paginator = Paginator(workflow_transitions, per_page)
try:
workflow_transitions_result_paginator = paginator.page(page)
except PageNotAnInteger:
workflow_transitions_result_paginator = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results
workflow_transitions_result_paginator = paginator.page(paginator.num_pages)
workflow_transitions_object_list = workflow_transitions_result_paginator.object_list
workflow_transitions_restful_list = []
for workflow_transitions_object in workflow_transitions_object_list:
source_state_info = {}
destination_state_info = {}
from service.workflow.workflow_state_service import workflow_state_service_ins
flag1, source_state_obj = workflow_state_service_ins.get_workflow_state_by_id(
workflow_transitions_object.source_state_id)
flag2, destination_state_obj = workflow_state_service_ins.get_workflow_state_by_id(
workflow_transitions_object.destination_state_id)
if flag1 and source_state_obj:
source_state_info['name'] = source_state_obj.name
source_state_info['id'] = source_state_obj.id
else:
                source_state_info['name'] = 'unknown'
source_state_info['id'] = workflow_transitions_object.source_state_id
if flag2 and destination_state_obj:
destination_state_info['name'] = destination_state_obj.name
destination_state_info['id'] = destination_state_obj.id
else:
                if workflow_transitions_object.condition_expression != '[]':
                    destination_state_info['name'] = 'see condition expression'
                else:
                    destination_state_info['name'] = 'please set a destination state or a condition expression'
destination_state_info['id'] = workflow_transitions_object.destination_state_id
result_dict = workflow_transitions_object.get_dict()
result_dict['source_state_info'] = source_state_info
result_dict['destination_state_info'] = destination_state_info
workflow_transitions_restful_list.append(result_dict)
return True, dict(workflow_transitions_restful_list=workflow_transitions_restful_list,
paginator_info=dict(per_page=per_page, page=page, total=paginator.count))
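    # Illustrative usage sketch (hypothetical caller, not part of the original service API;
    # workflow_id=1 and the query value below are assumed values for demonstration only):
    #
    #     flag, data = WorkflowTransitionService.get_transitions_serialize_by_workflow_id(
    #         workflow_id=1, per_page=10, page=1, query_value='approve')
    #     if flag:
    #         transitions = data['workflow_transitions_restful_list']
    #         total = data['paginator_info']['total']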
@classmethod
@auto_log
def add_workflow_transition(cls, workflow_id: int, name: str, transition_type_id: int, timer: int,
source_state_id: int, destination_state_id: int, condition_expression: str,
attribute_type_id: int, field_require_check: int, alert_enable: int, alert_text: str,
creator: str)->tuple:
transition_obj = Transition(workflow_id=workflow_id, name=name, transition_type_id=transition_type_id,
timer=timer, source_state_id=source_state_id,
destination_state_id=destination_state_id,
condition_expression=condition_expression,
attribute_type_id=attribute_type_id, field_require_check=field_require_check,
alert_enable=alert_enable, alert_text=alert_text, creator=creator)
transition_obj.save()
return True, dict(transition_id=transition_obj.id)
@classmethod
@auto_log
def edit_workflow_transition(cls, transition_id: int, workflow_id: int, name, transition_type_id: int, timer: int,
source_state_id: int, destination_state_id: int, condition_expression: str,
attribute_type_id: int, field_require_check: int, alert_enable: int,
alert_text: str)->tuple:
transition_queryset = Transition.objects.filter(is_deleted=0, id=transition_id)
if transition_queryset:
transition_queryset.update(workflow_id=workflow_id, name=name, transition_type_id=transition_type_id,
timer=timer, source_state_id=source_state_id,
destination_state_id=destination_state_id,
condition_expression=condition_expression,
attribute_type_id=attribute_type_id, field_require_check=field_require_check,
alert_enable=alert_enable, alert_text=alert_text)
return True, ''
@classmethod
@auto_log
def del_workflow_transition(cls, transition_id: int)->tuple:
transition_queryset = Transition.objects.filter(is_deleted=0, id=transition_id)
if transition_queryset:
transition_queryset.update(is_deleted=1)
return True, ''
workflow_transition_service_ins = WorkflowTransitionService()
| 45.974026
| 118
| 0.646893
|
75383adf5feb90f79f772a1e7d2241514292829c
| 2,365
|
py
|
Python
|
backend/bin/test/test_helpers/test_environment_helper.py
|
anjo-ba/PCAP-Analyzer
|
ccb13caba9c0c05a7643e63c57575b56ab1233cb
|
[
"MIT"
] | 4
|
2019-03-29T08:45:36.000Z
|
2021-11-11T00:49:36.000Z
|
backend/bin/test/test_helpers/test_environment_helper.py
|
anjo-ba/PCAP-Analyzer
|
ccb13caba9c0c05a7643e63c57575b56ab1233cb
|
[
"MIT"
] | 9
|
2019-04-03T18:10:19.000Z
|
2020-08-16T12:13:34.000Z
|
backend/bin/test/test_helpers/test_environment_helper.py
|
anjo-ba/PCAP-Analyzer
|
ccb13caba9c0c05a7643e63c57575b56ab1233cb
|
[
"MIT"
] | 4
|
2019-05-09T15:33:23.000Z
|
2022-02-06T08:01:23.000Z
|
import unittest
from os import path
from unittest.mock import patch
from main.helpers.environment_helper import EnvironmentHelper
class Process(object):
def __init__(self, name) -> None:
self.info = {"name": name}
class TestEnvironmentHelperMethods(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.environment_helper = EnvironmentHelper()
cls.traffic_analyzer_base_path = path.join("/opt", "splunk", "etc", "apps", "traffic-analyzer")
cls.local_base_path = path.join(cls.traffic_analyzer_base_path, "local")
cls.lookup_base_path = path.join(cls.traffic_analyzer_base_path, "lookups")
cls.tmp_base_path = path.join("/tmp")
cls.file_path = path.join("..", "files")
cls.development_variables = {
"environment": "development",
"csv_tmp_path": cls.file_path,
"csv_list_path": cls.file_path,
"csv_capture_path": cls.file_path,
"dns_request_files": cls.file_path,
"configuration_folder": path.join("..", "..", "..", "frontend", "local")
}
cls.production_variables = {
"environment": "production",
"csv_tmp_path": path.join(cls.traffic_analyzer_base_path, "bin", "files"),
"csv_list_path": path.join(cls.lookup_base_path, "lists"),
"csv_capture_path": path.join(cls.lookup_base_path, "captures"),
"dns_request_files": path.join(cls.lookup_base_path, "dns_request_files"),
"configuration_folder": path.join(cls.local_base_path)
}
@patch("psutil.process_iter")
def test_development_variables(self, process_iter) -> None:
process = Process("dev_test")
process_iter.return_value = [process]
self.assertEqual(self.environment_helper.get_environment(), self.development_variables)
@patch("psutil.process_iter")
def test_production_variables(self, process_iter) -> None:
process = Process("splunkd")
process_iter.return_value = [process]
self.assertEqual(self.environment_helper.get_environment(), self.production_variables)
if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(TestEnvironmentHelperMethods)
unittest.TextTestRunner(verbosity=2).run(suite)
| 42.232143
| 104
| 0.659197
|
7357c6edf2ea80ce668550750b2eb23f770ce1cc
| 1,439
|
py
|
Python
|
config.py
|
maxbbender/flaskee
|
4b1df9f13ae6302a7b2ab9607b131644cf08f5ea
|
[
"MIT"
] | null | null | null |
config.py
|
maxbbender/flaskee
|
4b1df9f13ae6302a7b2ab9607b131644cf08f5ea
|
[
"MIT"
] | null | null | null |
config.py
|
maxbbender/flaskee
|
4b1df9f13ae6302a7b2ab9607b131644cf08f5ea
|
[
"MIT"
] | null | null | null |
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'domislove'
    FLASK_CONFIG = os.environ.get('FLASK_CONFIG') or 'default'
    # Assumed env var name: DevelopmentConfig.init_app references Config.SLACK_LOG_URL below,
    # which would otherwise raise an AttributeError.
    SLACK_LOG_URL = os.environ.get('SLACK_LOG_URL')
@staticmethod
def init_app(app):
app.logger.handlers = []
# log to syslog
import logging
import sys
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
ch.setFormatter(formatter)
app.logger.addHandler(ch)
pass
class DevelopmentConfig(Config):
@classmethod
def init_app(cls, app):
Config.init_app(app)
import logging
from slack_log_handler import SlackLogHandler
slackHandler = SlackLogHandler(Config.SLACK_LOG_URL)
slackHandler.setLevel(logging.WARNING)
app.logger.addHandler(slackHandler)
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or "sqlite://"
class ProductionConfig(Config):
TESTING = False
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
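# Illustrative usage sketch (hypothetical Flask app factory; `create_app` and the lookup
# below are assumptions for illustration, not part of this module):
#
#     from flask import Flask
#
#     def create_app(config_name=None):
#         app = Flask(__name__)
#         config_class = config[config_name or Config.FLASK_CONFIG]
#         app.config.from_object(config_class)
#         config_class.init_app(app)
#         return app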
| 26.648148
| 93
| 0.677554
|
bc9ac664aab61de645c6938f01d9971fa3f5edef
| 528
|
py
|
Python
|
python/hw-opencv.py
|
OGLinuk/ptp
|
bf498759d9958e4fbf4bda30e2b069efa6b7ff5c
|
[
"Apache-2.0"
] | 1
|
2022-01-31T04:55:59.000Z
|
2022-01-31T04:55:59.000Z
|
python/hw-opencv.py
|
oglinuk/labs
|
619f25238cd1631ee3446cb91e46e2376bea6dba
|
[
"Apache-2.0"
] | null | null | null |
python/hw-opencv.py
|
oglinuk/labs
|
619f25238cd1631ee3446cb91e46e2376bea6dba
|
[
"Apache-2.0"
] | 1
|
2018-10-05T01:54:12.000Z
|
2018-10-05T01:54:12.000Z
|
import cv2
import numpy as np
def main():
cap = cv2.VideoCapture(0)
while (True):
# Capture frames
_, frame = cap.read()
# Convert frame color to grayscale
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# Display the video
cv2.imshow('Video', gray)
# Wait for the q key to be pressed
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
| 20.307692
| 54
| 0.573864
|
bd5277d7413276941fe40d49b13a2f3cc32eed54
| 5,271
|
py
|
Python
|
tools/cpu_evaluation_tools.py
|
TaoweiZhang/MegEngine
|
bd3c4a05274f69dacca6097d8cbadbb34c7cc2e4
|
[
"Apache-2.0"
] | 1
|
2022-03-21T03:13:45.000Z
|
2022-03-21T03:13:45.000Z
|
tools/cpu_evaluation_tools.py
|
TaoweiZhang/MegEngine
|
bd3c4a05274f69dacca6097d8cbadbb34c7cc2e4
|
[
"Apache-2.0"
] | null | null | null |
tools/cpu_evaluation_tools.py
|
TaoweiZhang/MegEngine
|
bd3c4a05274f69dacca6097d8cbadbb34c7cc2e4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
purpose: Used to measure CPU performance in a simple way by running several basic models.
how to use: run python3 cpu_evaluation_tools.py --help for more details; two args are required:
--load_and_run_file: path of the load_and_run binary; please refer to ../scripts/cmake-build/BUILD_README.md to build it.
--models_dir: path of the model directory.
how to config test device info: configure device[name/login_name/ip/port/thread_number] below.
"""
import argparse
import logging
import os
import re
import subprocess
# test device
device = {
"name": "hwmt40p",
"login_name": "hwmt40p-K9000-maliG78",
"ip": "box86.br.megvii-inc.com",
"port": 2200,
"thread_number": 3,
}
# test models
test_cpu_models = [
"inceptionv2",
"mobilenetv1",
"mobilenetv2",
"resnet18",
"resnet50",
"shufflenetv2",
"vgg16",
]
class SshConnector:
"""imp ssh control master connector"""
ip = None
port = None
login_name = None
def setup(self, login_name, ip, port):
self.ip = ip
self.login_name = login_name
self.port = port
def copy(self, src_list, dst_dir):
assert isinstance(src_list, list), "code issue happened!!"
assert isinstance(dst_dir, str), "code issue happened!!"
for src in src_list:
cmd = 'rsync --progress -a -e "ssh -p {}" {} {}@{}:{}'.format(
self.port, src, self.login_name, self.ip, dst_dir
)
logging.debug("ssh run cmd: {}".format(cmd))
subprocess.check_call(cmd, shell=True)
def cmd(self, cmd):
output = ""
assert isinstance(cmd, list), "code issue happened!!"
for sub_cmd in cmd:
p_cmd = 'ssh -p {} {}@{} "{}" '.format(
self.port, self.login_name, self.ip, sub_cmd
)
logging.debug("ssh run cmd: {}".format(p_cmd))
output = output + subprocess.check_output(p_cmd, shell=True).decode("utf-8")
return output
def get_final_bench_result_from_log(raw_log) -> float:
# raw_log --> avg_time=23.331ms -->23.331ms
h = re.findall(r"avg_time=.*ms ", raw_log)[-1][9:]
# to 23.331
h = h[: h.find("ms")]
# to float
h = float(h)
return h
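# Illustrative parse example (assumed log content, shown only to document the expected
# format): a raw_log containing "... avg_time=23.331ms ..." yields 23.331, i.e. the
# function returns the last reported average run time in milliseconds.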
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--models_dir", help="models dir", required=True)
parser.add_argument(
"--load_and_run_file", help="path for load_and_run", required=True
)
args = parser.parse_args()
assert os.path.isdir(
args.models_dir
), "invalid args for models_dir, need a dir for models"
assert os.path.isfile(args.load_and_run_file), "invalid args for load_and_run_file"
for m in test_cpu_models:
assert os.path.isfile(
os.path.join(args.models_dir, m)
), "invalid args for models_dir, need put model: {} to args.models_dir".format(
test_cpu_models
)
# init device
ssh = SshConnector()
ssh.setup(device["login_name"], device["ip"], device["port"])
# create test dir
workspace = "cpu_evaluation_workspace"
ssh.cmd(["mkdir -p {}".format(workspace)])
# copy load_and_run_file
ssh.copy([args.load_and_run_file], workspace)
# call test
result = []
for m in test_cpu_models:
m_path = os.path.join(args.models_dir, m)
# copy model file
ssh.copy([m_path], workspace)
# run single thread
sub_b = ["-cpu", "-multithread {}".format(device["thread_number"])]
for b in sub_b:
cmd = []
cmd0 = "cd {} && rm -rf fastrun.cache".format(workspace)
cmd1 = "cd {} && ./load_and_run {} --fast-run --fast_run_algo_policy fastrun.cache --iter 1 --warmup-iter 1 --no-sanity-check --weight-preprocess".format(
workspace, m, b
)
cmd2 = "cd {} && ./load_and_run {} {} --fast_run_algo_policy fastrun.cache --iter 20 --warmup-iter 5 --no-sanity-check --weight-preprocess --record-comp-seq".format(
workspace, m, b
)
cmd.append(cmd0)
cmd.append(cmd1)
cmd.append(cmd2)
raw_log = ssh.cmd(cmd)
# logging.debug(raw_log)
            ret = get_final_bench_result_from_log(raw_log)
logging.debug("model: {} with backend: {} result is: {}".format(m, b, ret))
result.append(ret)
total_time = 0.0
for r in result:
total_time += r
logging.debug("total time is: {}".format(total_time))
score = 100000.0 / total_time * 1000
logging.debug("device: {} score is: {}".format(device["name"], score))
if __name__ == "__main__":
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
DATE_FORMAT = "%Y/%m/%d %H:%M:%S"
logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT, datefmt=DATE_FORMAT)
main()
| 34.45098
| 177
| 0.618668
|
ad628722cf2f47690e270d919c1b95feaab8dea1
| 2,336
|
py
|
Python
|
phantomRun.py
|
benhastings/SeGrid_EC2
|
b5b6368b269bdd12b567f3e3519261930fa11b87
|
[
"BSD-3-Clause"
] | null | null | null |
phantomRun.py
|
benhastings/SeGrid_EC2
|
b5b6368b269bdd12b567f3e3519261930fa11b87
|
[
"BSD-3-Clause"
] | null | null | null |
phantomRun.py
|
benhastings/SeGrid_EC2
|
b5b6368b269bdd12b567f3e3519261930fa11b87
|
[
"BSD-3-Clause"
] | null | null | null |
#import subprocess
from subprocess import Popen, PIPE
import sys
import urllib2
import time
import csv
import random
import socket
env=sys.argv[1]
duration=int(sys.argv[2])
#statsDHost='ec2-54-80-6-76.compute-1.amazonaws.com'
statsDHost='statsd.elsst.com'
"""
Input Data collection/Definition
"""
PII=[]
try:
csvRd = csv.reader(open('/home/ubuntu/piis.csv','rb'))
piiCount = 500000
except:
csvRd = csv.reader(open('C:/Scripts/piis-1m.csv','rb'))
piiCount = 1000000
for j in csvRd:
PII.append(j)
"""
PII=['S0023643896900377','S2095254614000271','S2095254614000337','S0966636213001173','S2095254614000313']
piiCount=5
"""
"""
Define UDP connection to send data to statsD
"""
UDPSock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
## statsd host & port
addr=(statsDHost,8125)
#Define end of test based on input above
endTime = int(time.time()+duration)
#endTime = int(time.time()+30)
if env.find('sdfe') > -1:
envPrint=env[:env.find('sdfe')]
elif env.find('cdc')> -1:
envPrint=env[:env.find('-www')]
else:
envPrint='prod'
#print envPrint
#while loops>0:
while endTime > int(time.time()):
l=[]
loop=5
while loop>0:
idx = int(random.random()*piiCount)
idxPii=idx
#print('articleIDX:'+str(idx))
inputPII0=str(PII[idxPii]).strip('[\']')
inputPII1=str(PII[idxPii+1]).strip('[\']')
inputPII2=str(PII[idxPii+2]).strip('[\']')
inputPII3=str(PII[idxPii+3]).strip('[\']')
inputPII4=str(PII[idxPii+4]).strip('[\']')
#inputPII='S0008874905000535'
#print(inputPII0+' '+inputPII1+' '+inputPII2)
#print 'I am trying the phantomJS request now'
#ex=Popen('phantomjs article.js '+hostNm+' '+inputPII+' '+renderArticles,stdout=PIPE)#,close_fds=True,shell=True)
count='sd.article.phantom.'+envPrint+'.total:5|c'
l.append(count+'\n')
ex=Popen(['phantomjs', 'articleCrawl.js',env,inputPII0,inputPII1,inputPII2,inputPII3,inputPII4],stdout=PIPE)#,close_fds=True,shell=True)
exOut=ex.communicate()
#print('ex.communicate below:')
#print(exOut)
#print(exOut[0])
#print(inputPII)
time.sleep(.25)
loop=loop-1
# statsDdata=''.join(l)
# print(statsDdata)
# UDPSock.sendto(statsDdata,addr)
tmStmp=time.time()
# with open("articleCount.log", "a") as myfile:
# myfile.write(str(tmStmp)+'|'+count+'\n')
#print(count)
UDPSock.sendto(count,addr)
| 24.589474
| 137
| 0.682791
|
e91a8bfda2f1ee2dad36febba3224984bf0a733d
| 442
|
py
|
Python
|
picture.py
|
ysndr/google-hashcode-2019
|
d31a932aa2a47c4bdec92818d6041905db33d730
|
[
"MIT"
] | null | null | null |
picture.py
|
ysndr/google-hashcode-2019
|
d31a932aa2a47c4bdec92818d6041905db33d730
|
[
"MIT"
] | null | null | null |
picture.py
|
ysndr/google-hashcode-2019
|
d31a932aa2a47c4bdec92818d6041905db33d730
|
[
"MIT"
] | null | null | null |
class Picture():
def __init__(self, pid, rotation, tags):
self.id = pid
self.rotation = rotation
self.tags = tags
def score(self, other):
return min(
len(self.tags.intersection(other.tags)),
len(self.tags.difference(other.tags)),
len(other.tags.difference(self.tags))
)
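    # Worked example (hypothetical tags, for illustration only): for self.tags
    # {"cat", "beach"} and other.tags {"beach", "sun"}, the common-tag count is 1,
    # tags only in self is 1 and tags only in other is 1, so score() returns min(1, 1, 1) = 1.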
def __repr__(self):
return "(id: '%s', tags: %s)" % (self.id, self.tags)
| 26
| 60
| 0.552036
|
e44947709901faa5fbf9002fc6d36abaecd20026
| 31,146
|
py
|
Python
|
test/IECore/LinkedSceneTest.py
|
danieldresser/cortex
|
33f50c097adbc0c6a87259d0be4d7344c3c7026f
|
[
"BSD-3-Clause"
] | null | null | null |
test/IECore/LinkedSceneTest.py
|
danieldresser/cortex
|
33f50c097adbc0c6a87259d0be4d7344c3c7026f
|
[
"BSD-3-Clause"
] | null | null | null |
test/IECore/LinkedSceneTest.py
|
danieldresser/cortex
|
33f50c097adbc0c6a87259d0be4d7344c3c7026f
|
[
"BSD-3-Clause"
] | 1
|
2019-11-29T12:34:11.000Z
|
2019-11-29T12:34:11.000Z
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import gc
import sys
import os
import math
import unittest
import IECore
class LinkedSceneTest( unittest.TestCase ) :
@staticmethod
def compareBBox( box1, box2 ):
errorTolerance = IECore.V3d(1e-5, 1e-5, 1e-5)
boxTmp = IECore.Box3d( box1.min - errorTolerance, box1.max + errorTolerance )
if not boxTmp.contains( box2 ):
return False
boxTmp = IECore.Box3d( box2.min - errorTolerance, box2.max + errorTolerance )
if not boxTmp.contains( box1 ):
return False
return True
def testSupportedExtension( self ) :
self.assertTrue( "lscc" in IECore.SceneInterface.supportedExtensions() )
self.assertTrue( "lscc" in IECore.SceneInterface.supportedExtensions( IECore.IndexedIO.OpenMode.Read ) )
self.assertTrue( "lscc" in IECore.SceneInterface.supportedExtensions( IECore.IndexedIO.OpenMode.Write ) )
self.assertTrue( "lscc" in IECore.SceneInterface.supportedExtensions( IECore.IndexedIO.OpenMode.Write + IECore.IndexedIO.OpenMode.Read ) )
self.assertFalse( "lscc" in IECore.SceneInterface.supportedExtensions( IECore.IndexedIO.OpenMode.Append ) )
def testFactoryFunction( self ):
# test Write factory function
m = IECore.SceneInterface.create( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
self.assertTrue( isinstance( m, IECore.LinkedScene ) )
self.assertEqual( m.fileName(), "/tmp/test.lscc" )
self.assertRaises( RuntimeError, m.readBound, 0.0 )
del m
# test Read factory function
m = IECore.SceneInterface.create( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
self.assertTrue( isinstance( m, IECore.LinkedScene ) )
self.assertEqual( m.fileName(), "/tmp/test.lscc" )
m.readBound( 0.0 )
def testConstructors( self ):
# test Read from a previously opened scene.
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
l = IECore.LinkedScene( m )
# test Write mode
m = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
self.assertTrue( isinstance( m, IECore.LinkedScene ) )
self.assertEqual( m.fileName(), "/tmp/test.lscc" )
self.assertRaises( RuntimeError, m.readBound, 0.0 )
del m
# test Read mode
m = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
self.assertTrue( isinstance( m, IECore.LinkedScene ) )
self.assertEqual( m.fileName(), "/tmp/test.lscc" )
m.readBound( 0.0 )
def testAppendRaises( self ) :
self.assertRaises( RuntimeError, IECore.SceneInterface.create, "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Append )
self.assertRaises( RuntimeError, IECore.LinkedScene, "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Append )
def testReadNonExistentRaises( self ) :
self.assertRaises( RuntimeError, IECore.LinkedScene, "iDontExist.lscc", IECore.IndexedIO.OpenMode.Read )
def testLinkAttribute( self ):
self.assertEqual( IECore.LinkedScene.linkAttribute, "sceneInterface:link" )
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
attr = IECore.LinkedScene.linkAttributeData( m )
expectedAttr = IECore.CompoundData(
{
"fileName": IECore.StringData("test/IECore/data/sccFiles/animatedSpheres.scc"),
"root": IECore.InternedStringVectorData( [] )
}
)
self.assertEqual( attr, expectedAttr )
A = m.child("A")
attr = IECore.LinkedScene.linkAttributeData( A )
expectedAttr = IECore.CompoundData(
{
"fileName": IECore.StringData("test/IECore/data/sccFiles/animatedSpheres.scc"),
"root": IECore.InternedStringVectorData( [ 'A' ] )
}
)
self.assertEqual( attr, expectedAttr )
A = m.child("A")
attr = IECore.LinkedScene.linkAttributeData( A, 10.0 )
expectedAttr['time'] = IECore.DoubleData(10.0)
self.assertEqual( attr, expectedAttr )
def testWriting( self ):
generateTestFiles = False # change this to True to recreate the LinkedScene files for other tests.
if generateTestFiles :
outputPath = "test/IECore/data/sccFiles"
else :
outputPath = "/tmp"
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
A = m.child("A")
l = IECore.LinkedScene( os.path.join(outputPath,"instancedSpheres.lscc"), IECore.IndexedIO.OpenMode.Write )
i0 = l.createChild("instance0")
i0.writeLink( m )
i1 = l.createChild("instance1")
i1.writeLink( m )
i1.writeAttribute( "testAttr", IECore.StringData("test"), 0 )
i1.writeTransform( IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ), 0.0 )
i2 = l.createChild("instance2")
i2.writeLink( A )
i2.writeTransform( IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ), 0.0 )
self.assertRaises( RuntimeError, i2.createChild, "cannotHaveChildrenAtLinks" )
i2.writeTags( ["canHaveTagsAtLinks"] )
self.assertRaises( RuntimeError, i2.writeObject, IECore.SpherePrimitive( 1 ), 0.0 ) # cannot save objects at link locations.
b1 = l.createChild("branch1")
b1.writeObject( IECore.SpherePrimitive( 1 ), 0.0 )
self.assertRaises( RuntimeError, b1.writeLink, A )
b2 = l.createChild("branch2")
c2 = b2.createChild("child2")
self.assertRaises( RuntimeError, b2.writeLink, A )
del i0, i1, i2, l, b1, b2, c2
l = IECore.LinkedScene( os.path.join(outputPath,"instancedSpheres.lscc"), IECore.IndexedIO.OpenMode.Read )
self.assertEqual( l.numBoundSamples(), 4 )
self.assertEqual( set(l.childNames()), set(['instance0','instance1','instance2','branch1','branch2']) )
i0 = l.child("instance0")
self.assertEqual( i0.numBoundSamples(), 4 )
self.failUnless( LinkedSceneTest.compareBBox( i0.readBoundAtSample(0), IECore.Box3d( IECore.V3d( -1,-1,-1 ), IECore.V3d( 2,2,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i0.readBoundAtSample(1), IECore.Box3d( IECore.V3d( -1,-1,-1 ), IECore.V3d( 3,3,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i0.readBoundAtSample(2), IECore.Box3d( IECore.V3d( -2,-1,-2 ), IECore.V3d( 4,5,2 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i0.readBoundAtSample(3), IECore.Box3d( IECore.V3d( -3,-1,-3 ), IECore.V3d( 4,6,3 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i0.readBound(0), IECore.Box3d( IECore.V3d( -1,-1,-1 ), IECore.V3d( 2,2,1 ) ) ) )
A = i0.child("A")
self.failUnless( LinkedSceneTest.compareBBox( A.readBoundAtSample(0), IECore.Box3d(IECore.V3d( -1,-1,-1 ), IECore.V3d( 1,1,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( A.readBoundAtSample(1), IECore.Box3d(IECore.V3d( -1,-1,-1 ), IECore.V3d( 1,1,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( A.readBoundAtSample(2), IECore.Box3d(IECore.V3d( 0,-1,-1 ), IECore.V3d( 2,1,1 ) ) ) )
self.assertEqual( i0.readTransform( 0 ), IECore.M44dData( IECore.M44d() ) )
i1 = l.child("instance1")
self.assertEqual( i1.numBoundSamples(), 4 )
self.failUnless( LinkedSceneTest.compareBBox( i1.readBoundAtSample(0), IECore.Box3d( IECore.V3d( -1,-1,-1 ), IECore.V3d( 2,2,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i1.readBoundAtSample(2), IECore.Box3d( IECore.V3d( -2,-1,-2 ), IECore.V3d( 4,5,2 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i1.readBoundAtSample(3), IECore.Box3d( IECore.V3d( -3,-1,-3 ), IECore.V3d( 4,6,3 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i1.readBound(0), IECore.Box3d( IECore.V3d( -1,-1,-1 ), IECore.V3d( 2,2,1 ) ) ) )
self.assertEqual( i1.readTransform( 0 ), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ) )
self.assertEqual( i1.readAttribute( "testAttr", 0 ), IECore.StringData("test") )
i2 = l.child("instance2")
self.assertEqual( i2.numBoundSamples(), 3 )
self.failUnless( LinkedSceneTest.compareBBox( i2.readBoundAtSample(0), IECore.Box3d(IECore.V3d( -1,-1,-1 ), IECore.V3d( 1,1,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i2.readBoundAtSample(1), IECore.Box3d(IECore.V3d( -1,-1,-1 ), IECore.V3d( 1,1,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( i2.readBoundAtSample(2), IECore.Box3d(IECore.V3d( 0,-1,-1 ), IECore.V3d( 2,1,1 ) ) ) )
self.assertEqual( i2.readTransform( 0 ), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
self.assertTrue( i2.hasTag( "canHaveTagsAtLinks" ) )
self.assertTrue( l.hasTag( "canHaveTagsAtLinks" ) ) # tags propagate up
self.assertTrue( i2.child("a").hasTag( "canHaveTagsAtLinks" ) ) # tags at link locations propagate down as well
self.assertEqual( l.scene( [ 'instance0' ] ).path(), [ 'instance0' ] )
self.assertEqual( l.scene( [ 'instance0', 'A' ] ).path(), [ 'instance0', 'A' ] )
self.assertEqual( i0.path(), [ 'instance0' ] )
# test saving a two level LinkedScene
l2 = IECore.LinkedScene( os.path.join(outputPath,"environment.lscc"), IECore.IndexedIO.OpenMode.Write )
base = l2.createChild("base")
t1 = base.createChild("test1")
t1.writeLink( l )
t2 = base.createChild("test2")
t2.writeLink( i0 )
t3 = base.createChild("test3")
t3.writeLink( i1 )
t4 = base.createChild("test4")
t4.writeLink( i2 )
t5 = base.createChild("test5")
t5.writeLink( A )
del l2, t1, t2, t3, t4, t5
def testWriteLinkAnimatedTransform( self ):
messageHandler = IECore.CapturingMessageHandler()
with messageHandler :
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
i0 = l.createChild("instance0")
i0.writeLink( m )
# this was causing a problem upon deleting l, as the first transform sample doesn't coincide with the
# first bound sample in the link
i0.writeTransform( IECore.M44dData( IECore.M44d() ), 5.0 )
i0.writeTransform( IECore.M44dData( IECore.M44d() ), 6.0 )
del i0, l, m
if len( messageHandler.messages ):
self.fail( messageHandler.messages[0].message )
def testTimeRemapping( self ):
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
# save animated spheres with double the speed and with offset, using less samples (time remapping)
i0 = l.createChild("instance0")
i0.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 0.0 ), 1.0 )
i0.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 3.0 ), 2.0 )
# save animated spheres with same speed and with offset, same samples (time remapping is identity)
i1 = l.createChild("instance1")
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 0.0 ), 1.0 )
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 1.0 ), 2.0 )
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 2.0 ), 3.0 )
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 3.0 ), 4.0 )
# save animated spheres with half the speed, adding more samples to a range of the original (time remapping)
i2 = l.createChild("instance2")
i2.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 0.0 ), 0.0 )
i2.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 0.5 ), 1.0 )
i2.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 1.0 ), 2.0 )
del i0, i1, i2, l
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
self.assertEqual( l.numBoundSamples(), 5 )
self.assertEqual( l.hasAttribute( "sceneInterface:link.time" ), False )
i0 = l.child("instance0")
self.assertEqual( i0.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i0.readAttribute( "sceneInterface:link.time", 1 ).value, 0 )
self.assertEqual( i0.readAttribute( "sceneInterface:link.time", 2 ).value, 3 )
self.assertEqual( i0.numBoundSamples(), 2 )
self.assertEqual( i0.numTransformSamples(), 1 )
self.assertEqual( i0.readTransformAtSample(0), IECore.M44dData() )
A0 = i0.child("A")
self.assertEqual( A0.hasAttribute( "sceneInterface:link.time" ), False )
self.assertEqual( A0.numBoundSamples(), 2 )
self.assertEqual( A0.numTransformSamples(), 2 )
self.failUnless( LinkedSceneTest.compareBBox( A0.readBoundAtSample(0), IECore.Box3d(IECore.V3d( -1,-1,-1 ), IECore.V3d( 1,1,1 ) ) ) )
self.failUnless( LinkedSceneTest.compareBBox( A0.readBoundAtSample(1), IECore.Box3d(IECore.V3d( 0,-1,-1 ), IECore.V3d( 2,1,1 ) ) ) )
self.assertEqual( A0.readTransformAtSample(0), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ) )
self.assertEqual( A0.readTransformAtSample(1), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
i1 = l.child("instance1")
self.assertEqual( i1.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 1 ).value, 0 )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 2 ).value, 1 )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 3 ).value, 2 )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 4 ).value, 3 )
self.assertEqual( i1.numBoundSamples(), 4 )
self.assertEqual( i1.numTransformSamples(), 1 )
A1 = i1.child("A")
self.assertEqual( A1.numTransformSamples(), 4 )
self.assertEqual( A1.readTransformAtSample(0), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ) )
self.assertEqual( A1.readTransformAtSample(1), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
self.assertEqual( A1.readTransformAtSample(2), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
self.assertEqual( A1.readTransformAtSample(3), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
self.assertEqual( A1.hasAttribute( "sceneInterface:link.time" ), False )
i2 = l.child("instance2")
self.assertEqual( i2.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i2.readAttribute( "sceneInterface:link.time", 0 ).value, 0 )
self.assertEqual( i2.readAttribute( "sceneInterface:link.time", 1 ).value, 0.5 )
self.assertEqual( i2.readAttribute( "sceneInterface:link.time", 2 ).value, 1 )
self.assertEqual( i2.numBoundSamples(), 3 )
self.assertEqual( i2.numTransformSamples(), 1 )
A2 = i2.child("A")
self.assertEqual( A2.numBoundSamples(), 3 )
self.assertEqual( A2.numTransformSamples(), 3 )
self.assertEqual( A2.readTransform(1.0), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1.5, 0, 0 ) ) ) )
self.assertEqual( A2.readTransformAtSample(0), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ) )
self.assertEqual( A2.readTransformAtSample(1), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1.5, 0, 0 ) ) ) )
self.assertEqual( A2.readTransformAtSample(2), IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ) )
self.assertEqual( A2.hasAttribute( "sceneInterface:link.time" ), False )
def testNestedTimeRemapping( self ):
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
A = m.child("A")
l2 = IECore.LinkedScene( "/tmp/test3.lscc", IECore.IndexedIO.OpenMode.Write )
t2 = l2.createChild("transform2")
i2 = t2.createChild("instance2")
i2.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 0.0 ), 0.0 )
i2.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( m, 2.0 ), 1.0 )
del l2, i2, t2
l2 = IECore.LinkedScene( "/tmp/test3.lscc", IECore.IndexedIO.OpenMode.Read )
l1 = IECore.LinkedScene( "/tmp/test2.lscc", IECore.IndexedIO.OpenMode.Write )
t1 = l1.createChild("transform1")
i1 = t1.createChild("instance1")
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( l2, 0.0 ), 0.0 )
i1.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( l2, 2.0 ), 1.0 )
del l1, i1, t1
l1 = IECore.LinkedScene( "/tmp/test2.lscc", IECore.IndexedIO.OpenMode.Read )
l0 = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
t0 = l0.createChild("transform0")
i0 = t0.createChild("instance0")
i0.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( l1, 0.0 ), 0.0 )
i0.writeAttribute( IECore.LinkedScene.linkAttribute, IECore.LinkedScene.linkAttributeData( l1, 2.0 ), 1.0 )
del l0, i0, t0
l0 = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
l = IECore.LinkedScene( "/tmp/testTop.lscc", IECore.IndexedIO.OpenMode.Write )
t = l.createChild("transform")
i = t.createChild("instance")
i.writeLink( l0 )
del l, i, t
del m, l0, l1, l2
l = IECore.LinkedScene( "/tmp/testTop.lscc", IECore.IndexedIO.OpenMode.Read )
t = l.child("transform")
i = t.child("instance")
t0 = i.child("transform0")
i0 = t0.child("instance0")
t1 = i0.child("transform1")
i1 = t1.child("instance1")
t2 = i1.child("transform2")
i2 = t2.child("instance2")
A = i2.child("A")
# this location shouldn't be retimed:
self.assertEqual( i.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i.readAttribute( "sceneInterface:link.time", 0.25 ).value, 0.25 )
# this location should be sped up by a factor of 2:
self.assertEqual( i0.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i0.readAttribute( "sceneInterface:link.time", 0.25 ).value, 0.5 )
# this one is remapped twice, so it's sped up by a factor of 4:
self.assertEqual( i1.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 0.25 ).value, 1 )
# and this one is remapped three times, so it's sped up by a factor of 8:
self.assertEqual( i2.hasAttribute( "sceneInterface:link.time" ), True )
self.assertEqual( i2.readAttribute( "sceneInterface:link.time", 0.25 ).value, 2 )
# sanity check:
self.assertEqual( i.readAttribute( "sceneInterface:link.time", 0 ).value, 0 )
self.assertEqual( i0.readAttribute( "sceneInterface:link.time", 0 ).value, 0 )
self.assertEqual( i1.readAttribute( "sceneInterface:link.time", 0 ).value, 0 )
self.assertEqual( i2.readAttribute( "sceneInterface:link.time", 0 ).value, 0 )
# test multiple retiming of the transform:
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
Aa = m.child("A")
self.assertEqual( Aa.readTransformAsMatrix( 0.1 ), A.readTransformAsMatrix( 0.1 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.2 ), A.readTransformAsMatrix( 0.2 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.3 ), A.readTransformAsMatrix( 0.3 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.4 ), A.readTransformAsMatrix( 0.4 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.5 ), A.readTransformAsMatrix( 0.5 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.6 ), A.readTransformAsMatrix( 0.6 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.7 ), A.readTransformAsMatrix( 0.7 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.8 ), A.readTransformAsMatrix( 0.8 / 8 ) )
self.assertEqual( Aa.readTransformAsMatrix( 0.9 ), A.readTransformAsMatrix( 0.9 / 8 ) )
def testReading( self ):
def recurseCompare( basePath, virtualScene, realScene, atLink = True ) :
self.assertEqual( basePath, virtualScene.path() )
			if not atLink : # attributes and transforms at link locations are not loaded.
self.assertEqual( set(virtualScene.attributeNames()), set(realScene.attributeNames()) )
for attr in realScene.attributeNames() :
self.assertTrue( virtualScene.hasAttribute( attr ) )
self.assertEqual( virtualScene.numAttributeSamples(attr), realScene.numAttributeSamples(attr) )
for s in xrange(0,virtualScene.numAttributeSamples(attr)) :
self.assertEqual( virtualScene.readAttributeAtSample(attr, s), realScene.readAttributeAtSample(attr, s) )
self.assertEqual( virtualScene.numTransformSamples(), realScene.numTransformSamples() )
for s in xrange(0,virtualScene.numTransformSamples()) :
self.assertEqual( virtualScene.readTransformAtSample(s), realScene.readTransformAtSample(s) )
self.assertEqual( virtualScene.numBoundSamples(), realScene.numBoundSamples() )
for s in xrange(0,virtualScene.numBoundSamples()) :
self.assertEqual( virtualScene.readBoundAtSample(s), realScene.readBoundAtSample(s) )
self.assertEqual( virtualScene.hasObject(), realScene.hasObject() )
if virtualScene.hasObject() :
self.assertEqual( virtualScene.numObjectSamples(), realScene.numObjectSamples() )
for s in xrange(0,virtualScene.numObjectSamples()) :
self.assertEqual( virtualScene.readObjectAtSample(s), realScene.readObjectAtSample(s) )
self.assertEqual( set(virtualScene.childNames()), set(realScene.childNames()) )
for c in virtualScene.childNames() :
self.assertTrue( virtualScene.hasChild(c) )
recurseCompare( basePath + [ str(c) ], virtualScene.child(c), realScene.child(c), False )
env = IECore.LinkedScene( "test/IECore/data/sccFiles/environment.lscc", IECore.IndexedIO.OpenMode.Read ) # created by testWriting() when generateTestFiles=True
l = IECore.LinkedScene( "test/IECore/data/sccFiles/instancedSpheres.lscc", IECore.IndexedIO.OpenMode.Read ) # created by testWriting() when generateTestFiles=True
m = IECore.SceneCache( "test/IECore/data/sccFiles/animatedSpheres.scc", IECore.IndexedIO.OpenMode.Read )
base = env.child('base')
self.assertEqual( set(base.childNames()), set(['test1','test2','test3','test4','test5']) )
test1 = base.child('test1')
self.assertEqual( test1.path(), [ "base", "test1" ] )
recurseCompare( test1.path(), test1, l )
test2 = base.child('test2')
self.assertEqual( test2.path(), [ "base", "test2" ] )
recurseCompare( test2.path(), test2, l.child('instance0') )
test3 = base.child('test3')
self.assertEqual( test3.path(), [ "base", "test3" ] )
recurseCompare( test3.path(), test3, l.child('instance1') )
test4 = base.child('test4')
self.assertEqual( test4.path(), [ "base", "test4" ] )
recurseCompare( test4.path(), test4, l.child('instance2') )
test5 = base.child('test5')
self.assertEqual( test5.path(), [ "base", "test5" ] )
recurseCompare( test5.path(), test5, l.child('instance1').child('A') )
# attributes like sceneInterface:link.root, sceneInterface:link.fileName, and sceneInterface:link.time shouldn't show up at links, although they might be there...
self.assertEqual( test1.child('instance0').attributeNames(), [] )
self.assertEqual( test1.child('instance1').attributeNames(), [ 'testAttr' ] )
self.assertEqual( test1.child('instance2').attributeNames(), [] )
# hasAttribute should tell the truth though...
self.assertEqual( test1.child('instance0').hasAttribute( "sceneInterface:link.fileName" ), True )
self.assertEqual( test1.child('instance0').hasAttribute( "sceneInterface:link.root" ), True )
self.assertEqual( test1.child('instance1').hasAttribute( "sceneInterface:link.fileName" ), True )
self.assertEqual( test1.child('instance1').hasAttribute( "sceneInterface:link.root" ), True )
self.assertEqual( test1.child('instance2').hasAttribute( "sceneInterface:link.fileName" ), True )
self.assertEqual( test1.child('instance2').hasAttribute( "sceneInterface:link.root" ), True )
self.assertEqual( test1.child('instance0').path(), [ "base", "test1", "instance0" ] )
recurseCompare( test1.child('instance0').path(), test1.child('instance0'), m )
recurseCompare( test2.path(), test2, m )
recurseCompare( test3.path(), test3, m )
recurseCompare( test4.path(), test4, m.child('A') )
recurseCompare( test5.path(), test5, m.child('A') )
recurseCompare( test1.path(), env.scene( [ 'base', 'test1' ] ), l )
recurseCompare( test1.path(), env.scene( [ 'base' ] ).child( 'test1' ), l )
def testTags( self ) :
def testSet( values ):
return set( map( lambda s: IECore.InternedString(s), values ) )
# create a base scene
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
a = l.createChild('a')
a.writeTags( [ "testA" ] )
b = l.createChild('b')
b.writeTags( [ "testB" ] )
l.writeTags( [ "tags" ] )
del a, b, l
# now create a linked scene that should inherit the tags from the base one, plus add other ones
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
a = l.child('a')
b = l.child('b')
self.assertEqual( set(l.readTags()), testSet(["testA", "testB", "tags"]) )
self.assertEqual( set(l.readTags(includeChildren=False)), testSet(["tags"]) )
self.assertEqual( set(a.readTags()), testSet(["testA"]) )
self.assertEqual( set(a.readTags(includeChildren=False)), testSet(["testA"]) )
self.assertEqual( set(b.readTags()), testSet(["testB"]) )
self.assertEqual( set(b.readTags(includeChildren=False)), testSet(["testB"]) )
self.assertTrue( l.hasTag("testA") )
self.assertTrue( l.hasTag("testB") )
self.assertFalse( l.hasTag("testA", includeChildren=False) )
self.assertFalse( l.hasTag("testB", includeChildren=False) )
self.assertTrue( a.hasTag("testA") )
self.assertFalse( a.hasTag("testB") )
self.assertTrue( b.hasTag("testB") )
self.assertFalse( b.hasTag("testA") )
l2 = IECore.LinkedScene( "/tmp/test2.lscc", IECore.IndexedIO.OpenMode.Write )
A = l2.createChild('A')
A.writeLink( l )
A.writeTags( ['linkedA'] ) # creating tag after link
B = l2.createChild('B')
B.writeLink( a )
C = l2.createChild('C')
c = C.createChild('c')
c.writeLink( l )
C.writeTags( [ 'C' ] )
D = l2.createChild('D')
D.writeTags( [ 'D' ] )
D.writeLink( a ) # creating link after tag
del l, a, b, l2, A, B, C, c, D
l2 = IECore.LinkedScene( "/tmp/test2.lscc", IECore.IndexedIO.OpenMode.Read )
A = l2.child("A")
Aa = A.child("a")
B = l2.child("B")
C = l2.child("C")
c = C.child("c")
ca = c.child("a")
D = l2.child("D")
self.assertTrue( l2.hasTag("testA") )
self.assertTrue( l2.hasTag("testB") )
self.assertFalse( l2.hasTag("t") )
self.assertEqual( set(l2.readTags()), testSet(["testA", "testB","tags", "C", "D","linkedA"]) )
self.assertEqual( set(l2.readTags(includeChildren=False)), testSet([]) )
self.assertEqual( set(A.readTags()), testSet(["testA","testB", "tags","linkedA"]) )
self.assertTrue( A.hasTag( "linkedA" ) )
self.assertTrue( A.hasTag( "tags" ) )
self.assertTrue( A.hasTag( "testA" ) )
self.assertTrue( A.hasTag( "testB" ) )
self.assertFalse( A.hasTag("C") )
self.assertEqual( set(A.readTags(includeChildren=False)), testSet(["tags","linkedA"]) )
self.assertEqual( set(Aa.readTags()), testSet(["testA", "linkedA"]) )
self.assertEqual( set(Aa.readTags(includeChildren=False)), testSet(["testA"]) )
self.assertTrue( Aa.hasTag("testA") )
self.assertFalse( Aa.hasTag("testB") )
self.assertEqual( set(B.readTags()), testSet(["testA"]) )
self.assertEqual( set(C.readTags()), testSet(["testA","testB","tags","C"]) )
self.assertEqual( set(C.readTags(includeChildren=False)), testSet(["C"]) )
self.assertEqual( set(c.readTags()), testSet(["testA", "testB","tags"]) )
self.assertEqual( set(c.readTags(includeChildren=False)), testSet(["tags"]) )
self.assertEqual( set(ca.readTags()), testSet(["testA"]) )
self.assertTrue( ca.hasTag("testA") )
self.assertFalse( ca.hasTag("testB") )
self.assertEqual( set(C.readTags(includeChildren=False)), testSet(["C"]) )
self.assertEqual( set(D.readTags()), testSet(["D", "testA"]) )
def testMissingLinkedScene( self ) :
import shutil
shutil.copyfile( "test/IECore/data/sccFiles/animatedSpheres.scc", "/tmp/toBeRemoved.scc" )
m = IECore.SceneCache( "/tmp/toBeRemoved.scc", IECore.IndexedIO.OpenMode.Read )
A = m.child("A")
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Write )
i0 = l.createChild("instance0")
i0.writeLink( m )
i1 = l.createChild("instance1")
i1.writeLink( m )
i1.writeTransform( IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 1, 0, 0 ) ) ), 0.0 )
i2 = l.createChild("instance2")
i2.writeLink( A )
i2.writeTransform( IECore.M44dData( IECore.M44d.createTranslated( IECore.V3d( 2, 0, 0 ) ) ), 0.0 )
del i0, i1, i2, l, m, A
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
self.assertEqual( sorted(l.childNames()), [ "instance0", "instance1", "instance2" ] )
i0 = l.child( "instance0" )
self.assertEqual( sorted(i0.childNames()), [ "A", "B" ] )
i1 = l.child( "instance1" )
self.assertEqual( sorted(i1.childNames()), [ "A", "B" ] )
i2 = l.child( "instance2" )
self.assertEqual( i2.childNames(), [ "a" ] )
del l, i0, i1, i2
os.remove( "/tmp/toBeRemoved.scc" )
IECore.SharedSceneInterfaces.clear()
l = IECore.LinkedScene( "/tmp/test.lscc", IECore.IndexedIO.OpenMode.Read )
self.assertEqual( sorted(l.childNames()), [ "instance0", "instance1", "instance2" ] )
i0 = l.child( "instance0" )
self.assertEqual( i0.childNames(), [] )
i1 = l.child( "instance1" )
self.assertEqual( i1.childNames(), [] )
i2 = l.child( "instance2" )
self.assertEqual( i2.childNames(), [] )
if __name__ == "__main__":
unittest.main()
| 49.674641
| 164
| 0.705259
|
11b72c10f7d0184752cfe02070f68f143cda3cdf
| 19,989
|
py
|
Python
|
tests/rest/admin/test_event_reports.py
|
cHAuHaNz/synapse
|
79bfe966e08a2212cc2fae2b00f5efb2c2185543
|
[
"Apache-2.0"
] | 1
|
2020-11-04T14:12:27.000Z
|
2020-11-04T14:12:27.000Z
|
tests/rest/admin/test_event_reports.py
|
cHAuHaNz/synapse
|
79bfe966e08a2212cc2fae2b00f5efb2c2185543
|
[
"Apache-2.0"
] | null | null | null |
tests/rest/admin/test_event_reports.py
|
cHAuHaNz/synapse
|
79bfe966e08a2212cc2fae2b00f5efb2c2185543
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import synapse.rest.admin
from synapse.api.errors import Codes
from synapse.rest.client.v1 import login, room
from synapse.rest.client.v2_alpha import report_event
from tests import unittest
class EventReportsTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
report_event.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.other_user = self.register_user("user", "pass")
self.other_user_tok = self.login("user", "pass")
self.room_id1 = self.helper.create_room_as(
self.other_user, tok=self.other_user_tok, is_public=True
)
self.helper.join(self.room_id1, user=self.admin_user, tok=self.admin_user_tok)
self.room_id2 = self.helper.create_room_as(
self.other_user, tok=self.other_user_tok, is_public=True
)
self.helper.join(self.room_id2, user=self.admin_user, tok=self.admin_user_tok)
# Two rooms and two users. Every user sends and reports every room event
for i in range(5):
self._create_event_and_report(
room_id=self.room_id1, user_tok=self.other_user_tok,
)
for i in range(5):
self._create_event_and_report(
room_id=self.room_id2, user_tok=self.other_user_tok,
)
for i in range(5):
self._create_event_and_report(
room_id=self.room_id1, user_tok=self.admin_user_tok,
)
for i in range(5):
self._create_event_and_report(
room_id=self.room_id2, user_tok=self.admin_user_tok,
)
self.url = "/_synapse/admin/v1/event_reports"
def test_no_auth(self):
"""
Try to get an event report without authentication.
"""
request, channel = self.make_request("GET", self.url, b"{}")
self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
def test_requester_is_no_admin(self):
"""
If the user is not a server admin, an error 403 is returned.
"""
request, channel = self.make_request(
"GET", self.url, access_token=self.other_user_tok,
)
self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
def test_default_success(self):
"""
Testing list of reported events
"""
request, channel = self.make_request(
"GET", self.url, access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 20)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["event_reports"])
def test_limit(self):
"""
Testing list of reported events with limit
"""
request, channel = self.make_request(
"GET", self.url + "?limit=5", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 5)
self.assertEqual(channel.json_body["next_token"], 5)
self._check_fields(channel.json_body["event_reports"])
def test_from(self):
"""
Testing list of reported events with a defined starting point (from)
"""
request, channel = self.make_request(
"GET", self.url + "?from=5", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 15)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["event_reports"])
def test_limit_and_from(self):
"""
Testing list of reported events with a defined starting point and limit
"""
request, channel = self.make_request(
"GET", self.url + "?from=5&limit=10", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(channel.json_body["next_token"], 15)
self.assertEqual(len(channel.json_body["event_reports"]), 10)
self._check_fields(channel.json_body["event_reports"])
def test_filter_room(self):
"""
Testing list of reported events with a filter of room
"""
request, channel = self.make_request(
"GET",
self.url + "?room_id=%s" % self.room_id1,
access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 10)
self.assertEqual(len(channel.json_body["event_reports"]), 10)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["event_reports"])
for report in channel.json_body["event_reports"]:
self.assertEqual(report["room_id"], self.room_id1)
def test_filter_user(self):
"""
Testing list of reported events with a filter of user
"""
request, channel = self.make_request(
"GET",
self.url + "?user_id=%s" % self.other_user,
access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 10)
self.assertEqual(len(channel.json_body["event_reports"]), 10)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["event_reports"])
for report in channel.json_body["event_reports"]:
self.assertEqual(report["user_id"], self.other_user)
def test_filter_user_and_room(self):
"""
Testing list of reported events with a filter of user and room
"""
request, channel = self.make_request(
"GET",
self.url + "?user_id=%s&room_id=%s" % (self.other_user, self.room_id1),
access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 5)
self.assertEqual(len(channel.json_body["event_reports"]), 5)
self.assertNotIn("next_token", channel.json_body)
self._check_fields(channel.json_body["event_reports"])
for report in channel.json_body["event_reports"]:
self.assertEqual(report["user_id"], self.other_user)
self.assertEqual(report["room_id"], self.room_id1)
def test_valid_search_order(self):
"""
Testing search order. Order by timestamps.
"""
# fetch the most recent first, largest timestamp
request, channel = self.make_request(
"GET", self.url + "?dir=b", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 20)
report = 1
while report < len(channel.json_body["event_reports"]):
self.assertGreaterEqual(
channel.json_body["event_reports"][report - 1]["received_ts"],
channel.json_body["event_reports"][report]["received_ts"],
)
report += 1
# fetch the oldest first, smallest timestamp
request, channel = self.make_request(
"GET", self.url + "?dir=f", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 20)
report = 1
while report < len(channel.json_body["event_reports"]):
self.assertLessEqual(
channel.json_body["event_reports"][report - 1]["received_ts"],
channel.json_body["event_reports"][report]["received_ts"],
)
report += 1
def test_invalid_search_order(self):
"""
        Testing that an invalid search order returns a 400
"""
request, channel = self.make_request(
"GET", self.url + "?dir=bar", access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
self.assertEqual("Unknown direction: bar", channel.json_body["error"])
def test_limit_is_negative(self):
"""
Testing that a negative limit parameter returns a 400
"""
request, channel = self.make_request(
"GET", self.url + "?limit=-5", access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
def test_from_is_negative(self):
"""
Testing that a negative from parameter returns a 400
"""
request, channel = self.make_request(
"GET", self.url + "?from=-5", access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
def test_next_token(self):
"""
Testing that `next_token` appears at the right place
"""
# `next_token` does not appear
# Number of results is the number of entries
request, channel = self.make_request(
"GET", self.url + "?limit=20", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 20)
self.assertNotIn("next_token", channel.json_body)
# `next_token` does not appear
# Number of max results is larger than the number of entries
request, channel = self.make_request(
"GET", self.url + "?limit=21", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 20)
self.assertNotIn("next_token", channel.json_body)
# `next_token` does appear
# Number of max results is smaller than the number of entries
request, channel = self.make_request(
"GET", self.url + "?limit=19", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 19)
self.assertEqual(channel.json_body["next_token"], 19)
        # Set `from` to the value of `next_token` to request the remaining entries
        # `next_token` does not appear
request, channel = self.make_request(
"GET", self.url + "?from=19", access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(channel.json_body["total"], 20)
self.assertEqual(len(channel.json_body["event_reports"]), 1)
self.assertNotIn("next_token", channel.json_body)
def _create_event_and_report(self, room_id, user_tok):
"""Create and report events
"""
resp = self.helper.send(room_id, tok=user_tok)
event_id = resp["event_id"]
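        # Report the event through the standard client-server report API
        # (POST /rooms/{room_id}/report/{event_id}); per the spec, a score of
        # -100 means "most offensive".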
request, channel = self.make_request(
"POST",
"rooms/%s/report/%s" % (room_id, event_id),
json.dumps({"score": -100, "reason": "this makes me sad"}),
access_token=user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
def _check_fields(self, content):
"""Checks that all attributes are present in an event report
"""
for c in content:
self.assertIn("id", c)
self.assertIn("received_ts", c)
self.assertIn("room_id", c)
self.assertIn("event_id", c)
self.assertIn("user_id", c)
self.assertIn("sender", c)
self.assertIn("canonical_alias", c)
self.assertIn("name", c)
self.assertIn("score", c)
self.assertIn("reason", c)
class EventReportDetailTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
report_event.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.other_user = self.register_user("user", "pass")
self.other_user_tok = self.login("user", "pass")
self.room_id1 = self.helper.create_room_as(
self.other_user, tok=self.other_user_tok, is_public=True
)
self.helper.join(self.room_id1, user=self.admin_user, tok=self.admin_user_tok)
self._create_event_and_report(
room_id=self.room_id1, user_tok=self.other_user_tok,
)
# first created event report gets `id`=2
self.url = "/_synapse/admin/v1/event_reports/2"
def test_no_auth(self):
"""
Try to get event report without authentication.
"""
request, channel = self.make_request("GET", self.url, b"{}")
self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
def test_requester_is_no_admin(self):
"""
If the user is not a server admin, an error 403 is returned.
"""
request, channel = self.make_request(
"GET", self.url, access_token=self.other_user_tok,
)
self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
def test_default_success(self):
"""
        Testing that a reported event can be fetched
"""
request, channel = self.make_request(
"GET", self.url, access_token=self.admin_user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self._check_fields(channel.json_body)
def test_invalid_report_id(self):
"""
Testing that an invalid `report_id` returns a 400.
"""
# `report_id` is negative
request, channel = self.make_request(
"GET",
"/_synapse/admin/v1/event_reports/-123",
access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
self.assertEqual(
"The report_id parameter must be a string representing a positive integer.",
channel.json_body["error"],
)
# `report_id` is a non-numerical string
request, channel = self.make_request(
"GET",
"/_synapse/admin/v1/event_reports/abcdef",
access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
self.assertEqual(
"The report_id parameter must be a string representing a positive integer.",
channel.json_body["error"],
)
# `report_id` is undefined
request, channel = self.make_request(
"GET",
"/_synapse/admin/v1/event_reports/",
access_token=self.admin_user_tok,
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
self.assertEqual(
"The report_id parameter must be a string representing a positive integer.",
channel.json_body["error"],
)
def test_report_id_not_found(self):
"""
        Testing that a non-existent `report_id` returns a 404.
"""
request, channel = self.make_request(
"GET",
"/_synapse/admin/v1/event_reports/123",
access_token=self.admin_user_tok,
)
self.assertEqual(404, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
self.assertEqual("Event report not found", channel.json_body["error"])
def _create_event_and_report(self, room_id, user_tok):
"""Create and report events
"""
resp = self.helper.send(room_id, tok=user_tok)
event_id = resp["event_id"]
request, channel = self.make_request(
"POST",
"rooms/%s/report/%s" % (room_id, event_id),
json.dumps({"score": -100, "reason": "this makes me sad"}),
access_token=user_tok,
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
def _check_fields(self, content):
"""Checks that all attributes are present in a event report
"""
self.assertIn("id", content)
self.assertIn("received_ts", content)
self.assertIn("room_id", content)
self.assertIn("event_id", content)
self.assertIn("user_id", content)
self.assertIn("sender", content)
self.assertIn("canonical_alias", content)
self.assertIn("name", content)
self.assertIn("event_json", content)
self.assertIn("score", content)
self.assertIn("reason", content)
self.assertIn("auth_events", content["event_json"])
self.assertIn("type", content["event_json"])
self.assertIn("room_id", content["event_json"])
self.assertIn("sender", content["event_json"])
self.assertIn("content", content["event_json"])
| 38.001901 | 88 | 0.626344 |
fe1dd34e6ed33673271bf42f58898744e05e30de | 4,799 | py | Python | tests/snmp/test_snmp_lldp.py | xwjiang2021/sonic-mgmt | 82c446b9fb016eb070af765aa9d9999e55b27342 | ["Apache-2.0"] | 2 | 2021-11-24T09:33:41.000Z | 2021-12-03T09:08:29.000Z | tests/snmp/test_snmp_lldp.py | xwjiang2021/sonic-mgmt | 82c446b9fb016eb070af765aa9d9999e55b27342 | ["Apache-2.0"] | null | null | null | tests/snmp/test_snmp_lldp.py | xwjiang2021/sonic-mgmt | 82c446b9fb016eb070af765aa9d9999e55b27342 | ["Apache-2.0"] | null | null | null |
import logging
import re
import pytest
from tests.common.helpers.snmp_helpers import get_snmp_facts
pytestmark = [
pytest.mark.topology('any'),
pytest.mark.device_type('vs')
]
logger = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse="True")
def lldp_setup(duthosts, enum_rand_one_per_hwsku_hostname, patch_lldpctl, unpatch_lldpctl, localhost):
duthost = duthosts[enum_rand_one_per_hwsku_hostname]
if duthost.is_supervisor_node():
pytest.skip("LLDP not supported on supervisor node")
patch_lldpctl(localhost, duthost)
yield
unpatch_lldpctl(localhost, duthost)
@pytest.mark.bsl
def test_snmp_lldp(duthosts, enum_rand_one_per_hwsku_hostname, localhost, creds_all_duts, tbinfo):
"""
Test checks for ieee802_1ab MIBs:
- lldpLocalSystemData 1.0.8802.1.1.2.1.3
- lldpLocPortTable 1.0.8802.1.1.2.1.3.7
- lldpLocManAddrTable 1.0.8802.1.1.2.1.3.8
- lldpRemTable 1.0.8802.1.1.2.1.4.1
- lldpRemManAddrTable 1.0.8802.1.1.2.1.4.2
    For local data, check that every OID has a value.
    For remote values, check availability for at least 80% of the minigraph neighbors
    (similar to the lldp test).
"""
duthost = duthosts[enum_rand_one_per_hwsku_hostname]
if duthost.is_supervisor_node():
pytest.skip("LLDP not supported on supervisor node")
hostip = duthost.host.options['inventory_manager'].get_host(duthost.hostname).vars['ansible_host']
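    # Poll the DUT's LLDP MIBs over SNMPv2c using the read-only community
    # configured for this testbed; the parsed results land under ansible_facts.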
snmp_facts = get_snmp_facts(localhost, host=hostip, version="v2c", community=creds_all_duts[duthost.hostname]["snmp_rocommunity"], wait=True)['ansible_facts']
mg_facts = {}
for asic_id in duthost.get_asic_ids():
mg_facts_ns = duthost.asic_instance(asic_id).get_extended_minigraph_facts(tbinfo)['minigraph_neighbors']
if mg_facts_ns is not None:
mg_facts.update(mg_facts_ns)
logger.info('snmp_lldp: {}'.format(snmp_facts['snmp_lldp']))
for k in ['lldpLocChassisIdSubtype', 'lldpLocChassisId', 'lldpLocSysName', 'lldpLocSysDesc']:
assert snmp_facts['snmp_lldp'][k]
assert "No Such Object currently exists" not in snmp_facts['snmp_lldp'][k]
# Check if lldpLocPortTable is present for all ports
for k, v in snmp_facts['snmp_interfaces'].items():
if "Ethernet" in v['name'] or "eth" in v['name']:
for oid in ['lldpLocPortIdSubtype', 'lldpLocPortId', 'lldpLocPortDesc']:
                assert oid in v
assert "No Such Object currently exists" not in v[oid]
# Check if lldpLocManAddrTable is present
for k in ['lldpLocManAddrLen', \
'lldpLocManAddrIfSubtype', \
'lldpLocManAddrIfId', \
'lldpLocManAddrOID']:
assert snmp_facts['snmp_lldp'][k]
assert "No Such Object currently exists" not in snmp_facts['snmp_lldp'][k]
minigraph_lldp_nei = []
for k, v in mg_facts.items():
if "server" not in v['name'].lower():
minigraph_lldp_nei.append(k)
logger.info('minigraph_lldp_nei: {}'.format(minigraph_lldp_nei))
# Check if lldpRemTable is present
active_intf = []
for k, v in snmp_facts['snmp_interfaces'].items():
if v.has_key("lldpRemChassisIdSubtype") and \
v.has_key("lldpRemChassisId") and \
v.has_key("lldpRemPortIdSubtype") and \
v.has_key("lldpRemPortId") and \
v.has_key("lldpRemPortDesc") and \
v.has_key("lldpRemSysName") and \
v.has_key("lldpRemSysDesc") and \
v.has_key("lldpRemSysCapSupported") and \
v.has_key("lldpRemSysCapEnabled"):
active_intf.append(k)
logger.info('lldpRemTable: {}'.format(active_intf))
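    # Tolerate some missing neighbors: at least 80% of the non-server
    # minigraph neighbors must appear in lldpRemTable.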
assert len(active_intf) >= len(minigraph_lldp_nei) * 0.8
# skip neighbors that do not send chassis information via lldp
    lldp_facts = {}
for asic_id in duthost.get_asic_ids():
lldp_facts_ns = duthost.lldpctl_facts(asic_instance_id=asic_id)['ansible_facts']['lldpctl']
if lldp_facts_ns is not None:
lldp_facts.update(lldp_facts_ns)
pattern = re.compile(r'^eth0|^Ethernet-IB')
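    # Count only neighbors that advertise a chassis management IP; eth0
    # (management) and Ethernet-IB (inband) ports are filtered out here.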
    nei = [k for k, v in lldp_facts.items() if not re.match(pattern, k) and 'mgmt-ip' in v['chassis']]
logger.info("neighbors {} send chassis management IP information".format(nei))
# Check if lldpRemManAddrTable is present
active_intf = []
for k, v in snmp_facts['snmp_interfaces'].items():
if v.has_key("lldpRemManAddrIfSubtype") and \
v.has_key("lldpRemManAddrIfId") and \
v.has_key("lldpRemManAddrOID") and \
v['name'] != 'eth0' and 'Etherent-IB' not in v['name']:
active_intf.append(k)
logger.info('lldpRemManAddrTable: {}'.format(active_intf))
assert len(active_intf) == len(nei)
| 41.37069 | 162 | 0.675974 |