| column | dtype | range | nullable |
|---|---|---|---|
| hexsha | string | length 40 | no |
| size | int64 | 3 to 1.03M | no |
| ext | string | 10 classes | no |
| lang | string | 1 class | no |
| max_stars_repo_path | string | length 3 to 972 | no |
| max_stars_repo_name | string | length 6 to 130 | no |
| max_stars_repo_head_hexsha | string | length 40 to 78 | no |
| max_stars_repo_licenses | list | length 1 to 10 | no |
| max_stars_count | int64 | 1 to 191k | yes |
| max_stars_repo_stars_event_min_datetime | string | length 24 | yes |
| max_stars_repo_stars_event_max_datetime | string | length 24 | yes |
| max_issues_repo_path | string | length 3 to 972 | no |
| max_issues_repo_name | string | length 6 to 130 | no |
| max_issues_repo_head_hexsha | string | length 40 to 78 | no |
| max_issues_repo_licenses | list | length 1 to 10 | no |
| max_issues_count | int64 | 1 to 116k | yes |
| max_issues_repo_issues_event_min_datetime | string | length 24 | yes |
| max_issues_repo_issues_event_max_datetime | string | length 24 | yes |
| max_forks_repo_path | string | length 3 to 972 | no |
| max_forks_repo_name | string | length 6 to 130 | no |
| max_forks_repo_head_hexsha | string | length 40 to 78 | no |
| max_forks_repo_licenses | list | length 1 to 10 | no |
| max_forks_count | int64 | 1 to 105k | yes |
| max_forks_repo_forks_event_min_datetime | string | length 24 | yes |
| max_forks_repo_forks_event_max_datetime | string | length 24 | yes |
| content | string | length 3 to 1.03M | no |
| avg_line_length | float64 | 1.13 to 941k | no |
| max_line_length | int64 | 2 to 941k | no |
| alphanum_fraction | float64 | 0 to 1 | no |
hexsha: 0ab137aadb320eb130c9c45a932452978fad5116 | size: 1,160 | ext: py | lang: Python
path: robonaldo/context/entities.py | repo: xtrm-en/robonaldo | head: 91601bad88043effbb717e40467526fe11bd4cb5 | licenses: ["0BSD"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
import time
from typing import Tuple


class Updatable:
    def __init__(self, last_update: int = -1):
        self.last_update = last_update

    def update(self) -> None:
        self.last_update = int(time.time() * 1000)

    def get_delta(self) -> int:
        # Truncate to int so the return value matches the annotation.
        return int(time.time() * 1000) - self.last_update


class Positionable(Updatable):
    def __init__(self, position: Tuple[float, float] = (0, 0), last_update: int = -1):
        super().__init__(last_update)
        self.position = position

    @property
    def x(self) -> float:
        return self.position[0]

    @property
    def y(self) -> float:
        return self.position[1]

    def update(self, position: Tuple[float, float]) -> None:
        if position is None:
            return
        super().update()
        self.position = position


class Rotatable(Positionable):
    def __init__(self, position: Tuple[float, float], rotation: float):
        super().__init__(position)
        self.rotation = rotation

    def update(self, position: Tuple[float, float], rotation: float) -> None:
        if position is None or rotation is None:
            return
        super().update(position)
        self.rotation = rotation
avg_line_length: 25.777778 | max_line_length: 81 | alphanum_fraction: 0.607759
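A minimal usage sketch for the entity classes above; the millisecond values depend on the wall clock and are illustrative only:

```python
# Usage sketch for Updatable/Positionable/Rotatable above.
robot = Rotatable(position=(1.0, 2.0), rotation=90.0)
robot.update((3.5, 4.0), 180.0)          # refreshes position, rotation, last_update
print(robot.x, robot.y, robot.rotation)  # 3.5 4.0 180.0
print(robot.get_delta())                 # milliseconds since the last update
```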
hexsha: 4d629a6c9384bcae2fbff1d73ac2f10b0dbe0881 | size: 679 | ext: py | lang: Python
path: scripts/touchscreens.py | repo: markusa380/yourcontrols | head: db77977e44816873346ee177567d06f19b858cf0 | licenses: ["Apache-2.0"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
import os

import yaml

PREFIX_PREFIX = "YCB_"
SPACING = " "

count = 0
# Context managers ensure the output files are flushed and closed even if
# one of the YAML files fails to parse.
with open("out/touchscreenkeys.yaml", "w") as yaml_file, \
        open("out/TouchScreenKeys.js", "w") as js_file:
    js_file.write("var instrumentButtonMapping = {\n")
    for filename in os.listdir("touchscreens/"):
        with open("touchscreens/" + filename, "r") as f:
            data = yaml.load(f, Loader=yaml.Loader)
        for entry in data:
            for element in entry["elements"]:
                for prefix in entry["instruments"]:
                    yaml_file.write(f"{PREFIX_PREFIX}{prefix}#{element}: {count}\n")
                    js_file.write(f"{SPACING}\"{prefix}_{element}\": {count},\n")
                    count += 1
    js_file.write("}")
avg_line_length: 30.863636 | max_line_length: 80 | alphanum_fraction: 0.608247
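The script above expects each YAML document under `touchscreens/` to be a list of entries with `instruments` and `elements` keys. A hypothetical input and the lines it would emit (the instrument and element names here are invented for illustration):

```python
# Hypothetical shape of one touchscreens/*.yaml document, reconstructed
# from how the script indexes it; the names are invented.
example = [
    {"instruments": ["GARMIN_G1000"], "elements": ["BTN_DIRECT", "BTN_MENU"]},
]
# With count starting at 0, this entry would emit:
#   out/touchscreenkeys.yaml -> YCB_GARMIN_G1000#BTN_DIRECT: 0
#                               YCB_GARMIN_G1000#BTN_MENU: 1
#   out/TouchScreenKeys.js   -> "GARMIN_G1000_BTN_DIRECT": 0,
#                               "GARMIN_G1000_BTN_MENU": 1,
```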
hexsha: 1c4a24828c826c88606ac6a3b6cd095dfec7005a | size: 15,703 | ext: py | lang: Python
path: tests/chainer_tests/test_optimizer.py | licenses: ["MIT"]
stars: repo toshihikoyanase/chainer @ 65b34a19d28f60f732c7069163ca23c710a309f4; count and event datetimes null
issues: repo toshihikoyanase/chainer @ 65b34a19d28f60f732c7069163ca23c710a309f4; count 2, events 2018-01-09T23:05:30.000Z to 2018-01-19T01:19:34.000Z
forks: repo bkvogel/chainer @ 894cd5d008f11eccdf6e1d7106f5b8bfff9ce005; count and event datetimes null
import copy
import unittest
import warnings
import mock
import numpy as np
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer import optimizer
from chainer import optimizers
from chainer import testing
from chainer.testing import attr
class TestHyperparameter(unittest.TestCase):
def setUp(self):
self.parent = optimizer.Hyperparameter()
self.parent.x = 1
self.parent.y = 2
self.child = optimizer.Hyperparameter(self.parent)
self.child.y = 3
self.child.z = 4
def test_getattr(self):
self.assertTrue(hasattr(self.parent, 'x'))
self.assertEqual(self.parent.x, 1)
self.assertTrue(hasattr(self.parent, 'y'))
self.assertEqual(self.parent.y, 2)
self.assertFalse(hasattr(self.parent, 'z'))
self.assertTrue(hasattr(self.child, 'x'))
self.assertEqual(self.child.x, 1)
self.assertTrue(hasattr(self.child, 'y'))
self.assertEqual(self.child.y, 3)
self.assertTrue(hasattr(self.child, 'z'))
self.assertEqual(self.child.z, 4)
def test_get_dict(self):
self.assertEqual(self.parent.get_dict(), {'x': 1, 'y': 2})
self.assertEqual(self.child.get_dict(), {'x': 1, 'y': 3, 'z': 4})
def test_repr(self):
self.assertEqual(repr(self.parent), 'Hyperparameter(x=1, y=2)')
self.assertEqual(repr(self.child), 'Hyperparameter(x=1, y=3, z=4)')
def test_deep_copy(self):
parent_copy, child_copy = copy.deepcopy([self.parent, self.child])
self.assertEqual(self.child.get_dict(), child_copy.get_dict())
self.assertEqual(self.parent.get_dict(), parent_copy.get_dict())
self.assertIs(child_copy.parent, parent_copy)
class TestUpdateRule(unittest.TestCase):
def setUp(self):
self.data = np.ones((2, 3), np.float32)
self.grad = np.ones_like(self.data)
self.var = chainer.Variable(self.data, grad=self.grad)
self.update_rule = optimizer.UpdateRule()
self.update_rule.update_core_cpu = mock.MagicMock()
self.update_rule.update_core_gpu = mock.MagicMock()
def test_update_cpu(self):
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core_cpu.call_count, 1)
self.assertEqual(self.update_rule.update_core_gpu.call_count, 0)
@attr.gpu
def test_update_gpu(self):
self.var.to_gpu()
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core_cpu.call_count, 0)
self.assertEqual(self.update_rule.update_core_gpu.call_count, 1)
def check_add_hook(self, hook):
self.update_rule.update(self.var)
self.assertEqual(hook.call_count, 1)
args = hook.call_args_list[0][0]
self.assertEqual(len(args), 2)
self.assertIs(args[0], self.update_rule)
self.assertIs(args[1], self.var)
def test_add_hook(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook)
self.check_add_hook(hook)
def test_add_hook_with_name(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook, name='hook')
self.check_add_hook(hook)
def test_remove_hook(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook, name='hook')
self.update_rule.remove_hook('hook')
self.update_rule.update(self.var)
self.assertEqual(hook.call_count, 0)
def test_add_hook_with_function_name(self):
hook_body = mock.MagicMock()
def foo(update_rule, data, grad):
hook_body(update_rule, data, grad)
self.update_rule.add_hook(foo)
self.update_rule.remove_hook('foo')
self.update_rule.update(self.var)
self.assertEqual(hook_body.call_count, 0)
def test_add_hook_no_name(self):
class CallableWithoutName(object):
def __call__(self, update_rule, param):
pass
with self.assertRaises(ValueError):
self.update_rule.add_hook(CallableWithoutName())
def test_add_hook_duplicated_name(self):
self.update_rule.add_hook(mock.MagicMock(), name='foo')
with self.assertRaises(ValueError):
self.update_rule.add_hook(mock.MagicMock(), name='foo')
def test_remove_hook_not_exist(self):
with self.assertRaises(KeyError):
self.update_rule.remove_hook('foo')
def test_disabled_update_rule(self):
self.update_rule.update_core = mock.MagicMock()
self.update_rule.enabled = False
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core.call_count, 0)
self.update_rule.enabled = True
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core.call_count, 1)
def setup_state(self):
def init_state(data):
state = self.update_rule.state
state['a'] = 0
state['b'] = np.array([1, 2, 3], dtype=np.float32)
self.update_rule.init_state = init_state
@attr.gpu
def test_state_copy_to_gpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['a'], int)
self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
self.update_rule.update_core = update_core
self.var.to_gpu()
self.update_rule.update(self.var)
@attr.multi_gpu(2)
def test_state_copy_to_another_gpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
self.assertEqual(self.update_rule.state['b'].device.id, 1)
# call update with arrays on GPU 0 (tested by another method)
self.update_rule.update_core = lambda param: None
self.update_rule.update(chainer.Variable(
cuda.to_gpu(self.data, 0), grad=cuda.to_gpu(self.grad, 0)))
# check if it copies the states correctly when arrays on another GPU
# are passed
self.update_rule.update_core = update_core
self.update_rule.update(chainer.Variable(
cuda.to_gpu(self.data, 1), grad=cuda.to_gpu(self.grad, 1)))
@attr.gpu
def test_state_copy_to_cpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['a'], int)
self.assertIsInstance(self.update_rule.state['b'], np.ndarray)
self.var.to_gpu()
self.update_rule.update(self.var)
self.var.to_cpu()
self.update_rule.update_core = update_core
self.update_rule.update(self.var)
class TestOptimizer(unittest.TestCase):
def setUp(self):
self.optimizer = optimizer.Optimizer()
def test_new_epoch(self):
self.optimizer.new_epoch()
self.assertEqual(1, self.optimizer.epoch)
def test_invalid_new_epoch(self):
self.optimizer.use_auto_new_epoch = True
with self.assertRaises(RuntimeError):
self.optimizer.new_epoch()
def test_auto_new_epoch(self):
self.optimizer.use_auto_new_epoch = True
self.optimizer.new_epoch(auto=True)
self.assertEqual(1, self.optimizer.epoch)
def test_invalid_auto_new_epoch(self):
with self.assertRaises(RuntimeError):
self.optimizer.new_epoch(auto=True)
class TestOptimizerHook(unittest.TestCase):
def setUp(self):
self.optimizer = optimizer.Optimizer()
self.target = SimpleLink(
np.arange(6, dtype=np.float32).reshape(2, 3),
np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
def test_add_hook(self):
h1 = mock.MagicMock(timing='pre')
h1.call_for_each_param = False
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.call_hooks()
h1.assert_called_with(self.optimizer)
def test_add_hook_call_for_each_param(self):
h1 = mock.MagicMock(timing='pre')
h1.call_for_each_param = True
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.call_hooks()
h1.assert_called_with(self.target.param.update_rule, self.target.param)
def test_remove_hook(self):
h1 = mock.MagicMock(timing='pre')
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.remove_hook('h1')
self.optimizer.call_hooks()
self.assertFalse(h1.called)
def test_duplicated_hook(self):
self.optimizer.setup(self.target)
self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
with self.assertRaises(KeyError):
self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
def test_invalid_hook(self):
with self.assertRaises(TypeError):
self.optimizer.add_hook(1)
def test_add_hook_before_setup(self):
with self.assertRaises(RuntimeError):
self.optimizer.add_hook(lambda s: None, 'h1')
class SimpleLink(chainer.Link):
def __init__(self, w, g):
super(SimpleLink, self).__init__()
with self.init_scope():
self.param = chainer.Parameter(w)
self.param.grad = g
class TestGradientMethod(unittest.TestCase):
def setUp(self):
self.optimizer = chainer.GradientMethod()
self.target = chainer.ChainList(
SimpleLink(np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32)),
SimpleLink(np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32)))
self.optimizer.create_update_rule = mock.MagicMock
def setup_cpu(self):
self.optimizer.setup(self.target)
def setup_gpu(self, device=None):
self.target.to_gpu(device)
self.optimizer.setup(self.target)
def test_setup(self):
create_update_rule = mock.MagicMock()
self.optimizer.create_update_rule = create_update_rule
self.optimizer.setup(self.target)
self.assertEqual(create_update_rule.call_count, 2)
self.assertEqual(create_update_rule.call_args_list[0], [(), {}])
self.assertEqual(create_update_rule.call_args_list[1], [(), {}])
def check_update(self):
self.assertEqual(self.optimizer.t, 0)
self.optimizer.update()
self.assertEqual(self.optimizer.t, 1)
self.target[0].param.update_rule.update.assert_called_once_with(
self.target[0].param)
self.target[1].param.update_rule.update.assert_called_once_with(
self.target[1].param)
def test_update_cpu(self):
self.setup_cpu()
self.check_update()
@attr.gpu
def test_update_gpu(self):
self.setup_gpu()
self.check_update()
@testing.parameterize(*testing.product({
'shape': [(4, 3, 2)],
'dtype': [np.float16, np.float32, np.float64],
'loss_scale': [None, 1, 10],
}))
class TestGradientMethodLossScale(unittest.TestCase):
def setUp(self):
param0_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param0_grad = np.copy(param0_data)
param1_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param1_grad = np.copy(param1_data)
self.target = chainer.ChainList(
SimpleLink(param0_data, param0_grad),
SimpleLink(param1_data, param1_grad))
lr = 1.0
if self.loss_scale is not None:
lr = self.loss_scale
for i in range(2):
self.target[i].param._loss_scale = self.loss_scale
self.optimizer = chainer.optimizers.SGD(lr)
def setup_cpu(self):
self.optimizer.setup(self.target)
def setup_gpu(self, device=None):
self.target.to_gpu(device)
self.optimizer.setup(self.target)
def check_update(self):
self.optimizer.update()
xp = backend.get_array_module(self.target[0].param)
expected_data = xp.zeros(self.shape, dtype=self.dtype)
rtol, atol = 1e-4, 1e-5
if self.dtype is np.float16:
rtol, atol = 1e-1, 1e-2
for i in range(2):
testing.assert_allclose(self.target[i].param.data, expected_data,
rtol=rtol, atol=atol)
def test_update_cpu(self):
self.setup_cpu()
self.check_update()
@attr.gpu
def test_update_gpu(self):
self.setup_gpu()
self.check_update()
class TestCleargradHook(unittest.TestCase):
def setUp(self):
self.target = SimpleLink(
np.arange(6, dtype=np.float32).reshape(2, 3),
np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
def check_cleargrad(self):
opt = optimizers.SGD(lr=1)
opt.setup(self.target)
opt.add_hook(CleargradHook(self))
opt.add_hook(DummyHook(self))
opt.update()
def test_cleargrad_cpu(self):
self.check_cleargrad()
@attr.gpu
def test_cleargrad_gpu(self):
self.target.to_gpu()
self.check_cleargrad()
class DummyOptimizer(chainer.GradientMethod):
def __init__(self, test):
super(DummyOptimizer, self).__init__()
self.test = test
def create_update_rule(self):
return mock.MagicMock()
class DummyHook(object):
name = 'Dummy'
timing = 'pre'
def __init__(self, test):
self.test = test
def __call__(self, opt):
for param in opt.target.params():
# Confirm all grads are not None
self.test.assertIsNotNone(param.grad)
class CleargradHook(object):
name = 'Cleargrad'
timing = 'pre'
def __init__(self, _):
pass
def __call__(self, opt):
for param in opt.target.params():
# Clear all grads
param.cleargrad()
class TestGradientMethodClearGrads(unittest.TestCase):
def setUp(self):
self.optimizer = DummyOptimizer(self)
self.target = SimpleLink(
np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32))
self.optimizer.setup(self.target)
self.optimizer.add_hook(DummyHook(self))
def test_update(self):
self.target.cleargrads()
self.optimizer.update()
class TestDeprecatedOptimizerHooksEmitsWarning(unittest.TestCase):
def setUp(self):
self.context = warnings.catch_warnings(record=True)
self.warnings = self.context.__enter__()
warnings.filterwarnings(action='always', category=DeprecationWarning)
def tearDown(self):
self.context.__exit__()
def test_gradient_clipping(self):
chainer.optimizer.GradientClipping(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_hard_clipping(self):
chainer.optimizer.GradientHardClipping(1., 2.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_noise(self):
chainer.optimizer.GradientNoise(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_lasso(self):
chainer.optimizer.Lasso(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_weight_decay(self):
chainer.optimizer.WeightDecay(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
testing.run_module(__name__, __file__)
avg_line_length: 32.244353 | max_line_length: 79 | alphanum_fraction: 0.650513
hexsha: 94c89bb55a44131467dcc7bd7f03a302328f7022 | size: 5,997 | ext: py | lang: Python
path: tests/test_bake_project.py | repo: imartinezl/cookiecutter-python | head: 401af89f199a42d5335318e3dc9d66a5726ccd8e | licenses: ["MIT"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
from contextlib import contextmanager
import shlex
import os
import sys
import subprocess
import yaml
import datetime
from cookiecutter.utils import rmtree
from click.testing import CliRunner
import importlib
@contextmanager
def inside_dir(dirpath):
"""
Execute code from inside the given directory
    :param dirpath: String, path of the directory in which the command is run.
"""
old_path = os.getcwd()
try:
os.chdir(dirpath)
yield
finally:
os.chdir(old_path)
@contextmanager
def bake_in_temp_dir(cookies, *args, **kwargs):
"""
    Bake a cookie and delete the temporary directory created for the test
    :param cookies: pytest_cookies.Cookies,
        cookie to be baked; its temporary files will be removed afterwards
"""
result = cookies.bake(*args, **kwargs)
try:
yield result
finally:
rmtree(str(result.project))
def run_inside_dir(command, dirpath):
"""
Run a command from inside a given directory, returning the exit status
:param command: Command that will be executed
    :param dirpath: String, path of the directory in which the command is run.
"""
with inside_dir(dirpath):
return subprocess.check_call(shlex.split(command))
def check_output_inside_dir(command, dirpath):
"Run a command from inside a given directory, returning the command output"
with inside_dir(dirpath):
return subprocess.check_output(shlex.split(command))
def test_year_compute_in_license_file(cookies):
with bake_in_temp_dir(cookies) as result:
license_file_path = result.project.join('LICENSE')
now = datetime.datetime.now()
assert str(now.year) in license_file_path.read()
def project_info(result):
"""Get toplevel dir, project_slug, and project dir from baked cookies"""
project_path = str(result.project)
project_slug = os.path.split(project_path)[-1]
project_dir = os.path.join(project_path, project_slug)
return project_path, project_slug, project_dir
def test_bake_with_defaults(cookies):
with bake_in_temp_dir(cookies) as result:
assert result.project.isdir()
assert result.exit_code == 0
assert result.exception is None
found_toplevel_files = [f.basename for f in result.project.listdir()]
assert 'setup.py' in found_toplevel_files
assert 'python_boilerplate' in found_toplevel_files
assert 'tox.ini' in found_toplevel_files
assert 'tests' in found_toplevel_files
def test_bake_and_run_tests(cookies):
with bake_in_temp_dir(cookies) as result:
assert result.project.isdir()
        assert run_inside_dir('python setup.py test', str(result.project)) == 0
        print("test_bake_and_run_tests path", str(result.project))
def test_bake_withspecialchars_and_run_tests(cookies):
"""Ensure that a `author` with double quotes does not break setup.py"""
with bake_in_temp_dir(
cookies,
extra_context={'author': 'name "quote" name'}
) as result:
assert result.project.isdir()
        assert run_inside_dir('python setup.py test', str(result.project)) == 0
def test_bake_with_apostrophe_and_run_tests(cookies):
"""Ensure that a `author` with apostrophes does not break setup.py"""
with bake_in_temp_dir(
cookies,
extra_context={'author': "O'connor"}
) as result:
assert result.project.isdir()
        assert run_inside_dir('python setup.py test', str(result.project)) == 0
def test_make_help(cookies):
with bake_in_temp_dir(cookies) as result:
# The supplied Makefile does not support win32
if sys.platform != "win32":
output = check_output_inside_dir(
'make help',
str(result.project)
)
assert b"check code coverage quickly with the default Python" in \
output
def test_bake_selecting_license(cookies):
license_strings = {
'MIT license': 'MIT ',
'BSD license': 'Redistributions of source code must retain the ' +
'above copyright notice, this',
'ISC license': 'ISC License',
'Apache Software License 2.0':
'Licensed under the Apache License, Version 2.0',
'GNU General Public License v3': 'GNU GENERAL PUBLIC LICENSE',
}
for license, target_string in license_strings.items():
with bake_in_temp_dir(
cookies,
extra_context={'open_source_license': license}
) as result:
assert target_string in result.project.join('LICENSE').read()
assert license in result.project.join('setup.py').read()
def test_bake_not_open_source(cookies):
with bake_in_temp_dir(
cookies,
extra_context={'open_source_license': 'Not open source'}
) as result:
found_toplevel_files = [f.basename for f in result.project.listdir()]
assert 'setup.py' in found_toplevel_files
assert 'LICENSE' not in found_toplevel_files
assert 'License' not in result.project.join('README.rst').read()
def test_using_pytest(cookies):
with bake_in_temp_dir(cookies) as result:
assert result.project.isdir()
test_file_path = result.project.join(
'tests/test_python_boilerplate.py'
)
lines = test_file_path.readlines()
assert "import pytest" in ''.join(lines)
        # Test the new pytest target
        assert run_inside_dir('python setup.py pytest', str(result.project)) == 0
        # Test the test alias (which invokes pytest)
        assert run_inside_dir('python setup.py test', str(result.project)) == 0
def test_not_using_pytest(cookies):
with bake_in_temp_dir(cookies) as result:
assert result.project.isdir()
test_file_path = result.project.join(
'tests/test_python_boilerplate.py'
)
lines = test_file_path.readlines()
assert "import unittest" in ''.join(lines)
assert "import pytest" not in ''.join(lines)
avg_line_length: 33.316667 | max_line_length: 79 | alphanum_fraction: 0.675504
hexsha: ed0014e5ee2db303ae3e62d2705c1d5eb230176b | size: 1,782 | ext: py | lang: Python
path: 06/part2/main.py | repo: orrinjelo/AdventOfCode2018 | head: 7455737bebfb56d7912c8f8760a55ea0a5b240a3 | licenses: ["MIT"]
stars: count 1, events 2020-12-14T21:05:28.000Z to 2020-12-14T21:05:28.000Z | issues/forks: counts and event datetimes null
#!/usr/bin/env python3
import os, sys
import numpy as np
from operator import itemgetter
from pprint import pprint
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..'))
from utils.timeit import timeit
def print_field(f):
X,Y = f.shape
for y in range(Y):
for x in range(X):
print(f"{f[x,y] if f[x,y] > 0 else '..':2} ", end='')
print('')
@timeit
def parse_lines(lines):
return [tuple(map(lambda x: int(x), line.split(','))) for line in lines]
@timeit
def make_field(points):
min_x = min(min(points)[0], 0)
max_x = max(points)[0]
min_y = min(min(points, key=itemgetter(1))[1], 0)
max_y = max(points, key=itemgetter(1))[1]
field = np.zeros((max_x + 1, max_y + 1), dtype=int)
return field
@timeit
def infect_full(field, points):
X,Y = field.shape
    def calc_distance(x, y, p):
        # Manhattan distance: sum the axis offsets (the original multiplied them).
        return abs(p[0] - x) + abs(p[1] - y)
    for x in range(X):
        for y in range(Y):
            if field[x, y] != 0:
                continue
            # Accumulate the total distance from this cell to every point.
            for p in range(len(points)):
                field[x, y] += calc_distance(x, y, points[p])
return field
@timeit
def day6_part2(input):
points = parse_lines(input)
field = make_field(points)
infect_full(field, points)
res = len(np.where(field < 10000)[0])
return res
if __name__ == '__main__':
# Argument parsing
if len(sys.argv) <= 1:
sys.exit('No entries given. Answer: 0')
if len(sys.argv) > 2:
sys.exit('Too many arguments.')
# Load file
print('Loading file: {}'.format(sys.argv[1]))
with open(sys.argv[1], 'r') as f:
lines = f.readlines()
print(f'Answer: {day6_part2(lines)}')
avg_line_length: 25.098592 | max_line_length: 86 | alphanum_fraction: 0.571268
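A tiny worked example of the part-2 logic above, summing Manhattan distances to every point; the coordinates and the threshold are invented for illustration (the real puzzle uses a threshold of 10000):

```python
# Worked mini-example of the summed-Manhattan-distance logic above.
points = [(1, 1), (3, 1)]
total = lambda x, y: sum(abs(px - x) + abs(py - y) for px, py in points)
print(total(2, 1))  # 1 + 1 = 2        -> inside a "total distance < 4" region
print(total(0, 3))  # (1+2) + (3+2) = 8 -> outside
```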
hexsha: 4a73a491a3e71b275511be8a6c8a5c86a04c6f4a | size: 2,771 | ext: py | lang: Python
path: coco/scanner.py | repo: alv16106/compiler | head: 810636ce0a66b44467e7874dc75b32e0385917cb | licenses: ["MIT"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
class Token:
def __init__(self, t, val, pos):
self.t = t
self.val = val
self.pos = pos
def __str__(self):
values = (self.t.name, self.val, self.pos)
        return 'Token of type "\033[1m%s\033[0m" with value %s in position %s' % values
class Scanner:
def __init__(self, s, table, EOF):
self.buf = s
self.bufLen = len(s)
self.pos = 0
self.lines = s.splitlines()
self.line = 0
self.tokens = []
self.scanTable = table
self.errors = []
self.ignore = set([chr(9), chr(10), chr(13), " "])
self.EOF = EOF
def get_token(self):
if self.pos >= self.bufLen:
t = (self.EOF, "EOF", self.bufLen)
return Token(*t)
        # Bounds check prevents an IndexError on trailing whitespace.
        while self.pos < self.bufLen and self.buf[self.pos] in self.ignore:
self.pos += 1
accepted = []
start = self.pos
while True:
if self.pos < self.bufLen:
c = self.buf[self.pos]
n = self.scanTable.setMove(c)
# Move to next character
self.pos += 1
# if we can continue without error
if n:
                    # See if any reached state is accepting; the original
                    # assignment only kept the result for the last state.
                    good = None
                    for state in n:
                        if state.accepting:
                            good = state
                            break
if good:
t = (good.pertenency, self.buf[start:self.pos], self.pos)
# we found a token, add to memory and search for more
if not self.scanTable.setCanMove(self.peek()):
token = Token(*t)
self.scanTable.reset()
self.tokens.append(token)
return token
                        # There may be a longer match; remember this one and keep scanning.
                        accepted.append(t)
else:
# if we already had an acceptance state before, rollback
if accepted:
token = Token(*accepted.pop())
self.scanTable.reset()
self.pos = token.pos
self.tokens.append(token)
return token
                    self.errors.append('Error at pos: %d' % self.pos)
                    print('error: nothing matched')
else:
t = (self.EOF, "EOF", self.bufLen)
return Token(*t)
def peek(self):
if self.pos < self.bufLen:
return self.buf[self.pos]
return 'EOF'
def ignoreUntil(self, string):
while self.buf[self.pos] != string:
print('ignoring', self.buf[self.pos])
self.pos += 1
avg_line_length: 32.988095 | max_line_length: 86 | alphanum_fraction: 0.447492
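A minimal sketch of driving the Scanner above with a stub transition table. The `setMove`/`setCanMove`/`reset` semantics and the state attributes (`accepting`, `pertenency`) are inferred from how `get_token` uses them, so this is an assumption, not the project's real table:

```python
# Hypothetical stub table for the Scanner above: a one-state integer
# recognizer. The table API and state attributes are inferred from
# get_token's call sites, not taken from the real project.
from enum import Enum
from types import SimpleNamespace

class TokenType(Enum):
    INT = 1
    EOF = 2

class DigitTable:
    def setMove(self, c):
        # Return the set of reachable states; empty means no transition.
        if c.isdigit():
            return [SimpleNamespace(accepting=True, pertenency=TokenType.INT)]
        return []

    def setCanMove(self, c):
        # peek() returns the string 'EOF' at end of input.
        return c != 'EOF' and c.isdigit()

    def reset(self):
        pass

scanner = Scanner("123 45", DigitTable(), TokenType.EOF)
print(scanner.get_token())  # Token of type INT with value 123 in position 3
print(scanner.get_token())  # Token of type INT with value 45 in position 6
```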
hexsha: ef2b9abd0cd7ab2c492bcc9dda09a7775be75c8d | size: 10,609 | ext: py | lang: Python
path: kde/stat_tools.py | licenses: ["MIT"]
stars: repo atfienberg/kde @ a94623c382813fabe6e41993f8686a4e511346a8; count 3, events 2021-07-08T01:51:16.000Z to 2022-03-02T00:49:21.000Z
issues/forks: repo godbolt/kde @ f1154dd1a0d5875b4eb117132fee54344f28bab5; counts and event datetimes null
# pylint: disable=line-too-long, invalid-name
from __future__ import absolute_import, division, print_function
__license__ = """MIT License
Copyright (c) 2014-2019 Sebastian Schoenen and Martin Leuermann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import numpy as np
def rebin(a, *args, **kwargs):
"""Rebin ndarray data into a smaller ndarray of the same rank whose
dimensions are factors of the original dimensions. eg. An array with 6
columns and 4 rows can be reduced to have 6,3,2 or 1 columns and 4,2 or 1
rows.
Examples
--------
    >>> a = np.random.rand(6, 4)
    >>> b = rebin(a, 3, 2)
    >>> print(b.shape)
    (3, 2)
    >>> a = np.random.rand(6)
    >>> b = rebin(a, 2)
    >>> print(b.shape)
    (2,)
"""
method = kwargs.get("method", "sum")
verbose = kwargs.get("verbose", False)
shape = a.shape
lenShape = len(shape)
factor = np.asarray(shape) / np.asarray(args) # pylint: disable=unused-variable
evList = (
['a.reshape('] +
['args[%d],factor[%d],'%(i, i) for i in range(lenShape)] +
[')'] + ['.sum(%d)'%(i+1) for i in range(lenShape)]
)
if method == "sum":
pass
elif method == "average":
evList += ['/factor[%d]'%i for i in range(lenShape)]
else:
raise AttributeError("method: %s not defined" % method)
evStr = ''.join(evList)
if verbose:
print(evStr)
return eval(evStr) # pylint: disable=eval-used
def covariance_form(point, mean, cov):
"""Calculate 2D map of covariance form (2D quadratic approximation to
-2lnL)
"""
cov_inv = np.linalg.inv(cov)
diff = point - mean
stats = []
for y_i in range(len(diff)):
current_y = []
for x_i in range(len(diff[y_i])):
a = np.matrix(diff[y_i][x_i])
current_y.append((a * cov_inv * a.transpose()).item(0))
stats.append(current_y)
return np.array(stats)
def estimate_cov_from_contour(xaxis, yaxis, zmesh, point):
"""Calculate estimate of covariance matrix from 2D Hessian of -2lnL
Note:
RectBivariateSpline expects zmesh to have shape (len(xaxis), len(yaxis))
but my mesh has shape (len(yaxis), len(xaxis)) thus everything is mirrored
"""
from scipy.interpolate import RectBivariateSpline
x, y = point
spline = RectBivariateSpline(yaxis, xaxis, np.asarray(zmesh))
dx2 = 0.5 * spline(y, x, mth=None, dx=0, dy=2, grid=False)
dy2 = 0.5 * spline(y, x, mth=None, dx=2, dy=0, grid=False)
dxdy = 0.5 * spline(y, x, mth=None, dx=1, dy=1, grid=False)
hessian = np.matrix([[dx2, dxdy], [dxdy, dy2]])
cov = np.linalg.inv(hessian)
return cov
def interpolate_statistic(xaxis, yaxis, zmesh, xaxis_new, yaxis_new):
"""Calculate 2D spline surface of -2lnL test-statistic.
The same spline is used to calculate derivatives in
"estimate_cov_from_contour(xaxis, yaxis, zmesh, point)"
Note:
RectBivariateSpline expects zmesh to have shape (len(xaxis), len(yaxis))
but my mesh has shape (len(yaxis), len(xaxis))
thus everything is mirrored
"""
from scipy.interpolate import RectBivariateSpline
spline = RectBivariateSpline(yaxis, xaxis, np.asarray(zmesh))
stats = [[spline(yaxis_new[yi], xaxis_new[xi], mth=None, dx=0, dy=0, grid=False)
for xi in range(len(xaxis_new))]
for yi in range(len(yaxis_new))]
return np.array(stats)
def wilks_test(profiles):
"""Calculate the compatibility of statistically independent measurements.
Here, we assume that Wilks' theorem holds.
Parameters
----------
profiles : list of (x, y, llh) for different measurements
"""
    # scipy.stats.chisqprob was removed in SciPy 1.0; the chi-squared
    # survival function is the modern equivalent.
    from scipy.stats import chi2 as chi2_dist
    from scipy.special import erfinv
xmin, xmax = +np.inf, -np.inf
ymin, ymax = +np.inf, -np.inf
for x, y, _ in profiles:
xmin_, xmax_ = np.min(x), np.max(x)
if xmin_ < xmin:
xmin = xmin_
if xmax_ > xmax:
xmax = xmax_
ymin_, ymax_ = np.min(y), np.max(y)
if ymin_ < ymin:
ymin = ymin_
if ymax_ > ymax:
ymax = ymax_
x = np.linspace(xmin, xmax, 1000)
y = np.linspace(ymin, ymax, 1000)
sum_llhs = 0
for xpar, ypar, llhs in profiles:
sum_llhs += interpolate_statistic(xpar, ypar, llhs, x, y)
chi2 = np.min(sum_llhs)
ndof = 2 * (len(profiles) - 1)
    pvalue = chi2_dist.sf(chi2, ndof)
nsigma = erfinv(1 - pvalue) * np.sqrt(2) # 2-sided significance
return (chi2, ndof, pvalue, nsigma)
def walds_test(profile1, profile2):
"""Calculate the compatibility of two statistically independent
measurements using normal approximation (Wald's method).
    This assumes that the log-likelihood space is approximately elliptical.
Parameters
----------
profile1 : (x,y,llh) for measurement 1
profile2 : (x,y,llh) for measurement 2
"""
    # scipy.stats.chisqprob was removed in SciPy 1.0; the chi-squared
    # survival function is the modern equivalent.
    from scipy.stats import chi2 as chi2_dist
    from scipy.special import erfinv
bestfits, covariances = [], []
for x, y, llhs in [profile1, profile2]:
idx_min = np.unravel_index(llhs.argmin(), llhs.shape)
bestfit = x[idx_min[1]], y[idx_min[0]]
bestfits.append(bestfit)
covariance = estimate_cov_from_contour(x, y, llhs, bestfit)
covariances.append(covariance)
diff = np.matrix(bestfits[0]) - np.matrix(bestfits[1])
cov_inv = np.linalg.inv(covariances[0] + covariances[1])
chi2 = diff*cov_inv*diff.transpose()
ndof = 2
    pvalue = chi2_dist.sf(chi2, ndof)
nsigma = erfinv(1-pvalue) * np.sqrt(2) # 2-sided significance
return (chi2, ndof, pvalue, nsigma)
def _weighted_quantile_arg(values, weights, q=0.5):
indices = np.argsort(values)
sorted_indices = np.arange(len(values))[indices]
medianidx = (weights[indices].cumsum()/weights[indices].sum()).searchsorted(q)
if (medianidx >= 0) and (medianidx < len(values)):
return sorted_indices[medianidx]
return np.nan
def weighted_quantile(values, weights, q=0.5):
if len(values) != len(weights):
raise ValueError("shape of `values` and `weights` doesn't match!")
    index = _weighted_quantile_arg(values, weights, q=q)
    # `index != np.nan` is always True (NaN compares unequal to everything),
    # so test with np.isnan instead.
    if not np.isnan(index):
        return values[index]
    return np.nan
def weighted_median(values, weights):
return weighted_quantile(values, weights, q=0.5)
def weighted_cov(m, y=None, weights=None, bias=0):
"""Estimate a (weighted) covariance matrix, given data.
Covariance indicates the level to which two variables vary together.
If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
then the covariance matrix element :math:`C_{ij}` is the covariance of
:math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
of :math:`x_i`.
Parameters
----------
m : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables.
y : array_like, optional
An additional set of variables and observations. `y` has the same
form as that of `m`.
weights : array_like, optional
A 1-D array containing the weights of the data points. This option
should be used if data points have different weights in order to
calculate the weighted covariance.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations given (unbiased estimate). If `bias` is 1, then
normalization is by ``N``.
Returns
-------
out : ndarray
The covariance matrix of the variables.
Examples
--------
Consider two variables, :math:`x_0` and :math:`x_1`, which
correlate perfectly, but in opposite directions:
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> x
array([[0, 1, 2],
[2, 1, 0]])
Note how :math:`x_0` increases while :math:`x_1` decreases. The covariance
matrix shows this clearly:
>>> weighted_cov(x)
array([[ 1., -1.],
[-1., 1.]])
Note that element :math:`C_{0,1}`, which shows the correlation between
:math:`x_0` and :math:`x_1`, is negative.
Further, note how `x` and `y` are combined:
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> X = np.vstack((x,y))
>>> print(weighted_cov(X))
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print(weighted_cov(x, y))
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print(weighted_cov(x))
11.71
"""
X = np.array(m, ndmin=2, dtype=float)
if X.size == 0:
# handle empty arrays
return np.array(m)
axis = 0
tup = (slice(None), np.newaxis)
N = X.shape[1]
if weights is not None:
weights = np.asarray(weights)/np.sum(weights)
if len(weights) != N:
raise ValueError("unequal dimension of `data` and `weights`.")
if y is not None:
y = np.array(y, copy=False, ndmin=2, dtype=float)
X = np.concatenate((X, y), axis)
X -= np.average(X, axis=1-axis, weights=weights)[tup]
if bias == 0:
if weights is not None:
fact = np.sum(weights) / (np.sum(weights)**2 - np.sum(weights**2))
else:
fact = 1 / (N - 1)
else:
if weights is not None:
fact = 1 / np.sum(weights)
else:
fact = 1 / N
if weights is not None:
return (np.dot(weights * X, X.T.conj()) * fact).squeeze()
return (np.dot(X, X.T.conj()) * fact).squeeze()
avg_line_length: 31.480712 | max_line_length: 84 | alphanum_fraction: 0.62758
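A short usage sketch for the weighted quantile helpers above; the data values and weights are invented for illustration:

```python
# Usage sketch for weighted_quantile/weighted_median above.
import numpy as np

values = np.array([1.0, 2.0, 3.0, 4.0])
weights = np.array([1.0, 1.0, 1.0, 5.0])

# A dominant weight on 4.0 pulls the weighted median up to 4.0.
print(weighted_median(values, weights))              # 4.0
# Equal weights recover an ordinary index-based median convention.
print(weighted_quantile(values, np.ones(4), q=0.5))  # 2.0
```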
hexsha: dd510374c1b4679771fac16fc6e4057278d95284 | size: 15,692 | ext: py | lang: Python
path: videoanalyst/engine/tester/tester_impl/davis.py | repo: yutliu/betterSAT | head: fb983f43b12352f9ee6ae40b4e0954f6ba502fb8 | licenses: ["MIT"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
# -*- coding: utf-8 -*-
import copy
import itertools
import math
import os
from os import makedirs
from os.path import isdir, join
import cv2
import numpy as np
from loguru import logger
from PIL import Image
from tqdm import tqdm
import torch
import torch.multiprocessing as mp
from torch.multiprocessing import Manager, Pool
from videoanalyst.evaluation import davis_benchmark
from videoanalyst.utils import ensure_dir
from ..tester_base import TRACK_TESTERS, VOS_TESTERS, TesterBase
@VOS_TESTERS.register
class DAVISTester(TesterBase):
r"""
    Tester for the DAVIS2017 dataset; results are saved as follows:
exp_dir/logs/$dataset_name$/$tracker_name/
|-baseline/$video_name$/ folder of result files
|-eval_result.csv evaluation result file
Hyper-parameters
----------------
device_num: int
number of gpu for test
data_root: str
davis2017 dataset root directory. dict(dataset_name: path_to_root)
dataset_names: str
        dataset name (DAVIS2017)
save_video: bool
save videos with predicted mask overlap for visualization and debug
save_patch: bool
"""
extra_hyper_params = dict(device_num=1,
data_root="datasets/DAVIS",
dataset_names=[
"DAVIS2017",
],
save_video=False,
save_patch=False)
def __init__(self, *args, **kwargs):
r"""
        Create tester with config and pipeline
Arguments
---------
cfg: CfgNode
parent config, (e.g. model / pipeline / tester)
pipeline: PipelineBase
pipeline to test
"""
super(DAVISTester, self).__init__(*args, **kwargs)
self._state['speed'] = -1
self.iou_eval_thres = np.arange(0.3, 0.5, 0.05)
def test(self):
r"""
Run test
"""
# set dir
self.tracker_name = self._hyper_params["exp_name"]
for dataset_name in self._hyper_params["dataset_names"]:
self.dataset_name = dataset_name
self.tracker_dir = os.path.join(self._hyper_params["exp_save"],
self.dataset_name)
self.save_root_dir = os.path.join(self.tracker_dir,
self.tracker_name, "baseline")
ensure_dir(self.save_root_dir)
# track videos
self.run_tracker()
# evaluation
eval_result = self.evaluation('default_hp')
return dict(main_performance=eval_result["JF"])
def run_tracker(self):
"""
Run self.pipeline on DAVIS
"""
num_gpu = self._hyper_params["device_num"]
all_devs = [torch.device("cuda:%d" % i) for i in range(num_gpu)]
        logger.info('running test on devices {}'.format(all_devs))
davis_root = self._hyper_params["data_root"]
logger.info('Using dataset %s at: %s' % (self.dataset_name, davis_root))
# setup dataset
dataset = davis_benchmark.load_dataset(davis_root, self.dataset_name) # OrderedDict:30 bike-packing, blackswan, bmx-trees...
self.dataset = dataset
keys = list(dataset.keys()) # list:30
keys.sort()
nr_records = len(keys)
pbar = tqdm(total=nr_records)
mean_speed = -1
speed_list = []
manager = Manager()
speed_queue = manager.Queue(500)
# set worker
if num_gpu == 0:
self.worker(keys, all_devs[0], self.dataset, speed_queue)
for i in range(nr_records):
s = speed_queue.get()
speed_list.append(s)
pbar.update(1)
else:
            nr_video = math.ceil(nr_records / num_gpu)  # number of test videos handled per GPU
procs = []
for i in range(num_gpu):
                start = i * nr_video
                end = min(start + nr_video, nr_records)  # e.g. with 2 GPUs: 30 / 2 = 15 videos each
split_records = keys[start:end]
proc = mp.Process(target=self.worker,
args=(split_records, all_devs[i],
self.dataset, speed_queue))
logger.info('process:%d, start:%d, end:%d' % (i, start, end))
proc.start()
procs.append(proc)
for i in range(nr_records):
s = speed_queue.get()
speed_list.append(s)
pbar.update(1)
for p in procs:
p.join()
# print result
mean_speed = float(np.mean(speed_list))
logger.info('Mean Speed: {:.2f} FPS'.format(mean_speed))
self._state['speed'] = mean_speed
def worker(self, records, dev, dataset, speed_queue=None):
tracker = self._pipeline
tracker.set_device(dev)
for v_id, video in enumerate(records):
speed = self.track_single_video_vos(tracker, dataset[video])
if speed_queue is not None:
speed_queue.put_nowait(speed)
def evaluation(self, search_task_name):
r"""
Run evaluation & write result to csv file under self.tracker_dir
"""
results_path = join(self.save_root_dir, 'results_multi')
davis_data_path = self._hyper_params["data_root"]
eval_dump_path = join(self.save_root_dir, 'dump')
if not isdir(eval_dump_path): makedirs(eval_dump_path)
csv_name_global_path = join(eval_dump_path,
search_task_name + '_global_results.csv')
csv_name_per_sequence_path = join(
eval_dump_path, search_task_name + '_name_per_sequence_results.csv')
version = self.dataset_name[-4:]
hp_dict = {}
return davis_benchmark.davis2017_eval(davis_data_path,
results_path,
csv_name_global_path,
csv_name_per_sequence_path,
hp_dict,
version=version)
# video = dict[3]: anno_files, image_files, name
def track_single_video_vos(self, tracker, video, mot_enable=True):
'''
        perform semi-supervised video object segmentation for a single video
:param tracker: tracker pipeline
:param video: video info
:param mot_enable: if true, perform instance level segmentation on davis, otherwise semantic
'''
image_files = video['image_files']
annos = [np.array(Image.open(x)) for x in video['anno_files']]
if 'anno_init_files' in video:
annos_init = [
np.array(Image.open(x)) for x in video['anno_init_files']
]
else:
annos_init = [annos[0]] # mask np.array[h, w] :0, 1...
if not mot_enable:
annos = [(anno > 0).astype(np.uint8) for anno in annos]
annos_init = [(anno_init > 0).astype(np.uint8)
for anno_init in annos_init]
if 'start_frame' in video:
object_ids = [int(id) for id in video['start_frame']]
else:
            object_ids = [o_id for o_id in np.unique(annos[0]) if o_id != 0]  # ids of the objects to be segmented in this video
if len(object_ids) != len(annos_init):
annos_init = annos_init * len(object_ids) # list[2]
object_num = len(object_ids)
toc = 0
        pred_masks = np.zeros((object_num, len(image_files), annos[0].shape[0],
                               annos[0].shape[1])) - 1  # predicted masks for every frame, np.array[object_num, num_imgs, h, w]
if self._hyper_params['save_video']:
track_boxes = np.zeros((object_num, len(image_files), 4))
track_mask_boxes = np.zeros((object_num, len(image_files), 4))
track_mask_score = np.zeros((object_num, len(image_files)))
track_score = np.zeros((object_num, len(image_files)))
state_score = np.zeros((object_num, len(image_files)))
if self._hyper_params['save_patch']:
patch_list = []
for obj_id, o_id in enumerate(object_ids): # list[object_num], [1, 2, ...]
obj_patch_list = []
logger.info('{} th object in video {}'.format(o_id, video['name']))
if 'start_frame' in video:
start_frame = video['start_frame'][str(o_id)]
end_frame = video['end_frame'][str(o_id)]
else:
start_frame, end_frame = 0, len(image_files)
            for f, image_file in enumerate(tqdm(image_files)):  # image_files: list of image file paths
im = cv2.imread(image_file)
img_h, img_w = im.shape[0], im.shape[1]
tic = cv2.getTickCount()
if f == start_frame: # init
mask = (annos_init[obj_id] == o_id).astype(np.uint8) # np.array[h, w]: 0, 1
x, y, w, h = cv2.boundingRect((mask).astype(np.uint8))
tracker.init(im, np.array([x, y, w, h]), mask)
elif end_frame >= f > start_frame: # tracking
mask = tracker.update(im) # np.array[h, w]
if self._hyper_params['save_video']:
rect_mask = tracker._state['mask_rect']
mask_score = tracker._state['conf_score']
track_boxes[obj_id, f, :] = tracker._state['track_box']
track_mask_boxes[obj_id, f, :] = rect_mask
track_mask_score[obj_id, f] = mask_score
track_score[obj_id, f] = tracker._state["track_score"]
state_score[obj_id, f] = tracker._state["state_score"]
if self._hyper_params['save_patch']:
patch = tracker._state['patch_prediction']
obj_patch_list.append(patch)
toc += cv2.getTickCount() - tic
if end_frame >= f >= start_frame:
pred_masks[obj_id, f, :, :] = mask
if self._hyper_params['save_patch']:
patch_list.append(obj_patch_list)
toc /= cv2.getTickFrequency()
if len(annos) == len(image_files):
multi_mean_iou = davis_benchmark.MultiBatchIouMeter(
self.iou_eval_thres,
pred_masks,
annos,
start=video['start_frame'] if 'start_frame' in video else None,
end=video['end_frame'] if 'end_frame' in video else None)
for i in range(object_num):
for j, thr in enumerate(self.iou_eval_thres):
logger.info(
'Fusion Multi Object{:20s} IOU at {:.2f}: {:.4f}'.format(
video['name'] + '_' + str(i + 1), thr,
multi_mean_iou[i, j]))
if self._hyper_params['save_patch']:
video_path = join(self.save_root_dir, 'patches', video['name'])
logger.info('save patches path: {}'.format(video_path))
if not isdir(video_path): makedirs(video_path)
for i in range(len(patch_list)):
patch_images = patch_list[i]
for frame_id, patch_image in enumerate(patch_images):
cv2.imwrite(
join(video_path, 'obj_{}_{}.png'.format(i, frame_id)),
patch_image)
video_path = join(self.save_root_dir, 'results_multi', video['name'])
logger.info('save mask path:{}'.format(video_path))
if not isdir(video_path): makedirs(video_path)
pred_mask_final = np.array(pred_masks)
pred_mask_final = (
np.argmax(pred_mask_final, axis=0).astype('uint8') +
1) * (np.max(pred_mask_final, axis=0) >
tracker._hyper_params['mask_pred_thresh']).astype('uint8')
for i in range(pred_mask_final.shape[0]):
mask_label = pred_mask_final[i].astype(np.uint8)
cv2.imwrite(
join(video_path,
image_files[i].split('/')[-1].split('.')[0] + '.png'),
mask_label)
logger.info(
'({:d}) Video: {:12s} Time: {:02.1f}s Speed: {:3.1f}fps'.format(
o_id, video['name'], toc,
f * len(object_ids) / toc))
speed = f * len(object_ids) / toc
logger.info("{} speed: {}".format(video['name'], speed))
if self._hyper_params['save_video']:
video_path = join(self.save_root_dir, 'save_video')
if not isdir(video_path): makedirs(video_path)
logger.info('save video as : {}'.format(video_path))
VideoOut = cv2.VideoWriter(
video_path + '/' + video['name'] + '.avi',
cv2.VideoWriter_fourcc(*'MJPG'), 10.0, (img_w, img_h))
for f, image_file in enumerate(image_files):
img = cv2.imread(image_file)
mask_f = pred_mask_final[f, :, :]
img = davis_benchmark.overlay_semantic_mask(img,
mask_f,
alpha=0.3,
contour_thickness=1)
for i in range(object_num):
rect = track_boxes[i, f]
rect = [int(l) for l in rect]
rect_mask = track_mask_boxes[i, f]
rect_mask = [int(l) for l in rect_mask]
mask_score = round(track_mask_score[i, f], 2)
track_score_ = round(track_score[i, f], 2)
state_score_ = round(state_score[i, f], 2)
color = davis_benchmark.labelcolormap(object_num + 1)[i + 1]
color_tuple = (int(color[0]), int(color[1]), int(color[2]))
cv2.putText(img,
'Frame : ' + str(f), (10, 20),
cv2.FONT_HERSHEY_SIMPLEX,
0.6, (255, 255, 255),
thickness=2)
cv2.rectangle(img, (rect[0], rect[1]),
(rect[0] + rect[2], rect[1] + rect[3]),
color=color_tuple,
thickness=2)
if rect_mask[0] > 0:
cv2.rectangle(img, (rect_mask[0], rect_mask[1]),
(rect_mask[0] + rect_mask[2],
rect_mask[1] + rect_mask[3]),
color=(255, 255, 255),
thickness=2)
if f > 0:
cv2.putText(img,
'M {} T{} S {}'.format(
mask_score, track_score_, state_score_),
(rect[0], max(rect[1], 5) + 10),
cv2.FONT_HERSHEY_SIMPLEX,
0.5,
color=(0, 0, 255),
thickness=2)
VideoOut.write(img)
VideoOut.release()
return speed
DAVISTester.default_hyper_params = copy.deepcopy(
DAVISTester.default_hyper_params)
DAVISTester.default_hyper_params.update(DAVISTester.extra_hyper_params)
avg_line_length: 43.22865 | max_line_length: 132 | alphanum_fraction: 0.52001
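A hypothetical configuration for the DAVISTester above, built from its declared `extra_hyper_params`; the key names come from the class itself, but the values here are illustrative only:

```python
# Illustrative hyper-parameter dict mirroring DAVISTester.extra_hyper_params.
hyper_params = dict(
    device_num=2,                   # spread the test videos over two GPUs
    data_root="datasets/DAVIS",     # DAVIS dataset root directory
    dataset_names=["DAVIS2017"],
    save_video=False,               # overlay predicted masks for debugging
    save_patch=False,
)
```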
hexsha: 25f99dbbb459e637966ecb8f8643538c930f1f4f | size: 10,651 | ext: py | lang: Python
path: dev/services/wms/ows_refactored/fc/style_fc_cfg.py | repo: MatthewJA/dea-config | head: a843dbaf3e283a626791783d7318737932c2ab6f | licenses: ["Apache-2.0"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
from ows_refactored.ows_legend_cfg import (
legend_idx_0_100_pixel_fc_25ticks,
legend_idx_0_100_pixel_fc_bs_25ticks,
legend_idx_0_100_pixel_fc_ngv_25ticks,
)
style_fc_simple_rgb = {
"name": "simple_rgb",
"title": "Simple RGB",
"abstract": "Simple true-colour image, using the red, green and blue bands",
"components": {
"red": {"BS_PC_50": 1.0},
"green": {"PV_PC_50": 1.0},
"blue": {"NPV_PC_50": 1.0},
},
"scale_range": [0.0, 100.0],
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_simple = {
"name": "simple_fc",
"title": "Fractional Cover",
"abstract": "Fractional cover representation, with green vegetation in green, dead vegetation in blue, and bare soil in red",
"components": {"red": {"BS": 1.0}, "green": {"PV": 1.0}, "blue": {"NPV": 1.0}},
"scale_range": [0.0, 100.0],
"pq_masks": [
{
"flags": {"dry": True},
},
{
"flags": {
"terrain_or_low_angle": False,
"high_slope": False,
"cloud_shadow": False,
"cloud": False,
"sea": False,
}
},
],
}
style_fc_gv_10 = {
"name": "green_veg_10",
"title": "10th Percentile",
"abstract": "10th Percentile of Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "PV_PC_10",
},
},
"include_in_feature_info": False,
"needed_bands": ["PV_PC_10"],
"color_ramp": [
{
"value": 0,
"color": "#ffffcc",
},
{
"value": 25,
"color": "#c2e699",
},
{
"value": 50,
"color": "#78c679",
},
{
"value": 75,
"color": "#31a354",
},
{
"value": 100,
"color": "#006837",
},
],
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
"legend": legend_idx_0_100_pixel_fc_25ticks,
}
style_fc_gv_50 = {
"name": "green_veg_50",
"title": "50th Percentile",
"abstract": "50th Percentile of Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "PV_PC_50",
},
},
"include_in_feature_info": False,
"needed_bands": ["PV_PC_50"],
"color_ramp": [
{"value": 0, "color": "#ffffcc"},
{"value": 25, "color": "#c2e699"},
{"value": 50, "color": "#78c679"},
{"value": 75, "color": "#31a354"},
{"value": 100, "color": "#006837"},
],
# old behaviour was wrong. This is what Leo and Emma requested
"legend": legend_idx_0_100_pixel_fc_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_gv_90 = {
"name": "green_veg_90",
"title": "90th Percentile",
"abstract": "90th Percentile of Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "PV_PC_90",
},
},
"include_in_feature_info": False,
"needed_bands": ["PV_PC_90"],
"color_ramp": [
{"value": 0, "color": "#ffffcc"},
{"value": 25, "color": "#c2e699"},
{"value": 50, "color": "#78c679"},
{"value": 75, "color": "#31a354"},
{"value": 100, "color": "#006837"},
],
# old behaviour was wrong. This is what Leo and Emma requested
"legend": legend_idx_0_100_pixel_fc_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_ngv_10 = {
"name": "non_green_veg_10",
"title": "10th Percentile",
"abstract": "10th Percentile of Non Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "NPV_PC_10",
},
},
"include_in_feature_info": False,
"needed_bands": ["NPV_PC_10"],
"color_ramp": [
{
"value": 0,
"color": "#ffffd4",
},
{"value": 25, "color": "#fed98e", "legend": {}},
{
"value": 50,
"color": "#fe9929",
},
{
"value": 75,
"color": "#d95f0e",
},
{
"value": 100,
"color": "#993404",
},
],
# Emulates what we had previously
"legend": legend_idx_0_100_pixel_fc_ngv_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_ngv_50 = {
"name": "non_green_veg_50",
"title": "50th Percentile",
"abstract": "50th Percentile of Non Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "NPV_PC_50",
},
},
"include_in_feature_info": False,
"needed_bands": ["NPV_PC_50"],
"color_ramp": [
{"value": 0, "color": "#ffffd4"},
{"value": 25, "color": "#fed98e"},
{"value": 50, "color": "#fe9929"},
{"value": 75, "color": "#d95f0e"},
{"value": 100, "color": "#993404"},
],
# old behaviour was wrong. This is what Leo and Emma requested
"legend": legend_idx_0_100_pixel_fc_ngv_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_ngv_90 = {
"name": "non_green_veg_90",
"title": "90th Percentile",
"abstract": "90th Percentile of Non Green Vegetation",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "NPV_PC_90",
},
},
"include_in_feature_info": False,
"needed_bands": ["NPV_PC_90"],
"color_ramp": [
{"value": 0, "color": "#ffffd4"},
{"value": 25, "color": "#fed98e"},
{"value": 50, "color": "#fe9929"},
{"value": 75, "color": "#d95f0e"},
{"value": 100, "color": "#993404"},
],
# old behaviour was wrong. This is what Leo and Emma requested
"legend": legend_idx_0_100_pixel_fc_ngv_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_bs_10 = {
"name": "bare_ground_10",
"title": "10th Percentile",
"abstract": "10th Percentile of Bare Soil",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "BS_PC_10",
},
},
"include_in_feature_info": False,
"needed_bands": ["BS_PC_10"],
"color_ramp": [
{
"value": 0,
"color": "#feebe2",
},
{
"value": 25,
"color": "#fbb4b9",
},
{
"value": 50,
"color": "#f768a1",
},
{
"value": 75,
"color": "#c51b8a",
},
{
"value": 100,
"color": "#7a0177",
},
],
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
# Emulates what we had previously
"legend": legend_idx_0_100_pixel_fc_bs_25ticks,
}
style_fc_bs_50 = {
"name": "bare_ground_50",
"title": "50th Percentile",
"abstract": "50th Percentile of Bare Soil",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "BS_PC_50",
},
},
"include_in_feature_info": False,
"needed_bands": ["BS_PC_50"],
"color_ramp": [
{"value": 0, "color": "#feebe2"},
{"value": 25, "color": "#fbb4b9"},
{"value": 50, "color": "#f768a1"},
{"value": 75, "color": "#c51b8a"},
{"value": 100, "color": "#7a0177"},
],
# Old behaviour was wrong - this is what Leo and Emma have requested.
"legend": legend_idx_0_100_pixel_fc_bs_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_bs_90 = {
"name": "bare_ground_90",
"title": "90th Percentile",
"abstract": "90th Percentile of Bare Soil",
"index_function": {
"function": "datacube_ows.band_utils.single_band",
"mapped_bands": True,
"kwargs": {
"band": "BS_PC_90",
},
},
"include_in_feature_info": False,
"needed_bands": ["BS_PC_90"],
"color_ramp": [
{"value": 0, "color": "#feebe2"},
{"value": 25, "color": "#fbb4b9"},
{"value": 50, "color": "#f768a1"},
{"value": 75, "color": "#c51b8a"},
{"value": 100, "color": "#7a0177"},
],
# Old behaviour was wrong - this is what Leo and Emma have requested.
"legend": legend_idx_0_100_pixel_fc_bs_25ticks,
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
}
style_fc_rgb = {
"name": "fc_rgb",
"title": "Three-band fractional cover",
"abstract": "Fractional cover medians - red is bare soil, green is green vegetation and blue is non-green vegetation",
"components": {
"red": {"BS_PC_50": 1.0},
"green": {"PV_PC_50": 1.0},
"blue": {"NPV_PC_50": 1.0},
},
"scale_range": [0.0, 100.0],
"pq_masks": [
{
"flags": {
"sea": True,
},
"invert": True,
},
],
"legend": {
"show_legend": True,
"url": "https://data.dea.ga.gov.au/fractional-cover/FC_legend.png",
},
}
styles_fc_gv_list = [
style_fc_gv_10,
style_fc_gv_50,
style_fc_gv_90,
]
styles_fc_ngv_list = [
style_fc_ngv_10,
style_fc_ngv_50,
style_fc_ngv_90,
]
styles_fc_bare_list = [
style_fc_bs_10,
style_fc_bs_50,
style_fc_bs_90,
]
avg_line_length: 25.420048 | max_line_length: 129 | alphanum_fraction: 0.477608
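The `color_ramp` entries in the styles above define value/colour stops, and datacube-ows renders intermediate values by blending between neighbouring stops. A rough illustrative sketch of that idea, not the library's actual ramp code:

```python
# Illustrative linear interpolation between two colour-ramp stops; this
# is a sketch of the concept, not datacube-ows's implementation.
def lerp_hex(c0, c1, t):
    """Blend two #rrggbb colours; t in [0, 1]."""
    rgb0 = [int(c0[i:i + 2], 16) for i in (1, 3, 5)]
    rgb1 = [int(c1[i:i + 2], 16) for i in (1, 3, 5)]
    return "#" + "".join(
        "{:02x}".format(round(a + (b - a) * t)) for a, b in zip(rgb0, rgb1))

# Halfway between the 0 and 25 stops of the green-vegetation ramp:
print(lerp_hex("#ffffcc", "#c2e699", 0.5))  # -> "#e0f2b2"
```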
hexsha: afc6783a0bb0ea7be6d5fdc1a2c2d0cc8764dac9 | size: 8,342 | ext: py | lang: Python
path: homeassistant/components/xiaomi_miio/__init__.py | repo: marcelblijleven/core | head: 6d13466f8a9b157609227046e5ee542d1a261d0f | licenses: ["Apache-2.0"]
stars/issues/forks: path, repo, and head identical across all three; counts and event datetimes all null
"""Support for Xiaomi Miio."""
from datetime import timedelta
import logging
import async_timeout
from miio import AirHumidifier, AirHumidifierMiot, DeviceException
from miio.gateway.gateway import GatewayException
from homeassistant import config_entries, core
from homeassistant.const import CONF_HOST, CONF_TOKEN
from homeassistant.core import callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_AVAILABLE,
CONF_DEVICE,
CONF_FLOW_TYPE,
CONF_GATEWAY,
CONF_MODEL,
DOMAIN,
KEY_COORDINATOR,
KEY_DEVICE,
KEY_MIGRATE_ENTITY_NAME,
MODELS_AIR_MONITOR,
MODELS_FAN,
MODELS_HUMIDIFIER,
MODELS_HUMIDIFIER_MIOT,
MODELS_LIGHT,
MODELS_SWITCH,
MODELS_VACUUM,
)
from .gateway import ConnectXiaomiGateway
_LOGGER = logging.getLogger(__name__)
GATEWAY_PLATFORMS = ["alarm_control_panel", "light", "sensor", "switch"]
SWITCH_PLATFORMS = ["switch"]
FAN_PLATFORMS = ["fan"]
HUMIDIFIER_PLATFORMS = ["humidifier", "number", "select", "sensor", "switch"]
LIGHT_PLATFORMS = ["light"]
VACUUM_PLATFORMS = ["vacuum"]
AIR_MONITOR_PLATFORMS = ["air_quality", "sensor"]
async def async_setup_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Miio components from a config entry."""
hass.data.setdefault(DOMAIN, {})
if entry.data[
CONF_FLOW_TYPE
] == CONF_GATEWAY and not await async_setup_gateway_entry(hass, entry):
return False
return bool(
entry.data[CONF_FLOW_TYPE] != CONF_DEVICE
or await async_setup_device_entry(hass, entry)
)
@callback
def get_platforms(config_entry):
"""Return the platforms belonging to a config_entry."""
model = config_entry.data[CONF_MODEL]
flow_type = config_entry.data[CONF_FLOW_TYPE]
if flow_type == CONF_GATEWAY:
return GATEWAY_PLATFORMS
if flow_type == CONF_DEVICE:
if model in MODELS_SWITCH:
return SWITCH_PLATFORMS
if model in MODELS_HUMIDIFIER:
return HUMIDIFIER_PLATFORMS
if model in MODELS_FAN:
return FAN_PLATFORMS
if model in MODELS_LIGHT:
return LIGHT_PLATFORMS
for vacuum_model in MODELS_VACUUM:
if model.startswith(vacuum_model):
return VACUUM_PLATFORMS
for air_monitor_model in MODELS_AIR_MONITOR:
if model.startswith(air_monitor_model):
return AIR_MONITOR_PLATFORMS
_LOGGER.error(
"Unsupported device found! Please create an issue at "
"https://github.com/syssi/xiaomi_airpurifier/issues "
"and provide the following data: %s",
model,
)
return []
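# For example, a config entry whose CONF_MODEL is a MiOT humidifier such as
# "zhimi.humidifier.ca4" (an illustrative model id, assuming it appears in
# MODELS_HUMIDIFIER) resolves to HUMIDIFIER_PLATFORMS above.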
async def async_create_miio_device_and_coordinator(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up a data coordinator and one miio device to service multiple entities."""
model = entry.data[CONF_MODEL]
host = entry.data[CONF_HOST]
token = entry.data[CONF_TOKEN]
name = entry.title
device = None
migrate_entity_name = None
if model not in MODELS_HUMIDIFIER:
return
if model in MODELS_HUMIDIFIER_MIOT:
device = AirHumidifierMiot(host, token)
else:
device = AirHumidifier(host, token, model=model)
    # Remove the fan platform entity for humidifiers and cache the entity name for migration
entity_registry = er.async_get(hass)
entity_id = entity_registry.async_get_entity_id("fan", DOMAIN, entry.unique_id)
if entity_id:
        # This check applies only to entities from the fan platform migration and should be removed in the future
migrate_entity_name = entity_registry.async_get(entity_id).name
entity_registry.async_remove(entity_id)
async def async_update_data():
"""Fetch data from the device using async_add_executor_job."""
try:
async with async_timeout.timeout(10):
return await hass.async_add_executor_job(device.status)
except DeviceException as ex:
raise UpdateFailed(ex) from ex
    # Create the miio device and its update coordinator
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=name,
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(seconds=60),
)
hass.data[DOMAIN][entry.entry_id] = {
KEY_DEVICE: device,
KEY_COORDINATOR: coordinator,
}
if migrate_entity_name:
hass.data[DOMAIN][entry.entry_id][KEY_MIGRATE_ENTITY_NAME] = migrate_entity_name
# Trigger first data fetch
await coordinator.async_config_entry_first_refresh()
async def async_setup_gateway_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Gateway component from a config entry."""
host = entry.data[CONF_HOST]
token = entry.data[CONF_TOKEN]
name = entry.title
gateway_id = entry.unique_id
# For backwards compat
if entry.unique_id.endswith("-gateway"):
hass.config_entries.async_update_entry(entry, unique_id=entry.data["mac"])
entry.async_on_unload(entry.add_update_listener(update_listener))
# Connect to gateway
gateway = ConnectXiaomiGateway(hass, entry)
if not await gateway.async_connect_gateway(host, token):
return False
gateway_info = gateway.gateway_info
gateway_model = f"{gateway_info.model}-{gateway_info.hardware_version}"
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, gateway_info.mac_address)},
identifiers={(DOMAIN, gateway_id)},
manufacturer="Xiaomi",
name=name,
model=gateway_model,
sw_version=gateway_info.firmware_version,
)
def update_data():
"""Fetch data from the subdevice."""
data = {}
for sub_device in gateway.gateway_device.devices.values():
try:
sub_device.update()
except GatewayException as ex:
_LOGGER.error("Got exception while fetching the state: %s", ex)
data[sub_device.sid] = {ATTR_AVAILABLE: False}
else:
data[sub_device.sid] = {ATTR_AVAILABLE: True}
return data
async def async_update_data():
"""Fetch data from the subdevice using async_add_executor_job."""
return await hass.async_add_executor_job(update_data)
# Create update coordinator
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=name,
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(seconds=10),
)
hass.data[DOMAIN][entry.entry_id] = {
CONF_GATEWAY: gateway.gateway_device,
KEY_COORDINATOR: coordinator,
}
for platform in GATEWAY_PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
return True
async def async_setup_device_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Miio device component from a config entry."""
platforms = get_platforms(entry)
await async_create_miio_device_and_coordinator(hass, entry)
if not platforms:
return False
entry.async_on_unload(entry.add_update_listener(update_listener))
hass.config_entries.async_setup_platforms(entry, platforms)
return True
async def async_unload_entry(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Unload a config entry."""
platforms = get_platforms(config_entry)
unload_ok = await hass.config_entries.async_unload_platforms(
config_entry, platforms
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
async def update_listener(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
avg_line_length: 31.961686 | max_line_length: 104 | alphanum_fraction: 0.70211
---
hexsha: 8eaf1e9d2c1edb679c00df94df60d5eba378e295 | size: 172 | ext: py | lang: Python
max_stars_repo: spacy/lang/el/lemmatizer/_nouns_irreg.py | algteam/spacy_zh_model @ 0b0cba1a3964aa426e5f96087849c90e69e2a89d | licenses: ["MIT"] | stars: 5 | events: 2019-04-19T06:27:29.000Z, 2019-12-02T13:30:47.000Z
max_issues_repo: spacy/lang/el/lemmatizer/_nouns_irreg.py | algteam/spacy_zh_model @ 0b0cba1a3964aa426e5f96087849c90e69e2a89d | licenses: ["MIT"] | issues: null | events: null, null
max_forks_repo: spacy/lang/el/lemmatizer/_nouns_irreg.py | algteam/spacy_zh_model @ 0b0cba1a3964aa426e5f96087849c90e69e2a89d | licenses: ["MIT"] | forks: 2 | events: 2019-04-19T06:27:18.000Z, 2019-10-04T12:39:15.000Z
# coding: utf8
from __future__ import unicode_literals
NOUNS_IRREG = {
"λευτεριά": ("ελευθερία",),
"καφέδες": ("καφές",),
"ποιήματα": ("ποίημα",),
}
avg_line_length: 17.2 | max_line_length: 40 | alphanum_fraction: 0.581395
---
hexsha: ef92351d6fcaabb7998206c3dede14471069c4b8 | size: 1,820 | ext: py | lang: Python
max_stars_repo: get_quarter.py | DazEB2/SimplePyScripts @ 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | licenses: ["CC-BY-4.0"] | stars: null | events: null, null
max_issues_repo: get_quarter.py | DazEB2/SimplePyScripts @ 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | licenses: ["CC-BY-4.0"] | issues: null | events: null, null
max_forks_repo: get_quarter.py | DazEB2/SimplePyScripts @ 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | licenses: ["CC-BY-4.0"] | forks: null | events: null, null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import datetime as DT
def get_quarter(month_or_date=None) -> int:
dt = month_or_date
if dt is None:
dt = DT.date.today()
if isinstance(dt, int):
month = dt
else:
month = dt.month
if month in (1, 2, 3):
return 1
elif month in (4, 5, 6):
return 2
elif month in (7, 8, 9):
return 3
elif month in (10, 11, 12):
return 4
else:
raise Exception('Invalid "month": {}'.format(month))
def get_quarter_num(month_or_date=None) -> str:
return ['I', 'II', 'III', 'IV'][get_quarter(month_or_date) - 1]
if __name__ == '__main__':
print(get_quarter())
print()
print(get_quarter(DT.datetime.today()))
print(get_quarter(DT.date.today()))
print()
print(get_quarter_num())
print()
for dt in [DT.date(2018, month=i + 1, day=1) for i in range(12)]:
print(dt, get_quarter(dt))
assert get_quarter(1) == 1
assert get_quarter(2) == 1
assert get_quarter(3) == 1
assert get_quarter(4) == 2
assert get_quarter(5) == 2
assert get_quarter(6) == 2
assert get_quarter(7) == 3
assert get_quarter(8) == 3
assert get_quarter(9) == 3
assert get_quarter(10) == 4
assert get_quarter(11) == 4
assert get_quarter(12) == 4
assert get_quarter_num(1) == "I"
assert get_quarter_num(2) == "I"
assert get_quarter_num(3) == "I"
assert get_quarter_num(4) == "II"
assert get_quarter_num(5) == "II"
assert get_quarter_num(6) == "II"
assert get_quarter_num(7) == "III"
assert get_quarter_num(8) == "III"
assert get_quarter_num(9) == "III"
assert get_quarter_num(10) == "IV"
assert get_quarter_num(11) == "IV"
assert get_quarter_num(12) == "IV"
avg_line_length: 23.636364 | max_line_length: 69 | alphanum_fraction: 0.597253
---
hexsha: 3a248270c0d7d41a5518adbfa8981072c857fef2 | size: 30 | ext: py | lang: Python
max_stars_repo: foliant/meta_commands/draw/__init__.py | foliant-docs/foliantcontrib.project_graph @ 2dae1047908986cbed6a7e53d6b0590e79a2a2fa | licenses: ["MIT"] | stars: null | events: null, null
max_issues_repo: foliant/meta_commands/draw/__init__.py | foliant-docs/foliantcontrib.project_graph @ 2dae1047908986cbed6a7e53d6b0590e79a2a2fa | licenses: ["MIT"] | issues: null | events: null, null
max_forks_repo: foliant/meta_commands/draw/__init__.py | foliant-docs/foliantcontrib.project_graph @ 2dae1047908986cbed6a7e53d6b0590e79a2a2fa | licenses: ["MIT"] | forks: null | events: null, null
from .draw import MetaCommand
avg_line_length: 15 | max_line_length: 29 | alphanum_fraction: 0.833333
---
hexsha: 3d71e4b846ebf1ad6948fd22991dd46b418e8fce | size: 4,628 | ext: py | lang: Python
max_stars_repo: venv/Lib/site-packages/win32com/demos/outlookAddin.py | dasxran/seleniumMachineLearning @ 3098f836913a89847cb9e308189383a4ea981139 | licenses: ["MIT"] | stars: 64 | events: 2020-07-22T06:24:18.000Z, 2022-03-27T10:48:15.000Z
max_issues_repo: venv/Lib/site-packages/win32com/demos/outlookAddin.py | dasxran/seleniumMachineLearning @ 3098f836913a89847cb9e308189383a4ea981139 | licenses: ["MIT"] | issues: 20 | events: 2021-05-03T18:02:23.000Z, 2022-03-12T12:01:04.000Z
max_forks_repo: venv/Lib/site-packages/win32com/demos/outlookAddin.py | dasxran/seleniumMachineLearning @ 3098f836913a89847cb9e308189383a4ea981139 | licenses: ["MIT"] | forks: 18 | events: 2021-11-12T03:15:45.000Z, 2022-03-25T05:29:00.000Z
# A demo plugin for Microsoft Outlook (NOT Outlook Express)
#
# This addin simply adds a new button to the main Outlook toolbar,
# and displays a message box when clicked. Thus, it demonstrates
# how to plug in to Outlook itself, and hook outlook events.
#
# Additionally, each time a new message arrives in the Inbox, a message
# is printed with the subject of the message.
#
# To register the addin, simply execute:
# outlookAddin.py
# This will install the COM server, and write the necessary
# AddIn key to Outlook
#
# To unregister completely:
# outlookAddin.py --unregister
#
# To debug, execute:
# outlookAddin.py --debug
#
# Then open Pythonwin, and select "Tools->Trace Collector Debugging Tool"
# Restart Outlook, and you should see some output generated.
#
# NOTE: If the AddIn fails with an error, Outlook will mark the addin
# so that it is not automatically loaded next time Outlook starts. To
# correct this, simply re-register the addin (see above)
from win32com import universal
from win32com.server.exception import COMException
from win32com.client import gencache, DispatchWithEvents
import winerror
import pythoncom
from win32com.client import constants
import sys
# Support for COM objects we use.
gencache.EnsureModule('{00062FFF-0000-0000-C000-000000000046}', 0, 9, 0, bForDemand=True) # Outlook 9
gencache.EnsureModule('{2DF8D04C-5BFA-101B-BDE5-00AA0044DE52}', 0, 2, 1, bForDemand=True) # Office 9
# The TLB defining the interfaces we implement
universal.RegisterInterfaces('{AC0714F2-3D04-11D1-AE7D-00A0C90F26F4}', 0, 1, 0, ["_IDTExtensibility2"])
class ButtonEvent:
def OnClick(self, button, cancel):
import win32ui # Possible, but not necessary, to use a Pythonwin GUI
win32ui.MessageBox("Hello from Python")
return cancel
class FolderEvent:
def OnItemAdd(self, item):
try:
print("An item was added to the inbox with subject:", item.Subject)
except AttributeError:
print("An item was added to the inbox, but it has no subject! - ", repr(item))
class OutlookAddin:
_com_interfaces_ = ['_IDTExtensibility2']
_public_methods_ = []
_reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER
_reg_clsid_ = "{0F47D9F3-598B-4d24-B7E3-92AC15ED27E2}"
_reg_progid_ = "Python.Test.OutlookAddin"
_reg_policy_spec_ = "win32com.server.policy.EventHandlerPolicy"
def OnConnection(self, application, connectMode, addin, custom):
print("OnConnection", application, connectMode, addin, custom)
        # ActiveExplorer may be None when started without a UI (e.g. WinCE synchronisation)
activeExplorer = application.ActiveExplorer()
if activeExplorer is not None:
bars = activeExplorer.CommandBars
toolbar = bars.Item("Standard")
item = toolbar.Controls.Add(Type=constants.msoControlButton, Temporary=True)
# Hook events for the item
item = self.toolbarButton = DispatchWithEvents(item, ButtonEvent)
item.Caption="Python"
item.TooltipText = "Click for Python"
item.Enabled = True
# And now, for the sake of demonstration, setup a hook for all new messages
inbox = application.Session.GetDefaultFolder(constants.olFolderInbox)
self.inboxItems = DispatchWithEvents(inbox.Items, FolderEvent)
def OnDisconnection(self, mode, custom):
print("OnDisconnection")
def OnAddInsUpdate(self, custom):
print("OnAddInsUpdate", custom)
def OnStartupComplete(self, custom):
print("OnStartupComplete", custom)
def OnBeginShutdown(self, custom):
print("OnBeginShutdown", custom)
def RegisterAddin(klass):
import winreg
key = winreg.CreateKey(winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Outlook\\Addins")
subkey = winreg.CreateKey(key, klass._reg_progid_)
winreg.SetValueEx(subkey, "CommandLineSafe", 0, winreg.REG_DWORD, 0)
winreg.SetValueEx(subkey, "LoadBehavior", 0, winreg.REG_DWORD, 3)
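    # LoadBehavior 3 = "loaded, load at application startup" and
    # CommandLineSafe 0 = "not safe for no-UI (command line) starts",
    # per the Office add-in registry convention (stated as an assumption).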
winreg.SetValueEx(subkey, "Description", 0, winreg.REG_SZ, klass._reg_progid_)
winreg.SetValueEx(subkey, "FriendlyName", 0, winreg.REG_SZ, klass._reg_progid_)
def UnregisterAddin(klass):
import winreg
try:
winreg.DeleteKey(winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Outlook\\Addins\\" + klass._reg_progid_)
except WindowsError:
pass
if __name__ == '__main__':
import win32com.server.register
win32com.server.register.UseCommandLine(OutlookAddin)
if "--unregister" in sys.argv:
UnregisterAddin(OutlookAddin)
else:
RegisterAddin(OutlookAddin)
avg_line_length: 40.243478 | max_line_length: 121 | alphanum_fraction: 0.721046
---
hexsha: a92d16a0a6f860d7843b0822d0af32da610bd45d | size: 16,471 | ext: py | lang: Python
max_stars_repo: tempest/common/preprov_creds.py | azorge/tempest @ 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | licenses: ["Apache-2.0"] | stars: 1 | events: 2021-05-21T08:24:02.000Z, 2021-05-21T08:24:02.000Z
max_issues_repo: tempest/common/preprov_creds.py | azorge/tempest @ 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | licenses: ["Apache-2.0"] | issues: null | events: null, null
max_forks_repo: tempest/common/preprov_creds.py | azorge/tempest @ 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | licenses: ["Apache-2.0"] | forks: null | events: null, null
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
from oslo_concurrency import lockutils
from oslo_log import log as logging
import six
import yaml
from tempest import clients
from tempest.common import fixed_network
from tempest import exceptions
from tempest.lib import auth
from tempest.lib.common import cred_provider
from tempest.lib import exceptions as lib_exc
LOG = logging.getLogger(__name__)
def read_accounts_yaml(path):
try:
with open(path, 'r') as yaml_file:
accounts = yaml.safe_load(yaml_file)
except IOError:
raise lib_exc.InvalidConfiguration(
'The path for the test accounts file: %s '
'could not be found' % path)
return accounts
class PreProvisionedCredentialProvider(cred_provider.CredentialProvider):
# Exclude from the hash fields specific to v2 or v3 identity API
# i.e. only include user*, project*, tenant* and password
HASH_CRED_FIELDS = (set(auth.KeystoneV2Credentials.ATTRIBUTES) &
set(auth.KeystoneV3Credentials.ATTRIBUTES))
def __init__(self, identity_version, test_accounts_file,
accounts_lock_dir, name=None, credentials_domain=None,
admin_role=None, object_storage_operator_role=None,
object_storage_reseller_admin_role=None):
"""Credentials provider using pre-provisioned accounts
This credentials provider loads the details of pre-provisioned
accounts from a YAML file, in the format specified by
`etc/accounts.yaml.sample`. It locks accounts while in use, using the
external locking mechanism, allowing for multiple python processes
to share a single account file, and thus running tests in parallel.
The accounts_lock_dir must be generated using `lockutils.get_lock_path`
from the oslo.concurrency library. For instance:
accounts_lock_dir = os.path.join(lockutils.get_lock_path(CONF),
'test_accounts')
Role names for object storage are optional as long as the
`operator` and `reseller_admin` credential types are not used in the
accounts file.
:param identity_version: identity version of the credentials
:param admin_role: name of the admin role
:param test_accounts_file: path to the accounts YAML file
:param accounts_lock_dir: the directory for external locking
:param name: name of the hash file (optional)
:param credentials_domain: name of the domain credentials belong to
(if no domain is configured)
:param object_storage_operator_role: name of the role
:param object_storage_reseller_admin_role: name of the role
"""
super(PreProvisionedCredentialProvider, self).__init__(
identity_version=identity_version, name=name,
admin_role=admin_role, credentials_domain=credentials_domain)
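        # A minimal accounts.yaml sketch in the format described above
        # (hypothetical values; see etc/accounts.yaml.sample for the real
        # template):
        #
        #   - username: 'user_1'
        #     project_name: 'project_1'
        #     password: 'pass_1'
        #     roles: ['member']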
self.test_accounts_file = test_accounts_file
if test_accounts_file:
accounts = read_accounts_yaml(self.test_accounts_file)
else:
raise lib_exc.InvalidCredentials("No accounts file specified")
self.hash_dict = self.get_hash_dict(
accounts, admin_role, object_storage_operator_role,
object_storage_reseller_admin_role)
self.accounts_dir = accounts_lock_dir
self._creds = {}
@classmethod
def _append_role(cls, role, account_hash, hash_dict):
if role in hash_dict['roles']:
hash_dict['roles'][role].append(account_hash)
else:
hash_dict['roles'][role] = [account_hash]
return hash_dict
@classmethod
def get_hash_dict(cls, accounts, admin_role,
object_storage_operator_role=None,
object_storage_reseller_admin_role=None):
hash_dict = {'roles': {}, 'creds': {}, 'networks': {}}
# Loop over the accounts read from the yaml file
for account in accounts:
roles = []
types = []
resources = []
if 'roles' in account:
roles = account.pop('roles')
if 'types' in account:
types = account.pop('types')
if 'resources' in account:
resources = account.pop('resources')
temp_hash = hashlib.md5()
account_for_hash = dict((k, v) for (k, v) in account.items()
if k in cls.HASH_CRED_FIELDS)
temp_hash.update(six.text_type(account_for_hash).encode('utf-8'))
temp_hash_key = temp_hash.hexdigest()
hash_dict['creds'][temp_hash_key] = account
for role in roles:
hash_dict = cls._append_role(role, temp_hash_key,
hash_dict)
# If types are set for the account append the matching role
# subdict with the hash
for type in types:
if type == 'admin':
hash_dict = cls._append_role(admin_role, temp_hash_key,
hash_dict)
elif type == 'operator':
if object_storage_operator_role:
hash_dict = cls._append_role(
object_storage_operator_role, temp_hash_key,
hash_dict)
else:
msg = ("Type 'operator' configured, but no "
"object_storage_operator_role specified")
raise lib_exc.InvalidCredentials(msg)
elif type == 'reseller_admin':
if object_storage_reseller_admin_role:
hash_dict = cls._append_role(
object_storage_reseller_admin_role,
temp_hash_key,
hash_dict)
else:
msg = ("Type 'reseller_admin' configured, but no "
"object_storage_reseller_admin_role specified")
raise lib_exc.InvalidCredentials(msg)
# Populate the network subdict
for resource in resources:
if resource == 'network':
hash_dict['networks'][temp_hash_key] = resources[resource]
else:
LOG.warning(
'Unknown resource type %s, ignoring this field',
resource
)
return hash_dict
def is_multi_user(self):
return len(self.hash_dict['creds']) > 1
def is_multi_tenant(self):
return self.is_multi_user()
def _create_hash_file(self, hash_string):
path = os.path.join(os.path.join(self.accounts_dir, hash_string))
if not os.path.isfile(path):
with open(path, 'w') as fd:
fd.write(self.name)
return True
return False
@lockutils.synchronized('test_accounts_io', external=True)
def _get_free_hash(self, hashes):
# Cast as a list because in some edge cases a set will be passed in
hashes = list(hashes)
if not os.path.isdir(self.accounts_dir):
os.mkdir(self.accounts_dir)
# Create File from first hash (since none are in use)
self._create_hash_file(hashes[0])
return hashes[0]
names = []
for _hash in hashes:
res = self._create_hash_file(_hash)
if res:
return _hash
else:
path = os.path.join(os.path.join(self.accounts_dir,
_hash))
with open(path, 'r') as fd:
names.append(fd.read())
msg = ('Insufficient number of users provided. %s have allocated all '
'the credentials for this allocation request' % ','.join(names))
raise lib_exc.InvalidCredentials(msg)
def _get_match_hash_list(self, roles=None):
hashes = []
if roles:
# Loop over all the creds for each role in the subdict and generate
# a list of cred lists for each role
for role in roles:
temp_hashes = self.hash_dict['roles'].get(role, None)
if not temp_hashes:
raise lib_exc.InvalidCredentials(
"No credentials with role: %s specified in the "
"accounts ""file" % role)
hashes.append(temp_hashes)
# Take the list of lists and do a boolean and between each list to
# find the creds which fall under all the specified roles
temp_list = set(hashes[0])
for hash_list in hashes[1:]:
temp_list = temp_list & set(hash_list)
hashes = temp_list
else:
hashes = self.hash_dict['creds'].keys()
# NOTE(mtreinish): admin is a special case because of the increased
# privilege set which could potentially cause issues on tests where
# that is not expected. So unless the admin role isn't specified do
# not allocate admin.
admin_hashes = self.hash_dict['roles'].get(self.admin_role,
None)
if ((not roles or self.admin_role not in roles) and
admin_hashes):
useable_hashes = [x for x in hashes if x not in admin_hashes]
else:
useable_hashes = hashes
return useable_hashes
def _sanitize_creds(self, creds):
temp_creds = creds.copy()
temp_creds.pop('password')
return temp_creds
def _get_creds(self, roles=None):
useable_hashes = self._get_match_hash_list(roles)
if len(useable_hashes) == 0:
msg = 'No users configured for type/roles %s' % roles
raise lib_exc.InvalidCredentials(msg)
free_hash = self._get_free_hash(useable_hashes)
clean_creds = self._sanitize_creds(
self.hash_dict['creds'][free_hash])
LOG.info('%s allocated creds:\n%s', self.name, clean_creds)
return self._wrap_creds_with_network(free_hash)
@lockutils.synchronized('test_accounts_io', external=True)
def remove_hash(self, hash_string):
hash_path = os.path.join(self.accounts_dir, hash_string)
if not os.path.isfile(hash_path):
LOG.warning('Expected an account lock file %s to remove, but '
'one did not exist', hash_path)
else:
os.remove(hash_path)
if not os.listdir(self.accounts_dir):
os.rmdir(self.accounts_dir)
def get_hash(self, creds):
for _hash in self.hash_dict['creds']:
# Comparing on the attributes that are expected in the YAML
init_attributes = creds.get_init_attributes()
# Only use the attributes initially used to calculate the hash
init_attributes = [x for x in init_attributes if
x in self.HASH_CRED_FIELDS]
hash_attributes = self.hash_dict['creds'][_hash].copy()
# NOTE(andreaf) Not all fields may be available on all credentials
# so defaulting to None for that case.
if all([getattr(creds, k, None) == hash_attributes.get(k, None) for
k in init_attributes]):
return _hash
raise AttributeError('Invalid credentials %s' % creds)
def remove_credentials(self, creds):
_hash = self.get_hash(creds)
clean_creds = self._sanitize_creds(self.hash_dict['creds'][_hash])
self.remove_hash(_hash)
LOG.info("%s returned allocated creds:\n%s", self.name, clean_creds)
def get_primary_creds(self):
if self._creds.get('primary'):
return self._creds.get('primary')
net_creds = self._get_creds()
self._creds['primary'] = net_creds
return net_creds
def get_alt_creds(self):
if self._creds.get('alt'):
return self._creds.get('alt')
net_creds = self._get_creds()
self._creds['alt'] = net_creds
return net_creds
def get_creds_by_roles(self, roles, force_new=False):
roles = list(set(roles))
exist_creds = self._creds.get(six.text_type(roles).encode(
'utf-8'), None)
# The force kwarg is used to allocate an additional set of creds with
        # the same role list. The index used for the previous allocation
# in the _creds dict will be moved.
if exist_creds and not force_new:
return exist_creds
elif exist_creds and force_new:
# NOTE(andreaf) In py3.x encode returns bytes, and b'' is bytes
# In py2.7 encode returns strings, and b'' is still string
new_index = six.text_type(roles).encode('utf-8') + b'-' + \
six.text_type(len(self._creds)).encode('utf-8')
self._creds[new_index] = exist_creds
net_creds = self._get_creds(roles=roles)
self._creds[six.text_type(roles).encode('utf-8')] = net_creds
return net_creds
def clear_creds(self):
for creds in self._creds.values():
self.remove_credentials(creds)
def get_admin_creds(self):
return self.get_creds_by_roles([self.admin_role])
def is_role_available(self, role):
if self.hash_dict['roles'].get(role):
return True
return False
def admin_available(self):
return self.is_role_available(self.admin_role)
def _wrap_creds_with_network(self, hash):
creds_dict = self.hash_dict['creds'][hash]
        # Make sure a domain scope is defined for users in case of V3
# Make sure a tenant is available in case of V2
creds_dict = self._extend_credentials(creds_dict)
# This just builds a Credentials object, it does not validate
# nor fill with missing fields.
credential = auth.get_credentials(
auth_url=None, fill_in=False,
identity_version=self.identity_version, **creds_dict)
net_creds = cred_provider.TestResources(credential)
net_clients = clients.Manager(credentials=credential)
compute_network_client = net_clients.compute_networks_client
net_name = self.hash_dict['networks'].get(hash, None)
try:
network = fixed_network.get_network_from_name(
net_name, compute_network_client)
except exceptions.InvalidTestResource:
network = {}
net_creds.set_resources(network=network)
return net_creds
def _extend_credentials(self, creds_dict):
# Add or remove credential domain fields to fit the identity version
domain_fields = set(x for x in auth.KeystoneV3Credentials.ATTRIBUTES
if 'domain' in x)
msg = 'Assuming they are valid in the default domain.'
if self.identity_version == 'v3':
if not domain_fields.intersection(set(creds_dict.keys())):
msg = 'Using credentials %s for v3 API calls. ' + msg
LOG.warning(msg, self._sanitize_creds(creds_dict))
creds_dict['domain_name'] = self.credentials_domain
if self.identity_version == 'v2':
if domain_fields.intersection(set(creds_dict.keys())):
msg = 'Using credentials %s for v2 API calls. ' + msg
LOG.warning(msg, self._sanitize_creds(creds_dict))
# Remove all valid domain attributes
for attr in domain_fields.intersection(set(creds_dict.keys())):
creds_dict.pop(attr)
return creds_dict
avg_line_length: 44.158177 | max_line_length: 79 | alphanum_fraction: 0.609313
---
hexsha: fa8094c0365a4484cb497f7fa54df2c706321100 | size: 633 | ext: py | lang: Python
max_stars_repo: Python-code-snippets-201-300/289-pywebview-play-video.py | abartoha/python-snippets-ref @ 04e4feada96077f0e849b277204c012194e8fbcd | licenses: ["Unlicense"] | stars: null | events: null, null
max_issues_repo: Python-code-snippets-201-300/289-pywebview-play-video.py | abartoha/python-snippets-ref @ 04e4feada96077f0e849b277204c012194e8fbcd | licenses: ["Unlicense"] | issues: null | events: null, null
max_forks_repo: Python-code-snippets-201-300/289-pywebview-play-video.py | abartoha/python-snippets-ref @ 04e4feada96077f0e849b277204c012194e8fbcd | licenses: ["Unlicense"] | forks: null | events: null, null
"""Code snippets vol-58
289-pywebview-Play video.
Download all snippets so far:
https://wp.me/Pa5TU8-1yg
Blog: stevepython.wordpress.com
Requires:
---------
pip3 install pywebview
play_video.html file and test.mp4 in cwd.
Origin:
https://github.com/r0x0r/pywebview/tree/master/examples
"""
import webview
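# A minimal play_video.html sketch (assumed markup; the actual file ships
# with the pywebview examples):
#
#   <html><body>
#     <video src="test.mp4" width="600" controls autoplay></video>
#   </body></html>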
if __name__ == '__main__':
master_window = webview.create_window('Pywebview: Play video example',
url='play_video.html',
width=625, height=460,
confirm_close=True,)
webview.start()
avg_line_length: 25.32 | max_line_length: 74 | alphanum_fraction: 0.587678
---
hexsha: dc741e9dc33a612e238073b522bdb99e7dba3cb8 | size: 2,815 | ext: py | lang: Python
max_stars_repo: tests/test_draco.py | ousttrue/pydracodec @ 05ef3cd72f7f1d092c8313ef1bda3a7cd4d0f062 | licenses: ["Apache-2.0"] | stars: 1 | events: 2021-11-26T13:43:13.000Z, 2021-11-26T13:43:13.000Z
max_issues_repo: tests/test_draco.py | ousttrue/pydracodec @ 05ef3cd72f7f1d092c8313ef1bda3a7cd4d0f062 | licenses: ["Apache-2.0"] | issues: null | events: null, null
max_forks_repo: tests/test_draco.py | ousttrue/pydracodec @ 05ef3cd72f7f1d092c8313ef1bda3a7cd4d0f062 | licenses: ["Apache-2.0"] | forks: null | events: null, null
import unittest
import os
import pathlib
import json
import pydracodec
import pydracodec.dracodec_unity
import ctypes
def get_bufferview_bytes(gltf_path: pathlib.Path, gltf, bufferview_index: int) -> bytes:
match gltf['bufferViews'][bufferview_index]:
case {
'buffer': buffer_index,
'byteOffset': offset,
'byteLength': length,
}:
match gltf['buffers'][buffer_index]:
case {'uri': uri}:
path = gltf_path.parent / uri
data = path.read_bytes()
return data[offset:offset+length]
raise Exception()
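# The patterns matched above correspond to glTF JSON of roughly this shape
# (illustrative values):
#
#   "bufferViews": [{"buffer": 0, "byteOffset": 0, "byteLength": 8056}],
#   "buffers": [{"uri": "Avocado.bin", "byteLength": 8056}]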
class TestDraco(unittest.TestCase):
def test_upper(self):
dir = os.environ['GLTF_SAMPLE_MODELS']
path = pathlib.Path(dir) / '2.0/Avocado/glTF-Draco/Avocado.gltf'
self.assertTrue(path.exists())
gltf = json.loads(path.read_bytes())
mesh0 = gltf['meshes'][0]
prim0_0 = mesh0['primitives'][0]
match prim0_0:
case {'extensions': {'KHR_draco_mesh_compression': {'bufferView': bufferview_index, 'attributes': attributes}}}:
data = get_bufferview_bytes(path, gltf, bufferview_index)
with pydracodec.DecodeMesh(data) as mesh:
# print(f'{mesh.numFaces}')
indices = pydracodec.GetIndices(mesh, mesh.numFaces)
self.assertEqual(ctypes.c_uint, indices.element_type)
# print(f'{mesh.numVertices}')
# print(f'{mesh.numAttributes}')
for k, v in attributes.items():
match k:
case 'POSITION':
positions = pydracodec.GetAttribute(
mesh, pydracodec.dracodec_unity.AttributeType.POSITION, mesh.numVertices)
self.assertEqual(3, positions.element_count)
case 'NORMAL':
normals = pydracodec.GetAttribute(
mesh, pydracodec.dracodec_unity.AttributeType.NORMAL, mesh.numVertices)
self.assertEqual(3, normals.element_count)
case 'TEXCOORD_0':
uv = pydracodec.GetAttribute(
mesh, pydracodec.dracodec_unity.AttributeType.TEX_COORD, mesh.numVertices)
self.assertEqual(2, uv.element_count)
case 'TANGENT':
pass
case _:
raise Exception()
case _:
raise Exception()
if __name__ == '__main__':
unittest.main()
avg_line_length: 36.558442 | max_line_length: 124 | alphanum_fraction: 0.517229
---
hexsha: 5df0f304a292d3071bb1bea5f3d6f91b697422d1 | size: 3,731 | ext: py | lang: Python
max_stars_repo: PaperExperiments/XHExp101/parameters.py | stefan-c-kremer/TE_World2 @ 8e1fae218af8a1eabae776deecac62192c22e0ca | licenses: ["MIT"] | stars: null | events: null, null
max_issues_repo: PaperExperiments/XHExp101/parameters.py | stefan-c-kremer/TE_World2 @ 8e1fae218af8a1eabae776deecac62192c22e0ca | licenses: ["MIT"] | issues: null | events: null, null
max_forks_repo: PaperExperiments/XHExp101/parameters.py | stefan-c-kremer/TE_World2 @ 8e1fae218af8a1eabae776deecac62192c22e0ca | licenses: ["MIT"] | forks: null | events: null, null
# parameters.py
"""
Exp 101 - {'Initial_genes': '500', 'Host_mutation_rate': '0.03', 'TE_progeny': '0.00, 0, 0.55, 1, 0.30, 2, 0.15, 3', 'TE_Insertion_Distribution': 'Triangle( pmax=0, pzero=3.0/3.0 )', 'Carrying_capacity': '300', 'TE_excision_rate': '0.1', 'Junk_BP': '1.4', 'Gene_Insertion_Distribution': 'Triangle( pzero=1.0/3.0, pmax=1 )', 'mutation_effect': '0.10', 'TE_death_rate': '0.0005'}
"""
from TEUtil import *;
# note that "#" indicates a comment
# set the following to True if you want messages printed to the screen
# while the program runs - search for these keywords in TESim.py to see
# what each one prints out
output = {
"SPLAT": False,
"SPLAT FITNESS": False,
"INITIALIZATION": False,
"GENERATION": True,
"HOST EXTINCTION": True,
"TE EXTINCTION": True,
"TRIAL NO": True,
"GENE INIT": False,
"TE INIT": False,
};
TE_Insertion_Distribution = Triangle( pmax=0, pzero=3.0/3.0 );
Gene_Insertion_Distribution = Triangle( pzero=1.0/3.0, pmax=1 );
# Triangle( pmax, pzero ) generates values between pmax and pzero with
# a triangular probability distribution, where pmax is the point of highest
# probability, and pzero is the point of lowest probability
# - you can change the orientation of the triangle by reversing the values
# of pmax and pzero
# Flat() generates values between 0 and 1 with uniform probability
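# As a sampling sketch (assuming TEUtil's Triangle draws by inverse
# transform; not necessarily its actual implementation):
#   Triangle( pmax=0, pzero=1 )  ~  x = 1 - math.sqrt(random.random())
# i.e. the density is highest at 0 and falls linearly to zero at 1.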
Gene_length = 1000; # use 1000?
TE_length = 1000; # use 1000?
TE_death_rate = 0.0005;
TE_excision_rate = 0.1; # set this to zero for retro transposons
# for retro transposons this is the probability of the given number of progeny
# for dna transposons this is the probability of the given number of progeny
# ___PLUS___ the original re-inserting
TE_progeny = ProbabilityTable( 0.00, 0, 0.55, 1, 0.30, 2, 0.15, 3 );
Initial_genes = 500;
Append_gene = True; # True: when the initialization routine tries to place
                    # a gene inside another gene, it instead appends it
                    # at the end of the original gene (use this with small
                    # amounts of Junk_BP).
                    # False: when the initialization routine tries to place
                    # a gene inside another gene, try to place it somewhere
                    # else again (don't use this option with small amounts
                    # of Junk_BP).
Initial_TEs = 1;
MILLION = 1000000;
Junk_BP = 1.4 * MILLION;
Host_start_fitness = 1.0;
Host_mutation_rate = 0.03;
Host_mutation = ProbabilityTable( 0.40, lambda fit: 0.0,
0.30, lambda fit: fit - random.random()*0.10,
0.15, lambda fit: fit,
0.15, lambda fit: fit + random.random()*0.10
);
# what happens when a TA hits a gene
Insertion_effect = ProbabilityTable(0.30, lambda fit: 0.0,
0.20, lambda fit: fit - random.random()*0.10,
0.30, lambda fit: fit,
0.20, lambda fit: fit + random.random()*0.10
);
Carrying_capacity = 300;
Host_reproduction_rate = 1; # how many offspring each host has
Host_survival_rate = lambda propfit: min( Carrying_capacity * propfit, 0.95 );
# propfit = proportion of fitness owned by this individual
Maximum_generations = 1500;
Terminate_no_TEs = True; # end simulation if there are no TEs left
# seed = 0;
seed = None; # if seed = None, the random number generator's initial state is
# set "randomly"
save_frequency = 50; # Frequency with which to save the state of the experiment
saved = None; # if saved = None then we start a new simulation from scratch
              # if saved = string, then we open that file and resume a simulation
avg_line_length: 38.864583 | max_line_length: 377 | alphanum_fraction: 0.647816
---
hexsha: 54501e5a2e3628e242b911a44d9302b33f069813 | size: 735 | ext: py | lang: Python
max_stars_repo: behave/tests/utils.py | uktrade/data-hub @ 81151252c52f17094899927cda244e32195c32e7 | licenses: ["MIT"] | stars: null | events: null, null
max_issues_repo: behave/tests/utils.py | uktrade/data-hub @ 81151252c52f17094899927cda244e32195c32e7 | licenses: ["MIT"] | issues: 16 | events: 2016-12-05T14:44:01.000Z, 2017-04-11T16:08:05.000Z
max_forks_repo: behave/tests/utils.py | uktrade/data-hub @ 81151252c52f17094899927cda244e32195c32e7 | licenses: ["MIT"] | forks: null | events: null, null
import os
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
def get_chrome_webdriver():
"""Return the Chrome webdriver."""
selenium_url = 'http://selenium:{port}/wd/hub'.format(port=os.environ['SELENIUM_PORT'])
return webdriver.Remote(
command_executor=selenium_url,
desired_capabilities=DesiredCapabilities.CHROME
)
def get_base_rhod_url():
    """Return the Rhod base url."""
return 'http://rhod:{port}/'.format(port=os.environ['RHOD_PORT'])
def build_rhod_endpoint(endpoint_name):
    """Return the full Rhod url."""
if endpoint_name.startswith('/'):
endpoint_name = endpoint_name[1:]
return get_base_rhod_url() + endpoint_name
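# e.g. with RHOD_PORT=8000, build_rhod_endpoint('/ping') and
# build_rhod_endpoint('ping') both return 'http://rhod:8000/ping'.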
avg_line_length: 28.269231 | max_line_length: 91 | alphanum_fraction: 0.721088
---
hexsha: 5edd8e74a8899a25fb51e2a4e133f3cb7933fa26 | size: 5,251 | ext: py | lang: Python
max_stars_repo: tensorflow/contrib/autograph/impl/conversion_test.py | tucaiyong/tensorflow @ 3cc3c87f375f1bc292bd58db4928b810ac888bc6 | licenses: ["Apache-2.0"] | stars: 14 | events: 2018-12-06T06:51:33.000Z, 2021-03-23T11:29:24.000Z
max_issues_repo: tensorflow/contrib/autograph/impl/conversion_test.py | tucaiyong/tensorflow @ 3cc3c87f375f1bc292bd58db4928b810ac888bc6 | licenses: ["Apache-2.0"] | issues: 10 | events: 2018-02-04T18:41:52.000Z, 2018-05-02T09:00:46.000Z
max_forks_repo: tensorflow/contrib/autograph/impl/conversion_test.py | tucaiyong/tensorflow @ 3cc3c87f375f1bc292bd58db4928b810ac888bc6 | licenses: ["Apache-2.0"] | forks: 4 | events: 2018-01-17T14:22:49.000Z, 2018-02-27T15:06:41.000Z
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conversion module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.autograph import utils
from tensorflow.contrib.autograph.impl import api
from tensorflow.contrib.autograph.impl import conversion
from tensorflow.python.framework import constant_op
from tensorflow.python.keras._impl.keras.engine import training
from tensorflow.python.platform import test
class ConversionTest(test.TestCase):
def _simple_conversion_map(self):
return conversion.ConversionMap(True, (), (), api)
def test_is_whitelisted_for_graph(self):
def test_fn():
return constant_op.constant(1)
self.assertFalse(conversion.is_whitelisted_for_graph(test_fn))
self.assertTrue(conversion.is_whitelisted_for_graph(utils))
self.assertTrue(conversion.is_whitelisted_for_graph(constant_op.constant))
def test_entity_to_graph_unsupported_types(self):
with self.assertRaises(ValueError):
conversion_map = self._simple_conversion_map()
conversion.entity_to_graph('dummy', conversion_map, None, None)
def test_entity_to_graph_callable(self):
b = 2
def f(a):
return a + b
conversion_map = self._simple_conversion_map()
ast, name, ns = conversion.entity_to_graph(f, conversion_map, None, None)
self.assertTrue(isinstance(ast, gast.FunctionDef), ast)
self.assertEqual('tf__f', name)
self.assertTrue(ns['b'] is b)
def test_entity_to_graph_call_tree(self):
def g(a):
return a
def f(a):
return g(a)
conversion_map = self._simple_conversion_map()
conversion.entity_to_graph(f, conversion_map, None, None)
self.assertTrue(f in conversion_map.dependency_cache)
self.assertTrue(g in conversion_map.dependency_cache)
self.assertEqual('tf__f', conversion_map.dependency_cache[f].name)
# need the extra .body[0] in order to step past the with tf.name_scope('f')
# that is added automatically
self.assertEqual(
'tf__g',
conversion_map.dependency_cache[f].body[0].body[0].value.func.id)
self.assertEqual('tf__g', conversion_map.dependency_cache[g].name)
def test_entity_to_graph_class_hierarchy(self):
class TestBase(object):
def __init__(self, x='base'):
self.x = x
def foo(self):
return self.x
def bar(self):
return self.x
class TestSubclass(TestBase):
def __init__(self, y):
super(TestSubclass, self).__init__('sub')
self.y = y
def foo(self):
return self.y
def baz(self):
return self.y
conversion_map = self._simple_conversion_map()
conversion.entity_to_graph(TestSubclass, conversion_map, None, None)
self.assertTrue(TestBase in conversion_map.dependency_cache)
self.assertTrue(TestSubclass in conversion_map.dependency_cache)
self.assertEqual('TfTestBase',
conversion_map.dependency_cache[TestBase].body[-1].name)
self.assertEqual(
'TfTestSubclass',
conversion_map.dependency_cache[TestSubclass].body[-1].name)
def test_entity_to_graph_class_hierarchy_whitelisted(self):
class TestSubclass(training.Model):
def __init__(self, y):
super(TestSubclass, self).__init__()
self.built = False
def call(self, x):
return 3 * x
conversion_map = self._simple_conversion_map()
conversion.entity_to_graph(TestSubclass, conversion_map, None, None)
self.assertTrue(TestSubclass in conversion_map.dependency_cache)
self.assertFalse(training.Model in conversion_map.dependency_cache)
self.assertEqual(
'Model',
conversion_map.dependency_cache[TestSubclass].body[0].names[0].name)
self.assertEqual(
'TfTestSubclass',
conversion_map.dependency_cache[TestSubclass].body[-1].name)
def test_entity_to_graph_lambda(self):
f = lambda a: a
with self.assertRaises(NotImplementedError):
conversion_map = self._simple_conversion_map()
conversion.entity_to_graph(f, conversion_map, None, None)
def test_ag_module_cached(self):
def callee():
return range(3)
def caller(a):
return a()
conversion_map = self._simple_conversion_map()
_, _, callee_ns = conversion.entity_to_graph(
callee, conversion_map, None, None)
_, _, caller_ns = conversion.entity_to_graph(
caller, conversion_map, None, None)
self.assertTrue(callee_ns['ag__'] is caller_ns['ag__'])
if __name__ == '__main__':
test.main()
avg_line_length: 31.63253 | max_line_length: 80 | alphanum_fraction: 0.713388
---
hexsha: 13499018b3645b57be2bdfe062c7fac8eda8b4a9 | size: 235 | ext: py | lang: Python
max_stars_repo: iot_device/discover.py | iot49/iot-device @ 4113ee25a603e5125c8ddf525b11ead57bfbd158 | licenses: ["MIT"] | stars: null | events: null, null
max_issues_repo: iot_device/discover.py | iot49/iot-device @ 4113ee25a603e5125c8ddf525b11ead57bfbd158 | licenses: ["MIT"] | issues: null | events: null, null
max_forks_repo: iot_device/discover.py | iot49/iot-device @ 4113ee25a603e5125c8ddf525b11ead57bfbd158 | licenses: ["MIT"] | forks: 1 | events: 2020-09-08T10:42:28.000Z, 2020-09-08T10:42:28.000Z
from abc import ABC, abstractmethod
class Discover(ABC):
"""Base class for device discovery"""
def __init__(self):
pass
@abstractmethod
def scan(self) -> list:
"""url's of devices that are online"""
avg_line_length: 18.076923 | max_line_length: 46 | alphanum_fraction: 0.625532
---
hexsha: 1e067b55e8afb0dc18182239b3e956fb4a0cd0b1 | size: 2,832 | ext: py | lang: Python
max_stars_repo: tests/functional/gtcs/test_division_by_zero_corrupts_db.py | FirebirdSQL/firebird-qa @ 96af2def7f905a06f178e2a80a2c8be4a4b44782 | licenses: ["MIT"] | stars: 1 | events: 2022-02-05T11:37:13.000Z, 2022-02-05T11:37:13.000Z
max_issues_repo: tests/functional/gtcs/test_division_by_zero_corrupts_db.py | FirebirdSQL/firebird-qa @ 96af2def7f905a06f178e2a80a2c8be4a4b44782 | licenses: ["MIT"] | issues: 1 | events: 2021-09-03T11:47:00.000Z, 2021-09-03T12:42:10.000Z
max_forks_repo: tests/functional/gtcs/test_division_by_zero_corrupts_db.py | FirebirdSQL/firebird-qa @ 96af2def7f905a06f178e2a80a2c8be4a4b44782 | licenses: ["MIT"] | forks: 1 | events: 2021-06-30T14:14:16.000Z, 2021-06-30T14:14:16.000Z
#coding:utf-8
#
# id: functional.gtcs.division_by_zero_corrupts_db
# title: GTCS/tests/CF_ISQL_29. Zero divide in SP can crash database when calling this SP several times.
# description:
# ::: NB :::
# ### Name of original test has no any relation with actual task of this test: ###
# https://github.com/FirebirdSQL/fbtcs/blob/master/GTCS/tests/CF_ISQL_29.script
#
# Issue in original test:
# Division by 0 corrupt database
#
# Checked on: 4.0.0.1803 SS; 3.0.6.33265 SS; 2.5.9.27149 SC.
#
# tracker_id:
# min_versions: ['2.5.0']
# versions: 2.5
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 2.5
# resources: None
substitutions_1 = [("-At procedure 'SPX_AUX_TEST' line: .*", ''), ('[ \t]+', ' ')]
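# e.g. a stderr line like "-At procedure 'SPX_AUX_TEST' line: 9, col: 13"
# (illustrative numbers) is blanked out, and runs of spaces/tabs collapse
# to a single space, before actual and expected output are compared.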
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """
set term ^ ;
create procedure spx_aux_test (par1 bigint) returns (ret1 bigint)
as
declare lok1 bigint ;
declare itmpvar integer;
begin
begin
lok1=2;
itmpvar = 1/0;
when any do
begin
exception;
end
end
end
^
commit
^
set term ;^
connect '$(DSN)' user 'SYSDBA' password 'masterkey'; -- this is done in original script.
set term ^;
create or alter procedure spx_aux_test (par1 bigint) returns (ret1 bigint)
as
declare lok1 bigint ;
declare itmpvar integer;
begin
begin
lok1=2;
itmpvar = 1/0;
end
end
^
commit
^
set term ;^
execute procedure spx_aux_test (1);
execute procedure spx_aux_test (1);
execute procedure spx_aux_test (1);
"""
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stderr_1 = """
Statement failed, SQLSTATE = 22012
arithmetic exception, numeric overflow, or string truncation
-Integer divide by zero. The code attempted to divide an integer value by an integer divisor of zero.
Statement failed, SQLSTATE = 22012
arithmetic exception, numeric overflow, or string truncation
-Integer divide by zero. The code attempted to divide an integer value by an integer divisor of zero.
Statement failed, SQLSTATE = 22012
arithmetic exception, numeric overflow, or string truncation
-Integer divide by zero. The code attempted to divide an integer value by an integer divisor of zero.
"""
@pytest.mark.version('>=2.5')
def test_1(act_1: Action):
act_1.expected_stderr = expected_stderr_1
act_1.execute()
assert act_1.clean_stderr == act_1.clean_expected_stderr
avg_line_length: 28.897959 | max_line_length: 108 | alphanum_fraction: 0.618291
---
hexsha: 5fdadd5ce2d6172dfd221b1a4ead1318e87ce7d5 | size: 4,116 | ext: py | lang: Python
max_stars_repo: IntentParserApp/intents.py | nehavadnere/onos @ d853f650b85cdf06adfb1fed3b0236c0d54f3bfc | licenses: ["Apache-2.0"] | stars: null | events: null, null
max_issues_repo: IntentParserApp/intents.py | nehavadnere/onos @ d853f650b85cdf06adfb1fed3b0236c0d54f3bfc | licenses: ["Apache-2.0"] | issues: null | events: null, null
max_forks_repo: IntentParserApp/intents.py | nehavadnere/onos @ d853f650b85cdf06adfb1fed3b0236c0d54f3bfc | licenses: ["Apache-2.0"] | forks: null | events: null, null
#!/usr/bin/python
'''
Intent Generator
Generate intents from mission requirements and required network topology
'''
import sys
from random import randrange
import requests
import json
import codecs
import time
def main(argv):
    print('Format: python intents.py <path to network.json> <path to mission.json>')
    print('Number of arguments:', len(sys.argv), 'arguments.')
network_file = sys.argv[1]
mission_file = sys.argv[2]
intent_list = []
populateData(network_file, mission_file, intent_list)
'''
Get the network topology and mission data from the ws
'''
def populateData(network_file, mission_file, intent_list):
dataset = []
#Get data from network and mission
with codecs.open(network_file, "r", encoding="utf-8") as network:
nw = json.load(network)
n_loki = nw["$loki"]
for hosts in nw["hosts"]:
host = hosts.get("display_name")
id_nw = hosts.get("id")
#network.close()
with codecs.open(mission_file, "r", encoding="utf-8") as mission:
mi = json.load(mission)
m_loki = mi["$loki"]
for req in mi["missionRequirements"]:
mi_src = req.get("src")
mi_id = req.get("id")
mi_dst = req.get("dst")
# Get the network info of the specifed hosts in mission requirements
mi_src_short = mi_src.rpartition('.')[2]
mi_dst_short = mi_dst.rpartition('.')[2]
for hosts in nw["hosts"]:
host = hosts.get("display_name")
if (mi_src_short == host):
src_host_id = hosts.get("id")
if (mi_dst_short == host):
dst_host_id = hosts.get("id")
print(mi_src_short,src_host_id,mi_dst_short,dst_host_id,mi_id)
data = {
'src_host_id' : src_host_id,
'dst_host_id' : dst_host_id,
'mi_id' : mi_id,
'nw_loki_id' : n_loki,
'mi_loki_id' : m_loki}
dataset.append(data)
intent_list = generateIntent(src_host_id,dst_host_id,mi_id,n_loki,intent_list)
intent_json = {
'intents' : intent_list,
'network' : n_loki,
'mission' : m_loki
}
    print(intent_json)
with codecs.open("resources/out.json", "w") as file1:
json.dump(intent_json,file1)
file1.close()
URL_WS="http://localhost:5000/api/intents"
AUTH = ('onos','rocks')
HEADERS = {'content-type': 'application/json', 'Access':'application/json'}
r = requests.post(url = URL_WS, data = json.dumps(intent_json), headers = HEADERS)
for items in intent_list:
        print(items["id"])
'''
Generate the intents from mission file and submit to ONOS.
Input parameters needed to generate and submit intents are gathered from mission file.
Unique ID 'mi_id' is assigned to each intent response; it ties together the input mission id from mission.json, the network database id (loki) from network.json and the intent id from ONOS's intent subsystem.
src_host_id : ONOS ID of source host
dst_host_id : ONOS ID of destination host
loki : Network dataset id
mi_id : mapping of the mission ID to the ONOS intent ID
'''
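# e.g. with n_loki=3, mi_id=7 and an ONOS intent id of 42 (hex 0x2a), the
# stored key is "md3_7_i2a" (illustrative values).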
def generateIntent(src_host_id,dst_host_id,mi_id,n_loki,intent_list):
#REST API
URL = "http://localhost:8181/onos/v1/intents"
AUTH = ('onos','rocks')
HEADERS = {'content-type': 'application/json', 'Access':'application/json'}
r = requests.get(url = URL, auth = AUTH)
data_out = r.json()
intents = data_out["intents"]
with codecs.open('resources/simple_template.json', "r", encoding="utf-8") as newrule:
jsonfile = json.load(newrule)
jsonfile['one'] = src_host_id
jsonfile['two'] = dst_host_id
with open("resources/simple_template.json", "w") as newrule:
json.dump(jsonfile, newrule)
r = requests.post(url = URL, auth = AUTH, data = json.dumps(jsonfile), headers = HEADERS)
LOCATION = r.headers["Location"]
id1 = LOCATION.rpartition("/")[2]
id1_hex = hex(int(id1))
URL_flows = "http://localhost:8181/onos/v1/intents/relatedflows/org.onosproject.cli/{}".format(id1_hex)
time.sleep(1)
r = requests.get(url = URL_flows, auth = AUTH)
data_out = r.json()
data_out["mi_id"] = "md{}_{}_i{}".format(n_loki,mi_id,id1_hex[2:])
with open("resources/out.json", "w") as newrule:
json.dump(data_out, newrule)
intent_list.append(data_out)
newrule.close()
return intent_list
if __name__ == "__main__":
main(sys.argv[1:])
avg_line_length: 31.906977 | max_line_length: 208 | alphanum_fraction: 0.701166
---
hexsha: 433eeea59a8c067512f2851b64ed3a0e878a84d5 | size: 2,130 | ext: py | lang: Python
max_stars_repo: setup.py | NBISweden/beacon-python @ ba3499e3cedd4e38c105bb984f34d21098fe8a89 | licenses: ["Apache-2.0"] | stars: null | events: null, null
max_issues_repo: setup.py | NBISweden/beacon-python @ ba3499e3cedd4e38c105bb984f34d21098fe8a89 | licenses: ["Apache-2.0"] | issues: 2 | events: 2020-03-16T09:15:43.000Z, 2020-03-18T10:19:57.000Z
max_forks_repo: setup.py | NBISweden/beacon-python @ ba3499e3cedd4e38c105bb984f34d21098fe8a89 | licenses: ["Apache-2.0"] | forks: null | events: null, null
from setuptools import setup
from beacon_api import __license__, __version__, __author__, __description__
setup(name='beacon_api',
version=__version__,
url='https://beacon-python.rtfd.io/',
project_urls={
'Source': 'https://github.com/CSCfi/beacon-python',
},
license=__license__,
author=__author__,
author_email='',
description=__description__,
long_description="",
packages=['beacon_api', 'beacon_api/utils', 'beacon_api/conf',
'beacon_api/schemas', 'beacon_api/api', 'beacon_api/permissions',
'beacon_api/extensions'],
# If any package contains *.json, include them:
package_data={'': ['*.json', '*.ini']},
entry_points={
'console_scripts': [
'beacon=beacon_api.app:main',
'beacon_init=beacon_api.utils.db_load:main'
]
},
platforms='any',
classifiers=[ # Optional
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Information Technology',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Scientific/Engineering :: Bio-Informatics',
# Pick your license as you wish
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
install_requires=['aiohttp', 'asyncpg', 'authlib',
'jsonschema', 'gunicorn==19.9.0'],
extras_require={
'test': ['coverage', 'pytest', 'pytest-cov',
'coveralls', 'testfixtures', 'tox',
'flake8', 'flake8-docstrings', 'asynctest', 'aioresponses'],
'docs': [
'sphinx >= 1.4',
'sphinx_rtd_theme']}
)
avg_line_length: 36.724138 | max_line_length: 81 | alphanum_fraction: 0.561502
---
hexsha: ba467c01d7359dd8a2a32de1f76ea37db455f26e | size: 2,620 | ext: py | lang: Python
max_stars_repo: test/parser/unit_operators/testcases/Scan/Scan_generator.py | AyishaR/deepC @ 1dc9707ef5ca9000fc13c3da7f1129685a83b494 | licenses: ["Apache-2.0"] | stars: 223 | events: 2020-04-15T20:34:33.000Z, 2022-03-28T05:41:49.000Z
max_issues_repo: test/parser/unit_operators/testcases/Scan/Scan_generator.py | AyishaR/deepC @ 1dc9707ef5ca9000fc13c3da7f1129685a83b494 | licenses: ["Apache-2.0"] | issues: 42 | events: 2019-07-29T15:57:12.000Z, 2020-04-08T15:12:48.000Z
max_forks_repo: test/parser/unit_operators/testcases/Scan/Scan_generator.py | AyishaR/deepC @ 1dc9707ef5ca9000fc13c3da7f1129685a83b494 | licenses: ["Apache-2.0"] | forks: 58 | events: 2019-07-22T11:46:19.000Z, 2020-04-09T22:56:41.000Z
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
#
# This file is part of DNN compiler maintained at
# https://github.com/ai-techsystems/dnnCompiler
import os, sys
import numpy as np
separator = os.path.sep
from onnx import *
sys.path.append(".."+separator+".."+separator+".."+separator+".."+separator+"python/parser")
from onnx_parser import *
op_name = 'Scan'
inputs = [helper.make_tensor_value_info("initial",TensorProto.FLOAT,(1,2)), helper.make_tensor_value_info("x",TensorProto.FLOAT,(1,3,2))]
outputs = [helper.make_tensor_value_info("y",TensorProto.FLOAT,(1,2)), helper.make_tensor_value_info("z",TensorProto.FLOAT,(1,3,2))]
sum_in = onnx.helper.make_tensor_value_info("sum_in", onnx.TensorProto.FLOAT, [2])
next = onnx.helper.make_tensor_value_info('next', onnx.TensorProto.FLOAT, [2])
sum_out = onnx.helper.make_tensor_value_info('sum_out', onnx.TensorProto.FLOAT, [2])
scan_out = onnx.helper.make_tensor_value_info('scan_out', onnx.TensorProto.FLOAT, [2])
add_node = onnx.helper.make_node('Add',inputs=['sum_in', 'next'],outputs=['sum_out'])
id_node = onnx.helper.make_node('Identity',inputs=['sum_out'],outputs=['scan_out'])
scan_body = onnx.helper.make_graph([add_node, id_node],'scan_body',[sum_in, next],[sum_out, scan_out])
nodes = []
nodes.append(onnx.helper.make_node("Scan",inputs=["initial", "x"],outputs=["y", "z"],num_scan_inputs=1,body=scan_body))
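# With this body, the Scan node keeps a running sum: for x of shape
# (1, 3, 2) it yields the final state y and the per-step partial sums z,
# i.e. a cumulative sum along the scan axis.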
graph = helper.make_graph(nodes, op_name+"_graph", inputs, outputs)
opset = (OperatorSetIdProto(version=11),)
model = helper.make_model(graph, opset_imports=opset)
onnx.checker.check_model(model)
t_prefix = ".." + separator + "testcases" + separator + op_name + separator + op_name
g_prefix = ".." + separator + "gold_files" + separator + op_name
onnx.save(model, t_prefix+".onnx")
parse(t_prefix+".onnx", g_prefix+".sym", onnx_output_file=t_prefix+".txt")
avg_line_length: 48.518519 | max_line_length: 137 | alphanum_fraction: 0.75229
---
hexsha: c2be95e78468be23609bc738dbf06a1b7fe7f657 | size: 6,315 | ext: py | lang: Python
max_stars_repo: src/sentry/api/endpoints/project_plugin_details.py | Ali-Tahir/sentry @ aa7b306c5ea671ac002a3524982563679557cb31 | licenses: ["BSD-3-Clause"] | stars: null | events: null, null
max_issues_repo: src/sentry/api/endpoints/project_plugin_details.py | Ali-Tahir/sentry @ aa7b306c5ea671ac002a3524982563679557cb31 | licenses: ["BSD-3-Clause"] | issues: null | events: null, null
max_forks_repo: src/sentry/api/endpoints/project_plugin_details.py | Ali-Tahir/sentry @ aa7b306c5ea671ac002a3524982563679557cb31 | licenses: ["BSD-3-Clause"] | forks: null | events: null, null
from __future__ import absolute_import
import logging
import six
from django import forms
from django.core.urlresolvers import reverse
from rest_framework import serializers
from rest_framework.response import Response
from requests.exceptions import HTTPError
from sentry.exceptions import InvalidIdentity, PluginError, PluginIdentityRequired
from sentry.plugins.base import plugins
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.api.serializers.models.plugin import (
PluginSerializer,
PluginWithConfigSerializer,
serialize_field,
)
from sentry.models import AuditLogEntryEvent
from sentry.signals import plugin_enabled
ERR_ALWAYS_ENABLED = "This plugin is always enabled."
ERR_FIELD_REQUIRED = "This field is required."
OK_UPDATED = "Successfully updated configuration."
class ProjectPluginDetailsEndpoint(ProjectEndpoint):
def _get_plugin(self, plugin_id):
try:
return plugins.get(plugin_id)
except KeyError:
raise ResourceDoesNotExist
def get(self, request, project, plugin_id):
plugin = self._get_plugin(plugin_id)
try:
context = serialize(plugin, request.user, PluginWithConfigSerializer(project))
except PluginIdentityRequired as e:
context = serialize(plugin, request.user, PluginSerializer(project))
context["config_error"] = e.message
context["auth_url"] = reverse("socialauth_associate", args=[plugin.slug])
return Response(context)
def post(self, request, project, plugin_id):
"""
Enable plugin, Test plugin or Reset plugin values
"""
plugin = self._get_plugin(plugin_id)
if request.data.get("test") and plugin.is_testable():
try:
test_results = plugin.test_configuration(project)
except Exception as exc:
if isinstance(exc, HTTPError):
test_results = "%s\n%s" % (exc, exc.response.text[:256])
elif hasattr(exc, "read") and callable(exc.read):
test_results = "%s\n%s" % (exc, exc.read()[:256])
else:
logging.exception("Plugin(%s) raised an error during test", plugin_id)
test_results = "There was an internal error with the Plugin"
if not test_results:
test_results = "No errors returned"
return Response({"detail": test_results}, status=200)
if request.data.get("reset"):
plugin = self._get_plugin(plugin_id)
plugin.reset_options(project=project)
context = serialize(plugin, request.user, PluginWithConfigSerializer(project))
self.create_audit_entry(
request=request,
organization=project.organization,
target_object=project.id,
event=AuditLogEntryEvent.INTEGRATION_EDIT,
data={"integration": plugin_id, "project": project.slug},
)
return Response(context, status=200)
if not plugin.can_disable:
return Response({"detail": ERR_ALWAYS_ENABLED}, status=400)
plugin.enable(project)
self.create_audit_entry(
request=request,
organization=project.organization,
target_object=project.id,
event=AuditLogEntryEvent.INTEGRATION_ADD,
data={"integration": plugin_id, "project": project.slug},
)
return Response(status=201)
def delete(self, request, project, plugin_id):
"""
Disable plugin
"""
plugin = self._get_plugin(plugin_id)
if not plugin.can_disable:
return Response({"detail": ERR_ALWAYS_ENABLED}, status=400)
plugin.disable(project)
self.create_audit_entry(
request=request,
organization=project.organization,
target_object=project.id,
event=AuditLogEntryEvent.INTEGRATION_REMOVE,
data={"integration": plugin_id, "project": project.slug},
)
return Response(status=204)
def put(self, request, project, plugin_id):
plugin = self._get_plugin(plugin_id)
config = [
serialize_field(project, plugin, c)
for c in plugin.get_config(project=project, user=request.user, initial=request.data)
]
cleaned = {}
errors = {}
for field in config:
key = field["name"]
value = request.data.get(key)
if field.get("required") and not value:
errors[key] = ERR_FIELD_REQUIRED
try:
value = plugin.validate_config_field(
project=project, name=key, value=value, actor=request.user
)
except (
forms.ValidationError,
serializers.ValidationError,
InvalidIdentity,
PluginError,
) as e:
errors[key] = e.message
if not errors.get(key):
cleaned[key] = value
if not errors:
try:
cleaned = plugin.validate_config(
project=project, config=cleaned, actor=request.user
)
except (InvalidIdentity, PluginError) as e:
errors["__all__"] = e.message
if errors:
return Response({"errors": errors}, status=400)
for key, value in six.iteritems(cleaned):
if value is None:
plugin.unset_option(project=project, key=key)
else:
plugin.set_option(project=project, key=key, value=value)
context = serialize(plugin, request.user, PluginWithConfigSerializer(project))
plugin_enabled.send(plugin=plugin, project=project, user=request.user, sender=self)
self.create_audit_entry(
request=request,
organization=project.organization,
target_object=project.id,
event=AuditLogEntryEvent.INTEGRATION_EDIT,
data={"integration": plugin_id, "project": project.slug},
)
return Response(context)
| 34.320652
| 96
| 0.616944
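The POST handler above dispatches on the request body: {"test": true} runs plugin.test_configuration, {"reset": true} clears the stored options, and an otherwise empty body enables the plugin. A hedged sketch of the matching client calls, with an illustrative base URL and token rather than Sentry's documented client API:

import requests

base = "https://sentry.example.com/api/0/projects/my-org/my-project/plugins/slack/"  # hypothetical
headers = {"Authorization": "Bearer <token>"}                                        # hypothetical auth

requests.post(base, json={}, headers=headers)               # enable -> 201
requests.post(base, json={"test": True}, headers=headers)   # run test_configuration -> 200
requests.post(base, json={"reset": True}, headers=headers)  # reset options -> 200
requests.delete(base, headers=headers)                      # disable -> 204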
|
da10338448ecb18d2b2ca982030bf610a4d30bb7
| 15,803
|
py
|
Python
|
findatapy/timeseries/calendar.py
|
DT021/findatapy
|
c9be891f1ca592928251e52d6f62b6decea9150e
|
[
"Apache-2.0"
] | 1,205
|
2016-08-17T14:44:08.000Z
|
2022-03-31T15:54:10.000Z
|
findatapy/timeseries/calendar.py
|
DT021/findatapy
|
c9be891f1ca592928251e52d6f62b6decea9150e
|
[
"Apache-2.0"
] | 35
|
2016-10-07T07:44:20.000Z
|
2022-01-28T14:21:41.000Z
|
findatapy/timeseries/calendar.py
|
DT021/findatapy
|
c9be891f1ca592928251e52d6f62b6decea9150e
|
[
"Apache-2.0"
] | 191
|
2016-09-01T11:31:44.000Z
|
2022-03-17T21:29:55.000Z
|
__author__ = 'saeedamen' # Saeed Amen
#
# Copyright 2016-2020 Cuemacro
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
import re
import numpy as np
import pandas as pd
import datetime
from datetime import timedelta
from pandas.tseries.offsets import BDay, CustomBusinessDay, Day, CustomBusinessMonthEnd, DateOffset
from findatapy.timeseries.timezone import Timezone
from findatapy.util.dataconstants import DataConstants
from findatapy.util.loggermanager import LoggerManager
constants = DataConstants()
class Calendar(object):
"""Provides calendar based functions for working out options expiries, holidays etc. Note, that at present, the
expiry _calculations are approximate.
"""
# Approximate mapping from tenor to number of business days
_tenor_bus_day_dict = {'ON' : 1,
'TN' : 2,
'1W' : 5,
'2W' : 10,
'3W' : 15,
'1M' : 20,
'2M' : 40,
'3M' : 60,
'4M' : 80,
'6M' : 120,
'9M' : 180,
'1Y' : 252,
'2Y' : 252 * 2,
'3Y' : 252 * 3,
'5Y' : 252 * 5
}
def __init__(self):
self._holiday_df = pd.read_parquet(constants.holidays_parquet_table)
def flatten_list_of_lists(self, list_of_lists):
"""Flattens lists of obj, into a single list of strings (rather than characters, which is default behavior).
Parameters
----------
list_of_lists : obj (list)
List to be flattened
Returns
-------
str (list)
"""
if isinstance(list_of_lists, list):
rt = []
for i in list_of_lists:
if isinstance(i, list):
rt.extend(self.flatten_list_of_lists(i))
else:
rt.append(i)
return rt
return list_of_lists
def _get_full_cal(self, cal):
holidays_list = []
# Calendars which have been hardcoded in the parquet file (which users may also edit)
if len(cal) == 6:
# Eg. EURUSD (load EUR and USD calendars and combine the holidays)
holidays_list.append([self._get_full_cal(cal[0:3]), self._get_full_cal(cal[3:6])])
elif len(cal) == 9:
holidays_list.append(
[self._get_full_cal(cal[0:3]), self._get_full_cal(cal[3:6]), self._get_full_cal(cal[6:9])])
else:
if cal == 'FX' or cal == 'NYX':
# Filter for Christmas & New Year's Day
for i in range(1999, 2025):
holidays_list.append(pd.Timestamp(str(i) + "-12-25"))
holidays_list.append(pd.Timestamp(str(i) + "-01-01"))
elif cal == 'NYD' or cal == 'NEWYEARSDAY':
# Filter for New Year's Day
for i in range(1999, 2025):
holidays_list.append(pd.Timestamp(str(i) + "-01-01"))
elif cal == 'WDY' or cal == 'WEEKDAY':
bday = CustomBusinessDay(weekmask='Sat Sun')
holidays_list.append([x for x in pd.date_range('01 Jan 1999', '31 Dec 2025', freq=bday)])
            elif cal == 'WKD':
pass
# holidays_list.append()
else:
label = cal + ".holiday-dates"
try:
holidays_list = self._holiday_df[label].dropna().tolist()
except:
logger = LoggerManager().getLogger(__name__)
logger.warning(cal + " holiday calendar not found.")
return holidays_list
def create_calendar_bus_days(self, start_date, end_date, cal='FX'):
"""Creates a calendar of business days
Parameters
----------
start_date : DateTime
start date of calendar
end_date : DataFrame
finish date of calendar
cal : str
business calendar to use
Returns
-------
list
"""
hols = self.get_holidays(start_date=start_date, end_date=end_date, cal=cal)
        return pd.bdate_range(start=start_date, end=end_date, freq='C', holidays=hols)  # 'C' (custom) frequency is required for holidays to take effect
def get_holidays(self, start_date=None, end_date=None, cal='FX', holidays_list=[]):
"""Gets the holidays for a given calendar
Parameters
----------
start_date : DateTime
start date of calendar
end_date : DataFrame
finish date of calendar
cal : str
business calendar to use
Returns
-------
list
"""
# holidays_list , = []
# TODO use Pandas CustomBusinessDays to get more calendars
holidays_list = self._get_full_cal(cal)
# .append(lst)
# Use 'set' so we don't have duplicate dates if we are incorporating multiple calendars
holidays_list = np.array(list(set(self.flatten_list_of_lists(holidays_list))))
holidays_list = pd.to_datetime(holidays_list).sort_values().tz_localize('UTC')
# Floor start date
if start_date is not None:
start_date = pd.Timestamp(start_date).floor('D')
try:
start_date = start_date.tz_localize('UTC')
except:
pass
holidays_list = holidays_list[(holidays_list >= start_date)]
if end_date is not None:
# Ceiling end date
end_date = pd.Timestamp(end_date).ceil('D')
try:
end_date = end_date.tz_localize('UTC')
except:
pass
holidays_list = holidays_list[(holidays_list <= end_date)]
# Remove all weekends unless it is WEEKDAY calendar
        if cal not in ('WEEKDAY', 'WDY'):  # keep weekend dates only for the weekday calendars
holidays_list = holidays_list[holidays_list.dayofweek <= 4]
return holidays_list
def get_business_days_tenor(self, tenor):
if tenor in self._tenor_bus_day_dict.keys():
return self._tenor_bus_day_dict[tenor]
return None
def get_dates_from_tenors(self, start, end, tenor, cal=None):
freq = str(self.get_business_days_tenor(tenor)) + "B"
return pd.DataFrame(index=pd.bdate_range(start, end, freq=freq))
def get_delta_between_dates(self, date1, date2, unit='days'):
if unit == 'days':
return (date2 - date1).days
def get_delivery_date_from_horizon_date(self, horizon_date, tenor, cal=None, asset_class='fx'):
if 'fx' in asset_class:
tenor_unit = ''.join(re.compile(r'\D+').findall(tenor))
asset_holidays = self.get_holidays(cal=cal)
if tenor_unit == 'ON':
return horizon_date + CustomBusinessDay(n=1, holidays=asset_holidays)
elif tenor_unit == 'TN':
return horizon_date + CustomBusinessDay(n=2, holidays=asset_holidays)
elif tenor_unit == 'SP':
pass
elif tenor_unit == 'SN':
tenor_unit = 'D'
tenor_digit = 1
else:
tenor_digit = int(''.join(re.compile(r'\d+').findall(tenor)))
horizon_date = self.get_spot_date_from_horizon_date(horizon_date, cal, asset_holidays=asset_holidays)
if 'SP' in tenor_unit:
return horizon_date
elif tenor_unit == 'D':
return horizon_date + CustomBusinessDay(n=tenor_digit, holidays=asset_holidays)
elif tenor_unit == 'W':
return horizon_date + Day(n=tenor_digit * 7) + CustomBusinessDay(n=0, holidays=asset_holidays)
else:
if tenor_unit == 'Y':
tenor_digit = tenor_digit * 12
horizon_period_end = horizon_date + CustomBusinessMonthEnd(tenor_digit + 1)
horizon_floating = horizon_date + DateOffset(months=tenor_digit)
cbd = CustomBusinessDay(n=1, holidays=asset_holidays)
delivery_date = []
if isinstance(horizon_period_end, pd.Timestamp):
horizon_period_end = [horizon_period_end]
if isinstance(horizon_floating, pd.Timestamp):
horizon_floating = [horizon_floating]
for period_end, floating in zip(horizon_period_end, horizon_floating):
if floating < period_end:
delivery_date.append(floating - cbd + cbd)
else:
delivery_date.append(period_end)
return pd.DatetimeIndex(delivery_date)
def get_expiry_date_from_horizon_date(self, horizon_date, tenor, cal=None, asset_class='fx-vol'):
"""Calculates the expiry date of FX options, based on the horizon date, the tenor and the holiday
calendar associated with the asset.
Uses expiry rules from Iain Clark's FX option pricing book
Parameters
----------
horizon_date : pd.Timestamp (collection)
Horizon date of contract
tenor : str
Tenor of the contract
cal : str
Holiday calendar (usually related to the asset)
asset_class : str
'fx-vol' - FX options (default)
Returns
-------
pd.Timestamp (collection)
"""
if asset_class == 'fx-vol':
tenor_unit = ''.join(re.compile(r'\D+').findall(tenor))
asset_holidays = self.get_holidays(cal=cal)
if tenor_unit == 'ON':
tenor_digit = 1; tenor_unit = 'D'
else:
                tenor_digit = int(''.join(re.compile(r'\d+').findall(tenor)))
if tenor_unit == 'D':
return horizon_date + CustomBusinessDay(n=tenor_digit, holidays=asset_holidays)
elif tenor_unit == 'W':
return horizon_date + Day(n=tenor_digit * 7) + CustomBusinessDay(n=0, holidays=asset_holidays)
else:
horizon_date = self.get_spot_date_from_horizon_date(horizon_date, cal, asset_holidays=asset_holidays)
if tenor_unit == 'M':
pass
elif tenor_unit == 'Y':
tenor_digit = tenor_digit * 12
cbd = CustomBusinessDay(n=1, holidays=asset_holidays)
horizon_period_end = horizon_date + CustomBusinessMonthEnd(tenor_digit + 1)
horizon_floating = horizon_date + DateOffset(months=tenor_digit)
delivery_date = []
if isinstance(horizon_period_end, pd.Timestamp):
horizon_period_end = [horizon_period_end]
if isinstance(horizon_floating, pd.Timestamp):
horizon_floating = [horizon_floating]
# TODO: double check this!
for period_end, floating in zip(horizon_period_end, horizon_floating):
if floating < period_end:
delivery_date.append(floating - cbd + cbd)
else:
delivery_date.append(period_end)
delivery_date = pd.DatetimeIndex(delivery_date)
return self.get_expiry_date_from_delivery_date(delivery_date, cal)
def _get_settlement_T(self, asset):
base = asset[0:3]
terms = asset[3:6]
if base in ['CAD', 'TRY', 'RUB'] or terms in ['CAD', 'TRY', 'RUB']:
return 1
return 2
def get_spot_date_from_horizon_date(self, horizon_date, asset, asset_holidays=None):
base = asset[0:3]
terms = asset[3:6]
settlement_T = self._get_settlement_T(asset)
if asset_holidays is None:
asset_holidays = self.get_holidays(cal=asset)
# First adjustment step
if settlement_T == 2:
if base in ['MXN', 'ARS', 'CLP'] or terms in ['MXN', 'ARS', 'CLP']:
horizon_date = horizon_date + BDay(1)
else:
if base == 'USD':
horizon_date = horizon_date + CustomBusinessDay(holidays=self.get_holidays(cal=terms))
elif terms == 'USD':
horizon_date = horizon_date + CustomBusinessDay(holidays=self.get_holidays(cal=base))
else:
horizon_date = horizon_date + CustomBusinessDay(holidays=asset_holidays)
if 'USD' not in asset:
asset_holidays = self.get_holidays(cal='USD' + asset)
# Second adjustment step - move forward if horizon_date isn't a good business day in base, terms or USD
if settlement_T <= 2:
horizon_date = horizon_date + CustomBusinessDay(holidays=asset_holidays)
return horizon_date
def get_delivery_date_from_spot_date(self, spot_date, cal):
pass
def get_expiry_date_from_delivery_date(self, delivery_date, cal):
base = cal[0:3]
terms = cal[3:6]
if base == 'USD':
cal = terms
elif terms == 'USD':
cal = base
hols = self.get_holidays(cal=cal + 'NYD')
# cbd = CustomBusinessDay(1, holidays=self.get_holidays(cal=cal))
return delivery_date - CustomBusinessDay(self._get_settlement_T(cal), holidays=hols) # - cbd + cbd
def align_to_NY_cut_in_UTC(self, date_time, hour_of_day=10):
tstz = Timezone()
date_time = tstz.localize_index_as_new_york_time(date_time)
date_time.index = date_time.index + timedelta(hours=hour_of_day)
return tstz.convert_index_aware_to_UTC_time(date_time)
def floor_date(self, data_frame):
data_frame.index = data_frame.index.normalize()
return data_frame
def create_bus_day(self, start, end, cal=None):
if cal is None:
return pd.bdate_range(start, end)
        return pd.bdate_range(start, end, freq='C', holidays=self.get_holidays(start_date=start, end_date=end, cal=cal))
def get_bus_day_of_month(self, date, cal='FX', tz=None):
""" Returns the business day of the month (ie. 3rd Jan, on a Monday, would be the 1st business day of the month)
"""
try:
date = date.normalize() # strip times off the dates - for business dates just want dates!
except:
pass
start = pd.to_datetime(datetime.datetime(date.year[0], date.month[0], 1))
end = pd.Timestamp(datetime.datetime.today()) # pd.to_datetime(datetime.datetime(date.year[-1], date.month[-1], date.day[-1]))
holidays = self.get_holidays(start_date=start, end_date=end, cal=cal)
# bday = CustomBusinessDay(holidays=holidays, weekmask='Mon Tue Wed Thu Fri')
holidays = holidays.tz_localize(None).date
bus_dates = pd.bdate_range(start, end)
# Not most efficient way...
bus_dates = pd.to_datetime([x for x in bus_dates if x not in holidays])
month = bus_dates.month
work_day_index = np.zeros(len(bus_dates))
work_day_index[0] = 1
for i in range(1, len(bus_dates)):
if month[i] == month[i - 1]:
work_day_index[i] = work_day_index[i - 1] + 1
else:
work_day_index[i] = 1
bus_day_of_month = work_day_index[bus_dates.searchsorted(date)]
return bus_day_of_month
def set_market_holidays(self, holiday_df):
self._holiday_df = holiday_df
| 34.962389
| 135
| 0.590268
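A small usage sketch for the Calendar class above; the import path and currency pair are assumptions, and the concrete dates depend on the holiday parquet table loaded in the constructor:

import pandas as pd
from findatapy.timeseries import Calendar  # assumed import path

cal = Calendar()
horizon = pd.Timestamp("2020-01-02")
spot = cal.get_spot_date_from_horizon_date(horizon, "EURUSD")                # T+2 settlement
expiry = cal.get_expiry_date_from_horizon_date(horizon, "1M", cal="EURUSD")  # 1-month option expiry
print(spot, expiry)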
|
d12835f684faa75594a91f38c450e12079600a91
| 423
|
py
|
Python
|
DS-400/Easy/171-Excel Sheet Column Number/RighttoLeft.py
|
ericchen12377/Leetcode-Algorithm-Python
|
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
|
[
"MIT"
] | 2
|
2020-04-24T18:36:52.000Z
|
2020-04-25T00:15:57.000Z
|
DS-400/Easy/171-Excel Sheet Column Number/RighttoLeft.py
|
ericchen12377/Leetcode-Algorithm-Python
|
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
|
[
"MIT"
] | null | null | null |
DS-400/Easy/171-Excel Sheet Column Number/RighttoLeft.py
|
ericchen12377/Leetcode-Algorithm-Python
|
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
|
[
"MIT"
] | null | null | null |
class Solution(object):
def titleToNumber(self, s):
"""
:type s: str
:rtype: int
"""
result = 0
# Decimal 65 in ASCII corresponds to char 'A'
alpha_map = {chr(i + 65): i + 1 for i in range(26)}
n = len(s)
for i in range(n):
cur_char = s[n - 1 - i]
result += (alpha_map[cur_char] * (26 ** i))
return result
| 26.4375
| 59
| 0.458629
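The loop above is base-26 evaluation from the rightmost character, with 'A' mapped to 1 rather than 0. Worked example: for "AB" the result is 2 * 26**0 + 1 * 26**1 = 28. A few quick checks:

sol = Solution()
assert sol.titleToNumber("A") == 1
assert sol.titleToNumber("Z") == 26
assert sol.titleToNumber("AB") == 28    # 1*26 + 2
assert sol.titleToNumber("ZY") == 701   # 26*26 + 25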
|
5146c4b1b40023007d5b3c8bb314fd9910be68fa
| 1,504
|
py
|
Python
|
collect/scraping.py
|
opendot/3Deeprinting
|
a2a3ab5ba9abe9af1da9805d0d1b4ab0dfb21971
|
[
"MIT"
] | null | null | null |
collect/scraping.py
|
opendot/3Deeprinting
|
a2a3ab5ba9abe9af1da9805d0d1b4ab0dfb21971
|
[
"MIT"
] | null | null | null |
collect/scraping.py
|
opendot/3Deeprinting
|
a2a3ab5ba9abe9af1da9805d0d1b4ab0dfb21971
|
[
"MIT"
] | null | null | null |
from lxml import html
import requests
import os
import time
# starts here
if __name__ == '__main__':
queries = [
'vase',
'vases',
'pot',
]
pages = 10
for q in queries:
for p in range(pages):
            print('Query', q, 'page', p)
searchUrl = 'http://www.thingiverse.com/search/page:' + str(p) + '?sort=makes&q=' + q + '&type=things'
searchPage = requests.get(searchUrl)
tree = html.fromstring(searchPage.content)
# get item links
objectURLs = tree.xpath('//a[@class="thing-img-wrapper"]/@href')
for objectURL in objectURLs:
fullObjectUrl = 'http://www.thingiverse.com' + objectURL
objPage = requests.get(fullObjectUrl)
objTree = html.fromstring(objPage.content)
                # print(objPage.text)
                print('\t\t Page', fullObjectUrl)
downloadURL = objTree.xpath('//a[@class="track thing-file-download-link"]/@href')
if len(downloadURL) > 0:
downloadURL = downloadURL[0] # only first (they are ordered by downloads)
fullDownloadURL = 'http://www.thingiverse.com/' + downloadURL
                    print('\t\t Download', fullDownloadURL)
fileName = 'data/' + downloadURL + '_' + str( int(time.time()) )
os.system('wget -O ' + fileName + ' ' + fullDownloadURL)
time.sleep(2)
| 33.422222
| 115
| 0.533245
|
04f3b3decefde186d40aaa7d7dcee98be0c92c2b
| 5,380
|
py
|
Python
|
capital_gains_calculator/read_xml.py
|
jemrobinson/capital-gains-calculator
|
2ef3c7c786c1e2180e81f5b87194b953dd0ed17f
|
[
"MIT"
] | null | null | null |
capital_gains_calculator/read_xml.py
|
jemrobinson/capital-gains-calculator
|
2ef3c7c786c1e2180e81f5b87194b953dd0ed17f
|
[
"MIT"
] | 2
|
2021-12-31T12:48:47.000Z
|
2022-01-10T17:24:57.000Z
|
capital_gains_calculator/read_xml.py
|
jemrobinson/capital-gains-calculator
|
2ef3c7c786c1e2180e81f5b87194b953dd0ed17f
|
[
"MIT"
] | null | null | null |
"""Utility functions for reading PortfolioPerformance XML files"""
# Standard library imports
from decimal import Decimal
import re
import xml.etree.ElementTree as ET
# Third party imports
import pandas as pd
def flatten(element_lists):
"""Return all elements from a list of lists"""
for element_list in element_lists:
for element in element_list:
yield element
def get_accounts(root):
"""Get accounts"""
accounts = []
for account in (
root.findall("*//account[uuid]")
+ root.findall("*//accountFrom[uuid]")
+ root.findall("*//accountTo[uuid]")
):
name = get_first(account, "name")
uuid = get_first(account, "uuid")
accounts.append({"id": name, "uuid": uuid})
return pd.DataFrame(accounts).drop_duplicates()
def get_first(node, match):
"""Get the full text from the first node containing the requested string"""
objects = node.findall(match)
if not objects:
return None
    return objects[0].text
def get_securities(root):
"""Get securities"""
securities = []
for security in flatten(root.findall("securities")):
name = get_first(security, "name")
uuid = get_first(security, "uuid")
isin = get_first(security, "isin")
ticker_symbol = get_first(security, "tickerSymbol")
currency_code = get_first(security, "currencyCode")
note = get_first(security, "note")
securities.append(
{
"id": name,
"uuid": uuid,
"ISIN": isin,
"Symbol": ticker_symbol,
"currencyCode": currency_code,
"note": note,
}
)
return pd.DataFrame(securities).drop_duplicates()
def get_transactions(root, account_id, df_securities):
"""Get transactions"""
transactions = []
for transaction in (
root.findall(
f"*//account[name='{account_id}']/transactions/account-transaction"
)
+ root.findall(
f"*//accountFrom[name='{account_id}']/transactions/account-transaction"
)
+ root.findall(
f"*//accountTo[name='{account_id}']/transactions/account-transaction"
)
+ root.findall(
f"*//portfolio[name='{account_id}']/transactions/portfolio-transaction"
)
):
try:
date = get_first(transaction, "date")
shares = Decimal(get_first(transaction, "shares")) / 100000000
type_ = get_first(transaction, "type")
security_id = ref2name(transaction, df_securities)
fees, taxes = 0, 0
for charge in transaction.findall("./units/unit"):
if charge.attrib["type"] == "FEE":
fees += (
Decimal(
[c for c in charge if c.tag == "amount"][0].attrib["amount"]
)
/ 100
)
if charge.attrib["type"] == "TAX":
taxes += (
Decimal(
[c for c in charge if c.tag == "amount"][0].attrib["amount"]
)
/ 100
)
total = (
Decimal(get_first(transaction, "amount")) / 100
) # this includes fees and taxes
if type_ == "SELL":
total += fees + taxes
else:
total -= fees + taxes
note = get_first(transaction, "note") or ""
if security_id:
transactions.append(
{
"Date": date,
"Type": type_,
"Security": security_id,
"Shares": shares,
"Amount": abs(total),
"Fees": abs(fees),
"Taxes": abs(taxes),
"Cash Account": account_id,
"Note": note,
}
)
except TypeError:
continue
return pd.DataFrame(transactions).drop_duplicates()
def read_xml(file_name):
"""Read a PortfolioPerformance XML file into a Pandas dataframe"""
# Read all XML entries with a valid symbol and security
tree = ET.parse(file_name)
root = tree.getroot()
# Read securities, accounts and transactions and set datatypes
df_securities = get_securities(root)
df_accounts = get_accounts(root)
df_transactions = pd.concat(
[
get_transactions(root, account_name, df_securities)
for account_name in df_accounts["id"].unique()
]
)
# Merge transactions with securities, dropping invalid rows
df_all = pd.merge(
df_transactions, df_securities, how="outer", left_on="Security", right_on="id"
)
return df_all
def ref2name(transaction, df_securities):
"""Find the security name corresponding to a given reference"""
try:
reference = transaction.findall("security")[0].attrib["reference"]
regex_ = r".*/security\[(\d+)\]"
index = int(re.search(regex_, reference, re.IGNORECASE).group(1)) - 1
return df_securities.iloc[index]["id"]
except (IndexError, AttributeError):
return None
| 33.625
| 88
| 0.538662
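A usage sketch for the module above; the file name is hypothetical, and the result is one row per transaction merged with its security metadata:

from capital_gains_calculator.read_xml import read_xml  # assumed import path

df = read_xml("portfolio.xml")    # hypothetical PortfolioPerformance export
sells = df[df["Type"] == "SELL"]  # "SELL" is the type the fee/tax logic above special-cases
print(sells[["Date", "Security", "Shares", "Amount", "Fees", "Taxes"]])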
|
9c1eb1b6bbb6cd724ddc8f2463c01a794f60b5f0
| 474
|
py
|
Python
|
1ere/tkter/Ex1.py
|
gb85150/NSI
|
32c23f531d338493804daa0f19915b5afa6a8012
|
[
"MIT"
] | 4
|
2020-09-14T14:16:30.000Z
|
2021-09-09T18:06:57.000Z
|
1ere/tkter/Ex1.py
|
gb85150/NSI
|
32c23f531d338493804daa0f19915b5afa6a8012
|
[
"MIT"
] | 2
|
2020-09-14T07:28:09.000Z
|
2020-09-19T10:25:42.000Z
|
1ere/tkter/Ex1.py
|
gb85150/NSI
|
32c23f531d338493804daa0f19915b5afa6a8012
|
[
"MIT"
] | 1
|
2021-09-10T12:16:50.000Z
|
2021-09-10T12:16:50.000Z
|
import tkinter as tk
def add():
"""
Ajoute 1 à une variable globale [count]
:return: La variable count est incrémentée de 1
"""
global count
count += 1
return count
count = -1
main_window = tk.Tk()
main_window.title("Mon super compteur")
main_window.geometry("200x200")
click_button = tk.Button(main_window, text='+1', command=add)  # pass the callback itself; add() would fire once at construction
click_button.pack()
show_count = tk.Label(main_window, text=count)
show_count.pack()
main_window.mainloop()
| 20.608696
| 63
| 0.698312
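One caveat about the program above: the label captures count once at construction time, so even with the callback wired correctly the displayed number never refreshes. A minimal variant using an IntVar keeps the display in sync:

import tkinter as tk

root = tk.Tk()
count = tk.IntVar(value=0)
tk.Button(root, text="+1", command=lambda: count.set(count.get() + 1)).pack()
tk.Label(root, textvariable=count).pack()  # re-renders whenever count changes
root.mainloop()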
|
b9e6e148a378c07c48c5fcf38ca2a4dfad461841
| 2,424
|
py
|
Python
|
daemon/examples/myemane/examplemodel.py
|
CleberPeter/core
|
b02fd4ccac3c9f13b4885eaa6aed64395d74eb69
|
[
"BSD-2-Clause"
] | 463
|
2015-07-28T16:28:12.000Z
|
2022-03-31T16:11:29.000Z
|
daemon/examples/myemane/examplemodel.py
|
CleberPeter/core
|
b02fd4ccac3c9f13b4885eaa6aed64395d74eb69
|
[
"BSD-2-Clause"
] | 392
|
2015-08-07T09:14:40.000Z
|
2022-03-31T18:12:28.000Z
|
daemon/examples/myemane/examplemodel.py
|
CleberPeter/core
|
b02fd4ccac3c9f13b4885eaa6aed64395d74eb69
|
[
"BSD-2-Clause"
] | 163
|
2015-07-30T15:54:03.000Z
|
2022-03-21T22:54:03.000Z
|
"""
Example custom emane model.
"""
from typing import Dict, List, Optional, Set
from core.config import Configuration
from core.emane import emanemanifest, emanemodel
class ExampleModel(emanemodel.EmaneModel):
"""
Custom emane model.
:cvar name: defines the emane model name that will show up in the GUI
Mac Definition:
    :cvar mac_library: defines the mac library that the model will reference
:cvar mac_xml: defines the mac manifest file that will be parsed to obtain configuration options,
that will be displayed within the GUI
:cvar mac_defaults: allows you to override options that are maintained within the manifest file above
:cvar mac_config: parses the manifest file and converts configurations into core supported formats
Phy Definition:
NOTE: phy configuration will default to the universal model as seen below and the below section does not
have to be included
    :cvar phy_library: defines the phy library that the model will reference, used if you need to
provide a custom phy
:cvar phy_xml: defines the phy manifest file that will be parsed to obtain configuration options,
that will be displayed within the GUI
:cvar phy_defaults: allows you to override options that are maintained within the manifest file above
or for the default universal model
:cvar phy_config: parses the manifest file and converts configurations into core supported formats
Custom Override Options:
NOTE: these options default to what's seen below and do not have to be included
:cvar config_ignore: allows you to ignore options within phy/mac, used typically if you needed to add
a custom option for display within the gui
"""
name: str = "emane_example"
mac_library: str = "rfpipemaclayer"
mac_xml: str = "/usr/share/emane/manifest/rfpipemaclayer.xml"
mac_defaults: Dict[str, str] = {
"pcrcurveuri": "/usr/share/emane/xml/models/mac/rfpipe/rfpipepcr.xml"
}
mac_config: List[Configuration] = emanemanifest.parse(mac_xml, mac_defaults)
phy_library: Optional[str] = None
phy_xml: str = "/usr/share/emane/manifest/emanephy.xml"
phy_defaults: Dict[str, str] = {
"subid": "1",
"propagationmodel": "2ray",
"noisemode": "none",
}
phy_config: List[Configuration] = emanemanifest.parse(phy_xml, phy_defaults)
config_ignore: Set[str] = set()
| 43.285714
| 108
| 0.72731
|
849af18545cad2c85cd1b9eb66affc437c308983
| 1,572
|
py
|
Python
|
var/spack/repos/builtin/packages/scala/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11
|
2015-10-04T02:17:46.000Z
|
2018-02-07T18:23:00.000Z
|
var/spack/repos/builtin/packages/scala/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22
|
2017-08-01T22:45:10.000Z
|
2022-03-10T07:46:31.000Z
|
var/spack/repos/builtin/packages/scala/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4
|
2016-06-10T17:57:39.000Z
|
2018-09-11T04:59:38.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Scala(Package):
"""Scala is a general-purpose programming language providing support for
    functional programming and a strong static type system. Designed to be
    concise, many of Scala's design decisions were intended to address
    criticisms of Java.
"""
homepage = "https://www.scala-lang.org/"
url = "https://downloads.lightbend.com/scala/2.12.1/scala-2.12.1.tgz"
version('2.13.1', sha256='6918ccc494e34810a7254ad2c4e6f0e1183784c22e7b4801b7dbc8d1994a04db')
version('2.12.10', sha256='3b12bda3300fedd91f64fc7f9165fd45c58328b1b760af24ca6ffe92e3b0656a')
version('2.12.6', sha256='1ac7444c5a85ed1ea45db4a268ee9ea43adf80e7f5724222863afb5492883416')
version('2.12.5', sha256='b261ffe9a495b12e9dda2ed37331e579547e4d1b8b5810161b6c3b39ac806aa1')
version('2.12.1', sha256='4db068884532a3e27010df17befaca0f06ea50f69433d58e06a5e63c7a3cc359')
version('2.11.11', sha256='12037ca64c68468e717e950f47fc77d5ceae5e74e3bdca56f6d02fd5bfd6900b')
version('2.10.6', sha256='54adf583dae6734d66328cafa26d9fa03b8c4cf607e27b9f3915f96e9bcd2d67')
depends_on('java')
def install(self, spec, prefix):
def install_dir(dirname):
install_tree(dirname, join_path(prefix, dirname))
install_dir('bin')
install_dir('lib')
install_dir('doc')
install_dir('man')
| 40.307692
| 97
| 0.751272
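With this package recipe on Spack's builtin repo path, any of the pinned versions can be requested from the command line, for example:

spack install scala@2.12.10
spack load scala   # puts the launchers from bin/ on PATH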
|
9085f17b3a82ae6f51d7e161d6983156450efbf4
| 572
|
py
|
Python
|
skills/media.py
|
ZhaoHouLin/python-line-bot-lesson
|
3cde7273efc70b45ab68bf5124670449c0d5c12c
|
[
"MIT"
] | null | null | null |
skills/media.py
|
ZhaoHouLin/python-line-bot-lesson
|
3cde7273efc70b45ab68bf5124670449c0d5c12c
|
[
"MIT"
] | null | null | null |
skills/media.py
|
ZhaoHouLin/python-line-bot-lesson
|
3cde7273efc70b45ab68bf5124670449c0d5c12c
|
[
"MIT"
] | null | null | null |
from linebot.models import AudioSendMessage, VideoSendMessage
from models.message_request import MessageRequest
from skills import add_skill
@add_skill('{media}')
def get(message_request: MessageRequest):
audio = AudioSendMessage(
original_content_url='https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3',
duration=60000
)
video = VideoSendMessage(
original_content_url='https://i.imgur.com/n8QsXTk.mp4',
preview_image_url='https://i.imgur.com/oLvTjtu.png'
)
return [
audio,
video
]
| 23.833333
| 93
| 0.699301
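A hedged sketch of how a skill's returned message list is typically sent back with line-bot-sdk; the credentials are placeholders, and event stands for the incoming webhook event that carries the reply token:

from linebot import LineBotApi

line_bot_api = LineBotApi("<channel access token>")  # hypothetical credentials

def handle(event, message_request):  # event: incoming webhook event
    messages = get(message_request)  # the skill above returns [audio, video]
    line_bot_api.reply_message(event.reply_token, messages)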
|
ff1906f33ca949a5a87d84143ac58c825ae354ed
| 961
|
py
|
Python
|
workflow/scripts/parse_HLA_types.py
|
snakemake-workflows/dna-seq-neoantigen-prediction
|
77a5256983a7bb1b63c1b25e7c5eb7f21974ba73
|
[
"MIT"
] | 6
|
2020-09-15T02:32:08.000Z
|
2021-11-16T08:47:27.000Z
|
workflow/scripts/parse_HLA_types.py
|
jafors/Neoantigen_Prediction
|
77a5256983a7bb1b63c1b25e7c5eb7f21974ba73
|
[
"MIT"
] | 1
|
2022-01-11T12:39:15.000Z
|
2022-01-11T12:39:15.000Z
|
workflow/scripts/parse_HLA_types.py
|
jafors/Neoantigen_Prediction
|
77a5256983a7bb1b63c1b25e7c5eb7f21974ba73
|
[
"MIT"
] | 4
|
2020-11-03T04:16:39.000Z
|
2021-11-16T08:47:32.000Z
|
import pandas as pd
hlaI = ["A","B","C"]
hlaII = ["DRB1", "DPA1", "DPB1", "DQA1", "DQB1"]
hlas = pd.read_csv(snakemake.input[0], sep='\t')
hlasI = hlas[hlas.Locus.isin(hlaI)]
hlasI["Allele"]="HLA-" + hlasI.Allele.str.split(":", expand=True)[[0,1]].apply(lambda x: ''.join(x), axis=1).str.replace('*','')
hlasI = hlasI[["Allele"]].drop_duplicates()
hlasI.to_csv(snakemake.output[0], sep='\t', index=False)
hlasII = hlas[hlas.Locus.isin(hlaII)]
hlasII["HLA"] = hlasII.Locus.str[0:2]
hlasII["Allele"] = hlasII.Allele.str.split(":", expand=True)[[0,1]].apply(lambda x: ''.join(x), axis=1).str.replace('*','')
hlasII = pd.DataFrame("HLA-" + hlasII.groupby(["HLA","Chromosome"])["Allele"].apply(lambda x: "-".join(x)).reset_index()["Allele"]).drop_duplicates()
hlasII.loc[hlasII.Allele.str.contains("DRB"), "Allele"] = hlasII[hlasII.Allele.str.contains("DRB")]["Allele"].str.replace("HLA-DRB1","DRB1_")
hlasII.to_csv(snakemake.output[1], sep='\t', index=False)
| 45.761905
| 149
| 0.654527
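Worked example of the class-I transform above: a row with Locus "A" and Allele "A*02:101:01" keeps only the first two colon-separated fields, drops the "*", and gains the "HLA-" prefix (the allele string is an illustrative value in the format this script expects):

import pandas as pd

row = pd.DataFrame({"Locus": ["A"], "Allele": ["A*02:101:01"]})
out = "HLA-" + row.Allele.str.split(":", expand=True)[[0, 1]].apply(lambda x: ''.join(x), axis=1).str.replace('*', '', regex=False)
print(out[0])  # -> HLA-A02101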
|
88cc46395966a33c3f4bfb8d96761735fd5fe283
| 181
|
py
|
Python
|
lino_book/projects/team/asgi.py
|
khchine5/book
|
b6272d33d49d12335d25cf0a2660f7996680b1d1
|
[
"BSD-2-Clause"
] | 1
|
2018-01-12T14:09:58.000Z
|
2018-01-12T14:09:58.000Z
|
lino_book/projects/team/asgi.py
|
khchine5/book
|
b6272d33d49d12335d25cf0a2660f7996680b1d1
|
[
"BSD-2-Clause"
] | 4
|
2018-02-06T19:53:10.000Z
|
2019-08-01T21:47:44.000Z
|
lino_book/projects/team/asgi.py
|
khchine5/book
|
b6272d33d49d12335d25cf0a2660f7996680b1d1
|
[
"BSD-2-Clause"
] | null | null | null |
import os
from channels.asgi import get_channel_layer
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lino_book.projects.team.settings.demo")
channel_layer = get_channel_layer()
| 25.857143
| 88
| 0.834254
|
833dde95e16a1d9f26c1c241668b9a202435cb22
| 6,299
|
py
|
Python
|
parsr/migrations/0037_auto__add_field_file_package.py
|
frontendphil/analyzr
|
bb177e16039e02aac1bd72e6fd7cc6d2805d95cf
|
[
"MIT"
] | 4
|
2015-05-02T12:19:05.000Z
|
2019-09-20T13:21:13.000Z
|
parsr/migrations/0037_auto__add_field_file_package.py
|
frontendphil/analyzr
|
bb177e16039e02aac1bd72e6fd7cc6d2805d95cf
|
[
"MIT"
] | null | null | null |
parsr/migrations/0037_auto__add_field_file_package.py
|
frontendphil/analyzr
|
bb177e16039e02aac1bd72e6fd7cc6d2805d95cf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'File.package'
db.add_column(u'parsr_file', 'package',
self.gf('django.db.models.fields.CharField')(default='', max_length=255),
keep_default=False)
def backwards(self, orm):
# Deleting field 'File.package'
db.delete_column(u'parsr_file', 'package')
models = {
u'parsr.author': {
'Meta': {'object_name': 'Author'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'parsr.branch': {
'Meta': {'object_name': 'Branch'},
'analyzed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'analyzed_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'analyzing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'measured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'measuring': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.Repo']", 'null': 'True'})
},
u'parsr.file': {
'Meta': {'object_name': 'File'},
'change_type': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'copy_of': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.File']", 'null': 'True'}),
'cyclomatic_complexity': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'cyclomatic_complexity_delta': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_difficulty': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_difficulty_delta': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_effort': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_effort_delta': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_volume': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'halstead_volume_delta': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines_added': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lines_removed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'package': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'revision': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.Revision']"})
},
u'parsr.repo': {
'Meta': {'object_name': 'Repo'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timezone': ('timezone_field.fields.TimeZoneField', [], {'default': "'Europe/Berlin'"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'parsr.revision': {
'Meta': {'object_name': 'Revision'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.Author']", 'null': 'True', 'blank': 'True'}),
'branch': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.Branch']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'next': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'previous'", 'null': 'True', 'to': u"orm['parsr.Revision']"}),
'revision_date': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['parsr.RevisionDate']", 'null': 'True', 'blank': 'True'})
},
u'parsr.revisiondate': {
'Meta': {'object_name': 'RevisionDate'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'day': ('django.db.models.fields.IntegerField', [], {}),
'hour': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'minute': ('django.db.models.fields.IntegerField', [], {}),
'month': ('django.db.models.fields.IntegerField', [], {}),
'weekday': ('django.db.models.fields.IntegerField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['parsr']
| 67.731183
| 151
| 0.555803
|
8324a84f5172b54a9b6a297e3b09f23588f15b5a
| 2,278
|
py
|
Python
|
ona/db.py
|
k4g4/ona.py
|
1c6f2b4dd3d85c267302d507db5f73680034428c
|
[
"MIT"
] | 1
|
2019-05-12T18:58:32.000Z
|
2019-05-12T18:58:32.000Z
|
ona/db.py
|
k4g4/ona.py
|
1c6f2b4dd3d85c267302d507db5f73680034428c
|
[
"MIT"
] | 5
|
2021-03-18T23:01:01.000Z
|
2022-03-11T23:49:11.000Z
|
ona/db.py
|
k4g4/ona.py
|
1c6f2b4dd3d85c267302d507db5f73680034428c
|
[
"MIT"
] | 1
|
2019-08-05T02:05:10.000Z
|
2019-08-05T02:05:10.000Z
|
from pymongo import MongoClient, ReturnDocument
from cachetools import LRUCache
from contextlib import contextmanager
class OnaDocument(dict):
'''This class represents a generic MongoDB document.'''
__getattr__ = dict.get
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class OnaDB:
'''Database interactions are handled here.'''
def __init__(self, host, port, db, collection, template, db_cache_size):
self.client = MongoClient(host, port)
self.collection = self.client[db][collection]
self.template = template
self.doc_cache = LRUCache(db_cache_size)
def get_doc(self, snowflake):
# Default to 0 if the snowflake doesn't exist (i.e. ctx.guild in a PrivateChannel)
_id = snowflake.id if hasattr(snowflake, "id") else 0
if _id in self.doc_cache:
doc = self.doc_cache[_id]
else:
doc = OnaDocument(self.collection.find_one_and_update({"_id": _id}, {"$setOnInsert": self.template},
upsert=True, return_document=ReturnDocument.AFTER))
self.doc_cache[_id] = doc
if not doc.keys() >= self.template.keys(): # Basically, "the doc does not have every key in the template"
            for key, value in self.template.items():  # Fill up missing keys
                doc.setdefault(key, value)
self.update_doc(doc)
return doc
def update_doc(self, doc): # This method should not be called outside OnaDB, use doc_context instead
if not self.collection.replace_one({"_id": doc["_id"]}, doc).matched_count:
self.collection.insert_one({"_id": doc["_id"]})
@contextmanager
def doc_context(self, snowflake):
'''Incorporate get_doc and update_doc as a single contextmanager.'''
doc = self.get_doc(snowflake)
yield doc
self.update_doc(doc)
def setup(ona):
ona.guild_db = OnaDB(ona.secrets.host, ona.secrets.port, ona.config.db,
ona.config.guild_db, ona.guild_doc.to_dict(), ona.config.db_cache_size)
ona.user_db = OnaDB(ona.secrets.host, ona.secrets.port, ona.config.db,
ona.config.user_db, ona.user_doc.to_dict(), ona.config.db_cache_size)
| 42.185185
| 117
| 0.647498
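Usage sketch for the doc_context manager above; the command context and field name are illustrative, and any mutation made inside the with-block is written back when it exits:

# inside a bot command handler, with `ona` already set up
with ona.user_db.doc_context(ctx.author) as doc:
    doc["xp"] = doc.get("xp", 0) + 10  # hypothetical per-user field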
|
4446dc6483296d9566685493eaf9e0b1fc20a11f
| 12,695
|
py
|
Python
|
lib/rucio/tests/test_rse_expression_parser.py
|
TeAmP0is0N/rucio
|
45c1b83f8e1514953a41fd076b4e651dd564c39f
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/tests/test_rse_expression_parser.py
|
TeAmP0is0N/rucio
|
45c1b83f8e1514953a41fd076b4e651dd564c39f
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/tests/test_rse_expression_parser.py
|
TeAmP0is0N/rucio
|
45c1b83f8e1514953a41fd076b4e651dd564c39f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Martin Barisits, <martin.barisits@cern.ch>, 2013-2017
# - Hannes Hansen, <hannes.jakob.hansen@cern.ch>, 2019
# - Andrew Lister, <andrew.lister@stfc.ac.uk>, 2019
# - Patrick Austin, <patrick.austin@stfc.ac.uk>, 2020
from random import choice
from string import ascii_uppercase, digits, ascii_lowercase
from nose.tools import assert_equal, raises, assert_raises
from rucio.common.config import config_get, config_get_bool
from rucio.core import rse
from rucio.core import rse_expression_parser
from rucio.client.rseclient import RSEClient
from rucio.common.exception import InvalidRSEExpression, RSEBlacklisted
def rse_name_generator(size=10):
return 'MOCK_' + ''.join(choice(ascii_uppercase) for x in range(size))
def tag_generator(size_s=10, size_d=2):
return ''.join(choice(ascii_uppercase) for x in range(size_s)).join(choice(digits) for x in range(size_d))
def attribute_name_generator(size=10):
return ''.join(choice(ascii_uppercase)).join(choice(ascii_lowercase) for x in range(size - 1))
class TestRSEExpressionParserCore(object):
def __init__(self):
if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
self.filter = {'filter': self.vo}
else:
self.vo = {}
self.filter = {'filter': {'vo': 'def'}}
self.rse1 = rse_name_generator()
self.rse2 = rse_name_generator()
self.rse3 = rse_name_generator()
self.rse4 = rse_name_generator()
self.rse5 = rse_name_generator()
self.rse1_id = rse.add_rse(self.rse1, **self.vo)
self.rse2_id = rse.add_rse(self.rse2, **self.vo)
self.rse3_id = rse.add_rse(self.rse3, **self.vo)
self.rse4_id = rse.add_rse(self.rse4, **self.vo)
self.rse5_id = rse.add_rse(self.rse5, **self.vo)
# Add Attributes
self.attribute = attribute_name_generator()
rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
rse.add_rse_attribute(self.rse5_id, self.attribute, "us")
# Add numeric Attributes
self.attribute_numeric = attribute_name_generator()
rse.add_rse_attribute(self.rse1_id, self.attribute_numeric, 10)
rse.add_rse_attribute(self.rse2_id, self.attribute_numeric, 20)
rse.add_rse_attribute(self.rse3_id, self.attribute_numeric, 30)
rse.add_rse_attribute(self.rse4_id, self.attribute_numeric, 40)
rse.add_rse_attribute(self.rse5_id, self.attribute_numeric, 50)
# Add Tags
self.tag1 = tag_generator()
self.tag2 = tag_generator()
rse.add_rse_attribute(self.rse1_id, self.tag1, True)
rse.add_rse_attribute(self.rse2_id, self.tag1, True)
rse.add_rse_attribute(self.rse3_id, self.tag1, True)
rse.add_rse_attribute(self.rse4_id, self.tag2, True)
rse.add_rse_attribute(self.rse5_id, self.tag2, True)
@raises(InvalidRSEExpression)
def test_unconnected_operator(self):
""" RSE_EXPRESSION_PARSER (CORE) Test invalid rse expression: unconnected operator"""
rse_expression_parser.parse_expression("TEST_RSE1|", **self.filter)
@raises(InvalidRSEExpression)
    def test_wrong_parentheses(self):
        """ RSE_EXPRESSION_PARSER (CORE) Test invalid rse expression: wrong parentheses """
rse_expression_parser.parse_expression("TEST_RSE1)", **self.filter)
@raises(InvalidRSEExpression)
def test_unknown_rse(self):
""" RSE_EXPRESSION_PARSER (CORE) Test unknown RSE """
rse_expression_parser.parse_expression("TEST_RSE999", **self.filter)
def test_simple_rse_reference(self):
""" RSE_EXPRESSION_PARSER (CORE) Test simple RSE reference """
assert_equal([t_rse['id'] for t_rse in rse_expression_parser.parse_expression(self.rse1, **self.filter)], [self.rse1_id])
def test_attribute_reference(self):
""" RSE_EXPRESSION_PARSER (CORE) Test simple RSE attribute reference """
assert_equal([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s=uk" % self.attribute, **self.filter)], [self.rse4_id])
def test_all_rse(self):
""" RSE_EXPRESSION_PARSER (CORE) Test reference on all RSE """
all_rses = rse.list_rses(filters=self.filter['filter'])
assert_equal(sorted(rse_expression_parser.parse_expression("*", **self.filter), key=lambda rse: rse['rse']), sorted(all_rses, key=lambda rse: rse['rse']))
def test_tag_reference(self):
""" RSE_EXPRESSION_PARSER (CORE) Test simple RSE tag reference """
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression(self.tag1, **self.filter)]), sorted([self.rse1_id, self.rse2_id, self.rse3_id]))
    def test_parentheses(self):
        """ RSE_EXPRESSION_PARSER (CORE) Test parentheses """
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("(%s)" % self.tag1, **self.filter)]), sorted([self.rse1_id, self.rse2_id, self.rse3_id]))
def test_union(self):
""" RSE_EXPRESSION_PARSER (CORE) Test union operator """
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s|%s" % (self.tag1, self.tag2), **self.filter)]), sorted([self.rse1_id, self.rse2_id, self.rse3_id, self.rse4_id, self.rse5_id]))
def test_complement(self):
""" RSE_EXPRESSION_PARSER (CORE) Test complement operator """
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s\\%s" % (self.tag1, self.rse3), **self.filter)]), sorted([self.rse1_id, self.rse2_id]))
def test_intersect(self):
""" RSE_EXPRESSION_PARSER (CORE) Test intersect operator """
assert_equal([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s&%s=uk" % (self.tag2, self.attribute), **self.filter)], [self.rse4_id])
def test_order_of_operations(self):
""" RSE_EXPRESSION_PARSER (CORE) Test order of operations """
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s\\%s|%s=fr" % (self.tag1, self.rse3, self.attribute), **self.filter)]), sorted([self.rse1_id, self.rse2_id, self.rse3_id]))
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s\\(%s|%s=fr)" % (self.tag1, self.rse3, self.attribute), **self.filter)]), sorted([self.rse1_id, self.rse2_id]))
def test_complicated_expression_1(self):
""" RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 1"""
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("(%s|%s)\\%s|%s&%s" % (self.tag1, self.tag2, self.tag2, self.tag2, self.tag1), **self.filter)]), sorted([self.rse1_id, self.rse2_id, self.rse3_id]))
def test_complicated_expression_2(self):
""" RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 2"""
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("(((((%s))))|%s=us)&%s|(%s=at|%s=de)" % (self.tag1, self.attribute, self.tag2, self.attribute, self.attribute), **self.filter)]),
sorted([self.rse1_id, self.rse2_id, self.rse5_id]))
def test_complicated_expression_3(self):
""" RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 3"""
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("(*)&%s=at" % self.attribute, **self.filter)]), sorted([self.rse1_id]))
def test_list_on_availability(self):
""" RSE_EXPRESSION_PARSER (CORE) List rses based on availability filter"""
rsewrite_name = rse_name_generator()
rsenowrite_name = rse_name_generator()
rsewrite_id = rse.add_rse(rsewrite_name, **self.vo)
rsenowrite_id = rse.add_rse(rsenowrite_name, **self.vo)
attribute = attribute_name_generator()
rse.add_rse_attribute(rsewrite_id, attribute, "de")
rse.add_rse_attribute(rsenowrite_id, attribute, "de")
rse.update_rse(rsewrite_id, {'availability_write': True})
rse.update_rse(rsenowrite_id, {'availability_write': False})
assert_equal(sorted([item['id'] for item in rse_expression_parser.parse_expression("%s=de" % attribute, **self.filter)]),
sorted([rsewrite_id, rsenowrite_id]))
filters = self.filter
filters['availability_write'] = True
assert_equal(sorted([item['id'] for item in rse_expression_parser.parse_expression("%s=de" % attribute, filters)]),
sorted([rsewrite_id]))
filters['availability_write'] = False
assert_raises(RSEBlacklisted, rse_expression_parser.parse_expression, "%s=de" % attribute, filters)
def test_numeric_operators(self):
""" RSE_EXPRESSION_PARSER (CORE) Test RSE attributes with numeric operations """
assert_equal([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s<11" % self.attribute_numeric, **self.filter)], [self.rse1_id])
assert_raises(InvalidRSEExpression, rse_expression_parser.parse_expression, "%s<9" % self.attribute_numeric, **self.filter)
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s<21" % self.attribute_numeric, **self.filter)]), sorted([self.rse1_id, self.rse2_id]))
assert_equal([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s>49" % self.attribute_numeric, **self.filter)], [self.rse5_id])
assert_raises(InvalidRSEExpression, rse_expression_parser.parse_expression, "%s>51" % self.attribute_numeric, **self.filter)
assert_equal(sorted([t_rse['id'] for t_rse in rse_expression_parser.parse_expression("%s>30" % self.attribute_numeric, **self.filter)]), sorted([self.rse4_id, self.rse5_id]))
class TestRSEExpressionParserClient(object):
def __init__(self):
if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
self.vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
else:
self.vo = {}
self.rse1 = rse_name_generator()
self.rse2 = rse_name_generator()
self.rse3 = rse_name_generator()
self.rse4 = rse_name_generator()
self.rse5 = rse_name_generator()
self.rse1_id = rse.add_rse(self.rse1, **self.vo)
self.rse2_id = rse.add_rse(self.rse2, **self.vo)
self.rse3_id = rse.add_rse(self.rse3, **self.vo)
self.rse4_id = rse.add_rse(self.rse4, **self.vo)
self.rse5_id = rse.add_rse(self.rse5, **self.vo)
# Add Attributes
self.attribute = attribute_name_generator()
rse.add_rse_attribute(self.rse1_id, self.attribute, "at")
rse.add_rse_attribute(self.rse2_id, self.attribute, "de")
rse.add_rse_attribute(self.rse3_id, self.attribute, "fr")
rse.add_rse_attribute(self.rse4_id, self.attribute, "uk")
rse.add_rse_attribute(self.rse5_id, self.attribute, "us")
# Add Tags
self.tag1 = tag_generator()
self.tag2 = tag_generator()
rse.add_rse_attribute(self.rse1_id, self.tag1, True)
rse.add_rse_attribute(self.rse2_id, self.tag1, True)
rse.add_rse_attribute(self.rse3_id, self.tag1, True)
rse.add_rse_attribute(self.rse4_id, self.tag2, True)
rse.add_rse_attribute(self.rse5_id, self.tag2, True)
self.rse_client = RSEClient()
def test_complicated_expression(self):
""" RSE_EXPRESSION_PARSER (CLIENT) Test some complicated expression"""
rses = [item['rse'] for item in self.rse_client.list_rses("(((((%s))))|%s=us)&%s|(%s=at|%s=de)" % (self.tag1, self.attribute, self.tag2, self.attribute, self.attribute))]
assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse5]))
def test_complicated_expression_1(self):
""" RSE_EXPRESSION_PARSER (CORE) Test some complicated expression 1"""
rses = [item['rse'] for item in self.rse_client.list_rses("(%s|%s)\\%s|%s&%s" % (self.tag1, self.tag2, self.tag2, self.tag2, self.tag1))]
assert_equal(sorted(rses), sorted([self.rse1, self.rse2, self.rse3]))
| 53.117155
| 241
| 0.69232
|
6754600f6077a162028bd1390f0cd8888a1478f1
| 1,133
|
py
|
Python
|
lib/candy_editor/core/__init__.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 1
|
2021-11-06T14:38:37.000Z
|
2021-11-06T14:38:37.000Z
|
lib/candy_editor/core/__init__.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 5
|
2021-11-06T04:23:06.000Z
|
2022-03-12T01:03:25.000Z
|
lib/candy_editor/core/__init__.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 1
|
2021-11-07T05:19:51.000Z
|
2021-11-07T05:19:51.000Z
|
import logging
# loggingLevel = logging.WARNING
# loggingLevel = logging.INFO
# loggingLevel = logging.DEBUG
##----------------------------------------------------------------##
from . import signals
##----------------------------------------------------------------##
from .helpers import *
from .model import *
from .res import ResGuard
from .tool import ToolBase, startupTool
from .project import Project
from .asset import AssetLibrary, AssetException, AssetNode, AssetManager, AssetCreator
from .cache import CacheManager
##----------------------------------------------------------------##
from .Command import EditorCommand, EditorCommandStack, EditorCommandRegistry, RemoteCommand, RemoteCommandRegistry
from .EditorModule import EditorModule
from .EditorApp import app
##----------------------------------------------------------------##
# import .CoreModule
CANDY_MIME_ENTITY_DATA = 'application/candy.entity-data'
CANDY_MIME_ASSET_LIST = 'application/candy.asset-list'
def getProjectPath ( path = None ):
return Project.get ().getBasePath ( path )
def getAppPath ( path = None ):
return app.getPath ( path )
| 29.051282
| 115
| 0.600177
|
643d2f4b7a8ef45528c8de8e729db7525c458c77
| 1,056
|
py
|
Python
|
tests/test_obj_CylinderSegment.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_obj_CylinderSegment.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_obj_CylinderSegment.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
import magpylib as magpy
def test_repr():
"""test __repr__"""
pm2 = magpy.magnet.CylinderSegment((1, 2, 3), (1, 2, 3, 0, 90))
assert pm2.__repr__()[:15] == "CylinderSegment", "CylinderSegment repr failed"
def test_barycenter():
"""test if barycenter is computed correctly"""
cs = magpy.magnet.CylinderSegment(
magnetization=(100, 0, 0), dimension=(1, 2, 1, 85, 170)
)
expected_barycenter_squeezed = np.array([-0.86248133, 1.12400755, 0.0])
np.testing.assert_allclose(cs.barycenter, expected_barycenter_squeezed)
cs.rotate_from_angax([76 * i for i in range(0, 5)], "x", anchor=(0, 0, 5), start=0)
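    # rotate_from_angax with a list of five angles turns the object's state into
    # a 5-step path; barycenter then returns one (x, y, z) row per path step,
    # which is what the expected array below checks.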
expected_barycenter_path = np.array(
[
[-0.86248133, 1.12400755, 0.0],
[-0.86248133, 5.12340067, 4.88101025],
[-0.86248133, 1.35491805, 9.94242755],
[-0.86248133, -4.46783198, 7.51035264],
[-0.86248133, -3.51665082, 1.27219099],
]
)
np.testing.assert_allclose(cs.barycenter, expected_barycenter_path)
| 32
| 87
| 0.626894
|
37af7908b2d81365e4be2dc577585b2d542ae72f
| 410
|
py
|
Python
|
ProjectEuler/p056.py
|
TISparta/competitive-programming-solutions
|
31987d4e67bb874bf15653565c6418b5605a20a8
|
[
"MIT"
] | 1
|
2018-01-30T13:21:30.000Z
|
2018-01-30T13:21:30.000Z
|
ProjectEuler/p056.py
|
TISparta/competitive-programming-solutions
|
31987d4e67bb874bf15653565c6418b5605a20a8
|
[
"MIT"
] | null | null | null |
ProjectEuler/p056.py
|
TISparta/competitive-programming-solutions
|
31987d4e67bb874bf15653565c6418b5605a20a8
|
[
"MIT"
] | 1
|
2018-08-29T13:26:50.000Z
|
2018-08-29T13:26:50.000Z
|
# Execution time : 0.251594 seconds
# Solution Explanation
# Brute-force approach is enough.
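# e.g. for a = 2, b = 10: 2**10 = 1024 and its digit sum is 1 + 0 + 2 + 4 = 7;
# the search below scans all a, b < 100 for the maximal such digit sum.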
import time
width = 40
def solution():
return max(sum(int(it) for it in str(a**b)) for a in range(100) for b in range(100))
if __name__=="__main__":
start_ = time.time()
print(' Answer -> %s '.center(width,'-') % ( solution() ))
print(' %f seconds '.center(width,'-') % ( time.time() - start_))
| 22.777778
| 88
| 0.629268
|
c1c70b8566db5a85c82448af289f34c928045991
| 406
|
bzl
|
Python
|
third_party/javax_inject.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 186
|
2019-06-05T01:02:53.000Z
|
2022-03-31T10:44:19.000Z
|
third_party/javax_inject.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 25
|
2019-06-04T23:18:55.000Z
|
2021-09-22T03:44:25.000Z
|
third_party/javax_inject.bzl
|
wix/exodus
|
dfb0c9713b07a8b6a49b548b7b543021e748d80b
|
[
"MIT"
] | 17
|
2019-06-04T18:45:19.000Z
|
2022-01-20T09:40:04.000Z
|
load("//:import_external.bzl", import_external = "safe_wix_scala_maven_import_external")
def dependencies():
import_external(
name = "javax_inject_javax_inject",
artifact = "javax.inject:javax.inject:1",
artifact_sha256 = "91c77044a50c481636c32d916fd89c9118a72195390452c81065080f957de7ff",
srcjar_sha256 = "c4b87ee2911c139c3daf498a781967f1eb2e75bc1a8529a2e7b328a15d0e433e",
)
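    # Usage sketch (assumed): once dependencies() has run from the WORKSPACE,
    # build targets can depend on this jar via a label such as
    # "@javax_inject_javax_inject" (the exact label shape follows
    # import_external's conventions).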
| 36.909091
| 91
| 0.785714
|
a625714afff55a26db5d4652032250b82e7587f4
| 3,472
|
py
|
Python
|
nuitka/build/inline_copy/lib/scons-3.1.0/SCons/Tool/filesystem.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | 1
|
2019-03-31T09:56:11.000Z
|
2019-03-31T09:56:11.000Z
|
nuitka/build/inline_copy/lib/scons-3.1.0/SCons/Tool/filesystem.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | null | null | null |
nuitka/build/inline_copy/lib/scons-3.1.0/SCons/Tool/filesystem.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | null | null | null |
"""SCons.Tool.filesystem
Tool-specific initialization for the filesystem tools.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/filesystem.py e724ae812eb96f4858a132f5b8c769724744faf6 2019-07-21 00:04:47 bdeegan"
import SCons
from SCons.Tool.install import copyFunc
copyToBuilder, copyAsBuilder = None, None
def copyto_emitter(target, source, env):
""" changes the path of the source to be under the target (which
are assumed to be directories.
"""
n_target = []
for t in target:
n_target = n_target + [t.File( str( s ) ) for s in source]
return (n_target, source)
def copy_action_func(target, source, env):
assert( len(target) == len(source) ), "\ntarget: %s\nsource: %s" %(list(map(str, target)),list(map(str, source)))
for t, s in zip(target, source):
if copyFunc(t.get_path(), s.get_path(), env):
return 1
return 0
def copy_action_str(target, source, env):
return env.subst_target_source(env['COPYSTR'], 0, target, source)
copy_action = SCons.Action.Action( copy_action_func, copy_action_str )
def generate(env):
try:
env['BUILDERS']['CopyTo']
env['BUILDERS']['CopyAs']
    except KeyError:
global copyToBuilder
if copyToBuilder is None:
copyToBuilder = SCons.Builder.Builder(
action = copy_action,
target_factory = env.fs.Dir,
source_factory = env.fs.Entry,
multi = 1,
emitter = [ copyto_emitter, ] )
global copyAsBuilder
if copyAsBuilder is None:
copyAsBuilder = SCons.Builder.Builder(
action = copy_action,
target_factory = env.fs.Entry,
source_factory = env.fs.Entry )
env['BUILDERS']['CopyTo'] = copyToBuilder
env['BUILDERS']['CopyAs'] = copyAsBuilder
env['COPYSTR'] = 'Copy file(s): "$SOURCES" to "$TARGETS"'
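# Usage sketch (assumed; file names are hypothetical): once generate(env) has
# run, a SConscript can invoke the two builders, e.g.
#   env.CopyTo('build/assets', ['a.txt', 'b.txt'])  # copy files under a directory
#   env.CopyAs('build/a_copy.txt', 'a.txt')         # copy to an explicit target name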
def exists(env):
return 1
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 35.070707
| 121
| 0.659562
|
53f914cb24c06564ae98e011cbd91898f54edaad
| 16,078
|
py
|
Python
|
log_mito/model_64.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_mito/model_64.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_mito/model_64.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
# exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd'])
Monomer('C3pro', ['Apop'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 16000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None) + BidU(C8A=None) | C8A(BidU=1) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1) % BidU(C8A=1) >> C8A(BidU=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None), C8pro_0)
Initial(C3pro(Apop=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
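# Simulation sketch (assumed usage; not part of the exported model and not
# executed here): the model above can be integrated with PySB's ODE simulator:
#   from pysb.simulator import ScipyOdeSimulator
#   import numpy as np
#   tspan = np.linspace(0, 20000, 100)
#   result = ScipyOdeSimulator(model, tspan).run()
#   result.observables['ParpC_obs']  # cleaved-PARP trajectory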
| 87.857923
| 710
| 0.803458
|
2342ffd1ef296c0a0b1d547686bec2fe69b6a254
| 121
|
py
|
Python
|
newspider/pipelines.py
|
Hironsan/japanese-news-crawler
|
cec9015fc41ce94a80a1d5fe5a7652913686563d
|
[
"MIT"
] | 7
|
2017-06-19T03:00:07.000Z
|
2021-02-23T12:41:39.000Z
|
newspider/pipelines.py
|
Hironsan/japanese-news-crawler
|
cec9015fc41ce94a80a1d5fe5a7652913686563d
|
[
"MIT"
] | null | null | null |
newspider/pipelines.py
|
Hironsan/japanese-news-crawler
|
cec9015fc41ce94a80a1d5fe5a7652913686563d
|
[
"MIT"
] | 4
|
2017-06-13T21:18:15.000Z
|
2018-08-17T06:44:14.000Z
|
# -*- coding: utf-8 -*-
class NewspiderPipeline(object):
def process_item(self, item, spider):
return item
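# Note (sketch): Scrapy only invokes this pipeline if it is enabled in the
# project's settings.py, e.g.
#   ITEM_PIPELINES = {'newspider.pipelines.NewspiderPipeline': 300}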
| 17.285714
| 41
| 0.636364
|
9ab9b6bb65ef7e8d4b8eb5e1152e0e27ac553f62
| 25,035
|
py
|
Python
|
imcsdk/imchandle.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/imchandle.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/imchandle.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | 2
|
2016-05-26T02:05:46.000Z
|
2017-09-13T05:13:25.000Z
|
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import OrderedDict
from . import imcgenutils
from . import imccoreutils
from .imcexception import ImcException
from .imcconstants import NamingId
from .imcsession import ImcSession
log = logging.getLogger('imc')
CONFIG_CONF_MOS_BUFFER_SIZE = 10
class ImcHandle(ImcSession):
"""
ImcHandle class is the user interface point for any Imc communication.
Args:
ip (str): The IP or Hostname of the IMC Server
username (str): The username as configured on the Imc Server
password (str): The password as configured on the Imc Server
port (int or None): The port number to be used during connection
secure (bool or None): True for secure connection, otherwise False
proxy (str): The proxy object to be used to connect
auto_refresh (bool): if set to True, it'll refresh the cookie continuously
force (bool): if set to True it'll reconnect even if cookie exists
and is valid for the respective connection.
timeout (int): timeout value in secs
Example:
handle = ImcHandle("192.168.1.1","admin","password")\n
handle = ImcHandle("192.168.1.1","admin","password", secure=True)\n
handle = ImcHandle("192.168.1.1","admin","password", secure=False)\n
handle = ImcHandle("192.168.1.1","admin","password", port=80)\n
handle = ImcHandle("192.168.1.1","admin","password", port=443)\n
handle = ImcHandle("192.168.1.1","admin","password", port=100,
secure=True)\n
handle = ImcHandle("192.168.1.1","admin","password", port=100,
secure=False)\n
"""
def __init__(self, ip, username, password, port=None, secure=None,
proxy=None, auto_refresh=False, force=False, timeout=None):
ImcSession.__init__(self, ip=ip, username=username, password=password,
port=port, secure=secure, proxy=proxy,
auto_refresh=auto_refresh, force=force,
timeout=timeout)
self.__to_commit = OrderedDict()
def __enter__(self):
"""
Initiates a connection to the server referenced by the ImcHandle.
A cookie is populated in the ImcHandle, if the login is successful.
The class instance is returned.
"""
self._login()
return self
def __exit__(self, *exc):
"""
Disconnects from the server referenced by the ImcHandle and exits.
"""
self._logout()
def is_starship(self):
"""
Check if SDK is running in starship mode
"""
return self._is_starship()
def set_starship_proxy(self, proxy):
"""
Connects to the server via the proxy URL
"""
self._set_starship_proxy(proxy)
def unset_starship_proxy(self):
"""
        Stops connecting to the server via a proxy URL
"""
self._set_starship_proxy(None)
def set_starship_headers(self, headers):
"""
Set the headers to be used in connection
"""
self._set_starship_headers(headers)
def unset_starship_headers(self):
"""
        Clears the headers used in connection
"""
self._set_starship_headers(None)
def set_dump_xml(self):
"""
Enables the logging of xml requests and responses.
"""
self._set_dump_xml()
def unset_dump_xml(self):
"""
Disables the logging of xml requests and responses.
"""
self._unset_dump_xml()
def login(self, auto_refresh=None, force=None, timeout=None):
"""
Initiates a connection to the server referenced by the ImcHandle.
A cookie is populated in the ImcHandle, if the login is successful.
Args:
            auto_refresh (bool): if set to True, it refreshes the cookie
continuously
force (bool): if set to True it'll reconnect even if cookie exists
and is valid for the respective connection.
timeout (int): timeout value in secs
Returns:
True on successful connect
Example:
handle.login()\n
handle.login(auto_refresh=True)\n
handle.login(force=True)\n
handle.login(auto_refresh=True, force=True)\n
where handle is ImcHandle()
"""
return self._login(auto_refresh=auto_refresh, force=force, timeout=timeout)
def logout(self, timeout=None):
"""
Disconnects from the server referenced by the ImcHandle.
        Args:
            timeout (int): timeout value in secs
Returns:
True on successful disconnect
Example:
handle.logout()
where handle is ImcHandle()
"""
return self._logout(timeout=timeout)
def process_xml_elem(self, elem, timeout=None):
"""
process_xml_elem is a helper method which posts xml elements to the
        server and returns the parsed response. Its role is to operate on the
output of methods from Imcmethodfactory, which return xml element
node(s).
Args:
elem (xml element object)
Returns:
mo list or external method object
Example:
elem = imcmethodfactory.config_resolve_class(cookie=
handle.cookie, class_id="computeRackUnit")\n
objs = handle.process_xml_elem(elem)
"""
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
raise ImcException(response.error_code, response.error_descr)
if hasattr(response, "out_config"):
return response.out_config.child
else:
return response
def get_auth_token(self, timeout=None):
"""
Returns a token that is used for IMC authentication.
        Args:
            timeout (int): timeout value in secs
        Returns:
            auth_token (str)
Example:
handle.get_auth_token()
"""
from .imcmethodfactory import aaa_get_compute_auth_tokens
auth_token = None
mo = self.query_classid(class_id=NamingId.COMPUTE_BOARD)
if not mo:
mo = self.query_classid(class_id=NamingId.COMPUTE_RACK_UNIT)
if mo:
elem = aaa_get_compute_auth_tokens(cookie=self.cookie)
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
raise ImcException(response.error_code,
response.error_descr)
# cat = self.AaaGetNComputeAuthTokenByDn(mo[0].Dn, 1, None)
auth_token = response.out_tokens.split(',')[0]
return auth_token
def query_dn(self, dn, hierarchy=False, need_response=False, timeout=None):
"""
        Finds an object using its distinguished name.
Args:
dn (str): distinguished name of the object to be queried for.
hierarchy(bool): True/False,
get all objects in hierarchy if True
need_response(bool): True/False,
return the response xml node, instead of parsed
objects
timeout (int): timeout value in secs
Returns:
managedobject or None by default\n
managedobject list if hierarchy=True\n
externalmethod object if need_response=True\n
Example:
obj = handle.query_dn("sys/rack-unit-1")\n
obj = handle.query_dn("sys/rack-unit-1", hierarchy=True)\n
obj = handle.query_dn("sys/rack-unit-1", need_response=True\n
obj = handle.query_dn("sys/rack-unit-1", hierarchy=True,
need_response=True)\n
"""
from .imcmethodfactory import config_resolve_dn
if not dn:
raise ValueError("Provide dn.")
elem = config_resolve_dn(cookie=self.cookie, dn=dn,
in_hierarchical=hierarchy)
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
raise ImcException(response.error_code, response.error_descr)
if need_response:
return response
if hierarchy:
out_mo_list = imccoreutils.extract_molist_from_method_response(
response,
hierarchy)
return out_mo_list
mo = None
if len(response.out_config.child) > 0:
mo = response.out_config.child[0]
return mo
def query_classid(self, class_id=None, hierarchy=False,
need_response=False, timeout=None):
"""
        Finds an object using its class id.
Args:
class_id (str): class id of the object to be queried for.
hierarchy(bool): if set to True will return all the child
hierarchical objects.
need_response(bool): if set to True will return only response
object.
timeout (int): timeout value in secs
Returns:
managedobjectlist or None by default\n
managedobjectlist or None if hierarchy=True\n
methodresponse if need_response=True\n
Example:
obj = handle.query_classid(class_id="computeRackUnit")\n
obj = handle.query_classid(class_id="computeRackUnit",
hierarchy=True)\n
obj = handle.query_classid(class_id="computeRackUnit",
need_response=True)\n
"""
# ToDo - How to handle unknown class_id
from .imcmethodfactory import config_resolve_class
if not class_id:
raise ValueError("Provide Parameter class_id")
meta_class_id = imccoreutils.find_class_id_in_mo_meta_ignore_case(
class_id)
if not meta_class_id:
meta_class_id = class_id
elem = config_resolve_class(cookie=self.cookie,
class_id=meta_class_id,
in_hierarchical=hierarchy)
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
raise ImcException(response.error_code, response.error_descr)
if need_response:
return response
out_mo_list = imccoreutils.extract_molist_from_method_response(
response,
hierarchy)
return out_mo_list
def query_children(self, in_mo=None, in_dn=None, class_id=None,
hierarchy=False, timeout=None):
"""
Finds children of a given managed object or distinguished name.
Arguments can be specified to query only a specific type(class_id)
of children.
Arguments can also be specified to query only direct children or the
entire hierarchy of children.
Args:
in_mo (managed object): query children managed object under this
object.
in_dn (dn string): query children managed object for a
given managed object of the respective dn.
class_id(str): by default None, if given find only specific
children object for a given class_id.
hierarchy(bool): if set to True will return all the child
hierarchical objects.
timeout (int): timeout value in secs
Returns:
managedobjectlist or None by default\n
managedobjectlist or None if hierarchy=True\n
Example:
mo_list = handle.query_children(in_mo=mo)\n
mo_list = handle.query_children(in_mo=mo, class_id="classid")\n
mo_list = handle.query_children(in_dn=dn)\n
mo_list = handle.query_children(in_dn=dn, class_id="classid")\n
"""
from .imcmethodfactory import config_resolve_children
if not in_mo and not in_dn:
raise ValueError('[Error]: GetChild: Provide in_mo or in_dn.')
if in_mo:
parent_dn = in_mo.dn
elif in_dn:
parent_dn = in_dn
meta_class_id = None
# Setting the default class-id to None
# When hierarchy and class-id are passed together to Cisco IMC,
# an empty response is received.
# Hence, passing the class-id only when hierarchy is not set
# When both hierarchy and class-id are set, do local filtering for class-id
if class_id and not hierarchy:
meta_class_id = imccoreutils.find_class_id_in_mo_meta_ignore_case(
class_id)
if not meta_class_id:
meta_class_id = class_id
elem = config_resolve_children(cookie=self.cookie,
class_id=meta_class_id,
in_dn=parent_dn,
in_hierarchical=hierarchy)
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
raise ImcException(response.error_code, response.error_descr)
out_mo_list = imccoreutils.extract_molist_from_method_response(response,
hierarchy
)
if class_id and hierarchy:
out_mo_list = imccoreutils.filter_molist_on_class_id(
out_mo_list,
class_id=class_id)
return out_mo_list
def add_mo(self, mo, modify_present=True, timeout=None):
"""
Adds a managed object.
Args:
mo (managedobject): ManagedObject to be added.
modify_present (bool): True/False,
overwrite existing object if True
timeout (int): timeout value in secs
Returns:
None
Example:
obj = handle.add_mo(mo)
"""
from .imccoreutils import validate_mo_version
validate_mo_version(self, mo)
if modify_present in imcgenutils.AFFIRMATIVE_LIST:
if self.query_dn(mo.dn) is None:
mo.status = "created"
else:
mo.status = "modified"
else:
mo.status = "created"
self.__to_commit[mo.dn] = mo
self._commit(timeout=timeout)
def set_mo(self, mo, timeout=None):
"""
Modifies a managed object and adds it to ImcHandle commit buffer (if
not already in it).
Args:
mo (managedobject): Managed object with modified properties.
timeout (int): timeout value in secs
Returns:
None
Example:
obj = handle.set_mo(mo)
"""
from .imccoreutils import validate_mo_version
validate_mo_version(self, mo)
mo.status = "modified"
self.__to_commit[mo.dn] = mo
self._commit(timeout=timeout)
def remove_mos(self, mos, timeout=None):
"""
removes multiple managed objects in a single request
Args:
mos (managedobject): List of managed objects
timeout (int): timeout value in secs
Returns:
dict: {'response_status': string,
'response_mos': {'dn':
{'is_configured': bool,
'response_object': MO or ImcException
}
}
}
Example:
obj = handle.remove_mos(mos)
"""
for mo in mos:
mo.status = "deleted"
if mo.parent_mo:
mo.parent_mo.child_remove(mo)
self.__to_commit[mo.dn] = mo
return self._commit_mos(timeout)
def remove_mo(self, mo, timeout=None):
"""
Removes a managed object.
Args:
mo (managedobject): Managed object to be removed.
timeout (int): timeout value in secs
Returns:
None
Example:
obj = handle.remove_mo(mo)
"""
from .imccoreutils import validate_mo_version
validate_mo_version(self, mo)
mo.status = "deleted"
if mo.parent_mo:
mo.parent_mo.child_remove(mo)
self.__to_commit[mo.dn] = mo
self._commit(timeout=timeout)
def _commit(self, timeout=None):
"""
Commit the buffer to the server. Pushes all the configuration changes
so far to the server.
Configuration could be added to the commit buffer using add_mo(),
set_mo(), remove_mo().
Args:
timeout (int): timeout value in secs
Returns:
None
Example:
self._commit()
"""
from .imcbasetype import ConfigMap
from .imcmethodfactory import config_conf_mo
mo_dict = self.__to_commit
if not mo_dict:
# log.debug("Commit Buffer is Empty")
return None
config_map = ConfigMap()
for mo_dn in mo_dict:
config_map.child_add(mo_dict[mo_dn])
elem = config_conf_mo(self.cookie, dn=mo_dn,
in_config=config_map,
in_hierarchical=False)
response = self.post_elem(elem, timeout=timeout)
if response.error_code != 0:
self.__to_commit.clear()
raise ImcException(response.error_code, response.error_descr)
for out_mo in response.out_config.child:
out_mo.sync_mo(mo_dict[out_mo.dn])
self.__to_commit.clear()
def add_mos(self, mos, modify_present=True, timeout=None):
"""
adds multiple managed objects in a single request
Args:
mos (managedobject): List of managed objects
timeout (int): timeout value in secs
Returns:
dict: {'response_status': string,
'response_mos': {'dn':
{'is_configured': bool,
'response_object': MO or ImcException
}
}
}
Example:
            obj = handle.add_mos(mos)
"""
for mo in mos:
if modify_present in imcgenutils.AFFIRMATIVE_LIST:
if self.query_dn(mo.dn) is None:
mo.status = "created"
else:
mo.status = "modified"
else:
mo.status = "created"
self.__to_commit[mo.dn] = mo
return self._commit_mos(timeout)
def set_mos(self, mos, timeout=None):
"""
Sets multiple managed objects in a single request
Args:
mos (managedobject): List of managed objects
timeout (int): timeout value in secs
Returns:
dict: {'response_status': string,
'response_mos': {'dn':
{'is_configured': bool,
'response_object': MO or ImcException
}
}
}
Example:
obj = handle.set_mos(mos)
"""
for mo in mos:
self.__to_commit[mo.dn] = mo
return self._commit_mos(timeout)
def __process_config_conf_mos(self, mos, timeout=None):
"""
Internal method to process configconfmos.
        IMC XmlApi method 'configConfMos' supports a maximum of 10 MOs in a single
request.
"""
from .imcbasetype import ConfigMap, Pair, FailedMos
from .imccore import OperationStatus, ImcErrorResponse
from .imcmethodfactory import config_conf_mos
from .imccoreutils import ConfigConfMosConstants as Const
from .imcexception import ImcException
if not mos:
return None
response_status = None
config_map = ConfigMap()
for mo in mos:
child_list = mo.child
while len(child_list) > 0:
current_child_list = child_list
child_list = []
for child_mo in current_child_list:
child_list.extend(child_mo.child)
pair = Pair()
pair.key = mo.dn
pair.child_add(mo)
config_map.child_add(pair)
elem = config_conf_mos(self.cookie, config_map, False)
failed = {}
passed = {}
response = self.post_elem(elem, timeout)
if isinstance(response, ImcErrorResponse):
error_code = response.error_code
error_descr = response.error_descr
            # clear the commit buffer in case of an exception
self.__to_commit.clear()
raise ImcException(error_code, error_descr)
for ch in response.out_configs.child:
if isinstance(ch, OperationStatus):
response_status = ch.operation_status
continue
for chd in ch.child:
if isinstance(ch, Pair):
passed[chd.dn] = chd
elif isinstance(ch, FailedMos):
failed[chd.dn] = chd.error_descr
mos_dict = {
Const.RESPONSE_PASSED_MOS: passed,
Const.RESPONSE_FAILED_MOS: failed
}
response_dict = {
Const.RESPONSE_STATUS: response_status,
Const.RESPONSE_MOS: mos_dict
}
return response_dict
def _commit_mos(self, timeout=None):
"""Method to send multiple mos in a single XML API request"""
from .imccoreutils import ConfigConfMosConstants as Const
mo_dict = self.__to_commit
if not mo_dict:
# log.debug("Commit Buffer is Empty")
return None
status = 0
# status_dict is used to convert a string value of status to an integer
status_dict = {
Const.RESPONSE_STATUS_FAIL: 1,
Const.RESPONSE_STATUS_SUCCESS: 2,
Const.RESPONSE_STATUS_PART_SUCCESS: 4
}
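        # e.g. a failed batch (1) ORed with a successful batch (2) yields 3,
        # which the mapping at the end of this method reports as partial success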
# this is the final dictionary to be returned
ret = {
Const.RESPONSE_STATUS: None,
Const.RESPONSE_MOS: {}
}
        mos = list(mo_dict.values())
        for i in range(0, len(mos), CONFIG_CONF_MOS_BUFFER_SIZE):
            # Configure the mo list received in batches of 10 on the endpoint
            mos_ = mos[i: i + CONFIG_CONF_MOS_BUFFER_SIZE]
response_dict_ = self.__process_config_conf_mos(mos_, timeout)
# Fetch the status from configuration of a batch and save it in
# the overall status
status_str = response_dict_[Const.RESPONSE_STATUS]
status |= status_dict[status_str]
# Update the ret dictionary
response_mos_ = response_dict_[Const.RESPONSE_MOS]
passed = ret[Const.RESPONSE_MOS].get(Const.RESPONSE_PASSED_MOS, {})
failed = ret[Const.RESPONSE_MOS].get(Const.RESPONSE_FAILED_MOS, {})
passed.update(response_mos_[Const.RESPONSE_PASSED_MOS])
failed.update(response_mos_[Const.RESPONSE_FAILED_MOS])
ret[Const.RESPONSE_MOS][Const.RESPONSE_PASSED_MOS] = passed
ret[Const.RESPONSE_MOS][Const.RESPONSE_FAILED_MOS] = failed
if status == 0:
ret[Const.RESPONSE_STATUS] = None
elif status == 1:
ret[Const.RESPONSE_STATUS] = Const.RESPONSE_STATUS_FAIL
elif status == 2:
ret[Const.RESPONSE_STATUS] = Const.RESPONSE_STATUS_SUCCESS
elif status >= 3:
ret[Const.RESPONSE_STATUS] = Const.RESPONSE_STATUS_PART_SUCCESS
# Always cleanup the commit buffer
self.__to_commit.clear()
return ret
| 33.604027
| 83
| 0.569323
|
1246aae32c5cf2a8cf8d007ce669452046febb88
| 12,361
|
py
|
Python
|
sopel_SpiceBotSERV/SpiceBotCore/interface/database.py
|
SpiceBot/SpiceBotSERV
|
19d254c694287af869f5390f71efd20a9fcf620f
|
[
"EFL-2.0"
] | null | null | null |
sopel_SpiceBotSERV/SpiceBotCore/interface/database.py
|
SpiceBot/SpiceBotSERV
|
19d254c694287af869f5390f71efd20a9fcf620f
|
[
"EFL-2.0"
] | null | null | null |
sopel_SpiceBotSERV/SpiceBotCore/interface/database.py
|
SpiceBot/SpiceBotSERV
|
19d254c694287af869f5390f71efd20a9fcf620f
|
[
"EFL-2.0"
] | null | null | null |
# coding=utf8
from __future__ import unicode_literals, absolute_import, division, print_function
import json
import inspect
from sopel.tools import Identifier
from sopel.db import SopelDB, NickValues, ChannelValues, PluginValues
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.exc import SQLAlchemyError
BASE = declarative_base()
# 12:50:24 <+RustyB> session.query(Nicknames.slug).all()
# 12:50:30 <+RustyB> or session.query(Nicknames.canonical).all()
# 12:50:32 <+RustyB> depending what you want
# 12:50:46 <deathbybandaid> what's the diff?
# 12:51:04 <+dgw> Also, RIP my hope to get a quick PR in for #1836 because I like that number.
# 12:51:06 <+RustyB> slug is all lowercase i believe
# 12:51:10 <+RustyB> and canonical is what is in chat
# 12:51:23 <+RustyB> because old dumb sopel reasons?
# 12:51:31 <deathbybandaid> I usually compare with .lower() anyway
# 12:51:44 <+dgw> Identifier objects handle that for you, btw
# sqlite> select * from nicknames where canonical = 'RustyCloud';
# 4|rustycloud|RustyCloud
class SpiceDB(object):
# NICK FUNCTIONS
def adjust_nick_value(self, nick, key, value):
"""Sets the value for a given key to be associated with the nick."""
nick = Identifier(nick)
value = json.dumps(value, ensure_ascii=False)
nick_id = self.get_nick_id(nick)
session = self.ssession()
try:
result = session.query(NickValues) \
.filter(NickValues.nick_id == nick_id) \
.filter(NickValues.key == key) \
.one_or_none()
# NickValue exists, update
if result:
result.value = float(result.value) + float(value)
session.commit()
# DNE - Insert
else:
new_nickvalue = NickValues(nick_id=nick_id, key=key, value=float(value))
session.add(new_nickvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
    def adjust_nick_list(self, nick, key, entries, adjustmentdirection):
        """Adds entries to, or removes them from, the JSON list stored for the nick."""
        nick = Identifier(nick)
        if not isinstance(entries, list):
            entries = [entries]
        nick_id = self.get_nick_id(nick)
        session = self.ssession()
        try:
            result = session.query(NickValues) \
                .filter(NickValues.nick_id == nick_id) \
                .filter(NickValues.key == key) \
                .one_or_none()
            # NickValue exists, update the stored (JSON-encoded) list
            if result:
                values = json.loads(result.value)
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                elif adjustmentdirection == 'del':
                    for entry in entries:
                        while entry in values:
                            values.remove(entry)
                result.value = json.dumps(values, ensure_ascii=False)
                session.commit()
            # DNE - Insert (deleting from a missing list is a no-op)
            else:
                values = []
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                new_nickvalue = NickValues(nick_id=nick_id, key=key,
                                           value=json.dumps(values, ensure_ascii=False))
                session.add(new_nickvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
# CHANNEL FUNCTIONS
def adjust_channel_value(self, channel, key, value):
"""Sets the value for a given key to be associated with the channel."""
channel = Identifier(channel).lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(ChannelValues) \
.filter(ChannelValues.channel == channel)\
.filter(ChannelValues.key == key) \
.one_or_none()
# ChannelValue exists, update
if result:
result.value = float(result.value) + float(value)
session.commit()
# DNE - Insert
else:
new_channelvalue = ChannelValues(channel=channel, key=key, value=float(value))
session.add(new_channelvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
    def adjust_channel_list(self, channel, key, entries, adjustmentdirection):
        """Adds entries to, or removes them from, the JSON list stored for the channel."""
        channel = Identifier(channel).lower()
        if not isinstance(entries, list):
            entries = [entries]
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValue exists, update the stored (JSON-encoded) list
            if result:
                values = json.loads(result.value)
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                elif adjustmentdirection == 'del':
                    for entry in entries:
                        while entry in values:
                            values.remove(entry)
                result.value = json.dumps(values, ensure_ascii=False)
                session.commit()
            # DNE - Insert (deleting from a missing list is a no-op)
            else:
                values = []
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                new_channelvalue = ChannelValues(channel=channel, key=key,
                                                 value=json.dumps(values, ensure_ascii=False))
                session.add(new_channelvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
# PLUGIN FUNCTIONS
def adjust_plugin_value(self, plugin, key, value):
"""Sets the value for a given key to be associated with the plugin."""
plugin = plugin.lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(PluginValues) \
.filter(PluginValues.plugin == plugin)\
.filter(PluginValues.key == key) \
.one_or_none()
# PluginValue exists, update
if result:
result.value = float(result.value) + float(value)
session.commit()
# DNE - Insert
else:
new_pluginvalue = PluginValues(plugin=plugin, key=key, value=float(value))
session.add(new_pluginvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
    def adjust_plugin_list(self, plugin, key, entries, adjustmentdirection):
        """Adds entries to, or removes them from, the JSON list stored for the plugin."""
        plugin = plugin.lower()
        if not isinstance(entries, list):
            entries = [entries]
        session = self.ssession()
        try:
            result = session.query(PluginValues) \
                .filter(PluginValues.plugin == plugin)\
                .filter(PluginValues.key == key) \
                .one_or_none()
            # PluginValue exists, update the stored (JSON-encoded) list
            if result:
                values = json.loads(result.value)
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                elif adjustmentdirection == 'del':
                    for entry in entries:
                        while entry in values:
                            values.remove(entry)
                result.value = json.dumps(values, ensure_ascii=False)
                session.commit()
            # DNE - Insert (deleting from a missing list is a no-op)
            else:
                values = []
                if adjustmentdirection == 'add':
                    for entry in entries:
                        if entry not in values:
                            values.append(entry)
                new_pluginvalue = PluginValues(plugin=plugin, key=key,
                                               value=json.dumps(values, ensure_ascii=False))
                session.add(new_pluginvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
class BotDatabase():
def __init__(self):
self.db = None
self.dict = {
"bot": {
"nick": None,
},
}
def initialize(self, config):
SopelDB.adjust_nick_value = SpiceDB.adjust_nick_value
SopelDB.adjust_nick_list = SpiceDB.adjust_nick_list
SopelDB.adjust_channel_value = SpiceDB.adjust_channel_value
SopelDB.adjust_channel_list = SpiceDB.adjust_channel_list
SopelDB.adjust_plugin_value = SpiceDB.adjust_plugin_value
SopelDB.adjust_plugin_list = SpiceDB.adjust_plugin_list
self.db = SopelDB(config)
BASE.metadata.create_all(self.db.engine)
self.dict["bot"]["nick"] = config.core.nick
    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if hasattr(self.db, name):
            return getattr(self.db, name)  # attribute lookup without eval()
        else:
            return None
"""Nick"""
def adjust_nick_value(self, nick, key, value):
return self.db.adjust_nick_value(nick, key, value)
def adjust_nick_list(self, nick, key, entries, adjustmentdirection):
return self.db.adjust_nick_list(nick, key, entries, adjustmentdirection)
"""Bot"""
def get_bot_value(self, key):
return self.db.get_nick_value(self.dict["bot"]["nick"], key)
def set_bot_value(self, key, value):
return self.db.set_nick_value(self.dict["bot"]["nick"], key, value)
def delete_bot_value(self, key):
return self.db.delete_nick_value(self.dict["bot"]["nick"], key)
def adjust_bot_value(self, key, value):
return self.db.adjust_nick_value(self.dict["bot"]["nick"], key, value)
def adjust_bot_list(self, key, entries, adjustmentdirection):
return self.db.adjust_nick_list(self.dict["bot"]["nick"], key, entries, adjustmentdirection)
"""Channels"""
def adjust_channel_value(self, channel, key, value):
return self.db.adjust_channel_value(channel, key, value)
def adjust_channel_list(self, nick, key, entries, adjustmentdirection):
return self.db.adjust_channel_list(nick, key, entries, adjustmentdirection)
"""Plugins"""
def adjust_plugin_value(self, plugin, key, value):
return self.db.adjust_plugin_value(plugin, key, value)
def adjust_plugin_list(self, plugin, key, entries, adjustmentdirection):
return self.db.adjust_plugin_list(plugin, key, entries, adjustmentdirection)
botdb = BotDatabase()
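# Usage sketch (assumed; the nick and key names are hypothetical): once
# botdb.initialize(config) has run during bot setup,
#   botdb.adjust_nick_value('SomeNick', 'points', 5)     # add 5 to a numeric counter
#   botdb.adjust_bot_list('admins', ['nick1'], 'add')    # append to a stored list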
"""
Other
"""
def lineno():
"""Returns the current line number in our program."""
linenum = inspect.currentframe().f_back.f_lineno
frameinfo = inspect.getframeinfo(inspect.currentframe())
filename = frameinfo.filename
return str("File: " + str(filename) + " Line: " + str(linenum))
| 37.457576
| 100
| 0.561848
|
a7552a1b61a6da47959fcc3e3379c242da53d407
| 377
|
py
|
Python
|
shophub/shop/migrations/0009_auto_20190330_1746.py
|
Saiful1721648/Product-Recommendation-System-Using-Machine-Learning-through-Big-Data-in-E-commerce-Website
|
cd2fb19ae1bc6a2d0382e3f5288243681ebd7e88
|
[
"Apache-2.0"
] | null | null | null |
shophub/shop/migrations/0009_auto_20190330_1746.py
|
Saiful1721648/Product-Recommendation-System-Using-Machine-Learning-through-Big-Data-in-E-commerce-Website
|
cd2fb19ae1bc6a2d0382e3f5288243681ebd7e88
|
[
"Apache-2.0"
] | null | null | null |
shophub/shop/migrations/0009_auto_20190330_1746.py
|
Saiful1721648/Product-Recommendation-System-Using-Machine-Learning-through-Big-Data-in-E-commerce-Website
|
cd2fb19ae1bc6a2d0382e3f5288243681ebd7e88
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.1.7 on 2019-03-30 12:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shop', '0008_orders_amount'),
]
operations = [
migrations.AlterField(
model_name='orders',
name='amount',
field=models.IntegerField(default=0),
),
]
| 19.842105
| 49
| 0.588859
|
57369e8f2fff6569f585a7188424092635537361
| 6,824
|
py
|
Python
|
postprocessing/python_micro_compare/compare_precip_profiles.py
|
larson-group/clubb_release
|
b4d671e3e238dbe00752c0dead6a0d4f9897350a
|
[
"Intel",
"Unlicense",
"NetCDF"
] | null | null | null |
postprocessing/python_micro_compare/compare_precip_profiles.py
|
larson-group/clubb_release
|
b4d671e3e238dbe00752c0dead6a0d4f9897350a
|
[
"Intel",
"Unlicense",
"NetCDF"
] | null | null | null |
postprocessing/python_micro_compare/compare_precip_profiles.py
|
larson-group/clubb_release
|
b4d671e3e238dbe00752c0dead6a0d4f9897350a
|
[
"Intel",
"Unlicense",
"NetCDF"
] | 1
|
2022-01-28T22:22:04.000Z
|
2022-01-28T22:22:04.000Z
|
# $Id$
"""
compare_precip_profiles
Description:
Compares the precipitation profiles for the Morrison Microphysics parameterization
"""
# Import libraries
import numpy as np
import matplotlib.pyplot as plt
import netCDF4
# Point to CLUBB's 'output' directory and location of SAM's stat file
out_dir = '/home/weberjk/precip_ta/output/'
sam_file = '/home/weberjk/precip_ta/input/input_fields/LBA.nc'
# Within CLUBB's 'output' directory, specify the names of each subdirectory
# containing different simulations. Each subdirectory's data will be overplotted
# and the name of the subdirectory used in the legend.
output_subdirectories = ['Control','NonLcl','const_lscale']
# For plotting, include what case is being plotted.
case = 'LBA'
# SAM and CLUBB vars. For each SAM variable, the CLUBB equivalent must be in the same
# array position.
sam_vars = ['QR','QG','QS','QI','NR','NG','NS','NI']
clubb_vars = ['rrm','rgm','rsm','rim','Nrm','Ngm','Nsm','Nim']
z0 = 0 # Start height [m]
z1 = 18000 # end height
t0 = 189 # Start time [min]
t1 = 360 # end time
#----------------------------------------------------------------------------------------
# Should not have to edit below this line
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# Useful constants
#----------------------------------------------------------------------------------------
s_in_min = 60
#----------------------------------------------------------------------------------------
# Functions
#----------------------------------------------------------------------------------------
def pull_profiles(nc, varname, conversion):
"""
Input:
nc -- Netcdf file object
varname -- Variable name string
conversion -- Conversion factor
Output:
time x height array of the specified variable
"""
var = nc.variables[varname]
var = np.squeeze(var)
var = var*conversion
return var
def return_mean_profiles(var, idx_t0, idx_t1, idx_z0, idx_z1):
"""
Input:
var -- time x height array of some property
    idx_t0 -- Index corresponding to the beginning of the averaging interval
    idx_t1 -- Index corresponding to the end of the averaging interval
    idx_z0 -- Index corresponding to the lowest model level of the averaging interval
    idx_z1 -- Index corresponding to the highest model level of the averaging interval
Output:
var -- time averaged vertical profile of the specified variable
"""
var = np.mean(var[idx_t0:idx_t1,idx_z0:idx_z1],axis=0)
return var
#----------------------------------------------------------------------------------------
# Begin Code
#----------------------------------------------------------------------------------------
t0_in_s = t0*s_in_min # CLUBB's time is in seconds.
t1_in_s = t1*s_in_min
#----------------------------------------------------------------------------------------
# Retrieve SAM's altitude, time, and flux profiles
#----------------------------------------------------------------------------------------
nc = netCDF4.Dataset(sam_file)
sam_z = pull_profiles(nc, 'z', 1.)
sam_t = pull_profiles(nc, 'time', 1.)
sam_rho = pull_profiles(nc, 'RHO', 1.) # For conversions
num_per_cm3_to_num_per_kg = 100**3 * sam_rho**-1 # Convert SAM's #/cm^3 to #/kg
g_per_kg_to_kg_per_kg = 1000.**-1 # Convert SAM's g/kg to kg/kg
# Store the conversions corresponding to SAM's indices for each variable
convert = [g_per_kg_to_kg_per_kg,g_per_kg_to_kg_per_kg,g_per_kg_to_kg_per_kg,g_per_kg_to_kg_per_kg,
num_per_cm3_to_num_per_kg,num_per_cm3_to_num_per_kg,num_per_cm3_to_num_per_kg,num_per_cm3_to_num_per_kg]
idx_z0 = (np.abs(sam_z[:] - z0)).argmin()
idx_z1 = (np.abs(sam_z[:] - z1)).argmin()
sam_z = sam_z[idx_z0:idx_z1]
idx_t0 = (np.abs(sam_t[:] - t0)).argmin()
idx_t1 = (np.abs(sam_t[:] - t1)).argmin()
# Create a structure that will hold all the profiles for each SAM variable
sam_mean = np.empty( (len(sam_vars),len(sam_z) ) )
# Loop through all the variables
for j in np.arange(0,len(sam_vars)):
print "Pulling SAM variable: %s"%(sam_vars[j])
sam_mean[j,:] = ( return_mean_profiles(
pull_profiles(nc, sam_vars[j],convert[j]),
idx_t0, idx_t1, idx_z0, idx_z1) )
nc.close()
#----------------------------------------------------------------------------------------
# Retrieve CLUBB's altitude, time, and flux profiles
#----------------------------------------------------------------------------------------
clubb_file = '%s/lba_zt.nc'%(out_dir+output_subdirectories[0])
nc = netCDF4.Dataset(clubb_file)
clb_z = pull_profiles(nc, 'altitude', 1.)
clb_t = pull_profiles(nc, 'time', 1.)
idx_z0 = (np.abs(clb_z[:] - z0)).argmin()
idx_z1 = (np.abs(clb_z[:] - z1)).argmin()
clb_z = clb_z[idx_z0:idx_z1]
idx_t0 = (np.abs(clb_t[:] - t0_in_s)).argmin()
idx_t1 = (np.abs(clb_t[:] - t1_in_s)).argmin()
# Create an array to hold all the clubb profiles, this time, for each subdirectory
clubb_mean = np.empty( ( len(output_subdirectories),len(clubb_vars),len(clb_z) ) )
# Loop through each subdirectory
for i in np.arange(0,len(output_subdirectories)):
    clubb_file = '%s/lba_zt.nc'%(out_dir+output_subdirectories[i])
nc = netCDF4.Dataset(clubb_file)
# Loop through each variable
for j in np.arange(0,len(clubb_vars)):
print "Pulling CLUBB variable: %s from case:%s"%(clubb_vars[j], test[i])
clubb_mean[i,j,:] = ( return_mean_profiles(
pull_profiles(nc, clubb_vars[j],1.),
idx_t0, idx_t1, idx_z0, idx_z1) )
nc.close()
#----------------------------------------------------------------------------------------
# Plot
#-----------------------------------------------------------------------------------------
f,((ax1,ax2,ax3,ax4),(ax5,ax6,ax7,ax8)) = plt.subplots(2,4,sharey=True)
# First, plot SAM data
ax1.plot(sam_mean[0],sam_z,lw=2,c='k')
ax1.grid()
ax1.set_ylim(0,max(sam_z))
ax2.plot(sam_mean[1],sam_z,lw=2,c='k')
ax2.grid()
ax3.plot(sam_mean[2],sam_z,lw=2,c='k')
ax3.grid()
ax4.plot(sam_mean[3],sam_z,lw=2,c='k')
ax4.grid()
ax5.plot(sam_mean[4],sam_z,lw=2,c='k')
ax5.grid()
ax6.plot(sam_mean[5],sam_z,lw=2,c='k')
ax6.grid()
ax7.plot(sam_mean[6],sam_z,lw=2,c='k')
ax7.grid()
ax8.plot(sam_mean[7],sam_z,lw=2,c='k')
ax8.grid()
for i in np.arange(0,len(output_subdirectories)):
    ax1.plot(clubb_mean[i,0,:],clb_z,label=output_subdirectories[i])
ax2.plot(clubb_mean[i,1,:],clb_z)
ax3.plot(clubb_mean[i,2,:],clb_z)
ax4.plot(clubb_mean[i,3,:],clb_z)
ax5.plot(clubb_mean[i,4,:],clb_z)
ax6.plot(clubb_mean[i,5,:],clb_z)
ax7.plot(clubb_mean[i,6,:],clb_z)
ax8.plot(clubb_mean[i,7,:],clb_z)
ax1.legend()
plt.show()
| 35.915789
| 114
| 0.552608
|
fa9eaf9c7e50962f8487accd9fec8bd8ca1dc094
| 4,423
|
py
|
Python
|
mbdata/api/utils.py
|
LeoVerto/mbdata
|
fd6a391d3d1c0a3085904ae65a50129a34cd1a51
|
[
"MIT"
] | 1
|
2021-04-13T17:46:43.000Z
|
2021-04-13T17:46:43.000Z
|
mbdata/api/utils.py
|
LeoVerto/mbdata
|
fd6a391d3d1c0a3085904ae65a50129a34cd1a51
|
[
"MIT"
] | null | null | null |
mbdata/api/utils.py
|
LeoVerto/mbdata
|
fd6a391d3d1c0a3085904ae65a50129a34cd1a51
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2013 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import re
import logging
import xml.sax.saxutils
from six import StringIO, text_type
from flask import request, abort, current_app, json
from mbdata.api.errors import (
SUCCESS,
INVALID_FORMAT_ERROR,
MISSING_PARAMETER_ERROR,
ERROR_STATUS_CODES,
ERROR_DEFAULT_STATUS_CODE,
)
from mbdata.utils.models import ENTITY_TYPES
logger = logging.getLogger(__name__)
def to_uuid(s):
if re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', s):
return s
raise ValueError('invalid UUID')
def to_enum(s):
return s.lower()
PARAM_TYPES = {
'uuid': to_uuid,
'enum': to_enum,
'int': int,
'text': text_type,
}
def get_param(name, type=None, default=None, required=False, container=None):
if type and type.endswith('+'):
assert default is None
type = type[:-1]
value = request.args.getlist(name, type=PARAM_TYPES.get(type, type))
if not value and required:
abort(response_error(MISSING_PARAMETER_ERROR, 'missing parameter {0}'.format(name)))
if container is not None:
value = container(value)
else:
value = request.args.get(name, type=PARAM_TYPES.get(type, type), default=default)
if value is None and required:
abort(response_error(MISSING_PARAMETER_ERROR, 'missing parameter {0}'.format(name)))
return value
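# For illustration (hypothetical parameter names and query strings):
#   get_param('limit', type='int', default=10)      # ?limit=25 -> 25
#   get_param('mbid', type='uuid', required=True)   # aborts with MISSING_PARAMETER_ERROR if absent
#   get_param('inc', type='text+', container=set)   # ?inc=tags&inc=ratings -> {'tags', 'ratings'}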
def singular(plural):
"""
    Take a plural English word and turn it into its singular form.
    Obviously, this doesn't work in general. It knows just enough words to
generate XML tag names for list items. For example, if we have an element
called 'tracks' in the response, it will be serialized as a list without
named items in JSON, but we need names for items in XML, so those will be
called 'track'.
"""
if plural.endswith('ies'):
return plural[:-3] + 'y'
if plural.endswith('s'):
return plural[:-1]
raise ValueError('unknown plural form %r' % (plural,))
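# A quick sanity check of the mapping (values for illustration only):
#   singular('tracks')    -> 'track'
#   singular('countries') -> 'country'
#   singular('sheep')     -> ValueError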
def dumpxml(output, name, value, parent_name=None):
if isinstance(value, dict):
output.write('<{0}>'.format(name))
        for sub_name, sub_value in value.items():
dumpxml(output, sub_name, sub_value, parent_name=name)
output.write('</{0}>'.format(name))
elif isinstance(value, list):
output.write('<{0}>'.format(name))
if parent_name == 'relationships' and name in ENTITY_TYPES:
sub_name = 'relationship'
else:
sub_name = singular(name)
for sub_value in value:
dumpxml(output, sub_name, sub_value)
output.write('</{0}>'.format(name))
else:
output.write('<{0}>'.format(name))
        output.write(xml.sax.saxutils.escape(text_type(value)))  # text_type (from six) keeps this 2/3 portable
output.write('</{0}>'.format(name))
def render_xml(data):
output = StringIO()
output.write('<?xml version="1.0" encoding="UTF-8"?>')
    for name, value in data.items():
dumpxml(output, name, value)
output.flush()
return current_app.response_class(output.getvalue(),
content_type='application/xml; charset=UTF-8')
def render_json(data):
options = {}
if current_app.config['DEBUG']:
options['indent'] = 2
response = json.dumps(data, ensure_ascii=False, **options)
return current_app.response_class(response,
content_type='application/json; charset=UTF-8')
def render_response(code, message, data):
response = {
'response': {
'status': {
'code': code,
'message': message,
'version': '1.0',
},
}
}
response['response'].update(data)
format = get_param('format', type='enum', default='json')
if format == 'xml':
return render_xml(response)
elif format == 'json':
return render_json(response)
else:
abort(response_error(INVALID_FORMAT_ERROR, 'invalid format {0}'.format(format)))
def response_ok(**data):
return render_response(SUCCESS, 'success', data)
def response_error(code, message, **data):
logger.debug('response_error(%r, %r, %r)', code, message, data)
response = render_response(code, message, data)
response.status_code = ERROR_STATUS_CODES.get(code, ERROR_DEFAULT_STATUS_CODE)
return response
| 30.93007
| 96
| 0.641194
|
8159adec8698c8b27cc473b5a5439b8007c507c8
| 11,491
|
py
|
Python
|
services/api/app.py
|
Shane-Neeley/g2p-aggregator
|
e0611d9c20857fd30b9f5557400e9ea375546ebd
|
[
"MIT"
] | null | null | null |
services/api/app.py
|
Shane-Neeley/g2p-aggregator
|
e0611d9c20857fd30b9f5557400e9ea375546ebd
|
[
"MIT"
] | null | null | null |
services/api/app.py
|
Shane-Neeley/g2p-aggregator
|
e0611d9c20857fd30b9f5557400e9ea375546ebd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# sys libs
import datetime
import logging
import yaml
import urllib2
import sys
import re
import argparse
import os
import socket
import json
# backend
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, A
# webserver
import connexion
from flask_cors import CORS
from OpenSSL import SSL
# our utils
from log_setup import init_logging
# ***** globals
# set the WSGI application callable to allow using uWSGI:
# uwsgi --http :8080 -w app
application = None
# swagger doc
# beacon_api = None
g2p_api = None
# defaults
ARGS = None
# *****
init_logging()
log = logging.getLogger(__name__)
# controllers *****************************
# API_URL = 'https://app.swaggerhub.com/apiproxy/schema/file/ELIXIR-Finland/ga-4_gh_beacon_api_specification/0.4.0/swagger.yaml' # noqa
DESCRIPTION = """
The Variant Interpretation for Cancer Consortium (VICC)
The VICC is a Driver Project of the Global Alliance for Genomics Health (GA4GH).
The field of precision medicine aspires to a future in which a cancer patient's molecular information can be used to inform diagnosis, prognosis and treatment options most likely to benefit that individual patient. Many groups have created knowledgebases to annotate cancer genomic mutations associated with evidence of pathogenicity or relevant treatment options. However, clinicians and researchers are unable to fully utilize the accumulated knowledge derived from such efforts. Integration of the available knowledge is currently infeasible because each group (often redundantly) curates their own knowledgebase without adherence to any interoperability standards. Therefore, there is a clear need to standardize and coordinate clinical-genomics curation efforts, and create a public community resource able to query the aggregated information. To this end we have formed the Variant Interpretation for Cancer Consortium (VICC) to bring together the leading institutions that are independently developing comprehensive cancer variant interpretation databases.
""" # NOQA
VICC_BEACON = {
"id": "vicc",
"name": "VICC",
"url": None,
"organization": "VICC",
"description": DESCRIPTION,
"homePage": "http://cancervariants.org/",
"email": "vicc_paper@genomicsandhealth.org",
"aggregator": True,
"visible": True,
"enabled": True,
"supportedReferences": [
"GRCH37"
]
}
def _es():
""" get an elastic search connection """
return Elasticsearch(['{}'.format(ARGS.elastic)], verify_certs=False)
# utilities used by controllers
class Params():
""" turn parameter dict to a class"""
def __init__(self, args):
self.referenceName = args.get('referenceName', None)
self.start = args.get('start', None)
self.startMin = args.get('startMin', None)
self.startMax = args.get('startMax', None)
self.end = args.get('end', None)
self.endMin = args.get('endMin', None)
self.endMax = args.get('endMax', None)
self.referenceBases = args.get('referenceBases', None)
self.alternateBases = args.get('alternateBases', None)
self.assemblyId = args.get('assemblyId', None)
self.datasetIds = args.get('datasetIds', None)
self.includeDatasetResponses = args.get('includeDatasetResponses',
None)
def _location_lookup(params):
""" perform elastic search query """
client = _es()
# build parameters for query, and echo query to response
args = {}
if params.assemblyId:
args['features.referenceName'] = params.assemblyId
if params.referenceName:
args['features.chromosome'] = params.referenceName
if params.startMin and params.startMax:
args['features.start'] = '(>={} AND <={})'.format(params.startMin,
params.startMax)
elif params.startMin:
args['features.start'] = '(>={})'.format(params.startMin)
elif params.startMax:
args['features.start'] = '(<={})'.format(params.startMax)
elif params.start:
args['features.start'] = params.start
if params.endMin and params.endMax:
args['features.end'] = '(>={} AND <={})'.format(params.endMin,
params.endMax)
elif params.endMin:
args['features.end'] = '(>={})'.format(params.endMin)
elif params.endMax:
args['features.end'] = '(<={})'.format(params.endMax)
elif params.end:
args['features.end'] = params.end
if params.referenceBases:
args['features.ref'] = params.referenceBases
if params.alternateBases:
args['features.alt'] = params.alternateBases
q = Search(using=client)
query = ' '.join(['+{}:{}'.format(k, args[k]) for k in args.keys()])
q = q.query("query_string", query=query)
count = q.count()
return {
"beaconId": VICC_BEACON['id'],
"apiVersion": g2p_api.specification['info']['version'],
"exists": count > 0,
"datasetAlleleResponses": [
{
'externalUrl': '{}://{}/api/v1/associations/{}'.format(
g2p_api.specification['schemes'][0],
g2p_api.specification['host'],
hit.meta.id),
'note': hit.association.description,
}
for hit in q]
}
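# For illustration (hypothetical request): a beacon query with referenceName=1,
# startMin=100 and startMax=200 builds the Lucene-style query string
#   +features.chromosome:1 +features.start:(>=100 AND <=200)
# which is then handed to Elasticsearch's query_string query.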
# These are imported by name by connexion so we create them here.
def getBeacon():
""" static beacon """
return VICC_BEACON
def getBeaconAlleleResponse(**kwargs):
""" lookup by allele (aka variant/feature) """
return _location_lookup(Params(kwargs))
def postBeaconAlleleResponse(queryBeaconAllele):
""" lookup by allele (aka variant/feature) """
log.debug(queryBeaconAllele)
return _location_lookup(Params(queryBeaconAllele))
def searchAssociations(**kwargs):
""" return matching associations"""
log.debug(kwargs)
client = _es()
q = kwargs.get('q', '*')
s = Search(using=client, index='associations')
s = s.query("query_string", query=q)
# grab total before we apply size
size = int(kwargs.get('size', '10'))
_from = int(kwargs.get('from', '1'))
# set sort order
sort = kwargs.get('sort', None)
if sort:
(field, order) = sort.split(':')
if order == 'desc':
field = '-{}'.format(field)
if '.keyword' not in field:
field = '{}.keyword'.format(field)
log.debug('set sort to {}'.format(field))
s = s.sort(field)
s = s[_from:(_from+size)]
response = s.execute()
total = response.hits.total
log.debug('total {}'.format(total))
hits = [hit.to_dict() for hit in response]
return {
'hits': {
'total': total,
'hits': hits
}
}
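# Example request against a local instance (hypothetical host, index contents
# and sort field):
#   curl 'http://localhost:8080/api/v1/associations?q=BRAF&size=5&sort=source:asc'
# The response mirrors Elasticsearch's {'hits': {'total': ..., 'hits': [...]}} shape.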
def associationTerms(**kwargs):
log.debug(kwargs)
client = _es()
q = kwargs.get('q', '*')
field = kwargs.get('f')
if not field.endswith('.keyword'):
field = '{}.keyword'.format(field)
# create a search, ...
s = Search(using=client, index='associations')
# with no data ..
s = s.params(size=0)
s = s.query("query_string", query=q)
# ... just aggregations
size = int(kwargs.get('size', '10'))
s.aggs.bucket('terms', 'terms', field=field, size=size)
aggs = s.execute().aggregations
# map it to an array of objects
return aggs.to_dict()
# return [{'phenotype_description': b.key,
# 'phenotype_ontology_id': b.phenotype_id.buckets[0].key,
# 'phenotype_evidence_count':b.phenotype_id.buckets[0].doc_count} for b in aggs.phenotype_descriptions.buckets]
def getAssociation(**kwargs):
""" return a single association"""
log.debug(kwargs)
client = _es()
association = client.get(index="associations",
doc_type='association', id=kwargs['id'])
return association['_source']
# setup server
def configure_app(args):
""" configure the app, import swagger """
global application
# global beacon_api
global g2p_api
def function_resolver(operation_id):
"""Map the operation_id to the function in this class."""
if '.' in operation_id:
_, function_name = operation_id.rsplit('.', 1)
else:
function_name = operation_id
function = getattr(sys.modules[__name__], function_name)
return function
app = connexion.App(
__name__,
swagger_ui=True,
swagger_json=True)
CORS(app.app)
swagger_host = None
if args.swagger_host:
swagger_host = args.swagger_host
else:
host = 'localhost' # socket.gethostname()
if args.port != 80:
host += ':{}'.format(args.port)
swagger_host = '{}'.format(host)
# with open('swagger-beacon.yaml', 'r') as stream:
# swagger_beacon = yaml.load(stream)
#
# with open('swagger-g2p.yaml', 'r') as stream:
# swagger_combined = yaml.load(stream)
#
# swagger_beacon['host'] = swagger_host
# swagger_combined['host'] = swagger_host
with open('swagger-combined.yaml', 'r') as stream:
        swagger_combined = yaml.safe_load(stream)
swagger_combined['host'] = swagger_host
log.info('advertise swagger host as {}'.format(swagger_host))
# # remove schemes that do not apply
# if args.key_file:
# # swagger_beacon['schemes'].remove('http')
# swagger_combined['schemes'].remove('http')
# else:
# # swagger_beacon['schemes'].remove('https')
# swagger_combined['schemes'].remove('https')
# beacon_api = app.add_api(swagger_beacon, base_path='/v1/beacon',
# resolver=function_resolver)
g2p_api = app.add_api(swagger_combined, base_path='/api/v1',
resolver=function_resolver)
log.info('g2p_api.version {}'.format(
g2p_api.specification['info']['version']
))
# set global
application = app.app
return (app, g2p_api)
def run(args):
""" configure and start the apps """
(app, g2p_api) = configure_app(args)
if args.key_file:
context = (args.certificate_file, args.key_file)
app.run(port=args.port, ssl_context=context, host='0.0.0.0')
else:
app.run(port=args.port, host='0.0.0.0')
def setup_args():
# run our standalone flask server
argparser = argparse.ArgumentParser(
description='GA4GH Beacon & G2P webserver')
argparser.add_argument('-P', '--port', default=8080, type=int)
argparser.add_argument('-K', '--key_file', default=None)
argparser.add_argument('-C', '--certificate_file', default=None)
es_host = os.getenv('ES', 'http://localhost')
argparser.add_argument('-ES', '--elastic', default=es_host)
argparser.add_argument('-H', '--swagger_host', default=None,
help='Swagger hostname, defaults to localhost')
return argparser.parse_args()
def test():
""" setup test config"""
global ARGS
class TestArgs:
def __init__(self):
self.port = 8080
self.key_file = None
self.elastic = os.getenv('ES', 'http://localhost')
self.swagger_host = None
ARGS = TestArgs()
log.info(ARGS)
configure_app(ARGS)
def main():
global ARGS
ARGS = setup_args()
log.info(ARGS)
run(ARGS)
if __name__ == '__main__':
main()
| 32.737892
| 1,063
| 0.629797
|
0181993e5da1e8cb7a2b01d451922940f531d0e7
| 25,198
|
py
|
Python
|
pydcop/algorithms/gdba.py
|
rpgoldman/pyDcop
|
65e458b180b5614015872214c29d25fa8c306867
|
[
"BSD-3-Clause"
] | null | null | null |
pydcop/algorithms/gdba.py
|
rpgoldman/pyDcop
|
65e458b180b5614015872214c29d25fa8c306867
|
[
"BSD-3-Clause"
] | null | null | null |
pydcop/algorithms/gdba.py
|
rpgoldman/pyDcop
|
65e458b180b5614015872214c29d25fa8c306867
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD-3-Clause License
#
# Copyright 2017 Orange
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
r"""
GDBA Algorithm
--------------
See the following article for more details on adaptation mode:
Distributed Breakout Algorithm: Beyond Satisfaction' (S. Okamoto,
R. Zivan, A. Nahon, 2016)
Algorithm Parameters
^^^^^^^^^^^^^^^^^^^^
* modifier: str, "A" or "M"
How to increase costs dynamically to focus search: "A" (Additive) or "M" (Multiplicative).
Default is "A"
* violation: str, "NZ", "NM" or "MX".
  How to determine if a constraint is violated: Non-zero cost (NZ), Non-minimum (NM) or Maximum (MX).
  Default is "NZ"
* increase_mode: str, "E", "R", "C" or "T".
  Determine which costs have to be increased.
  Possible values: "E" (single-entry), "C" (column), "R" (row) or "T" (transversal).
  Default is "E".
"""
import functools
import logging
import operator
import random
from collections import defaultdict
from typing import Iterable, Dict, Any, Tuple
from pydcop.algorithms import AlgoParameterDef, ComputationDef
from pydcop.infrastructure.computations import Message, VariableComputation, register
from pydcop.computations_graph.constraints_hypergraph import VariableComputationNode
from pydcop.dcop.objects import Variable
from pydcop.dcop.relations import (
RelationProtocol,
NAryMatrixRelation,
generate_assignment_as_dict,
filter_assignment_dict,
optimal_cost_value)
__author__ = "Pierre Nagellen, Pierre Rust"
GRAPH_TYPE = "constraints_hypergraph"
HEADER_SIZE = 100
UNIT_SIZE = 5
def build_computation(comp_def: ComputationDef):
return GdbaComputation(
comp_def.node.variable,
comp_def.node.constraints,
mode=comp_def.algo.mode,
**comp_def.algo.params,
comp_def=comp_def
)
def computation_memory(computation: VariableComputationNode) -> float:
"""Return the memory footprint of a DBA computation.
Notes
-----
With DBA, a computation must only remember the current value for each
    of its neighbors.
Parameters
----------
computation: VariableComputationNode
a computation in the hyper-graph computation graph
Returns
-------
float:
the memory footprint of the computation.
"""
neighbors = set(
(n for l in computation.neighbors for n in l.nodes if n not in computation.name)
)
return len(neighbors) * UNIT_SIZE
def communication_load(src: VariableComputationNode, target: str) -> float:
"""Return the communication load between two variables.
Notes
-----
The main messages in DBA are the 'ok?' and 'improve' messages, which at
most contains a value and a possible improvement. The size of the message
    does not depend on the source or target variable, nor on their
respective domains.
Parameters
----------
src: VariableComputationNode
The ComputationNode for the source variable.
target: str
the name of the other variable `src` is sending messages to
Returns
-------
float
The size of messages sent from the src variable to the target variable.
"""
return 2 * UNIT_SIZE + HEADER_SIZE
# ############################ MESSAGES ################################
class GdbaOkMessage(Message):
def __init__(self, value):
super().__init__("gdba_ok", None)
self._value = value
@property
def value(self):
return self._value
@property
def size(self):
return 1
def __str__(self):
return "GdbaOkMessage({})".format(self.value)
def __repr__(self):
return "GdbaOkMessage({})".format(self.value)
def __eq__(self, other):
if type(other) != GdbaOkMessage:
return False
if self.value == other.value:
return True
return False
class GdbaImproveMessage(Message):
def __init__(self, improve):
super().__init__("gdba_improve", None)
self._improve = improve
@property
def improve(self):
return self._improve
@property
def size(self):
return 1
def __str__(self):
return "GdbaImproveMessage({})".format(self.improve)
def __repr__(self):
return "GdbaImproveMessage({})".format(self.improve)
def __eq__(self, other):
if type(other) != GdbaImproveMessage:
return False
if self.improve == other.improve:
return True
return False
algo_params = [
AlgoParameterDef("modifier", "str", ["A", "M"], "A"),
AlgoParameterDef("violation", "str", ["NZ", "NM", "MX"], "NZ"),
AlgoParameterDef("increase_mode", "str", ["E", "R", "C", "T"], "E"),
]
# ########################### COMPUTATION ############################
class GdbaComputation(VariableComputation):
"""
GdbaComputation implements an extension of DBA to suit DCOPs. Several
adaptations are possible. There are 3 dimensions which have several
modes, for a total of 24 variants. They are listed below:
    EffCost: how to concretely increase the cost of a constraint: by adding
    the modifier to the base cost, or by multiplying the base cost by it.
Possible values: 'A' (Additive) or 'M' (Multiplicative)
IsViolated: How to define that a constraint is violated.
Possible values: 'NZ' (Non-zero cost) or 'NM' (Non-minimum) or
'MX' (Maximum)
IncreaseMode: Determine which costs have to be increased. Possible
values: 'E' (single-entry) or 'C' (column) or 'R' (row) or 'T' (
Transversal)
See the following article for more details on those adaptation modes:
'Distributed Breakout Algorithm: Beyond Satisfaction' (S. Okamoto,
R. Zivan, A. Nahon, 2016)
"""
def __init__(
self,
variable: Variable,
constraints: Iterable[RelationProtocol],
mode="min",
modifier="A",
violation="NZ",
increase_mode="E",
msg_sender=None,
comp_def=None,
):
"""
:param variable: a variable object for which this computation is
responsible
:param constraints: the list of constraints involving this variable
:param modifier: The manner to modify costs. 'A' (resp. 'M') for
additive (resp. multiplicative) manner. Defaults to 'A'
:param violation: The criteria to determine a constraint violation.
Defaults to 'NZ'
:param increase_mode: The increase mode of a constraint cost
describes which modifiers should be increased.
Defaults to 'E'
"""
super().__init__(variable, comp_def)
self._msg_sender = msg_sender
# Handling messages arriving during wrong mode
self.__postponed_improve_messages__ = []
self.__postponed_ok_messages__ = []
self._waiting_mode = "starting"
self._mode = mode
self._modifier_mode = modifier
self._violation_mode = violation
self._increase_mode = increase_mode
base_modifier = 0 if self._modifier_mode == "A" else 1
self.__constraints__ = list()
self.__constraints_modifiers__ = dict()
# Transform the constraints in matrices, with also the min and max
# values recorded
for c in constraints:
if type(c) != NAryMatrixRelation:
rel_mat = NAryMatrixRelation.from_func_relation(c)
c_array = rel_mat._m.flat
maxi = c_array[0]
mini = c_array[0]
for i in c_array:
if i > maxi:
maxi = i
if i < mini:
mini = i
rel = (rel_mat, mini, maxi)
else:
c_array = c._m.flat
maxi = c_array[0]
mini = c_array[0]
for i in c_array:
if i > maxi:
maxi = i
if i < mini:
mini = i
rel = (c, mini, maxi)
self.__constraints__.append(rel)
# The modifiers for constraints. It is a Dictionary of dictionary
# (of dictionary ... regarding the arity of each constraint). It
# represents the value of the modifier for each constraint asgt.
self.__constraints_modifiers__[rel[0]] = defaultdict(lambda: base_modifier)
self._violated_constraints = []
# some constraints might be unary, and our variable can have several
# constraints involving the same variable
self._neighbors = set(
[v for c in constraints for v in c.dimensions if v != variable]
)
# Agent view of its neighbors resp. for ok and improve modes
self._neighbors_values = {}
self._neighbors_improvements = {}
self._my_improve = 0 # Possible improvement the agent can realize
self._new_value = None
@property
def constraints(self):
return self.__constraints__
# WARNING: This does not return the _neighbors attribute, but only the
# list of names of the neighbors
@property
def neighbors(self):
return self._neighbors
def on_start(self):
# Select an initial value.
if not self.neighbors:
# If a variable has no neighbors, we must select its final value immediately
# as it will never receive any message.
value, cost = optimal_cost_value(self._variable, self._mode)
self.value_selection(value, cost)
if self.logger.isEnabledFor(logging.INFO):
self.logger.info(
f"Select initial value {self.current_value} "
f"based on cost function for var {self._variable.name}"
)
self.finished()
else:
if self.variable.initial_value is None:
self.value_selection(random.choice(self.variable.domain), self.current_cost)
self.logger.info(
"%s gdba starts: randomly select value %s and " "send to neighbors",
self.variable.name,
self.current_value,
)
else:
self.value_selection(self.variable.initial_value, self.current_cost)
self.logger.info(
"%s gdba starts: select initial value %s and send to neighbors",
self.variable.name,
self.current_value,
)
self._send_current_value()
self._go_to_wait_ok_mode()
@register("gdba_ok")
def _on_ok_msg(self, variable_name, recv_msg, t):
self.logger.debug("%s received %s from %s", self.name, recv_msg, variable_name)
if self._waiting_mode == "ok":
self._handle_ok_message(variable_name, recv_msg)
else:
# A value message can be received during the improve mode (due to
# async.). In this case, its handling is postponed until the next
# iteration of wait_ok_mode
self.logger.debug(
"%s postponed processing of %s from %s",
self.name,
recv_msg,
variable_name,
)
self.__postponed_ok_messages__.append((variable_name, recv_msg))
def _handle_ok_message(self, variable_name, recv_msg):
self._neighbors_values[variable_name] = recv_msg.value
self.logger.debug(
"%s processes %s from %s", self.variable.name, recv_msg, variable_name
)
# if we have a value for all neighbors, compute our best value for
# conflict reduction
if len(self._neighbors_values) == len(self.neighbors):
self.logger.info(
"%s received values from all neighbors : %s",
self.name,
self._neighbors_values,
)
self.__cost__, self._violated_constraints = self.compute_eval_value(
self.current_value
)
# Set cost at the first step
# Compute and send best improvement to neighbors
bests, best_eval = self._compute_best_improvement()
self._my_improve = self.__cost__ - best_eval
if (self._my_improve > 0 and self._mode == "min") or (
self._my_improve < 0 and self._mode == "max"
):
self._new_value = random.choice(bests)
else:
self._new_value = self.current_value
self._send_improve()
self._go_to_wait_improve_mode()
else:
# Still waiting for other neighbors
self.logger.debug(
"%s waiting for OK values from other neighbors (got %s)",
self.name,
[n for n in self._neighbors_values],
)
def _send_improve(self):
msg = GdbaImproveMessage(self._my_improve)
for n in self.neighbors:
self.post_msg(n.name, msg)
self.logger.debug("%s has sent %s to %s", self.name, msg, n.name)
def _compute_best_improvement(self):
"""
Compute the best possible improvement for the current assignment.
:return: (list of values achieving best improvement, best improvement)
"""
best_vals = list()
best_eval = None
for v in self.variable.domain:
curr_eval = self.compute_eval_value(v)[0]
if best_eval is None:
best_eval = curr_eval
best_vals = [v]
elif (self._mode == "min" and curr_eval < best_eval) or (
self._mode == "max" and curr_eval > best_eval
):
best_eval = curr_eval
best_vals = [v]
elif curr_eval == best_eval:
best_vals.append(v)
return best_vals, best_eval
def _send_current_value(self):
self.new_cycle()
# #########TO DO#########
# This is where to put an eventual stop condition
for n in self.neighbors:
msg = GdbaOkMessage(self.current_value)
self.post_msg(n.name, msg)
self.logger.debug("%s has sent %s to %s", self.name, msg, n.name)
def compute_eval_value(self, val):
"""
This function computes the effective cost of the current assignment
for the agent's variable.
:param: a value for the variable of this object (it must be a value
from the definition domain).
:return: the evaluation value for the given value and the list
of indices of the violated constraints for this value
"""
new_eval_value = 0
violated_constraints = []
vars_with_cost = set()
for c in self.__constraints__:
(rel_mat, _, _) = c
for v in rel_mat.dimensions:
if hasattr(v, "cost_for_val"):
if v.name != self.name:
vars_with_cost.update([(v, self._neighbors_values[v.name])])
else:
vars_with_cost.update([(v, self.current_value)])
if self._is_violated(c, val):
violated_constraints.append(rel_mat)
new_eval_value += self._eff_cost(rel_mat, val)
vars_cost = functools.reduce(
operator.add,
[v.cost_for_val(v_val) for (v, v_val) in vars_with_cost],
0,
)
new_eval_value += vars_cost
return new_eval_value, violated_constraints
def _go_to_wait_improve_mode(self):
"""
Set _mode attribute to 'improve' and process postponed improve messages
(if any)
"""
self._waiting_mode = "improve"
self.logger.debug("%s enters improve mode", self.name)
        # if improve messages were received during wait_ok_mode, they should be
# processed now
for sender, msg in self.__postponed_improve_messages__:
self.logger.debug(
"%s processes postponed improve message %s", self.name, msg
)
self._handle_improve_message(sender, msg)
self.__postponed_improve_messages__.clear()
@register("gdba_improve")
def _on_improve_message(self, variable_name, recv_msg, t):
self.logger.debug("%s received %s from %s", self.name, recv_msg, variable_name)
if self._waiting_mode == "improve":
self._handle_improve_message(variable_name, recv_msg)
else:
self.logger.debug(
"%s postpones processing of %s from %s",
self.name,
recv_msg,
variable_name,
)
self.__postponed_improve_messages__.append((variable_name, recv_msg))
def _handle_improve_message(self, variable_name, recv_msg):
self._neighbors_improvements[variable_name] = recv_msg
self.logger.debug("%s computes %s from %s", self.name, recv_msg, variable_name)
# if messages received from all neighbors
if len(self._neighbors_improvements) == len(self.neighbors):
self.logger.info(
"%s improvement messages from all neighbors: %s",
self.name,
self._neighbors_values,
)
maxi = self._my_improve
max_list = [self.name]
for n, msg in self._neighbors_improvements.items():
if msg.improve > maxi:
maxi = msg.improve
max_list = [n]
elif msg.improve == maxi:
max_list.append(n)
if (self._my_improve > 0 and self._mode == "min") or (
(self._my_improve < 0) and (self._mode == "max")
):
winner = break_ties(max_list)
if winner == self.name: # covers all cases with self is in
# max_list
self.value_selection(
self._new_value, self.current_cost + self._my_improve
)
elif maxi == 0: # No neighbor can improve
for c in self._violated_constraints:
self._increase_cost(c)
# End of a cycle: clear agent view
self._neighbors_improvements.clear()
self._neighbors_values.clear()
self._violated_constraints.clear()
self._send_current_value()
self._go_to_wait_ok_mode()
else:
# Still waiting for other neighbors
self.logger.debug(
"%s waiting for improve values from other " "neighbors (got %s)",
self.name,
[n for n in self._neighbors_improvements],
)
def _go_to_wait_ok_mode(self):
self._waiting_mode = "ok"
self.logger.debug("%s enters values mode", self.name)
for sender, msg in self.__postponed_ok_messages__:
self.logger.debug("%s processes postponed value message %s", self.name, msg)
self._handle_ok_message(sender, msg)
self.__postponed_ok_messages__.clear()
def _is_violated(self, rel: Tuple[NAryMatrixRelation, float, float], val) -> bool:
"""
Determine if a constraint is violated according to the chosen violation
mode.
:param rel: A tuple (NAryMatrixRelation, min_val of the matrix,
max_val of the matrix)
:param val: the value of the agent variable to evaluate the violation
        :return: True (resp. False) if the constraint is (resp. not) violated
"""
m, min_val, max_val = rel
# Keep only the assignment of variables present in the constraint
global_asgt = self._neighbors_values.copy()
global_asgt[self.name] = val
tmp_assignment = filter_assignment_dict(global_asgt, m.dimensions)
if self._violation_mode == "NZ":
return m.get_value_for_assignment(tmp_assignment) != 0
elif self._violation_mode == "NM":
return m.get_value_for_assignment(tmp_assignment) != min_val
else: # self._violation_mode == 'MX'
return m.get_value_for_assignment(tmp_assignment) == max_val
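    # To illustrate the three modes on a hypothetical constraint whose matrix
    # holds the costs {0, 3, 5} (min 0, max 5):
    #   'NZ': assignments costing 3 or 5 are violated (non-zero cost)
    #   'NM': assignments costing 3 or 5 are violated (above the minimum, 0)
    #   'MX': only assignments costing 5 are violated (the maximum)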
def _eff_cost(self, rel: NAryMatrixRelation, val) -> float:
"""
Compute the effective cost of a constraint with combining its base
cost with the associated modifier value (i.e. the weight of the
constraint at the current step)
:param rel: a constraint given as NAryMatrixRelation.
        :param val: the value of the agent's variable for which to compute the
_eff_cost
:return: the effective cost of the constraint for the current
assignment.
"""
# Keep only the variables present in the relation rel
global_asgt = self._neighbors_values.copy()
global_asgt[self.name] = val
asgt = filter_assignment_dict(global_asgt, rel.dimensions)
c = rel.get_value_for_assignment(asgt)
modifier = self._get_modifier_for_assignment(rel, asgt)
if self._modifier_mode == "A":
c += modifier
else: # modifier_mode == 'M'
c *= modifier
return c
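    # For example, with a base cost c = 2 and an accumulated modifier of 3
    # (hypothetical values): modifier mode 'A' yields 2 + 3 = 5, while 'M'
    # yields 2 * 3 = 6.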
def _get_modifier_for_assignment(
self, constraint: NAryMatrixRelation, asgt: Dict[str, Any]
):
"""
Search in the modifiers dictionary, the modifier corresponding to the
given constraint and assignment, and return its value
:param constraint: a constraint as NAryMatrixRelation
:param asgt: a complete assignment for the constraint as a dictionary
{variable: value}
:return: the value of the modifier of the constraint for the given
assignment
"""
modifier = self.__constraints_modifiers__[constraint]
key = frozenset(asgt.items())
return modifier[key]
def _increase_modifier(self, constraint: NAryMatrixRelation, asgt: Dict[str, Any]):
"""
Increase the modifier corresponding to the arguments
:param constraint: a constraint as NAryMatrixRelation
:param asgt: a complete assignment for the constraint
"""
modifier = self.__constraints_modifiers__[constraint]
key = frozenset(asgt.items())
modifier[key] += 1
def _increase_cost(self, constraint: NAryMatrixRelation):
"""
Increase the cost(s) of a constraint according to the given
increase_mode
:param constraint: a constraint as NAryMatrixRelation
:return:
"""
asgt = self._neighbors_values.copy()
asgt[self.name] = self.current_value
self.logger.debug("%s increases cost for %s", self.name, constraint)
if self._increase_mode == "E":
self._increase_modifier(constraint, asgt)
elif self._increase_mode == "R":
for val in self.variable.domain:
asgt[self.name] = val
self._increase_modifier(constraint, asgt)
elif self._increase_mode == "C":
# Creates all the assignments for the constraints, with the
# agent variable set to its current value
asgts = generate_assignment_as_dict(list(self._neighbors))
for ass in asgts:
ass[self.name] = self.current_value
self._increase_modifier(constraint, ass)
elif self._increase_mode == "T":
# Creates all the assignments for the constraints
asgts = generate_assignment_as_dict(constraint.dimensions)
for ass in asgts:
self._increase_modifier(constraint, ass)
def break_ties(val_list):
return sorted(val_list)[0]
| 36.947214
| 101
| 0.611477
|
1563e2bd33313f8715016714ed09e63e900f588c
| 7,191
|
py
|
Python
|
backend/orange_paper_28802/settings.py
|
crowdbotics-apps/orange-paper-28802
|
b0fe224473d271577b110142ab42d25c50465084
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/orange_paper_28802/settings.py
|
crowdbotics-apps/orange-paper-28802
|
b0fe224473d271577b110142ab42d25c50465084
|
[
"FTL",
"AML",
"RSA-MD"
] | 20
|
2021-07-13T17:43:59.000Z
|
2021-07-13T17:44:02.000Z
|
backend/orange_paper_28802/settings.py
|
crowdbotics-apps/orange-paper-28802
|
b0fe224473d271577b110142ab42d25c50465084
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
"""
Django settings for orange_paper_28802 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
from modules.manifest import get_modules
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
MODULES_APPS = get_modules()
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS + MODULES_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'orange_paper_28802.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'web_build')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'orange_paper_28802.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'), os.path.join(BASE_DIR, 'web_build/static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
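# A hypothetical production environment enabling S3 storage might set:
#   AWS_ACCESS_KEY_ID=...
#   AWS_SECRET_ACCESS_KEY=...
#   AWS_STORAGE_BUCKET_NAME=my-app-media
#   AWS_STORAGE_REGION=us-east-1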
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| 28.995968
| 112
| 0.730914
|
94c5247483e06b6be6f337ec406b8c597b933f04
| 3,863
|
py
|
Python
|
binary_release.py
|
jgough/opensearch-curator
|
e8d7eb4d969eac551db9f99bd021d0c05e28dc35
|
[
"Apache-2.0"
] | null | null | null |
binary_release.py
|
jgough/opensearch-curator
|
e8d7eb4d969eac551db9f99bd021d0c05e28dc35
|
[
"Apache-2.0"
] | null | null | null |
binary_release.py
|
jgough/opensearch-curator
|
e8d7eb4d969eac551db9f99bd021d0c05e28dc35
|
[
"Apache-2.0"
] | null | null | null |
import os
import re
import sys
import shutil
import hashlib
# This script simply takes the output of `python setup.py build_exe` and makes
# a compressed archive (zip for windows, tar.gz for Linux) for distribution.
# Utility function to read from file.
def fread(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def get_version():
VERSIONFILE="curator/_version.py"
verstrline = fread(VERSIONFILE).strip()
vsre = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(vsre, verstrline, re.M)
if mo:
VERSION = mo.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))
build_number = os.environ.get('CURATOR_BUILD_NUMBER', None)
if build_number:
return VERSION + "b{}".format(build_number)
return VERSION
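# e.g. with __version__ = "5.8.1" in curator/_version.py and
# CURATOR_BUILD_NUMBER=3 in the environment (hypothetical values),
# get_version() returns "5.8.1b3".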
archive_format = 'gztar'
enviro = dict(os.environ)
platform = sys.platform
pyver = str(sys.version_info[0]) + '.' + str(sys.version_info[1])
if platform == 'win32':
# Win32 stuff
archive_format = 'zip'
build_name = 'exe.win-' + enviro['PROCESSOR_ARCHITECTURE'].lower() + '-' + pyver
target_name = "curator-" + str(get_version()) + "-amd64"
elif platform == 'linux' or platform == 'linux2':
sys_string = enviro['_system_type'].lower() + '-' + enviro['_system_arch'].lower()
build_name = 'exe.' + sys_string + '-' + pyver
target_name = "curator-" + str(get_version()) + "-" + sys_string
else:
# Unsupported platform?
print('Your platform ({0}) is not yet supported for binary build/distribution.'.format(platform))
sys.exit(1)
#sys_string = sys_type + '-' + sys_arch
#build_name = 'exe.' + sys_string + '-' + pyver
#print('Expected build directory: {0}'.format(build_name))
build_path = os.path.join('build', build_name)
if os.path.exists(build_path):
#print("I found the path: {0}".format(build_path))
target_path = os.path.join('.', target_name)
# Check to see if an older directory exists...
if os.path.exists(target_path):
print('An older build exists at {0}. Please delete this before continuing.'.format(target_path))
sys.exit(1)
else:
shutil.copytree(build_path, target_path)
# Ensure the rename went smoothly, then continue
if os.path.exists(target_path):
#print("Build successfully renamed")
        # float('3.10') == 3.1, so compare the version tuple rather than a float
        if sys.version_info >= (2, 7):
shutil.make_archive('opensearch-' + target_name, archive_format, '.', target_path)
if platform == 'win32':
fname = 'opensearch-' + target_name + '.zip'
else:
fname = 'opensearch-' + target_name + '.tar.gz'
# Clean up directory if we made a viable archive.
if os.path.exists(fname):
shutil.rmtree(target_path)
else:
print('Something went wrong creating the archive {0}'.format(fname))
sys.exit(1)
md5sum = hashlib.md5(open(fname, 'rb').read()).hexdigest()
sha1sum = hashlib.sha1(open(fname, 'rb').read()).hexdigest()
with open(fname + ".md5.txt", "w") as md5_file:
md5_file.write("{0}".format(md5sum))
with open(fname + ".sha1.txt", "w") as sha1_file:
sha1_file.write("{0}".format(sha1sum))
print('Archive: {0}'.format(fname))
print('{0} = {1}'.format(fname + ".md5.txt", md5sum))
print('{0} = {1}'.format(fname + ".sha1.txt", sha1sum))
else:
print('Your python version ({0}) is too old to use with shutil.make_archive.'.format(pyver))
print('You can manually compress the {0} directory to achieve the same result.'.format(target_name))
else:
# We couldn't find a build_path
print("Build not found. Please run 'python setup.py build_exe' to create the build directory.")
sys.exit(1)
| 41.095745
| 112
| 0.623609
|
d43a34a6fb3e4a70412c4090e05b1f4d4a5cef2e
| 2,482
|
py
|
Python
|
example/TCN/network.py
|
ddddwee1/TorchSUL
|
775832049564d8ee7c43e510b57bd716e0a746dd
|
[
"WTFPL"
] | 7
|
2019-12-14T12:23:36.000Z
|
2021-11-16T00:25:13.000Z
|
example/TCN/network.py
|
ddddwee1/TorchSUL
|
775832049564d8ee7c43e510b57bd716e0a746dd
|
[
"WTFPL"
] | 1
|
2020-10-20T06:33:53.000Z
|
2020-10-26T19:01:21.000Z
|
example/TCN/network.py
|
ddddwee1/TorchSUL
|
775832049564d8ee7c43e510b57bd716e0a746dd
|
[
"WTFPL"
] | 1
|
2021-08-24T09:09:36.000Z
|
2021-08-24T09:09:36.000Z
|
from TorchSUL import Model as M
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class ResBlock1D(M.Model):
def initialize(self, outchn=1024, k=3):
self.k = k
# self.bn = M.BatchNorm()
self.c1 = M.ConvLayer1D(k, outchn, stride=k, activation=M.PARAM_PRELU, batch_norm=True, usebias=False, pad='VALID')
self.c2 = M.ConvLayer1D(1, outchn, activation=M.PARAM_PRELU, batch_norm=True, usebias=False, pad='VALID')
def forward(self, x):
short = x
# residual branch
# branch = self.bn(x)
# branch = M.activation(branch, M.PARAM_LRELU)
branch = self.c1(x)
branch = F.dropout(branch, 0.5, self.training, False)
branch = self.c2(branch)
branch = F.dropout(branch, 0.5, self.training, False)
# slicing & shortcut
# branch_shape = branch.shape[-1]
# short_shape = short.shape[-1]
# start = (short_shape - branch_shape) // 2
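		# Take every k-th frame (offset k//2 to stay centred) so the shortcut's
		# temporal length matches the output of the stride-k convolution above.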
short = short[:, :, self.k//2::self.k]
res = short + branch
# res = F.dropout(res, 0.25, self.training, False)
return res
class Refine2dNet(M.Model):
def initialize(self, num_kpts, temp_length):
self.num_kpts = num_kpts
self.temp_length = temp_length
self.c1 = M.ConvLayer1D(3, 1024, stride=3, activation=M.PARAM_PRELU, pad='VALID', batch_norm=True, usebias=False)
self.r1 = ResBlock1D(k=3)
self.r2 = ResBlock1D(k=3)
self.r3 = ResBlock1D(k=3)
self.r4 = ResBlock1D(k=3)
# self.r3 = ResBlock1D(k=3, dilation=3)
# self.c5 = M.ConvLayer1D(9, 256, activation=M.PARAM_PRELU, pad='VALID', batch_norm=True, usebias=False)
self.c4 = M.ConvLayer1D(1, num_kpts*3)
def forward(self, x, drop=True):
x = x.view(x.shape[0], x.shape[1], self.num_kpts * 2)
x = x.permute(0,2,1)
x = self.c1(x)
x = self.r1(x)
x = self.r2(x)
x = self.r3(x)
x = self.r4(x)
# x = self.r5(x)
# x = self.c5(x)
x = self.c4(x)
x = x.permute(0, 2, 1)
x = x.reshape(x.shape[0], x.shape[1], self.num_kpts, 3)
return x
def evaluate(self, x):
aa = []
for i in range(x.shape[1]-self.temp_length+1):
aa.append(x[:, i:i+self.temp_length])
aa = torch.cat(aa, dim=0)
y = self(aa)
y = y.permute(1,0,2,3)
return y
class Discriminator2D(M.Model):
def initialize(self):
self.c1 = M.ConvLayer1D(1, 1024, activation=M.PARAM_PRELU)
self.c2 = M.ConvLayer1D(1, 256, activation=M.PARAM_PRELU)
self.c3 = M.ConvLayer1D(1, 256, activation=M.PARAM_PRELU)
self.c4 = M.ConvLayer1D(1, 1)
def forward(self, x):
return self.c4(self.c3(self.c2(self.c1(x))))
| 29.903614
| 117
| 0.665995
|
f42c0d7def942d07adde359b2744b90cb277ffb6
| 9,402
|
py
|
Python
|
paper/architectures/efficientnet.py
|
KRLGroup/memory-wrap
|
c1df5d7f53800939ec40f5d3a432248a28846460
|
[
"MIT"
] | 2
|
2021-06-11T09:47:16.000Z
|
2021-09-13T02:37:08.000Z
|
paper/architectures/efficientnet.py
|
KRLGroup/memory-wrap
|
c1df5d7f53800939ec40f5d3a432248a28846460
|
[
"MIT"
] | null | null | null |
paper/architectures/efficientnet.py
|
KRLGroup/memory-wrap
|
c1df5d7f53800939ec40f5d3a432248a28846460
|
[
"MIT"
] | 1
|
2021-09-13T02:37:15.000Z
|
2021-09-13T02:37:15.000Z
|
'''EfficientNet in PyTorch.
References:
https://github.com/kuangliu/pytorch-cifar/blob/master/models/efficientnet.py
https://github.com/keras-team/keras-applications/blob/master/keras_applications/efficientnet.py
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from memorywrap import MemoryWrapLayer as EncoderMemoryWrapLayer
from memorywrap import BaselineMemory as MemoryWrapLayer
def swish(x):
return x * x.sigmoid()
def drop_connect(x, drop_ratio):
keep_ratio = 1.0 - drop_ratio
mask = torch.empty([x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device)
mask.bernoulli_(keep_ratio)
x.div_(keep_ratio)
x.mul_(mask)
return x
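# Note: drop_connect implements stochastic depth at the sample level: with
# probability drop_ratio an entire residual branch is zeroed for a sample, and
# the survivors are rescaled by 1/keep_ratio so the expected activation is
# unchanged between training and inference.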
class SE(nn.Module):
'''Squeeze-and-Excitation block with Swish.'''
def __init__(self, in_channels, se_channels):
super(SE, self).__init__()
self.se1 = nn.Conv2d(in_channels, se_channels,
kernel_size=1, bias=True)
self.se2 = nn.Conv2d(se_channels, in_channels,
kernel_size=1, bias=True)
def forward(self, x):
out = F.adaptive_avg_pool2d(x, (1, 1))
out = swish(self.se1(out))
out = self.se2(out).sigmoid()
out = x * out
return out
class Block(nn.Module):
'''expansion + depthwise + pointwise + squeeze-excitation'''
def __init__(self,
in_channels,
out_channels,
kernel_size,
stride,
expand_ratio=1,
se_ratio=0.,
drop_rate=0.):
super(Block, self).__init__()
self.stride = stride
self.drop_rate = drop_rate
self.expand_ratio = expand_ratio
# Expansion
channels = expand_ratio * in_channels
self.conv1 = nn.Conv2d(in_channels,
channels,
kernel_size=1,
stride=1,
padding=0,
bias=False)
self.bn1 = nn.BatchNorm2d(channels)
# Depthwise conv
self.conv2 = nn.Conv2d(channels,
channels,
kernel_size=kernel_size,
stride=stride,
padding=(1 if kernel_size == 3 else 2),
groups=channels,
bias=False)
self.bn2 = nn.BatchNorm2d(channels)
# SE layers
se_channels = int(in_channels * se_ratio)
self.se = SE(channels, se_channels)
# Output
self.conv3 = nn.Conv2d(channels,
out_channels,
kernel_size=1,
stride=1,
padding=0,
bias=False)
self.bn3 = nn.BatchNorm2d(out_channels)
# Skip connection if in and out shapes are the same (MV-V2 style)
self.has_skip = (stride == 1) and (in_channels == out_channels)
def forward(self, x):
out = x if self.expand_ratio == 1 else swish(self.bn1(self.conv1(x)))
out = swish(self.bn2(self.conv2(out)))
out = self.se(out)
out = self.bn3(self.conv3(out))
if self.has_skip:
if self.training and self.drop_rate > 0:
out = drop_connect(out, self.drop_rate)
out = out + x
return out
class EfficientNet(nn.Module):
def __init__(self, cfg, num_classes=10):
super(EfficientNet, self).__init__()
self.cfg = cfg
self.conv1 = nn.Conv2d(3,
32,
kernel_size=3,
stride=1,
padding=1,
bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.layers = self._make_layers(in_channels=32)
self.linear = nn.Linear(cfg['out_channels'][-1], num_classes)
def _make_layers(self, in_channels):
layers = []
cfg = [self.cfg[k] for k in ['expansion', 'out_channels', 'num_blocks', 'kernel_size',
'stride']]
b = 0
blocks = sum(self.cfg['num_blocks'])
for expansion, out_channels, num_blocks, kernel_size, stride in zip(*cfg):
strides = [stride] + [1] * (num_blocks - 1)
for stride in strides:
drop_rate = self.cfg['drop_connect_rate'] * b / blocks
layers.append(
Block(in_channels,
out_channels,
kernel_size,
stride,
expansion,
se_ratio=0.25,
drop_rate=drop_rate))
                in_channels = out_channels
                b += 1  # advance the block index so the drop_connect rate grows with depth
return nn.Sequential(*layers)
def forward(self, x):
out = swish(self.bn1(self.conv1(x)))
out = self.layers(out)
out = F.adaptive_avg_pool2d(out, 1)
out = out.view(out.size(0), -1)
dropout_rate = self.cfg['dropout_rate']
if self.training and dropout_rate > 0:
out = F.dropout(out, p=dropout_rate)
out = self.linear(out)
return out
class MemoryEfficientNet(nn.Module):
def __init__(self, cfg, num_classes=10):
super(MemoryEfficientNet, self).__init__()
self.cfg = cfg
self.conv1 = nn.Conv2d(3,
32,
kernel_size=3,
stride=1,
padding=1,
bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.layers = self._make_layers(in_channels=32)
# replaced last layer
#self.linear = nn.Linear(cfg['out_channels'][-1], num_classes)
self.mw = MemoryWrapLayer(cfg['out_channels'][-1], num_classes)
def _make_layers(self, in_channels):
layers = []
cfg = [self.cfg[k] for k in ['expansion', 'out_channels', 'num_blocks', 'kernel_size',
'stride']]
b = 0
blocks = sum(self.cfg['num_blocks'])
for expansion, out_channels, num_blocks, kernel_size, stride in zip(*cfg):
strides = [stride] + [1] * (num_blocks - 1)
for stride in strides:
drop_rate = self.cfg['drop_connect_rate'] * b / blocks
layers.append(
Block(in_channels,
out_channels,
kernel_size,
stride,
expansion,
se_ratio=0.25,
drop_rate=drop_rate))
                in_channels = out_channels
                b += 1  # advance the block index so drop_connect_rate actually ramps up per block
return nn.Sequential(*layers)
def forward_encoder(self, x):
out = swish(self.bn1(self.conv1(x)))
out = self.layers(out)
out = F.adaptive_avg_pool2d(out, 1)
out = out.view(out.size(0), -1)
dropout_rate = self.cfg['dropout_rate']
if self.training and dropout_rate > 0:
out = F.dropout(out, p=dropout_rate)
return out
def forward(self, x, ss, return_weights=False):
# input
out = self.forward_encoder(x)
out_ss = self.forward_encoder(ss)
# prediction
out_mw = self.mw(out, out_ss, return_weights)
return out_mw
class EncoderMemoryEfficientNet(MemoryEfficientNet):
def __init__(self, cfg, num_classes=10):
super(MemoryEfficientNet, self).__init__()
self.cfg = cfg
self.conv1 = nn.Conv2d(3,
32,
kernel_size=3,
stride=1,
padding=1,
bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.layers = self._make_layers(in_channels=32)
# replaced last layer
#self.linear = nn.Linear(cfg['out_channels'][-1], num_classes)
self.mw = EncoderMemoryWrapLayer(cfg['out_channels'][-1], num_classes)
def MemoryEfficientNetB0(num_classes=10):
cfg = {
'num_blocks': [1, 2, 2, 3, 3, 4, 1],
'expansion': [1, 6, 6, 6, 6, 6, 6],
'out_channels': [16, 24, 40, 80, 112, 192, 320],
'kernel_size': [3, 3, 5, 3, 5, 5, 3],
'stride': [1, 2, 2, 2, 1, 2, 1],
'dropout_rate': 0.2,
'drop_connect_rate': 0.2,
}
return MemoryEfficientNet(cfg, num_classes)
def EncoderMemoryEfficientNetB0(num_classes=10):
cfg = {
'num_blocks': [1, 2, 2, 3, 3, 4, 1],
'expansion': [1, 6, 6, 6, 6, 6, 6],
'out_channels': [16, 24, 40, 80, 112, 192, 320],
'kernel_size': [3, 3, 5, 3, 5, 5, 3],
'stride': [1, 2, 2, 2, 1, 2, 1],
'dropout_rate': 0.2,
'drop_connect_rate': 0.2,
}
return EncoderMemoryEfficientNet(cfg, num_classes)
def EfficientNetB0(num_classes=10):
cfg = {
'num_blocks': [1, 2, 2, 3, 3, 4, 1],
'expansion': [1, 6, 6, 6, 6, 6, 6],
'out_channels': [16, 24, 40, 80, 112, 192, 320],
'kernel_size': [3, 3, 5, 3, 5, 5, 3],
'stride': [1, 2, 2, 2, 1, 2, 1],
'dropout_rate': 0.2,
'drop_connect_rate': 0.2,
}
    return EfficientNet(cfg, num_classes)
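

# --- Usage sketch (not part of the original file) ---------------------------
# A minimal smoke test, assuming CIFAR-sized 3x32x32 inputs and that the
# memory wrap layer consumes (encoded query batch, encoded support-set batch)
# exactly as wired in MemoryEfficientNet.forward above.
if __name__ == '__main__':
    net = MemoryEfficientNetB0(num_classes=10)
    x = torch.randn(4, 3, 32, 32)    # query images
    ss = torch.randn(16, 3, 32, 32)  # support/memory set drawn from training data
    out = net(x, ss)
    print(out.shape)  # expected: torch.Size([4, 10])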
| 34.566176
| 95
| 0.5117
|
a867a718dba852787cc190184ce406c6ff0df45c
| 1,317
|
py
|
Python
|
prx/PathProxy.py
|
Iuty/vue.fastcnn2.winservice
|
de4c8dcd19549ca9992346101c78b87077c86813
|
[
"Apache-2.0"
] | null | null | null |
prx/PathProxy.py
|
Iuty/vue.fastcnn2.winservice
|
de4c8dcd19549ca9992346101c78b87077c86813
|
[
"Apache-2.0"
] | null | null | null |
prx/PathProxy.py
|
Iuty/vue.fastcnn2.winservice
|
de4c8dcd19549ca9992346101c78b87077c86813
|
[
"Apache-2.0"
] | null | null | null |
import os


class PathProxy:
    """
    Path definitions for the FastCNN application.
    """
    app_path = "d:\\FastCNN\\"
    project_path = app_path + "Projects\\"

    @staticmethod
    def getConfigPath():
        return PathProxy.app_path + "Config\\Config.conf"

    @staticmethod
    def getProjectDir(projectname):
        return os.path.join(PathProxy.project_path, projectname) + "\\"

    @staticmethod
    def getSettingPath(projectname):
        return PathProxy.getProjectDir(projectname) + "Setting.conf"

    @staticmethod
    def getProjectTrainDir(projectname):
        return PathProxy.getProjectDir(projectname) + "train" + "\\"

    @staticmethod
    def getProjectTestDir(projectname):
        return PathProxy.getProjectDir(projectname) + "test" + "\\"

    @staticmethod
    def getClassDir(projectname, classname):
        return PathProxy.getProjectDir(projectname) + classname + "\\"

    @staticmethod
    def getModelDir(projectname):
        return os.path.join(PathProxy.getProjectDir(projectname), "model\\")

    @staticmethod
    def getModelTagDir(projectname, tag):
        return os.path.join(PathProxy.getModelDir(projectname), tag)

    @staticmethod
    def getModelParamPath(projectname, tag):
        return os.path.join(PathProxy.getModelTagDir(projectname, tag), "Param.conf")

    """
    Helper methods.
    """

    @staticmethod
    def mkdir(path):
        # Create the directory (and parents) only if it does not exist yet.
        if os.path.exists(path):
            return
        os.makedirs(path)
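

# Usage sketch (not part of the original file): "demo" is a hypothetical
# project name; all paths resolve under the d:\FastCNN layout defined above.
if __name__ == "__main__":
    model_dir = PathProxy.getModelDir("demo")
    PathProxy.mkdir(model_dir)
    print(PathProxy.getModelParamPath("demo", "tag1"))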
| 30.627907
| 85
| 0.637813
|
6f7f4a0a76fbb9aa125e4010056cfde45b697e17
| 59,836
|
py
|
Python
|
tensorflow/python/eager/backprop_test.py
|
sjmielke/tensorflow
|
8648e4f6cf131170ef778f7866e5a9f78e41f893
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/eager/backprop_test.py
|
sjmielke/tensorflow
|
8648e4f6cf131170ef778f7866e5a9f78e41f893
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/eager/backprop_test.py
|
sjmielke/tensorflow
|
8648e4f6cf131170ef778f7866e5a9f78e41f893
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl.testing import parameterized
import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.layers.pooling import max_pooling3d
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.signal import fft_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import training
from tensorflow.python.util import nest
def _chain_grads(primals, grad_fns):
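  # Recursively nests custom gradients: grad_fns[0] is the first-order
  # gradient, and each remaining entry becomes the custom gradient of the one
  # before it, so the n-th differentiation invokes grad_fns[n-1].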
if len(grad_fns) == 1:
return grad_fns[-1]
@custom_gradient.custom_gradient(primals=primals)
def grad(*args, **kwargs):
return (grad_fns[0](*args, **kwargs),
_chain_grads(primals, grad_fns[1:]))
return grad
class BackpropTest(test.TestCase, parameterized.TestCase):
@test_util.run_in_graph_and_eager_modes
def testAggregateGradients(self):
def fn(x):
ind1 = constant_op.constant(np.array([0, 1]))
ind2 = constant_op.constant(np.array([2, 3]))
ind3 = constant_op.constant(np.array([1, 3]))
g1 = embedding_ops.embedding_lookup(x, ind1)
g2 = embedding_ops.embedding_lookup(x, ind2)
g3 = embedding_ops.embedding_lookup(x, ind3)
return g1 * g2 * g3
var_np = np.random.rand(4, 2).astype(np.float32)
var = constant_op.constant(var_np)
grad = backprop.gradients_function(fn, [0])(var)[0]
grad = self.evaluate(ops.convert_to_tensor(grad))
if not context.executing_eagerly():
tf_var = array_ops.constant(var_np, dtypes.float32)
tf_ind1 = array_ops.constant([0, 1])
tf_ind2 = array_ops.constant([2, 3])
tf_ind3 = array_ops.constant([1, 3])
tf_g1 = embedding_ops.embedding_lookup(tf_var, tf_ind1)
tf_g2 = embedding_ops.embedding_lookup(tf_var, tf_ind2)
tf_g3 = embedding_ops.embedding_lookup(tf_var, tf_ind3)
tf_y = tf_g1 * tf_g2 * tf_g3
tf_grad = gradients.gradients(tf_y, [tf_var])[0]
tf_dense_grad = math_ops.unsorted_segment_sum(
tf_grad.values, tf_grad.indices, tf_grad.dense_shape[0])
self.assertAllClose(grad, self.evaluate(tf_dense_grad))
@test_util.run_in_graph_and_eager_modes
def testAggregateGradientsWithTensor(self):
def fn(x):
ind1 = constant_op.constant(np.array([0, 1]))
# A mixture of IndexedSlices and dense tensor to aggregate.
g1 = embedding_ops.embedding_lookup(x, ind1)
g2 = math_ops.reduce_sum(x * constant_op.constant(2.0))
return g1 * g2
var_np = np.random.rand(4, 2).astype(np.float32)
var = constant_op.constant(var_np)
grad = backprop.gradients_function(fn, [0])(var)[0]
grad = self.evaluate(ops.convert_to_tensor(grad))
if not context.executing_eagerly():
tf_var = array_ops.constant(var_np, dtypes.float32)
tf_ind1 = array_ops.constant([0, 1])
tf_g1 = embedding_ops.embedding_lookup(tf_var, tf_ind1)
tf_g2 = math_ops.reduce_sum(tf_var * 2.0, axis=(0, 1))
tf_y = tf_g1 * tf_g2
tf_grad = gradients.gradients(tf_y, [tf_var])[0]
self.assertAllClose(grad, tf_grad)
def testImplicitGradWithResourceVariable(self):
x = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(1.0), name='x')
def fn():
b = constant_op.constant(2.0)
c = math_ops.add(x.value(), b)
return math_ops.add(c, constant_op.constant(3.0))
grads_and_vars = backprop.implicit_grad(fn)()
self.assertAllEqual(grads_and_vars[0][0], 1.0)
self.assertAllEqual(id(grads_and_vars[0][1]), id(x))
@parameterized.named_parameters(
[('Function', def_function.function),
('NoFunction', lambda f: f)])
def testIdentityBehaviorConsistent(self, decorator):
@decorator
def f(x):
x1 = array_ops.identity(x)
with backprop.GradientTape() as t:
t.watch(x)
t.watch(x1)
y1 = x * 2.
y2 = x1 * 3.
loss = y1 + y2
return t.gradient(loss, [x, x1])
self.assertAllClose([2., 3.], f(constant_op.constant(10.)))
def testGradientInsideLoop(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
def body(_):
_ = v + 1.0 # This reads the variable inside the loop context
with backprop.GradientTape() as t:
result = v * 2
self.assertIsNotNone(t.gradient(result, v))
return 1.0
control_flow_ops.while_loop(lambda i: False, body, [1.0])
def testWhereGradient(self):
# Note: where is special because only some of its arguments are of
# differentiable dtypes.
def f(x):
return array_ops.where(x < 10, x, x * x)
g = backprop.gradients_function(f)
self.assertAllEqual(g(5.)[0], 1.0)
self.assertAllEqual(g(50.)[0], 100.0)
def testTwoTargets(self):
with backprop.GradientTape() as t:
x = constant_op.constant(3.0)
y = constant_op.constant(2.0)
t.watch([x, y])
xx = 2 * x
yy = 3 * y
dx, dy = t.gradient([xx, yy], [x, y])
self.assertAllEqual(dx, 2.0)
self.assertAllEqual(dy, 3.0)
def testCustomGradientEmptyError(self):
@custom_gradient.custom_gradient
def identity(x):
def grad(_):
return [] # This return value is wrong!
return x, grad
x = variables.Variable(1.0)
with backprop.GradientTape() as t:
y = identity(x)
with self.assertRaises(ValueError):
t.gradient(y, [x])
def testOutputGradUsedInComputation(self):
with backprop.GradientTape() as t:
x = constant_op.constant(3.0)
y = constant_op.constant(2.0)
t.watch([x, y])
loss = x * y
dx, = t.gradient([loss, x], [x], output_gradients=[1.0, 2.0])
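    # dx = 1.0 * d(loss)/dx + 2.0 * dx/dx = y + 2.0 = 4.0 at y = 2.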
self.assertAllEqual(dx, 4.0)
def testDy(self):
def f(x):
return x
grad_fn = backprop.gradients_function(f)
self.assertAllEqual(2., grad_fn(1., dy=2.)[0])
def testGradientInteger(self):
def f(x):
return x + x
int_tensor = constant_op.constant(1)
self.assertEqual(backprop.gradients_function(f)(int_tensor)[0], None)
def testErrors(self):
@custom_gradient.custom_gradient
def f(x):
def grad(_):
raise RuntimeError('x')
return x, grad
# TODO(apassos) raise the right error here
with self.assertRaises(RuntimeError):
backprop.gradients_function(f)(constant_op.constant(1.0))
def testGradientsFunctionInCustomGradient(self):
@custom_gradient.custom_gradient
def f(x):
(y,) = backprop.gradients_function(lambda x: x * x)(x)
def grad(dy):
return [2 * dy]
return y, grad
self.assertAllEqual(f(1.0), 2.0)
def testImplicitGradOverEmbeddingLookup(self):
batch_size = 8
embedding_size = 512
vocab_size = 1000
lrn_rate = 0.1
random_init = random_ops.random_uniform([vocab_size, embedding_size])
x = array_ops.ones((batch_size), dtypes.int64)
embedding = resource_variable_ops.ResourceVariable(
initial_value=random_init, dtype=dtypes.float32, name='embedding')
def f():
embedded_x = embedding_ops.embedding_lookup(embedding, x)
return constant_op.constant(1.0, dtypes.float32) - embedded_x
grad = backprop.implicit_grad(f)()[0][0]
opt = training.GradientDescentOptimizer(lrn_rate)
with ops.Graph().as_default(), self.cached_session():
tf_x = array_ops.ones((batch_size), dtypes.int64)
# TODO(ashankar,apassos): Change to ResourceVariable.
tf_embedding = variables.Variable(
random_init.numpy(), name='tf_embedding')
tf_embedded_x = embedding_ops.embedding_lookup(tf_embedding, tf_x)
tf_y = 1.0 - tf_embedded_x
tf_grad = gradients.gradients(tf_y, [tf_embedding])[0]
tf_opt = training.GradientDescentOptimizer(0.1)
tf_embedding.initializer.run()
self.assertAllClose(tf_grad.indices.eval(), grad.indices)
self.assertAllClose(tf_grad.values.eval(), grad.values)
tf_opt.apply_gradients([(tf_grad, tf_embedding)]).run()
expected = self.evaluate(tf_embedding)
opt.apply_gradients([(grad, embedding)])
self.assertAllClose(expected, embedding.read_value())
def testImplicitGradOrdering(self):
v0 = resource_variable_ops.ResourceVariable(1.0)
v1 = resource_variable_ops.ResourceVariable(2.0)
def f():
x = v1 * v1
y = v0 * v0
return x + y
grads = backprop.implicit_grad(f)()
ordered_variables = [x[1] for x in grads]
self.assertIs(ordered_variables[0], v0)
self.assertIs(ordered_variables[1], v1)
def testTapeNoOpGradient(self):
x = constant_op.constant(3.0)
with backprop.GradientTape() as t:
t.watch(x)
y = x
self.assertEqual(t.gradient(y, x).numpy(), 1.0)
def testTapeIdentityGradientIsIdentity(self):
x = constant_op.constant(3.0)
with backprop.GradientTape() as t:
t.watch(x)
y = array_ops.identity(x)
self.assertEqual(t.gradient(y, x).numpy(), 1.0)
def testTapeGradientMultiTargetOneIsSource(self):
x = constant_op.constant(2.0)
with backprop.GradientTape() as t:
t.watch(x)
y = x*x
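    # Targets [x, x*x] against source x: d(x + x*x)/dx = 1 + 2x = 5 at x = 2.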
self.assertEqual(t.gradient([x, y], x).numpy(), 5.0)
def testTapeNoOpGradientWithMultiTargetAllSource(self):
x = constant_op.constant(3.0)
with backprop.GradientTape() as t:
t.watch(x)
y = x
self.assertEqual(t.gradient([y, y], x).numpy(), 2.0)
def testTapeNoOpGradientWithMultiTargetMultiSource(self):
x = constant_op.constant(3.0)
y = constant_op.constant(5.0)
with backprop.GradientTape() as t:
t.watch(x)
t.watch(y)
z = y * y
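    # Expected: dx = 1.0 and dy = 1 + 2y = 11.0 at y = 5.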
self.assertAllEqual(t.gradient([x, y, z], [x, y]), [1.0, 11.0])
def testTapeGradientStringTarget(self):
s = constant_op.constant('unknown', dtype=dtypes.string)
x = constant_op.constant(3.0)
with backprop.GradientTape() as t:
t.watch(x)
t.watch(s)
grads = t.gradient(s, x)
self.assertEqual(grads, None)
def testTapeNoOpGradientStringSourceAndTarget(self):
s = constant_op.constant('unknown', dtype=dtypes.string)
with backprop.GradientTape() as t:
t.watch(s)
grads = t.gradient(s, s)
self.assertEqual(grads, None)
def testTapeNoOpGradientWithMultiTargetMultiSourceIncludeString(self):
x = constant_op.constant(3.0)
y = constant_op.constant(5.0)
s = constant_op.constant('unknown', dtype=dtypes.string)
with backprop.GradientTape() as t:
t.watch(x)
t.watch(y)
t.watch(s)
z = y * y
grads = t.gradient([x, y, z, s], [x, y, s])
self.assertAllEqual(grads[:2], [1.0, 11.0])
self.assertEqual(grads[2], None)
def testTapeNoOpOnVariableIsIdentity(self):
v0 = resource_variable_ops.ResourceVariable(1.0)
with backprop.GradientTape() as t:
y = v0.read_value()
self.assertEqual(t.gradient(y, v0).numpy(), 1.0)
@test_util.assert_no_new_tensors
@test_util.assert_no_garbage_created
def testTapeNoOpGradient2By2(self):
a_2_by_2 = constant_op.constant(2.0, shape=[2, 2])
with backprop.GradientTape(persistent=True) as tape:
tape.watch(a_2_by_2)
dy_dy = tape.gradient(a_2_by_2, [a_2_by_2])[0]
self.assertAllEqual(dy_dy.numpy(),
constant_op.constant(1.0, shape=[2, 2]).numpy())
@test_util.assert_no_new_pyobjects_executing_eagerly
def testTapeNoOpGradientMultiTarget2By2(self):
a_2_by_2 = constant_op.constant(2.0, shape=[2, 2])
with backprop.GradientTape(persistent=True) as tape:
tape.watch(a_2_by_2)
dy_dy = tape.gradient([a_2_by_2, a_2_by_2], [a_2_by_2])[0]
self.assertAllEqual(dy_dy.numpy(),
constant_op.constant(2.0, shape=[2, 2]).numpy())
def testTapeStopRecording(self):
with backprop.GradientTape() as t:
x = resource_variable_ops.ResourceVariable(1.0)
with t.stop_recording():
y = x * x
self.assertEqual(t.gradient(y, x), None)
def testTapeStopStartRecording(self):
with backprop.GradientTape(persistent=True) as t:
x = resource_variable_ops.ResourceVariable(1.0)
x2 = x * 2 # This should be differentiated through.
with t.stop_recording():
y = x2 * x2
z = x2 * x2
self.assertEqual(t.gradient(y, x2), None)
# If the x*2 was not differentiated through, this would be 2.0, not 4.0
self.assertEqual(t.gradient(z, x2).numpy(), 4.0)
def testTapeReset(self):
with backprop.GradientTape() as t:
v = resource_variable_ops.ResourceVariable(1.0)
loss = v * v
t.reset()
loss += v * v
self.assertAllEqual(t.gradient(loss, v), 2.0)
def testPythonMax(self):
x = [resource_variable_ops.ResourceVariable(2.),
resource_variable_ops.ResourceVariable(3.),
resource_variable_ops.ResourceVariable(5.)]
with backprop.GradientTape() as t:
f = max(x)
grad = t.gradient(f, x)
self.assertAllEqual(self.evaluate(f), 5.)
self.assertAllEqual(self.evaluate(grad), [None, None, 1.0])
def testAutomaticWatchedVariables(self):
with backprop.GradientTape() as t:
self.assertEqual(0, len(t.watched_variables()))
v = resource_variable_ops.ResourceVariable(1.0)
loss = v * v
self.assertAllEqual([v], t.watched_variables())
t.reset()
self.assertEqual(0, len(t.watched_variables()))
loss += v * v
self.assertAllEqual([v], t.watched_variables())
def testExplicitWatchedVariables(self):
with backprop.GradientTape() as t:
self.assertEqual(0, len(t.watched_variables()))
v = resource_variable_ops.ResourceVariable(1.0)
t.watch(v)
self.assertAllEqual([v], t.watched_variables())
t.reset()
self.assertEqual(0, len(t.watched_variables()))
t.watch(v)
self.assertAllEqual([v], t.watched_variables())
@test_util.assert_no_new_tensors
def testGradientNone(self):
def loss(x, l):
return math_ops.reduce_mean(
nn_ops.softmax_cross_entropy_with_logits(logits=x, labels=l),
constant_op.constant([0]))
logits = constant_op.constant([[0.0, 0.0]])
labels = constant_op.constant([[1.0, 0.0]])
# softmax_cross_entropy_with_logits returns two outputs and in this case the
# gradient wrt the second is None.
g, = backprop.gradients_function(loss, [0])(logits, labels)
self.assertAllEqual(g.numpy(), [[-0.5, 0.5]])
@test_util.run_in_graph_and_eager_modes
def testGradientWithinTapeBlock(self):
v1 = resource_variable_ops.ResourceVariable(1.)
self.evaluate(v1.initializer)
with backprop.GradientTape() as t:
loss = 2 * v1
grad = t.gradient(loss, v1)
self.assertAllEqual(self.evaluate(grad), 2.0)
with backprop.GradientTape(persistent=True) as t:
loss = 2 * v1
grad = t.gradient(loss, v1)
self.assertAllEqual(self.evaluate(grad), 2.0)
@test_util.run_in_graph_and_eager_modes
def testNestedSelfContexts(self):
v1 = resource_variable_ops.ResourceVariable(1.)
self.evaluate(v1.initializer)
with backprop.GradientTape() as t:
with self.assertRaises(ValueError):
with t:
pass
@test_util.assert_no_new_tensors
def testSecondGrad(self):
def first(x):
l = constant_op.constant([[0.0]])
x = nn_ops.softmax_cross_entropy_with_logits(labels=l, logits=x)
x = math_ops.reduce_sum(x, constant_op.constant([0]))
return x
def second(x):
grad = backprop.gradients_function(first, [0])(x)[0]
return math_ops.reduce_sum(grad, constant_op.constant([0]))
f = constant_op.constant([[0.1]])
grad = backprop.gradients_function(second, [0])(f)[0]
self.assertAllEqual([[0.0]], grad)
@test_util.run_in_graph_and_eager_modes
def testWatchingIsTapeLocal(self):
x1 = resource_variable_ops.ResourceVariable(2.0, trainable=False)
x2 = resource_variable_ops.ResourceVariable(2.0, trainable=False)
with backprop.GradientTape() as tape1:
with backprop.GradientTape() as tape2:
tape1.watch(x1)
tape2.watch([x1, x2])
y = x1 ** 3
z = x2 ** 2
dy, dz = tape2.gradient([y, z], [x1, x2])
d2y, d2z = tape1.gradient([dy, dz], [x1, x2])
self.evaluate([x1.initializer, x2.initializer])
self.assertEqual(self.evaluate(d2y), 12.0)
self.assertIsNone(d2z)
@test_util.assert_no_new_tensors
def testMakeVJP(self):
def f(x):
return x * x
wrapped_fn = backprop.make_vjp(f, persistent=False)
result, vjp = wrapped_fn(constant_op.constant(3.0))
self.assertAllEqual(result, 9.0)
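    # The VJP maps an output gradient dy to dy * df/dx = 2.0 * (2 * 3.0) = 12.0.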
self.assertAllEqual(vjp(2.0)[0], 12.0)
def testPersistentMakeVJP(self):
def f(x):
return x * x
wrapped_fn = backprop.make_vjp(f, persistent=True)
_, vjp = wrapped_fn(constant_op.constant(3.0))
vjp_result1 = vjp(2.0)[0]
vjp_result2 = vjp(2.0)[0]
self.assertAllEqual(vjp_result1, vjp_result2, 12.0)
@test_util.assert_no_new_tensors
def testGradGrad(self):
def sq(x):
return x * x
def grad(x):
value = backprop.gradients_function(sq, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(3.0))[0], 2.0)
@test_util.assert_no_new_tensors
def testGradGradExp(self):
def grad(x):
value = backprop.gradients_function(math_ops.exp, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(0.0))[0], 1.0)
@test_util.assert_no_new_tensors
def testStopGradient(self):
grad = backprop.gradients_function(
lambda x: array_ops.stop_gradient(math_ops.argmax(x)))
self.assertAllEqual(grad([0.0])[0], None)
@test_util.assert_no_new_tensors
def testArgmax(self):
def argmax(x):
i = math_ops.argmax(x)
return array_ops.stop_gradient(i)
grad = backprop.gradients_function(argmax)
self.assertAllEqual(grad([0.0])[0], None)
@test_util.run_gpu_only
@test_util.assert_no_new_tensors
def testGPU(self):
def fn(x):
with context.device('/gpu:0'):
b = constant_op.constant(2.0)
c = math_ops.add(x.gpu(), b)
        # TODO(apassos): remove cpu below by making TensorVSpace aware
# of devices.
return math_ops.add(c, constant_op.constant(3.0)).cpu()
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.run_gpu_only
@test_util.assert_no_new_tensors
def testGPUImplicitGrad(self):
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(
constant_op.constant(1.0), name='v')
def f():
with context.device('gpu:0'):
return v.read_value()
self.assertEqual(
backprop.implicit_grad(f)()[0][0].cpu().numpy(), 1.0)
@test_util.assert_no_new_tensors
def testCPU(self):
def fn(x):
b = constant_op.constant(2.0)
c = math_ops.add(x, b)
return math_ops.add(c, constant_op.constant(3.0))
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.run_gpu_only
@test_util.assert_no_new_tensors
def testTensorCopyGPU2CPU2GPU(self):
def f(a, b):
return a.cpu() + b.cpu()
with context.device('/gpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testEmptyParams(self):
def fn(a, b):
return a * b
x = constant_op.constant(1.0)
y = constant_op.constant(2.0)
dx, dy = backprop.gradients_function(fn)(x, y)
self.assertAllEqual(dx, y.numpy())
self.assertAllEqual(dy, x.numpy())
@test_util.assert_no_new_tensors
def testUnconnectedNone(self):
v = resource_variable_ops.ResourceVariable(
1.0, name='testUnconnectedNone')
def f():
v.read_value()
return constant_op.constant(1.0)
self.assertEqual(backprop.implicit_grad(f)()[0][0], None)
@test_util.assert_no_new_tensors
def testGradientTapeReEnterContext(self):
g = backprop.GradientTape()
with g:
x = constant_op.constant(3.0)
g.watch(x)
y = 2*x
with g:
z = 2*y
grad = g.gradient(target=z, sources=[x])
self.assertEqual(self.evaluate(grad), [4.0])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testGradientTapeRepeatedSource(self):
with backprop.GradientTape(persistent=False) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = 2 * x
grad = g.gradient(target=y, sources=[x, x])
self.assertEqual(self.evaluate(grad), [2.0, 2.0])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testPersistentGradientTapeRepeatedSource(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
y = constant_op.constant(5.0)
g.watch(x)
g.watch(y)
z = x * x + x * y
grad = g.gradient(target=z, sources=[x, x])
self.assertEqual(self.evaluate(grad), [11.0, 11.0])
grad = g.gradient(target=z, sources=[y, x])
self.assertEqual(self.evaluate(grad), [3.0, 11.0])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testGradientTapeStructure(self):
with backprop.GradientTape(persistent=True) as g:
# Using different constant values because constant tensors are
      # cached, leading to a different gradient than what one might expect.
x1 = constant_op.constant(3.0)
x2 = constant_op.constant(3.1)
x3 = constant_op.constant(3.2)
g.watch(x1)
g.watch(x2)
g.watch(x3)
y = x1 + 2 * x2 + 3 * x3
self.assertEqual(self.evaluate(g.gradient(y, x1)), [1.0])
self.assertEqual(self.evaluate(g.gradient(y, (x1,))), (1.0,))
self.assertEqual(self.evaluate(g.gradient(y, (x1, x2))), (1.0, 2.0))
self.assertEqual(self.evaluate(g.gradient(y, [(x1, x2), (x2, x3)])),
[(1.0, 2.0), (2.0, 3.0)])
self.assertEqual(self.evaluate(g.gradient(y, (x1, x2, [x1, x3]))),
(1.0, 2.0, [1.0, 3.0]))
self.assertEqual(self.evaluate(g.gradient(y, [x1, {'x2': x2, 'x3': x3}])),
[1.0, {'x2': 2.0, 'x3': 3.0}])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testGradientTape(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape() as gg:
gg.watch(y)
z = 2 * y
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(self.evaluate(inner_grad), 2.0)
y += inner_grad
grad = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(grad), 6.0)
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
  def testGradientTapeCalledOnConstantTarget(self):
with backprop.GradientTape() as g:
x = variables.Variable([3.0])
y = variables.Variable([2.0])
grad = g.gradient(x, y)
self.assertAllEqual(grad, None)
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only('b/120545219')
def testGradientTapeWithCond(self):
x = constant_op.constant(3.0)
def true_fn():
return x
def false_fn():
return x * x
with backprop.GradientTape() as g:
g.watch(x)
y = control_flow_ops.cond(x < x, true_fn, false_fn)
if not context.executing_eagerly():
with self.assertRaisesRegexp(NotImplementedError, 'tf.gradients'):
dy = g.gradient(y, [x])[0]
else:
dy = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(dy), 6.0)
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only('b/120545219')
def testGradientTapeWithWhileLoop(self):
i = constant_op.constant(1)
x = constant_op.constant(2.)
def cond(i, _):
return i < 3
def body(i, x):
return i + 1, x * 2
with backprop.GradientTape() as g:
g.watch([x])
_, y = control_flow_ops.while_loop(cond, body, [i, x])
if not context.executing_eagerly():
with self.assertRaisesRegexp(NotImplementedError, 'tf.gradients'):
dy = g.gradient(y, [x])[0]
else:
dy = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(dy), 4.0)
@test_util.assert_no_new_tensors
def testGradientTapeGradientCalledMultipleTimes(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
g.gradient(z, [x])
with self.assertRaisesRegexp(
RuntimeError, 'GradientTape.gradient can only be called once'):
g.gradient(y, [x])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only('b/120545219')
def testPersistentTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
dz_dx = g.gradient(z, [x])[0]
self.assertEqual(self.evaluate(dz_dx), 4 * 3 * 3 * 3)
dy_dx = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(dy_dx), 2 * 3)
del g
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testHigherOrderGradient(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x ** 3 # y := x^3
dy_dx = g.gradient(y, x) # dy/dx := 3x^2
d2y_dx2 = g.gradient(dy_dx, x) # d2y/dx2 := 6x
d3y_dx3 = g.gradient(d2y_dx2, x) # d3y/dx3 := 6
x = 3
self.assertEqual(self.evaluate(y), x ** 3)
self.assertEqual(self.evaluate(dy_dx), 3 * x ** 2)
self.assertEqual(self.evaluate(d2y_dx2), 6 * x)
self.assertEqual(self.evaluate(d3y_dx3), 6)
del g
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testPersistentNestedTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape(persistent=True) as gg:
gg.watch(y)
z = 2 * y
for _ in range(2):
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(self.evaluate(inner_grad), 2.0)
y += inner_grad
del gg
grad = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(grad), 6.0)
grad = g.gradient(z, [x])[0]
self.assertEqual(self.evaluate(grad), 12.0)
del g
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testGradientTapeVariable(self):
v = resource_variable_ops.ResourceVariable(1.0, name='v')
self.evaluate(v.initializer)
with backprop.GradientTape() as g:
y = v * v
grad = g.gradient(y, [v])[0]
self.assertAllEqual(self.evaluate(grad), 2.0)
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testNestedGradients(self):
x = constant_op.constant(3.0)
with backprop.GradientTape() as g:
g.watch(x)
y = x * x
z = y * y
dz_dx, dz_dy = g.gradient(z, [x, y])
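    # With y = x**2 and z = y**2: dz/dx = 4x**3 = 108 and dz/dy = 2y = 18 at x = 3.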
self.assertEqual(self.evaluate(dz_dx), 108.0)
self.assertEqual(self.evaluate(dz_dy), 18.0)
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testUnconnectedGradientsDefault(self):
x = constant_op.constant(1.0)
y = constant_op.constant(3.0)
with backprop.GradientTape() as g:
g.watch([x, y])
z = y * 2
dz_dx = g.gradient(z, x)
self.assertEqual(dz_dx, None)
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testUnconnectedGradientsZeros(self):
x = constant_op.constant(1.0, shape=[2, 2])
y = constant_op.constant(3.0)
with backprop.GradientTape() as g:
g.watch([x, y])
z = y * 2
dz_dx = g.gradient(z, x, unconnected_gradients='zero')
self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(dz_dx))
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testUnconnectedGradientsVariablesZeros(self):
x = resource_variable_ops.ResourceVariable(
constant_op.constant(1., shape=[2, 2]))
self.evaluate(x.initializer)
y = resource_variable_ops.ResourceVariable(constant_op.constant(3.))
self.evaluate(y.initializer)
with backprop.GradientTape() as g:
g.watch([x, y])
z = y * 2
dz_dx = g.gradient(z, x, unconnected_gradients='zero')
self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(dz_dx))
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
def testUnknownUnconnectedGradientsValueGiven(self):
x = constant_op.constant(1.0)
y = constant_op.constant(1.0)
with backprop.GradientTape() as g:
g.watch([x, y])
z = y * 2
with self.assertRaisesRegexp(
ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
g.gradient(z, x, unconnected_gradients='nonsense')
@test_util.run_in_graph_and_eager_modes
def testUnconnectedGradientsNestedDefunZeros(self):
@function.defun
def f(x):
return x * x
@function.defun
def h(y):
z = f(y)
return array_ops.stop_gradient(z)
x = constant_op.constant(1.0)
with backprop.GradientTape() as g:
g.watch(x)
y = h(x)
dy_dx = g.gradient(y, x, unconnected_gradients='zero')
self.assertEqual(0.0, self.evaluate(dy_dx))
@test_util.assert_no_new_tensors
def testEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grads_fn = backprop.val_and_grad_function(fn)
x = 2.0
y = 3.0
val, (dx, dy) = val_and_grads_fn(x, y)
self.assertAllClose(val, x * y)
self.assertAllEqual(dx, y)
self.assertAllEqual(dy, x)
@test_util.assert_no_new_tensors
def testNonEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grad_fn = backprop.val_and_grad_function(fn, params=[1])
x = 2.0
y = 3.0
val, grads = val_and_grad_fn(x, y)
self.assertAllClose(val, x * y)
self.assertEqual(1, len(grads))
self.assertAllEqual(grads[0], x)
@test_util.run_gpu_only
@test_util.assert_no_new_tensors
def testTensorCopyCPU2GPU2CPU(self):
# forward: a (cpu->gpu) -> add (gpu) -> c (gpu->cpu) -> add (cpu) -> e (cpu)
# back: e (cpu) -> add (cpu) -> c (cpu->gpu) -> add (gpu) -> grad (gpu->cpu)
def f(a, b):
with context.device('/gpu:0'):
c = math_ops.add(a.gpu(0), b.gpu(0))
return math_ops.add(c.cpu(), constant_op.constant(3.0))
with context.device('/cpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
def testGetAttrType(self):
typ = backprop.op_attr_type('Add', 'T')
self.assertEqual(typ, pywrap_tensorflow.TF_ATTR_TYPE)
def testGetAttrList(self):
typ = backprop.op_attr_type('MaxPool', 'ksize')
self.assertEqual(typ, [pywrap_tensorflow.TF_ATTR_INT])
def testMakeAttrType(self):
self.assertEqual(dtypes.float32,
backprop.make_attr(pywrap_tensorflow.TF_ATTR_TYPE, 1))
def testMakeAttrTypeList(self):
self.assertEqual([dtypes.float32],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_TYPE], [1]))
def testMulType(self):
def mul(x):
return math_ops._mul_dispatch(x, x) # pylint: disable=protected-access
self.assertAllEqual(
backprop.gradients_function(mul)(3.0)[0].numpy(),
6.0)
def testMakeAttrShape(self):
for s in ([], None, [1, 2, 3], [None, None], [1, None, 3]):
expected = tensor_shape.TensorShape(s).as_proto()
actual = backprop.make_attr(pywrap_tensorflow.TF_ATTR_SHAPE, s)
self.assertEqual(
expected,
actual,
msg=('For shape %r, expected %r != %r actual' % (s, expected,
actual)))
def testMakeAttrShapeList(self):
shape_list = [[], None, [1, 2, 3], [None, None], [1, None, 3]]
self.assertEqual(
[tensor_shape.TensorShape(s).as_proto() for s in shape_list],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_SHAPE], shape_list))
def testArgsGradientFunction(self):
def f(*args):
return args[0] * args[0]
grad = backprop.gradients_function(f)
self.assertAllEqual(grad(1.0)[0], 2.0)
def testPartial(self):
def f(x, y):
return x * y
part = functools.partial(f, constant_op.constant(2.0))
self.assertAllEqual(
backprop.gradients_function(part)(constant_op.constant(1.0))[0],
2.0)
def testReturnSameThing(self):
def f(x):
return x, 2 * x
self.assertAllEqual(backprop.gradients_function(f)(1.0)[0], 3.0)
@test_util.assert_no_new_tensors
def testExceptionSafety(self):
def f(unused_x):
raise ValueError()
try:
backprop.gradients_function(f)(1.0)
except ValueError:
pass
def real_f(x):
return x * x
self.assertAllEqual(backprop.gradients_function(real_f)(1.0)[0], 2.0)
@test_util.assert_no_new_tensors
def testMultiValueConvertToTensor(self):
x = resource_variable_ops.ResourceVariable(
initial_value=array_ops.constant([1.0]), name='x')
def fn():
a = math_ops.add(x.value(), 1.0)
# Make sure convert_to_tensor works correctly with list of TensorNodes.
b = array_ops.stack([a, a], axis=0)
return math_ops.reduce_mean(b)
grad = backprop.implicit_grad(fn)()[0][0]
self.assertAllEqual([1.0], grad)
def testOutput(self):
def multiout(x):
return x + 2, x * x
x = constant_op.constant([0.0, 1.0, 2.0])
grad = backprop.gradients_function(multiout)(x)[0]
self.assertAllEqual([1.0, 3.0, 5.0], grad)
def testMultiValuePreservesIfNotDiffedAgainst(self):
def tfe_conv2d(timage, tkernel, conv2dstrides):
return nn_ops.conv2d(timage, tkernel, conv2dstrides, 'SAME')
i = constant_op.constant([[[[1.0]]]])
k = constant_op.constant([[[[2.0]]]])
s = [1, 1, 1, 1]
grad = backprop.gradients_function(tfe_conv2d, params=(0,))(i, k, s)[0]
self.assertAllEqual([[[[2.0]]]], grad)
def testSameObjectForMultipleArguments(self):
def f(x, y):
return math_ops.multiply(x, y)
g = backprop.gradients_function(f)
def np_g(x, y):
dx, dy = g(x, y)
return [dx.numpy(), dy.numpy()]
x = constant_op.constant(1.)
self.assertAllEqual([1., 1.], np_g(x, x))
x = 1.
self.assertAllEqual([1., 1.], np_g(x, x))
x = constant_op.constant([[1.]])
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
x = [[1.]]
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
v = resource_variable_ops.ResourceVariable(
initial_value=1., name='testSameObjectForMultipleArguments.Variable')
self.assertAllEqual([1., 1.], np_g(v, v))
@test_util.assert_no_new_tensors
def testImplicitGradientsCustomGradientAndCachedVariableValue(self):
@custom_gradient.custom_gradient
def my_square(x):
result = math_ops.square(x)
def grad(dr):
return 2 * dr * x + 1
return result, grad
x = resource_variable_ops.ResourceVariable(
initial_value=3., name='X.' + self.id())
def f():
return my_square(x)
g = backprop.implicit_grad(f)
grads_and_vars = g()
self.assertEqual(1, len(grads_and_vars))
grad, var = grads_and_vars[0]
self.assertAllEqual(7, grad)
self.assertAllEqual(x, var)
def testJacobianCustomGradient(self):
class MyCallable(object):
def __init__(self):
self.a = variables.Variable(1.)
self.b = variables.Variable(2.)
self.c = variables.Variable(3.)
def __call__(self, x):
return self.a * x * x + self.b * x + self.c
@def_function.function
def call(c, x):
@custom_gradient.custom_gradient
def _call():
y = c(x)
def grad(dy, variables=None): # pylint: disable=redefined-outer-name
with backprop.GradientTape(persistent=True) as g:
g.watch(variables)
y = c(x)
grad_vars = [
2 * math_ops.reduce_sum(dy * g.jacobian(y, v)) for v in variables
]
del g
return (), grad_vars
return y, grad
return _call()
c = MyCallable()
x = constant_op.constant([1., 2., 3.])
with backprop.GradientTape(persistent=True) as g:
g.watch([c.a, c.b, c.c])
y = call(c, x)
self.assertAllEqual(g.gradient(y, x), None)
@test_util.assert_no_new_tensors
def testCustomGradient(self):
@custom_gradient.custom_gradient
def my_mul(x, y):
result = x*y
def grad(dr):
return [dr*y, dr*x]
return result, grad
lr = 0.25
x = resource_variable_ops.ResourceVariable(2., name='x')
def loss(x):
return my_mul(2., x.read_value())
loss_grads_fn = backprop.implicit_val_and_grad(loss)
losses = []
for _ in range(5):
loss, grads_and_vars = loss_grads_fn(x)
losses.append(loss.numpy())
for (grad, var) in grads_and_vars:
var.assign_sub(lr*grad)
self.assertAllEqual(losses, [4.0, 3., 2., 1., 0.])
@test_util.assert_no_new_tensors
def testCustomGradientIdentity(self):
@custom_gradient.custom_gradient
def my_identity(x):
def grad(dresult):
return [2 * dresult]
return x, grad
self.assertAllEqual(backprop.gradients_function(my_identity)(1.0)[0], 2.0)
def testDifferentiatingFunctionThatReturnsNone(self):
def fn(x, y):
result = x*y # pylint: disable=unused-variable
x = constant_op.constant(1)
y = constant_op.constant(2)
loss_grads_fn = backprop.implicit_val_and_grad(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
loss_grads_fn(x, y)
val_and_grads_fn = backprop.val_and_grad_function(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
val_and_grads_fn(x, y)
def testZerosCacheDoesntLeakAcrossGraphs(self):
with ops.Graph().as_default():
def get_grad():
with ops.Graph().as_default(), self.cached_session():
t = constant_op.constant(1, dtype=dtypes.float32, shape=(10, 4))
x = constant_op.constant(2, dtype=dtypes.float32, shape=(10, 4))
with backprop.GradientTape() as tape:
tape.watch(x)
x1, _ = array_ops.split(x, num_or_size_splits=2, axis=1)
y1 = x1**2
y = array_ops.concat([y1, t], axis=1)
return self.evaluate(tape.gradient(y, x))
grad1 = get_grad()
grad2 = get_grad()
self.assertAllEqual(grad1, grad2)
@test_util.run_in_graph_and_eager_modes
def testSelectivelyWatchVariables(self):
x1 = resource_variable_ops.ResourceVariable(1.0)
x2 = resource_variable_ops.ResourceVariable(1.0)
with backprop.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(x2)
y = x1**2
z = x2**3
self.assertTupleEqual(tape.watched_variables(), (x2,))
dy, dz = tape.gradient([y, z], [x1, x2])
self.evaluate([x1.initializer, x2.initializer])
self.assertIsNone(dy)
self.assertEqual(self.evaluate(dz), 3.0)
@test_util.run_in_graph_and_eager_modes
def testDifferentiatingScalarCache(self):
    # In the following test, if x2 = x1 (i.e. the objects are exactly the
    # same), then y is essentially 2*x1, and dy/dx1 = 2.
# When we had a pure scalar cache in eager, this would be the case. This
# test prevents us from going back to that case.
with backprop.GradientTape(persistent=False) as g:
x1 = constant_op.constant(3.0)
x2 = constant_op.constant(3.0)
g.watch(x1)
g.watch(x2)
y = x1 + x2
grad = g.gradient(target=y, sources=[x1])
self.assertEqual(self.evaluate(grad), [1.0])
def testVariablesAndConstantsProduceTheSameGradients(self):
# In the following test, differentiating [y, z] against [a, b] gives:
# (dy/da + dz/da, dy/db + dz/db).
# If a and b are the same constant, dz/da will not be 0 (which it should
# be).
    # This is solved by using variables, since read_value on a variable
    # produces a new tensor with a corresponding TensorHandle rather than
    # reusing the same tensor (which would happen if we were using a cache
    # and reusing EagerTensor objects).
def get_grads(a, b):
with backprop.GradientTape() as tape:
tape.watch([a, b])
y = a**3
z = b**2
return tape.gradient([y, z], [a, b])
gradients_constants = get_grads(
constant_op.constant(2.0), constant_op.constant(2.0))
gradients_variables = get_grads(
resource_variable_ops.ResourceVariable(2.0),
resource_variable_ops.ResourceVariable(2.0))
self.assertAllEqual(gradients_constants, gradients_variables)
def testUnknownShapes(self):
with ops.Graph().as_default():
with backprop.GradientTape() as tape:
a = array_ops.placeholder(dtype=dtypes.float32, shape=None)
tape.watch(a)
b = a**3
db_da = tape.gradient(b, a)
with self.cached_session() as sess:
self.assertEqual((8.0, 12.0), sess.run((b, db_da), feed_dict={a: 2.0}))
@test_util.run_in_graph_and_eager_modes
def testCustomGradientInEagerAndGraph(self):
@custom_gradient.custom_gradient
def f(x):
y = x * x
def grad(dy):
return [4 * dy]
return y, grad
with backprop.GradientTape() as t:
c = constant_op.constant(1.0)
t.watch(c)
g = f(c)
self.assertAllEqual(self.evaluate(t.gradient(g, c)), 4.0)
@test_util.run_in_graph_and_eager_modes
def testNthOrderCustomGradientsTape(self):
def _all_grads_tape(f, primals, doutputs):
primals = nest.map_structure(ops.convert_to_tensor, primals)
with backprop.GradientTape(persistent=True) as t:
t.watch(primals)
with variable_scope.variable_scope(
# Required when graph building
variable_scope.get_variable_scope(), use_resource=True):
current = f(primals)
ret = [current]
for doutput in doutputs:
current = t.gradient(current, primals, output_gradients=doutput,
unconnected_gradients='zero')
ret.append(current)
return ret
@custom_gradient.custom_gradient
def f(x):
y = 2. * x
return y, _chain_grads(x, [lambda dy: dy * 2.1,
lambda ddy: ddy * 2.2,
lambda dddy: dddy * x * 2.3])
self.assertAllClose(
[6., 4.2, 22.], _all_grads_tape(f, 3., [2., 10.]))
self.assertAllClose(
[6., 2.1, 2.2, 6.9, 2.3, 0.],
_all_grads_tape(f, 3., [1., 1., 1., 1., 1.]))
traced_tape_grads = def_function.function(_all_grads_tape)
self.assertAllClose(
[6., 4.2, 22.], traced_tape_grads(f, 3., [2., 10.]))
self.assertAllClose(
[6., 2.1, 2.2, 6.9, 2.3, 0.],
traced_tape_grads(f, 3., [1., 1., 1., 1., 1.]))
@test_util.run_in_graph_and_eager_modes
def testNthOrderCustomGradientsTFGradients(self):
@def_function.function
def _all_grads_tf_gradients(f, primals, doutputs):
primals = nest.map_structure(ops.convert_to_tensor, primals)
current = f(primals)
ret = [current]
for doutput in doutputs:
current, = gradients.gradients(current, primals, grad_ys=doutput,
unconnected_gradients='zero')
ret.append(current)
return ret
@custom_gradient.custom_gradient
def f(x):
y = 2. * x
return y, _chain_grads(x, [lambda dy: dy * 2.1,
lambda ddy: ddy * 2.2,
lambda dddy: dddy * x * 2.3])
self.assertAllClose(
[6., 4.2, 22.], _all_grads_tf_gradients(f, 3., [2., 10.]))
self.assertAllClose(
[6., 2.1, 2.2, 6.9, 2.3, 0.], _all_grads_tf_gradients(
f, 3., [1., 1., 1., 1., 1.]))
@test_util.run_in_graph_and_eager_modes
def testCustomGradientManualNesting(self):
@custom_gradient.custom_gradient
def f(x, y):
z = 2. * x * y
@custom_gradient.custom_gradient(primals=(x, y))
def g(unused_dz):
def h(unused_dz, unused_dydz):
return (2.2, 3.2)
return (2.1, 3.1), h
return z, g
with backprop.GradientTape(persistent=True) as t:
with backprop.GradientTape(persistent=True) as tt:
c = constant_op.constant(1.)
d = constant_op.constant(-1.)
t.watch(c)
tt.watch(c)
t.watch(d)
tt.watch(d)
output = f(c, d)
self.assertAllClose(-2., output)
gc = tt.gradient(output, c)
self.assertAllClose(2.1, gc)
gd = tt.gradient(output, d)
self.assertAllClose(3.1, gd)
gcgc = t.gradient(gc, c)
self.assertAllClose(2.2, gcgc)
gcgd = t.gradient(gc, d)
self.assertAllClose(3.2, gcgd)
gdgc = t.gradient(gd, c)
self.assertAllClose(2.2, gdgc)
gdgd = t.gradient(gd, d)
self.assertAllClose(3.2, gdgd)
@test_util.run_in_graph_and_eager_modes
def testCustomGradientForwardprop(self):
@custom_gradient.custom_gradient
def f(x):
z = 2. * tensor_util.constant_value(x)
def g(dz):
@custom_gradient.custom_gradient
def first_order(unused_x, unused_dz):
def second_order_and_transpose(unused_ddz):
return 2.2, 3.1
return 2.1, second_order_and_transpose
return first_order(x, dz)
return z, g
with backprop.GradientTape(persistent=True) as t:
with backprop.GradientTape() as tt:
c = constant_op.constant(1.)
t.watch(c)
tt.watch(c)
output_grad = array_ops.ones([])
t.watch(output_grad)
output = f(c)
self.assertAllClose(2., output)
gc = tt.gradient(output, c, output_gradients=output_grad)
self.assertAllClose(2.1, gc)
ggc = t.gradient(gc, c)
self.assertAllClose(2.2, ggc)
# Note that executed eagerly this kind of transpose is not efficient. But
# from a tf.function we could prune out the first-order gradient
# computation.
transpose = t.gradient(gc, output_grad)
self.assertAllClose(3.1, transpose)
@test_util.run_in_graph_and_eager_modes
def testMaxPooling3DGradient(self):
if test.is_built_with_rocm():
self.skipTest('Pooling with 3D tensors is not supported in ROCm')
def forward(a):
r = max_pooling3d(a, pool_size=pool_size, strides=strides, padding='SAME')
return r
input_sizes = [1, 3, 2, 4, 1]
pool_size = (2, 2, 1)
strides = (1, 1, 1)
total_size = np.prod(input_sizes)
x = np.arange(1, total_size + 1, dtype=np.float32)
aa = constant_op.constant(x, shape=input_sizes, dtype=dtypes.float32)
da = backprop.gradients_function(forward)(aa)
if not context.executing_eagerly():
tf_aa = constant_op.constant(x, shape=input_sizes, dtype=dtypes.float32)
tf_max = max_pooling3d(
tf_aa, pool_size=pool_size, strides=strides, padding='SAME')
tf_da = gradients.gradients(tf_max, [tf_aa])
self.assertAllEqual(da[0], tf_da[0].eval())
@test_util.run_in_graph_and_eager_modes
def testWatchBadThing(self):
g = backprop.GradientTape()
with self.assertRaisesRegexp(ValueError, 'ndarray'):
g.watch(np.array(1.))
def testOpWithNoAttrs(self):
@function.defun(autograph=False)
def f():
with backprop.GradientTape() as tape:
xs = random_ops.random_normal([10, 32])
tape.watch(xs)
# The `rfft()` op has no defined attrs, which exercises a different
# branch in the Python op wrapper code generator for recording
# gradients.
ys = fft_ops.rfft(xs)
self.assertEmpty(ys.op.node_def.attr)
gs = tape.gradient(ys, xs)
self.assertIsNotNone(gs)
f.get_concrete_function()
class JacobianTest(test.TestCase):
def _jacobian(self, experimental_use_pfor):
    persistent = context.executing_eagerly() and not experimental_use_pfor
with backprop.GradientTape(persistent=persistent) as g:
x = constant_op.constant([1., 2.])
y = constant_op.constant([3., 4.])
g.watch(x)
g.watch(y)
z = x * x * y
jacobian = g.jacobian(z, [x, y],
experimental_use_pfor=experimental_use_pfor)
answer = [array_ops.diag(2 * x * y), array_ops.diag(x * x)]
return jacobian, answer
@test_util.run_v1_only('b/120545219')
def testPfor(self):
jacobian, answer = self._jacobian(experimental_use_pfor=True)
for j, a in zip(jacobian, answer):
self.assertAllEqual(a, j)
@test_util.run_v1_only('b/120545219')
def testWhileLoop(self):
jacobian, answer = self._jacobian(experimental_use_pfor=False)
for j, a in zip(jacobian, answer):
self.assertAllEqual(a, j)
@test_util.run_v1_only('b/120545219')
def testPforDefun(self):
@function.defun
def _f():
return self._jacobian(experimental_use_pfor=True)
jacobian, answer = _f()
for j, a in zip(jacobian, answer):
self.assertAllEqual(a, j)
@test_util.run_v1_only('b/120545219')
def testWhileLoopDefun(self):
@function.defun
def _f():
return self._jacobian(experimental_use_pfor=False)
jacobian, answer = _f()
for j, a in zip(jacobian, answer):
self.assertAllEqual(a, j)
@test_util.run_v1_only('b/120545219')
def testPersistentTape(self):
if not context.executing_eagerly():
return
with backprop.GradientTape() as g:
x = constant_op.constant([1.0, 2.0])
g.watch(x)
y = x * x
with self.assertRaisesRegexp(RuntimeError, 'persistent'):
g.jacobian(y, x, experimental_use_pfor=False)
@test_util.run_v1_only('b/120545219')
def testPforException(self):
var = variables.Variable([1.])
@custom_gradient.custom_gradient
def op(x):
def grad(_):
# Note that we perform a stateful operation here that will not be
        # compatible with the parallel-for construct.
with ops.control_dependencies(
[var.assign(random_ops.random_uniform([1]))]):
return constant_op.constant(1.)
return x, grad
with backprop.GradientTape() as g:
x = constant_op.constant([1., 2.])
g.watch(x)
y = op(x)
with self.assertRaisesRegexp(ValueError, 'No converter'):
g.jacobian(y, x, experimental_use_pfor=True)
@test_util.run_v1_only('b/120545219')
def test_parallel_iterations(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant([[1., 2], [3, 4]])
g.watch(x)
y = math_ops.matmul(x, x)
self.assertAllClose(g.jacobian(y, x, parallel_iterations=2),
g.jacobian(y, x, parallel_iterations=3))
@test_util.run_in_graph_and_eager_modes
def test_nested_jacobian(self):
if context.executing_eagerly():
# TODO(agarwal): b/128842926
self.skipTest('Conversion of function calls not implemented yet.')
x = array_ops.ones((10, 2))
with backprop.GradientTape(persistent=False) as g:
g.watch(x)
with backprop.GradientTape(persistent=False) as gg:
gg.watch(x)
y = math_ops.reduce_sum(math_ops.square(x))
dy_x = gg.jacobian(y, x)
dy_xx = g.batch_jacobian(dy_x, x)
dy_xx_answer = [[[2., 0], [0, 2.]]] * 10
self.assertAllClose(dy_xx_answer, self.evaluate(dy_xx))
@test_util.run_in_graph_and_eager_modes
def test_indexed_slices(self):
with backprop.GradientTape(persistent=True) as g:
inp = random_ops.random_uniform([3, 2])
g.watch(inp)
output = nn.embedding_lookup(inp, [0, 2])
self.assertAllClose(
g.jacobian(output, inp, experimental_use_pfor=True),
g.jacobian(output, inp, experimental_use_pfor=False))
@test_util.run_all_in_graph_and_eager_modes
class BatchJacobianTest(test.TestCase, parameterized.TestCase):
def _batch_jacobian(self, experimental_use_pfor):
    persistent = context.executing_eagerly() and not experimental_use_pfor
with backprop.GradientTape(persistent=persistent) as g:
x = constant_op.constant([[1., 2.], [3., 4.]])
y = constant_op.constant([[3., 4.], [5., 6.]])
g.watch(x)
z = x * x * y
batch_jacobian = g.batch_jacobian(
z, x, experimental_use_pfor=experimental_use_pfor)
answer = array_ops.stack([array_ops.diag(2 * x[0] * y[0]),
array_ops.diag(2 * x[1] * y[1])])
return batch_jacobian, answer
def testPfor(self):
batch_jacobian, answer = self._batch_jacobian(experimental_use_pfor=True)
self.assertAllEqual(answer, batch_jacobian)
def testWhileLoop(self):
batch_jacobian, answer = self._batch_jacobian(experimental_use_pfor=False)
self.assertAllEqual(answer, batch_jacobian)
def testPforDefun(self):
@function.defun
def _f():
return self._batch_jacobian(experimental_use_pfor=True)
batch_jacobian, answer = _f()
self.assertAllEqual(answer, batch_jacobian)
def testWhileLoopDefun(self):
@function.defun
def _f():
return self._batch_jacobian(experimental_use_pfor=False)
batch_jacobian, answer = _f()
self.assertAllEqual(answer, batch_jacobian)
def testPersistentTape(self):
if not context.executing_eagerly():
return
with backprop.GradientTape() as g:
x = constant_op.constant([[1.0, 2.0]])
g.watch(x)
y = x * x
with self.assertRaisesRegexp(RuntimeError, 'persistent'):
g.batch_jacobian(y, x, experimental_use_pfor=False)
def testBadShape(self):
x = random_ops.random_uniform([2, 3])
with backprop.GradientTape() as g:
y = array_ops.concat([x, x], axis=0)
with self.assertRaisesRegexp(ValueError, 'Need first dimension'):
g.batch_jacobian(y, x)
def testBadInputRank(self):
x = random_ops.random_uniform([2])
with backprop.GradientTape() as g:
y = random_ops.random_uniform([2, 2])
with self.assertRaisesRegexp(ValueError, 'must have rank at least 2'):
g.batch_jacobian(y, x)
def testBadOutputRank(self):
x = random_ops.random_uniform([2, 2])
with backprop.GradientTape() as g:
y = random_ops.random_uniform([2])
with self.assertRaisesRegexp(ValueError, 'must have rank at least 2'):
g.batch_jacobian(y, x)
def testPforException(self):
var = variables.Variable([1.])
@custom_gradient.custom_gradient
def op(x):
def grad(_):
# Note that we perform a stateful operation here that will not be
        # compatible with the parallel-for construct.
with ops.control_dependencies(
[var.assign(random_ops.random_uniform([1]))]):
return constant_op.constant(1.)
return x, grad
with backprop.GradientTape() as g:
x = constant_op.constant([[1.], [2.]])
g.watch(x)
y = op(x)
with self.assertRaisesRegexp(ValueError, 'No converter'):
g.batch_jacobian(y, x, experimental_use_pfor=True)
def test_parallel_iterations(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant([[1., 2], [3, 4]])
g.watch(x)
w = constant_op.constant([[1., 2, 3, 4], [5, 6, 7, 8]])
y = math_ops.matmul(x, w)
self.assertAllClose(g.batch_jacobian(y, x, parallel_iterations=2),
g.batch_jacobian(y, x, parallel_iterations=3))
@parameterized.parameters(
(True, True),
(True, False),
(False, True),
(False, False))
def test_degenerate_shape(self, use_function, use_pfor):
def f(x):
with backprop.GradientTape(persistent=True) as tape:
tape.watch(x)
y = x**2
return tape.batch_jacobian(y, x, experimental_use_pfor=use_pfor)
if use_function:
f = def_function.function(f)
self.assertAllEqual([1, 0, 0], array_ops.shape(f(array_ops.zeros([1, 0]))))
class AggregateIndexedSlicesGradientsTest(test_util.TensorFlowTestCase):
def _assert_indexed_slices_equal(self, left, right):
self.assertAllEqual(
self.evaluate(ops.convert_to_tensor(left)),
self.evaluate(ops.convert_to_tensor(right)))
def testNoGradients(self):
self.assertIsNone(backprop.aggregate_indexed_slices_gradients([]))
def testOneGradient(self):
t = math_ops._as_indexed_slices(
constant_op.constant([[1., 2.], [0, 0], [3., 4.]]))
result = backprop.aggregate_indexed_slices_gradients([t])
self._assert_indexed_slices_equal(t, result)
def testMultipleGradients(self):
t0 = math_ops._as_indexed_slices(
constant_op.constant([[1., 2.], [0, 0], [3., 4.]]))
t1 = math_ops._as_indexed_slices(
constant_op.constant([[0., 0.], [5, 6], [7., 8.]]))
total = constant_op.constant([[1., 2.], [5, 6], [10., 12.]])
result = backprop.aggregate_indexed_slices_gradients([t0, t1])
self._assert_indexed_slices_equal(total, result)
def testMultipleGradientsWithNones(self):
t0 = math_ops._as_indexed_slices(
constant_op.constant([[1., 2.], [0, 0], [3., 4.]]))
t1 = math_ops._as_indexed_slices(
constant_op.constant([[0., 0.], [5, 6], [7., 8.]]))
t3 = None
total = constant_op.constant([[1., 2.], [5, 6], [10., 12.]])
result = backprop.aggregate_indexed_slices_gradients([t0, t1, t3])
self._assert_indexed_slices_equal(total, result)
def testMixedTensorAndIndexedSlices(self):
t0 = math_ops._as_indexed_slices(
constant_op.constant([[1., 2.], [0, 0], [3., 4.]]))
t1 = constant_op.constant([[0., 0.], [5, 6], [7., 8.]])
total = constant_op.constant([[1., 2.], [5, 6], [10., 12.]])
result = backprop.aggregate_indexed_slices_gradients([t0, t1])
self._assert_indexed_slices_equal(total, result)
if __name__ == '__main__':
test.main()
| 32.554951
| 80
| 0.658032
|
42b15e70023493766499d81a21d116751cb30103
| 580
|
py
|
Python
|
shoppingmall/shoppingapp/migrations/0009_auto_20220306_0557.py
|
roygoswamisuvankar/shopping-mall
|
8fdc194699e1610cf53210b84208979c27efe7c3
|
[
"MIT"
] | null | null | null |
shoppingmall/shoppingapp/migrations/0009_auto_20220306_0557.py
|
roygoswamisuvankar/shopping-mall
|
8fdc194699e1610cf53210b84208979c27efe7c3
|
[
"MIT"
] | null | null | null |
shoppingmall/shoppingapp/migrations/0009_auto_20220306_0557.py
|
roygoswamisuvankar/shopping-mall
|
8fdc194699e1610cf53210b84208979c27efe7c3
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.9 on 2022-03-06 05:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shoppingapp', '0008_alter_bank_ac_emp_phone'),
]
operations = [
migrations.AlterField(
model_name='bank_ac',
name='ac_number',
field=models.CharField(default='', max_length=50),
),
migrations.AlterField(
model_name='bank_ac',
name='emp_phone',
field=models.CharField(default='', max_length=20),
),
]
| 24.166667
| 62
| 0.586207
|
8dabbb08edfbd685c98ee7f6be687ef32b450e89
| 391
|
py
|
Python
|
Build_1/asgi.py
|
AbhijithGanesh/car-data-analyzer
|
8b60503c2fb060d0afa63107bf8defe4b48a5587
|
[
"BSD-3-Clause"
] | null | null | null |
Build_1/asgi.py
|
AbhijithGanesh/car-data-analyzer
|
8b60503c2fb060d0afa63107bf8defe4b48a5587
|
[
"BSD-3-Clause"
] | null | null | null |
Build_1/asgi.py
|
AbhijithGanesh/car-data-analyzer
|
8b60503c2fb060d0afa63107bf8defe4b48a5587
|
[
"BSD-3-Clause"
] | null | null | null |
"""
ASGI config for Build_1 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Build_1.settings')
application = get_asgi_application()
| 23
| 78
| 0.785166
|
6742461897bb9396c0d1f05898ff58fc0d6177b9
| 6,644
|
py
|
Python
|
sentiment.py
|
IJPaul/song-sentiment-site
|
70fb43be7c73ba7ada8e49768d10ebf161244c65
|
[
"MIT"
] | null | null | null |
sentiment.py
|
IJPaul/song-sentiment-site
|
70fb43be7c73ba7ada8e49768d10ebf161244c65
|
[
"MIT"
] | null | null | null |
sentiment.py
|
IJPaul/song-sentiment-site
|
70fb43be7c73ba7ada8e49768d10ebf161244c65
|
[
"MIT"
] | null | null | null |
import basic_scraping
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import re
import googlesearch
"""
Returns an array containing lyrics of a song line by line
Parameter songURL: The Genius URL
Precondition: A valid song lyrics URL (string)
"""
def getLyrics(songURL):
lyricsArr = []
lyrics = basic_scraping.getPageElement(songURL, 'div', 'lyrics').getText()
for line in lyrics.split('\n'):
if len(line) >= 1 and not (line[0] == '[' and line[len(line)-1] == ']'):
lyricsArr.append(line)
return lyricsArr
"""
Returns an array of the top 100 artists and songs of the given year
Parameter billboardURL: The billboard URL
Precondition: A valid billboard URL from billboardtop100of.com (string)
"""
def getTopSongs(billboardURL):
songs = []
rows = basic_scraping.getPageElements(billboardURL, 'td')
i = 0
while i < len(rows)-2:
artist = getPrimaryArtist(rows[i+1].getText())
artist = re.sub('\n', '', artist)
song = getPrimarySongName(rows[i+2].getText())
song = re.sub('\n', '', song)
song = song.replace("LYRICS", "")
artistAndSong = (artist, song)
songs.append(artistAndSong)
i += 3
return songs
"""
Returns True if the generated Genius URL is in fact a valid Genius lyrics URL; returns False otherwise
Parameter songURL: The URL
Precondition: songURL is a string
"""
def isValidGeniusUrl(songURL):
    try:
        basic_scraping.getPageElement(songURL, 'div', 'lyrics').getText()
        return True
    except Exception:
        return False
"""
Returns the song's primary artist(s) by removing features
ex: 'Rihanna feat. Drake' becomes 'Rihanna'
ex: 'Major Lazer and DJ Snake feat. MO' becomes 'Major Lazer and DJ Snake'
Parameter artist: The name of the artist(s)
Precondition: artist is a string
"""
def getPrimaryArtist(artist):
feature = artist.find('feat.')
if feature == -1:
return artist
else:
newArtist = artist[:feature].strip()
return newArtist
"""
Returns the song's primary name i.e. if the song is
ex: 'Habits (Stay High)' becomes 'Habits'
Parameter song: The name of the song
Precondition: song is a string
"""
def getPrimarySongName(song):
parenthesis = song.find('(')
if parenthesis == -1:
return song
else:
newTitle = song[:parenthesis].strip()
return newTitle
"""
Returns a billboardtop100of URL for the given year
Parameter year: The year
Precondition: year is an int 1940 <= year <= current year
"""
def getBillboardUrl(year):
url = "http://billboardtop100of.com/%s-2/" % year
return url
"""
Returns a VADER sentiment dictionary for a sentence
Parameter text: The text to be analyzed for sentiment
Precondition: text is a string of roughly sentence length
"""
def getSentiment(text):
analyzer = SentimentIntensityAnalyzer()
vs = analyzer.polarity_scores(text)
return vs
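# Illustrative output (values are representative, not exact):
#   getSentiment("I love this song!") ->
#   {'neg': 0.0, 'neu': 0.308, 'pos': 0.692, 'compound': 0.6696}
# 'compound' is VADER's normalized overall score in [-1, 1].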
"""
Returns a VADER sentiment dictionary for an entire song if the song has lyrics;
otherwise, returns None
Parameter songURL: The Genius URL
Precondition: A valid song lyrics URL (string)
"""
def getSongSentiment(songURL):
lyricsArr = getLyrics(songURL)
sentiment = {'neg': 0, 'neu': 0, 'pos': 0, 'compound' : 0}
count = 0
for line in lyricsArr:
vs = getSentiment(line)
sentiment['neg'] += vs['neg']
sentiment['neu'] += vs['neu']
sentiment['pos'] += vs['pos']
sentiment['compound'] += vs['compound']
count += 1
for key in sentiment:
if count != 0:
sentiment[key] = '{0:.3g}'.format(sentiment[key] / count)
else:
sentiment[key] = 0
return sentiment
"""
Returns whether the song is 'positive', 'neutral', or 'negative'
based on the songs compound VADER score
Parameter songURL: The Genius URL
Precondition: A valid song lyrics URL (string)
"""
def getSongMood(songURL):
overallSentiment = float(getSongSentiment(songURL)['compound'])
    print(overallSentiment)
    # +/-0.05 on the compound score is the conventional VADER cutoff
    # between positive, neutral, and negative.
    if overallSentiment >= 0.05:
return 'positive'
elif overallSentiment > -0.05:
return 'neutral'
else:
return 'negative'
"""
Returns the Genius URL of a song from the artist name and song name if one exists;
otherwise, returns None
Parameters artist, song: Name of artist(s), name of song
Preconditions: artist and song are both strings
"""
def getGeniusUrl(artist, song):
# Attempt to get the url by doing regex expression replacement
baseUrl = "https://Genius.com/"
artistName = artist.replace(' ', '-')
songTitle = song.replace(' ', '-')
    artistName = re.sub(r'[\$"\'()!%#?]', "", artistName)
    songTitle = re.sub(r'[\$"\'()!%#,?]', "", songTitle)
geniusUrl = (baseUrl + artistName + '-' + songTitle + '-lyrics').replace(chr(8217), '')
print(geniusUrl)
if not isValidGeniusUrl(geniusUrl):
geniusUrl = getGeniusUrlFromGoogle(artist, song)
return geniusUrl
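# Illustrative walk-through (hypothetical inputs): getGeniusUrl('Tove Lo', 'Habits')
# first tries "https://Genius.com/Tove-Lo-Habits-lyrics"; if that page has no
# lyrics block, getGeniusUrlFromGoogle is used as a fallback search.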
"""
Returns the first valid Genius URL found among the top Google Search results (or None).
This function is used when artists and songs have names such that performing regular expression operations
on the artist name and song name and inserting them into the base Genius URL is not enough
to generate the valid Genius song lyrics URL.
Since the URL is close to the valid one, a google search of the invalid one will return the correct one within
the first few results.
Parameters artist, song: Name of artist(s), name of song
Preconditions: artist and song are both strings
"""
def getGeniusUrlFromGoogle(artist, song):
searchTerms = 'genius lyrics ' + artist + ' ' + song
urls = googlesearch.scrapeGoogle(searchTerms, 10)
geniusUrl = None
for url in urls:
if isValidGeniusUrl(url):
geniusUrl = url
break
return geniusUrl
"""
Returns a tally ({'neg', 'pos', 'neu'} counts) of song moods for the year's top 100 songs
Parameter year: The year of the billboard chart
Precondition: year is an int 1940 <= year <= current year
"""
def getYearSentiment(year):
billboardUrl = getBillboardUrl(year)
# a tuple in the form (artist, song)
topHundred = getTopSongs(billboardUrl)
moodDict = {'neg' : 0, 'pos' : 0, 'neu' : 0}
for artistSong in topHundred:
artist = artistSong[0].split('feat.')[0].strip()
song = artistSong[1]
url = getGeniusUrl(artist, song)
mood = getSongMood(url)
if mood == 'negative':
moodDict['neg'] += 1
elif mood == 'positive':
moodDict['pos'] += 1
else:
moodDict['neu'] += 1
return moodDict
| 29.39823
| 110
| 0.653371
|
0977c379e85f3675d41b4af2a35b6fa7c2564005
| 2,009
|
py
|
Python
|
litex_boards/platforms/sipeed_tang_nano.py
|
chmousset/litex-boards
|
c081177d77f37a4ea6cff150d42a69bd6f0abbc2
|
[
"BSD-2-Clause"
] | null | null | null |
litex_boards/platforms/sipeed_tang_nano.py
|
chmousset/litex-boards
|
c081177d77f37a4ea6cff150d42a69bd6f0abbc2
|
[
"BSD-2-Clause"
] | null | null | null |
litex_boards/platforms/sipeed_tang_nano.py
|
chmousset/litex-boards
|
c081177d77f37a4ea6cff150d42a69bd6f0abbc2
|
[
"BSD-2-Clause"
] | null | null | null |
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Florent Kermarrec <florent@enjoy-digital.fr>
# SPDX-License-Identifier: BSD-2-Clause
# Board diagram/pinout:
# https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png
# http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf
from migen import *
from litex.build.generic_platform import *
from litex.build.gowin.platform import GowinPlatform
from litex.build.openfpgaloader import OpenFPGALoader
# IOs ----------------------------------------------------------------------------------------------
_io = [
# Clk / Rst
("clk24", 0, Pins("35"), IOStandard("LVCMOS33")),
# Leds
("user_led", 0, Pins("16"), IOStandard("LVCMOS33")),
("user_led", 1, Pins("17"), IOStandard("LVCMOS33")),
("user_led", 2, Pins("18"), IOStandard("LVCMOS33")),
# Buttons.
("user_btn", 0, Pins("15"), IOStandard("LVCMOS33")),
("user_btn", 1, Pins("14"), IOStandard("LVCMOS33")),
# Serial
("serial", 0,
Subsignal("tx", Pins("8")),
Subsignal("rx", Pins("9")),
IOStandard("LVCMOS33")
),
]
# Connectors ---------------------------------------------------------------------------------------
_connectors = []
# Platform -----------------------------------------------------------------------------------------
class Platform(GowinPlatform):
default_clk_name = "clk24"
default_clk_period = 1e9/24e6
def __init__(self, toolchain="gowin"):
GowinPlatform.__init__(self, "GW1N-LV1QN48C6/I5", _io, _connectors, toolchain=toolchain, devicename="GW1N-1")
self.toolchain.options["use_done_as_gpio"] = 1
self.toolchain.options["use_reconfign_as_gpio"] = 1
def create_programmer(self):
return OpenFPGALoader("tangnano")
def do_finalize(self, fragment):
GowinPlatform.do_finalize(self, fragment)
self.add_period_constraint(self.lookup_request("clk24", loose=True), 1e9/24e6)
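# Minimal usage sketch (hypothetical top-level script), assuming LiteX and the
# Gowin toolchain are installed:
#   platform = Platform(toolchain="gowin")
#   clk24 = platform.request("clk24")      # 24 MHz system clock pad
#   led = platform.request("user_led", 0)  # first user LED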
| 32.934426
| 117
| 0.578895
|
662617d895fc10a9e86b3366f604a96459c527dd
| 1,399
|
py
|
Python
|
2020_April_Leetcode_30_days_challenge/Week_1_Single Number/by_xor_in_place.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | 32
|
2020-01-05T13:37:16.000Z
|
2022-03-26T07:27:09.000Z
|
2020_April_Leetcode_30_days_challenge/Week_1_Single Number/by_xor_in_place.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | null | null | null |
2020_April_Leetcode_30_days_challenge/Week_1_Single Number/by_xor_in_place.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | 8
|
2020-06-18T16:17:27.000Z
|
2022-03-15T23:58:18.000Z
|
'''
Description:
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
Example 1:
Input: [2,2,1]
Output: 1
Example 2:
Input: [4,1,2,1,2]
Output: 4
'''
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
# Utilize the property of XOR, A ⊕ A = 0, to cancel those elements which appeared twice.
for idx in range( 1, len(nums) ):
nums[0] ^= nums[idx]
return nums[0]
# n : the length of input list, nums.
## Time Complexity: O( n )
#
# The overhead in time is the for loop, which is of O( n ).
## Space Complexity: O( 1 )
#
#   The overhead in space is the storage for the loop index, which is of O( 1 )
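# Worked example of the cancellation (a sketch, not part of the original
# solution): for nums = [4, 1, 2, 1, 2],
#   4 ^ 1 ^ 2 ^ 1 ^ 2 == 4 ^ (1 ^ 1) ^ (2 ^ 2) == 4 ^ 0 ^ 0 == 4
# since XOR is commutative and associative, with A ^ A == 0 and A ^ 0 == A.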
from collections import namedtuple
TestEntry = namedtuple('TestEntry', 'sequence')
def test_bench():
test_data = [
TestEntry( sequence = [2,2,1] ),
TestEntry( sequence = [4,1,2,1,2] ),
TestEntry( sequence = [1,1,3,2,3,5,4,4,5] ),
]
# expected output:
'''
1
4
2
'''
for t in test_data:
print( Solution().singleNumber( nums = t.sequence) )
return
if __name__ == '__main__':
test_bench()
| 17.4875
| 106
| 0.587563
|
410d77d610b3e32840308bdac1aac7114f935e55
| 3,342
|
py
|
Python
|
libcst/codemod/visitors/tests/test_gather_exports.py
|
hoduchieu01/LibCST
|
23af2cb45e1f7647f1604404df5f6e5e87b2e42b
|
[
"Apache-2.0"
] | 1
|
2021-01-18T09:50:29.000Z
|
2021-01-18T09:50:29.000Z
|
libcst/codemod/visitors/tests/test_gather_exports.py
|
hoduchieu01/LibCST
|
23af2cb45e1f7647f1604404df5f6e5e87b2e42b
|
[
"Apache-2.0"
] | null | null | null |
libcst/codemod/visitors/tests/test_gather_exports.py
|
hoduchieu01/LibCST
|
23af2cb45e1f7647f1604404df5f6e5e87b2e42b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
from libcst import parse_module
from libcst.codemod import CodemodContext, CodemodTest
from libcst.codemod.visitors import GatherExportsVisitor
from libcst.testing.utils import UnitTest
class TestGatherExportsVisitor(UnitTest):
def gather_exports(self, code: str) -> GatherExportsVisitor:
transform_instance = GatherExportsVisitor(CodemodContext())
input_tree = parse_module(CodemodTest.make_fixture_data(code))
input_tree.visit(transform_instance)
return transform_instance
def test_gather_noop(self) -> None:
code = """
from foo import bar
from typing import List
bar(["foo", "bar"])
list_of_str = ["foo", "bar", "baz"]
another: List[str] = ["foobar", "foobarbaz"]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, set())
def test_gather_exports_simple(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = ["bar", "baz"]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
def test_gather_exports_simple_set(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = {"bar", "baz"}
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
def test_gather_exports_simple_tuple(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = ("bar", "baz")
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
def test_gather_exports_simple_annotated(self) -> None:
code = """
from foo import bar
from biz import baz
from typing import List
__all__: List[str] = ["bar", "baz"]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
def test_gather_exports_ignore_invalid_1(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = [bar, baz]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, set())
def test_gather_exports_ignore_invalid_2(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = ["bar", "baz", ["biz"]]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
def test_gather_exports_ignore_invalid_3(self) -> None:
code = """
from foo import bar
from biz import baz
__all__ = ["bar", "baz", "foo""bar"]
"""
gatherer = self.gather_exports(code)
self.assertEqual(gatherer.explicit_exported_objects, {"bar", "baz"})
| 29.575221
| 76
| 0.605625
|
86c1aca3554984ca27fed42a13f15d55d2972ffe
| 1,010
|
py
|
Python
|
magicked_admin/utils/text.py
|
Nightroad94/kf2-magicked-admin
|
0c393a4a134f7ba51f0ed3ad9edcea19c3467fc1
|
[
"MIT"
] | 16
|
2018-03-11T05:28:17.000Z
|
2019-07-12T02:59:05.000Z
|
magicked_admin/utils/text.py
|
Nightroad94/kf2-magicked-admin
|
0c393a4a134f7ba51f0ed3ad9edcea19c3467fc1
|
[
"MIT"
] | 77
|
2019-09-17T22:45:04.000Z
|
2021-11-22T20:03:59.000Z
|
magicked_admin/utils/text.py
|
Nightroad94/kf2-magicked-admin
|
0c393a4a134f7ba51f0ed3ad9edcea19c3467fc1
|
[
"MIT"
] | 10
|
2018-03-22T22:43:26.000Z
|
2019-07-22T12:31:37.000Z
|
import math
CHAT_LINE_HEIGHT = 8
def millify(n):
if not n:
return '0'
millnames = ['', 'K', 'M', 'B', 'T']
n = float(n)
millidx = max(0,
min(len(millnames) - 1,
int(math.floor(
0 if n == 0 else math.log10(abs(n)) / 3))))
return '{:.0f}{}'.format(n / 10 ** (3 * millidx), millnames[millidx])
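# Worked examples (what the formula above yields):
#   millify(0)         -> '0'    (falsy input short-circuits)
#   millify(950)       -> '950'  (millidx 0, no suffix)
#   millify(1800)      -> '2K'   ('{:.0f}' rounds 1.8 up)
#   millify(3400000)   -> '3M'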
def trim_string(input_str, length):
if not input_str:
return ""
return (input_str[:length - 2] + '..') if len(input_str) > length \
else input_str
def str_to_bool(s):
if s in ['True', 'true', '1']:
return True
elif s in ['False', 'false', '0']:
return False
else:
        raise ValueError("cannot interpret {!r} as a boolean".format(s))
def pad_output(message):
if not message:
return None
message_height = len(message.split('\n'))
padding_lines = CHAT_LINE_HEIGHT - message_height
if padding_lines > 0:
return '\n' * padding_lines + message
else:
return message
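# Example: an 8-line chat window (CHAT_LINE_HEIGHT) and a 3-line message give
# 5 padding newlines, so older output is pushed out of the visible area.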
| 21.041667
| 73
| 0.534653
|
01a488b62e12bf57c0ca24ea340869d7aadfc519
| 405
|
py
|
Python
|
vh-vsftpd/__init__.py
|
rEinve/ajenti-v
|
7f3ef24198f91d7b7be42bbf0df36414d09172a4
|
[
"MIT"
] | 150
|
2015-01-08T22:49:25.000Z
|
2021-11-13T03:15:50.000Z
|
vh-vsftpd/__init__.py
|
rEinve/ajenti-v
|
7f3ef24198f91d7b7be42bbf0df36414d09172a4
|
[
"MIT"
] | 148
|
2015-01-07T03:53:16.000Z
|
2021-10-03T16:13:57.000Z
|
vh-vsftpd/__init__.py
|
rEinve/ajenti-v
|
7f3ef24198f91d7b7be42bbf0df36414d09172a4
|
[
"MIT"
] | 91
|
2015-01-08T22:49:15.000Z
|
2022-03-17T12:04:29.000Z
|
import ajenti
from ajenti.api import *
from ajenti.plugins import *
info = PluginInfo(
title='Ajenti VH - VSFTPD Support',
icon='globe',
dependencies=[
PluginDependency('vh'),
PluginDependency('services'),
BinaryDependency('vsftpd'),
],
)
def init():
from ajenti.plugins.vh import destroyed_configs
destroyed_configs.append('vsftpd')
import vsftpd
| 18.409091
| 51
| 0.664198
|
60b18005932362651742e91fa4bc2a4d8d4fa6e2
| 1,575
|
py
|
Python
|
seven/main.py
|
yumenetwork/isn-tkinter
|
4811e061e86b072bc4c494e43aaf829d4b90281b
|
[
"MIT"
] | null | null | null |
seven/main.py
|
yumenetwork/isn-tkinter
|
4811e061e86b072bc4c494e43aaf829d4b90281b
|
[
"MIT"
] | null | null | null |
seven/main.py
|
yumenetwork/isn-tkinter
|
4811e061e86b072bc4c494e43aaf829d4b90281b
|
[
"MIT"
] | null | null | null |
from tkinter import *
myWindow = Tk()
x = 0
y = 0
l = 0
c = 0
# Functions
def create_grid():
grille.delete("all")
for i in range(5, 305, 20):
grille.create_line([(i, 5), (i, 305)])
for i in range(5, 305, 20):
grille.create_line([(5, i), (305, i)])
grille.create_line([(5, 5), (5, 305)])
grille.create_line([(305, 5), (305, 305)])
grille.create_line([(5, 5), (305, 5)])
grille.create_line([(5, 305), (305, 305)])
def get_coord(event):
    global x, y
    x = event.x
    y = event.y
    result_labelx.config(text=f"x-coordinate: {x}")
    result_labely.config(text=f"y-coordinate: {y}")
    # Each grid cell is 20 px wide, so integer division maps pixels to cells.
    ligne = int(y / 20) + 1
    column = int(x / 20) + 1
    result_colonne.config(text=f"column: {column}")
    result_ligne.config(text=f"row: {ligne}")
    create_grid()
    # Draw a small cross at the click position.
    grille.create_line([(x - 5, y), (x + 5, y)])
    grille.create_line([(x, y - 5), (x, y + 5)])
# Widgets
result_labelx = Label(myWindow, text=f"x-coordinate: {x}")
result_labely = Label(myWindow, text=f"y-coordinate: {y}")
result_ligne = Label(myWindow, text=f"row: {l}")
result_colonne = Label(myWindow, text=f"column: {c}")
boutonGrid = Button(myWindow, text="Draw the grid", command=create_grid)
grille = Canvas(myWindow, width=310, height=310)
# Display the widgets
boutonGrid.grid(row=0, column=1)
result_labelx.grid(row=1, column=1)
result_labely.grid(row=2, column=1)
result_ligne.grid(row=3, column=1)
result_colonne.grid(row=4, column=1)
grille.grid(row=0, column=0, rowspan=5)
# Main
myWindow.bind("<Button 1>", get_coord)
myWindow.mainloop()
| 25.819672
| 75
| 0.63746
|
9f92d0d2a8d7db55c4465aa4210f0a96882bce42
| 6,438
|
py
|
Python
|
NLP/learn/utils.py
|
Sejik/NLP
|
3799457fd15e3e7bb86820126e40d9d468906c92
|
[
"Apache-2.0"
] | null | null | null |
NLP/learn/utils.py
|
Sejik/NLP
|
3799457fd15e3e7bb86820126e40d9d468906c92
|
[
"Apache-2.0"
] | null | null | null |
NLP/learn/utils.py
|
Sejik/NLP
|
3799457fd15e3e7bb86820126e40d9d468906c92
|
[
"Apache-2.0"
] | null | null | null |
from collections import OrderedDict
import json
import logging
from pathlib import Path
import os
import re
import torch
from torch.nn import DataParallel
import requests
logger = logging.getLogger(__name__)
""" Train Counter """
class TrainCounter:
global_step = 0
epoch = 0
def __init__(self, display_unit="epoch"):
if type(display_unit) == int:
display_unit = f"every_{display_unit}_global_step"
self.display_unit = display_unit
def get_display(self):
if self.display_unit == "epoch":
return self.epoch
else:
return self.global_step
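# Usage sketch (hypothetical values): display by global step instead of epoch.
#   counter = TrainCounter(display_unit=100)  # -> "every_100_global_step"
#   counter.global_step += 1
#   counter.get_display()                     # -> 1 (the current global_step)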
""" Save and Load checkpoint """
def load_model_checkpoint(model, checkpoint):
model.load_state_dict(checkpoint["weights"])
model.config = checkpoint["config"]
model.metrics = checkpoint["metrics"]
model.init_params = checkpoint["init_params"]
model.predict_helper = checkpoint["predict_helper"]
model.train_counter = checkpoint["train_counter"]
logger.info(f"Load model checkpoints...!")
return model
def load_optimizer_checkpoint(optimizer, checkpoint):
optimizer.load_state_dict(checkpoint["optimizer"])
logger.info(f"Load optimizer checkpoints...!")
return optimizer
def save_checkpoint(path, model, optimizer, max_to_keep=10):
path = Path(path)
checkpoint_dir = path / "checkpoint"
checkpoint_dir.mkdir(exist_ok=True)
# Remove old checkpoints
sorted_path = get_sorted_path(checkpoint_dir)
if len(sorted_path) > max_to_keep:
remove_train_counts = list(sorted_path.keys())[: -(max_to_keep - 1)]
for train_count in remove_train_counts:
optimizer_path = sorted_path[train_count].get("optimizer", None)
if optimizer_path:
os.remove(optimizer_path)
model_path = sorted_path[train_count].get("model", None)
if model_path:
os.remove(model_path)
train_counter = model.train_counter
optimizer_path = checkpoint_dir / f"optimizer_{train_counter.get_display()}.pkl"
torch.save({"optimizer": optimizer.state_dict()}, optimizer_path)
model_path = checkpoint_dir / f"model_{train_counter.get_display()}.pkl"
torch.save(
{
"config": model.config,
"init_params": model.init_params,
"predict_helper": model.predict_helper,
"metrics": model.metrics,
"train_counter": model.train_counter,
"vocab_texts": {k: v.to_text() for k, v in model.vocabs.items()},
"weights": model.state_dict(),
},
model_path,
)
# Write Vocab as text file (Only once)
vocab_dir = path / "vocab"
vocab_dir.mkdir(exist_ok=True)
for token_name, vocab in model.vocabs.items():
vocab_path = vocab_dir / f"{token_name}.txt"
if not vocab_path.exists():
vocab.dump(vocab_path)
logger.info(f"Save {train_counter.global_step} global_step checkpoints...!")
def get_sorted_path(checkpoint_dir, both_exist=False):
paths = []
for root, dirs, files in os.walk(checkpoint_dir):
for f_name in files:
if "model" in f_name or "optimizer" in f_name:
paths.append(Path(root) / f_name)
path_with_train_count = {}
for path in paths:
        train_count = re.findall(r"\d+", path.name)[0]
train_count = int(train_count)
if train_count not in path_with_train_count:
path_with_train_count[train_count] = {}
if "model" in path.name:
path_with_train_count[train_count]["model"] = path
if "optimizer" in path.name:
path_with_train_count[train_count]["optimizer"] = path
if both_exist:
remove_keys = []
for key, checkpoint in path_with_train_count.items():
if not ("model" in checkpoint and "optimizer" in checkpoint):
remove_keys.append(key)
for key in remove_keys:
del path_with_train_count[key]
return OrderedDict(sorted(path_with_train_count.items()))
""" NSML """
def bind_nsml(model, **kwargs): # pragma: no cover
if type(model) == DataParallel:
model = model.module
CHECKPOINT_FNAME = "checkpoint.bin"
def infer(raw_data, **kwargs):
print("raw_data:", raw_data)
def load(dir_path, *args):
checkpoint_path = os.path.join(dir_path, CHECKPOINT_FNAME)
checkpoint = torch.load(checkpoint_path)
model.load_state_dict(checkpoint["weights"])
model.config = checkpoint["config"]
model.metrics = checkpoint["metrics"]
        model.init_params = checkpoint["init_params"]
        model.predict_helper = checkpoint["predict_helper"]
model.train_counter = checkpoint["train_counter"]
if "optimizer" in kwargs:
kwargs["optimizer"].load_state_dict(checkpoint["optimizer"])
logger.info(f"Load checkpoints...! {checkpoint_path}")
def save(dir_path, *args):
# save the model with 'checkpoint' dictionary.
checkpoint_path = os.path.join(dir_path, CHECKPOINT_FNAME)
checkpoint = {
"config": model.config,
"init_params": model.init_params,
"predict_helper": model.predict_helper,
"metrics": model.metrics,
"train_counter": model.train_counter,
"vocab_texts": {k: v.to_text() for k, v in model.vocabs.items()},
"weights": model.state_dict(),
}
if "optimizer" in kwargs:
checkpoint["optimizer"] = (kwargs["optimizer"].state_dict(),)
torch.save(checkpoint, checkpoint_path)
train_counter = model.train_counter
logger.info(f"Save {train_counter.global_step} global_step checkpoints...! {checkpoint_path}")
    # Nested functions are used here only to scope these NSML helpers.
""" Notification """
def get_session_name():
session_name = "local"
return session_name
def send_message_to_slack(webhook_url, title=None, message=None): # pragma: no cover
if message is None:
data = {"text": f"{get_session_name()} session is exited."}
else:
data = {"attachments": [{"title": title, "text": message, "color": "#438C56"}]}
try:
if webhook_url == "":
print(data["text"])
else:
requests.post(webhook_url, data=json.dumps(data))
except Exception as e:
print(str(e))
| 30.511848
| 102
| 0.644765
|
9243714bdc98d2f98ce216309c584d6a44a35bf0
| 10,941
|
py
|
Python
|
jenkins_jobs/local_yaml.py
|
bvberkum/jenkins-job-builder
|
bb105b69126bea59518204a05bc9dfc94bb23617
|
[
"Apache-2.0"
] | null | null | null |
jenkins_jobs/local_yaml.py
|
bvberkum/jenkins-job-builder
|
bb105b69126bea59518204a05bc9dfc94bb23617
|
[
"Apache-2.0"
] | null | null | null |
jenkins_jobs/local_yaml.py
|
bvberkum/jenkins-job-builder
|
bb105b69126bea59518204a05bc9dfc94bb23617
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright (C) 2013 Hewlett-Packard.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Provides local yaml parsing classes and extend yaml module
"""Custom application specific yamls tags are supported to provide
enhancements when reading yaml configuration.
These allow inclusion of arbitrary files as a method of having blocks of data
managed separately to the yaml job configurations. A specific usage of this is
inlining scripts contained in separate files, although such tags may also be
used to simplify usage of macros or job templates.
The tag ``!include:`` will treat the following string as file which should be
parsed as yaml configuration data.
Example:
.. literalinclude:: /../../tests/localyaml/fixtures/include001.yaml
contents of include001.yaml.inc:
.. literalinclude:: /../../tests/yamlparser/fixtures/include001.yaml.inc
The tag ``!include-raw:`` will treat the given string or list of strings as
filenames to be opened as one or more data blobs, which should be read into
the calling yaml construct without any further parsing. Any data in a file
included through this tag will be treated as string data.
Examples:
.. literalinclude:: /../../tests/localyaml/fixtures/include-raw001.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-multi001.yaml
The tag ``!include-raw-escape:`` treats the given string or list of strings as
filenames to be opened as one or more data blobs, which should be escaped
before being read in as string data. This allows job-templates to use this tag
to include scripts from files without needing to escape braces in the original
file.
Examples:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-escaped001.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-escaped-multi001.yaml
For all the multi file includes, the files are simply appended using a newline
character.
"""
import functools
import io
import logging
import os
import re
import yaml
from yaml.constructor import BaseConstructor
from yaml import YAMLObject
from collections import OrderedDict
logger = logging.getLogger(__name__)
class OrderedConstructor(BaseConstructor):
"""The default constructor class for PyYAML loading uses standard python
dictionaries which can have randomized ordering enabled (default in
CPython from version 3.3). The order of the XML elements being outputted
is both important for tests and for ensuring predictable generation based
on the source. This subclass overrides this behaviour to ensure that all
dict's created make use of OrderedDict to have iteration of keys to always
follow the order in which the keys were inserted/created.
"""
def construct_yaml_map(self, node):
data = OrderedDict()
yield data
value = self.construct_mapping(node)
if isinstance(node, yaml.MappingNode):
self.flatten_mapping(node)
else:
raise yaml.constructor.ConstructorError(
None, None,
'expected a mapping node, but found %s' % node.id,
node.start_mark)
mapping = OrderedDict()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=False)
try:
hash(key)
except TypeError as exc:
raise yaml.constructor.ConstructorError(
'while constructing a mapping', node.start_mark,
'found unacceptable key (%s)' % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=False)
mapping[key] = value
data.update(mapping)
class LocalAnchorLoader(yaml.Loader):
"""Subclass for yaml.Loader which keeps Alias between calls"""
anchors = {}
def __init__(self, *args, **kwargs):
super(LocalAnchorLoader, self).__init__(*args, **kwargs)
self.anchors = LocalAnchorLoader.anchors
@classmethod
def reset_anchors(cls):
cls.anchors = {}
# override the default composer to skip resetting the anchors at the
# end of the current document
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
return node
class LocalLoader(OrderedConstructor, LocalAnchorLoader):
"""Subclass for yaml.Loader which handles storing the search_path and
escape_callback functions for use by the custom YAML objects to find files
and escape the content where required.
Constructor access a list of search paths to look under for the given
file following each tag, taking the first match found. Search path by
default will include the same directory as the yaml file and the current
working directory.
Loading::
# use the load function provided in this module
import local_yaml
data = local_yaml.load(io.open(fn, 'r', encoding='utf-8'))
# Loading by providing the alternate class to the default yaml load
from local_yaml import LocalLoader
data = yaml.load(io.open(fn, 'r', encoding='utf-8'), LocalLoader)
# Loading with a search path
from local_yaml import LocalLoader
import functools
data = yaml.load(io.open(fn, 'r', encoding='utf-8'),
functools.partial(LocalLoader, search_path=['path']))
"""
def __init__(self, *args, **kwargs):
# make sure to pop off any local settings before passing to
# the parent constructor as any unknown args may cause errors.
self.search_path = list()
if 'search_path' in kwargs:
for p in kwargs.pop('search_path'):
logger.debug("Adding '{0}' to search path for include tags"
.format(p))
self.search_path.append(os.path.normpath(p))
if 'escape_callback' in kwargs:
self.escape_callback = kwargs.pop('escape_callback')
else:
self.escape_callback = self._escape
super(LocalLoader, self).__init__(*args, **kwargs)
# constructor to preserve order of maps and ensure that the order of
# keys returned is consistent across multiple python versions
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
type(self).construct_yaml_map)
if hasattr(self.stream, 'name'):
self.search_path.append(os.path.normpath(
os.path.dirname(self.stream.name)))
self.search_path.append(os.path.normpath(os.path.curdir))
    def _escape(self, data):
        # Double any braces so str.format()-style templating in job-templates
        # leaves the included raw content untouched.
        return re.sub(r'({|})', r'\1\1', data)
class BaseYAMLObject(YAMLObject):
yaml_loader = LocalLoader
yaml_dumper = yaml.Dumper
class YamlInclude(BaseYAMLObject):
yaml_tag = u'!include:'
@classmethod
def _find_file(cls, filename, search_path):
for dirname in search_path:
candidate = os.path.expanduser(os.path.join(dirname, filename))
if os.path.isfile(candidate):
logger.info("Including file '{0}' from path '{1}'"
.format(filename, dirname))
return candidate
return filename
@classmethod
def _open_file(cls, loader, scalar_node):
filename = cls._find_file(loader.construct_yaml_str(scalar_node),
loader.search_path)
try:
with io.open(filename, 'r', encoding='utf-8') as f:
return f.read()
        except Exception:
logger.error("Failed to include file using search path: '{0}'"
.format(':'.join(loader.search_path)))
raise
@classmethod
def _from_file(cls, loader, node):
data = yaml.load(cls._open_file(loader, node),
functools.partial(cls.yaml_loader,
search_path=loader.search_path))
return data
@classmethod
def from_yaml(cls, loader, node):
if isinstance(node, yaml.ScalarNode):
return cls._from_file(loader, node)
elif isinstance(node, yaml.SequenceNode):
return u'\n'.join(cls._from_file(loader, scalar_node)
for scalar_node in node.value)
else:
raise yaml.constructor.ConstructorError(
None, None, "expected either a sequence or scalar node, but "
"found %s" % node.id, node.start_mark)
class YamlIncludeRaw(YamlInclude):
yaml_tag = u'!include-raw:'
@classmethod
def _from_file(cls, loader, node):
return cls._open_file(loader, node)
class YamlIncludeRawEscape(YamlIncludeRaw):
yaml_tag = u'!include-raw-escape:'
@classmethod
def from_yaml(cls, loader, node):
return loader.escape_callback(YamlIncludeRaw.from_yaml(loader, node))
class DeprecatedTag(BaseYAMLObject):
@classmethod
def from_yaml(cls, loader, node):
logger.warning("tag '%s' is deprecated, switch to using '%s'",
cls.yaml_tag, cls._new.yaml_tag)
return cls._new.from_yaml(loader, node)
class YamlIncludeDeprecated(DeprecatedTag):
yaml_tag = u'!include'
_new = YamlInclude
class YamlIncludeRawDeprecated(DeprecatedTag):
yaml_tag = u'!include-raw'
_new = YamlIncludeRaw
class YamlIncludeRawEscapeDeprecated(DeprecatedTag):
yaml_tag = u'!include-raw-escape'
_new = YamlIncludeRawEscape
def load(stream, **kwargs):
LocalAnchorLoader.reset_anchors()
return yaml.load(stream, functools.partial(LocalLoader, **kwargs))
| 33.56135
| 78
| 0.665021
|
dad8ed5807af3fa7a19745383758d3a4fd600bca
| 55,162
|
py
|
Python
|
test/unit/test_assistant_v1.py
|
U34rAli/python-sdk
|
3ec1da7fcbbb540a221d980829a638bc94456da4
|
[
"Apache-2.0"
] | 2
|
2019-11-23T03:32:32.000Z
|
2019-11-23T03:32:36.000Z
|
test/unit/test_assistant_v1.py
|
U34rAli/python-sdk
|
3ec1da7fcbbb540a221d980829a638bc94456da4
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_assistant_v1.py
|
U34rAli/python-sdk
|
3ec1da7fcbbb540a221d980829a638bc94456da4
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
import json
import datetime
from dateutil.tz import tzutc
import responses
import ibm_watson
from ibm_watson import ApiException
from ibm_watson.assistant_v1 import Context, Counterexample, \
CounterexampleCollection, Entity, EntityCollection, Example, \
ExampleCollection, MessageInput, Intent, IntentCollection, Synonym, \
SynonymCollection, Value, ValueCollection, Workspace, WorkspaceCollection
from ibm_cloud_sdk_core.authenticators import BasicAuthenticator
platform_url = 'https://gateway.watsonplatform.net'
service_path = '/assistant/api'
base_url = '{0}{1}'.format(platform_url, service_path)
#########################
# counterexamples
#########################
@responses.activate
def test_create_counterexample():
endpoint = '/v1/workspaces/{0}/counterexamples'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "I want financial advice today.",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(version='2017-02-03', authenticator=authenticator)
service.set_service_url(base_url)
counterexample = service.create_counterexample(
workspace_id='boguswid', text='I want financial advice today.').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert counterexample == response
# Verify that response can be converted to a Counterexample
Counterexample._from_dict(counterexample)
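# The same mocking pattern repeats throughout this module: register a canned
# HTTP response with `responses`, invoke the SDK method, assert exactly one
# recorded call against the expected URL, and round-trip the payload through
# the corresponding model class.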
@responses.activate
def test_rate_limit_exceeded():
endpoint = '/v1/workspaces/{0}/counterexamples'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
error_code = 429
error_msg = 'Rate limit exceeded'
responses.add(
responses.POST,
url,
body='Rate limit exceeded',
status=429,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(version='2017-02-03', authenticator=authenticator)
try:
service.create_counterexample(
workspace_id='boguswid', text='I want financial advice today.')
except ApiException as ex:
assert len(responses.calls) == 1
assert isinstance(ex, ApiException)
assert error_code == ex.code
assert error_msg in str(ex)
@responses.activate
def test_unknown_error():
endpoint = '/v1/workspaces/{0}/counterexamples'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
error_msg = 'Unknown error'
responses.add(
responses.POST,
url,
status=407,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(version='2017-02-03', authenticator=authenticator)
try:
service.create_counterexample(
workspace_id='boguswid', text='I want financial advice today.')
except ApiException as ex:
assert len(responses.calls) == 1
assert error_msg in str(ex)
@responses.activate
def test_delete_counterexample():
endpoint = '/v1/workspaces/{0}/counterexamples/{1}'.format(
'boguswid', 'I%20want%20financial%20advice%20today')
url = '{0}{1}'.format(base_url, endpoint)
response = None
responses.add(
responses.DELETE,
url,
body=response,
status=204,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
counterexample = service.delete_counterexample(
workspace_id='boguswid', text='I want financial advice today').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert counterexample is None
@responses.activate
def test_get_counterexample():
endpoint = '/v1/workspaces/{0}/counterexamples/{1}'.format(
'boguswid', 'What%20are%20you%20wearing%3F')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "What are you wearing?",
"created": "2016-07-11T23:53:59.153Z",
"updated": "2016-12-07T18:53:59.153Z"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
counterexample = service.get_counterexample(
workspace_id='boguswid', text='What are you wearing?').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert counterexample == response
# Verify that response can be converted to a Counterexample
Counterexample._from_dict(counterexample)
@responses.activate
def test_list_counterexamples():
endpoint = '/v1/workspaces/{0}/counterexamples'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"counterexamples": [{
"text": "I want financial advice today.",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}, {
"text": "What are you wearing today",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/counterexamples?version=2017-12-18&page_limit=2",
"next_url":
"/v1/workspaces/pizza_app-e0f3/counterexamples?cursor=base64=&version=2017-12-18&page_limit=2"
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
counterexamples = service.list_counterexamples(workspace_id='boguswid').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert counterexamples == response
# Verify that response can be converted to a CounterexampleCollection
CounterexampleCollection._from_dict(counterexamples)
@responses.activate
def test_update_counterexample():
endpoint = '/v1/workspaces/{0}/counterexamples/{1}'.format(
'boguswid', 'What%20are%20you%20wearing%3F')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "What are you wearing?",
"created": "2016-07-11T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
counterexample = service.update_counterexample(
workspace_id='boguswid',
text='What are you wearing?',
new_text='What are you wearing?').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert counterexample == response
# Verify that response can be converted to a Counterexample
Counterexample._from_dict(counterexample)
#########################
# entities
#########################
@responses.activate
def test_create_entity():
endpoint = '/v1/workspaces/{0}/entities'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"entity": "pizza_toppings",
"description": "Tasty pizza toppings",
"created": "2015-12-06T04:32:20.000Z",
"updated": "2015-12-07T18:53:59.153Z",
"metadata": {
"property": "value"
}
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
entity = service.create_entity(
workspace_id='boguswid',
entity='pizza_toppings',
description='Tasty pizza toppings',
metadata={"property": "value"},
values=None,
fuzzy_match=None).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert entity == response
# Verify that response can be converted to an Entity
Entity._from_dict(entity)
@responses.activate
def test_delete_entity():
endpoint = '/v1/workspaces/{0}/entities/{1}'.format('boguswid', 'pizza_toppings')
url = '{0}{1}'.format(base_url, endpoint)
response = ""
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
entity = service.delete_entity(workspace_id='boguswid', entity='pizza_toppings').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert entity == ""
@responses.activate
def test_get_entity():
endpoint = '/v1/workspaces/{0}/entities/{1}'.format('boguswid', 'pizza_toppings')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"entity": "pizza_toppings",
"description": "Tasty pizza toppings",
"created": "2015-12-06T04:32:20.000Z",
"updated": "2015-12-07T18:53:59.153Z",
"metadata": {
"property": "value"
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
entity = service.get_entity(workspace_id='boguswid', entity='pizza_toppings', export=True).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert entity == response
# Verify that response can be converted to an Entity
Entity._from_dict(entity)
@responses.activate
def test_list_entities():
endpoint = '/v1/workspaces/{0}/entities'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"entities": [{
"entity": "pizza_toppings",
"description": "Tasty pizza toppings",
"created": "2015-12-06T04:32:20.000Z",
"updated": "2015-12-07T18:53:59.153Z",
"metadata": {
"property": "value"
}
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/entities?version=2017-12-18&filter=name:pizza&include_count=true&page_limit=1",
"next_url":
"/v1/workspaces/pizza_app-e0f3/entities?cursor=base64=&version=2017-12-18&filter=name:pizza&page_limit=1",
"total":
1,
"matched":
1
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
entities = service.list_entities(
workspace_id='boguswid',
export=True).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert entities == response
# Verify that response can be converted to an EntityCollection
EntityCollection._from_dict(entities)
@responses.activate
def test_update_entity():
endpoint = '/v1/workspaces/{0}/entities/{1}'.format('boguswid', 'pizza_toppings')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"entity": "pizza_toppings",
"description": "Tasty pizza toppings",
"created": "2015-12-06T04:32:20.000Z",
"updated": "2015-12-07T18:53:59.153Z",
"metadata": {
"property": "value"
}
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
entity = service.update_entity(
workspace_id='boguswid',
entity='pizza_toppings',
new_entity='pizza_toppings').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert entity == response
# Verify that response can be converted to an Entity
Entity._from_dict(entity)
#########################
# examples
#########################
@responses.activate
def test_create_example():
endpoint = '/v1/workspaces/{0}/intents/{1}/examples'.format(
'boguswid', 'pizza_order')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "Gimme a pizza with pepperoni",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
example = service.create_example(
workspace_id='boguswid',
intent='pizza_order',
text='Gimme a pizza with pepperoni',
mentions=[{'entity': 'xxx', 'location': [0, 1]}]).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert example == response
# Verify that response can be converted to an Example
Example._from_dict(example)
@responses.activate
def test_delete_example():
endpoint = '/v1/workspaces/{0}/intents/{1}/examples/{2}'.format(
'boguswid', 'pizza_order', 'Gimme%20a%20pizza%20with%20pepperoni')
url = '{0}{1}'.format(base_url, endpoint)
response = {}
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=204,
content_type='')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
example = service.delete_example(
workspace_id='boguswid',
intent='pizza_order',
text='Gimme a pizza with pepperoni').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert example is None
@responses.activate
def test_get_example():
endpoint = '/v1/workspaces/{0}/intents/{1}/examples/{2}'.format(
'boguswid', 'pizza_order', 'Gimme%20a%20pizza%20with%20pepperoni')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "Gimme a pizza with pepperoni",
"created": "2016-07-11T23:53:59.153Z",
"updated": "2016-12-07T18:53:59.153Z"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(version='2017-02-03', authenticator=authenticator)
example = service.get_example(
workspace_id='boguswid',
intent='pizza_order',
text='Gimme a pizza with pepperoni').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert example == response
# Verify that response can be converted to an Example
Example._from_dict(example)
@responses.activate
def test_list_examples():
endpoint = '/v1/workspaces/{0}/intents/{1}/examples'.format(
'boguswid', 'pizza_order')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"examples": [{
"text": "Can I order a pizza?",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}, {
"text": "Gimme a pizza with pepperoni",
"created": "2016-07-11T16:39:01.774Z",
"updated": "2015-12-07T18:53:59.153Z"
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/intents/order/examples?version=2017-12-18&page_limit=2",
"next_url":
"/v1/workspaces/pizza_app-e0f3/intents/order/examples?cursor=base64=&version=2017-12-18&page_limit=2"
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
examples = service.list_examples(
workspace_id='boguswid', intent='pizza_order').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert examples == response
# Verify that response can be converted to an ExampleCollection
ExampleCollection._from_dict(examples)
@responses.activate
def test_update_example():
endpoint = '/v1/workspaces/{0}/intents/{1}/examples/{2}'.format(
'boguswid', 'pizza_order', 'Gimme%20a%20pizza%20with%20pepperoni')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"text": "Gimme a pizza with pepperoni",
"created": "2016-07-11T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
example = service.update_example(
workspace_id='boguswid',
intent='pizza_order',
text='Gimme a pizza with pepperoni',
new_text='Gimme a pizza with pepperoni',
new_mentions=[{'entity': 'xxx', 'location': [0, 1]}]).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert example == response
# Verify that response can be converted to an Example
Example._from_dict(example)
#########################
# intents
#########################
@responses.activate
def test_create_intent():
endpoint = '/v1/workspaces/{0}/intents'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"intent": "pizza_order",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z",
"description": "User wants to start a new pizza order"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
intent = service.create_intent(
workspace_id='boguswid',
intent='pizza_order',
description='User wants to start a new pizza order').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert intent == response
# Verify that response can be converted to an Intent
Intent._from_dict(intent)
@responses.activate
def test_delete_intent():
endpoint = '/v1/workspaces/{0}/intents/{1}'.format('boguswid',
'pizza_order')
url = '{0}{1}'.format(base_url, endpoint)
response = None
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=204,
content_type='')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
intent = service.delete_intent(
workspace_id='boguswid', intent='pizza_order').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert intent is None
@responses.activate
def test_get_intent():
endpoint = '/v1/workspaces/{0}/intents/{1}'.format('boguswid',
'pizza_order')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"intent": "pizza_order",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z",
"description": "User wants to start a new pizza order"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
intent = service.get_intent(
workspace_id='boguswid', intent='pizza_order', export=False).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert intent == response
# Verify that response can be converted to an Intent
Intent._from_dict(intent)
@responses.activate
def test_list_intents():
endpoint = '/v1/workspaces/{0}/intents'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"intents": [{
"intent": "pizza_order",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z",
"description": "User wants to start a new pizza order"
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/intents?version=2017-12-18&page_limit=1",
"next_url":
"/v1/workspaces/pizza_app-e0f3/intents?cursor=base64=&version=2017-12-18&page_limit=1"
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
intents = service.list_intents(workspace_id='boguswid', export=False).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert intents == response
# Verify that response can be converted to an IntentCollection
IntentCollection._from_dict(intents)
@responses.activate
def test_update_intent():
endpoint = '/v1/workspaces/{0}/intents/{1}'.format('boguswid',
'pizza_order')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"intent": "pizza_order",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z",
"description": "User wants to start a new pizza order"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
intent = service.update_intent(
workspace_id='boguswid',
intent='pizza_order',
new_intent='pizza_order',
new_description='User wants to start a new pizza order').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert intent == response
# Verify that response can be converted to an Intent
Intent._from_dict(intent)
def test_intent_models():
intent = Intent(intent="pizza_order",
created=datetime.datetime(2015, 12, 6, 23, 53, 59, 15300, tzinfo=tzutc()),
updated=datetime.datetime(2015, 12, 7, 18, 53, 59, 15300, tzinfo=tzutc()),
description="User wants to start a new pizza order")
intentDict = intent._to_dict()
check = Intent._from_dict(intentDict)
assert intent == check
#########################
# logs
#########################
@responses.activate
def test_list_logs():
endpoint = '/v1/workspaces/{0}/logs'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"logs": [{
"request": {
"input": {
"text": "Can you turn off the AC"
},
"context": {
"conversation_id": "f2c7e362-4cc8-4761-8b0f-9ccd70c63bca",
"system": {}
}
},
"response": {
"input": {
"text": "Can you turn off the AC"
},
"context": {
"conversation_id": "f2c7e362-4cc8-4761-8b0f-9ccd70c63bca",
"system": {
"dialog_stack": ["root"],
"dialog_turn_counter": 1,
"dialog_request_counter": 1
},
"defaultCounter": 0
},
"entities": [],
"intents": [{
"intent": "turn_off",
"confidence": 0.9332477126694649
}],
"output": {
"log_messages": [],
"text": [
"Hi. It looks like a nice drive today. What would you like me to do?"
],
"nodes_visited": ["node_1_1467221909631"]
}
},
"request_timestamp": "2016-07-16T09:22:38.960Z",
"response_timestamp": "2016-07-16T09:22:39.011Z",
"log_id": "e70d6c12-582d-47a8-a6a2-845120a1f232"
}],
"pagination": {
"next_url":
"/v1/workspaces/15fb0e8a-463d-4fec-86aa-a737d9c38a32/logs?cursor=dOfVSuh6fBpDuOxEL9m1S7JKDV7KLuBmRR+lQG1s1i/rVnBZ0ZBVCuy53ruHgPImC31gQv5prUsJ77e0Mj+6sGu/yfusHYF5&version=2016-07-11&filter=response.top_intent:turn_off&page_limit=1",
"matched":
215
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
logs = service.list_logs(
workspace_id='boguswid').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert logs == response
@responses.activate
def test_list_all_logs():
endpoint = '/v1/logs'
url = '{0}{1}'.format(base_url, endpoint)
response = {
"logs": [{
"request": {
"input": {
"text": "Good morning"
},
"context": {
"metadata": {
"deployment": "deployment_1"
}
}
},
"response": {
"intents": [{
"intent": "hello",
"confidence": 1
}],
"entities": [],
"input": {
"text": "Good morning"
},
"output": {
"text": ["Hi! What can I do for you?"],
"nodes_visited": ["node_2_1501875253968"],
"log_messages": []
},
"context": {
"metadata": {
"deployment": "deployment_1"
},
"conversation_id": "81a43b48-7dca-4a7d-a0d7-6fed03fcee69",
"system": {
"dialog_stack": [{
"dialog_node": "root"
}],
"dialog_turn_counter": 1,
"dialog_request_counter": 1,
"_node_output_map": {
"node_2_1501875253968": [0]
},
"branch_exited": True,
"branch_exited_reason": "completed"
}
}
},
"language": "en",
"workspace_id": "9978a49e-ea89-4493-b33d-82298d3db20d",
"request_timestamp": "2017-09-13T19:52:32.611Z",
"response_timestamp": "2017-09-13T19:52:32.628Z",
"log_id": "aa886a8a-bac5-4b91-8323-2fd61a69c9d3"
}],
"pagination": {}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
logs = service.list_all_logs(
'language::en,request.context.metadata.deployment::deployment_1').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert logs == response
#########################
# message
#########################
@responses.activate
def test_message():
authenticator = BasicAuthenticator('username', 'password')
assistant = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
assistant.set_default_headers({'x-watson-learning-opt-out': "true"})
workspace_id = 'f8fdbc65-e0bd-4e43-b9f8-2975a366d4ec'
message_url = '%s/v1/workspaces/%s/message' % (base_url, workspace_id)
url1_str = '%s/v1/workspaces/%s/message?version=2017-02-03'
message_url1 = url1_str % (base_url, workspace_id)
message_response = {
"context": {
"conversation_id": "1b7b67c0-90ed-45dc-8508-9488bc483d5b",
"system": {
"dialog_stack": ["root"],
"dialog_turn_counter": 1,
"dialog_request_counter": 1
}
},
"intents": [],
"entities": [],
"input": {},
"output": {
"text": "okay",
"log_messages": []
}
}
responses.add(
responses.POST,
message_url,
body=json.dumps(message_response),
status=200,
content_type='application/json')
message = assistant.message(
workspace_id=workspace_id,
input={'text': 'Turn on the lights'},
context=None).get_result()
assert message is not None
assert responses.calls[0].request.url == message_url1
assert 'x-watson-learning-opt-out' in responses.calls[0].request.headers
assert responses.calls[0].request.headers['x-watson-learning-opt-out'] == 'true'
assert responses.calls[0].response.text == json.dumps(message_response)
# test context
responses.add(
responses.POST,
message_url,
        body=json.dumps(message_response),
status=200,
content_type='application/json')
message_ctx = {
'context': {
'conversation_id': '1b7b67c0-90ed-45dc-8508-9488bc483d5b',
'system': {
'dialog_stack': ['root'],
'dialog_turn_counter': 2,
'dialog_request_counter': 1
}
}
}
message = assistant.message(
workspace_id=workspace_id,
input={'text': 'Turn on the lights'},
context=json.dumps(message_ctx['context'])).get_result()
assert message is not None
assert responses.calls[1].request.url == message_url1
assert responses.calls[1].response.text == json.dumps(message_response)
assert len(responses.calls) == 2
@responses.activate
def test_message_with_models():
authenticator = BasicAuthenticator('username', 'password')
assistant = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
assistant.set_default_headers({'x-watson-learning-opt-out': "true"})
workspace_id = 'f8fdbc65-e0bd-4e43-b9f8-2975a366d4ec'
message_url = '%s/v1/workspaces/%s/message' % (base_url, workspace_id)
url1_str = '%s/v1/workspaces/%s/message?version=2017-02-03'
message_url1 = url1_str % (base_url, workspace_id)
message_response = {
"context": {
"conversation_id": "1b7b67c0-90ed-45dc-8508-9488bc483d5b",
"system": {
"dialog_stack": ["root"],
"dialog_turn_counter": 1,
"dialog_request_counter": 1
}
},
"intents": [],
"entities": [],
"input": {},
"output": {
"text": "okay",
"log_messages": []
}
}
responses.add(
responses.POST,
message_url,
body=json.dumps(message_response),
status=200,
content_type='application/json')
message = assistant.message(
workspace_id=workspace_id,
input=MessageInput(text='Turn on the lights'),
context=None).get_result()
assert message is not None
assert responses.calls[0].request.url == message_url1
assert 'x-watson-learning-opt-out' in responses.calls[0].request.headers
assert responses.calls[0].request.headers['x-watson-learning-opt-out'] == 'true'
assert responses.calls[0].response.text == json.dumps(message_response)
# test context
responses.add(
responses.POST,
message_url,
        body=json.dumps(message_response),
status=200,
content_type='application/json')
message_ctx = Context._from_dict(message_response['context'])
message = assistant.message(
workspace_id=workspace_id,
input=MessageInput(text='Turn on the lights'),
context=message_ctx).get_result()
assert message is not None
assert responses.calls[1].request.url == message_url1
assert responses.calls[1].response.text == json.dumps(message_response)
assert len(responses.calls) == 2
#########################
# synonyms
#########################
@responses.activate
def test_create_synonym():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}/synonyms'.format(
'boguswid', 'aeiou', 'vowel')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"synonym": "aeiou",
"created": "2000-01-23T04:56:07.000+00:00",
"updated": "2000-01-23T04:56:07.000+00:00"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
synonym = service.create_synonym(
workspace_id='boguswid', entity='aeiou', value='vowel', synonym='a').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert synonym == response
# Verify that response can be converted to a Synonym
Synonym._from_dict(synonym)
@responses.activate
def test_delete_synonym():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}/synonyms/{3}'.format(
'boguswid', 'aeiou', 'vowel', 'a')
url = '{0}{1}'.format(base_url, endpoint)
response = None
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=204,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
synonym = service.delete_synonym(
workspace_id='boguswid', entity='aeiou', value='vowel', synonym='a').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert synonym is None
@responses.activate
def test_get_synonym():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}/synonyms/{3}'.format(
'boguswid', 'grilling', 'bbq', 'barbecue')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"synonym": "barbecue",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
synonym = service.get_synonym(
workspace_id='boguswid', entity='grilling', value='bbq', synonym='barbecue').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert synonym == response
# Verify that response can be converted to a Synonym
Synonym._from_dict(synonym)
@responses.activate
def test_list_synonyms():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}/synonyms'.format(
'boguswid', 'grilling', 'bbq')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"synonyms": [{
"synonym": "BBQ sauce",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}, {
"synonym": "barbecue",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/entities/sauce/values/types/synonyms?version=2017-12-18&filter=name:b&include_count=true&page_limit=2",
"next_url":
"/v1/workspaces/pizza_app-e0f3/entities/sauce/values/types/synonyms?cursor=base64=&version=2017-12-18&filter=name:b&page_limit=2",
"total":
8,
"matched":
2
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
synonyms = service.list_synonyms(
workspace_id='boguswid',
entity='grilling',
value='bbq').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert synonyms == response
# Verify that response can be converted to a SynonymCollection
SynonymCollection._from_dict(synonyms)
@responses.activate
def test_update_synonym():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}/synonyms/{3}'.format(
'boguswid', 'grilling', 'bbq', 'barbecue')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"synonym": "barbecue",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
synonym = service.update_synonym(
workspace_id='boguswid', entity='grilling', value='bbq', synonym='barbecue', new_synonym='barbecue').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert synonym == response
# Verify that response can be converted to a Synonym
Synonym._from_dict(synonym)
#########################
# values
#########################
@responses.activate
def test_create_value():
endpoint = '/v1/workspaces/{0}/entities/{1}/values'.format('boguswid', 'grilling')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"metadata": "{}",
"created": "2000-01-23T04:56:07.000+00:00",
"value": "aeiou",
"type": "synonyms",
"updated": "2000-01-23T04:56:07.000+00:00"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
value = service.create_value(
workspace_id='boguswid',
entity='grilling',
value='aeiou').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert value == response
# Verify that response can be converted to a Value
Value._from_dict(value)
@responses.activate
def test_delete_value():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}'.format(
'boguswid', 'grilling', 'bbq')
url = '{0}{1}'.format(base_url, endpoint)
response = ""
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
value = service.delete_value(
workspace_id='boguswid', entity='grilling', value='bbq').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert value == ""
@responses.activate
def test_get_value():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}'.format(
'boguswid', 'grilling', 'bbq')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"value": "BBQ sauce",
"metadata": {
"code": 1422
},
"type": "synonyms",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
value = service.get_value(
workspace_id='boguswid', entity='grilling', value='bbq', export=True).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert value == response
# Verify that response can be converted to a Value
Value._from_dict(value)
@responses.activate
def test_list_values():
endpoint = '/v1/workspaces/{0}/entities/{1}/values'.format('boguswid', 'grilling')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"values": [{
"value": "BBQ sauce",
"metadata": {
"code": 1422
},
"type": "synonyms",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-07T18:53:59.153Z"
}],
"pagination": {
"refresh_url":
"/v1/workspaces/pizza_app-e0f3/entities/sauce/values?version=2017-12-18&filter=name:pizza&include_count=true&page_limit=1",
"next_url":
"/v1/workspaces/pizza_app-e0f3/sauce/values?cursor=base64=&version=2017-12-18&filter=name:pizza&page_limit=1",
"total":
1,
"matched":
1
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
values = service.list_values(
workspace_id='boguswid',
entity='grilling',
export=True).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert values == response
# Verify that response can be converted to a ValueCollection
ValueCollection._from_dict(values)
@responses.activate
def test_update_value():
endpoint = '/v1/workspaces/{0}/entities/{1}/values/{2}'.format(
'boguswid', 'grilling', 'bbq')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"value": "BBQ sauce",
"metadata": {
"code": 1422
},
"type": "synonyms",
"created": "2015-12-06T23:53:59.153Z",
"updated": "2015-12-06T23:53:59.153Z"
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
value = service.update_value(
workspace_id='boguswid',
entity='grilling',
value='bbq',
new_value='BBQ sauce',
new_metadata={"code": 1422},
new_synonyms=None).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert value == response
# Verify that response can be converted to a Value
Value._from_dict(value)
#########################
# workspaces
#########################
@responses.activate
def test_create_workspace():
endpoint = '/v1/workspaces'
url = '{0}{1}'.format(base_url, endpoint)
response = {
"name": "Pizza app",
"created": "2015-12-06T23:53:59.153Z",
"language": "en",
"metadata": {},
"updated": "2015-12-06T23:53:59.153Z",
"description": "Pizza app",
"workspace_id": "pizza_app-e0f3",
"learning_opt_out": True
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=201,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
workspace = service.create_workspace(
name='Pizza app', description='Pizza app', language='en', metadata={},
system_settings={'tooling': {'store_generic_responses' : True}}).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert workspace == response
# Verify that response can be converted to a Workspace
Workspace._from_dict(workspace)
@responses.activate
def test_delete_workspace():
endpoint = '/v1/workspaces/{0}'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {}
responses.add(
responses.DELETE,
url,
body=json.dumps(response),
status=204,
content_type='')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
workspace = service.delete_workspace(workspace_id='boguswid').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert workspace is None
@responses.activate
def test_get_workspace():
endpoint = '/v1/workspaces/{0}'.format('boguswid')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"name": "Pizza app",
"created": "2015-12-06T23:53:59.153Z",
"language": "en",
"metadata": {},
"updated": "2015-12-06T23:53:59.153Z",
"description": "Pizza app",
"status": "Available",
"learning_opt_out": False,
"workspace_id": "pizza_app-e0f3"
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
workspace = service.get_workspace(workspace_id='boguswid', export=True, sort='stable').get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert workspace == response
# Verify that response can be converted to a Workspace
Workspace._from_dict(workspace)
@responses.activate
def test_list_workspaces():
endpoint = '/v1/workspaces'
url = '{0}{1}'.format(base_url, endpoint)
response = {
"workspaces": [{
"name": "Pizza app",
"created": "2015-12-06T23:53:59.153Z",
"language": "en",
"metadata": {},
"updated": "2015-12-06T23:53:59.153Z",
"description": "Pizza app",
"workspace_id": "pizza_app-e0f3",
"learning_opt_out": True
}],
"pagination": {
"refresh_url":
"/v1/workspaces?version=2016-01-24&page_limit=1",
"next_url":
"/v1/workspaces?cursor=base64=&version=2016-01-24&page_limit=1"
}
}
responses.add(
responses.GET,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
workspaces = service.list_workspaces().get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert workspaces == response
# Verify that response can be converted to a WorkspaceCollection
WorkspaceCollection._from_dict(workspaces)
@responses.activate
def test_update_workspace():
endpoint = '/v1/workspaces/{0}'.format('pizza_app-e0f3')
url = '{0}{1}'.format(base_url, endpoint)
response = {
"name": "Pizza app",
"created": "2015-12-06T23:53:59.153Z",
"language": "en",
"metadata": {},
"updated": "2015-12-06T23:53:59.153Z",
"description": "Pizza app",
"workspace_id": "pizza_app-e0f3",
"learning_opt_out": True
}
responses.add(
responses.POST,
url,
body=json.dumps(response),
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
workspace = service.update_workspace(
workspace_id='pizza_app-e0f3',
name='Pizza app',
description='Pizza app',
language='en',
metadata={},
system_settings={'tooling': {'store_generic_responses' : True}}).get_result()
assert len(responses.calls) == 1
assert responses.calls[0].request.url.startswith(url)
assert workspace == response
# Verify that response can be converted to a Workspace
Workspace._from_dict(workspace)
@responses.activate
def test_dialog_nodes():
url = 'https://gateway.watsonplatform.net/assistant/api/v1/workspaces/id/dialog_nodes'
responses.add(
responses.GET,
url,
body='{ "application/json": { "dialog_node": "location-atm" }}',
status=200,
content_type='application/json')
responses.add(
responses.POST,
"{0}?version=2017-02-03".format(url),
body='{ "application/json": { "dialog_node": "location-done" }}',
status=200,
content_type='application/json')
responses.add(
responses.DELETE,
"{0}/location-done?version=2017-02-03".format(url),
body='{"description": "deleted successfully"}',
status=200,
content_type='application/json')
responses.add(
responses.GET,
"{0}/location-done?version=2017-02-03".format(url),
body='{ "application/json": { "dialog_node": "location-atm" }}',
status=200,
content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
assistant = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
assistant.create_dialog_node('id', 'location-done', user_label='xxx')
assert responses.calls[0].response.json()['application/json']['dialog_node'] == 'location-done'
assistant.delete_dialog_node('id', 'location-done')
assert responses.calls[1].response.json() == {"description": "deleted successfully"}
assistant.get_dialog_node('id', 'location-done')
assert responses.calls[2].response.json() == {"application/json": {"dialog_node": "location-atm"}}
assistant.list_dialog_nodes('id')
assert responses.calls[3].response.json() == {"application/json": {"dialog_node": "location-atm"}}
assert len(responses.calls) == 4
@responses.activate
def test_delete_user_data():
url = 'https://gateway.watsonplatform.net/assistant/api/v1/user_data'
responses.add(
responses.DELETE,
url,
body=None,
status=204,
        content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
response = service.delete_user_data('id').get_result()
assert response is None
assert len(responses.calls) == 1
@responses.activate
def test_list_mentions():
url = 'https://gateway.watsonplatform.net/assistant/api/v1/workspaces/workspace_id/entities/entity1/mentions'
responses.add(
responses.GET,
url,
body='[{"entity": "xxx"}]',
status=200,
        content_type='application/json')
authenticator = BasicAuthenticator('username', 'password')
service = ibm_watson.AssistantV1(
version='2017-02-03', authenticator=authenticator)
response = service.list_mentions('workspace_id', 'entity1').get_result()
assert response == [{"entity": "xxx"}]
assert len(responses.calls) == 1
| 35.247284
| 243
| 0.612451
|
a9f846b08efd2d05e03a654f0afb952cd945e717
| 9,850
|
py
|
Python
|
tests/toranj/test-036-wpantund-host-route-management.py
|
AdityaHPatwardhan/openthread
|
a201e9d5d0273bb51fa20efc8758be20a725018e
|
[
"BSD-3-Clause"
] | 2,962
|
2016-05-11T15:06:06.000Z
|
2022-03-27T20:06:16.000Z
|
tests/toranj/test-036-wpantund-host-route-management.py
|
AdityaHPatwardhan/openthread
|
a201e9d5d0273bb51fa20efc8758be20a725018e
|
[
"BSD-3-Clause"
] | 5,899
|
2016-05-11T19:21:49.000Z
|
2022-03-31T18:17:20.000Z
|
tests/toranj/test-036-wpantund-host-route-management.py
|
AdityaHPatwardhan/openthread
|
a201e9d5d0273bb51fa20efc8758be20a725018e
|
[
"BSD-3-Clause"
] | 1,113
|
2016-05-11T15:37:42.000Z
|
2022-03-31T09:37:04.000Z
|
#!/usr/bin/env python
#
# Copyright (c) 2019, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import wpan
from wpan import verify
# -----------------------------------------------------------------------------------------------------------------------
# Test description:
#
# This test covers the behavior of the wpantund feature for managing host interface routes (related to off-mesh routes
# within the Thread network). This feature can be enabled using the "Daemon:OffMeshRoute:AutoAddOnInterface" property
# (it is enabled by default).
#
# wpantund adds a route corresponding to an off-mesh route on the host primary interface
# if the route is added by at least one other device within the network and
# (a) either it is not added by the host/this device, or
# (b) if it is also added by the device itself, then
#     - filtering of self-added routes is not enabled, and
#     - the device's own route is added at a lower preference level.
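#
# As a rough illustration only (a sketch of the rule above; this helper is not
# part of wpantund and is not used by the test itself):
def _host_route_rule(added_by_others, added_by_self, filter_self_added,
                     others_priority=None, self_priority=None):
    # No other device advertises the route => nothing to mirror on the host.
    if not added_by_others:
        return False
    # Only other devices advertise it => always mirror it on the host.
    if not added_by_self:
        return True
    # Also advertised by this device: mirror only if self-added routes are not
    # filtered and our own advertisement has a lower preference level.
    return (not filter_self_added) and (self_priority < others_priority)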
test_name = __file__[:-3] if __file__.endswith('.py') else __file__
print('-' * 120)
print('Starting \'{}\''.format(test_name))
# -----------------------------------------------------------------------------------------------------------------------
# Utility functions
def verify_interface_routes(node, route_list):
"""
    This function verifies that `node` has the same interface routes as given by `route_list`, which is an array of
    tuples of (route, prefix_len, metric).
"""
node_routes = wpan.parse_interface_routes_result(node.get(wpan.WPAN_IP6_INTERFACE_ROUTES))
verify(len(route_list) == len(node_routes))
for route in route_list:
for node_route in node_routes:
if (node_route.route_prefix, node_route.prefix_len, node_route.metric) == route:
break
else:
raise wpan.VerifyError('Did not find route {} on node {}'.format(route, node))
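# Illustrative usage (matching the checks below; not an additional test step):
#   verify_interface_routes(r1, [('fd00:abba::', 64, 256)])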
# -----------------------------------------------------------------------------------------------------------------------
# Creating `wpan.Nodes` instances
speedup = 4
wpan.Node.set_time_speedup_factor(speedup)
r1 = wpan.Node()
r2 = wpan.Node()
r3 = wpan.Node()
c3 = wpan.Node()
# -----------------------------------------------------------------------------------------------------------------------
# Init all nodes
wpan.Node.init_all_nodes()
# -----------------------------------------------------------------------------------------------------------------------
# Build network topology
#
# Test topology:
#
# r1 ---- r2
# \ /
# \ /
# \ /
# r3 ---- c3
#
# 3 routers, c3 is added to ensure r3 is promoted to a router quickly!
r1.form("route-test")
r1.allowlist_node(r2)
r2.allowlist_node(r1)
r2.join_node(r1, wpan.JOIN_TYPE_ROUTER)
r3.allowlist_node(r2)
r2.allowlist_node(r3)
r3.join_node(r2, wpan.JOIN_TYPE_ROUTER)
c3.allowlist_node(r3)
r3.allowlist_node(c3)
c3.join_node(r3, wpan.JOIN_TYPE_END_DEVICE)
r3.allowlist_node(r1)
r1.allowlist_node(r3)
# -----------------------------------------------------------------------------------------------------------------------
# Test implementation
ROUTE1 = 'fd00:abba::'
LEN1 = 64
ROUTE2 = 'fd00:cafe:feed::'
LEN2 = 64
ROUTE3 = 'fd00:abba::'
LEN3 = 48
ROUTE4 = 'fd00:1234::'
LEN4 = 64
# Route Priority for off-mesh routes
HIGH_PRIORITY = 1
MEDIUM_PRIORITY = 0
LOW_PRIORITY = -1
# Host route metrics corresponding to off-mesh route priorities (note: a lower metric value means higher priority)
HIGH_METRIC = 1
MEDIUM_METRIC = 256
LOW_METRIC = 512
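# The checks below rely on wpantund mapping off-mesh route priorities to host
# route metrics as follows:
#   HIGH_PRIORITY   (1)  ->  HIGH_METRIC   (1)
#   MEDIUM_PRIORITY (0)  ->  MEDIUM_METRIC (256)
#   LOW_PRIORITY   (-1)  ->  LOW_METRIC    (512)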
WAIT_TIME = 10
# Verify the default daemon configuration for managing host/off-mesh routes
verify(r1.get(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_AUTO_ADD_ON_INTERFACE) == 'true')
verify(r1.get(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_FILTER_SELF_AUTO_ADDED) == 'true')
# Disable the auto route add on r2.
r2.set(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_AUTO_ADD_ON_INTERFACE, 'false')
# Verify the host interface routes are empty when we start.
verify_interface_routes(r1, [])
# Add all 3 routes on r2.
r2.add_route(ROUTE1, prefix_len=LEN1, priority=LOW_PRIORITY)
r2.add_route(ROUTE2, prefix_len=LEN2, priority=MEDIUM_PRIORITY)
r2.add_route(ROUTE3, prefix_len=LEN3, priority=HIGH_PRIORITY)
# We expect to see all 3 routes added on the r1 host interface with the same priority levels as on r2.
def check_routes_on_r1_1():
verify_interface_routes(r1, [(ROUTE1, LEN1, LOW_METRIC), (ROUTE2, LEN2, MEDIUM_METRIC),
(ROUTE3, LEN3, HIGH_METRIC)])
wpan.verify_within(check_routes_on_r1_1, WAIT_TIME)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Add the same routes on r3 with different priorities.
r3.add_route(ROUTE1, prefix_len=LEN1, priority=MEDIUM_PRIORITY)
r3.add_route(ROUTE2, prefix_len=LEN2, priority=LOW_PRIORITY)
# We expect the host interface routes on r1 to change accordingly
def check_routes_on_r1_2():
route_list = [(ROUTE1, LEN1, MEDIUM_METRIC), (ROUTE2, LEN2, MEDIUM_METRIC), (ROUTE3, LEN3, HIGH_METRIC)]
verify_interface_routes(r1, route_list)
wpan.verify_within(check_routes_on_r1_2, WAIT_TIME)
verify_interface_routes(r2, [])
# - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Remove the previously added routes from r2.
r2.remove_route(ROUTE1, prefix_len=LEN1)
r2.remove_route(ROUTE2, prefix_len=LEN2)
r2.remove_route(ROUTE3, prefix_len=LEN3)
# We expect the host interface routes on r1 to again change accordingly:
def check_routes_on_r1_3():
verify_interface_routes(r1, [(ROUTE1, LEN1, MEDIUM_METRIC), (ROUTE2, LEN2, LOW_METRIC)])
wpan.verify_within(check_routes_on_r1_3, WAIT_TIME)
verify_interface_routes(r2, [])
# - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Disable "Daemon:OffMeshRoute:FilterSelfAutoAdded" feature on wpantund.
#
# The route should be added on the host primary interface if it
# is added by at least one other device within the network and
# (a) either it is not added by the host/this device, or
# (b) if it is also added by the device itself, then
#     - filtering of self-added routes is not enabled, and
#     - the device's own route is added at a lower preference level.
r1.set(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_FILTER_SELF_AUTO_ADDED, 'false')
verify(r1.get(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_FILTER_SELF_AUTO_ADDED) == 'false')
# Add ROUTE1 on r1 with low-priority. Since it's also present on r3 with
# medium priority, we should still see the route on host (as medium).
r1.add_route(ROUTE1, prefix_len=LEN1, priority=LOW_PRIORITY)
verify_interface_routes(r1, [(ROUTE1, LEN1, MEDIUM_METRIC), (ROUTE2, LEN2, LOW_METRIC)])
# Now change ROUTE1 on r1 to the same priority as on r3; the route should
# no longer be present in the host interface routes.
r1.remove_route(ROUTE1, prefix_len=LEN1)
r1.add_route(ROUTE1, prefix_len=LEN1, priority=MEDIUM_PRIORITY)
verify_interface_routes(r1, [(ROUTE2, LEN2, LOW_METRIC)])
# Adding ROUTE2 on r1 with a higher priority should remove it from the host interface routes.
r1.add_route(ROUTE2, prefix_len=LEN2, priority=MEDIUM_PRIORITY)
verify_interface_routes(r1, [])
# Adding a new ROUTE4 on r1 should not change anything related to the host interface routes.
r1.add_route(ROUTE4, prefix_len=LEN4, priority=MEDIUM_PRIORITY)
verify_interface_routes(r1, [])
# Removing ROUTE1 and ROUTE2 on r1 should cause them to be added back on the host
# interface (since they are still present as off-mesh routes on r3).
r1.remove_route(ROUTE1, prefix_len=LEN1)
r1.remove_route(ROUTE2, prefix_len=LEN2)
verify_interface_routes(r1, [(ROUTE1, LEN1, MEDIUM_METRIC), (ROUTE2, LEN2, LOW_METRIC)])
verify_interface_routes(r2, [])
# - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Enable "Daemon:OffMeshRoute:FilterSelfAutoAdded" feature on wpantund.
r1.set(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_FILTER_SELF_AUTO_ADDED, 'true')
verify(r1.get(wpan.WPAN_DAEMON_OFF_MESH_ROUTE_FILTER_SELF_AUTO_ADDED) == 'true')
# Adding ROUTE1 with any priority should remove it from host interface routes.
r1.add_route(ROUTE1, prefix_len=LEN1, priority=LOW_PRIORITY)
verify_interface_routes(r1, [(ROUTE2, LEN2, LOW_METRIC)])
r1.remove_route(ROUTE1, prefix_len=LEN1)
verify_interface_routes(r1, [(ROUTE1, LEN1, MEDIUM_METRIC), (ROUTE2, LEN2, LOW_METRIC)])
verify_interface_routes(r2, [])
# -----------------------------------------------------------------------------------------------------------------------
# Test finished
wpan.Node.finalize_all_nodes()
print('\'{}\' passed.'.format(test_name))
| 36.481481
| 121
| 0.672284
|
d1fda3712e6e0cc716f7f5c1301c56d001c15916
| 950
|
py
|
Python
|
python/testData/highlighting/async.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/highlighting/async.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | null | null | null |
python/testData/highlighting/async.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | 1
|
2020-10-15T05:56:42.000Z
|
2020-10-15T05:56:42.000Z
|
<info descr="null">async</info> def <info descr="PY.FUNC_DEFINITION">foo</info>():
pass
async = 1
<info descr="null">async</info> def <info descr="PY.FUNC_DEFINITION">bar</info>():
pass
<info descr="null">async</info> def<error descr="'(' expected"><error descr="Identifier expected"> </error></error> # Incomplete<EOLError descr="':' expected"></EOLError>
<error descr="Indent expected">d</error>ef <info descr="PY.FUNC_DEFINITION">regular</info>(<info descr="PY.PARAMETER">xs</info>):
<info descr="null">async</info> def <info descr="PY.NESTED_FUNC_DEFINITION">quux</info>():
<info descr="null">async</info> for x in xs:
pass
<info descr="null">async</info> with xs:
pass
<info descr="null">async</info> for x in xs:
pass
<error descr="'async' keyword is not expected here">async</error> with <info descr="PY.PARAMETER">xs</info>:
pass
return async
| 32.758621
| 170
| 0.636842
|
949ec1cd20386cf9efe5d10b577c1f8e55011d04
| 1,591
|
py
|
Python
|
engine/station_control/test_plans/ppr_test.py
|
geeklevi/PythonElectron
|
0a01b8842a56f91338de6c341bb1c2037aaae359
|
[
"CC0-1.0"
] | null | null | null |
engine/station_control/test_plans/ppr_test.py
|
geeklevi/PythonElectron
|
0a01b8842a56f91338de6c341bb1c2037aaae359
|
[
"CC0-1.0"
] | null | null | null |
engine/station_control/test_plans/ppr_test.py
|
geeklevi/PythonElectron
|
0a01b8842a56f91338de6c341bb1c2037aaae359
|
[
"CC0-1.0"
] | null | null | null |
from .generic_test import GenericTestPlan
from .instruments import Keithley2602B, Agilent86140B
from .tests import liv
import time
class PprTestPlan(GenericTestPlan):
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
print("pprtest")
# print(args)
# print(kwargs)
self.smu1 = Keithley2602B()
self.osa1 = Agilent86140B()
if 'smu1' in kwargs:
self.smu1.add_address(kwargs['smu1'])
if 'osa1' in kwargs:
self.osa1.add_address(kwargs['osa1'])
if 'rm' in kwargs:
self.smu1.add_resource_manager(kwargs['rm'])
# self.osa1.add_resource_manager(kwargs['rm'])
# then open the visa in the file
self.smu1.open_resource()
# self.osa1.open_resource()
print(self.smu1.address)
# print(self.osa1.address)
self.smu1.initialization()
self.smu1.idendification_query()
liv_initialization = self.read_txt_to_list("keithley_2602B_liv_test.txt", 'keithley2602b')
for cmd in liv_initialization:
# print(cmd)
self.smu1.visa.write(cmd)
time.sleep(0.1)
liv_read_buffer = self.read_txt_to_list("keithley_2602B_liv_read_buffer.txt", 'keithley2602b')
print(liv_read_buffer[0])
data = self.smu1.visa.query(liv_read_buffer[0])
print("woxiangtuichu")
print(data)
print(type(data))
print(kwargs['rm'].session)
self.liv = liv(self.smu1)
self.smu1.visa.close()
| 36.159091
| 102
| 0.610308
|
9f09e197b33f41b4d52f1fea70587afa4cebdabb
| 428,435
|
py
|
Python
|
tests/test_other.py
|
asRIA/emscripten
|
2bee818f21ff1687f28a4186795c10509cea784c
|
[
"MIT"
] | null | null | null |
tests/test_other.py
|
asRIA/emscripten
|
2bee818f21ff1687f28a4186795c10509cea784c
|
[
"MIT"
] | null | null | null |
tests/test_other.py
|
asRIA/emscripten
|
2bee818f21ff1687f28a4186795c10509cea784c
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
# noqa: E241
from __future__ import print_function
from functools import wraps
import glob
import gzip
import itertools
import json
import os
import pipes
import re
import select
import shlex
import shutil
import struct
import subprocess
import sys
import time
import tempfile
import unittest
import uuid
from subprocess import PIPE, STDOUT
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner.py other')
from tools.shared import run_process, try_delete
from tools.shared import EMCC, EMXX, EMAR, EMRANLIB, PYTHON, FILE_PACKAGER, WINDOWS, LLVM_ROOT, EM_BUILD_VERBOSE
from tools.shared import CLANG_CC, CLANG_CXX, LLVM_AR, LLVM_DWARFDUMP
from tools.shared import NODE_JS, SPIDERMONKEY_ENGINE, JS_ENGINES, WASM_ENGINES, V8_ENGINE
from runner import RunnerCore, path_from_root, no_wasm_backend, no_fastcomp, is_slow_test, ensure_dir
from runner import needs_dlfcn, env_modify, no_windows, requires_native_clang, chdir, with_env_modify, create_test_file, parameterized
from jsrun import run_js
from tools import shared, building
import jsrun
import clang_native
import tools.line_endings
import tools.js_optimizer
import tools.tempfiles
import tools.duplicate_function_eliminator
scons_path = shared.which('scons')
emmake = shared.bat_suffix(path_from_root('emmake'))
emcmake = shared.bat_suffix(path_from_root('emcmake'))
emconfigure = shared.bat_suffix(path_from_root('emconfigure'))
emconfig = shared.bat_suffix(path_from_root('em-config'))
emsize = shared.bat_suffix(path_from_root('emsize'))
class temp_directory(object):
def __init__(self, dirname):
self.dir = dirname
def __enter__(self):
self.directory = tempfile.mkdtemp(prefix='emtest_temp_', dir=self.dir)
self.prev_cwd = os.getcwd()
os.chdir(self.directory)
print('temp_directory: ' + self.directory)
return self.directory
def __exit__(self, type, value, traceback):
os.chdir(self.prev_cwd)
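# Illustrative usage (an assumption about intent, not an added test):
#   with temp_directory(tempfile.gettempdir()) as tmp:
#       ...  # cwd is `tmp` inside the block and is restored on exit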
def uses_canonical_tmp(func):
"""Decorator that signals the use of the canonical temp by a test method.
This decorator takes care of cleaning the directory after the
test to satisfy the leak detector.
"""
@wraps(func)
def decorated(self):
# Before running the test completely remove the canonical_tmp
if os.path.exists(self.canonical_temp_dir):
shutil.rmtree(self.canonical_temp_dir)
try:
func(self)
finally:
# Make sure the test isn't lying about the fact that it uses
# canonical_tmp
self.assertTrue(os.path.exists(self.canonical_temp_dir))
# Remove the temp dir in a try-finally, as otherwise if the
# test fails we would not clean it up, and if leak detection
# is set we will show that error instead of the actual one.
shutil.rmtree(self.canonical_temp_dir)
return decorated
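# `uses_canonical_tmp` is applied as a plain decorator, e.g.:
#   @uses_canonical_tmp
#   def test_something(self): ...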
def is_python3_version_supported():
"""Retuns True if the installed python3 version is supported by emscripten.
Note: Emscripten requires python3.5 or above since python3.4 and below do not
support circular dependencies."""
try:
print('is_python3_version_supported')
python3 = shared.which('python3')
print(' python3 =', python3)
output = run_process([python3, '--version'], stdout=PIPE).stdout
print(' output =', output, output.split())
output = output.split()[1]
    # ignore final component which can contain non-integers (e.g. 'rc1')
version = [int(x) for x in output.split('.')[:2]]
return version >= [3, 5]
except Exception:
# If anything goes wrong (no python3, unexpected output format), then we do
# not support this python3
return False
def encode_leb(number):
  # TODO(sbc): handle larger numbers
  # A single LEB128 byte can only encode values below 0x80, since the high bit
  # is the continuation flag.
  assert number < 128
  # pack the integer then take only the first (little end) byte
  return struct.pack('<i', number)[:1]
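# For reference, a general unsigned LEB128 encoder would look like the sketch
# below (an illustrative addition; not part of the original suite, which only
# needs single-byte values):
def encode_uleb128(value):
  assert value >= 0
  out = bytearray()
  while True:
    byte = value & 0x7f  # low 7 bits of the remaining value
    value >>= 7
    if value:
      out.append(byte | 0x80)  # more bytes follow: set the continuation bit
    else:
      out.append(byte)
      return bytes(out)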
def get_fastcomp_src_dir():
"""Locate fastcomp source tree by searching realtive to LLVM_ROOT."""
d = LLVM_ROOT
key_file = 'readme-emscripten-fastcomp.txt'
while d != os.path.dirname(d):
d = os.path.abspath(d)
# when the build directory lives below the source directory
if os.path.exists(os.path.join(d, key_file)):
return d
# when the build directory lives alongside the source directory
elif os.path.exists(os.path.join(d, 'src', key_file)):
return os.path.join(d, 'src')
else:
d = os.path.dirname(d)
return None
def parse_wasm(filename):
wat = run_process([os.path.join(building.get_binaryen_bin(), 'wasm-dis'), filename], stdout=PIPE).stdout
imports = []
exports = []
funcs = []
for line in wat.splitlines():
line = line.strip()
if line.startswith('(import '):
line = line.strip('()')
name = line.split()[2].strip('"')
imports.append(name)
if line.startswith('(export '):
line = line.strip('()')
name = line.split()[1].strip('"')
exports.append(name)
if line.startswith('(func '):
line = line.strip('()')
name = line.split()[1].strip('"')
funcs.append(name)
return imports, exports, funcs
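# Illustrative usage: imports, exports, funcs = parse_wasm('a.out.wasm')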
class other(RunnerCore):
  # Utility to run a simple test in this suite. This receives a directory which
  # should contain test.cpp and test.out files, compiles the cpp, and runs it
  # to verify the output, with optional compile and run arguments.
  # TODO: use in more places
def do_other_test(self, dirname, emcc_args=[], run_args=[]):
shutil.copyfile(path_from_root('tests', dirname, 'test.cpp'), 'test.cpp')
run_process([EMCC, 'test.cpp'] + emcc_args)
expected = open(path_from_root('tests', dirname, 'test.out')).read()
seen = run_js('a.out.js', args=run_args, stderr=PIPE, full_output=True) + '\n'
self.assertContained(expected, seen)
# Another utility to run a test in this suite. This receives a source file
# to compile, with optional compiler and execution flags.
  # Output is checked by verifying that the given literals are contained and
  # that each regex in the list matches. The return code can also be checked.
def do_smart_test(self, source, literals=[], regexes=[],
emcc_args=[], run_args=[], assert_returncode=0):
run_process([EMCC, source] + emcc_args)
seen = run_js('a.out.js', args=run_args, stderr=PIPE, full_output=True,
assert_returncode=assert_returncode) + '\n'
for literal in literals:
self.assertContained([literal], seen)
for regex in regexes:
self.assertTrue(re.search(regex, seen), 'Expected regex "%s" to match on:\n%s' % (regex, seen))
def run_on_pty(self, cmd):
master, slave = os.openpty()
output = []
try:
env = os.environ.copy()
env['TERM'] = 'xterm-color'
proc = subprocess.Popen(cmd, stdout=slave, stderr=slave, env=env)
while proc.poll() is None:
r, w, x = select.select([master], [], [], 1)
if r:
output.append(os.read(master, 1024))
return (proc.returncode, b''.join(output))
finally:
os.close(master)
os.close(slave)
def test_emcc_v(self):
for compiler in [EMCC, EMXX]:
# -v, without input files
proc = run_process([compiler, '-v'], stdout=PIPE, stderr=PIPE)
self.assertContained('clang version %s' % shared.expected_llvm_version(), proc.stderr)
self.assertContained('GNU', proc.stderr)
self.assertNotContained('this is dangerous', proc.stdout)
self.assertNotContained('this is dangerous', proc.stderr)
def test_emcc_generate_config(self):
for compiler in [EMCC, EMXX]:
config_path = './emscripten_config'
run_process([compiler, '--generate-config', config_path])
self.assertExists(config_path, 'A config file should have been created at %s' % config_path)
config_contents = open(config_path).read()
self.assertContained('EMSCRIPTEN_ROOT', config_contents)
self.assertContained('LLVM_ROOT', config_contents)
os.remove(config_path)
def test_emcc_output_mjs(self):
run_process([EMCC, '-o', 'hello_world.mjs', path_from_root('tests', 'hello_world.c')])
with open('hello_world.mjs') as f:
output = f.read()
self.assertContained('export default Module;', output)
# TODO(sbc): Test that this is actually runnable. We currently don't have
# any tests for EXPORT_ES6 but once we do this should be enabled.
# self.assertContained('hello, world!', run_js('hello_world.mjs'))
def test_emcc_out_file(self):
# Verify that "-ofile" works in addition to "-o" "file"
run_process([EMCC, '-c', '-ofoo.o', path_from_root('tests', 'hello_world.c')])
self.assertExists('foo.o')
run_process([EMCC, '-ofoo.js', 'foo.o'])
self.assertExists('foo.js')
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_basics(self, compiler, suffix):
# emcc src.cpp ==> writes a.out.js and a.out.wasm
run_process([compiler, path_from_root('tests', 'hello_world' + suffix)])
self.assertExists('a.out.js')
self.assertExists('a.out.wasm')
self.assertContained('hello, world!', run_js('a.out.js'))
# --version
output = run_process([compiler, '--version'], stdout=PIPE, stderr=PIPE)
output = output.stdout.replace('\r', '')
self.assertContained('emcc (Emscripten gcc/clang-like replacement)', output)
self.assertContained('''Copyright (C) 2014 the Emscripten authors (see AUTHORS.txt)
This is free and open source software under the MIT license.
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
''', output)
# --help
output = run_process([compiler, '--help'], stdout=PIPE, stderr=PIPE)
self.assertContained('Display this information', output.stdout)
self.assertContained('Most clang options will work', output.stdout)
# -dumpmachine
output = run_process([compiler, '-dumpmachine'], stdout=PIPE, stderr=PIPE)
self.assertContained(shared.get_llvm_target(), output.stdout)
# -dumpversion
output = run_process([compiler, '-dumpversion'], stdout=PIPE, stderr=PIPE)
self.assertEqual(shared.EMSCRIPTEN_VERSION, output.stdout.strip())
# properly report source code errors, and stop there
self.clear()
stderr = self.expect_fail([compiler, path_from_root('tests', 'hello_world_error' + suffix)])
self.assertNotContained('IOError', stderr) # no python stack
self.assertNotContained('Traceback', stderr) # no python stack
self.assertContained('error: invalid preprocessing directive', stderr)
self.assertContained(["error: use of undeclared identifier 'cheez", "error: unknown type name 'cheez'"], stderr)
self.assertContained('errors generated.', stderr.splitlines()[-2])
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_2(self, compiler, suffix):
# emcc src.cpp -c and emcc src.cpp -o src.[o|bc] ==> should give a .bc file
for args in [['-c'], ['-o', 'src.o'], ['-o', 'src.bc'], ['-o', 'src.so'], ['-O1', '-c', '-o', '/dev/null'], ['-O1', '-o', '/dev/null']]:
print('args:', args)
if '/dev/null' in args and WINDOWS:
print('skip because windows')
continue
target = args[1] if len(args) == 2 else 'hello_world.o'
self.clear()
run_process([compiler, path_from_root('tests', 'hello_world' + suffix)] + args)
if args[-1] == '/dev/null':
print('(no output)')
continue
syms = building.llvm_nm(target)
self.assertIn('main', syms.defs)
if self.is_wasm_backend():
# wasm backend will also have '__original_main' or such
self.assertEqual(len(syms.defs), 2)
else:
self.assertEqual(len(syms.defs), 1)
if target == 'js': # make sure emcc can recognize the target as a bitcode file
shutil.move(target, target + '.bc')
target += '.bc'
run_process([compiler, target, '-o', target + '.js'])
self.assertContained('hello, world!', run_js(target + '.js'))
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_3(self, compiler, suffix):
# handle singleton archives
run_process([compiler, '-c', path_from_root('tests', 'hello_world' + suffix), '-o', 'a.o'])
run_process([LLVM_AR, 'r', 'a.a', 'a.o'], stdout=PIPE, stderr=PIPE)
run_process([compiler, 'a.a'])
self.assertContained('hello, world!', run_js('a.out.js'))
if not self.is_wasm_backend():
# emcc src.ll ==> generates .js
self.clear()
run_process([compiler, path_from_root('tests', 'hello_world.ll')])
self.assertContained('hello, world!', run_js('a.out.js'))
# emcc [..] -o [path] ==> should work with absolute paths
for path in [os.path.abspath(os.path.join('..', 'file1.js')), os.path.join('b_dir', 'file2.js')]:
print(path)
os.chdir(self.get_dir())
self.clear()
print(os.listdir(os.getcwd()))
ensure_dir(os.path.join('a_dir', 'b_dir'))
os.chdir('a_dir')
# use single file so we don't have more files to clean up
run_process([compiler, path_from_root('tests', 'hello_world' + suffix), '-o', path, '-s', 'SINGLE_FILE=1'])
last = os.getcwd()
os.chdir(os.path.dirname(path))
self.assertContained('hello, world!', run_js(os.path.basename(path)))
os.chdir(last)
try_delete(path)
@parameterized({
'c': [EMCC],
'cxx': [EMXX]})
def test_emcc_4(self, compiler):
# Optimization: emcc src.cpp -o something.js [-Ox]. -O0 is the same as not specifying any optimization setting
for params, opt_level, bc_params, closure, has_malloc in [ # bc params are used after compiling to bitcode
(['-o', 'something.js'], 0, None, 0, 1),
(['-o', 'something.js', '-O0'], 0, None, 0, 0),
(['-o', 'something.js', '-O1'], 1, None, 0, 0),
(['-o', 'something.js', '-O1', '-g'], 1, None, 0, 0), # no closure since debug
(['-o', 'something.js', '-O2'], 2, None, 0, 1),
(['-o', 'something.js', '-O2', '-g'], 2, None, 0, 0),
(['-o', 'something.js', '-Os'], 2, None, 0, 1),
(['-o', 'something.js', '-O3'], 3, None, 0, 1),
# and, test compiling to bitcode first
(['-o', 'something.bc'], 0, [], 0, 0),
(['-o', 'something.bc', '-O0'], 0, [], 0, 0),
(['-o', 'something.bc', '-O1'], 1, ['-O1'], 0, 0),
(['-o', 'something.bc', '-O2'], 2, ['-O2'], 0, 0),
(['-o', 'something.bc', '-O3'], 3, ['-O3'], 0, 0),
(['-O1', '-o', 'something.bc'], 1, [], 0, 0),
# non-wasm
(['-s', 'WASM=0', '-o', 'something.js'], 0, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O0'], 0, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-O1'], 1, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-O1', '-g'], 1, None, 0, 0), # no closure since debug
(['-s', 'WASM=0', '-o', 'something.js', '-O2'], 2, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O2', '-g'], 2, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-Os'], 2, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O3'], 3, None, 0, 1),
# and, test compiling to bitcode first
(['-s', 'WASM=0', '-o', 'something.bc'], 0, ['-s', 'WASM=0'], 0, 0),
(['-s', 'WASM=0', '-o', 'something.bc', '-O0'], 0, ['-s', 'WASM=0'], 0, 0),
(['-s', 'WASM=0', '-o', 'something.bc', '-O1'], 1, ['-s', 'WASM=0', '-O1'], 0, 0),
(['-s', 'WASM=0', '-o', 'something.bc', '-O2'], 2, ['-s', 'WASM=0', '-O2'], 0, 0),
(['-s', 'WASM=0', '-o', 'something.bc', '-O3'], 3, ['-s', 'WASM=0', '-O3'], 0, 0),
(['-s', 'WASM=0', '-O1', '-o', 'something.bc'], 1, ['-s', 'WASM=0'], 0, 0),
]:
if 'WASM=0' in params and self.is_wasm_backend():
continue
print(params, opt_level, bc_params, closure, has_malloc)
self.clear()
keep_debug = '-g' in params
args = [compiler, path_from_root('tests', 'hello_world_loop' + ('_malloc' if has_malloc else '') + '.cpp')] + params
print('..', args)
output = run_process(args, stdout=PIPE, stderr=PIPE)
assert len(output.stdout) == 0, output.stdout
if bc_params is not None:
self.assertExists('something.bc', output.stderr)
bc_args = [compiler, 'something.bc', '-o', 'something.js'] + bc_params
print('....', bc_args)
output = run_process(bc_args, stdout=PIPE, stderr=PIPE)
self.assertExists('something.js', output.stderr)
self.assertContained('hello, world!', run_js('something.js'))
# Verify optimization level etc. in the generated code
# XXX these are quite sensitive, and will need updating when code generation changes
generated = open('something.js').read()
main = self.get_func(generated, '_main') if 'function _main' in generated else generated
assert 'new Uint16Array' in generated and 'new Uint32Array' in generated, 'typed arrays 2 should be used by default'
assert 'SAFE_HEAP' not in generated, 'safe heap should not be used by default'
assert ': while(' not in main, 'when relooping we also js-optimize, so there should be no labelled whiles'
if closure:
if opt_level == 0:
assert '._main =' in generated, 'closure compiler should have been run'
elif opt_level >= 1:
assert '._main=' in generated, 'closure compiler should have been run (and output should be minified)'
else:
# closure has not been run, we can do some additional checks. TODO: figure out how to do these even with closure
assert '._main = ' not in generated, 'closure compiler should not have been run'
if keep_debug:
assert ('switch (label)' in generated or 'switch (label | 0)' in generated) == (opt_level <= 0), 'relooping should be in opt >= 1'
assert ('assert(STACKTOP < STACK_MAX' in generated) == (opt_level == 0), 'assertions should be in opt == 0'
if 'WASM=0' in params:
if opt_level >= 2 and '-g' in params:
assert re.search(r'HEAP8\[\$?\w+ ?\+ ?\(+\$?\w+ ?', generated) or re.search(r'HEAP8\[HEAP32\[', generated) or re.search(r'[i$]\d+ & ~\(1 << [i$]\d+\)', generated), 'eliminator should create compound expressions, and fewer one-time vars' # also in -O1, but easier to test in -O2
looks_unminified = ' = {}' in generated and ' = []' in generated
looks_minified = '={}' in generated and '=[]' and ';var' in generated
assert not (looks_minified and looks_unminified)
if opt_level == 0 or '-g' in params:
assert looks_unminified
elif opt_level >= 2:
assert looks_minified
@no_wasm_backend('tests for asmjs optimzer')
@parameterized({
'c': [EMCC],
'cxx': [EMXX]})
def test_emcc_5(self, compiler):
# asm.js optimization levels
for params, test, text in [
(['-O2'], lambda generated: 'function addRunDependency' in generated, 'shell has unminified utilities'),
(['-O2', '--closure', '1'], lambda generated: 'function addRunDependency' not in generated and ';function' in generated, 'closure minifies the shell, removes whitespace'),
(['-O2', '--closure', '1', '-g1'], lambda generated: 'function addRunDependency' not in generated and ';function' not in generated, 'closure minifies the shell, -g1 makes it keep whitespace'),
(['-O2'], lambda generated: 'var b=0' in generated and 'function _main' not in generated, 'registerize/minify is run by default in -O2'),
(['-O2', '--minify', '0'], lambda generated: 'var b = 0' in generated and 'function _main' not in generated, 'minify is cancelled, but not registerize'),
(['-O2', '--js-opts', '0'], lambda generated: 'var b=0' not in generated and 'var b = 0' not in generated and 'function _main' in generated, 'js opts are cancelled'),
(['-O2', '-g'], lambda generated: 'var b=0' not in generated and 'var b = 0' not in generated and 'function _main' in generated, 'registerize/minify is cancelled by -g'),
(['-O2', '-g0'], lambda generated: 'var b=0' in generated and 'function _main' not in generated, 'registerize/minify is run by default in -O2 -g0'),
(['-O2', '-g1'], lambda generated: 'var b = 0' in generated and 'function _main' not in generated, 'compress is cancelled by -g1'),
(['-O2', '-g2'], lambda generated: ('var b = 0' in generated or 'var i1 = 0' in generated) and 'function _main' in generated, 'minify is cancelled by -g2'),
(['-O2', '-g3'], lambda generated: 'var b=0' not in generated and 'var b = 0' not in generated and 'function _main' in generated, 'registerize is cancelled by -g3'),
(['-O2', '--profiling'], lambda generated: ('var b = 0' in generated or 'var i1 = 0' in generated) and 'function _main' in generated, 'similar to -g2'),
(['-O2', '-profiling'], lambda generated: ('var b = 0' in generated or 'var i1 = 0' in generated) and 'function _main' in generated, 'similar to -g2'),
(['-O2', '--profiling-funcs'], lambda generated: 'var b=0' in generated and '"use asm";var a=' in generated and 'function _main' in generated, 'very minified, but retain function names'),
(['-O2', '-profiling-funcs'], lambda generated: 'var b=0' in generated and '"use asm";var a=' in generated and 'function _main' in generated, 'very minified, but retain function names'),
(['-O2'], lambda generated: 'var b=0' in generated and '"use asm";var a=' in generated and 'function _main' not in generated, 'very minified, no function names'),
# (['-O2', '-g4'], lambda generated: 'var b=0' not in generated and 'var b = 0' not in generated and 'function _main' in generated, 'same as -g3 for now'),
(['-s', 'INLINING_LIMIT=0'], lambda generated: 'function _dump' in generated, 'no inlining without opts'),
([], lambda generated: 'Module["_dump"]' not in generated, 'dump is not exported by default'),
(['-s', 'EXPORTED_FUNCTIONS=["_main", "_dump"]'], lambda generated: 'Module["_dump"] =' in generated, 'dump is now exported'),
(['--llvm-opts', '1'], lambda generated: '_puts(' in generated, 'llvm opts requested'),
([], lambda generated: '// Sometimes an existing Module' in generated, 'without opts, comments in shell code'),
(['-O2'], lambda generated: '// Sometimes an existing Module' not in generated, 'with opts, no comments in shell code'),
(['-O2', '-g2'], lambda generated: '// Sometimes an existing Module' not in generated, 'with -g2, no comments in shell code'),
(['-O2', '-g3'], lambda generated: '// Sometimes an existing Module' in generated, 'with -g3, yes comments in shell code'),
]:
print(params, text)
self.clear()
run_process([compiler, path_from_root('tests', 'hello_world_loop.cpp'), '-o', 'a.out.js', '-s', 'WASM=0'] + params)
self.assertContained('hello, world!', run_js('a.out.js'))
assert test(open('a.out.js').read()), text
def test_multiple_sources(self):
# Compiling two sources at a time should work.
cmd = [EMCC, '-c', path_from_root('tests', 'twopart_main.cpp'), path_from_root('tests', 'twopart_side.c')]
run_process(cmd)
# Object files should be generated by default in the current working
# directory, and not alongside the sources.
self.assertExists('twopart_main.o')
self.assertExists('twopart_side.o')
self.assertNotExists(path_from_root('tests', 'twopart_main.o'))
self.assertNotExists(path_from_root('tests', 'twopart_side.o'))
# But it is an error if '-o' is also specified.
self.clear()
err = self.expect_fail(cmd + ['-o', 'out.o'])
self.assertContained('cannot specify -o with -c/-S and multiple source files', err)
self.assertNotExists('twopart_main.o')
self.assertNotExists('twopart_side.o')
self.assertNotExists(path_from_root('tests', 'twopart_main.o'))
self.assertNotExists(path_from_root('tests', 'twopart_side.o'))
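# For reference, a minimal sketch of the behavior tested above (file names
# here are illustrative, not part of the test):
#   run_process([EMCC, '-c', 'foo.c', 'bar.c'])            # emits foo.o and bar.o in the CWD
#   run_process([EMCC, 'foo.o', 'bar.o', '-o', 'app.js'])  # link as a separate step
# Combining '-c' with an explicit '-o' is only valid for a single source file.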
def test_combining_object_files(self):
# Compiling two files with -c will generate separate object files
run_process([EMCC, path_from_root('tests', 'twopart_main.cpp'), path_from_root('tests', 'twopart_side.c'), '-c'])
self.assertExists('twopart_main.o')
self.assertExists('twopart_side.o')
# Linking with just one of them is expected to fail
err = self.expect_fail([EMCC, 'twopart_main.o'])
self.assertContained('undefined symbol: theFunc', err)
# Linking with both should work
run_process([EMCC, 'twopart_main.o', 'twopart_side.o'])
self.assertContained('side got: hello from main, over', run_js('a.out.js'))
# Combining object files into another object should also work, using the `-r` flag
run_process([EMCC, '-r', 'twopart_main.o', 'twopart_side.o', '-o', 'combined.o'])
# We also support building without the `-r` flag but expect a warning
err = run_process([EMCC, 'twopart_main.o', 'twopart_side.o', '-o', 'combined2.o'], stderr=PIPE).stderr
self.assertBinaryEqual('combined.o', 'combined2.o')
self.assertContained('warning: Assuming object file output in the absence of `-c`', err)
# Should be two symbols (and in the wasm backend, also __original_main)
syms = building.llvm_nm('combined.o')
self.assertIn('main', syms.defs)
if self.is_wasm_backend():
self.assertEqual(len(syms.defs), 3)
else:
self.assertEqual(len(syms.defs), 2)
run_process([EMCC, 'combined.o', '-o', 'combined.o.js'])
self.assertContained('side got: hello from main, over', run_js('combined.o.js'))
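# Sketch of the `-r` partial-link workflow exercised above (names illustrative):
#   run_process([EMCC, '-r', 'a.o', 'b.o', '-o', 'combined.o'])  # merge objects into one object
#   run_process([EMCC, 'combined.o', '-o', 'app.js'])            # final link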
def test_js_transform(self):
with open('t.py', 'w') as f:
f.write('''
import sys
f = open(sys.argv[1], 'a')
f.write('transformed!')
f.close()
''')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--js-transform', '%s t.py' % (PYTHON)])
self.assertIn('transformed!', open('a.out.js').read())
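# Sketch of `--js-transform`: the given command is run with the path of the
# generated JS appended, so any script that rewrites that file in place works,
# e.g. (hypothetical project files):
#   run_process([EMCC, 'app.c', '--js-transform', '%s my_transform.py' % PYTHON])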
@no_wasm_backend("wasm backend alwasy embedds memory")
def test_js_mem_file(self):
for opts in [0, 1, 2, 3]:
print('mem init in', opts)
self.clear()
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-O' + str(opts)])
if opts >= 2:
self.assertExists('a.out.js.mem')
else:
self.assertNotExists('a.out.js.mem')
def test_emcc_asm_v_wasm(self):
for opts in ([], ['-O1'], ['-O2'], ['-O3']):
print('opts', opts)
for mode in ([], ['-s', 'WASM=0']):
self.clear()
wasm = '=0' not in str(mode)
print(' mode', mode, 'wasm?', wasm)
run_process([EMCC, path_from_root('tests', 'hello_world.c')] + opts + mode)
self.assertExists('a.out.js')
if wasm:
self.assertExists('a.out.wasm')
for engine in JS_ENGINES:
print(' engine', engine)
out = run_js('a.out.js', engine=engine, stderr=PIPE, full_output=True)
self.assertContained('hello, world!', out)
if not wasm and engine == SPIDERMONKEY_ENGINE:
self.validate_asmjs(out)
if not wasm and not self.is_wasm_backend():
src = open('a.out.js').read()
if opts == []:
self.assertContained('almost asm', src)
else:
self.assertContained('use asm', src)
def test_emcc_cflags(self):
output = run_process([EMCC, '--cflags'], stdout=PIPE)
flags = output.stdout.strip()
self.assertContained(' '.join(building.doublequote_spaces(shared.emsdk_cflags([], False))), flags)
output = run_process([EMXX, '--cflags'], stdout=PIPE)
flags = output.stdout.strip()
self.assertContained(' '.join(building.doublequote_spaces(shared.emsdk_cflags([], True))), flags)
# check they work
cmd = [CLANG_CXX, path_from_root('tests', 'hello_world.cpp')] + shlex.split(flags.replace('\\', '\\\\')) + ['-c', '-emit-llvm', '-o', 'a.bc']
run_process(cmd)
run_process([EMCC, 'a.bc'])
self.assertContained('hello, world!', run_js('a.out.js'))
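# Sketch of driving an external clang with emcc's flags, as tested above
# (file names illustrative):
#   flags = run_process([EMCC, '--cflags'], stdout=PIPE).stdout.strip()
#   run_process([CLANG_CC, 'foo.c', '-c', '-emit-llvm', '-o', 'foo.bc'] + shlex.split(flags))
#   run_process([EMCC, 'foo.bc'])  # emcc can then link the resulting bitcode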
def test_emcc_print_search_dirs(self):
result = run_process([EMCC, '-print-search-dirs'], stdout=PIPE, stderr=PIPE)
self.assertContained('programs: =', result.stdout)
self.assertContained('libraries: =', result.stdout)
def test_emar_em_config_flag(self):
# Test that the --em-config flag is accepted but not passed down to llvm-ar.
# We expand this in case the EM_CONFIG is ~/.emscripten (default)
config = os.path.expanduser(shared.EM_CONFIG)
proc = run_process([EMAR, '--em-config', config, '-version'], stdout=PIPE, stderr=PIPE)
self.assertEqual(proc.stderr, "")
self.assertContained('LLVM', proc.stdout)
def test_emsize(self):
with open(path_from_root('tests', 'other', 'test_emsize.out')) as expected_output:
expected = expected_output.read()
cmd = [emsize, path_from_root('tests', 'other', 'test_emsize.js')]
for command in [cmd, cmd + ['-format=sysv']]:
output = run_process(command, stdout=PIPE).stdout
self.assertContained(expected, output)
@parameterized({
# ('directory to the test', 'output filename', ['extra args to pass to
# CMake']) Testing all combinations would be too much work and the test
# would take 10 minutes+ to finish (CMake feature detection is slow), so
# combine multiple features into one to try to cover as much as possible
# while still keeping this test in sensible time limit.
'js': ('target_js', 'test_cmake.js', ['-DCMAKE_BUILD_TYPE=Debug']),
'html': ('target_html', 'hello_world_gles.html', ['-DCMAKE_BUILD_TYPE=Release']),
'library': ('target_library', 'libtest_cmake.a', ['-DCMAKE_BUILD_TYPE=MinSizeRel']),
'static_cpp': ('target_library', 'libtest_cmake.a', ['-DCMAKE_BUILD_TYPE=RelWithDebInfo', '-DCPP_LIBRARY_TYPE=STATIC']),
'stdproperty': ('stdproperty', 'helloworld.js', [])
})
def test_cmake(self, test_dir, output_file, cmake_args):
# Test all supported generators.
if WINDOWS:
generators = ['MinGW Makefiles', 'NMake Makefiles']
else:
generators = ['Unix Makefiles', 'Ninja', 'Eclipse CDT4 - Ninja']
configurations = {'MinGW Makefiles' : {'build' : ['mingw32-make'] }, # noqa
'NMake Makefiles' : {'build' : ['nmake', '/NOLOGO']}, # noqa
'Unix Makefiles' : {'build' : ['make']}, # noqa
'Ninja' : {'build' : ['ninja']}, # noqa
'Eclipse CDT4 - Ninja': {'build' : ['ninja']}, # noqa
}
for generator in generators:
conf = configurations[generator]
if not shared.which(conf['build'][0]):
# Skip this generator if its build tool is not installed
print('Skipping %s test for CMake support; build tool not found: %s.' % (generator, conf['build'][0]))
continue
cmakelistsdir = path_from_root('tests', 'cmake', test_dir)
with temp_directory(self.get_dir()) as tempdirname:
# Run Cmake
cmd = [emcmake, 'cmake'] + cmake_args + ['-G', generator, cmakelistsdir]
env = os.environ.copy()
# https://github.com/emscripten-core/emscripten/pull/5145: Check that CMake works even if EMCC_SKIP_SANITY_CHECK=1 is passed.
if test_dir == 'target_html':
env['EMCC_SKIP_SANITY_CHECK'] = '1'
print(str(cmd))
ret = run_process(cmd, env=env, stdout=None if EM_BUILD_VERBOSE >= 2 else PIPE, stderr=None if EM_BUILD_VERBOSE >= 1 else PIPE)
if ret.stderr is not None and len(ret.stderr.strip()):
print(ret.stderr) # If there were any errors, print them directly to console for diagnostics.
if ret.stderr is not None and 'error' in ret.stderr.lower():
print('Failed command: ' + ' '.join(cmd))
print('Result:\n' + ret.stderr)
self.fail('cmake call failed!')
# Build
cmd = conf['build']
if EM_BUILD_VERBOSE >= 3 and 'Ninja' not in generator:
cmd += ['VERBOSE=1']
ret = run_process(cmd, stdout=None if EM_BUILD_VERBOSE >= 2 else PIPE)
if ret.stderr is not None and len(ret.stderr.strip()):
print(ret.stderr) # If there were any errors, print them directly to console for diagnostics.
if ret.stdout is not None and 'error' in ret.stdout.lower() and '0 error(s)' not in ret.stdout.lower():
print('Failed command: ' + ' '.join(cmd))
print('Result:\n' + ret.stdout)
self.fail('make failed!')
self.assertExists(tempdirname + '/' + output_file, 'building a cmake-generated Makefile failed to produce an output file %s!' % (tempdirname + '/' + output_file))
# Run through node, if CMake produced a .js file.
if output_file.endswith('.js'):
ret = run_process(NODE_JS + [tempdirname + '/' + output_file], stdout=PIPE).stdout
self.assertTextDataIdentical(open(cmakelistsdir + '/out.txt').read().strip(), ret.strip())
# Test that the various CMAKE_xxx_COMPILE_FEATURES that are advertised for the Emscripten toolchain match with the actual language features that Clang supports.
# If we update LLVM version and this test fails, copy over the new advertised features from Clang and place them to cmake/Modules/Platform/Emscripten.cmake.
@no_windows('Skipped on Windows because CMake does not configure native Clang builds well on Windows.')
def test_cmake_compile_features(self):
with temp_directory(self.get_dir()):
cmd = ['cmake', '-DCMAKE_C_COMPILER=' + CLANG_CC, '-DCMAKE_CXX_COMPILER=' + CLANG_CXX, path_from_root('tests', 'cmake', 'stdproperty')]
print(str(cmd))
native_features = run_process(cmd, stdout=PIPE).stdout
with temp_directory(self.get_dir()):
cmd = [emcmake, 'cmake', path_from_root('tests', 'cmake', 'stdproperty')]
print(str(cmd))
emscripten_features = run_process(cmd, stdout=PIPE).stdout
native_features = '\n'.join([x for x in native_features.split('\n') if '***' in x])
emscripten_features = '\n'.join([x for x in emscripten_features.split('\n') if '***' in x])
self.assertTextDataIdentical(native_features, emscripten_features)
# Tests that it's possible to pass C++11 or GNU++11 build modes to CMake by building code that needs C++11 (embind)
def test_cmake_with_embind_cpp11_mode(self):
for args in [[], ['-DNO_GNU_EXTENSIONS=1']]:
with temp_directory(self.get_dir()) as tempdirname:
configure = [emcmake, 'cmake', path_from_root('tests', 'cmake', 'cmake_with_emval')] + args
print(str(configure))
run_process(configure)
build = ['cmake', '--build', '.']
print(str(build))
run_process(build)
ret = run_process(NODE_JS + [os.path.join(tempdirname, 'cpp_with_emscripten_val.js')], stdout=PIPE).stdout.strip()
if '-DNO_GNU_EXTENSIONS=1' in args:
self.assertTextDataIdentical('Hello! __STRICT_ANSI__: 1, __cplusplus: 201103', ret)
else:
self.assertTextDataIdentical('Hello! __STRICT_ANSI__: 0, __cplusplus: 201103', ret)
# Tests the EMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES CMake toolchain option
def test_cmake_bitcode_static_libraries(self):
if self.is_wasm_backend():
# Test that this option produces an error with the llvm backend
err = self.expect_fail([emcmake, 'cmake', path_from_root('tests', 'cmake', 'static_lib'), '-DEMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES=ON'])
self.assertContained('EMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES is not compatible with the', err)
return
# Test that building static libraries by default generates UNIX archives (.a, with the emar tool)
self.clear()
run_process([emcmake, 'cmake', path_from_root('tests', 'cmake', 'static_lib')])
run_process(['cmake', '--build', '.'])
self.assertTrue(building.is_ar('libstatic_lib.a'))
run_process([EMAR, 'x', 'libstatic_lib.a'])
found = False # hashing makes the object name random
for x in os.listdir('.'):
if x.endswith('.o'):
found = True
if self.is_wasm_backend():
assert building.is_wasm(x)
else:
assert building.is_bitcode(x)
assert found
# Test that passing the -DEMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES=ON
# directive causes CMake to generate LLVM bitcode files as static libraries
# (.bc)
self.clear()
run_process([emcmake, 'cmake', '-DEMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES=ON', path_from_root('tests', 'cmake', 'static_lib')])
run_process(['cmake', '--build', '.'])
if self.is_wasm_backend():
assert building.is_wasm('libstatic_lib.bc')
else:
assert building.is_bitcode('libstatic_lib.bc')
assert not building.is_ar('libstatic_lib.bc')
# Test that one is able to fake custom suffixes for static libraries.
# (sometimes projects want to emulate stuff, and do weird things like files
# with ".so" suffix which are in fact either ar archives or bitcode files)
self.clear()
run_process([emcmake, 'cmake', '-DSET_FAKE_SUFFIX_IN_PROJECT=1', path_from_root('tests', 'cmake', 'static_lib')])
run_process(['cmake', '--build', '.'])
assert building.is_ar('myprefix_static_lib.somecustomsuffix')
# Tests that the CMake variable EMSCRIPTEN_VERSION is properly provided to user CMake scripts
def test_cmake_emscripten_version(self):
run_process([emcmake, 'cmake', path_from_root('tests', 'cmake', 'emscripten_version')])
def test_system_include_paths(self):
# Verify that all default include paths are within `emscripten/system`
def verify_includes(stderr):
self.assertContained('<...> search starts here:', stderr)
assert stderr.count('End of search list.') == 1, stderr
start = stderr.index('<...> search starts here:')
end = stderr.index('End of search list.')
includes = stderr[start:end]
includes = [i.strip() for i in includes.splitlines()[1:-1]]
for i in includes:
if shared.Cache.dirname in i:
self.assertContained(shared.Cache.dirname, i)
else:
self.assertContained(path_from_root('system'), i)
err = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-v'], stderr=PIPE).stderr
verify_includes(err)
err = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-v'], stderr=PIPE).stderr
verify_includes(err)
def test_failure_error_code(self):
for compiler in [EMCC, EMXX]:
# Test that if one file is missing from the build, then emcc shouldn't succeed, and shouldn't produce an output file.
self.expect_fail([compiler, path_from_root('tests', 'hello_world.c'), 'this_file_is_missing.c', '-o', 'out.js'])
self.assertFalse(os.path.exists('out.js'))
def test_use_cxx(self):
create_test_file('empty_file', ' ')
dash_xc = run_process([EMCC, '-v', '-xc', 'empty_file'], stderr=PIPE).stderr
self.assertNotContained('-x c++', dash_xc)
dash_xcpp = run_process([EMCC, '-v', '-xc++', 'empty_file'], stderr=PIPE).stderr
self.assertContained('-x c++', dash_xcpp)
def test_cxx11(self):
for std in ['-std=c++11', '--std=c++11']:
for compiler in [EMCC, EMXX]:
run_process([compiler, std, path_from_root('tests', 'hello_cxx11.cpp')])
# Regression test for issue #4522: Incorrect CC vs CXX detection
def test_incorrect_c_detection(self):
# This auto-detection only works for the compile phase.
# For linking you need to use `em++` or pass `-x c++`
create_test_file('test.c', 'foo\n')
for compiler in [EMCC, EMXX]:
run_process([compiler, '-c', '--bind', '--embed-file', 'test.c', path_from_root('tests', 'hello_world.cpp')])
def test_odd_suffixes(self):
for suffix in ['CPP', 'c++', 'C++', 'cxx', 'CXX', 'cc', 'CC', 'i', 'ii']:
if self.is_wasm_backend() and suffix == 'ii':
# wasm backend treats .i and .ii specially and considers them already
# pre-processed. Because of this it strips all the -D command line
# flags, including the __EMSCRIPTEN__ define, which makes this fail
# to compile since libcxx/__config depends on __EMSCRIPTEN__.
continue
self.clear()
print(suffix)
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'test.' + suffix)
run_process([EMCC, self.in_dir('test.' + suffix)])
self.assertContained('hello, world!', run_js('a.out.js'))
for suffix in ['lo']:
self.clear()
print(suffix)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'binary.' + suffix])
run_process([EMCC, 'binary.' + suffix])
self.assertContained('hello, world!', run_js('a.out.js'))
@no_wasm_backend('asm.js minification')
def test_asm_minify(self):
def test(args):
run_process([EMCC, path_from_root('tests', 'hello_world_loop_malloc.cpp'), '-s', 'WASM=0'] + args)
self.assertContained('hello, world!', run_js('a.out.js'))
return open('a.out.js').read()
src = test([])
assert 'function _malloc' in src
src = test(['-O2', '-s', 'ASM_JS=1'])
normal_size = len(src)
print('normal', normal_size)
assert 'function _malloc' not in src
src = test(['-O2', '-s', 'ASM_JS=1', '--minify', '0'])
unminified_size = len(src)
print('unminified', unminified_size)
assert unminified_size > normal_size
assert 'function _malloc' not in src
src = test(['-O2', '-s', 'ASM_JS=1', '-g'])
debug_size = len(src)
print('debug', debug_size)
self.assertGreater(debug_size, unminified_size)
self.assertContained('function _malloc', src)
@no_wasm_backend('tests fastcomp extra assertions for function pointer errors - do we need these?')
def test_dangerous_func_cast(self):
src = r'''
#include <stdio.h>
typedef void (*voidfunc)();
int my_func() {
printf("my func\n");
return 10;
}
int main(int argc, char **argv) {
voidfunc fps[10];
for (int i = 0; i < 10; i++)
fps[i] = (i == argc) ? (void (*)())my_func : NULL;
fps[2 * (argc-1) + 1]();
return 0;
}
'''
create_test_file('src.c', src)
def test(args, expected):
print(args, expected)
run_process([EMCC, 'src.c'] + args, stderr=PIPE)
self.assertContained(expected, run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None))
if self.is_wasm_backend():
return
print('in asm.js')
run_process([EMCC, 'src.c', '-s', 'WASM=0'] + args, stderr=PIPE)
self.assertContained(expected, run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None))
# TODO: emulation function support in wasm is imperfect
print('with emulated function pointers in asm.js')
run_process([EMCC, '-s', 'WASM=0', 'src.c', '-s', 'ASSERTIONS=1'] + args + ['-s', 'EMULATED_FUNCTION_POINTERS=1'], stderr=PIPE)
out = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
self.assertContained(expected, out)
# In fastcomp everything is asm.js, so a call through a wrongly-typed
# function pointer can't just work; but ASSERTIONS=2 gives much better info for debugging
# Case 1: No useful info, but does mention ASSERTIONS
test(['-O1'], 'ASSERTIONS')
# Case 2: Some useful text
test(['-O1', '-s', 'ASSERTIONS=1'], [
'Invalid function pointer',
"called with signature 'v'. Perhaps this is an invalid value",
'Build with ASSERTIONS=2 for more info'
])
# Case 3: actually useful identity of the bad pointer, with comparisons to
# what it would be in other types/tables
test(['-O1', '-s', 'ASSERTIONS=2'], [
'Invalid function pointer',
"called with signature 'v'. Perhaps this is an invalid value",
'This pointer might make sense in another type signature:',
'Invalid function pointer',
"called with signature 'v'. Perhaps this is an invalid value",
"i: asm['_my_func']"
])
# Case 4: emulate so it works
test(['-O1', '-s', 'EMULATE_FUNCTION_POINTER_CASTS=1'], 'my func\n')
@no_wasm_backend('uses EMULATED_FUNCTION_POINTERS')
def test_emulate_function_pointer_casts_assertions_2(self):
# check empty tables work with assertions 2 in this mode (#6554)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EMULATED_FUNCTION_POINTERS=1', '-s', 'ASSERTIONS=2'])
def test_wl_linkflags(self):
# Test passing -L and -l to the linker via -Wl, arguments and via a -Wl,@ response file
create_test_file('main.cpp', '''
extern "C" void printey();
int main() {
printey();
return 0;
}
''')
create_test_file('libfile.cpp', '''
#include <stdio.h>
extern "C" void printey() {
printf("hello from lib\\n");
}
''')
create_test_file('linkflags.txt', '''
-L.
-lfoo
''')
run_process([EMCC, '-o', 'libfile.o', 'libfile.cpp'])
run_process([EMAR, 'cr', 'libfoo.a', 'libfile.o'])
run_process([EMCC, 'main.cpp', '-L.', '-lfoo'])
run_process([EMCC, 'main.cpp', '-Wl,-L.', '-Wl,-lfoo'])
run_process([EMCC, 'main.cpp', '-Wl,@linkflags.txt'])
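# Note on the response-file form used above: a -Wl,@FILE argument points at a
# file that simply lists linker arguments (whitespace- or newline-separated),
# exactly as they would appear on the command line, e.g. the linkflags.txt
# created above containing '-L.' and '-lfoo'.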
def test_l_link(self):
# Linking with -lLIBNAME and -L/DIRNAME should work, and should also work with a space between the flag and its value (-l LIBNAME, -L DIRNAME)
create_test_file('main.cpp', '''
extern void printey();
int main() {
printey();
return 0;
}
''')
create_test_file('libfile.cpp', '''
#include <stdio.h>
void printey() {
printf("hello from lib\\n");
}
''')
ensure_dir('libdir')
libfile = self.in_dir('libdir', 'libfile.so')
aout = 'a.out.js'
def build(path, args):
run_process([EMCC, path] + args)
# Test linking the library built here by emcc
build('libfile.cpp', ['-c'])
shutil.move('libfile.o', libfile)
build('main.cpp', ['-L' + 'libdir', '-lfile'])
self.assertContained('hello from lib', run_js(aout))
# Also test execution with `-l c` and space-separated library linking syntax
os.remove(aout)
build('libfile.cpp', ['-c', '-l', 'c'])
shutil.move('libfile.o', libfile)
build('main.cpp', ['-L', 'libdir', '-l', 'file'])
self.assertContained('hello from lib', run_js(aout))
# Must not leave unneeded linker stubs
self.assertNotExists('a.out')
self.assertNotExists('a.exe')
def test_commons_link(self):
create_test_file('a.h', r'''
#if !defined(A_H)
#define A_H
extern int foo[8];
#endif
''')
create_test_file('a.c', r'''
#include "a.h"
int foo[8];
''')
create_test_file('main.c', r'''
#include <stdio.h>
#include "a.h"
int main() {
printf("|%d|\n", foo[0]);
return 0;
}
''')
run_process([EMCC, '-o', 'a.o', 'a.c'])
run_process([EMAR, 'rv', 'library.a', 'a.o'])
run_process([EMCC, '-o', 'main.o', 'main.c'])
run_process([EMCC, '-o', 'a.js', 'main.o', 'library.a'])
self.assertContained('|0|', run_js('a.js'))
@parameterized({
'expand_symlinks': [[]],
'no_canonical_prefixes': [['-no-canonical-prefixes']],
})
@no_windows('Windows does not support symlinks')
def test_symlink_points_to_bad_suffix(self, flags):
"""Tests compiling a symlink where foobar.c points to foobar.xxx.
In this case, we should always successfully compile the code."""
create_test_file('foobar.xxx', 'int main(){ return 0; }')
os.symlink('foobar.xxx', 'foobar.c')
run_process([EMCC, 'foobar.c', '-o', 'foobar.bc'] + flags)
@parameterized({
'expand_symlinks': ([], True),
'no_canonical_prefixes': (['-no-canonical-prefixes'], False),
})
@no_windows('Windows does not support symlinks')
def test_symlink_has_bad_suffix(self, flags, expect_success):
"""Tests compiling a symlink where foobar.xxx points to foobar.c.
In this case, setting -no-canonical-prefixes will result in a build failure
due to the inappropriate file suffix on foobar.xxx."""
create_test_file('foobar.c', 'int main(){ return 0; }')
os.symlink('foobar.c', 'foobar.xxx')
proc = run_process([EMCC, 'foobar.xxx', '-o', 'foobar.bc'] + flags, check=expect_success, stderr=PIPE)
if not expect_success:
self.assertNotEqual(proc.returncode, 0)
self.assertContained("unknown suffix", proc.stderr)
def test_multiply_defined_libsymbols(self):
lib_name = 'libA.c'
a2_name = 'a2.c'
b2_name = 'b2.c'
main_name = 'main.c'
create_test_file(lib_name, 'int mult() { return 1; }')
create_test_file(a2_name, 'void x() {}')
create_test_file(b2_name, 'void y() {}')
create_test_file(main_name, r'''
#include <stdio.h>
int mult();
int main() {
printf("result: %d\n", mult());
return 0;
}
''')
building.emcc(lib_name, output_filename='libA.so')
building.emcc(a2_name, ['-L.', '-lA'])
building.emcc(b2_name, ['-L.', '-lA'])
building.emcc(main_name, ['-L.', '-lA', a2_name + '.o', b2_name + '.o'], output_filename='a.out.js')
self.assertContained('result: 1', run_js('a.out.js'))
def test_multiply_defined_libsymbols_2(self):
a = "int x() { return 55; }"
a_name = 'a.c'
create_test_file(a_name, a)
b = "int y() { return 2; }"
b_name = 'b.c'
create_test_file(b_name, b)
c = "int z() { return 5; }"
c_name = 'c.c'
create_test_file(c_name, c)
main = r'''
#include <stdio.h>
int x();
int y();
int z();
int main() {
printf("result: %d\n", x() + y() + z());
return 0;
}
'''
main_name = 'main.c'
create_test_file(main_name, main)
building.emcc(a_name) # a.c.o
building.emcc(b_name) # b.c.o
building.emcc(c_name) # c.c.o
lib_name = 'libLIB.a'
building.emar('cr', lib_name, [a_name + '.o', b_name + '.o']) # libLIB.a with a and b
# a is in the lib AND in an .o, so should be ignored in the lib. We do still need b from the lib though
building.emcc(main_name, [a_name + '.o', c_name + '.o', '-L.', '-lLIB'], output_filename='a.out.js')
self.assertContained('result: 62', run_js('a.out.js'))
@no_wasm_backend('not relevant with lld')
def test_link_group(self):
lib_src_name = 'lib.c'
create_test_file(lib_src_name, 'int x() { return 42; }')
main_name = 'main.c'
create_test_file(main_name, r'''
#include <stdio.h>
int x();
int main() {
printf("result: %d\n", x());
return 0;
}
''')
building.emcc(lib_src_name) # lib.c.o
lib_name = 'libLIB.a'
building.emar('cr', lib_name, [lib_src_name + '.o']) # libLIB.a with lib.c.o
def test(lib_args, err_expected):
print(err_expected)
output = run_process([EMCC, main_name, '-o', 'a.out.js'] + lib_args, stdout=PIPE, stderr=PIPE, check=not err_expected)
if err_expected:
self.assertContained(err_expected, output.stderr)
else:
self.assertNotContained('undefined symbol', output.stderr)
out_js = 'a.out.js'
self.assertExists(out_js, output.stdout + '\n' + output.stderr)
self.assertContained('result: 42', run_js(out_js))
test(['-Wl,--start-group', lib_name, '-Wl,--start-group'], 'Nested --start-group, missing --end-group?')
test(['-Wl,--end-group', lib_name, '-Wl,--start-group'], '--end-group without --start-group')
test(['-Wl,--start-group', lib_name, '-Wl,--end-group'], None)
test(['-Wl,--start-group', lib_name], None)
print('embind test with groups')
main_name = 'main.cpp'
create_test_file(main_name, r'''
#include <stdio.h>
#include <emscripten/val.h>
using namespace emscripten;
extern "C" int x();
int main() {
int y = -x();
y = val::global("Math").call<int>("abs", y);
printf("result: %d\n", y);
return 0;
}
''')
test(['-Wl,--start-group', lib_name, '-Wl,--end-group', '--bind'], None)
def test_whole_archive(self):
# Verify that -Wl,--whole-archive includes the static constructor from the
# otherwise unreferenced library.
run_process([EMCC, '-c', '-o', 'main.o', path_from_root('tests', 'test_whole_archive', 'main.c')])
run_process([EMCC, '-c', '-o', 'testlib.o', path_from_root('tests', 'test_whole_archive', 'testlib.c')])
run_process([EMAR, 'crs', 'libtest.a', 'testlib.o'])
run_process([EMCC, '-Wl,--whole-archive', 'libtest.a', '-Wl,--no-whole-archive', 'main.o'])
self.assertContained('foo is: 42\n', run_js('a.out.js'))
run_process([EMCC, '-Wl,-whole-archive', 'libtest.a', '-Wl,-no-whole-archive', 'main.o'])
self.assertContained('foo is: 42\n', run_js('a.out.js'))
# Verify the --no-whole-archive prevents the inclusion of the ctor
run_process([EMCC, '-Wl,-whole-archive', '-Wl,--no-whole-archive', 'libtest.a', 'main.o'])
self.assertContained('foo is: 0\n', run_js('a.out.js'))
def test_link_group_bitcode(self):
create_test_file('1.c', r'''
int f(void);
int main() {
f();
return 0;
}
''')
create_test_file('2.c', r'''
#include <stdio.h>
int f() {
printf("Hello\n");
return 0;
}
''')
run_process([EMCC, '-o', '1.o', '1.c'])
run_process([EMCC, '-o', '2.o', '2.c'])
run_process([EMAR, 'crs', '2.a', '2.o'])
run_process([EMCC, '-o', 'out.bc', '-Wl,--start-group', '2.a', '1.o', '-Wl,--end-group'])
run_process([EMCC, 'out.bc'])
self.assertContained('Hello', run_js('a.out.js'))
@no_wasm_backend('lld resolves circular lib dependencies')
def test_circular_libs(self):
def tmp_source(name, code):
with open(name, 'w') as f:
f.write(code)
tmp_source('a.c', 'int z(); int x() { return z(); }')
tmp_source('b.c', 'int x(); int y() { return x(); } int z() { return 42; }')
tmp_source('c.c', 'int q() { return 0; }')
tmp_source('main.c', r'''
#include <stdio.h>
int y();
int main() {
printf("result: %d\n", y());
return 0;
}
''')
building.emcc('a.c') # a.c.o
building.emcc('b.c') # b.c.o
building.emcc('c.c')
building.emar('cr', 'libA.a', ['a.c.o', 'c.c.o'])
building.emar('cr', 'libB.a', ['b.c.o', 'c.c.o'])
args = ['main.c', '-o', 'a.out.js']
libs_list = ['libA.a', 'libB.a']
# 'libA.a' does not satisfy any symbols from main, so it will not be included,
# and there will be an undefined symbol.
err = self.expect_fail([EMCC] + args + libs_list)
self.assertContained('error: undefined symbol: x', err)
# -Wl,--start-group and -Wl,--end-group around the libs will cause a rescan
# of 'libA.a' after 'libB.a' adds undefined symbol "x", so a.c.o will now be
# included (and the link will succeed).
libs = ['-Wl,--start-group'] + libs_list + ['-Wl,--end-group']
run_process([EMCC] + args + libs)
self.assertContained('result: 42', run_js('a.out.js'))
# -( and -) should also work.
args = ['main.c', '-o', 'a2.out.js']
libs = ['-Wl,-('] + libs_list + ['-Wl,-)']
run_process([EMCC] + args + libs)
self.assertContained('result: 42', run_js('a2.out.js'))
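# Sketch of the group-rescan semantics tested above: outside a group the linker
# scans each archive once, left to right, so a symbol that only becomes needed
# after libB.a is read is never looked up again in the already-scanned libA.a;
# wrapping both archives in a group makes the linker rescan until nothing new resolves:
#   run_process([EMCC, 'main.c', '-Wl,--start-group', 'libA.a', 'libB.a', '-Wl,--end-group'])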
# The fastcomp path will deliberately ignore duplicate input files in order
# to allow "libA.so" on the command line twice. There is no real .so support;
# the .so files are actually bitcode.
@no_wasm_backend('tests legacy .so linking behaviour')
@needs_dlfcn
def test_redundant_link(self):
lib = "int mult() { return 1; }"
lib_name = 'libA.c'
create_test_file(lib_name, lib)
main = r'''
#include <stdio.h>
int mult();
int main() {
printf("result: %d\n", mult());
return 0;
}
'''
main_name = 'main.c'
create_test_file(main_name, main)
building.emcc(lib_name, output_filename='libA.so')
building.emcc(main_name, ['libA.so', 'libA.so'], output_filename='a.out.js')
self.assertContained('result: 1', run_js('a.out.js'))
def test_dot_a_all_contents_invalid(self):
# check that we error if an object file in a .a is not valid bitcode.
# do not silently ignore native object files, which may have been
# built by mistake
create_test_file('native.c', 'int native() { return 5; }')
create_test_file('main.c', 'extern int native(); int main() { return native(); }')
run_process([CLANG_CC, 'native.c', '-target', 'x86_64-linux', '-c', '-o', 'native.o'])
run_process([EMAR, 'crs', 'libfoo.a', 'native.o'])
stderr = self.expect_fail([EMCC, 'main.c', 'libfoo.a'])
self.assertContained('unknown file type', stderr)
def test_export_all(self):
lib = r'''
#include <stdio.h>
void libf1() { printf("libf1\n"); }
void libf2() { printf("libf2\n"); }
'''
create_test_file('lib.c', lib)
create_test_file('main.js', '''
var Module = {
onRuntimeInitialized: function() {
_libf1();
_libf2();
}
};
''')
building.emcc('lib.c', ['-s', 'EXPORT_ALL', '-s', 'LINKABLE', '--pre-js', 'main.js'], output_filename='a.out.js')
self.assertContained('libf1\nlibf2\n', run_js('a.out.js'))
def test_export_all_and_exported_functions(self):
# EXPORT_ALL should not export library functions by default.
# This means that to export a library function you also need to explicitly
# list it in EXPORTED_FUNCTIONS.
lib = r'''
#include <stdio.h>
#include <emscripten.h>
EMSCRIPTEN_KEEPALIVE void libfunc() { puts("libfunc\n"); }
'''
create_test_file('lib.c', lib)
create_test_file('main.js', '''
var Module = {
onRuntimeInitialized: function() {
_libfunc();
__get_daylight();
}
};
''')
# __get_daylight should not be linked by default, even with EXPORT_ALL
building.emcc('lib.c', ['-s', 'EXPORT_ALL', '--pre-js', 'main.js'], output_filename='a.out.js')
err = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
self.assertContained('__get_daylight is not defined', err)
building.emcc('lib.c', ['-s', "EXPORTED_FUNCTIONS=['__get_daylight']", '-s', 'EXPORT_ALL', '--pre-js', 'main.js'], output_filename='a.out.js')
self.assertContained('libfunc\n', run_js('a.out.js'))
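# Sketch of the pattern verified above, combining EXPORT_ALL with an explicit
# export of a JS-library symbol (file name illustrative):
#   run_process([EMCC, 'lib.c', '-s', 'EXPORT_ALL',
#                '-s', "EXPORTED_FUNCTIONS=['__get_daylight']"])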
def test_stdin(self):
def run_test():
for engine in JS_ENGINES:
if engine == V8_ENGINE:
continue # no stdin support in v8 shell
engine[0] = os.path.normpath(engine[0])
print(engine, file=sys.stderr)
# work around a bug in python's subprocess module
# (we'd use run_js() normally)
try_delete('out.txt')
cmd = jsrun.make_command(os.path.normpath('out.js'), engine)
cmd = ' '.join(building.doublequote_spaces(cmd))
if WINDOWS:
os.system('type "in.txt" | {} >out.txt'.format(cmd))
else: # posix
os.system('cat in.txt | {} > out.txt'.format(cmd))
self.assertContained('abcdef\nghijkl\neof', open('out.txt').read())
building.emcc(path_from_root('tests', 'module', 'test_stdin.c'), output_filename='out.js')
create_test_file('in.txt', 'abcdef\nghijkl')
run_test()
building.emcc(path_from_root('tests', 'module', 'test_stdin.c'),
['-O2', '--closure', '1'], output_filename='out.js')
run_test()
def test_ungetc_fscanf(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
int main(int argc, char const *argv[])
{
char str[4] = {0};
FILE* f = fopen("my_test.input", "r");
if (f == NULL) {
printf("cannot open file\n");
return -1;
}
ungetc('x', f);
ungetc('y', f);
ungetc('z', f);
fscanf(f, "%3s", str);
printf("%s\n", str);
return 0;
}
''')
create_test_file('my_test.input', 'abc')
building.emcc('main.cpp', ['--embed-file', 'my_test.input'], output_filename='a.out.js')
self.assertContained('zyx', run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_abspaths(self):
# Includes with absolute paths are generally dangerous, things like -I/usr/.. will get to system local headers, not our portable ones.
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'main.c')
for args, expected in [(['-I/usr/something', '-Wwarn-absolute-paths'], True),
(['-L/usr/something', '-Wwarn-absolute-paths'], True),
(['-I/usr/something'], False),
(['-L/usr/something'], False),
(['-I/usr/something', '-Wno-warn-absolute-paths'], False),
(['-L/usr/something', '-Wno-warn-absolute-paths'], False),
(['-Isubdir/something', '-Wwarn-absolute-paths'], False),
(['-Lsubdir/something', '-Wwarn-absolute-paths'], False),
([], False)]:
print(args, expected)
proc = run_process([EMCC, 'main.c'] + args, stderr=PIPE)
WARNING = 'encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript)'
self.assertContainedIf(WARNING, proc.stderr, expected)
def test_local_link(self):
# Linking a local library directly, like /usr/lib/libsomething.so, cannot work of course since it
# doesn't contain bitcode. However, when we see that, we should look for a bitcode file for that
# library in the -L paths and in system/lib
create_test_file('main.cpp', '''
extern void printey();
int main() {
printey();
return 0;
}
''')
ensure_dir('subdir')
open(os.path.join('subdir', 'libfile.so'), 'w').write('this is not llvm bitcode!')
create_test_file('libfile.cpp', '''
#include <stdio.h>
void printey() {
printf("hello from lib\\n");
}
''')
run_process([EMCC, 'libfile.cpp', '-o', 'libfile.so'], stderr=PIPE)
run_process([EMCC, 'main.cpp', os.path.join('subdir', 'libfile.so'), '-L.'])
self.assertContained('hello from lib', run_js('a.out.js'))
def test_identical_basenames(self):
# Issue 287: files in different dirs but with the same basename get confused as the same,
# causing multiply defined symbol errors
ensure_dir('foo')
ensure_dir('bar')
open(os.path.join('foo', 'main.cpp'), 'w').write('''
extern void printey();
int main() {
printey();
return 0;
}
''')
open(os.path.join('bar', 'main.cpp'), 'w').write('''
#include <stdio.h>
void printey() { printf("hello there\\n"); }
''')
run_process([EMCC, os.path.join('foo', 'main.cpp'), os.path.join('bar', 'main.cpp')])
self.assertContained('hello there', run_js('a.out.js'))
# ditto with first creating .o files
try_delete('a.out.js')
run_process([EMCC, os.path.join('foo', 'main.cpp'), '-o', os.path.join('foo', 'main.o')])
run_process([EMCC, os.path.join('bar', 'main.cpp'), '-o', os.path.join('bar', 'main.o')])
run_process([EMCC, os.path.join('foo', 'main.o'), os.path.join('bar', 'main.o')])
self.assertContained('hello there', run_js('a.out.js'))
def test_main_a(self):
# if main() is in a .a, we need to pull in that .a
main_name = 'main.c'
create_test_file(main_name, r'''
#include <stdio.h>
extern int f();
int main() {
printf("result: %d.\n", f());
return 0;
}
''')
other_name = 'other.c'
create_test_file(other_name, r'''
#include <stdio.h>
int f() { return 12346; }
''')
run_process([EMCC, main_name, '-c', '-o', main_name + '.bc'])
run_process([EMCC, other_name, '-c', '-o', other_name + '.bc'])
run_process([EMAR, 'cr', main_name + '.a', main_name + '.bc'])
run_process([EMCC, other_name + '.bc', main_name + '.a'])
self.assertContained('result: 12346.', run_js('a.out.js'))
def test_multiple_archives_duplicate_basenames(self):
create_test_file('common.c', r'''
#include <stdio.h>
void a(void) {
printf("a\n");
}
''')
run_process([EMCC, 'common.c', '-c', '-o', 'common.o'])
try_delete('liba.a')
run_process([EMAR, 'rc', 'liba.a', 'common.o'])
create_test_file('common.c', r'''
#include <stdio.h>
void b(void) {
printf("b\n");
}
''')
run_process([EMCC, 'common.c', '-c', '-o', 'common.o'])
try_delete('libb.a')
run_process([EMAR, 'rc', 'libb.a', 'common.o'])
create_test_file('main.c', r'''
void a(void);
void b(void);
int main() {
a();
b();
}
''')
run_process([EMCC, 'main.c', '-L.', '-la', '-lb'])
self.assertContained('a\nb\n', run_js('a.out.js'))
def test_archive_duplicate_basenames(self):
ensure_dir('a')
create_test_file(os.path.join('a', 'common.c'), r'''
#include <stdio.h>
void a(void) {
printf("a\n");
}
''')
run_process([EMCC, os.path.join('a', 'common.c'), '-c', '-o', os.path.join('a', 'common.o')])
ensure_dir('b')
create_test_file(os.path.join('b', 'common.c'), r'''
#include <stdio.h>
void b(void) {
printf("b...\n");
}
''')
run_process([EMCC, os.path.join('b', 'common.c'), '-c', '-o', os.path.join('b', 'common.o')])
try_delete('liba.a')
run_process([EMAR, 'rc', 'liba.a', os.path.join('a', 'common.o'), os.path.join('b', 'common.o')])
# Verify that archive contains basenames with hashes to avoid duplication
text = run_process([EMAR, 't', 'liba.a'], stdout=PIPE).stdout
self.assertEqual(text.count('common'), 2)
for line in text.split('\n'):
# should not have huge hash names
self.assertLess(len(line), 20, line)
create_test_file('main.c', r'''
void a(void);
void b(void);
int main() {
a();
b();
}
''')
err = run_process([EMCC, 'main.c', '-L.', '-la'], stderr=PIPE).stderr
self.assertNotIn('archive file contains duplicate entries', err)
self.assertContained('a\nb...\n', run_js('a.out.js'))
# Using llvm-ar directly should cause duplicate basenames
try_delete('libdup.a')
run_process([LLVM_AR, 'rc', 'libdup.a', os.path.join('a', 'common.o'), os.path.join('b', 'common.o')])
text = run_process([EMAR, 't', 'libdup.a'], stdout=PIPE).stdout
self.assertEqual(text.count('common.o'), 2)
# With fastcomp we don't support duplicate members so this should generate
# a warning. With the wasm backend (lld) this is fully supported.
cmd = [EMCC, 'main.c', '-L.', '-ldup']
if self.is_wasm_backend():
run_process(cmd)
self.assertContained('a\nb...\n', run_js('a.out.js'))
else:
err = self.expect_fail(cmd)
self.assertIn('libdup.a: archive file contains duplicate entries', err)
self.assertIn('error: undefined symbol: a', err)
# others are not duplicates - the hashing keeps them separate
self.assertEqual(err.count('duplicate: '), 1)
self.assertContained('a\nb...\n', run_js('a.out.js'))
def test_export_from_archive(self):
export_name = 'this_is_an_entry_point'
full_export_name = '_' + export_name
# The wasm backend exports symbols without the leading '_'
if self.is_wasm_backend():
expect_export = export_name
else:
expect_export = full_export_name
create_test_file('export.c', r'''
#include <stdio.h>
void %s(void) {
printf("Hello, world!\n");
}
''' % export_name)
run_process([EMCC, 'export.c', '-c', '-o', 'export.o'])
run_process([EMAR, 'rc', 'libexport.a', 'export.o'])
create_test_file('main.c', r'''
int main() {
return 0;
}
''')
# Sanity check: the symbol should not be linked in if not requested.
run_process([EMCC, 'main.c', '-L.', '-lexport'])
self.assertFalse(self.is_exported_in_wasm(expect_export, 'a.out.wasm'))
# Sanity check: exporting without a definition does not cause it to appear.
# Note: exporting main prevents emcc from warning that it generated no code.
run_process([EMCC, 'main.c', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0', '-s', "EXPORTED_FUNCTIONS=['_main', '%s']" % full_export_name])
self.assertFalse(self.is_exported_in_wasm(expect_export, 'a.out.wasm'))
# Actual test: defining symbol in library and exporting it causes it to appear in the output.
run_process([EMCC, 'main.c', '-L.', '-lexport', '-s', "EXPORTED_FUNCTIONS=['%s']" % full_export_name])
self.assertTrue(self.is_exported_in_wasm(expect_export, 'a.out.wasm'))
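# Sketch of the archive-export pattern verified above: a symbol that lives only
# in a static library is pulled into the link (and kept alive) by naming it in
# EXPORTED_FUNCTIONS:
#   run_process([EMCC, 'main.c', '-L.', '-lexport',
#                '-s', "EXPORTED_FUNCTIONS=['_this_is_an_entry_point']"])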
def test_embed_file(self):
create_test_file('somefile.txt', 'hello from a file with lots of data and stuff in it thank you very much')
create_test_file('main.cpp', r'''
#include <stdio.h>
int main() {
FILE *f = fopen("somefile.txt", "r");
char buf[100];
fread(buf, 1, 20, f);
buf[20] = 0;
fclose(f);
printf("|%s|\n", buf);
return 0;
}
''')
run_process([EMCC, 'main.cpp', '--embed-file', 'somefile.txt'])
self.assertContained('|hello from a file wi|', run_js('a.out.js'))
# embedding the same file twice should not error
run_process([EMCC, 'main.cpp', '--embed-file', 'somefile.txt', '--embed-file', 'somefile.txt'])
self.assertContained('|hello from a file wi|', run_js('a.out.js'))
def test_embed_file_dup(self):
ensure_dir(self.in_dir('tst', 'test1'))
ensure_dir(self.in_dir('tst', 'test2'))
open(self.in_dir('tst', 'aa.txt'), 'w').write('frist')
open(self.in_dir('tst', 'test1', 'aa.txt'), 'w').write('sacond')
open(self.in_dir('tst', 'test2', 'aa.txt'), 'w').write('thard')
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <string.h>
void print_file(const char *name) {
FILE *f = fopen(name, "r");
char buf[100];
memset(buf, 0, 100);
fread(buf, 1, 20, f);
buf[20] = 0;
fclose(f);
printf("|%s|\n", buf);
}
int main() {
print_file("tst/aa.txt");
print_file("tst/test1/aa.txt");
print_file("tst/test2/aa.txt");
return 0;
}
''')
run_process([EMCC, 'main.cpp', '--embed-file', 'tst'])
self.assertContained('|frist|\n|sacond|\n|thard|\n', run_js('a.out.js'))
def test_exclude_file(self):
ensure_dir(self.in_dir('tst', 'abc.exe'))
ensure_dir(self.in_dir('tst', 'abc.txt'))
open(self.in_dir('tst', 'hello.exe'), 'w').write('hello')
open(self.in_dir('tst', 'hello.txt'), 'w').write('world')
open(self.in_dir('tst', 'abc.exe', 'foo'), 'w').write('emscripten')
open(self.in_dir('tst', 'abc.txt', 'bar'), 'w').write('!!!')
create_test_file('main.cpp', r'''
#include <stdio.h>
int main() {
if(fopen("tst/hello.exe", "rb")) printf("Failed\n");
if(!fopen("tst/hello.txt", "rb")) printf("Failed\n");
if(fopen("tst/abc.exe/foo", "rb")) printf("Failed\n");
if(!fopen("tst/abc.txt/bar", "rb")) printf("Failed\n");
return 0;
}
''')
run_process([EMCC, 'main.cpp', '--embed-file', 'tst', '--exclude-file', '*.exe'])
self.assertEqual(run_js('a.out.js').strip(), '')
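# Sketch of combining --embed-file with --exclude-file globs, as tested above
# ('assets' is an illustrative directory name):
#   run_process([EMCC, 'main.cpp', '--embed-file', 'assets',
#                '--exclude-file', '*.exe'])  # embed 'assets' minus any *.exe entries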
def test_multidynamic_link(self):
# Linking the same dynamic library in twice would normally error, since we statically link it, causing duplicate symbols
def test(link_cmd, lib_suffix=''):
print(link_cmd, lib_suffix)
self.clear()
ensure_dir('libdir')
create_test_file('main.cpp', r'''
#include <stdio.h>
extern void printey();
extern void printother();
int main() {
printf("*");
printey();
printf("\n");
printother();
printf("\n");
printf("*");
return 0;
}
''')
open(os.path.join('libdir', 'libfile.cpp'), 'w').write('''
#include <stdio.h>
void printey() {
printf("hello from lib");
}
''')
open(os.path.join('libdir', 'libother.cpp'), 'w').write('''
#include <stdio.h>
extern void printey();
void printother() {
printf("|");
printey();
printf("|");
}
''')
compiler = [EMCC]
# Build libfile normally into an .so
run_process(compiler + [os.path.join('libdir', 'libfile.cpp'), '-o', os.path.join('libdir', 'libfile.so' + lib_suffix)])
# Build libother and dynamically link it to libfile
run_process(compiler + [os.path.join('libdir', 'libother.cpp')] + link_cmd + ['-o', os.path.join('libdir', 'libother.so')])
# Build the main file, linking in both the libs
run_process(compiler + [os.path.join('main.cpp')] + link_cmd + ['-lother', '-c'])
print('...')
# The normal build steps are done. We need an additional step to link in the dynamic libraries, since we ignored them before
run_process([EMCC, 'main.o'] + link_cmd + ['-lother', '-s', 'EXIT_RUNTIME=1'])
self.assertContained('*hello from lib\n|hello from lib|\n*', run_js('a.out.js'))
test(['-L' + 'libdir', '-lfile']) # -l, auto detection from library path
test(['-L' + 'libdir', self.in_dir('libdir', 'libfile.so.3.1.4.1.5.9')], '.3.1.4.1.5.9') # handle libX.so.1.2.3 as well
def test_js_link(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('before.js', '''
var MESSAGE = 'hello from js';
// Module is initialized with empty object by default, so if there are no keys - nothing was run yet
if (Object.keys(Module).length) throw 'This code should run before anything else!';
''')
create_test_file('after.js', '''
out(MESSAGE);
''')
run_process([EMCC, 'main.cpp', '--pre-js', 'before.js', '--post-js', 'after.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
self.assertContained('hello from main\nhello from js\n', run_js('a.out.js'))
def test_sdl_endianness(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <SDL/SDL.h>
int main() {
printf("%d, %d, %d\n", SDL_BYTEORDER, SDL_LIL_ENDIAN, SDL_BIG_ENDIAN);
return 0;
}
''')
run_process([EMCC, 'main.cpp'])
self.assertContained('1234, 1234, 4321\n', run_js('a.out.js'))
def test_sdl2_mixer(self):
building.emcc(path_from_root('tests', 'sdl2_mixer.c'), ['-s', 'USE_SDL_MIXER=2'], output_filename='a.out.js')
def test_libpng(self):
shutil.copyfile(path_from_root('tests', 'pngtest.png'), 'pngtest.png')
building.emcc(path_from_root('tests', 'pngtest.c'), ['--embed-file', 'pngtest.png', '-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1'], output_filename='a.out.js')
self.assertContained('TESTS PASSED', run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_libjpeg(self):
shutil.copyfile(path_from_root('tests', 'screenshot.jpg'), 'screenshot.jpg')
building.emcc(path_from_root('tests', 'jpeg_test.c'), ['--embed-file', 'screenshot.jpg', '-s', 'USE_LIBJPEG=1'], output_filename='a.out.js')
self.assertContained('Image is 600 by 450 with 3 components', run_js('a.out.js', args=['screenshot.jpg'], stdout=PIPE, stderr=PIPE))
def test_bullet(self):
building.emcc(path_from_root('tests', 'bullet_hello_world.cpp'), ['-s', 'USE_BULLET=1'], output_filename='a.out.js')
self.assertContained('BULLET RUNNING', run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_vorbis(self):
# This will also test if ogg compiles, because vorbis depends on ogg
building.emcc(path_from_root('tests', 'vorbis_test.c'), ['-s', 'USE_VORBIS=1'], output_filename='a.out.js')
self.assertContained('ALL OK', run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_bzip2(self):
building.emcc(path_from_root('tests', 'bzip2_test.c'), ['-s', 'USE_BZIP2=1'], output_filename='a.out.js')
self.assertContained("usage: unzcrash filename", run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_freetype(self):
# copy the Liberation Sans Bold truetype file located in the
# <emscripten_root>/tests/freetype to the compilation folder
shutil.copy2(path_from_root('tests/freetype', 'LiberationSansBold.ttf'), os.getcwd())
# build test program with the font file embed in it
building.emcc(path_from_root('tests', 'freetype_test.c'), ['-s', 'USE_FREETYPE=1', '--embed-file', 'LiberationSansBold.ttf'], output_filename='a.out.js')
# the test program will print an ascii representation of a bitmap where the
# 'w' character has been rendered using the Liberation Sans Bold font
expectedOutput = ' \n' + \
' \n' + \
' \n' + \
' \n' + \
'*** +***+ \n' + \
'***+ ***** +\n' + \
'+**+ ***** +\n' + \
'+*** +**+**+ *\n' + \
' ***+ ***+**+ +*\n' + \
' +**+ *** *** +*\n' + \
' +**++**+ +**+**\n' + \
' ***+**+ +**+**\n' + \
' ****** *****\n' + \
' +****+ +****\n' + \
' +****+ +****\n' + \
' **** ****'
self.assertContained(expectedOutput, run_process(JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_link_memcpy(self):
# memcpy can show up *after* optimizations, so after our opportunity to link in libc, so it must be special-cased
create_test_file('main.cpp', r'''
#include <stdio.h>
int main(int argc, char **argv) {
int num = argc + 10;
char buf[num], buf2[num];
for (int i = 0; i < num; i++) {
buf[i] = i*i+i/3;
}
for (int i = 1; i < num; i++) {
buf[i] += buf[i-1];
}
for (int i = 0; i < num; i++) {
buf2[i] = buf[i];
}
for (int i = 1; i < num; i++) {
buf2[i] += buf2[i-1];
}
for (int i = 0; i < num; i++) {
printf("%d:%d\n", i, buf2[i]);
}
return 0;
}
''')
run_process([EMCC, '-O2', 'main.cpp'])
output = run_js('a.out.js', full_output=True, stderr=PIPE)
self.assertContained('''0:0
1:1
2:6
3:21
4:53
5:111
6:-49
7:98
8:55
9:96
10:-16
''', output)
self.assertNotContained('warning: library.js memcpy should not be running, it is only for testing!', output)
def test_undefined_function(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.cpp')]
run_process(cmd)
# adding a missing symbol to EXPORTED_FUNCTIONS should cause failure
cmd += ['-s', "EXPORTED_FUNCTIONS=['foobar']"]
err = self.expect_fail(cmd)
self.assertContained('undefined exported function: "foobar"', err)
# setting ERROR_ON_UNDEFINED_SYMBOLS=0 suppresses error
cmd += ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0']
run_process(cmd)
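# Sketch of the behavior tested above: an unknown name in EXPORTED_FUNCTIONS
# fails the link unless the undefined-symbol check is relaxed:
#   run_process([EMCC, 'main.cpp', '-s', "EXPORTED_FUNCTIONS=['foobar']",
#                '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])  # downgraded to a warning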
def test_undefined_symbols(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <SDL.h>
#include "SDL/SDL_opengl.h"
extern "C" {
void something();
void elsey();
}
int main() {
printf("%p", SDL_GL_GetProcAddress("glGenTextures")); // pull in gl proc stuff, avoid warnings on emulation funcs
something();
elsey();
return 0;
}
''')
for args in ([], ['-O1'], ['-s', 'MAX_WEBGL_VERSION=2']):
for action in ('WARN', 'ERROR', None):
for value in ([0, 1]):
try_delete('a.out.js')
print('checking "%s" %s=%s' % (args, action, value))
extra = ['-s', action + '_ON_UNDEFINED_SYMBOLS=%d' % value] if action else []
proc = run_process([EMCC, 'main.cpp'] + extra + args, stderr=PIPE, check=False)
print(proc.stderr)
if value or action is None:
# The default is that we error on undefined symbols
self.assertContained('error: undefined symbol: something', proc.stderr)
self.assertContained('error: undefined symbol: elsey', proc.stderr)
check_success = False
elif action == 'ERROR' and not value:
# Error disables, should only warn
self.assertContained('warning: undefined symbol: something', proc.stderr)
self.assertContained('warning: undefined symbol: elsey', proc.stderr)
self.assertNotContained('undefined symbol: emscripten_', proc.stderr)
check_success = True
elif action == 'WARN' and not value:
# Disabled warning should imply disabling errors
self.assertNotContained('undefined symbol', proc.stderr)
check_success = True
if check_success:
self.assertEqual(proc.returncode, 0)
self.assertTrue(os.path.exists('a.out.js'))
else:
self.assertNotEqual(proc.returncode, 0)
self.assertFalse(os.path.exists('a.out.js'))
def test_GetProcAddress_LEGACY_GL_EMULATION(self):
# without legacy gl emulation, getting a proc from there should fail
self.do_other_test(os.path.join('other', 'GetProcAddress_LEGACY_GL_EMULATION'), run_args=['0'], emcc_args=['-s', 'LEGACY_GL_EMULATION=0'])
# with it, it should work
self.do_other_test(os.path.join('other', 'GetProcAddress_LEGACY_GL_EMULATION'), run_args=['1'], emcc_args=['-s', 'LEGACY_GL_EMULATION=1'])
def test_prepost(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
postRun: function() { out('post-run') }
};
''')
run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
self.assertContained('pre-run\nhello from main\npost-run\n', run_js('a.out.js'))
# addRunDependency during preRun should prevent main, and post-run from
# running.
with open('pre.js', 'a') as f:
f.write('Module.preRun = function() { out("add-dep"); addRunDependency(); }\n')
run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
output = run_js('a.out.js')
self.assertContained('add-dep\n', output)
self.assertNotContained('hello from main\n', output)
self.assertNotContained('post-run\n', output)
# noInitialRun prevents run
for no_initial_run, run_dep in [(0, 0), (1, 0), (0, 1)]:
print(no_initial_run, run_dep)
args = ['-s', 'WASM_ASYNC_COMPILATION=0', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain"]']
if no_initial_run:
args += ['-s', 'INVOKE_RUN=0']
if run_dep:
create_test_file('pre.js', 'Module.preRun = function() { addRunDependency("test"); }')
create_test_file('post.js', 'removeRunDependency("test");')
args += ['--pre-js', 'pre.js', '--post-js', 'post.js']
run_process([EMCC, 'main.cpp'] + args)
output = run_js('a.out.js')
self.assertContainedIf('hello from main', output, not no_initial_run)
if no_initial_run:
# Calling main later should still work, filesystem etc. must be set up.
print('call main later')
src = open('a.out.js').read()
src += '\nModule.callMain();\n'
create_test_file('a.out.js', src)
self.assertContained('hello from main', run_js('a.out.js'))
# Use postInit
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
postRun: function() { out('post-run') },
preInit: function() { out('pre-init') }
};
''')
run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js'])
self.assertContained('pre-init\npre-run\nhello from main\npost-run\n', run_js('a.out.js'))
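# Sketch of gating startup on an async resource with run dependencies, the
# mechanism exercised above (the dependency name 'cfg' is illustrative):
#   create_test_file('pre.js', 'Module.preRun = function() { addRunDependency("cfg"); };')
#   create_test_file('post.js', 'removeRunDependency("cfg"); // startup proceeds once the count hits zero')
#   run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '--post-js', 'post.js'])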
def test_prepost2(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
};
''')
create_test_file('pre2.js', '''
Module.postRun = function() { out('post-run') };
''')
run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '--pre-js', 'pre2.js'])
self.assertContained('pre-run\nhello from main\npost-run\n', run_js('a.out.js'))
def test_prepre(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: [function() { out('pre-run') }],
};
''')
create_test_file('pre2.js', '''
Module.preRun.push(function() { out('prepre') });
''')
run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '--pre-js', 'pre2.js'])
self.assertContained('prepre\npre-run\nhello from main\n', run_js('a.out.js'))
def test_extern_prepost(self):
create_test_file('extern-pre.js', '''
// I am an external pre.
''')
create_test_file('extern-post.js', '''
// I am an external post.
''')
run_process([EMCC, '-O2', path_from_root('tests', 'hello_world.c'), '--extern-pre-js', 'extern-pre.js', '--extern-post-js', 'extern-post.js'])
# the files should be included, and externally - not as part of optimized
# code, so they are the very first and last things, and they are not
# minified.
with open('a.out.js') as output:
js = output.read()
pre = js.index('// I am an external pre.')
post = js.index('// I am an external post.')
# ignore some slack - newlines and other things. we just care about the
# big picture here
SLACK = 50
self.assertLess(pre, post)
self.assertLess(pre, SLACK)
self.assertGreater(post, len(js) - SLACK)
# make sure the slack is tiny compared to the whole program
self.assertGreater(len(js), 100 * SLACK)
@no_wasm_backend('depends on bc output')
def test_save_bc(self):
cmd = [EMCC, path_from_root('tests', 'hello_world_loop_malloc.cpp'), '--save-bc', 'my_bitcode.bc']
run_process(cmd)
assert 'hello, world!' in run_js('a.out.js')
self.assertExists('my_bitcode.bc')
try_delete('a.out.js')
building.llvm_dis('my_bitcode.bc', 'my_ll.ll')
run_process([EMCC, 'my_ll.ll', '-nostdlib', '-o', 'two.js'])
assert 'hello, world!' in run_js('two.js')
def test_js_optimizer(self):
ACORN_PASSES = ['JSDCE', 'AJSDCE', 'applyImportAndExportNameChanges', 'emitDCEGraph', 'applyDCEGraphRemovals', 'growableHeap', 'unsignPointers', 'asanify']
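# Passes named in ACORN_PASSES are implemented in tools/acorn-optimizer.js;
# all other passes go through the older tools/js-optimizer.js (see the
# dispatch on `acorn` below).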
for input, expected, passes in [
(path_from_root('tests', 'optimizer', 'eliminateDeadGlobals.js'), open(path_from_root('tests', 'optimizer', 'eliminateDeadGlobals-output.js')).read(),
['eliminateDeadGlobals']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-output.js')).read(),
['hoistMultiples', 'removeAssignsToUndefined', 'simplifyExpressions']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-output.js')).read(),
['asm', 'simplifyExpressions']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-si.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-si-output.js')).read(),
['simplifyIfs']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-regs.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-regs-output.js')).read(),
['registerize']),
(path_from_root('tests', 'optimizer', 'eliminator-test.js'), open(path_from_root('tests', 'optimizer', 'eliminator-test-output.js')).read(),
['eliminate']),
(path_from_root('tests', 'optimizer', 'safe-eliminator-test.js'), open(path_from_root('tests', 'optimizer', 'safe-eliminator-test-output.js')).read(),
['eliminateMemSafe']),
(path_from_root('tests', 'optimizer', 'asm-eliminator-test.js'), open(path_from_root('tests', 'optimizer', 'asm-eliminator-test-output.js')).read(),
['asm', 'eliminate']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-output.js')).read(),
['asm', 'registerize']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-harder.js'), [open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-harder-output.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-harder-output2.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-harder-output3.js')).read()],
['asm', 'registerizeHarder']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-min.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-regs-min-output.js')).read(),
['asm', 'registerize', 'minifyLocals']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyLocals.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyLocals-output.js')).read(),
['minifyLocals']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre.js'), [open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-output.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-output2.js')).read()],
['asm', 'simplifyExpressions']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-f32.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-output-f32.js')).read(),
['asm', 'asmPreciseF32', 'simplifyExpressions', 'optimizeFrounds']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-f32.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-pre-output-f32-nosimp.js')).read(),
['asm', 'asmPreciseF32', 'optimizeFrounds']),
(path_from_root('tests', 'optimizer', 'test-reduce-dead-float-return.js'), open(path_from_root('tests', 'optimizer', 'test-reduce-dead-float-return-output.js')).read(),
['asm', 'optimizeFrounds', 'registerizeHarder']),
(path_from_root('tests', 'optimizer', 'test-no-reduce-dead-float-return-to-nothing.js'), open(path_from_root('tests', 'optimizer', 'test-no-reduce-dead-float-return-to-nothing-output.js')).read(),
['asm', 'registerizeHarder']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-last.js'), [open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-lastOpts-output.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-lastOpts-output2.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-lastOpts-output3.js')).read()],
['asm', 'asmLastOpts']),
(path_from_root('tests', 'optimizer', 'asmLastOpts.js'), open(path_from_root('tests', 'optimizer', 'asmLastOpts-output.js')).read(),
['asm', 'asmLastOpts']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-last.js'), [open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-last-output.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-last-output2.js')).read(), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-last-output3.js')).read()],
['asm', 'asmLastOpts', 'last']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-relocate.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-relocate-output.js')).read(),
['asm', 'relocate']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-minlast.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-asm-minlast-output.js')).read(),
['asm', 'minifyWhitespace', 'asmLastOpts', 'last']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-shiftsAggressive.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-shiftsAggressive-output.js')).read(),
['asm', 'aggressiveVariableElimination']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-localCSE.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-localCSE-output.js')).read(),
['asm', 'localCSE']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-ensureLabelSet.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-ensureLabelSet-output.js')).read(),
['asm', 'ensureLabelSet']),
(path_from_root('tests', 'optimizer', '3154.js'), open(path_from_root('tests', 'optimizer', '3154-output.js')).read(),
['asm', 'eliminate', 'registerize', 'asmLastOpts', 'last']),
(path_from_root('tests', 'optimizer', 'safeLabelSetting.js'), open(path_from_root('tests', 'optimizer', 'safeLabelSetting-output.js')).read(),
['asm', 'safeLabelSetting']), # eliminate, just enough to trigger asm normalization/denormalization
(path_from_root('tests', 'optimizer', 'null_if.js'), [open(path_from_root('tests', 'optimizer', 'null_if-output.js')).read(), open(path_from_root('tests', 'optimizer', 'null_if-output2.js')).read()],
['asm', 'registerizeHarder', 'asmLastOpts', 'minifyWhitespace']), # issue 3520
(path_from_root('tests', 'optimizer', 'null_else.js'), [open(path_from_root('tests', 'optimizer', 'null_else-output.js')).read(), open(path_from_root('tests', 'optimizer', 'null_else-output2.js')).read()],
['asm', 'registerizeHarder', 'asmLastOpts', 'minifyWhitespace']), # issue 3549
(path_from_root('tests', 'optimizer', 'test-js-optimizer-splitMemory.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-splitMemory-output.js')).read(),
['splitMemory']),
(path_from_root('tests', 'optimizer', 'JSDCE.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'JSDCE-hasOwnProperty.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-hasOwnProperty-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'JSDCE-fors.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-fors-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'AJSDCE.js'), open(path_from_root('tests', 'optimizer', 'AJSDCE-output.js')).read(),
['AJSDCE']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph2.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph2-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph3.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph3-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph4.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph4-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph5.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph5-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-applyDCEGraphRemovals.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-applyDCEGraphRemovals-output.js')).read(),
['applyDCEGraphRemovals']),
(path_from_root('tests', 'optimizer', 'applyDCEGraphRemovals.js'), open(path_from_root('tests', 'optimizer', 'applyDCEGraphRemovals-output.js')).read(),
['applyDCEGraphRemovals']),
(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges.js'), open(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges-output.js')).read(),
['applyImportAndExportNameChanges']),
(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges2.js'), open(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges2-output.js')).read(),
['applyImportAndExportNameChanges']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-2-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-2-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'standalone-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'standalone-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emittedJSPreservesParens.js'), open(path_from_root('tests', 'optimizer', 'emittedJSPreservesParens-output.js')).read(),
['asm']),
(path_from_root('tests', 'optimizer', 'test-growableHeap.js'), open(path_from_root('tests', 'optimizer', 'test-growableHeap-output.js')).read(),
['growableHeap']),
(path_from_root('tests', 'optimizer', 'test-unsignPointers.js'), open(path_from_root('tests', 'optimizer', 'test-unsignPointers-output.js')).read(),
['unsignPointers']),
(path_from_root('tests', 'optimizer', 'test-asanify.js'), open(path_from_root('tests', 'optimizer', 'test-asanify-output.js')).read(),
['asanify']),
(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyGlobals.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyGlobals-output.js')).read(),
['minifyGlobals']),
]:
print(input, passes)
if not isinstance(expected, list):
expected = [expected]
expected = [out.replace('\n\n', '\n').replace('\n\n', '\n') for out in expected]
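# Note: applying the '\n\n' -> '\n' replacement twice collapses runs of up
# to four newlines, e.g. 'a\n\n\n\nb' -> 'a\n\nb' -> 'a\nb', making the
# comparison below insensitive to blank lines.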
acorn = any(p in ACORN_PASSES for p in passes)
# test calling optimizer
if not acorn:
print(' js')
output = run_process(NODE_JS + [path_from_root('tools', 'js-optimizer.js'), input] + passes, stdin=PIPE, stdout=PIPE).stdout
else:
print(' acorn')
output = run_process(NODE_JS + [path_from_root('tools', 'acorn-optimizer.js'), input] + passes, stdin=PIPE, stdout=PIPE).stdout
def check_js(js, expected):
if 'registerizeHarder' in passes:
# registerizeHarder is hard to test, as names vary by chance, nondeterministically. FIXME
def fix(src):
if type(src) is list:
return list(map(fix, src))
src = '\n'.join([line for line in src.split('\n') if 'var ' not in line]) # ignore vars
def reorder(func):
def swap(func, stuff):
# canonicalize register names by order of first appearance, so that i1,i2 and i2,i1 etc. map to the same STD_n names
for i in stuff:
if i not in func:
return func
indexes = [[i, func.index(i)] for i in stuff]
indexes.sort(key=lambda x: x[1])
for j in range(len(indexes)):
func = func.replace(indexes[j][0], 'STD_' + str(j))
return func
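# Illustration (hypothetical inputs): swap('f(i2, i1)', ['i1', 'i2']) and
# swap('f(i1, i2)', ['i1', 'i2']) both yield 'f(STD_0, STD_1)' - whichever
# name appears first becomes STD_0, so either ordering canonicalizes the same.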
func = swap(func, ['i1', 'i2', 'i3'])
func = swap(func, ['i1', 'i2'])
func = swap(func, ['i4', 'i5'])
return func
src = 'function '.join(map(reorder, src.split('function ')))
return src
js = fix(js)
expected = fix(expected)
self.assertIdentical(expected, js.replace('\r\n', '\n').replace('\n\n', '\n').replace('\n\n', '\n'))
if input not in [ # blacklist of tests that are native-optimizer only
path_from_root('tests', 'optimizer', 'asmLastOpts.js'),
path_from_root('tests', 'optimizer', '3154.js')
]:
check_js(output, expected)
else:
print('(skip non-native)')
if not self.is_wasm_backend() and tools.js_optimizer.use_native(passes) and tools.js_optimizer.get_native_optimizer():
# test calling native
def check_json():
run_process(NODE_JS + [path_from_root('tools', 'js-optimizer.js'), output_temp, 'receiveJSON'], stdin=PIPE, stdout=open(output_temp + '.js', 'w'))
output = open(output_temp + '.js').read()
check_js(output, expected)
self.clear()
input_temp = 'temp.js'
output_temp = 'output.js'
shutil.copyfile(input, input_temp)
run_process(NODE_JS + [path_from_root('tools', 'js-optimizer.js'), input_temp, 'emitJSON'], stdin=PIPE, stdout=open(input_temp + '.js', 'w'))
original = open(input).read()
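# Some optimizer test inputs carry a trailing '// EXTRA_INFO:' comment with
# pass-specific data, which the emitJSON conversion does not preserve, so it
# is re-appended here.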
if '// EXTRA_INFO:' in original:
json = open(input_temp + '.js').read()
json += '\n' + original[original.find('// EXTRA_INFO:'):]
create_test_file(input_temp + '.js', json)
# last is only relevant when we emit JS
if 'last' not in passes and \
'null_if' not in input and 'null_else' not in input: # null-* tests are js optimizer or native, not a mixture (they mix badly)
print(' native (receiveJSON)')
output = run_process([tools.js_optimizer.get_native_optimizer(), input_temp + '.js'] + passes + ['receiveJSON', 'emitJSON'], stdin=PIPE, stdout=open(output_temp, 'w')).stdout
check_json()
print(' native (parsing JS)')
output = run_process([tools.js_optimizer.get_native_optimizer(), input] + passes + ['emitJSON'], stdin=PIPE, stdout=open(output_temp, 'w')).stdout
check_json()
print(' native (emitting JS)')
output = run_process([tools.js_optimizer.get_native_optimizer(), input] + passes, stdin=PIPE, stdout=PIPE).stdout
check_js(output, expected)
@no_fastcomp('wasm2js-only')
def test_js_optimizer_wasm2js(self):
# run the js optimizer in a similar way as wasm2js does
shutil.copyfile(path_from_root('tests', 'optimizer', 'wasm2js.js'), 'wasm2js.js')
run_process([PYTHON, path_from_root('tools', 'js_optimizer.py'), 'wasm2js.js', 'minifyNames', 'last'])
with open(path_from_root('tests', 'optimizer', 'wasm2js-output.js')) as expected:
with open('wasm2js.js.jsopt.js') as actual:
self.assertIdentical(expected.read(), actual.read())
def test_m_mm(self):
create_test_file('foo.c', '#include <emscripten.h>')
for opt in ['M', 'MM']:
proc = run_process([EMCC, 'foo.c', '-' + opt], stdout=PIPE, stderr=PIPE)
assert 'foo.o: ' in proc.stdout, '-%s failed to produce the right output: %s' % (opt, proc.stdout)
assert 'error' not in proc.stderr, 'Unexpected stderr: ' + proc.stderr
@uses_canonical_tmp
def test_emcc_debug_files(self):
for opts in [0, 1, 2, 3]:
for debug in [None, '1', '2']:
print(opts, debug)
if os.path.exists(self.canonical_temp_dir):
shutil.rmtree(self.canonical_temp_dir)
env = os.environ.copy()
if debug is None:
env.pop('EMCC_DEBUG', None)
else:
env['EMCC_DEBUG'] = debug
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-O' + str(opts)], stderr=PIPE, env=env)
if debug is None:
self.assertFalse(os.path.exists(self.canonical_temp_dir))
elif debug == '1':
if self.is_wasm_backend():
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-3-original.js'))
else:
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-0-linktime.bc'))
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-1-original.js'))
elif debug == '2':
if self.is_wasm_backend():
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-3-original.js'))
else:
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-0-basebc.bc'))
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-1-linktime.bc'))
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-2-original.js'))
def test_debuginfo(self):
for args, expect_debug in [
(['-O0'], False),
(['-O0', '-g'], True),
(['-O0', '-g4'], True),
(['-O1'], False),
(['-O1', '-g'], True),
(['-O2'], False),
(['-O2', '-g'], True),
]:
print(args, expect_debug)
err = run_process([EMCC, '-v', path_from_root('tests', 'hello_world.cpp')] + args, stdout=PIPE, stderr=PIPE).stderr
lines = err.splitlines()
if self.is_wasm_backend():
finalize = [l for l in lines if 'wasm-emscripten-finalize' in l][0]
if expect_debug:
self.assertIn(' -g ', finalize)
else:
self.assertNotIn(' -g ', finalize)
else:
if expect_debug:
self.assertNotIn('strip-debug', err)
else:
self.assertIn('strip-debug', err)
@no_fastcomp()
def test_debuginfo_line_tables_only(self):
def test(do_compile):
do_compile([])
no_size = os.path.getsize('a.out.wasm')
do_compile(['-gline-tables-only'])
line_size = os.path.getsize('a.out.wasm')
do_compile(['-g'])
full_size = os.path.getsize('a.out.wasm')
return (no_size, line_size, full_size)
def compile_to_object(compile_args):
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-o', 'a.out.wasm'] + compile_args)
no_size, line_size, full_size = test(compile_to_object)
self.assertLess(no_size, line_size)
self.assertLess(line_size, full_size)
def compile_to_executable(compile_args, link_args):
# compile with the specified args
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-o', 'a.o'] + compile_args)
# link with debug info
run_process([EMCC, 'a.o'] + link_args)
def compile_to_debug_executable(compile_args):
return compile_to_executable(compile_args, ['-g'])
no_size, line_size, full_size = test(compile_to_debug_executable)
self.assertLess(no_size, line_size)
self.assertLess(line_size, full_size)
def compile_to_release_executable(compile_args):
return compile_to_executable(compile_args, [])
no_size, line_size, full_size = test(compile_to_release_executable)
self.assertEqual(no_size, line_size)
self.assertEqual(line_size, full_size)
@no_fastcomp()
def test_dwarf(self):
def compile_with_dwarf(args, output):
# Test that -g enables dwarf info in object files and linked wasm
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', output, '-g'] + args)
def verify(output):
info = run_process([LLVM_DWARFDUMP, '--all', output], stdout=PIPE).stdout
self.assertIn('DW_TAG_subprogram', info) # Ensure there's a subprogram entry in .debug_info
self.assertIn('debug_line[0x', info) # Ensure there's a line table
compile_with_dwarf(['-c'], 'a.o')
verify('a.o')
compile_with_dwarf([], 'a.js')
verify('a.wasm')
@unittest.skipIf(not scons_path, 'scons not found in PATH')
@with_env_modify({'EMSCRIPTEN_ROOT': path_from_root()})
def test_scons(self):
# this test copies the site_scons directory alongside the test
shutil.copytree(path_from_root('tests', 'scons'), 'test')
shutil.copytree(path_from_root('tools', 'scons', 'site_scons'), os.path.join('test', 'site_scons'))
with chdir('test'):
run_process(['scons'])
output = run_js('scons_integration.js', assert_returncode=5)
self.assertContained('If you see this - the world is all right!', output)
@unittest.skipIf(not scons_path, 'scons not found in PATH')
@with_env_modify({'EMSCRIPTEN_TOOLPATH': path_from_root('tools', 'scons', 'site_scons'),
'EMSCRIPTEN_ROOT': path_from_root()})
def test_emscons(self):
# uses the emscons wrapper which requires EMSCRIPTEN_TOOLPATH to find
# site_scons
shutil.copytree(path_from_root('tests', 'scons'), 'test')
with chdir('test'):
run_process([path_from_root('emscons'), 'scons'])
output = run_js('scons_integration.js', assert_returncode=5)
self.assertContained('If you see this - the world is all right!', output)
def test_embind_fail(self):
out = self.expect_fail([EMCC, path_from_root('tests', 'embind', 'test_unsigned.cpp')])
self.assertContained("undefined symbol: _embind_register_function", out)
@is_slow_test
def test_embind(self):
environ = os.environ.copy()
environ['EMCC_CLOSURE_ARGS'] = environ.get('EMCC_CLOSURE_ARGS', '') + " --externs " + pipes.quote(path_from_root('tests', 'embind', 'underscore-externs.js'))
test_cases = [
(['--bind']),
(['--bind', '-O1']),
(['--bind', '-O2']),
(['--bind', '-O2', '-s', 'ALLOW_MEMORY_GROWTH=1', path_from_root('tests', 'embind', 'isMemoryGrowthEnabled=true.cpp')]),
]
without_utf8_args = ['-s', 'EMBIND_STD_STRING_IS_UTF8=0']
test_cases_without_utf8 = []
for args in test_cases:
test_cases_without_utf8.append((args + without_utf8_args))
test_cases += test_cases_without_utf8
test_cases.extend([(args[:] + ['-s', 'DYNAMIC_EXECUTION=0']) for args in test_cases])
# closure compiler doesn't work with DYNAMIC_EXECUTION=0
test_cases.append((['--bind', '-O2', '--closure', '1']))
for args in test_cases:
print(args)
self.clear()
testFiles = [
path_from_root('tests', 'embind', 'underscore-1.4.2.js'),
path_from_root('tests', 'embind', 'imvu_test_adapter.js'),
path_from_root('tests', 'embind', 'embind.test.js'),
]
run_process(
[EMCC, path_from_root('tests', 'embind', 'embind_test.cpp'),
'--pre-js', path_from_root('tests', 'embind', 'test.pre.js'),
'--post-js', path_from_root('tests', 'embind', 'test.post.js'),
'-s', 'WASM_ASYNC_COMPILATION=0',
'-s', 'IN_TEST_HARNESS=1'] + args,
env=environ)
if 'DYNAMIC_EXECUTION=0' in args:
with open('a.out.js') as js_binary_file:
js_binary_str = js_binary_file.read()
self.assertNotContained('new Function(', js_binary_str)
self.assertNotContained('eval(', js_binary_str)
with open('a.out.js', 'ab') as f:
for tf in testFiles:
f.write(open(tf, 'rb').read())
output = run_js('a.out.js', stdout=PIPE, stderr=PIPE, full_output=True)
self.assertNotContained('FAIL', output)
def test_emconfig(self):
output = run_process([emconfig, 'LLVM_ROOT'], stdout=PIPE).stdout.strip()
self.assertEqual(output, LLVM_ROOT)
# EMSCRIPTEN_ROOT is kind of special since it should always report the location of em-config
# itself (it's not configurable via the config file but driven by the location of arg0)
output = run_process([emconfig, 'EMSCRIPTEN_ROOT'], stdout=PIPE).stdout.strip()
self.assertEqual(output, os.path.dirname(emconfig))
invalid = 'Usage: em-config VAR_NAME'
# Don't accept variables that do not exist
output = self.expect_fail([emconfig, 'VAR_WHICH_DOES_NOT_EXIST']).strip()
self.assertEqual(output, invalid)
# Don't accept no arguments
output = self.expect_fail([emconfig]).strip()
self.assertEqual(output, invalid)
# Don't accept more than one variable
output = self.expect_fail([emconfig, 'LLVM_ROOT', 'EMCC']).strip()
self.assertEqual(output, invalid)
# Don't accept arbitrary python code
output = self.expect_fail([emconfig, 'sys.argv[1]']).strip()
self.assertEqual(output, invalid)
def test_link_s(self):
# -s OPT=VALUE can conflict with -s as a linker option. We warn and ignore
create_test_file('main.cpp', r'''
extern "C" {
void something();
}
int main() {
something();
return 0;
}
''')
create_test_file('supp.cpp', r'''
#include <stdio.h>
extern "C" {
void something() {
printf("yello\n");
}
}
''')
run_process([EMCC, 'main.cpp', '-o', 'main.o'])
run_process([EMCC, 'supp.cpp', '-o', 'supp.o'])
run_process([EMCC, 'main.o', '-s', 'supp.o', '-s', 'SAFE_HEAP=1'])
self.assertContained('yello', run_js('a.out.js'))
# Check that the valid -s option had an effect
self.assertContained('SAFE_HEAP', open('a.out.js').read())
def test_conftest_s_flag_passing(self):
create_test_file('conftest.c', r'''
int main() {
return 0;
}
''')
with env_modify({'EMMAKEN_JUST_CONFIGURE': '1'}):
cmd = [EMCC, '-s', 'ASSERTIONS=1', 'conftest.c', '-o', 'conftest']
output = run_process(cmd, stderr=PIPE)
self.assertNotContained('emcc: warning: treating -s as linker option', output.stderr)
self.assertExists('conftest')
def test_file_packager(self):
ensure_dir('subdir')
create_test_file('data1.txt', 'data1')
os.chdir('subdir')
create_test_file('data2.txt', 'data2')
# relative path to below the current dir is invalid
stderr = self.expect_fail([PYTHON, FILE_PACKAGER, 'test.data', '--preload', '../data1.txt'])
self.assertContained('below the current directory', stderr)
# relative path that ends up under us is cool
proc = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', '../subdir/data2.txt'], stderr=PIPE, stdout=PIPE)
self.assertGreater(len(proc.stdout), 0)
self.assertNotContained('below the current directory', proc.stderr)
# direct path leads to the same code being generated - relative path does not make us do anything different
proc2 = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'data2.txt'], stderr=PIPE, stdout=PIPE)
self.assertGreater(len(proc2.stdout), 0)
self.assertNotContained('below the current directory', proc2.stderr)
def clean(txt):
lines = txt.splitlines()
lines = [l for l in lines if 'PACKAGE_UUID' not in l and 'loadPackage({' not in l]
return ''.join(lines)
self.assertTextDataIdentical(clean(proc.stdout), clean(proc2.stdout))
# verify '--separate-metadata' option produces separate metadata file
os.chdir('..')
run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'data1.txt', '--preload', 'subdir/data2.txt', '--js-output=immutable.js', '--separate-metadata'])
self.assertExists('immutable.js.metadata')
# verify js output JS file is not touched when the metadata is separated
orig_timestamp = os.path.getmtime('immutable.js')
orig_content = open('immutable.js').read()
# ensure some time passes before running the packager again so that if it does touch the
# js file it will end up with a different timestamp.
time.sleep(1.0)
run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'data1.txt', '--preload', 'subdir/data2.txt', '--js-output=immutable.js', '--separate-metadata'])
# assert both file content and timestamp are the same as reference copy
self.assertTextDataIdentical(orig_content, open('immutable.js').read())
self.assertEqual(orig_timestamp, os.path.getmtime('immutable.js'))
# verify the content of metadata file is correct
with open('immutable.js.metadata') as f:
metadata = json.load(f)
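# Expected metadata shape (offsets are byte positions in the package; the
# uuid varies per run), roughly:
# {"files": [{"filename": "/data1.txt", "start": 0, "end": 5, ...},
# {"filename": "/subdir/data2.txt", "start": 5, "end": 10, ...}],
# "remote_package_size": 10, "package_uuid": "<uuid4>"}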
self.assertEqual(len(metadata['files']), 2)
self.assertEqual(metadata['files'][0]['start'], 0)
self.assertEqual(metadata['files'][0]['end'], len('data1'))
self.assertEqual(metadata['files'][0]['filename'], '/data1.txt')
self.assertEqual(metadata['files'][1]['start'], len('data1'))
self.assertEqual(metadata['files'][1]['end'], len('data1') + len('data2'))
self.assertEqual(metadata['files'][1]['filename'], '/subdir/data2.txt')
self.assertEqual(metadata['remote_package_size'], len('data1') + len('data2'))
# we can only assert that the uuid format is correct; its value is expected to differ between invocations
uuid.UUID(metadata['package_uuid'], version=4)
def test_file_packager_unicode(self):
unicode_name = 'unicode…☃'
try:
ensure_dir(unicode_name)
except OSError:
print("we failed to even create a unicode dir, so on this OS, we can't test this")
return
full = os.path.join(unicode_name, 'data.txt')
create_test_file(full, 'data')
proc = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', full], stdout=PIPE, stderr=PIPE)
assert len(proc.stdout), proc.stderr
assert unicode_name in proc.stdout, proc.stdout
print(len(proc.stderr))
def test_file_packager_mention_FORCE_FILESYSTEM(self):
MESSAGE = 'Remember to build the main file with -s FORCE_FILESYSTEM=1 so that it includes support for loading this file package'
create_test_file('data.txt', 'data1')
# mention when running standalone
err = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=PIPE, stderr=PIPE).stderr
self.assertContained(MESSAGE, err)
# do not mention from emcc
err = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--preload-file', 'data.txt'], stdout=PIPE, stderr=PIPE).stderr
self.assertEqual(len(err), 0)
def test_headless(self):
shutil.copyfile(path_from_root('tests', 'screenshot.png'), 'example.png')
run_process([EMCC, path_from_root('tests', 'sdl_headless.c'), '-s', 'HEADLESS=1'])
output = run_js('a.out.js', stderr=PIPE)
assert '''Init: 0
Font: 0x1
Sum: 0
you should see two lines of text in different colors and a blue rectangle
SDL_Quit called (and ignored)
done.
''' in output, output
def test_preprocess(self):
# Pass -Werror to prevent regressions such as https://github.com/emscripten-core/emscripten/pull/9661
out = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-E', '-Werror'], stdout=PIPE).stdout
self.assertNotExists('a.out.js')
self.assertNotExists('a.out')
# Test explicitly that the output contains a line typically written by the preprocessor.
self.assertContained('# 1 ', out)
self.assertContained('hello_world.c"', out)
self.assertContained('printf("hello, world!', out)
def test_syntax_only_valid(self):
result = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-fsyntax-only'], stdout=PIPE, stderr=STDOUT)
self.assertEqual(result.stdout, '')
self.assertNotExists('a.out.js')
def test_syntax_only_invalid(self):
create_test_file('src.c', 'int main() {')
result = run_process([EMCC, 'src.c', '-fsyntax-only'], stdout=PIPE, check=False, stderr=STDOUT)
self.assertNotEqual(result.returncode, 0)
self.assertContained("src.c:1:13: error: expected '}'", result.stdout)
self.assertNotExists('a.out.js')
def test_demangle(self):
create_test_file('src.cpp', '''
#include <stdio.h>
#include <emscripten.h>
void two(char c) {
EM_ASM(out(stackTrace()));
}
void one(int x) {
two(x % 17);
}
int main() {
EM_ASM(out(demangle('__Znwm'))); // check for no aborts
EM_ASM(out(demangle('_main')));
EM_ASM(out(demangle('__Z2f2v')));
EM_ASM(out(demangle('__Z12abcdabcdabcdi')));
EM_ASM(out(demangle('__ZL12abcdabcdabcdi')));
EM_ASM(out(demangle('__Z4testcsifdPvPiPc')));
EM_ASM(out(demangle('__ZN4test5moarrEcslfdPvPiPc')));
EM_ASM(out(demangle('__ZN4Waka1f12a234123412345pointEv')));
EM_ASM(out(demangle('__Z3FooIiEvv')));
EM_ASM(out(demangle('__Z3FooIidEvi')));
EM_ASM(out(demangle('__ZN3Foo3BarILi5EEEvv')));
EM_ASM(out(demangle('__ZNK10__cxxabiv120__si_class_type_info16search_below_dstEPNS_19__dynamic_cast_infoEPKvib')));
EM_ASM(out(demangle('__Z9parsewordRPKciRi')));
EM_ASM(out(demangle('__Z5multiwahtjmxyz')));
EM_ASM(out(demangle('__Z1aA32_iPA5_c')));
EM_ASM(out(demangle('__ZN21FWakaGLXFleeflsMarfooC2EjjjPKvbjj')));
EM_ASM(out(demangle('__ZN5wakaw2Cm10RasterBaseINS_6watwat9PolocatorEE8merbine1INS4_2OREEEvPKjj'))); // we get this wrong, but at least emit a '?'
one(17);
return 0;
}
''')
# full demangle support
run_process([EMCC, 'src.cpp', '-s', 'DEMANGLE_SUPPORT=1'])
output = run_js('a.out.js')
self.assertContained('''operator new(unsigned long)
_main
f2()
abcdabcdabcd(int)
abcdabcdabcd(int)
test(char, short, int, float, double, void*, int*, char*)
test::moarr(char, short, long, float, double, void*, int*, char*)
Waka::f::a23412341234::point()
void Foo<int>()
void Foo<int, double>(int)
void Foo::Bar<5>()
__cxxabiv1::__si_class_type_info::search_below_dst(__cxxabiv1::__dynamic_cast_info*, void const*, int, bool) const
parseword(char const*&, int, int&)
multi(wchar_t, signed char, unsigned char, unsigned short, unsigned int, unsigned long, long long, unsigned long long, ...)
a(int [32], char (*) [5])
FWakaGLXFleeflsMarfoo::FWakaGLXFleeflsMarfoo(unsigned int, unsigned int, unsigned int, void const*, bool, unsigned int, unsigned int)
void wakaw::Cm::RasterBase<wakaw::watwat::Polocator>::merbine1<wakaw::Cm::RasterBase<wakaw::watwat::Polocator>::OR>(unsigned int const*, unsigned int)
''', output)
# test for multiple functions in one stack trace
run_process([EMCC, 'src.cpp', '-s', 'DEMANGLE_SUPPORT=1', '-g'])
output = run_js('a.out.js')
self.assertIn('one(int)', output)
self.assertIn('two(char)', output)
def test_demangle_cpp(self):
create_test_file('src.cpp', '''
#include <stdio.h>
#include <emscripten.h>
#include <cxxabi.h>
#include <assert.h>
int main() {
char out[256];
int status = 1;
size_t length = 255;
abi::__cxa_demangle("_ZN4Waka1f12a234123412345pointEv", out, &length, &status);
assert(status == 0);
printf("%s\\n", out);
return 0;
}
''')
run_process([EMCC, 'src.cpp'])
output = run_js('a.out.js')
self.assertContained('Waka::f::a23412341234::point()', output)
# Test that malloc() -> OOM -> abort() -> stackTrace() -> jsStackTrace() -> demangleAll() -> demangle() -> malloc()
# cycle will not produce an infinite loop.
def test_demangle_malloc_infinite_loop_crash(self):
run_process([EMXX, path_from_root('tests', 'malloc_demangle_infinite_loop.cpp'), '-g', '-s', 'ABORTING_MALLOC=1', '-s', 'DEMANGLE_SUPPORT=1'])
output = run_js('a.out.js', assert_returncode=None, stderr=PIPE)
if output.count('Cannot enlarge memory arrays') > 2:
print(output)
assert output.count('Cannot enlarge memory arrays') <= 2
def test_module_exports_with_closure(self):
# This test checks that module.exports is retained when JavaScript is minified by compiling with --closure 1
# This is important because if module.exports is not present, the Module object will not be visible to node.js
# Run with ./runner.py other.test_module_exports_with_closure
# First make sure test.js isn't present.
self.clear()
# compile with -O2 --closure 0
run_process([EMCC, path_from_root('tests', 'Module-exports', 'test.c'),
'-o', 'test.js', '-O2', '--closure', '0',
'--pre-js', path_from_root('tests', 'Module-exports', 'setup.js'),
'-s', 'EXPORTED_FUNCTIONS=["_bufferTest"]',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap"]',
'-s', 'WASM_ASYNC_COMPILATION=0'])
# Check that compilation was successful
self.assertExists('test.js')
test_js_closure_0 = open('test.js').read()
# Check that test.js compiled with --closure 0 contains "module['exports'] = Module;"
assert ("module['exports'] = Module;" in test_js_closure_0) or ('module["exports"]=Module' in test_js_closure_0) or ('module["exports"] = Module;' in test_js_closure_0)
# Check that main.js (which requires test.js) completes successfully when run in node.js
# in order to check that the exports are indeed functioning correctly.
shutil.copyfile(path_from_root('tests', 'Module-exports', 'main.js'), 'main.js')
if NODE_JS in JS_ENGINES:
self.assertContained('bufferTest finished', run_js('main.js'))
# Delete test.js again and check it's gone.
try_delete('test.js')
self.assertNotExists('test.js')
# compile with -O2 --closure 1
run_process([EMCC, path_from_root('tests', 'Module-exports', 'test.c'),
'-o', 'test.js', '-O2', '--closure', '1',
'--pre-js', path_from_root('tests', 'Module-exports', 'setup.js'),
'-s', 'EXPORTED_FUNCTIONS=["_bufferTest"]',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap"]',
'-s', 'WASM_ASYNC_COMPILATION=0'])
# Check that compilation was successful
self.assertExists('test.js')
test_js_closure_1 = open('test.js').read()
# Check that test.js compiled with --closure 1 contains "module.exports", we want to verify that
# "module['exports']" got minified to "module.exports" when compiling with --closure 1
self.assertContained("module.exports", test_js_closure_1)
# Check that main.js (which requires test.js) completes successfully when run in node.js
# in order to check that the exports are indeed functioning correctly.
if NODE_JS in JS_ENGINES:
self.assertContained('bufferTest finished', run_js('main.js', engine=NODE_JS))
def test_node_catch_exit(self):
# Test that in node.js exceptions are not caught if NODEJS_EXIT_CATCH=0
if NODE_JS not in JS_ENGINES:
return
create_test_file('count.c', '''
#include <string.h>
int count(const char *str) {
return (int)strlen(str);
}
''')
create_test_file('index.js', '''
const count = require('./count.js');
console.log(xxx); //< here is the ReferenceError
''')
reference_error_text = 'console.log(xxx); //< here is the ReferenceError'
run_process([EMCC, 'count.c', '-o', 'count.js'])
# Check that the ReferenceError is caught and rethrown and thus the original error line is masked
self.assertNotContained(reference_error_text,
run_js('index.js', engine=NODE_JS, stderr=STDOUT, assert_returncode=None))
run_process([EMCC, 'count.c', '-o', 'count.js', '-s', 'NODEJS_CATCH_EXIT=0'])
# Check that the ReferenceError is not caught, so we see the error properly
self.assertContained(reference_error_text,
run_js('index.js', engine=NODE_JS, stderr=STDOUT, assert_returncode=None))
def test_extra_exported_methods(self):
# Test with node.js that the EXTRA_EXPORTED_RUNTIME_METHODS setting is considered by libraries
if NODE_JS not in JS_ENGINES:
self.skipTest("node engine required for this test")
create_test_file('count.c', '''
#include <string.h>
int count(const char *str) {
return (int)strlen(str);
}
''')
create_test_file('index.js', '''
const count = require('./count.js');
console.log(count.FS_writeFile);
''')
reference_error_text = 'undefined'
run_process([EMCC, 'count.c', '-s', 'FORCE_FILESYSTEM=1', '-s',
'EXTRA_EXPORTED_RUNTIME_METHODS=["FS_writeFile"]', '-o', 'count.js'])
# Check that the Module.FS_writeFile exists
self.assertNotContained(reference_error_text,
run_js('index.js', engine=NODE_JS, stderr=STDOUT, assert_returncode=None))
run_process([EMCC, 'count.c', '-s', 'FORCE_FILESYSTEM=1', '-o', 'count.js'])
# Check that the Module.FS_writeFile is not exported
self.assertContained(reference_error_text,
run_js('index.js', engine=NODE_JS, stderr=STDOUT, assert_returncode=None))
def test_fs_stream_proto(self):
open('src.cpp', 'wb').write(br'''
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <errno.h>
#include <string.h>
int main()
{
long file_size = 0;
int h = open("src.cpp", O_RDONLY, 0666);
if (0 != h)
{
FILE* file = fdopen(h, "rb");
if (0 != file)
{
fseek(file, 0, SEEK_END);
file_size = ftell(file);
fseek(file, 0, SEEK_SET);
}
else
{
printf("fdopen() failed: %s\n", strerror(errno));
return 10;
}
close(h);
printf("File size: %ld\n", file_size);
}
else
{
printf("open() failed: %s\n", strerror(errno));
return 10;
}
return 0;
}
''')
run_process([EMCC, 'src.cpp', '--embed-file', 'src.cpp'])
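# The program opens and measures its own embedded source; the expected
# 'File size: 724' matches the byte length of the src.cpp literal above.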
for engine in JS_ENGINES:
out = run_js('a.out.js', engine=engine, stderr=PIPE, full_output=True)
self.assertContained('File size: 724', out)
def test_proxyfs(self):
# This test assumes that 3 different programs share the same directory and files.
# Each program must get its own JS object, but node's require() caches JS objects.
# If we just loaded the same js file multiple times, as in the following code,
# these programs (m0,m1,m2) would share the same JS object.
#
# var m0 = require('./proxyfs_test.js');
# var m1 = require('./proxyfs_test.js');
# var m2 = require('./proxyfs_test.js');
#
# To give each program a separate JS object, the following require() calls use different js files.
#
# var m0 = require('./proxyfs_test.js');
# var m1 = require('./proxyfs_test1.js');
# var m2 = require('./proxyfs_test2.js');
#
create_test_file('proxyfs_test_main.js', r'''
var m0 = require('./proxyfs_test.js');
var m1 = require('./proxyfs_test1.js');
var m2 = require('./proxyfs_test2.js');
var section;
function print(str){
process.stdout.write(section+":"+str+":");
}
m0.FS.mkdir('/working');
m0.FS.mount(m0.PROXYFS,{root:'/',fs:m1.FS},'/working');
m0.FS.mkdir('/working2');
m0.FS.mount(m0.PROXYFS,{root:'/',fs:m2.FS},'/working2');
section = "child m1 reads and writes local file.";
print("m1 read embed");
m1.ccall('myreade','number',[],[]);
print("m1 write");console.log("");
m1.ccall('mywrite0','number',['number'],[1]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
section = "child m2 reads and writes local file.";
print("m2 read embed");
m2.ccall('myreade','number',[],[]);
print("m2 write");console.log("");
m2.ccall('mywrite0','number',['number'],[2]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
section = "child m1 reads local file.";
print("m1 read");
m1.ccall('myread0','number',[],[]);
section = "parent m0 reads and writes local and children's file.";
print("m0 read embed");
m0.ccall('myreade','number',[],[]);
print("m0 read m1");
m0.ccall('myread1','number',[],[]);
print("m0 read m2");
m0.ccall('myread2','number',[],[]);
section = "m0,m1 and m2 verify local files.";
print("m0 write");console.log("");
m0.ccall('mywrite0','number',['number'],[0]);
print("m0 read");
m0.ccall('myread0','number',[],[]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
print("m0 read embed");
m0.ccall('myreade','number',[],[]);
print("m1 read embed");
m1.ccall('myreade','number',[],[]);
print("m2 read embed");
m2.ccall('myreade','number',[],[]);
section = "parent m0 writes and reads children's files.";
print("m0 write m1");console.log("");
m0.ccall('mywrite1','number',[],[]);
print("m0 read m1");
m0.ccall('myread1','number',[],[]);
print("m0 write m2");console.log("");
m0.ccall('mywrite2','number',[],[]);
print("m0 read m2");
m0.ccall('myread2','number',[],[]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
print("m0 read m0");
m0.ccall('myread0','number',[],[]);
''')
create_test_file('proxyfs_pre.js', r'''
if (typeof Module === 'undefined') Module = {};
Module["noInitialRun"]=true;
noExitRuntime=true;
''')
create_test_file('proxyfs_embed.txt', r'''test
''')
create_test_file('proxyfs_test.c', r'''
#include <stdio.h>
int
mywrite1(){
FILE* out = fopen("/working/hoge.txt","w");
fprintf(out,"test1\n");
fclose(out);
return 0;
}
int
myread1(){
FILE* in = fopen("/working/hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
return 0;
}
int
mywrite2(){
FILE* out = fopen("/working2/hoge.txt","w");
fprintf(out,"test2\n");
fclose(out);
return 0;
}
int
myread2(){
{
FILE* in = fopen("/working2/hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
}
return 0;
}
int
mywrite0(int i){
FILE* out = fopen("hoge.txt","w");
fprintf(out,"test0_%d\n",i);
fclose(out);
return 0;
}
int
myread0(){
{
FILE* in = fopen("hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
}
return 0;
}
int
myreade(){
{
FILE* in = fopen("proxyfs_embed.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
}
return 0;
}
''')
run_process([EMCC,
'-o', 'proxyfs_test.js', 'proxyfs_test.c',
'--embed-file', 'proxyfs_embed.txt', '--pre-js', 'proxyfs_pre.js',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap"]',
'-lproxyfs.js',
'-s', 'WASM_ASYNC_COMPILATION=0',
'-s', 'MAIN_MODULE=1',
'-s', 'EXPORT_ALL=1'])
# The following shutil.copyfile calls just prevent node.js's require() from caching the JS object.
# See https://nodejs.org/api/modules.html
shutil.copyfile('proxyfs_test.js', 'proxyfs_test1.js')
shutil.copyfile('proxyfs_test.js', 'proxyfs_test2.js')
out = run_js('proxyfs_test_main.js')
section = "child m1 reads and writes local file."
self.assertContained(section + ":m1 read embed:test", out)
self.assertContained(section + ":m1 write:", out)
self.assertContained(section + ":m1 read:test0_1", out)
section = "child m2 reads and writes local file."
self.assertContained(section + ":m2 read embed:test", out)
self.assertContained(section + ":m2 write:", out)
self.assertContained(section + ":m2 read:test0_2", out)
section = "child m1 reads local file."
self.assertContained(section + ":m1 read:test0_1", out)
section = "parent m0 reads and writes local and children's file."
self.assertContained(section + ":m0 read embed:test", out)
self.assertContained(section + ":m0 read m1:test0_1", out)
self.assertContained(section + ":m0 read m2:test0_2", out)
section = "m0,m1 and m2 verify local files."
self.assertContained(section + ":m0 write:", out)
self.assertContained(section + ":m0 read:test0_0", out)
self.assertContained(section + ":m1 read:test0_1", out)
self.assertContained(section + ":m2 read:test0_2", out)
self.assertContained(section + ":m0 read embed:test", out)
self.assertContained(section + ":m1 read embed:test", out)
self.assertContained(section + ":m2 read embed:test", out)
section = "parent m0 writes and reads children's files."
self.assertContained(section + ":m0 write m1:", out)
self.assertContained(section + ":m0 read m1:test1", out)
self.assertContained(section + ":m0 write m2:", out)
self.assertContained(section + ":m0 read m2:test2", out)
self.assertContained(section + ":m1 read:test1", out)
self.assertContained(section + ":m2 read:test2", out)
self.assertContained(section + ":m0 read m0:test0_0", out)
def test_dependency_file(self):
# Issue 1732: -MMD (and friends) create dependency files that need to be
# copied from the temporary directory.
create_test_file('test.cpp', r'''
#include "test.hpp"
void my_function()
{
}
''')
create_test_file('test.hpp', r'''
void my_function();
''')
run_process([EMCC, '-MMD', '-c', 'test.cpp', '-o', 'test.o'])
self.assertExists('test.d')
deps = open('test.d').read()
# Look for ': ' instead of just ':' to not confuse C:\path\ notation with make "target: deps" rule. Not perfect, but good enough for this test.
head, tail = deps.split(': ', 1)
assert 'test.o' in head, 'Invalid dependency target'
assert 'test.cpp' in tail and 'test.hpp' in tail, 'Invalid dependencies generated'
def test_dependency_file_2(self):
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
run_process([EMCC, 'a.c', '-MMD', '-MF', 'test.d', '-c'])
self.assertContained(open('test.d').read(), 'a.o: a.c\n')
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
run_process([EMCC, 'a.c', '-MMD', '-MF', 'test2.d', '-c', '-o', 'test.o'])
self.assertContained(open('test2.d').read(), 'test.o: a.c\n')
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
ensure_dir('obj')
run_process([EMCC, 'a.c', '-MMD', '-MF', 'test3.d', '-c', '-o', 'obj/test.o'])
self.assertContained(open('test3.d').read(), 'obj/test.o: a.c\n')
def test_js_lib_quoted_key(self):
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
__internal_data:{
'<' : 0,
'white space' : 1
},
printf__deps: ['__internal_data', 'fprintf']
});
''')
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--js-library', 'lib.js'])
self.assertContained('hello, world!', run_js('a.out.js'))
def test_js_lib_exported(self):
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
jslibfunc: function(x) { return 2 * x }
});
''')
create_test_file('src.cpp', r'''
#include <emscripten.h>
#include <stdio.h>
extern "C" int jslibfunc(int x);
int main() {
printf("c calling: %d\n", jslibfunc(6));
EM_ASM({
out('js calling: ' + Module['_jslibfunc'](5) + '.');
});
}
''')
run_process([EMCC, 'src.cpp', '--js-library', 'lib.js', '-s', 'EXPORTED_FUNCTIONS=["_main", "_jslibfunc"]'])
self.assertContained('c calling: 12\njs calling: 10.', run_js('a.out.js'))
def test_js_lib_primitive_dep(self):
# Verify that primitive dependencies aren't generated in the output JS.
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
foo__deps: ['Int8Array', 'NonPrimitive'],
foo: function() {},
});
''')
create_test_file('main.c', r'''
void foo(void);
int main(int argc, char** argv) {
foo();
return 0;
}
''')
run_process([EMCC, '-O0', 'main.c', '--js-library', 'lib.js', '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0'])
generated = open('a.out.js').read()
self.assertContained('missing function: NonPrimitive', generated)
self.assertNotContained('missing function: Int8Array', generated)
def test_js_lib_using_asm_lib(self):
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
jslibfunc__deps: ['asmlibfunc'],
jslibfunc: function(x) {
return 2 * _asmlibfunc(x);
},
asmlibfunc__asm: true,
asmlibfunc__sig: 'ii',
asmlibfunc: function(x) {
x = x | 0;
return x + 1 | 0;
}
});
''')
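# Note: asmlibfunc uses the JS-library __asm/__sig convention - __asm: true
# compiles it into the asm module itself, and __sig: 'ii' declares its
# signature (returns an int, takes one int), which jslibfunc then calls as
# _asmlibfunc.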
create_test_file('src.cpp', r'''
#include <stdio.h>
extern "C" int jslibfunc(int x);
int main() {
printf("c calling: %d\n", jslibfunc(6));
}
''')
run_process([EMCC, 'src.cpp', '--js-library', 'lib.js'])
self.assertContained('c calling: 14\n', run_js('a.out.js'))
def test_EMCC_BUILD_DIR(self):
# The EMCC_BUILD_DIR env var contains the dir we were building in when running the js compiler (e.g. when
# running a js library). We force the cwd to be src/ for technical reasons, so this lets you find out
# where you were.
create_test_file('lib.js', r'''
printErr('dir was ' + process.env.EMCC_BUILD_DIR);
''')
err = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--js-library', 'lib.js'], stderr=PIPE).stderr
self.assertContained('dir was ' + os.path.realpath(os.path.normpath(self.get_dir())), err)
def test_float_h(self):
process = run_process([EMCC, path_from_root('tests', 'float+.c')], stdout=PIPE, stderr=PIPE)
assert process.returncode == 0, 'float.h should agree with our system: ' + process.stdout + '\n\n\n' + process.stderr
def test_output_is_dir(self):
ensure_dir('out_dir')
err = self.expect_fail([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-o', 'out_dir/'])
self.assertContained('error: unable to open output file', err)
def test_default_obj_ext(self):
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
self.assertExists('hello_world.o')
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '--default-obj-ext', 'obj'])
self.assertExists('hello_world.obj')
def test_doublestart_bug(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
void main_loop(void) {
static int cnt = 0;
if (++cnt >= 10) emscripten_cancel_main_loop();
}
int main(void) {
printf("This should only appear once.\n");
emscripten_set_main_loop(main_loop, 10, 0);
return 0;
}
''')
create_test_file('pre.js', r'''
if (!Module['preRun']) Module['preRun'] = [];
Module["preRun"].push(function () {
addRunDependency('test_run_dependency');
removeRunDependency('test_run_dependency');
});
''')
run_process([EMCC, 'code.cpp', '--pre-js', 'pre.js'])
output = run_js('a.out.js')
assert output.count('This should only appear once.') == 1, output
def test_module_print(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
int main(void) {
printf("123456789\n");
return 0;
}
''')
create_test_file('pre.js', r'''
var Module = { print: function(x) { throw '<{(' + x + ')}>' } };
''')
run_process([EMCC, 'code.cpp', '--pre-js', 'pre.js'])
output = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
assert r'<{(123456789)}>' in output, output
def test_precompiled_headers_warnings(self):
# Check that we don't have any underlying warnings from clang; these can happen if we
# pass any link flags when building a pch.
create_test_file('header.h', '#define X 5\n')
run_process([EMCC, '-Werror', '-xc++-header', 'header.h'])
def test_precompiled_headers(self):
for suffix in ['gch', 'pch']:
print(suffix)
self.clear()
create_test_file('header.h', '#define X 5\n')
run_process([EMCC, '-xc++-header', 'header.h', '-c'])
self.assertExists('header.h.gch') # default output is gch
if suffix != 'gch':
run_process([EMCC, '-xc++-header', 'header.h', '-o', 'header.h.' + suffix])
self.assertBinaryEqual('header.h.gch', 'header.h.' + suffix)
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("|%d|\n", X);
return 0;
}
''')
run_process([EMCC, 'src.cpp', '-include', 'header.h'])
output = run_js('a.out.js', stderr=PIPE, full_output=True)
self.assertContained('|5|', output)
# also verify that the gch is actually used
err = run_process([EMCC, 'src.cpp', '-include', 'header.h', '-Xclang', '-print-stats'], stderr=PIPE).stderr
self.assertTextDataContained('*** PCH/Modules Loaded:\nModule: header.h.' + suffix, err)
# and sanity check it is not mentioned when not
try_delete('header.h.' + suffix)
err = run_process([EMCC, 'src.cpp', '-include', 'header.h', '-Xclang', '-print-stats'], stderr=PIPE).stderr
self.assertNotContained('*** PCH/Modules Loaded:\nModule: header.h.' + suffix, err.replace('\r\n', '\n'))
# with specified target via -o
try_delete('header.h.' + suffix)
run_process([EMCC, '-xc++-header', 'header.h', '-o', 'my.' + suffix])
self.assertExists('my.' + suffix)
# -include-pch flag
run_process([EMCC, '-xc++-header', 'header.h', '-o', 'header.h.' + suffix])
run_process([EMCC, 'src.cpp', '-include-pch', 'header.h.' + suffix])
output = run_js('a.out.js')
self.assertContained('|5|', output)
@no_wasm_backend('tests extra fastcomp warnings on unaligned loads/stores, which matter a lot more in asm.js')
def test_warn_unaligned(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
struct packey {
char x;
int y;
double z;
} __attribute__((__packed__));
int main() {
volatile packey p;
p.x = 0;
p.y = 1;
p.z = 2;
return 0;
}
''')
output = run_process([EMCC, 'src.cpp', '-s', 'WASM=0', '-s', 'WARN_UNALIGNED=1', '-g'], stderr=PIPE)
self.assertContained('emcc: warning: unaligned store', output.stderr)
self.assertContained('@line 11 "src.cpp"', output.stderr)
def test_LEGACY_VM_SUPPORT(self):
# when modern features are lacking, we can polyfill them or at least warn
create_test_file('pre.js', 'Math.imul = undefined;')
def test(expected, opts=[]):
print(opts)
result = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--pre-js', 'pre.js'] + opts, stderr=PIPE, check=False)
if result.returncode == 0:
self.assertContained(expected, run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None))
else:
self.assertContained(expected, result.stderr)
# when legacy is needed, we show an error indicating so
test('build with LEGACY_VM_SUPPORT')
# legacy + disabling wasm works
if self.is_wasm_backend():
return
test('hello, world!', ['-s', 'LEGACY_VM_SUPPORT=1', '-s', 'WASM=0'])
def test_on_abort(self):
expected_output = 'Module.onAbort was called'
def add_on_abort_and_verify(extra=''):
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write("var Module = { onAbort: function() { console.log('%s') } };\n" % expected_output)
f.write(extra + '\n')
f.write(js)
self.assertContained(expected_output, run_js('a.out.js', assert_returncode=None))
# test direct abort() C call
create_test_file('src.c', '''
#include <stdlib.h>
int main() {
abort();
}
''')
run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
add_on_abort_and_verify()
# test direct abort() JS call
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
EM_ASM({ abort() });
}
''')
run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
add_on_abort_and_verify()
# test throwing in an abort handler, and catching that
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
EM_ASM({
try {
out('first');
abort();
} catch (e) {
out('second');
abort();
throw e;
}
});
}
''')
run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write("var Module = { onAbort: function() { console.log('%s'); throw 're-throw'; } };\n" % expected_output)
f.write(js)
out = run_js('a.out.js', stderr=STDOUT, assert_returncode=None)
print(out)
self.assertContained(expected_output, out)
self.assertContained('re-throw', out)
self.assertContained('first', out)
self.assertContained('second', out)
self.assertEqual(out.count(expected_output), 2)
# test an abort during startup
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
os.remove('a.out.wasm') # trigger onAbort by intentionally causing startup to fail
add_on_abort_and_verify()
def test_no_exit_runtime(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
template<int x>
struct Waste {
Waste() {
printf("coming around %d\n", x);
}
~Waste() {
printf("going away %d\n", x);
}
};
Waste<1> w1;
Waste<2> w2;
Waste<3> w3;
Waste<4> w4;
Waste<5> w5;
int main(int argc, char **argv) {
return 0;
}
''')
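    # with EXIT_RUNTIME=0 the runtime is never shut down, so the Waste<>
    # destructors must not run ('going away' should not appear in the output)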
for wasm in [0, 1]:
for no_exit in [1, 0]:
for opts in [[], ['-O1'], ['-O2', '-g2'], ['-O2', '-g2', '--llvm-lto', '1']]:
if self.is_wasm_backend() and not wasm:
continue
print(wasm, no_exit, opts)
cmd = [EMCC] + opts + ['code.cpp', '-s', 'EXIT_RUNTIME=' + str(1 - no_exit), '-s', 'WASM=' + str(wasm)]
if wasm:
cmd += ['--profiling-funcs'] # for function names
run_process(cmd)
output = run_js('a.out.js', stderr=PIPE, full_output=True)
src = open('a.out.js').read()
if wasm:
src += '\n' + self.get_wasm_text('a.out.wasm')
exit = 1 - no_exit
print(' exit:', exit, 'opts:', opts)
self.assertContained('coming around', output)
self.assertContainedIf('going away', output, exit)
if not self.is_wasm_backend():
            # The wasm backend uses atexit to register destructors when
            # constructors are called. There is currently no way to exclude
            # these destructors from the wasm binary.
assert ('atexit(' in src) == exit, 'atexit should not appear in src when EXIT_RUNTIME=0'
assert ('_ZN5WasteILi2EED' in src) == exit, 'destructors should not appear if no exit:\n' + src
def test_no_exit_runtime_warnings_flush(self):
# check we warn if there is unflushed info
create_test_file('code.c', r'''
#include <stdio.h>
int main(int argc, char **argv) {
printf("hello\n");
printf("world"); // no newline, not flushed
#if FLUSH
printf("\n");
#endif
}
''')
create_test_file('code.cpp', r'''
#include <iostream>
int main() {
using namespace std;
cout << "hello" << std::endl;
cout << "world"; // no newline, not flushed
#if FLUSH
std::cout << std::endl;
#endif
}
''')
for src in ['code.c', 'code.cpp']:
for no_exit in [0, 1]:
for assertions in [0, 1]:
for flush in [0, 1]:
# TODO: also check FILESYSTEM=0 here. it never worked though, buffered output was not emitted at shutdown
print(src, no_exit, assertions, flush)
cmd = [EMCC, src, '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-s', 'ASSERTIONS=%d' % assertions]
if flush:
cmd += ['-DFLUSH']
run_process(cmd)
output = run_js('a.out.js', stderr=PIPE, full_output=True)
exit = 1 - no_exit
self.assertContained('hello', output)
assert ('world' in output) == (exit or flush), 'unflushed content is shown only when exiting the runtime'
assert (no_exit and assertions and not flush) == ('stdio streams had content in them that was not flushed. you should set EXIT_RUNTIME to 1' in output), 'warning should be shown'
def test_fs_after_main(self):
for args in [[], ['-O1']]:
print(args)
run_process([EMCC, path_from_root('tests', 'fs_after_main.cpp')])
self.assertContained('Test passed.', run_js('a.out.js'))
@no_wasm_backend('tests fastcomp compiler flags')
def test_os_oz(self):
for arg, expect in [
('-O1', '-O1'),
('-O2', '-O3'),
('-Os', '-Os'),
('-Oz', '-Oz'),
('-O3', '-O3'),
]:
print(arg, expect)
proc = run_process([EMCC, '-v', path_from_root('tests', 'hello_world.cpp'), arg], stderr=PIPE)
self.assertContained(expect, proc.stderr)
self.assertContained('hello, world!', run_js('a.out.js'))
def test_oz_size(self):
sizes = {}
for name, args in [
('0', []),
('1', ['-O1']),
('2', ['-O2']),
('s', ['-Os']),
('z', ['-Oz']),
('3', ['-O3']),
]:
print(name, args)
self.clear()
run_process([EMCC, '-c', path_from_root('system', 'lib', 'dlmalloc.c')] + args)
sizes[name] = os.path.getsize('dlmalloc.o')
print(sizes)
opt_min = min(sizes['1'], sizes['2'], sizes['3'], sizes['s'], sizes['z'])
opt_max = max(sizes['1'], sizes['2'], sizes['3'], sizes['s'], sizes['z'])
    # the optimized builds should all be fairly close in size
    self.assertLess(opt_max - opt_min, opt_max * 0.1)
    # the unoptimized build is quite a bit larger
self.assertGreater(sizes['0'], (1.20 * opt_max))
@no_wasm_backend('relies on ctor evaluation and dtor elimination')
def test_global_inits(self):
create_test_file('inc.h', r'''
#include <stdio.h>
template<int x>
struct Waste {
int state;
Waste() : state(10) {}
void test(int a) {
printf("%d\n", a + state);
}
~Waste() {
printf("going away %d\n", x);
}
};
Waste<3> *getMore();
''')
create_test_file('main.cpp', r'''
#include "inc.h"
Waste<1> mw1;
Waste<2> mw2;
int main(int argc, char **argv) {
printf("argc: %d\n", argc);
mw1.state += argc;
mw2.state += argc;
mw1.test(5);
mw2.test(6);
getMore()->test(0);
return 0;
}
''')
create_test_file('side.cpp', r'''
#include "inc.h"
Waste<3> sw3;
Waste<3> *getMore() {
return &sw3;
}
''')
for opts, has_global in [
(['-O2', '-g', '-s', 'EXIT_RUNTIME=1'], True),
# no-exit-runtime removes the atexits, and then globalgce can work
# it's magic to remove the global initializer entirely
(['-O2', '-g'], False),
(['-Os', '-g', '-s', 'EXIT_RUNTIME=1'], True),
(['-Os', '-g'], False),
(['-O2', '-g', '--llvm-lto', '1', '-s', 'EXIT_RUNTIME=1'], True),
(['-O2', '-g', '--llvm-lto', '1'], False),
]:
print(opts, has_global)
run_process([EMCC, 'main.cpp', '-c'] + opts)
run_process([EMCC, 'side.cpp', '-c'] + opts)
run_process([EMCC, 'main.o', 'side.o'] + opts)
run_js('a.out.js', stderr=PIPE, full_output=True)
src = open('a.out.js').read()
self.assertContained('argc: 1\n16\n17\n10\n', run_js('a.out.js'))
self.assertContainedIf('globalCtors', src, has_global)
# Tests that when there are only 0 or 1 global initializers, that a grouped global initializer function will not be generated
# (that would just consume excess code size)
def test_no_global_inits(self):
create_test_file('one_global_initializer.cpp', r'''
#include <emscripten.h>
#include <stdio.h>
double t = emscripten_get_now();
int main() { printf("t:%d\n", (int)(t>0)); }
''')
run_process([EMCC, 'one_global_initializer.cpp'])
# Above file has one global initializer, should not generate a redundant grouped globalCtors function
self.assertNotContained('globalCtors', open('a.out.js').read())
self.assertContained('t:1', run_js('a.out.js'))
create_test_file('zero_global_initializers.cpp', r'''
#include <stdio.h>
int main() { printf("t:1\n"); }
''')
run_process([EMCC, 'zero_global_initializers.cpp'])
# Above file should have zero global initializers, should not generate any global initializer functions
self.assertNotContained('__GLOBAL__sub_', open('a.out.js').read())
self.assertContained('t:1', run_js('a.out.js'))
def test_implicit_func(self):
create_test_file('src.c', r'''
#include <stdio.h>
int main()
{
printf("hello %d\n", strnlen("waka", 2)); // Implicit declaration, no header, for strnlen
int (*my_strnlen)(char*, ...) = strnlen;
printf("hello %d\n", my_strnlen("shaka", 2));
return 0;
}
''')
IMPLICIT_WARNING = "warning: implicit declaration of function 'strnlen' is invalid in C99"
IMPLICIT_ERROR = "error: implicit declaration of function 'strnlen' is invalid in C99"
INCOMPATIBLE_WARNINGS = ('warning: incompatible pointer types', 'warning: incompatible function pointer types')
for opts, expected, compile_expected in [
([], None, [IMPLICIT_ERROR]),
(['-Wno-error=implicit-function-declaration'], ['hello '], [IMPLICIT_WARNING]), # turn error into warning
(['-Wno-implicit-function-declaration'], ['hello '], []), # turn error into nothing at all (runtime output is incorrect)
]:
print(opts, expected)
try_delete('a.out.js')
stderr = run_process([EMCC, 'src.c'] + opts, stderr=PIPE, check=False).stderr
for ce in compile_expected + [INCOMPATIBLE_WARNINGS]:
self.assertContained(ce, stderr)
if expected is None:
self.assertNotExists('a.out.js')
else:
output = run_js('a.out.js', stderr=PIPE, full_output=True)
for e in expected:
self.assertContained(e, output)
@no_wasm_backend('uses prebuilt .ll file')
def test_incorrect_static_call(self):
for wasm in [0, 1]:
for opts in [0, 1]:
for asserts in [0, 1]:
extra = []
if opts != 1 - asserts:
extra = ['-s', 'ASSERTIONS=' + str(asserts)]
cmd = [EMCC, path_from_root('tests', 'sillyfuncast2_noasm.ll'), '-O' + str(opts), '-s', 'WASM=' + str(wasm)] + extra
print(opts, asserts, wasm, cmd)
# Should not need to pipe stdout here but binaryen writes to stdout
# when it really should write to stderr.
stderr = run_process(cmd, stdout=PIPE, stderr=PIPE, check=False).stderr
assert ('unexpected' in stderr) == asserts, stderr
assert ("to 'doit'" in stderr) == asserts, stderr
@no_wasm_backend('fastcomp specific')
def test_llvm_lit(self):
grep_path = shared.which('grep')
if not grep_path:
self.skipTest('This test needs the "grep" tool in PATH. If you are using emsdk on Windows, you can obtain it via installing and activating the gnu package.')
llvm_src = get_fastcomp_src_dir()
if not llvm_src:
self.skipTest('llvm source tree not found')
LLVM_LIT = os.path.join(LLVM_ROOT, 'llvm-lit.py')
if not os.path.exists(LLVM_LIT):
LLVM_LIT = os.path.join(LLVM_ROOT, 'llvm-lit')
if not os.path.exists(LLVM_LIT):
self.skipTest('llvm-lit not found; fastcomp directory is most likely prebuilt')
cmd = [PYTHON, LLVM_LIT, '-v', os.path.join(llvm_src, 'test', 'CodeGen', 'JS')]
print(cmd)
run_process(cmd)
@requires_native_clang
def test_bad_triple(self):
# compile a minimal program, with as few dependencies as possible, as
# native building on CI may not always work well
create_test_file('minimal.cpp', 'int main() { return 0; }')
run_process([CLANG_CXX, 'minimal.cpp', '-target', 'x86_64-linux', '-c', '-emit-llvm', '-o', 'a.bc'] + clang_native.get_clang_native_args(), env=clang_native.get_clang_native_env())
# wasm backend will hard fail where as fastcomp only warns
if self.is_wasm_backend():
err = self.expect_fail([EMCC, 'a.bc'])
self.assertContained('machine type must be wasm32', err)
else:
err = run_process([EMCC, 'a.bc'], stderr=PIPE).stderr
assert 'warning' in err or 'WARNING' in err, err
assert 'incorrect target triple' in err or 'different target triples' in err, err
def test_valid_abspath(self):
# Test whether abspath warning appears
abs_include_path = os.path.abspath(self.get_dir())
err = run_process([EMCC, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
warning = '-I or -L of an absolute path "-I%s" encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript).' % abs_include_path
self.assertContained(warning, err)
# Passing an absolute path to a directory inside the emscripten tree is always ok and should not issue a warning.
abs_include_path = path_from_root('tests')
err = run_process([EMCC, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
warning = '-I or -L of an absolute path "-I%s" encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript).' % abs_include_path
self.assertNotContained(warning, err)
# Hide warning for this include path
err = run_process([EMCC, '--valid-abspath', abs_include_path, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
self.assertNotContained(warning, err)
def test_valid_abspath_2(self):
if WINDOWS:
abs_include_path = 'C:\\nowhere\\at\\all'
else:
abs_include_path = '/nowhere/at/all'
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '--valid-abspath', abs_include_path, '-I%s' % abs_include_path]
print(' '.join(cmd))
run_process(cmd)
self.assertContained('hello, world!', run_js('a.out.js'))
def test_warn_dylibs(self):
shared_suffixes = ['.so', '.dylib', '.dll']
for suffix in ['.o', '.a', '.bc', '.so', '.lib', '.dylib', '.js', '.html']:
print(suffix)
err = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'out' + suffix], stderr=PIPE).stderr
warning = 'When Emscripten compiles to a typical native suffix for shared libraries (.so, .dylib, .dll) then it emits an object file. You should then compile that to an emscripten SIDE_MODULE (using that flag) with suffix .wasm (for wasm) or .js (for asm.js).'
self.assertContainedIf(warning, err, suffix in shared_suffixes)
def test_side_module_without_proper_target(self):
# SIDE_MODULE is only meaningful when compiling to wasm (or js+wasm)
# otherwise, we are just linking bitcode, and should show an error
for wasm in [0, 1]:
if self.is_wasm_backend() and not wasm:
continue
print(wasm)
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'SIDE_MODULE=1', '-o', 'a.so', '-s', 'WASM=%d' % wasm])
self.assertContained('SIDE_MODULE must only be used when compiling to an executable shared library, and not when emitting an object file', stderr)
@no_wasm_backend('asm.js optimizations')
def test_simplify_ifs(self):
def test(src, nums):
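      # nums gives the expected count of 'if (' in the compiled main() for
      # each opts combination (an entry may list several acceptable counts)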
create_test_file('src.c', src)
for opts, ifs in [
[['-g2'], nums[0]],
[['--profiling'], nums[1]],
[['--profiling', '-g2'], nums[2]]
]:
print(opts, ifs)
        if isinstance(ifs, int):
ifs = [ifs]
try_delete('a.out.js')
run_process([EMCC, 'src.c', '-O2', '-s', 'WASM=0'] + opts, stdout=PIPE)
src = open('a.out.js').read()
main = src[src.find('function _main'):src.find('\n}', src.find('function _main'))]
actual_ifs = main.count('if (')
assert actual_ifs in ifs, main + ' : ' + str([ifs, actual_ifs])
test(r'''
#include <stdio.h>
#include <string.h>
int main(int argc, char **argv) {
if (argc > 5 && strlen(argv[0]) > 1 && strlen(argv[1]) > 2) printf("halp");
return 0;
}
''', [3, 1, 1])
test(r'''
#include <stdio.h>
#include <string.h>
int main(int argc, char **argv) {
while (argc % 3 == 0) {
if (argc > 5 && strlen(argv[0]) > 1 && strlen(argv[1]) > 2) {
printf("halp");
argc++;
} else {
while (argc > 0) {
printf("%d\n", argc--);
}
}
}
return 0;
}
''', [8, [5, 7], [5, 7]])
test(r'''
#include <stdio.h>
#include <string.h>
int main(int argc, char **argv) {
while (argc % 17 == 0) argc *= 2;
if (argc > 5 && strlen(argv[0]) > 10 && strlen(argv[1]) > 20) {
printf("halp");
argc++;
} else {
printf("%d\n", argc--);
}
while (argc % 17 == 0) argc *= 2;
return argc;
}
''', [6, 3, 3])
test(r'''
#include <stdio.h>
#include <stdlib.h>
int main(int argc, char *argv[]) {
if (getenv("A") && getenv("B")) {
printf("hello world\n");
} else {
printf("goodnight moon\n");
}
printf("and that's that\n");
return 0;
}
''', [[3, 2], 1, 1])
test(r'''
#include <stdio.h>
#include <stdlib.h>
int main(int argc, char *argv[]) {
if (getenv("A") || getenv("B")) {
printf("hello world\n");
}
printf("and that's that\n");
return 0;
}
''', [[3, 2], 1, 1])
def test_symbol_map(self):
UNMINIFIED_HEAP8 = 'var HEAP8 = new global.Int8Array'
UNMINIFIED_MIDDLE = 'function middle'
for opts in [['-O2'], ['-O3']]:
for wasm in [0, 1, 2]:
# -s WASM=2 is a WASM_BACKEND-only feature:
if wasm == 2 and not shared.Settings.WASM_BACKEND:
continue
print(opts, wasm)
self.clear()
create_test_file('src.c', r'''
#include <emscripten.h>
EM_JS(int, run_js, (), {
out(new Error().stack);
return 0;
});
EMSCRIPTEN_KEEPALIVE
void middle() {
if (run_js()) {
// fake recursion that is never reached, to avoid inlining in binaryen and LLVM
middle();
}
}
int main() {
EM_ASM({ _middle() });
}
''')
cmd = [EMCC, 'src.c', '--emit-symbol-map'] + opts
cmd += ['-s', 'WASM=%d' % wasm]
run_process(cmd)
# check that the map is correct
with open('a.out.js.symbols') as f:
symbols = f.read()
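        # each line of the symbol map has the form 'minified:original',
        # e.g. 'b:_middle' (the exact minified name varies per build)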
lines = [line.split(':') for line in symbols.strip().split('\n')]
minified_middle = None
for minified, full in lines:
# handle both fastcomp and wasm backend notation
if full == '_middle' or full == 'middle':
minified_middle = minified
break
        self.assertIsNotNone(minified_middle)
if wasm:
# stack traces are standardized enough that we can easily check that the
# minified name is actually in the output
stack_trace_reference = 'wasm-function[%s]' % minified_middle
out = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
self.assertContained(stack_trace_reference, out)
# make sure there are no symbols in the wasm itself
wat = run_process([os.path.join(building.get_binaryen_bin(), 'wasm-dis'), 'a.out.wasm'], stdout=PIPE).stdout
for func_start in ('(func $middle', '(func $_middle'):
self.assertNotContained(func_start, wat)
# check we don't keep unnecessary debug info with wasm2js when emitting
# a symbol map
if self.is_wasm_backend() and wasm == 0 and '-O' in str(opts):
with open('a.out.js') as f:
js = f.read()
self.assertNotContained(UNMINIFIED_HEAP8, js)
self.assertNotContained(UNMINIFIED_MIDDLE, js)
# verify those patterns would exist with more debug info
run_process(cmd + ['--profiling-funcs'])
with open('a.out.js') as f:
js = f.read()
self.assertContained(UNMINIFIED_HEAP8, js)
self.assertContained(UNMINIFIED_MIDDLE, js)
def test_bc_to_bc(self):
# emcc should 'process' bitcode to bitcode. build systems can request this if
# e.g. they assume our 'executable' extension is bc, and compile an .o to a .bc
# (the user would then need to build bc to js of course, but we need to actually
# emit the bc)
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
self.assertExists('hello_world.o')
run_process([EMCC, 'hello_world.o', '-o', 'hello_world.bc'])
self.assertExists('hello_world.o')
self.assertExists('hello_world.bc')
def test_bad_function_pointer_cast(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
typedef int (*callback) (int, ...);
int impl(int foo) {
printf("Hello, world.\n");
return 0;
}
int main() {
volatile callback f = (callback) impl;
f(0); /* This fails with or without additional arguments. */
return 0;
}
''')
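    # calling impl through an int (*)(int, ...) pointer has a mismatched
    # signature; only EMULATE_FUNCTION_POINTER_CASTS makes the call succeed,
    # the other modes differ just in how the failure is reported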
for opts in [0, 1, 2]:
for safe in [0, 1]:
for emulate_casts in [0, 1]:
for emulate_fps in [0, 1]:
for relocatable in [0, 1]:
for wasm in [0, 1]:
if self.is_wasm_backend() and (not wasm or emulate_fps):
continue
if emulate_casts and self.is_wasm_backend() and relocatable:
# TODO('https://github.com/emscripten-core/emscripten/issues/8507')
continue
cmd = [EMCC, 'src.cpp', '-O' + str(opts)]
if not wasm:
cmd += ['-s', 'WASM=0']
if safe:
cmd += ['-s', 'SAFE_HEAP']
if emulate_casts:
cmd += ['-s', 'EMULATE_FUNCTION_POINTER_CASTS']
if emulate_fps:
cmd += ['-s', 'EMULATED_FUNCTION_POINTERS']
if relocatable:
cmd += ['-s', 'RELOCATABLE'] # disables asm-optimized safe heap
print(cmd)
run_process(cmd)
output = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
if emulate_casts:
# success!
self.assertContained('Hello, world.', output)
else:
# otherwise, the error depends on the mode we are in
if self.is_wasm_backend() or (wasm and (relocatable or emulate_fps)):
# wasm trap raised by the vm
self.assertContained('function signature mismatch', output)
elif opts == 0 and safe and not wasm:
# non-wasm safe mode checks asm.js function table masks
self.assertContained('Function table mask error', output)
elif opts == 0:
# informative error message (assertions are enabled in -O0)
self.assertContained('Invalid function pointer', output)
else:
# non-informative error
self.assertContained(('abort(', 'exception'), output)
@no_wasm_backend('asm.js function table feature')
def test_aliased_func_pointers(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int impl1(int foo) { return foo; }
float impla(float foo) { return foo; }
int impl2(int foo) { return foo+1; }
float implb(float foo) { return foo+1; }
int impl3(int foo) { return foo+2; }
float implc(float foo) { return foo+2; }
int main(int argc, char **argv) {
volatile void *f = (void*)impl1;
if (argc == 50) f = (void*)impla;
if (argc == 51) f = (void*)impl2;
if (argc == 52) f = (void*)implb;
if (argc == 53) f = (void*)impl3;
if (argc == 54) f = (void*)implc;
return (int)f;
}
''')
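    # with aliasing (the default), function pointers of different signatures
    # may share table indexes, keeping the per-signature FUNCTION_TABLE_ii/dd
    # tables small; without it every pointer gets a globally unique index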
print('aliasing')
sizes_ii = {}
sizes_dd = {}
for alias in [None, 0, 1]:
cmd = [EMCC, 'src.cpp', '-O1', '-s', 'WASM=0']
if alias is not None:
cmd += ['-s', 'ALIASING_FUNCTION_POINTERS=' + str(alias)]
else:
alias = -1
print(cmd)
run_process(cmd)
src = open('a.out.js').read().split('\n')
for line in src:
if line.strip().startswith('var FUNCTION_TABLE_ii = '):
sizes_ii[alias] = line.count(',')
if line.strip().startswith('var FUNCTION_TABLE_dd = '):
sizes_dd[alias] = line.count(',')
print('ii', sizes_ii)
print('dd', sizes_dd)
for sizes in [sizes_ii, sizes_dd]:
self.assertEqual(sizes[-1], sizes[1]) # default is to alias
self.assertLess(sizes[1], sizes[0]) # without aliasing, we have more unique values and fat tables
def test_bad_export(self):
for m in ['', ' ']:
self.clear()
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORTED_FUNCTIONS=["' + m + '_main"]']
print(cmd)
stderr = run_process(cmd, stderr=PIPE, check=False).stderr
if m:
self.assertContained('undefined exported function: " _main"', stderr)
else:
self.assertContained('hello, world!', run_js('a.out.js'))
def test_no_dynamic_execution(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1', '-s', 'DYNAMIC_EXECUTION=0'])
self.assertContained('hello, world!', run_js('a.out.js'))
src = open('a.out.js').read()
self.assertNotContained('eval(', src)
self.assertNotContained('eval.', src)
self.assertNotContained('new Function', src)
try_delete('a.out.js')
    # Test that --preload-file doesn't add a use of eval().
create_test_file('temp.txt', "foo\n")
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1',
'-s', 'DYNAMIC_EXECUTION=0', '--preload-file', 'temp.txt'])
src = open('a.out.js').read()
    self.assertNotContained('eval(', src)
    self.assertNotContained('eval.', src)
    self.assertNotContained('new Function', src)
try_delete('a.out.js')
    # Test that -s DYNAMIC_EXECUTION=0 and -s RELOCATABLE=1 are not allowed together.
self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-O1',
'-s', 'DYNAMIC_EXECUTION=0', '-s', 'RELOCATABLE=1'])
try_delete('a.out.js')
create_test_file('test.c', r'''
#include <emscripten/emscripten.h>
int main() {
emscripten_run_script("console.log('hello from script');");
return 0;
}
''')
# Test that emscripten_run_script() aborts when -s DYNAMIC_EXECUTION=0
run_process([EMCC, 'test.c', '-O1', '-s', 'DYNAMIC_EXECUTION=0'])
self.assertContained('DYNAMIC_EXECUTION=0 was set, cannot eval', run_js('a.out.js', assert_returncode=None, full_output=True, stderr=PIPE))
try_delete('a.out.js')
    # Test that emscripten_run_script() prints a warning when -s DYNAMIC_EXECUTION=2
run_process([EMCC, 'test.c', '-O1', '-s', 'DYNAMIC_EXECUTION=2'])
self.assertContained('Warning: DYNAMIC_EXECUTION=2 was set, but calling eval in the following location:', run_js('a.out.js', assert_returncode=None, full_output=True, stderr=PIPE))
self.assertContained('hello from script', run_js('a.out.js', assert_returncode=None, full_output=True, stderr=PIPE))
try_delete('a.out.js')
def test_init_file_at_offset(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
int data = 0x12345678;
FILE *f = fopen("test.dat", "wb");
fseek(f, 100, SEEK_CUR);
fwrite(&data, 4, 1, f);
fclose(f);
int data2;
f = fopen("test.dat", "rb");
fread(&data2, 4, 1, f); // should read 0s, not that int we wrote at an offset
printf("read: %d\n", data2);
fseek(f, 0, SEEK_END);
long size = ftell(f); // should be 104, not 4
fclose(f);
printf("file size is %ld\n", size);
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('read: 0\nfile size is 104\n', run_js('a.out.js'))
def test_unlink(self):
self.do_other_test(os.path.join('other', 'unlink'))
def test_argv0_node(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
int main(int argc, char **argv) {
printf("I am %s.\n", argv[0]);
return 0;
}
''')
run_process([EMCC, 'code.cpp'])
self.assertContained('I am ' + os.path.realpath(self.get_dir()).replace('\\', '/') + '/a.out.js', run_js('a.out.js').replace('\\', '/'))
def test_returncode(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
#if CALL_EXIT
exit(CODE);
#else
return CODE;
#endif
}
''')
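    # try every combination of exit code, EXIT_RUNTIME, return vs exit(), and
    # async wasm compilation: the process exit code must always match CODE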
for code in [0, 123]:
for no_exit in [0, 1]:
for call_exit in [0, 1]:
for async_compile in [0, 1]:
run_process([EMCC, 'src.cpp', '-DCODE=%d' % code, '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-DCALL_EXIT=%d' % call_exit, '-s', 'WASM_ASYNC_COMPILATION=%d' % async_compile])
for engine in JS_ENGINES:
# async compilation can't return a code in d8
if async_compile and engine == V8_ENGINE:
continue
print(code, no_exit, call_exit, async_compile, engine)
proc = run_process(engine + ['a.out.js'], stderr=PIPE, check=False)
# we always emit the right exit code, whether we exit the runtime or not
self.assertEqual(proc.returncode, code)
msg = 'but EXIT_RUNTIME is not set, so halting execution but not exiting the runtime or preventing further async execution (build with EXIT_RUNTIME=1, if you want a true shutdown)'
if no_exit and call_exit:
self.assertContained(msg, proc.stderr)
else:
self.assertNotContained(msg, proc.stderr)
def test_emscripten_force_exit_NO_EXIT_RUNTIME(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
#if CALL_EXIT
emscripten_force_exit(0);
#endif
}
''')
for no_exit in [0, 1]:
for call_exit in [0, 1]:
run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-DCALL_EXIT=%d' % call_exit])
print(no_exit, call_exit)
out = run_js('a.out.js', stdout=PIPE, stderr=PIPE, full_output=True)
assert ('emscripten_force_exit cannot actually shut down the runtime, as the build does not have EXIT_RUNTIME set' in out) == (no_exit and call_exit), out
def test_mkdir_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <dirent.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
int main(int argc, char **argv) {
printf("\n");
for (int i = 1; i < argc; i++) {
printf("%d:\n", i);
int ok = mkdir(argv[i], S_IRWXU|S_IRWXG|S_IRWXO);
printf(" make %s: %d\n", argv[i], ok);
DIR *dir = opendir(argv[i]);
printf(" open %s: %d\n", argv[i], dir != NULL);
if (dir) {
struct dirent *entry;
while ((entry = readdir(dir))) {
printf(" %s, %d\n", entry->d_name, entry->d_type);
}
}
}
}
''')
run_process([EMCC, 'src.cpp'])
# cannot create /, can open
self.assertContained(r'''
1:
make /: -1
open /: 1
., 4
.., 4
tmp, 4
home, 4
dev, 4
proc, 4
''', run_js('a.out.js', args=['/']))
# cannot create empty name, cannot open
self.assertContained(r'''
1:
make : -1
open : 0
''', run_js('a.out.js', args=['']))
# can create unnormalized path, can open
self.assertContained(r'''
1:
make /a//: 0
open /a//: 1
., 4
.., 4
''', run_js('a.out.js', args=['/a//']))
# can create child unnormalized
self.assertContained(r'''
1:
make /a: 0
open /a: 1
., 4
.., 4
2:
make /a//b//: 0
open /a//b//: 1
., 4
.., 4
''', run_js('a.out.js', args=['/a', '/a//b//']))
def test_stat_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <errno.h>
#include <sys/stat.h>
int main(int argc, char **argv) {
for (int i = 1; i < argc; i++) {
const char *path = argv[i];
struct stat path_stat;
if (stat(path, &path_stat) != 0) {
printf("Failed to stat path: %s; errno=%d\n", path, errno);
} else {
printf("ok on %s\n", path);
}
}
}
''')
run_process([EMCC, 'src.cpp'])
# cannot stat ""
self.assertContained(r'''Failed to stat path: /a; errno=44
Failed to stat path: ; errno=44
''', run_js('a.out.js', args=['/a', '']))
def test_symlink_silly(self):
create_test_file('src.cpp', r'''
#include <dirent.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <stdio.h>
int main(int argc, char **argv) {
if (symlink(argv[1], argv[2]) != 0) {
printf("Failed to symlink paths: %s, %s; errno=%d\n", argv[1], argv[2], errno);
} else {
printf("ok\n");
}
}
''')
run_process([EMCC, 'src.cpp'])
    # empty paths are rejected (errno 44); a nonexistent target ('123') is fine
self.assertContained(r'Failed to symlink paths: , abc; errno=44', run_js('a.out.js', args=['', 'abc']))
self.assertContained(r'Failed to symlink paths: , ; errno=44', run_js('a.out.js', args=['', '']))
self.assertContained(r'ok', run_js('a.out.js', args=['123', 'abc']))
self.assertContained(r'Failed to symlink paths: abc, ; errno=44', run_js('a.out.js', args=['abc', '']))
def test_rename_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <errno.h>
int main(int argc, char **argv) {
if (rename(argv[1], argv[2]) != 0) {
printf("Failed to rename paths: %s, %s; errno=%d\n", argv[1], argv[2], errno);
} else {
printf("ok\n");
}
}
''')
run_process([EMCC, 'src.cpp'])
    # cannot rename with empty or nonexistent paths
self.assertContained(r'Failed to rename paths: , abc; errno=44', run_js('a.out.js', args=['', 'abc']))
self.assertContained(r'Failed to rename paths: , ; errno=44', run_js('a.out.js', args=['', '']))
self.assertContained(r'Failed to rename paths: 123, abc; errno=44', run_js('a.out.js', args=['123', 'abc']))
self.assertContained(r'Failed to rename paths: abc, ; errno=44', run_js('a.out.js', args=['abc', '']))
def test_readdir_r_silly(self):
create_test_file('src.cpp', r'''
#include <iostream>
#include <cstring>
#include <cerrno>
#include <unistd.h>
#include <fcntl.h>
#include <cstdlib>
#include <dirent.h>
#include <sys/stat.h>
#include <sys/types.h>
using std::endl;
namespace
{
void check(const bool result)
{
if(not result) {
std::cout << "Check failed!" << endl;
throw "bad";
}
}
// Do a recursive directory listing of the directory whose path is specified
// by \a name.
void ls(const std::string& name, std::size_t indent = 0)
{
::DIR *dir;
struct ::dirent *entry;
if(indent == 0) {
std::cout << name << endl;
++indent;
}
// Make sure we can open the directory. This should also catch cases where
// the empty string is passed in.
if (not (dir = ::opendir(name.c_str()))) {
const int error = errno;
std::cout
<< "Failed to open directory: " << name << "; " << error << endl;
return;
}
// Just checking the sanity.
if (name.empty()) {
std::cout
<< "Managed to open a directory whose name was the empty string.."
<< endl;
check(::closedir(dir) != -1);
return;
}
// Iterate over the entries in the directory.
while ((entry = ::readdir(dir))) {
const std::string entryName(entry->d_name);
if (entryName == "." || entryName == "..") {
// Skip the dot entries.
continue;
}
const std::string indentStr(indent * 2, ' ');
if (entryName.empty()) {
std::cout
<< indentStr << "\"\": Found empty string as a "
<< (entry->d_type == DT_DIR ? "directory" : "file")
<< " entry!" << endl;
continue;
} else {
std::cout << indentStr << entryName
<< (entry->d_type == DT_DIR ? "/" : "") << endl;
}
if (entry->d_type == DT_DIR) {
// We found a subdirectory; recurse.
ls(std::string(name + (name == "/" ? "" : "/" ) + entryName),
indent + 1);
}
}
// Close our handle.
check(::closedir(dir) != -1);
}
void touch(const std::string &path)
{
const int fd = ::open(path.c_str(), O_CREAT | O_TRUNC, 0644);
check(fd != -1);
check(::close(fd) != -1);
}
}
int main()
{
check(::mkdir("dir", 0755) == 0);
touch("dir/a");
touch("dir/b");
touch("dir/c");
touch("dir/d");
touch("dir/e");
std::cout << "Before:" << endl;
ls("dir");
std::cout << endl;
// Attempt to delete entries as we walk the (single) directory.
::DIR * const dir = ::opendir("dir");
check(dir != NULL);
struct ::dirent *entry;
while((entry = ::readdir(dir)) != NULL) {
const std::string name(entry->d_name);
// Skip "." and "..".
if(name == "." || name == "..") {
continue;
}
// Unlink it.
std::cout << "Unlinking " << name << endl;
check(::unlink(("dir/" + name).c_str()) != -1);
}
check(::closedir(dir) != -1);
std::cout << "After:" << endl;
ls("dir");
std::cout << endl;
return 0;
}
''')
run_process([EMCC, 'src.cpp'])
    # verify the listing before and after unlinking all entries while iterating
self.assertContained(r'''Before:
dir
a
b
c
d
e
Unlinking a
Unlinking b
Unlinking c
Unlinking d
Unlinking e
After:
dir
''', run_js('a.out.js', args=['', 'abc']))
def test_emversion(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("major: %d\n", __EMSCRIPTEN_major__);
printf("minor: %d\n", __EMSCRIPTEN_minor__);
printf("tiny: %d\n", __EMSCRIPTEN_tiny__);
}
''')
run_process([EMCC, 'src.cpp'])
expected = '''\
major: %d
minor: %d
tiny: %d
''' % (shared.EMSCRIPTEN_VERSION_MAJOR, shared.EMSCRIPTEN_VERSION_MINOR, shared.EMSCRIPTEN_VERSION_TINY)
self.assertContained(expected, run_js('a.out.js'))
def test_libc_files_without_syscalls(self):
# a program which includes FS due to libc js library support, but has no syscalls,
# so full FS support would normally be optimized out
create_test_file('src.cpp', r'''
#include <sys/time.h>
#include <stddef.h>
int main() {
return utimes(NULL, NULL);
}''')
run_process([EMCC, 'src.cpp'])
def test_syscall_without_filesystem(self):
# a program which includes a non-trivial syscall, but disables the filesystem.
create_test_file('src.c', r'''
#include <sys/time.h>
#include <stddef.h>
extern int __sys_openat(int);
int main() {
return __sys_openat(0);
}''')
run_process([EMCC, 'src.c', '-s', 'NO_FILESYSTEM=1'])
def test_dashS(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S'])
self.assertExists('hello_world.s')
def test_dashS_stdout(self):
stdout = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S', '-o', '-'], stdout=PIPE).stdout
self.assertEqual(os.listdir('.'), [])
self.assertContained('hello_world.c', stdout)
def test_emit_llvm(self):
# TODO(https://github.com/emscripten-core/emscripten/issues/9016):
# We shouldn't need to copy the file here but if we don't then emcc will
# internally clobber the hello_world.ll in tests.
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'hello_world.c')
run_process([EMCC, 'hello_world.c', '-S', '-emit-llvm'])
self.assertExists('hello_world.ll')
    ll = open('hello_world.ll').read()
    self.assertContained('target triple = "', ll)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-c', '-emit-llvm'])
self.assertTrue(building.is_bitcode('hello_world.bc'))
def test_dashE(self):
create_test_file('src.cpp', r'''#include <emscripten.h>
__EMSCRIPTEN_major__ __EMSCRIPTEN_minor__ __EMSCRIPTEN_tiny__ EMSCRIPTEN_KEEPALIVE
''')
def test(args=[]):
print(args)
out = run_process([EMCC, 'src.cpp', '-E'] + args, stdout=PIPE).stdout
self.assertContained('%d %d %d __attribute__((used))' % (shared.EMSCRIPTEN_VERSION_MAJOR, shared.EMSCRIPTEN_VERSION_MINOR, shared.EMSCRIPTEN_VERSION_TINY), out)
test()
test(['--bind'])
def test_dashE_respect_dashO(self):
# issue #3365
with_dash_o = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-E', '-o', 'ignored.js'], stdout=PIPE, stderr=PIPE).stdout
without_dash_o = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-E'], stdout=PIPE, stderr=PIPE).stdout
self.assertEqual(len(with_dash_o), 0)
self.assertNotEqual(len(without_dash_o), 0)
def test_dashM(self):
out = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M'], stdout=PIPE).stdout
self.assertContained('hello_world.o:', out) # Verify output is just a dependency rule instead of bitcode or js
def test_dashM_respect_dashO(self):
# issue #3365
with_dash_o = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M', '-o', 'ignored.js'], stdout=PIPE).stdout
without_dash_o = run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M'], stdout=PIPE).stdout
self.assertEqual(len(with_dash_o), 0)
self.assertNotEqual(len(without_dash_o), 0)
def test_malloc_implicit(self):
self.do_other_test(os.path.join('other', 'malloc_implicit'))
def test_switch64phi(self):
# issue 2539, fastcomp segfault on phi-i64 interaction
create_test_file('src.cpp', r'''
#include <cstdint>
#include <limits>
#include <cstdio>
//============================================================================
namespace
{
class int_adapter {
public:
typedef ::int64_t int_type;
int_adapter(int_type v = 0)
: value_(v)
{}
static const int_adapter pos_infinity()
{
return (::std::numeric_limits<int_type>::max)();
}
static const int_adapter neg_infinity()
{
return (::std::numeric_limits<int_type>::min)();
}
static const int_adapter not_a_number()
{
return (::std::numeric_limits<int_type>::max)()-1;
}
static bool is_neg_inf(int_type v)
{
return (v == neg_infinity().as_number());
}
static bool is_pos_inf(int_type v)
{
return (v == pos_infinity().as_number());
}
static bool is_not_a_number(int_type v)
{
return (v == not_a_number().as_number());
}
bool is_infinity() const
{
return (value_ == neg_infinity().as_number() ||
value_ == pos_infinity().as_number());
}
bool is_special() const
{
return(is_infinity() || value_ == not_a_number().as_number());
}
bool operator<(const int_adapter& rhs) const
{
if(value_ == not_a_number().as_number()
|| rhs.value_ == not_a_number().as_number()) {
return false;
}
if(value_ < rhs.value_) return true;
return false;
}
int_type as_number() const
{
return value_;
}
int_adapter operator-(const int_adapter& rhs)const
{
if(is_special() || rhs.is_special())
{
if (rhs.is_pos_inf(rhs.as_number()))
{
return int_adapter(1);
}
if (rhs.is_neg_inf(rhs.as_number()))
{
return int_adapter();
}
}
return int_adapter();
}
private:
int_type value_;
};
class time_iterator {
public:
time_iterator(int_adapter t, int_adapter d)
: current_(t),
offset_(d)
{}
time_iterator& operator--()
{
current_ = int_adapter(current_ - offset_);
return *this;
}
bool operator>=(const int_adapter& t)
{
return not (current_ < t);
}
private:
int_adapter current_;
int_adapter offset_;
};
void iterate_backward(const int_adapter *answers, const int_adapter& td)
{
int_adapter end = answers[0];
time_iterator titr(end, td);
std::puts("");
for (; titr >= answers[0]; --titr) {
}
}
}
int
main()
{
const int_adapter answer1[] = {};
iterate_backward(NULL, int_adapter());
iterate_backward(answer1, int_adapter());
}
''')
run_process([EMCC, 'src.cpp', '-O2', '-s', 'SAFE_HEAP=1'])
@parameterized({
'none': [{'EMCC_FORCE_STDLIBS': None}, False],
# forced libs is ok, they were there anyhow
'normal': [{'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++'}, False],
# partial list, but ok since we grab them as needed
'parial': [{'EMCC_FORCE_STDLIBS': 'libc++'}, False],
# fail! not enough stdlibs
'partial_only': [{'EMCC_FORCE_STDLIBS': 'libc++,libc,libc++abi', 'EMCC_ONLY_FORCED_STDLIBS': '1'}, True],
# force all the needed stdlibs, so this works even though we ignore the input file
'full_only': [{'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++,libpthread,libmalloc', 'EMCC_ONLY_FORCED_STDLIBS': '1'}, False],
})
def test_only_force_stdlibs(self, env, fail):
with env_modify(env):
run_process([EMXX, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0'])
if fail:
output = self.expect_fail(NODE_JS + ['a.out.js'], stdout=PIPE)
self.assertContained('missing function', output)
else:
self.assertContained('hello, world!', run_js('a.out.js'))
def test_only_force_stdlibs_2(self):
create_test_file('src.cpp', r'''
#include <iostream>
#include <stdexcept>
int main()
{
try {
throw std::exception();
std::cout << "got here" << std::endl;
}
catch (const std::exception& ex) {
std::cout << "Caught exception: " << ex.what() << std::endl;
}
}
''')
with env_modify({'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++,libmalloc,libpthread', 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
run_process([EMXX, 'src.cpp', '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
self.assertContained('Caught exception: std::exception', run_js('a.out.js', stderr=PIPE))
def test_strftime_zZ(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <ctime>
#include <iostream>
int main()
{
// Buffer to hold the current hour of the day. Format is HH + nul
// character.
char hour[3];
// Buffer to hold our ISO 8601 formatted UTC offset for the current
// timezone. Format is [+-]hhmm + nul character.
char utcOffset[6];
// Buffer to hold the timezone name or abbreviation. Just make it
// sufficiently large to hold most timezone names.
char timezone[128];
std::tm tm;
// Get the current timestamp.
const std::time_t now = std::time(NULL);
// What time is that here?
if (::localtime_r(&now, &tm) == NULL) {
const int error = errno;
std::cout
<< "Failed to get localtime for timestamp=" << now << "; errno=" << error
<< "; " << std::strerror(error) << std::endl;
return 1;
}
size_t result = 0;
// Get the formatted hour of the day.
if ((result = std::strftime(hour, 3, "%H", &tm)) != 2) {
const int error = errno;
std::cout
<< "Failed to format hour for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current hour of the day is: " << hour << std::endl;
// Get the formatted UTC offset in ISO 8601 format.
if ((result = std::strftime(utcOffset, 6, "%z", &tm)) != 5) {
const int error = errno;
std::cout
<< "Failed to format UTC offset for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current timezone offset is: " << utcOffset << std::endl;
// Get the formatted timezone name or abbreviation. We don't know how long
// this will be, so just expect some data to be written to the buffer.
if ((result = std::strftime(timezone, 128, "%Z", &tm)) == 0) {
const int error = errno;
std::cout
<< "Failed to format timezone for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current timezone is: " << timezone << std::endl;
std::cout << "ok!\n";
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('ok!', run_js('a.out.js'))
def test_strptime_symmetry(self):
building.emcc(path_from_root('tests', 'strptime_symmetry.cpp'), output_filename='a.out.js')
self.assertContained('TEST PASSED', run_js('a.out.js'))
def test_truncate_from_0(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <iostream>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
using std::endl;
//============================================================================
// :: Helpers
namespace
{
// Returns the size of the regular file specified as 'path'.
::off_t getSize(const char* const path)
{
// Stat the file and make sure that it's the expected size.
struct ::stat path_stat;
if (::stat(path, &path_stat) != 0) {
const int error = errno;
std::cout
<< "Failed to lstat path: " << path << "; errno=" << error << "; "
<< std::strerror(error) << endl;
return -1;
}
std::cout
<< "Size of file is: " << path_stat.st_size << endl;
return path_stat.st_size;
}
// Causes the regular file specified in 'path' to have a size of 'length'
// bytes.
void resize(const char* const path,
const ::off_t length)
{
std::cout
<< "Truncating file=" << path << " to length=" << length << endl;
if (::truncate(path, length) == -1)
{
const int error = errno;
std::cout
<< "Failed to truncate file=" << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
}
const ::off_t size = getSize(path);
if (size != length) {
std::cout
<< "Failed to truncate file=" << path << " to length=" << length
<< "; got size=" << size << endl;
}
}
// Helper to create a file with the given content.
void createFile(const std::string& path, const std::string& content)
{
std::cout
<< "Creating file: " << path << " with content=" << content << endl;
const int fd = ::open(path.c_str(), O_CREAT | O_WRONLY, 0644);
if (fd == -1) {
const int error = errno;
std::cout
<< "Failed to open file for writing: " << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
return;
}
if (::write(fd, content.c_str(), content.size()) != content.size()) {
const int error = errno;
std::cout
<< "Failed to write content=" << content << " to file=" << path
<< "; errno=" << error << "; " << std::strerror(error) << endl;
// Fall through to close FD.
}
::close(fd);
}
}
//============================================================================
// :: Entry Point
int main()
{
const char* const file = "/tmp/file";
createFile(file, "This is some content");
getSize(file);
resize(file, 32);
resize(file, 17);
resize(file, 0);
// This throws a JS exception.
resize(file, 32);
return 0;
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained(r'''Creating file: /tmp/file with content=This is some content
Size of file is: 20
Truncating file=/tmp/file to length=32
Size of file is: 32
Truncating file=/tmp/file to length=17
Size of file is: 17
Truncating file=/tmp/file to length=0
Size of file is: 0
Truncating file=/tmp/file to length=32
Size of file is: 32
''', run_js('a.out.js'))
def test_create_readonly(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <iostream>
#include <fcntl.h>
#include <unistd.h>
using std::endl;
//============================================================================
// :: Helpers
namespace
{
// Helper to create a read-only file with content.
void readOnlyFile(const std::string& path, const std::string& content)
{
std::cout
<< "Creating file: " << path << " with content of size="
<< content.size() << endl;
const int fd = ::open(path.c_str(), O_CREAT | O_WRONLY, 0400);
if (fd == -1) {
const int error = errno;
std::cout
<< "Failed to open file for writing: " << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
return;
}
// Write the content to the file.
ssize_t result = 0;
if ((result = ::write(fd, content.data(), content.size()))
!= ssize_t(content.size()))
{
const int error = errno;
std::cout
<< "Failed to write to file=" << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
// Fall through to close the file.
}
else {
std::cout
<< "Data written to file=" << path << "; successfully wrote "
<< result << " bytes" << endl;
}
::close(fd);
}
}
//============================================================================
// :: Entry Point
int main()
{
const char* const file = "/tmp/file";
unlink(file);
readOnlyFile(file, "This content should get written because the file "
"does not yet exist and so, only the mode of the "
"containing directory will influence my ability to "
"create and open the file. The mode of the file only "
"applies to opening of the stream, not subsequent stream "
"operations after stream has opened.\n\n");
readOnlyFile(file, "This should not get written because the file already "
"exists and is read-only.\n\n");
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained(r'''Creating file: /tmp/file with content of size=292
Data written to file=/tmp/file; successfully wrote 292 bytes
Creating file: /tmp/file with content of size=79
Failed to open file for writing: /tmp/file; errno=2; Permission denied
''', run_js('a.out.js'))
def test_embed_file_large(self):
# If such long files are encoded on one line,
# they overflow the interpreter's limit
    large_size = 1500000
create_test_file('large.txt', 'x' * large_size)
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <unistd.h>
int main()
{
FILE* fp = fopen("large.txt", "r");
if (fp) {
printf("ok\n");
fseek(fp, 0L, SEEK_END);
printf("%ld\n", ftell(fp));
} else {
printf("failed to open large file.txt\n");
}
return 0;
}
''')
run_process([EMCC, 'src.cpp', '--embed-file', 'large.txt'])
for engine in JS_ENGINES:
if engine == V8_ENGINE:
continue # ooms
print(engine)
self.assertContained('ok\n' + str(large_size) + '\n', run_js('a.out.js', engine=engine))
def test_force_exit(self):
create_test_file('src.cpp', r'''
#include <emscripten/emscripten.h>
namespace
{
extern "C"
EMSCRIPTEN_KEEPALIVE
void callback()
{
EM_ASM({ out('callback pre()') });
::emscripten_force_exit(42);
EM_ASM({ out('callback post()') });
}
}
int
main()
{
EM_ASM({ setTimeout(function() { out("calling callback()"); _callback() }, 100) });
::emscripten_exit_with_live_runtime();
return 123;
}
''')
run_process([EMCC, 'src.cpp'])
output = run_js('a.out.js', assert_returncode=42)
assert 'callback pre()' in output
assert 'callback post()' not in output
def test_bad_locale(self):
create_test_file('src.cpp', r'''
#include <locale.h>
#include <stdio.h>
#include <wctype.h>
int
main(const int argc, const char * const * const argv)
{
const char * const locale = (argc > 1 ? argv[1] : "C");
const char * const actual = setlocale(LC_ALL, locale);
if(actual == NULL) {
printf("%s locale not supported\n",
locale);
return 0;
}
printf("locale set to %s: %s\n", locale, actual);
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('locale set to C: C;C;C;C;C;C',
run_js('a.out.js', args=['C']))
self.assertContained('locale set to waka: waka;waka;waka;waka;waka;waka',
run_js('a.out.js', args=['waka']))
def test_browser_language_detection(self):
# Test HTTP Accept-Language parsing by simulating navigator.languages #8751
run_process([EMCC,
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('C.UTF-8', run_js('a.out.js'))
# Accept-Language: fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3
create_test_file('preamble.js', r'''navigator = {};
navigator.languages = [ "fr", "fr-FR", "en-US", "en" ];''')
run_process([EMCC, '--pre-js', 'preamble.js',
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('fr.UTF-8', run_js('a.out.js'))
# Accept-Language: fr-FR,fr;q=0.8,en-US;q=0.5,en;q=0.3
create_test_file('preamble.js', r'''navigator = {};
navigator.languages = [ "fr-FR", "fr", "en-US", "en" ];''')
run_process([EMCC, '--pre-js', 'preamble.js',
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('fr_FR.UTF-8', run_js('a.out.js'))
def test_js_main(self):
# try to add a main() from JS, at runtime. this is not supported (the
# compiler needs to know at compile time about main).
create_test_file('pre_main.js', r'''
var Module = {
'_main': function() {
}
};
''')
create_test_file('src.cpp', '')
run_process([EMCC, 'src.cpp', '--pre-js', 'pre_main.js'])
self.assertContained('compiled without a main, but one is present. if you added it from JS, use Module["onRuntimeInitialized"]',
run_js('a.out.js', assert_returncode=None, stderr=PIPE))
def test_js_malloc(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
EM_ASM({
for (var i = 0; i < 1000; i++) {
var ptr = Module._malloc(1024 * 1024); // only done in JS, but still must not leak
Module._free(ptr);
}
});
printf("ok.\n");
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('ok.', run_js('a.out.js', args=['C']))
def test_locale_wrong(self):
create_test_file('src.cpp', r'''
#include <locale>
#include <iostream>
#include <stdexcept>
int
main(const int argc, const char * const * const argv)
{
const char * const name = argc > 1 ? argv[1] : "C";
try {
const std::locale locale(name);
std::cout
<< "Constructed locale \"" << name << "\"\n"
<< "This locale is "
<< (locale == std::locale::global(locale) ? "" : "not ")
<< "the global locale.\n"
<< "This locale is " << (locale == std::locale::classic() ? "" : "not ")
<< "the C locale." << std::endl;
} catch(const std::runtime_error &ex) {
std::cout
<< "Can't construct locale \"" << name << "\": " << ex.what()
<< std::endl;
return 1;
} catch(...) {
std::cout
<< "FAIL: Unexpected exception constructing locale \"" << name << '\"'
<< std::endl;
return 127;
}
}
''')
run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=1', '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
self.assertContained('Constructed locale "C"\nThis locale is the global locale.\nThis locale is the C locale.', run_js('a.out.js', args=['C']))
self.assertContained('''Can't construct locale "waka": collate_byname<char>::collate_byname failed to construct for waka''', run_js('a.out.js', args=['waka'], assert_returncode=1))
def test_cleanup_os(self):
# issue 2644
def test(args, be_clean):
print(args)
self.clear()
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
create_test_file('b.c', ' ')
run_process([EMCC, 'a.c', 'b.c'] + args)
clutter = glob.glob('*.o')
if be_clean:
assert len(clutter) == 0, 'should not leave clutter ' + str(clutter)
else:
assert len(clutter) == 2, 'should leave .o files'
test(['-o', 'c.bc'], True)
test(['-o', 'c.js'], True)
test(['-o', 'c.html'], True)
test(['-c'], False)
@no_wasm_backend('asm.js debug info')
def test_js_dash_g(self):
create_test_file('src.c', '''
#include <stdio.h>
#include <assert.h>
void checker(int x) {
x += 20;
assert(x < 15); // this is line 7!
}
int main() {
checker(10);
return 0;
}
''')
def check(has):
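      # with debug info we expect //@line annotations and the source filename
      # next to the assertion-failure calls in the emitted JS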
print(has)
lines = open('a.out.js').readlines()
lines = [line for line in lines if '___assert_fail(' in line or '___assert_func(' in line]
found_line_num = any(('//@line 7 "' in line) for line in lines)
found_filename = any(('src.c"\n' in line) for line in lines)
assert found_line_num == has, 'Must have debug info with the line number'
assert found_filename == has, 'Must have debug info with the filename'
run_process([EMCC, '-s', 'WASM=0', 'src.c', '-g'])
check(True)
run_process([EMCC, '-s', 'WASM=0', 'src.c'])
check(False)
run_process([EMCC, '-s', 'WASM=0', 'src.c', '-g0'])
check(False)
run_process([EMCC, '-s', 'WASM=0', 'src.c', '-g0', '-g']) # later one overrides
check(True)
run_process([EMCC, '-s', 'WASM=0', 'src.c', '-g', '-g0']) # later one overrides
check(False)
def test_dash_g_bc(self):
def test(opts):
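      # collect bitcode sizes for the default build, -g, and -g0..-g4; only
      # -g, -g3 and -g4 should add llvm debug info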
print(opts)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a_.bc'] + opts)
sizes = {'_': os.path.getsize('a_.bc')}
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g', '-o', 'ag.bc'] + opts)
sizes['g'] = os.path.getsize('ag.bc')
for i in range(0, 5):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g' + str(i), '-o', 'a' + str(i) + '.bc'] + opts)
sizes[i] = os.path.getsize('a' + str(i) + '.bc')
print(' ', sizes)
assert sizes['_'] == sizes[0] == sizes[1] == sizes[2], 'no debug means no llvm debug info ' + str(sizes)
assert sizes['g'] == sizes[3] == sizes[4], '-g or -g4 means llvm debug info ' + str(sizes)
assert sizes['_'] < sizes['g'], 'llvm debug info has positive size ' + str(sizes)
test([])
test(['-O1'])
def test_no_filesystem(self):
FS_MARKER = 'var FS'
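    # 'var FS' appearing in the output indicates full filesystem support was
    # linked in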
# fopen forces full filesystem support
run_process([EMCC, path_from_root('tests', 'hello_world_fopen.c'), '-s', 'ASSERTIONS=0'])
yes_size = os.path.getsize('a.out.js')
self.assertContained('hello, world!', run_js('a.out.js'))
self.assertContained(FS_MARKER, open('a.out.js').read())
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS=0'])
no_size = os.path.getsize('a.out.js')
self.assertContained('hello, world!', run_js('a.out.js'))
self.assertNotContained(FS_MARKER, open('a.out.js').read())
print('yes fs, no fs:', yes_size, no_size)
# ~100K of FS code is removed
self.assertGreater(yes_size - no_size, 90000)
self.assertLess(no_size, 360000)
def test_no_filesystem_libcxx(self):
run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'FILESYSTEM=0'])
self.assertContained('hello, world!', run_js('a.out.js'))
def test_no_nuthin(self):
# check FILESYSTEM is automatically set, and effective
def test(opts, absolute):
print('opts, absolute:', opts, absolute)
sizes = {}
def do(name, source, moar_opts):
self.clear()
        # pad the name to a common length so that it doesn't affect the size
        # of the output
padded_name = name + '_' * (20 - len(name))
run_process([EMCC, path_from_root('tests', source), '-o', padded_name + '.js'] + opts + moar_opts)
sizes[name] = os.path.getsize(padded_name + '.js')
if os.path.exists(padded_name + '.wasm'):
sizes[name] += os.path.getsize(padded_name + '.wasm')
self.assertContained('hello, world!', run_js(padded_name + '.js'))
do('normal', 'hello_world_fopen.c', [])
do('no_fs', 'hello_world.c', []) # without fopen, we should auto-detect we do not need full fs support and can do FILESYSTEM=0
do('no_fs_manual', 'hello_world.c', ['-s', 'FILESYSTEM=0'])
print(' ', sizes)
self.assertLess(sizes['no_fs'], sizes['normal'])
self.assertLess(sizes['no_fs'], absolute)
# manual can usually remove a tiny bit more
self.assertLess(sizes['no_fs_manual'], sizes['no_fs'] + 30)
test(['-s', 'ASSERTIONS=0'], 120000) # we don't care about code size with assertions
test(['-O1'], 91000)
test(['-O2'], 46000)
test(['-O3', '--closure', '1'], 17000)
# asm.js too
if not self.is_wasm_backend():
test(['-O3', '--closure', '1', '-s', 'WASM=0'], 36000)
test(['-O3', '--closure', '2', '-s', 'WASM=0'], 33000) # might change now and then
def test_no_browser(self):
BROWSER_INIT = 'var Browser'
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
self.assertNotContained(BROWSER_INIT, open('a.out.js').read())
run_process([EMCC, path_from_root('tests', 'browser_main_loop.c')]) # uses emscripten_set_main_loop, which needs Browser
self.assertContained(BROWSER_INIT, open('a.out.js').read())
def test_EXPORTED_RUNTIME_METHODS(self):
def test(opts, has, not_has):
print(opts, has, not_has)
self.clear()
# check without assertions, as with assertions we add stubs for the things we remove (which
# print nice error messages)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS=0'] + opts)
self.assertContained('hello, world!', run_js('a.out.js'))
src = open('a.out.js').read()
self.assertContained(has, src)
self.assertNotContained(not_has, src)
test([], 'Module["', 'Module["waka')
test(['-s', 'EXPORTED_RUNTIME_METHODS=[]'], '', 'Module["addRunDependency')
test(['-s', 'EXPORTED_RUNTIME_METHODS=["addRunDependency"]'], 'Module["addRunDependency', 'Module["waka')
test(['-s', 'EXPORTED_RUNTIME_METHODS=[]', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["addRunDependency"]'], 'Module["addRunDependency', 'Module["waka')
def test_stat_fail_alongtheway(self):
create_test_file('src.cpp', r'''
#include <errno.h>
#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#define CHECK(expression) \
if(!(expression)) { \
error = errno; \
printf("FAIL: %s\n", #expression); fail = 1; \
} else { \
error = errno; \
printf("pass: %s\n", #expression); \
} \
int
main()
{
int error;
int fail = 0;
CHECK(mkdir("path", 0777) == 0);
CHECK(close(open("path/file", O_CREAT | O_WRONLY, 0644)) == 0);
{
struct stat st;
CHECK(stat("path", &st) == 0);
CHECK(st.st_mode = 0777);
}
{
struct stat st;
CHECK(stat("path/nosuchfile", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOENT);
}
{
struct stat st;
CHECK(stat("path/file", &st) == 0);
CHECK(st.st_mode = 0666);
}
{
struct stat st;
CHECK(stat("path/file/impossible", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOTDIR);
}
{
struct stat st;
CHECK(lstat("path/file/impossible", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOTDIR);
}
return fail;
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained(r'''pass: mkdir("path", 0777) == 0
pass: close(open("path/file", O_CREAT | O_WRONLY, 0644)) == 0
pass: stat("path", &st) == 0
pass: st.st_mode = 0777
pass: stat("path/nosuchfile", &st) == -1
info: errno=44 No such file or directory
pass: error == ENOENT
pass: stat("path/file", &st) == 0
pass: st.st_mode = 0666
pass: stat("path/file/impossible", &st) == -1
info: errno=54 Not a directory
pass: error == ENOTDIR
pass: lstat("path/file/impossible", &st) == -1
info: errno=54 Not a directory
pass: error == ENOTDIR
''', run_js('a.out.js'))
def test_link_with_a_static(self):
create_test_file('x.c', r'''
int init_weakref(int a, int b) {
return a + b;
}
''')
create_test_file('y.c', r'''
static int init_weakref(void) { // inlined in -O2, not in -O0 where it shows up in llvm-nm as 't'
return 150;
}
int testy(void) {
return init_weakref();
}
''')
create_test_file('z.c', r'''
extern int init_weakref(int, int);
extern int testy(void);
int main(void) {
return testy() + init_weakref(5, 6);
}
''')
run_process([EMCC, 'x.c', '-o', 'x.o'])
run_process([EMCC, 'y.c', '-o', 'y.o'])
run_process([EMCC, 'z.c', '-o', 'z.o'])
try_delete('libtest.a')
run_process([EMAR, 'rc', 'libtest.a', 'y.o'])
run_process([EMAR, 'rc', 'libtest.a', 'x.o'])
run_process([EMRANLIB, 'libtest.a'])
for args in [[], ['-O2']]:
print('args:', args)
run_process([EMCC, 'z.o', 'libtest.a', '-s', 'EXIT_RUNTIME=1'] + args)
run_js('a.out.js', assert_returncode=161)
def test_link_with_bad_o_in_a(self):
# when building a .a, we force-include all the objects inside it. but, some
# may not be valid bitcode, e.g. if it contains metadata or something else
# weird. we should just ignore those
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-o', 'hello_world.o'])
create_test_file('bad.obj', 'this is not a good file, it should be ignored!')
run_process([LLVM_AR, 'cr', 'libfoo.a', 'hello_world.o', 'bad.obj'])
run_process([EMCC, 'libfoo.a'])
self.assertContained('hello, world!', run_js('a.out.js'))
def test_require(self):
inname = path_from_root('tests', 'hello_world.c')
building.emcc(inname, args=['-s', 'ASSERTIONS=0'], output_filename='a.out.js')
output = run_process(NODE_JS + ['-e', 'require("./a.out.js")'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected no output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
def test_require_modularize(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'ASSERTIONS=0'])
src = open('a.out.js').read()
self.assertContained('module.exports = Module;', src)
output = run_process(NODE_JS + ['-e', 'var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\n')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME="NotModule"', '-s', 'ASSERTIONS=0'])
src = open('a.out.js').read()
self.assertContained('module.exports = NotModule;', src)
output = run_process(NODE_JS + ['-e', 'var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\n')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1'])
# We call require() twice to ensure it returns the wrapper function each time
output = run_process(NODE_JS + ['-e', 'require("./a.out.js")();var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\nhello, world!\n')
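# Hedged Python analogy (names hypothetical, not emscripten API) for the
# MODULARIZE contract checked above: the export is a factory, so each
# require()+call pair must produce an independent, freshly-initialized instance.
def make_module_factory():
  def factory():
    return {'initialized': True}  # fresh state on every invocation
  return factory

factory = make_module_factory()
assert factory() is not factory()  # two calls, two independent instances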
def test_define_modularize(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'ASSERTIONS=0'])
with open('a.out.js') as f:
src = 'var module = 0; ' + f.read()
create_test_file('a.out.js', src)
assert "define([], function() { return Module; });" in src
output = run_process(NODE_JS + ['-e', 'var m; (global.define = function(deps, factory) { m = factory(); }).amd = true; require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME="NotModule"', '-s', 'ASSERTIONS=0'])
with open('a.out.js') as f:
src = 'var module = 0; ' + f.read()
create_test_file('a.out.js', src)
assert "define([], function() { return NotModule; });" in src
output = run_process(NODE_JS + ['-e', 'var m; (global.define = function(deps, factory) { m = factory(); }).amd = true; require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
def test_EXPORT_NAME_with_html(self):
result = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.html', '-s', 'EXPORT_NAME=Other'], stdout=PIPE, check=False, stderr=STDOUT)
self.assertNotEqual(result.returncode, 0)
self.assertContained('Customizing EXPORT_NAME requires that the HTML be customized to use that name', result.stdout)
@no_wasm_backend('tests fastcomp specific passes')
def test_emcc_c_multi(self):
def test(args, llvm_opts=None):
print(args)
lib = r'''
int mult() { return 1; }
'''
lib_name = 'libA.c'
create_test_file(lib_name, lib)
main = r'''
#include <stdio.h>
int mult();
int main() {
printf("result: %d\n", mult());
return 0;
}
'''
main_name = 'main.c'
create_test_file(main_name, main)
err = run_process([EMCC, '-v', '-c', main_name, lib_name] + args, stderr=PIPE).stderr
VECTORIZE = '-disable-loop-vectorization'
if args:
assert err.count(VECTORIZE) == 2, err # specified twice, once per file
# corresponding to exactly one invocation of the optimizer per file
assert err.count(os.path.sep + 'opt') == 2, err
else:
assert err.count(VECTORIZE) == 0, err # no optimizations
run_process([EMCC, main_name.replace('.c', '.o'), lib_name.replace('.c', '.o')])
self.assertContained('result: 1', run_js('a.out.js'))
test([])
test(['-O2'], '-O3')
test(['-Oz'], '-Oz')
test(['-Os'], '-Os')
def test_export_all_3142(self):
create_test_file('src.cpp', r'''
typedef unsigned int Bit32u;
struct S_Descriptor {
Bit32u limit_0_15 :16;
Bit32u base_0_15 :16;
Bit32u base_16_23 :8;
};
class Descriptor
{
public:
Descriptor() { saved.fill[0]=saved.fill[1]=0; }
union {
S_Descriptor seg;
Bit32u fill[2];
} saved;
};
Descriptor desc;
''')
try_delete('a.out.js')
run_process([EMCC, 'src.cpp', '-O2', '-s', 'EXPORT_ALL'])
self.assertExists('a.out.js')
@no_wasm_backend('tests PRECISE_F32=1')
def test_f0(self):
run_process([EMCC, path_from_root('tests', 'fasta.cpp'), '-O2', '-s', 'PRECISE_F32=1', '-profiling', '-s', 'WASM=0'])
src = open('a.out.js').read()
assert ' = f0;' in src or ' = f0,' in src
def test_emmake_emconfigure(self):
def check(what, args, fail=True, expect=''):
args = [what] + args
print(what, args, fail, expect)
output = run_process(args, stdout=PIPE, stderr=PIPE, check=False)
assert ('is a helper for' in output.stderr) == fail
assert ('Typical usage' in output.stderr) == fail
self.assertContained(expect, output.stdout)
check(emmake, [])
check(emconfigure, [])
check(emmake, ['--version'])
check(emconfigure, ['--version'])
check(emmake, ['make'], fail=False)
check(emconfigure, ['configure'], fail=False)
check(emconfigure, ['./configure'], fail=False)
check(emcmake, ['cmake'], fail=False)
create_test_file('test.py', '''
import os
print(os.environ.get('CROSS_COMPILE'))
''')
check(emconfigure, [PYTHON, 'test.py'], expect=path_from_root('em'), fail=False)
check(emmake, [PYTHON, 'test.py'], expect=path_from_root('em'), fail=False)
create_test_file('test.py', '''
import os
print(os.environ.get('NM'))
''')
check(emconfigure, [PYTHON, 'test.py'], expect=shared.LLVM_NM, fail=False)
def test_emmake_python(self):
# simulates a configure/make script that looks for things like CC, AR, etc., and
# which we should not confuse by setting those vars to something containing
# `python X`, since the script checks for the existence of an executable
run_process([emmake, PYTHON, path_from_root('tests', 'emmake', 'make.py')])
def test_sdl2_config(self):
for args, expected in [
[['--version'], '2.0.0'],
[['--cflags'], '-s USE_SDL=2'],
[['--libs'], '-s USE_SDL=2'],
[['--cflags', '--libs'], '-s USE_SDL=2'],
]:
print(args, expected)
out = run_process([PYTHON, path_from_root('system', 'bin', 'sdl2-config')] + args, stdout=PIPE, stderr=PIPE).stdout
assert expected in out, out
print('via emmake')
out = run_process([emmake, 'sdl2-config'] + args, stdout=PIPE, stderr=PIPE).stdout
assert expected in out, out
def test_module_onexit(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
Module['onExit'] = function(status) { out('exiting now, status ' + status) };
});
return 14;
}
''')
try_delete('a.out.js')
run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=1'])
self.assertContained('exiting now, status 14', run_js('a.out.js', assert_returncode=14))
def test_NO_aliasing(self):
# the NO_ prefix flips boolean options
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXIT_RUNTIME=1'])
exit_1 = open('a.out.js').read()
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'NO_EXIT_RUNTIME=0'])
no_exit_0 = open('a.out.js').read()
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXIT_RUNTIME=0'])
exit_0 = open('a.out.js').read()
assert exit_1 == no_exit_0
assert exit_1 != exit_0
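# Minimal sketch (a simplified assumption, not emcc's actual option parser) of
# the NO_-prefix flip verified above: -s NO_X=v acts like -s X=1-v for booleans.
def normalize_setting(key, value):
  if key.startswith('NO_'):
    return key[len('NO_'):], 1 - value
  return key, value

assert normalize_setting('NO_EXIT_RUNTIME', 0) == ('EXIT_RUNTIME', 1)
assert normalize_setting('EXIT_RUNTIME', 1) == ('EXIT_RUNTIME', 1)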
def test_underscore_exit(self):
create_test_file('src.cpp', r'''
#include <unistd.h>
int main() {
_exit(0); // should not end up in an infinite loop with non-underscore exit
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('', run_js('a.out.js', assert_returncode=0))
def test_file_packager_huge(self):
MESSAGE = 'warning: file packager is creating an asset bundle of 257 MB. this is very large, and browsers might have trouble loading it'
create_test_file('huge.dat', 'a' * (1024 * 1024 * 257))
create_test_file('tiny.dat', 'a')
err = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'tiny.dat'], stdout=PIPE, stderr=PIPE).stderr
self.assertNotContained(MESSAGE, err)
err = run_process([PYTHON, FILE_PACKAGER, 'test.data', '--preload', 'huge.dat'], stdout=PIPE, stderr=PIPE).stderr
self.assertContained(MESSAGE, err)
self.clear()
def test_massive_alloc(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
volatile int x = (int)malloc(1024 * 1024 * 1400);
return x == 0; // can't alloc it, but don't fail catastrophically, expect null
}
''')
run_process([EMCC, 'main.cpp', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'WASM=0'])
# just care about message regarding allocating over 1GB of memory
output = run_js('a.out.js', stderr=PIPE, full_output=True)
if self.is_wasm_backend():
self.assertContained('''Warning: Enlarging memory arrays, this is not fast! 16777216,1473314816\n''', output)
else:
self.assertContained('''Warning: Enlarging memory arrays, this is not fast! 16777216,1476395008\n''', output)
print('wasm')
run_process([EMCC, 'main.cpp', '-s', 'ALLOW_MEMORY_GROWTH=1'])
# no message about growth, just check return code
run_js('a.out.js', stderr=PIPE, full_output=True)
def test_failing_alloc(self):
for pre_fail, post_fail, opts in [
('', '', []),
('EM_ASM( Module.temp = HEAP32[DYNAMICTOP_PTR>>2] );', 'EM_ASM( assert(Module.temp === HEAP32[DYNAMICTOP_PTR>>2], "must not adjust DYNAMICTOP when an alloc fails!") );', []),
# also test non-wasm in normal mode
('', '', ['-s', 'WASM=0']),
('EM_ASM( Module.temp = HEAP32[DYNAMICTOP_PTR>>2] );', 'EM_ASM( assert(Module.temp === HEAP32[DYNAMICTOP_PTR>>2], "must not adjust DYNAMICTOP when an alloc fails!") );', ['-s', 'WASM=0']),
]:
for growth in [0, 1]:
for aborting_args in [[], ['-s', 'ABORTING_MALLOC=0'], ['-s', 'ABORTING_MALLOC=1']]:
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <assert.h>
#include <emscripten.h>
#define CHUNK_SIZE (10 * 1024 * 1024)
int main() {
std::vector<void*> allocs;
bool has = false;
while (1) {
printf("trying an allocation\n");
%s
void* curr = malloc(CHUNK_SIZE);
if (!curr) {
%s
break;
}
has = true;
printf("allocated another chunk, %%zu so far\n", allocs.size());
allocs.push_back(curr);
}
assert(has);
printf("an allocation failed!\n");
#ifdef SPLIT
return 0;
#endif
while (1) {
assert(allocs.size() > 0);
void *curr = allocs.back();
allocs.pop_back();
free(curr);
printf("freed one\n");
if (malloc(CHUNK_SIZE)) break;
}
printf("managed another malloc!\n");
}
''' % (pre_fail, post_fail))
args = [EMCC, 'main.cpp'] + opts + aborting_args
args += ['-s', 'TEST_MEMORY_GROWTH_FAILS=1'] # In this test, force memory growing to fail
if growth:
args += ['-s', 'ALLOW_MEMORY_GROWTH=1']
# growth disables aborting by default, but it can be overridden
aborting = 'ABORTING_MALLOC=1' in aborting_args or (not aborting_args and not growth)
print('test_failing_alloc', args, pre_fail)
run_process(args)
# growth also disables aborting
can_manage_another = not aborting
split = '-DSPLIT' in args
print('can manage another:', can_manage_another, 'split:', split, 'aborting:', aborting)
output = run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=0 if can_manage_another else None)
if can_manage_another:
self.assertContained('an allocation failed!\n', output)
if not split:
# split memory allocation may fail due to GC objects no longer being allocatable,
# and we can't expect to recover from that deterministically, so in the split
# case just check that we reached the failure. otherwise, we should fail
# eventually, then free, then succeed:
self.assertContained('managed another malloc!\n', output)
else:
# we should see an abort
self.assertContained('abort(Cannot enlarge memory arrays', output)
if growth:
# when growth is enabled, the default is to not abort, so just explain that
self.assertContained('If you want malloc to return NULL (0) instead of this abort, do not link with -s ABORTING_MALLOC=1', output)
else:
# when growth is not enabled, suggest 3 possible solutions (start with more memory, allow growth, or don't abort)
self.assertContained(('higher than the current value 16777216,', 'higher than the current value 33554432,'), output)
self.assertContained('compile with -s ALLOW_MEMORY_GROWTH=1 ', output)
self.assertContained('compile with -s ABORTING_MALLOC=0 ', output)
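# Sketch of the decision table encoded in the 'aborting' expression above:
# an explicit ABORTING_MALLOC=1 always aborts; with no explicit flag, memory
# growth flips the default from aborting to returning NULL.
def malloc_aborts(aborting_args, growth):
  if 'ABORTING_MALLOC=1' in aborting_args:
    return True
  return not aborting_args and not growth

assert malloc_aborts([], growth=0) is True    # default: abort on OOM
assert malloc_aborts([], growth=1) is False   # growth disables aborting
assert malloc_aborts(['-s', 'ABORTING_MALLOC=0'], growth=0) is False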
def test_failing_growth_2gb(self):
create_test_file('test.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
void* out;
int main() {
while (1) {
puts("loop...");
out = malloc(1024 * 1024);
if (!out) {
puts("done");
return 0;
}
}
}
''')
run_process([EMCC, '-O1', 'test.cpp', '-s', 'ALLOW_MEMORY_GROWTH'])
self.assertContained('done', run_js('a.out.js'))
def test_libcxx_minimal(self):
create_test_file('vector.cpp', r'''
#include <vector>
int main(int argc, char** argv) {
std::vector<void*> v;
for (int i = 0 ; i < argc; i++) {
v.push_back(nullptr);
}
return v.size();
}
''')
run_process([EMCC, '-O2', 'vector.cpp', '-o', 'vector.js'])
run_process([EMCC, '-O2', path_from_root('tests', 'hello_libcxx.cpp'), '-o', 'iostream.js'])
vector = os.path.getsize('vector.js')
iostream = os.path.getsize('iostream.js')
print(vector, iostream)
self.assertGreater(vector, 1000)
# we can strip out almost all of libcxx when just using vector
self.assertLess(2.25 * vector, iostream)
@no_wasm_backend('relies on EMULATED_FUNCTION_POINTERS')
def test_emulated_function_pointers(self):
create_test_file('src.c', r'''
#include <emscripten.h>
typedef void (*fp)();
int main(int argc, char **argv) {
volatile fp f = 0;
EM_ASM({
if (typeof FUNCTION_TABLE_v !== 'undefined') {
out('function table: ' + FUNCTION_TABLE_v);
} else {
out('no visible function tables');
}
});
if (f) f();
return 0;
}
''')
def test(args, expected):
print(args, expected)
run_process([EMCC, 'src.c', '-s', 'WASM=0'] + args, stderr=PIPE)
self.assertContained(expected, run_js('a.out.js'))
for opts in [0, 1, 2, 3]:
test(['-O' + str(opts)], 'no visible function tables')
test(['-O' + str(opts), '-s', 'EMULATED_FUNCTION_POINTERS=1'], 'function table: ')
@no_wasm_backend('relies on EMULATED_FUNCTION_POINTERS')
def test_emulated_function_pointers_2(self):
create_test_file('src.c', r'''
#include <emscripten.h>
typedef void (*fp)();
static void one() { EM_ASM( out('one') ); }
static void two() { EM_ASM( out('two') ); }
void test() {
volatile fp f = one;
f();
f = two;
f();
}
int main(int argc, char **argv) {
test();
// swap them!
EM_ASM_INT({
var one = $0;
var two = $1;
if (typeof FUNCTION_TABLE_v === 'undefined') {
out('no');
return;
}
var temp = FUNCTION_TABLE_v[one];
FUNCTION_TABLE_v[one] = FUNCTION_TABLE_v[two];
FUNCTION_TABLE_v[two] = temp;
}, (int)&one, (int)&two);
test();
return 0;
}
''')
flipped = 'one\ntwo\ntwo\none\n'
unchanged = 'one\ntwo\none\ntwo\n'
no_table = 'one\ntwo\nno\none\ntwo\n'
def test(args, expected):
print(args, expected.replace('\n', ' '))
run_process([EMCC, 'src.c', '-s', 'WASM=0'] + args)
self.assertContained(expected, run_js('a.out.js'))
for opts in [0, 1, 2]:
test(['-O' + str(opts)], no_table)
test(['-O' + str(opts), '-s', 'EMULATED_FUNCTION_POINTERS=1'], flipped)
test(['-O' + str(opts), '-s', 'EMULATED_FUNCTION_POINTERS=2'], flipped)
test(['-O' + str(opts), '-s', 'EMULATED_FUNCTION_POINTERS=1', '-s', 'RELOCATABLE=1'], flipped)
test(['-O' + str(opts), '-s', 'EMULATED_FUNCTION_POINTERS=2', '-s', 'RELOCATABLE=1'], unchanged) # with both of those, we optimize and you cannot flip them
test(['-O' + str(opts), '-s', 'MAIN_MODULE=1'], unchanged) # default for modules is optimized
test(['-O' + str(opts), '-s', 'MAIN_MODULE=1', '-s', 'EMULATED_FUNCTION_POINTERS=2'], unchanged)
test(['-O' + str(opts), '-s', 'MAIN_MODULE=1', '-s', 'EMULATED_FUNCTION_POINTERS=1'], flipped) # but you can disable that
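# Python analogy (a sketch, not the generated JS) of the table swap the
# EM_ASM block above performs: with an emulated, mutable function table,
# swapping two slots redirects subsequent indirect calls.
table = {1: lambda: 'one', 2: lambda: 'two'}

def call_indirect(index):
  return table[index]()

assert call_indirect(1) == 'one'
table[1], table[2] = table[2], table[1]  # the swap EM_ASM does above
assert call_indirect(1) == 'two'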
def test_minimal_dynamic(self):
def run(wasm):
print('wasm?', wasm)
library_file = 'library.wasm' if wasm else 'library.js'
def test(main_args, library_args=[], expected='hello from main\nhello from library'):
print('testing', main_args, library_args)
self.clear()
create_test_file('library.c', r'''
#include <stdio.h>
void library_func() {
#ifdef USE_PRINTF
printf("hello from library: %p\n", &library_func);
#else
puts("hello from library");
#endif
}
''')
# -fno-builtin to prevent printf -> iprintf optimization
run_process([EMCC, 'library.c', '-fno-builtin', '-s', 'SIDE_MODULE=1', '-O2', '-o', library_file, '-s', 'WASM=' + str(wasm), '-s', 'EXPORT_ALL'] + library_args)
create_test_file('main.c', r'''
#include <dlfcn.h>
#include <stdio.h>
int main() {
puts("hello from main");
void *lib_handle = dlopen("%s", 0);
if (!lib_handle) {
puts("cannot load side module");
return 1;
}
typedef void (*voidfunc)();
voidfunc x = (voidfunc)dlsym(lib_handle, "library_func");
if (!x) puts("cannot find side function");
else x();
}
''' % library_file)
run_process([EMCC, 'main.c', '--embed-file', library_file, '-O2', '-s', 'WASM=' + str(wasm)] + main_args)
self.assertContained(expected, run_js('a.out.js', assert_returncode=None, stderr=STDOUT))
size = os.path.getsize('a.out.js')
if wasm:
size += os.path.getsize('a.out.wasm')
side_size = os.path.getsize(library_file)
print(' sizes:', size, side_size)
return (size, side_size)
def percent_diff(x, y):
small = min(x, y)
large = max(x, y)
return float(100 * large) / small - 100
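# Hypothetical sanity checks (not in the original): percent_diff is symmetric
# and reports the gap as a percentage of the smaller value.
assert percent_diff(100, 104) == percent_diff(104, 100)
assert abs(percent_diff(100, 104) - 4.0) < 1e-9  # 104 is 4% above 100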
full = test(main_args=['-s', 'MAIN_MODULE=1'])
# printf is not used in main, but libc was linked in, so it's there
printf = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-DUSE_PRINTF'])
# main module tests
# dce in main, and it fails since puts is not exported
dce = test(main_args=['-s', 'MAIN_MODULE=2'], expected=('cannot', 'undefined'))
# with exporting, it works
dce = test(main_args=['-s', 'MAIN_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_main", "_puts"]'])
# printf is not used in main, and we dce, so we fail
dce_fail = test(main_args=['-s', 'MAIN_MODULE=2'], library_args=['-DUSE_PRINTF'], expected=('cannot', 'undefined'))
# exporting printf in main keeps it alive for the library
dce_save = test(main_args=['-s', 'MAIN_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_main", "_printf", "_puts"]'], library_args=['-DUSE_PRINTF'])
self.assertLess(percent_diff(full[0], printf[0]), 4)
self.assertLess(percent_diff(dce[0], dce_fail[0]), 4)
self.assertLess(dce[0], 0.2 * full[0]) # big effect, 80%+ is gone
self.assertGreater(dce_save[0], 1.05 * dce[0]) # dce_save exports printf, which keeps it alive for the library
# side module tests
# mode 2, so dce in side, but library_func is not exported, so it is dce'd
side_dce_fail = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-s', 'SIDE_MODULE=2'], expected='cannot find side function')
# mode 2, so dce in side, but library_func is exported, so it survives
side_dce_work = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-s', 'SIDE_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_library_func"]'], expected='hello from library')
self.assertLess(side_dce_fail[1], 0.95 * side_dce_work[1]) # removing that function saves a chunk
run(wasm=1)
if not self.is_wasm_backend():
run(wasm=0)
def test_ld_library_path(self):
create_test_file('hello1.c', r'''
#include <stdio.h>
void
hello1 ()
{
printf ("Hello1\n");
return;
}
''')
create_test_file('hello2.c', r'''
#include <stdio.h>
void
hello2 ()
{
printf ("Hello2\n");
return;
}
''')
create_test_file('hello3.c', r'''
#include <stdio.h>
void
hello3 ()
{
printf ("Hello3\n");
return;
}
''')
create_test_file('hello4.c', r'''
#include <stdio.h>
#include <math.h>
double
hello4 (double x)
{
printf ("Hello4\n");
return fmod(x, 2.0);
}
''')
create_test_file('pre.js', r'''
Module['preRun'].push(function (){
ENV['LD_LIBRARY_PATH']='/lib:/usr/lib';
});
''')
create_test_file('main.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
int
main()
{
void *h;
void (*f) ();
double (*f2) (double);
h = dlopen ("libhello1.wasm", RTLD_NOW);
f = dlsym (h, "hello1");
f();
dlclose (h);
h = dlopen ("libhello2.wasm", RTLD_NOW);
f = dlsym (h, "hello2");
f();
dlclose (h);
h = dlopen ("libhello3.wasm", RTLD_NOW);
f = dlsym (h, "hello3");
f();
dlclose (h);
h = dlopen ("/usr/local/lib/libhello4.wasm", RTLD_NOW);
f2 = dlsym (h, "hello4");
double result = f2(5.5);
dlclose (h);
if (result == 1.5) {
printf("Ok\n");
}
return 0;
}
''')
run_process([EMCC, '-o', 'libhello1.wasm', 'hello1.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'libhello2.wasm', 'hello2.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'libhello3.wasm', 'hello3.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'libhello4.wasm', 'hello4.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'main.js', 'main.c', '-s', 'MAIN_MODULE=1', '-s', 'INITIAL_MEMORY=' + str(32 * 1024 * 1024),
'--embed-file', 'libhello1.wasm@/lib/libhello1.wasm',
'--embed-file', 'libhello2.wasm@/usr/lib/libhello2.wasm',
'--embed-file', 'libhello3.wasm@/libhello3.wasm',
'--embed-file', 'libhello4.wasm@/usr/local/lib/libhello4.wasm',
'--pre-js', 'pre.js'])
out = run_js('main.js')
self.assertContained('Hello1', out)
self.assertContained('Hello2', out)
self.assertContained('Hello3', out)
self.assertContained('Hello4', out)
self.assertContained('Ok', out)
def test_dlopen_rtld_global(self):
# This test checks RTLD_GLOBAL when a module is loaded before the module
# that provides a global it needs. In asm.js we use JS to create a
# redirection function. In wasm we just have wasm, so we would need to
# introspect the wasm module; browsers may add that eventually, or we
# could ship a little library that does it.
create_test_file('hello1.c', r'''
#include <stdio.h>
extern int hello1_val;
int hello1_val=3;
void
hello1 (int i)
{
printf ("hello1_val by hello1:%d\n",hello1_val);
printf ("Hello%d\n",i);
}
''')
create_test_file('hello2.c', r'''
#include <stdio.h>
extern int hello1_val;
extern void hello1 (int);
void
hello2 (int i)
{
void (*f) (int);
printf ("hello1_val by hello2:%d\n",hello1_val);
f = hello1;
f(i);
}
''')
create_test_file('main.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
int
main(int argc,char** argv)
{
void *h;
void *h2;
void (*f) (int);
h = dlopen ("libhello1.wasm", RTLD_NOW|RTLD_GLOBAL);
h2 = dlopen ("libhello2.wasm", RTLD_NOW|RTLD_GLOBAL);
f = dlsym (h, "hello1");
f(1);
f = dlsym (h2, "hello2");
f(2);
dlclose (h);
dlclose (h2);
return 0;
}
''')
run_process([EMCC, '-o', 'libhello1.js', 'hello1.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'libhello2.js', 'hello2.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
run_process([EMCC, '-o', 'main.js', 'main.c', '-s', 'MAIN_MODULE=1',
'--embed-file', 'libhello1.wasm',
'--embed-file', 'libhello2.wasm'])
out = run_js('main.js')
self.assertContained('Hello1', out)
self.assertContained('Hello2', out)
self.assertContained('hello1_val by hello1:3', out)
self.assertContained('hello1_val by hello2:3', out)
@no_fastcomp()
def test_main_module_without_exceptions_message(self):
# A side module that needs exceptions needs a main module with that
# support enabled; show a clear message in that case.
create_test_file('side.cpp', r'''
#include <exception>
#include <stdio.h>
extern "C" void test_throw() {
try {
throw 42;
} catch(int x) {
printf("catch %d.\n", x);
return;
}
puts("bad location");
}
''')
create_test_file('main.cpp', r'''
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
typedef void (*voidf)();
int main() {
void* h = dlopen ("libside.wasm", RTLD_NOW|RTLD_GLOBAL);
assert(h);
voidf f = (voidf)dlsym(h, "test_throw");
assert(f);
f();
return 0;
}
''')
run_process([EMCC, '-o', 'libside.wasm', 'side.cpp', '-s', 'SIDE_MODULE=1', '-fexceptions'])
def build_main(args):
print(args)
with env_modify({'EMCC_FORCE_STDLIBS': 'libc++abi'}):
run_process([EMCC, 'main.cpp', '-s', 'MAIN_MODULE=1',
'--embed-file', 'libside.wasm'] + args)
build_main([])
out = run_js('a.out.js', assert_returncode=None, stderr=STDOUT)
self.assertContained('Exception catching is disabled, this exception cannot be caught.', out)
self.assertContained('note: in dynamic linking, if a side module wants exceptions, the main module must be built with that support', out)
build_main(['-fexceptions'])
out = run_js('a.out.js')
self.assertContained('catch 42', out)
def test_debug_asmLastOpts(self):
create_test_file('src.c', r'''
#include <stdio.h>
struct Dtlink_t
{ struct Dtlink_t* right; /* right child */
union
{ unsigned int _hash; /* hash value */
struct Dtlink_t* _left; /* left child */
} hl;
};
int treecount(register struct Dtlink_t* e)
{
return e ? treecount(e->hl._left) + treecount(e->right) + 1 : 0;
}
int main() {
printf("hello, world!\n");
}
''')
run_process([EMCC, 'src.c', '-s', 'EXPORTED_FUNCTIONS=["_main", "_treecount"]', '--minify', '0', '-g4', '-Oz'])
self.assertContained('hello, world!', run_js('a.out.js'))
@no_wasm_backend('MEM_INIT_METHOD not supported under wasm')
def test_meminit_crc(self):
create_test_file('src.c', r'''
#include <stdio.h>
int main() { printf("Mary had a little lamb.\n"); }
''')
run_process([EMCC, 'src.c', '--memory-init-file', '0', '-s', 'MEM_INIT_METHOD=2', '-s', 'ASSERTIONS=1', '-s', 'WASM=0'])
with open('a.out.js') as f:
d = f.read()
self.assertContained('Mary had', d)
d = d.replace('Mary had', 'Paul had')
create_test_file('a.out.js', d)
out = run_js('a.out.js', assert_returncode=None, stderr=STDOUT)
self.assertContained('Assertion failed: memory initializer checksum', out)
def test_emscripten_print_double(self):
create_test_file('src.c', r'''
#include <stdio.h>
#include <assert.h>
#include <emscripten.h>
void test(double d) {
char buffer[100], buffer2[100];
unsigned len, len2, len3;
len = emscripten_print_double(d, NULL, -1);
len2 = emscripten_print_double(d, buffer, len+1);
assert(len == len2);
buffer[len] = 0;
len3 = snprintf(buffer2, 100, "%g", d);
printf("|%g : %u : %s : %s : %d|\n", d, len, buffer, buffer2, len3);
}
int main() {
printf("\n");
test(0);
test(1);
test(-1);
test(1.234);
test(-1.234);
test(1.1234E20);
test(-1.1234E20);
test(1.1234E-20);
test(-1.1234E-20);
test(1.0/0.0);
test(-1.0/0.0);
}
''')
run_process([EMCC, 'src.c'])
out = run_js('a.out.js')
self.assertContained('''
|0 : 1 : 0 : 0 : 1|
|1 : 1 : 1 : 1 : 1|
|-1 : 2 : -1 : -1 : 2|
|1.234 : 5 : 1.234 : 1.234 : 5|
|-1.234 : 6 : -1.234 : -1.234 : 6|
|1.1234e+20 : 21 : 112340000000000000000 : 1.1234e+20 : 10|
|-1.1234e+20 : 22 : -112340000000000000000 : -1.1234e+20 : 11|
|1.1234e-20 : 10 : 1.1234e-20 : 1.1234e-20 : 10|
|-1.1234e-20 : 11 : -1.1234e-20 : -1.1234e-20 : 11|
|inf : 8 : Infinity : inf : 3|
|-inf : 9 : -Infinity : -inf : 4|
''', out)
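# Python analogy (names hypothetical) of the measure-then-write pattern the
# C test above exercises: a None/NULL buffer queries the required length, and
# the second call must emit exactly that many characters.
def print_double_like(value, buf):
  s = '%g' % value
  if buf is not None:
    buf.extend(s)  # write mode: append the characters
  return len(s)    # either way, report the length

out = []
n = print_double_like(1.234, None)  # query the needed space first
assert n == print_double_like(1.234, out) == len(out)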
def test_emscripten_scan_stack(self):
create_test_file('src.cpp', r'''
#include <set>
#include <emscripten.h>
#include <stdio.h>
#include <assert.h>
std::set<int> seenInts;
void scan(void* x, void* y) {
printf("scan\n");
int* p = (int*)x;
int* q = (int*)y;
// The callback sends us the [low, high) range.
assert(p < q);
// The range is of a reasonable size - not all of memory.
assert(q - p < 100);
while (p < q) {
seenInts.insert(*p);
p++;
}
}
int main() {
int x;
int* y = &x;
*y = 12345678;
emscripten_scan_stack(scan);
assert(seenInts.count(12345678));
puts("ok");
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('ok', run_js('a.out.js'))
def test_no_warn_exported_jslibfunc(self):
err = run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=["alGetError"]',
'-s', 'EXPORTED_FUNCTIONS=["_main", "_alGetError"]'], stderr=PIPE).stderr
self.assertNotContained('function requested to be exported, but not implemented: "_alGetError"', err)
@no_wasm_backend()
def test_almost_asm_warning(self):
def run(args, expected):
print(args, expected)
err = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0'] + args, stderr=PIPE).stderr
if expected:
self.assertContained('[-Walmost-asm]', err)
else:
self.assertEqual(err, '')
run(['-O1', '-s', 'ALLOW_MEMORY_GROWTH=1'], True)  # default
# suppress almost-asm warning manually
run(['-O1', '-s', 'ALLOW_MEMORY_GROWTH=1', '-Wno-almost-asm'], False)
# last warning flag should "win"
run(['-O1', '-s', 'ALLOW_MEMORY_GROWTH=1', '-Wno-almost-asm', '-Walmost-asm'], True)
def test_musl_syscalls(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
src = open('a.out.js').read()
# there should be no musl syscalls in hello world output
self.assertNotContained('__syscall', src)
@no_windows('posix-only')
def test_emcc_dev_null(self):
out = run_process([EMCC, '-dM', '-E', '-x', 'c', '/dev/null'], stdout=PIPE).stdout
self.assertContained('#define __EMSCRIPTEN__ 1', out) # all our defines should show up
def test_umask_0(self):
create_test_file('src.c', r'''
#include <sys/stat.h>
#include <stdio.h>
int main() {
umask(0);
printf("hello, world!\n");
}''')
run_process([EMCC, 'src.c'])
self.assertContained('hello, world!', run_js('a.out.js'))
def test_no_missing_symbols(self): # simple hello world should not show any missing symbols
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
# main() is implemented in C, and even if requested from JS, we should not warn
create_test_file('library_foo.js', '''
mergeInto(LibraryManager.library, {
my_js__deps: ['main'],
my_js: (function() {
return function() {
console.log("hello " + _nonexistingvariable);
};
}()),
});
''')
create_test_file('test.cpp', '''
#include <stdio.h>
#include <stdlib.h>
extern "C" {
extern void my_js();
}
int main() {
my_js();
return EXIT_SUCCESS;
}
''')
run_process([EMCC, 'test.cpp', '--js-library', 'library_foo.js'])
# but we do error on a missing js var
create_test_file('library_foo_missing.js', '''
mergeInto(LibraryManager.library, {
my_js__deps: ['main', 'nonexistingvariable'],
my_js: (function() {
return function() {
console.log("hello " + _nonexistingvariable);
};
}()),
});
''')
err = self.expect_fail([EMCC, 'test.cpp', '--js-library', 'library_foo_missing.js'])
self.assertContained('undefined symbol: nonexistingvariable', err)
# and also for missing C code, of course (without the --js-library, it's just a missing C method)
err = self.expect_fail([EMCC, 'test.cpp'])
self.assertContained('undefined symbol: my_js', err)
@no_fastcomp('fastcomp links in memset in JS in a hackish way')
def test_js_lib_to_system_lib(self):
# memset is in compiled code, so a js library __deps can't access it. it
# would need to be in deps_info.json or EXPORTED_FUNCTIONS
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
depper__deps: ['memset'],
depper: function(ptr) {
_memset(ptr, 'd'.charCodeAt(0), 10);
},
});
''')
create_test_file('test.cpp', r'''
#include <string.h>
#include <stdio.h>
extern "C" {
extern void depper(char*);
}
int main(int argc, char** argv) {
char buffer[11];
buffer[10] = '\0';
// call by a pointer, to force linking of memset, no llvm intrinsic here
volatile auto ptr = memset;
(*ptr)(buffer, 'a', 10);
depper(buffer);
puts(buffer);
}
''')
err = self.expect_fail([EMCC, 'test.cpp', '--js-library', 'lib.js'])
self.assertContained('_memset may need to be added to EXPORTED_FUNCTIONS if it arrives from a system library', err)
# without the dep, and with EXPORTED_FUNCTIONS, it works ok
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
depper: function(ptr) {
_memset(ptr, 'd'.charCodeAt(0), 10);
},
});
''')
run_process([EMCC, 'test.cpp', '--js-library', 'lib.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_memset]'])
self.assertContained('dddddddddd', run_js('a.out.js'))
def test_realpath(self):
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#define TEST_PATH "/boot/README.txt"
int
main(int argc, char **argv)
{
errno = 0;
char *t_realpath_buf = realpath(TEST_PATH, NULL);
if (NULL == t_realpath_buf) {
perror("Resolve failed");
return 1;
} else {
printf("Resolved: %s\n", t_realpath_buf);
free(t_realpath_buf);
return 0;
}
}
''')
ensure_dir('boot')
create_test_file(os.path.join('boot', 'README.txt'), ' ')
run_process([EMCC, 'src.c', '--embed-file', 'boot'])
self.assertContained('Resolved: /boot/README.txt', run_js('a.out.js'))
def test_realpath_nodefs(self):
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <emscripten.h>
#define TEST_PATH "/working/TEST_NODEFS.txt"
int
main(int argc, char **argv)
{
errno = 0;
EM_ASM({
FS.mkdir('/working');
FS.mount(NODEFS, { root: '.' }, '/working');
});
char *t_realpath_buf = realpath(TEST_PATH, NULL);
if (NULL == t_realpath_buf) {
perror("Resolve failed");
return 1;
} else {
printf("Resolved: %s\n", t_realpath_buf);
free(t_realpath_buf);
return 0;
}
}
''')
create_test_file('TEST_NODEFS.txt', ' ')
run_process([EMCC, 'src.c', '-lnodefs.js'])
self.assertContained('Resolved: /working/TEST_NODEFS.txt', run_js('a.out.js'))
def test_realpath_2(self):
ensure_dir('Folder')
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
int testrealpath(const char* path) {
errno = 0;
char *t_realpath_buf = realpath(path, NULL);
if (NULL == t_realpath_buf) {
printf("Resolve failed: \"%s\"\n",path);fflush(stdout);
return 1;
} else {
printf("Resolved: \"%s\" => \"%s\"\n", path, t_realpath_buf);fflush(stdout);
free(t_realpath_buf);
return 0;
}
}
int main(int argc, char **argv)
{
// files:
testrealpath("testfile.txt");
testrealpath("Folder/testfile.txt");
testrealpath("testnonexistentfile.txt");
// folders
testrealpath("Folder");
testrealpath("/Folder");
testrealpath("./");
testrealpath("");
testrealpath("/");
return 0;
}
''')
create_test_file('testfile.txt', '')
create_test_file(os.path.join('Folder', 'testfile.txt'), '')
run_process([EMCC, 'src.c', '--embed-file', 'testfile.txt', '--embed-file', 'Folder'])
self.assertContained('''Resolved: "testfile.txt" => "/testfile.txt"
Resolved: "Folder/testfile.txt" => "/Folder/testfile.txt"
Resolve failed: "testnonexistentfile.txt"
Resolved: "Folder" => "/Folder"
Resolved: "/Folder" => "/Folder"
Resolved: "./" => "/"
Resolve failed: ""
Resolved: "/" => "/"
''', run_js('a.out.js'))
def test_no_warnings(self):
# build once before to make sure system libs etc. exist
run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp')])
# check that there is nothing in stderr for a regular compile
err = run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp')], stderr=PIPE).stderr
self.assertEqual(err, '')
@no_wasm_backend("llvm-lto is fastcomp only flag")
def test_llvm_lto(self):
sizes = {}
lto_levels = [0, 1, 2, 3]
for lto in lto_levels:
cmd = [EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-O2', '--llvm-lto', str(lto)]
if self.is_wasm_backend():
cmd += ['-flto']
print(cmd)
run_process(cmd)
self.assertContained('hello, world!', run_js('a.out.js'))
sizes[lto] = os.path.getsize('a.out.wasm')
print(sizes)
# LTO sizes should be distinct
assert len(set(sizes.values())) == len(lto_levels), 'expected distinct LTO sizes, got %s' % sizes
# LTO should reduce code size
# Skip mode 2 because it has historically increased code size, but not always
self.assertLess(sizes[1], sizes[0])
if not self.is_wasm_backend():
self.assertLess(sizes[3], sizes[0])
def test_dlmalloc_modes(self):
create_test_file('src.cpp', r'''
#include <stdlib.h>
#include <stdio.h>
int main() {
void* c = malloc(1024);
free(c);
free(c);
printf("double-freed\n");
}
''')
run_process([EMCC, 'src.cpp'])
self.assertContained('double-freed', run_js('a.out.js'))
# in debug mode, the double-free is caught
run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS=2'])
seen_error = False
out = '?'
try:
out = run_js('a.out.js')
except Exception:
seen_error = True
self.assertTrue(seen_error, out)
def test_mallocs(self):
def run(opts):
print(opts)
sizes = {}
for malloc, name in (
('dlmalloc', 'dlmalloc'),
(None, 'default'),
('emmalloc', 'emmalloc')
):
print(malloc, name)
cmd = [EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-o', 'a.out.js'] + opts
if malloc:
cmd += ['-s', 'MALLOC="%s"' % malloc]
print(cmd)
run_process(cmd)
sizes[name] = os.path.getsize('a.out.wasm')
print(sizes)
# dlmalloc is the default
self.assertEqual(sizes['dlmalloc'], sizes['default'])
# emmalloc is much smaller
self.assertLess(sizes['emmalloc'], sizes['dlmalloc'] - 5000)
run([])
run(['-O2'])
@no_fastcomp("fastcomp doesn't support 2GB+")
def test_emmalloc_2GB(self):
def test(args, text=None):
if text:
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MALLOC=emmalloc'] + args)
self.assertContained(text, stderr)
else:
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MALLOC=emmalloc'] + args)
test(['-s', 'INITIAL_MEMORY=2GB'], 'INITIAL_MEMORY must be less than 2GB due to current spec limitations')
# emmalloc allows growth by default (as the max size is fine), but not if
# a too-high max is set
test(['-s', 'ALLOW_MEMORY_GROWTH'])
test(['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=1GB'])
test(['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=3GB'], 'emmalloc only works on <2GB of memory. Use the default allocator, or decrease MAXIMUM_MEMORY')
@no_fastcomp("fastcomp doesn't support 2GB+")
def test_2GB_plus(self):
# when the heap size can be over 2GB, we rewrite pointers to be unsigned
def test(page_diff):
args = [EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '-s', 'ALLOW_MEMORY_GROWTH']
if page_diff is not None:
args += ['-s', 'MAXIMUM_MEMORY=%d' % (2**31 + page_diff * 64 * 1024)]
print(args)
run_process(args)
return os.path.getsize('a.out.js')
less = test(-1)
equal = test(0)
more = test(1)
none = test(None)
# exactly 2GB still doesn't require unsigned pointers, as we can't address
# the 2GB location in memory
self.assertEqual(less, equal)
self.assertLess(equal, more)
# not specifying maximum memory does not result in unsigned pointers, as the
# default maximum memory is 2GB.
self.assertEqual(less, none)
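# Worked numbers for the 2GB boundary above (a sketch, not in the original):
# MAXIMUM_MEMORY is 2**31 plus a signed delta measured in 64 KiB wasm pages.
WASM_PAGE = 64 * 1024

def max_memory_bytes(page_diff):
  return 2**31 + page_diff * WASM_PAGE

assert max_memory_bytes(0) == 2147483648               # exactly 2GB
assert max_memory_bytes(1) - max_memory_bytes(-1) == 2 * WASM_PAGE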
@no_fastcomp('depends on wasm-emscripten-finalize')
@parameterized({
'normal': (['-s', 'WASM_BIGINT=0'], 'testbind.js'),
'bigint': (['-s', 'WASM_BIGINT=1'], 'testbind_bigint.js'),
})
def test_sixtyfour_bit_return_value(self, args, bind_js):
# This test checks that the most significant 32 bits of a 64 bit long are correctly made available
# to native JavaScript applications that wish to interact with compiled code returning 64 bit longs.
# The MS 32 bits should be available in Runtime.getTempRet0() even when compiled with -O2 --closure 1
# Compile test.c and wrap it in a native JavaScript binding so we can call our compiled function from JS.
run_process([EMCC, path_from_root('tests', 'return64bit', 'test.c'),
'--pre-js', path_from_root('tests', 'return64bit', 'testbindstart.js'),
'--pre-js', path_from_root('tests', 'return64bit', bind_js),
'--post-js', path_from_root('tests', 'return64bit', 'testbindend.js'),
'-s', 'EXPORTED_FUNCTIONS=["_test_return64"]', '-o', 'test.js', '-O2',
'--closure', '1', '-g1', '-s', 'WASM_ASYNC_COMPILATION=0'] + args)
# Simple test program to load the test.js binding library and call the binding to the
# C function returning the 64 bit long.
create_test_file('testrun.js', '''
var test = require("./test.js");
test.runtest();
''')
# Run the test and confirm the output is as expected.
out = run_js('testrun.js', engine=NODE_JS + ['--experimental-wasm-bigint'])
self.assertContained('''\
input = 0xaabbccdd11223344
low = 5678
high = 1234
input = 0xabcdef1912345678
low = 5678
high = 1234
''', out)
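# Sketch of the 64-bit split described above: the low 32 bits come back as
# the return value and the high 32 bits travel via a side channel
# (getTempRet0 in emscripten).
def split_i64(value):
  mask = (1 << 32) - 1
  return value & mask, (value >> 32) & mask

low, high = split_i64(0xaabbccdd11223344)
assert low == 0x11223344 and high == 0xaabbccdd
assert (high << 32) | low == 0xaabbccdd11223344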
def test_lib_include_flags(self):
run_process([EMCC] + '-l m -l c -I'.split() + [path_from_root('tests', 'include_test'), path_from_root('tests', 'lib_include_flags.c')])
def test_dash_s(self):
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s'])
self.assertContained('hello, world!', run_js('a.out.js'))
def test_dash_s_response_file_string(self):
create_test_file('response_file', '"MyModule"\n')
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORT_NAME=@response_file'])
def test_dash_s_response_file_list(self):
create_test_file('response_file', '["_main", "_malloc"]\n')
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=@response_file'])
def test_dash_s_response_file_missing(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=@foo'])
self.assertContained('error: foo: file not found parsing argument: EXPORTED_FUNCTIONS=@foo', err)
def test_dash_s_unclosed_quote(self):
# Unclosed quote
err = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY='MISSING_QUOTE"], stderr=PIPE, check=False).stderr
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened quoted string. expected final character to be "\'"', err)
def test_dash_s_single_quote(self):
# Only one quote
err = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY='"], stderr=PIPE, check=False).stderr
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened quoted string.', err)
def test_dash_s_unclosed_list(self):
# Unclosed list
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY=[Value1, Value2"])
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened string list. expected final character to be "]"', err)
def test_dash_s_valid_list(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY=[Value1, \"Value2\"]"])
self.assertNotContained('a problem occurred in evaluating the content after a "-s", specifically', err)
def test_dash_s_wrong_type(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=foo'])
self.assertContained("error: setting `EXPORTED_FUNCTIONS` expects `<class 'list'>` but got `<class 'str'>`", err)
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXIT_RUNTIME=[foo,bar]'])
self.assertContained("error: setting `EXIT_RUNTIME` expects `<class 'int'>` but got `<class 'list'>`", err)
def test_dash_s_typo(self):
# with suggestions
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'DISABLE_EXCEPTION_CATCH=1'])
self.assertContained("Attempt to set a non-existent setting: 'DISABLE_EXCEPTION_CATCH'", stderr)
self.assertContained('did you mean one of DISABLE_EXCEPTION_CATCHING', stderr)
# no suggestions
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'CHEEZ=1'])
self.assertContained("perhaps a typo in emcc\'s -s X=Y notation?", stderr)
self.assertContained('(see src/settings.js for valid values)', stderr)
# suggestions do not include renamed legacy settings
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ZBINARYEN_ASYNC_COMPILATION'])
self.assertContained("Attempt to set a non-existent setting: 'ZBINARYEN_ASYNC_COMPILATION'", stderr)
self.assertNotContained(' BINARYEN_ASYNC_COMPILATION', stderr)
def test_python_2_3(self):
# check emcc/em++ can be called by any python
def trim_py_suffix(filename):
"""remove .py from EMCC(=emcc.py)"""
return filename[:-3] if filename.endswith('.py') else filename
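# Hypothetical usage (not in the original): the suffix is stripped only when
# present, so already-bare names pass through unchanged.
assert trim_py_suffix('emcc.py') == 'emcc'
assert trim_py_suffix('emcc') == 'emcc'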
def run(python):
if python == 'python3':
has = is_python3_version_supported()
else:
has = shared.which(python) is not None
print(python, has)
if has:
print(' checking emcc.py...')
run_process([python, path_from_root('emcc.py'), '--version'], stdout=PIPE)
print(' checking em++.py...')
run_process([python, path_from_root('em++.py'), '--version'], stdout=PIPE)
run('python')
run('python2')
run('python3')
def test_zeroinit(self):
create_test_file('src.c', r'''
#include <stdio.h>
int buf[1048576];
int main() {
printf("hello, world! %d\n", buf[123456]);
return 0;
}
''')
run_process([EMCC, 'src.c', '-O2', '-g'])
size = os.path.getsize('a.out.wasm')
# size should be much smaller than the size of that zero-initialized buffer
self.assertLess(size, 123456 / 2)
@no_wasm_backend('asm.js')
def test_separate_asm_warning(self):
# Test that -s PRECISE_F32=2 raises a warning that --separate-asm is implied.
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-s', 'PRECISE_F32=2', '-o', 'a.html'], stderr=PIPE).stderr
self.assertContained('forcing separate asm output', stderr)
# Test that -s PRECISE_F32=2 --separate-asm should not post a warning.
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-s', 'PRECISE_F32=2', '-o', 'a.html', '--separate-asm'], stderr=PIPE).stderr
self.assertNotContained('forcing separate asm output', stderr)
# Test that -s PRECISE_F32=1 should not post a warning.
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-s', 'PRECISE_F32=1', '-o', 'a.html'], stderr=PIPE).stderr
self.assertNotContained('forcing separate asm output', stderr)
# Manually doing separate asm should show a warning, if not targeting html
warning = '--separate-asm works best when compiling to HTML'
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '--separate-asm'], stderr=PIPE).stderr
self.assertContained(warning, stderr)
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '--separate-asm', '-o', 'a.html'], stderr=PIPE).stderr
self.assertNotContained(warning, stderr)
# test that the warning can be suppressed
stderr = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '--separate-asm', '-Wno-separate-asm'], stderr=PIPE).stderr
self.assertNotContained(warning, stderr)
def test_canonicalize_nan_warning(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
union U {
int x;
float y;
} a;
int main() {
a.x = 0x7FC01234;
printf("%f\n", a.y);
printf("0x%x\n", a.x);
return 0;
}
''')
stderr = run_process([EMCC, 'src.cpp', '-O1'], stderr=PIPE).stderr
if not self.is_wasm_backend():
self.assertContained("emcc: warning: cannot represent a NaN literal", stderr)
stderr = run_process([EMCC, 'src.cpp', '-O1', '-g'], stderr=PIPE).stderr
self.assertContained("emcc: warning: cannot represent a NaN literal", stderr)
self.assertContained('//@line 12 "src.cpp"', stderr)
else:
out = run_js('a.out.js')
self.assertContained('nan\n', out)
self.assertContained('0x7fc01234\n', out)
@no_wasm_backend('tests our python linking logic')
def test_link_response_file_does_not_force_absolute_paths(self):
with_space = 'with space'
ensure_dir(with_space)
create_test_file(os.path.join(with_space, 'main.cpp'), '''
int main() {
return 0;
}
''')
building.emcc(os.path.join(with_space, 'main.cpp'), ['-g'])
with chdir(with_space):
link_args = building.link(['main.cpp.o'], 'all.bc', just_calculate=True)
time.sleep(0.2) # Wait for Windows FS to release access to the directory
shutil.rmtree(with_space)
# We want only the relative path to be in the linker args, it should not be converted to an absolute path.
if hasattr(self, 'assertCountEqual'):
self.assertCountEqual(link_args, ['main.cpp.o'])
else:
# Python 2 compatibility
self.assertItemsEqual(link_args, ['main.cpp.o'])
def test_memory_growth_noasm(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '-s', 'ALLOW_MEMORY_GROWTH=1'])
src = open('a.out.js').read()
assert 'use asm' not in src
def test_EM_ASM_i64(self):
create_test_file('src.cpp', '''
#include <stdint.h>
#include <emscripten.h>
int main() {
EM_ASM({
out('inputs: ' + $0 + ', ' + $1 + '.');
}, int64_t(0x12345678ABCDEF1FLL));
}
''')
stderr = self.expect_fail([EMCC, 'src.cpp', '-Oz'])
if not self.is_wasm_backend():
self.assertContained('EM_ASM should not receive i64s as inputs, they are not valid in JS', stderr)
def test_eval_ctors_non_terminating(self):
for wasm in (1, 0):
if self.is_wasm_backend() and not wasm:
continue
print('wasm', wasm)
src = r'''
struct C {
C() {
volatile int y = 0;
while (y == 0) {}
}
};
C always;
int main() {}
'''
create_test_file('src.cpp', src)
run_process([EMCC, 'src.cpp', '-O2', '-s', 'EVAL_CTORS=1', '-profiling-funcs', '-s', 'WASM=%d' % wasm])
@no_wasm_backend('EVAL_CTORS is monolithic with the wasm backend')
def test_eval_ctors(self):
for wasm in (1, 0):
if self.is_wasm_backend() and not wasm:
continue
print('wasm', wasm)
print('check no ctors is ok')
# on by default in -Oz, but user-overridable
def get_size(args):
print('get_size', args)
run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'WASM=%d' % wasm] + args)
self.assertContained('hello, world!', run_js('a.out.js'))
if wasm:
codesize = self.count_wasm_contents('a.out.wasm', 'funcs')
memsize = self.count_wasm_contents('a.out.wasm', 'memory-data')
else:
codesize = os.path.getsize('a.out.js')
memsize = os.path.getsize('a.out.js.mem')
return (codesize, memsize)
def check_size(left, right):
# can't measure just the mem out of the wasm, so ignore [1] for wasm
if left[0] == right[0] and left[1] == right[1]:
return 0
if left[0] < right[0] and left[1] > right[1]:
return -1 # smaller code, bigger mem
if left[0] > right[0] and left[1] < right[1]:
return 1
assert False, [left, right]
o2_size = get_size(['-O2'])
assert check_size(get_size(['-O2']), o2_size) == 0, 'deterministic'
assert check_size(get_size(['-O2', '-s', 'EVAL_CTORS=1']), o2_size) < 0, 'eval_ctors works if user asks for it'
oz_size = get_size(['-Oz'])
assert check_size(get_size(['-Oz']), oz_size) == 0, 'deterministic'
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=1']), oz_size) == 0, 'eval_ctors is on by default in oz'
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=0']), oz_size) == 1, 'eval_ctors can be turned off'
linkable_size = get_size(['-Oz', '-s', 'EVAL_CTORS=1', '-s', 'LINKABLE=1'])
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=0', '-s', 'LINKABLE=1']), linkable_size) == 1, 'noticeable difference in linkable too'
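# Hypothetical sanity checks (not in the original) for check_size's partial
# order: less code plus more static memory sorts as -1, identical as 0.
assert check_size((90, 10), (100, 5)) == -1
assert check_size((100, 5), (90, 10)) == 1
assert check_size((100, 5), (100, 5)) == 0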
def test_eval_ctor_ordering(self):
# ensure order of execution remains correct, even with a bad ctor
def test(p1, p2, p3, last, expected):
src = r'''
#include <stdio.h>
#include <stdlib.h>
volatile int total = 0;
struct C {
C(int x) {
volatile int y = x;
y++;
y--;
if (y == 0xf) {
printf("you can't eval me ahead of time\n"); // bad ctor
}
total <<= 4;
total += int(y);
}
};
C __attribute__((init_priority(%d))) c1(0x5);
C __attribute__((init_priority(%d))) c2(0x8);
C __attribute__((init_priority(%d))) c3(%d);
int main() {
printf("total is 0x%%x.\n", total);
}
''' % (p1, p2, p3, last)
create_test_file('src.cpp', src)
run_process([EMCC, 'src.cpp', '-O2', '-s', 'EVAL_CTORS=1', '-profiling-funcs', '-s', 'WASM=%d' % wasm])
self.assertContained('total is %s.' % hex(expected), run_js('a.out.js'))
shutil.copyfile('a.out.js', 'x' + hex(expected) + '.js')
if wasm:
shutil.copyfile('a.out.wasm', 'x' + hex(expected) + '.wasm')
return self.count_wasm_contents('a.out.wasm', 'funcs')
else:
return open('a.out.js').read().count('function _')
print('no bad ctor')
first = test(1000, 2000, 3000, 0xe, 0x58e) # noqa
second = test(3000, 1000, 2000, 0xe, 0x8e5) # noqa
third = test(2000, 3000, 1000, 0xe, 0xe58) # noqa
print(first, second, third)
assert first == second and second == third
print('with bad ctor')
first = test(1000, 2000, 3000, 0xf, 0x58f) # noqa; 2 will succeed
second = test(3000, 1000, 2000, 0xf, 0x8f5) # noqa; 1 will succeed
third = test(2000, 3000, 1000, 0xf, 0xf58) # noqa; 0 will succeed
print(first, second, third)
assert first < second and second < third, [first, second, third]
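# Worked arithmetic for the expected totals above (a sketch): each ctor
# shifts the running total left one nibble and adds its own value, so
# running in priority order packs the values into hex digits.
def pack_ctor_values(values):
  total = 0
  for v in values:
    total = (total << 4) + v
  return total

assert pack_ctor_values([0x5, 0x8, 0xe]) == 0x58e  # priorities 1000, 2000, 3000
assert pack_ctor_values([0xe, 0x5, 0x8]) == 0xe58  # c3 runs first at priority 1000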
@uses_canonical_tmp
@with_env_modify({'EMCC_DEBUG': '1'})
def test_eval_ctors_debug_output(self):
for wasm in (1, 0):
print('wasm', wasm)
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
external_thing: function() {}
});
''')
create_test_file('src.cpp', r'''
extern "C" void external_thing();
struct C {
C() { external_thing(); } // don't remove this!
};
C c;
int main() {}
''')
err = run_process([EMCC, 'src.cpp', '--js-library', 'lib.js', '-Oz', '-s', 'WASM=%d' % wasm], stderr=PIPE).stderr
if self.is_wasm_backend():
# disabled in the wasm backend
self.assertContained('Ctor evalling in the wasm backend is disabled', err)
self.assertNotContained('ctor_evaller: not successful', err) # with logging
else:
self.assertContained('external_thing', err) # the failing call should be mentioned
if not wasm and not self.is_wasm_backend(): # asm.js will show a stack trace
self.assertContained('ctorEval.js', err) # with a stack trace
self.assertContained('ctor_evaller: not successful', err) # with logging
def test_override_js_execution_environment(self):
create_test_file('main.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
out('environment is WEB? ' + ENVIRONMENT_IS_WEB);
out('environment is WORKER? ' + ENVIRONMENT_IS_WORKER);
out('environment is NODE? ' + ENVIRONMENT_IS_NODE);
out('environment is SHELL? ' + ENVIRONMENT_IS_SHELL);
});
}
''')
# use SINGLE_FILE since we don't want to depend on loading a side .wasm file on the environment in this test;
# with the wrong env we have very odd failures
run_process([EMCC, 'main.cpp', '-s', 'SINGLE_FILE=1'])
src = open('a.out.js').read()
envs = ['web', 'worker', 'node', 'shell']
for env in envs:
for engine in JS_ENGINES:
if engine == V8_ENGINE:
continue # ban v8, weird failures
actual = 'NODE' if engine == NODE_JS else 'SHELL'
print(env, actual, engine)
module = {'ENVIRONMENT': env}
if env != actual:
# avoid problems with arguments detection, which may cause very odd failures with the wrong environment code
module['arguments'] = []
curr = 'var Module = %s;\n' % str(module)
print(' ' + curr)
create_test_file('test.js', curr + src)
seen = run_js('test.js', engine=engine, stderr=PIPE, full_output=True, assert_returncode=None)
self.assertContained('Module.ENVIRONMENT has been deprecated. To force the environment, use the ENVIRONMENT compile-time option (for example, -s ENVIRONMENT=web or -s ENVIRONMENT=node', seen)
def test_override_c_environ(self):
create_test_file('pre.js', r'''
var Module = {
preRun: [function() { ENV.hello = 'world' }]
};
''')
create_test_file('src.cpp', r'''
#include <stdlib.h>
#include <stdio.h>
int main() {
printf("|%s|\n", getenv("hello"));
}
''')
run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js'])
self.assertContained('|world|', run_js('a.out.js'))
create_test_file('pre.js', r'''
var Module = {
preRun: [function(module) { module.ENV.hello = 'world' }]
};
''')
run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ENV"]'])
self.assertContained('|world|', run_js('a.out.js'))
run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ENV"]', '-s', 'MODULARIZE=1'])
output = run_process(NODE_JS + ['-e', 'require("./a.out.js")();'], stdout=PIPE, stderr=PIPE)
self.assertContained('|world|', output.stdout)
def test_warn_no_filesystem(self):
WARNING = 'Filesystem support (FS) was not included. The problem is that you are using files from JS, but files were not used from C/C++, so filesystem support was not auto-included. You can force-include filesystem support with -s FORCE_FILESYSTEM=1'
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
seen = run_js('a.out.js', stderr=PIPE)
assert WARNING not in seen
def test(contents):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
EM_ASM({ %s });
printf("hello, world!\n");
return 0;
}
''' % contents)
run_process([EMCC, 'src.cpp'])
self.assertContained(WARNING, run_js('a.out.js', stderr=PIPE, assert_returncode=None))
# might appear in handwritten code
test("FS.init()")
test("FS.createPreloadedFile('waka waka, just warning check')")
test("FS.createDataFile('waka waka, just warning check')")
test("FS.analyzePath('waka waka, just warning check')")
test("FS.loadFilesFromDB('waka waka, just warning check')")
# might appear in filesystem code from a separate script tag
test("Module['FS_createDataFile']('waka waka, just warning check')")
test("Module['FS_createPreloadedFile']('waka waka, just warning check')")
# text is in the source when needed, but when forcing FS, it isn't there
run_process([EMCC, 'src.cpp'])
self.assertContained(WARNING, open('a.out.js').read())
run_process([EMCC, 'src.cpp', '-s', 'FORCE_FILESYSTEM=1']) # forcing FS means no need
self.assertNotContained(WARNING, open('a.out.js').read())
run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS=0']) # no assertions, no need
self.assertNotContained(WARNING, open('a.out.js').read())
run_process([EMCC, 'src.cpp', '-O2']) # optimized, so no assertions
self.assertNotContained(WARNING, open('a.out.js').read())
def test_warn_module_print_err(self):
ERROR = 'was not exported. add it to EXTRA_EXPORTED_RUNTIME_METHODS (see the FAQ)'
def test(contents, expected, args=[]):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({ %s });
return 0;
}
''' % contents)
run_process([EMCC, 'src.cpp'] + args)
self.assertContained(expected, run_js('a.out.js', stderr=STDOUT, assert_returncode=None))
# error shown (when assertions are on)
test("Module.print('x')", ERROR)
test("Module['print']('x')", ERROR)
test("Module.printErr('x')", ERROR)
test("Module['printErr']('x')", ERROR)
# when exported, all good
test("Module['print']('print'); Module['printErr']('err'); ", 'print\nerr', ['-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["print", "printErr"]'])
def test_warn_unexported_main(self):
WARNING = 'main() is in the input files, but "_main" is not in EXPORTED_FUNCTIONS, which means it may be eliminated as dead code. Export it if you want main() to run.'
proc = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORTED_FUNCTIONS=[]'], stderr=PIPE)
self.assertContained(WARNING, proc.stderr)
############################################################
# Function eliminator tests
############################################################
def normalize_line_endings(self, input):
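# normalize CRLF to LF; applying the '\n\n' replacement twice also collapses
# short runs of blank lines (up to four consecutive newlines become one)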
return input.replace('\r\n', '\n').replace('\n\n', '\n').replace('\n\n', '\n')
def get_file_contents(self, file):
with open(file) as f:
file_contents = f.read()
return self.normalize_line_endings(file_contents)
def function_eliminator_test_helper(self, input_file, expected_output_file, use_hash_info=False):
input_file = path_from_root('tests', 'optimizer', input_file)
expected_output_file = path_from_root('tests', 'optimizer', expected_output_file)
command = [path_from_root('tools', 'eliminate-duplicate-functions.js'), input_file, '--no-minimize-whitespace', '--use-asm-ast']
if use_hash_info:
command.append('--use-hash-info')
proc = run_process(NODE_JS + command, stdin=PIPE, stderr=PIPE, stdout=PIPE)
assert proc.stderr == '', proc.stderr
expected_output = self.get_file_contents(expected_output_file)
output = self.normalize_line_endings(proc.stdout)
self.assertIdentical(expected_output, output)
def test_function_eliminator_simple(self):
self.function_eliminator_test_helper('test-function-eliminator-simple.js',
'test-function-eliminator-simple-output.js')
def test_function_eliminator_replace_function_call(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-function-call.js',
'test-function-eliminator-replace-function-call-output.js')
def test_function_eliminator_replace_function_call_two_passes(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-function-call-output.js',
'test-function-eliminator-replace-function-call-two-passes-output.js')
def test_function_eliminator_replace_array_value(self):
output_file = 'output.js'
try:
shared.safe_copy(path_from_root('tests', 'optimizer', 'test-function-eliminator-replace-array-value.js'), output_file)
tools.duplicate_function_eliminator.run(output_file)
output_file_contents = self.get_file_contents(output_file)
expected_file_contents = self.get_file_contents(path_from_root('tests', 'optimizer', 'test-function-eliminator-replace-array-value-output.js'))
self.assertIdentical(expected_file_contents, output_file_contents)
finally:
tools.tempfiles.try_delete(output_file)
def test_function_eliminator_replace_object_value_assignment(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-object-value-assignment.js',
'test-function-eliminator-replace-object-value-assignment-output.js')
def test_function_eliminator_variable_clash(self):
self.function_eliminator_test_helper('test-function-eliminator-variable-clash.js',
'test-function-eliminator-variable-clash-output.js')
def test_function_eliminator_replace_variable_value(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-variable-value.js',
'test-function-eliminator-replace-variable-value-output.js')
@no_wasm_backend('tests native asm.js optimizer, which is never built for wasm backend')
def test_function_eliminator_double_parsed_correctly(self):
# This is a test that makes sure that when we perform final optimization on
# the JS file, doubles are preserved (and not converted to ints).
output_file = 'output.js'
try:
shared.safe_copy(path_from_root('tests', 'optimizer', 'test-function-eliminator-double-parsed-correctly.js'), output_file)
# Run duplicate function elimination
tools.duplicate_function_eliminator.run(output_file)
# Run last opts
shutil.move(tools.js_optimizer.run(output_file, ['last', 'asm']), output_file)
output_file_contents = self.get_file_contents(output_file)
# Compare
expected_file_contents = self.get_file_contents(path_from_root('tests', 'optimizer', 'test-function-eliminator-double-parsed-correctly-output.js'))
self.assertIdentical(expected_file_contents, output_file_contents)
finally:
tools.tempfiles.try_delete(output_file)
# Now do the same, but using a pre-generated equivalent function hash info that
# comes in handy for parallel processing
def test_function_eliminator_simple_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-simple-with-hash-info.js',
'test-function-eliminator-simple-output.js',
use_hash_info=True)
def test_function_eliminator_replace_function_call_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-function-call-with-hash-info.js',
'test-function-eliminator-replace-function-call-output.js',
use_hash_info=True)
def test_function_eliminator_replace_function_call_two_passes_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-function-call-output-with-hash-info.js',
'test-function-eliminator-replace-function-call-two-passes-output.js',
use_hash_info=True)
def test_function_eliminator_replace_object_value_assignment_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-object-value-assignment-with-hash-info.js',
'test-function-eliminator-replace-object-value-assignment-output.js',
use_hash_info=True)
def test_function_eliminator_variable_clash_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-variable-clash-with-hash-info.js',
'test-function-eliminator-variable-clash-output.js',
use_hash_info=True)
def test_function_eliminator_replace_variable_value_with_hash_info(self):
self.function_eliminator_test_helper('test-function-eliminator-replace-variable-value-with-hash-info.js',
'test-function-eliminator-replace-variable-value-output.js',
use_hash_info=True)
@no_wasm_backend('uses CYBERDWARF')
def test_cyberdwarf_pointers(self):
run_process([EMCC, path_from_root('tests', 'debugger', 'test_pointers.cpp'), '-Oz', '-s', 'CYBERDWARF=1',
'--pre-js', path_from_root('tests', 'debugger', 'test_preamble.js'), '-o', 'test_pointers.js'])
run_js('test_pointers.js')
@no_wasm_backend('uses CYBERDWARF')
def test_cyberdwarf_union(self):
run_process([EMCC, path_from_root('tests', 'debugger', 'test_union.cpp'), '-Oz', '-s', 'CYBERDWARF=1',
'--pre-js', path_from_root('tests', 'debugger', 'test_preamble.js'), '-o', 'test_union.js'])
run_js('test_union.js')
def test_source_file_with_fixed_language_mode(self):
create_test_file('src_tmp_fixed_lang', '''
#include <string>
#include <iostream>
int main() {
std::cout << "Test_source_fixed_lang_hello" << std::endl;
return 0;
}
''')
run_process([EMCC, '-Wall', '-x', 'c++', 'src_tmp_fixed_lang'])
self.assertContained("Test_source_fixed_lang_hello", run_js('a.out.js'))
stderr = self.expect_fail([EMCC, '-Wall', 'src_tmp_fixed_lang'])
self.assertContained("Input file has an unknown suffix, don't know what to do with it!", stderr)
def test_disable_inlining(self):
create_test_file('test.c', r'''
#include <stdio.h>
void foo() {
printf("foo\n");
}
int main() {
foo();
return 0;
}
''')
# Without the 'INLINING_LIMIT=1', -O2 inlines foo()
cmd = [EMCC, 'test.c', '-O2', '-o', 'test.bc', '-s', 'INLINING_LIMIT=1']
if self.is_wasm_backend():
cmd += ['-flto']
run_process(cmd)
# If foo() had been wrongly inlined above, internalizing foo and running
# global DCE would eliminate foo as dead code
building.llvm_opt('test.bc', ['-internalize', '-internalize-public-api-list=main', '-globaldce'], 'test2.bc')
# For this test to be successful, foo() shouldn't have been inlined above and
# foo() should be in the function list
syms = building.llvm_nm('test2.bc', include_internal=True)
assert 'foo' in syms.defs, 'foo() should not be inlined'
@no_wasm_backend('--separate-asm')
def test_output_eol(self):
# --separate-asm only makes sense without wasm (no asm.js with wasm)
for params in [[], ['--separate-asm', '-s', 'WASM=0'], ['--proxy-to-worker'], ['--proxy-to-worker', '--separate-asm', '-s', 'WASM=0']]:
for output_suffix in ['html', 'js']:
for eol in ['windows', 'linux']:
files = ['a.js']
if '--separate-asm' in params:
files += ['a.asm.js']
if output_suffix == 'html':
files += ['a.html']
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.' + output_suffix, '--output_eol', eol] + params
run_process(cmd)
for f in files:
print(str(cmd) + ' ' + str(params) + ' ' + eol + ' ' + f)
assert os.path.isfile(f)
if eol == 'linux':
expected_ending = '\n'
else:
expected_ending = '\r\n'
ret = tools.line_endings.check_line_endings(f, expect_only=expected_ending)
assert ret == 0
for f in files:
try_delete(f)
@no_wasm_backend('asm2wasm specific')
@uses_canonical_tmp
def test_binaryen_opts(self):
with env_modify({'EMCC_DEBUG': '1'}):
for args, expect_js_opts, expect_wasm_opts, expect_only_wasm in [
([], False, False, True),
(['-O0'], False, False, True),
(['-O1'], False, True, True),
(['-O2'], False, True, True),
(['-O2', '--js-opts', '1'], True, True, False), # user asked
(['-O2', '-s', 'EVAL_CTORS=1'], False, True, True), # ctor evaller turned off since only-wasm
(['-O3'], False, True, True),
(['-Os'], False, True, True),
(['-Oz'], False, True, True), # ctor evaller turned off since only-wasm
]:
try_delete('a.out.js')
try_delete('a.out.wasm')
try_delete('a.out.wat')
cmd = [EMCC, path_from_root('tests', 'core', 'test_i64.c')] + args
print(args, 'js opts:', expect_js_opts, 'only-wasm:', expect_only_wasm, ' ', ' '.join(cmd))
err = run_process(cmd, stdout=PIPE, stderr=PIPE).stderr
assert expect_js_opts == ('applying js optimization passes:' in err), err
if not self.is_wasm_backend():
assert expect_only_wasm == ('-emscripten-only-wasm' in err and '--wasm-only' in err), err # check both flag to fastcomp and to asm2wasm
wat = run_process([os.path.join(building.get_binaryen_bin(), 'wasm-dis'), 'a.out.wasm'], stdout=PIPE).stdout
# i64s
i64s = wat.count('(i64.')
print(' seen i64s:', i64s)
assert expect_only_wasm == (i64s > 30), 'i64 opts can be emitted in only-wasm mode, but not normally' # note we emit a few i64s even without wasm-only, when we replace udivmoddi (around 15 such)
selects = wat.count('(select')
print(' seen selects:', selects)
if expect_wasm_opts:
# when optimizing we should create selects
self.assertGreater(selects, 15)
else:
# when not optimizing for size we should not
self.assertEqual(selects, 0)
# asm2wasm opt line
asm2wasm_line = [line for line in err.split('\n') if 'asm2wasm' in line]
asm2wasm_line = '' if not asm2wasm_line else asm2wasm_line[0]
if '-O0' in args or '-O' not in str(args):
assert '-O' not in asm2wasm_line, 'no opts should be passed to asm2wasm: ' + asm2wasm_line
else:
opts_str = args[0]
assert opts_str.startswith('-O')
assert opts_str in asm2wasm_line, 'expected opts: ' + asm2wasm_line
@no_wasm_backend('fastcomp specific')
def test_binaryen_and_precise_f32(self):
for args, expect in [
([], True),
(['-s', 'PRECISE_F32=0'], True), # disabled, but no asm.js, so we definitely want f32
(['-s', 'PRECISE_F32=1'], True),
(['-s', 'PRECISE_F32=2'], True),
]:
print(args, expect)
try_delete('a.out.js')
err = run_process([EMCC, '-v', path_from_root('tests', 'hello_world.cpp'), '-s', 'BINARYEN=1'] + args, stderr=PIPE).stderr
assert expect == (' -emscripten-precise-f32' in err), err
self.assertContained('hello, world!', run_js('a.out.js'))
def test_binaryen_names(self):
sizes = {}
for args, expect_names in [
([], False),
(['-g'], True),
(['-O1'], False),
(['-O2'], False),
(['-O2', '-g'], True),
(['-O2', '-g1'], False),
(['-O2', '-g2'], True),
(['-O2', '--profiling'], True),
(['-O2', '--profiling-funcs'], True),
]:
print(args, expect_names)
try_delete('a.out.js')
# we use dlmalloc here, as emmalloc has a bunch of asserts that contain the text "malloc" in them, which makes counting harder
run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + args + ['-s', 'MALLOC="dlmalloc"'])
code = open('a.out.wasm', 'rb').read()
if expect_names:
# name section adds the name of malloc (there is also another one for the export)
self.assertEqual(code.count(b'malloc'), 2)
else:
# should be just malloc for the export
self.assertEqual(code.count(b'malloc'), 1)
sizes[str(args)] = os.path.getsize('a.out.wasm')
print(sizes)
self.assertLess(sizes["['-O2']"], sizes["['-O2', '--profiling-funcs']"], 'when -profiling-funcs, the size increases due to function names')
def test_binaryen_warn_mem(self):
# if user changes INITIAL_MEMORY at runtime, the wasm module may not accept the memory import if it is too big/small
create_test_file('pre.js', 'var Module = { INITIAL_MEMORY: 50 * 1024 * 1024 };\n')
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'INITIAL_MEMORY=' + str(16 * 1024 * 1024), '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
out = run_js('a.out.js', full_output=True, stderr=PIPE, assert_returncode=None)
self.assertContained('LinkError', out)
self.assertContained('Memory size incompatibility issues may be due to changing INITIAL_MEMORY at runtime to something too large. Use ALLOW_MEMORY_GROWTH to allow any size memory (and also make sure not to set INITIAL_MEMORY at runtime to something smaller than it was at compile time).', out)
self.assertNotContained('hello, world!', out)
# and with memory growth, all should be good
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'INITIAL_MEMORY=' + str(16 * 1024 * 1024), '--pre-js', 'pre.js', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'WASM_ASYNC_COMPILATION=0'])
self.assertContained('hello, world!', run_js('a.out.js'))
@no_wasm_backend('asm.js specific')
def test_binaryen_asmjs_outputs(self):
# Test that an .asm.js file is outputted exactly when it is requested.
for args, output_asmjs in [
([], False),
(['-s', 'MAIN_MODULE=2'], False),
]:
with temp_directory(self.get_dir()) as temp_dir:
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join(temp_dir, 'a.js')] + args
print(' '.join(cmd))
run_process(cmd)
if output_asmjs:
self.assertExists(os.path.join(temp_dir, 'a.asm.js'))
self.assertNotExists(os.path.join(temp_dir, 'a.temp.asm.js'))
# Test that outputting to .wasm does not nuke an existing .asm.js file, in
# case the user wants to manually dual-deploy both to the same directory.
with temp_directory(self.get_dir()) as temp_dir:
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-o', os.path.join(temp_dir, 'a.js'), '--separate-asm']
print(' '.join(cmd))
run_process(cmd)
self.assertExists(os.path.join(temp_dir, 'a.asm.js'))
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join(temp_dir, 'a.js')]
print(' '.join(cmd))
run_process(cmd)
self.assertExists(os.path.join(temp_dir, 'a.asm.js'))
self.assertExists(os.path.join(temp_dir, 'a.wasm'))
self.assertNotExists(os.path.join(temp_dir, 'a.temp.asm.js'))
def test_binaryen_mem(self):
for args, expect_initial, expect_max in [
(['-s', 'INITIAL_MEMORY=20971520'], 320, 320),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'ALLOW_MEMORY_GROWTH=1'], 320, None),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'MAXIMUM_MEMORY=41943040'], 320, 640),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=41943040'], 320, 640),
]:
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=1', '-O2'] + args
print(' '.join(cmd))
run_process(cmd)
wat = run_process([os.path.join(building.get_binaryen_bin(), 'wasm-dis'), 'a.out.wasm'], stdout=PIPE).stdout
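# the memory import in the disassembly looks like
#   (import "env" "memory" (memory $0 320 640))
# where the sizes are in 64KiB wasm pages: 20971520 / 65536 = 320 initial
# pages, and 41943040 / 65536 = 640 maximum pages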
for line in wat.split('\n'):
if '(import "env" "memory" (memory ' in line:
parts = line.strip().replace('(', '').replace(')', '').split(' ')
print(parts)
self.assertEqual(parts[5], str(expect_initial))
if not expect_max:
self.assertEqual(len(parts), 6)
else:
self.assertEqual(parts[6], str(expect_max))
def test_invalid_mem(self):
# A large amount is fine, multiple of 16MB or not
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=33MB'])
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=32MB'])
# But not in asm.js
if not self.is_wasm_backend():
ret = self.expect_fail([EMCC, '-s', 'WASM=0', path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=33MB'])
self.assertContained('INITIAL_MEMORY must be a multiple of 16MB', ret)
# A tiny amount is fine in wasm
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=65536', '-s', 'TOTAL_STACK=1024'])
# And the program works!
self.assertContained('hello, world!', run_js('a.out.js'))
# But not in asm.js
if not self.is_wasm_backend():
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=65536', '-s', 'WASM=0'])
self.assertContained('INITIAL_MEMORY must be at least 16MB', ret)
# Must be a multiple of 64KB
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=33554433']) # 32MB + 1 byte
self.assertContained('INITIAL_MEMORY must be a multiple of 64KB', ret)
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MAXIMUM_MEMORY=33MB'])
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MAXIMUM_MEMORY=34603009']) # 33MB + 1 byte
self.assertContained('MAXIMUM_MEMORY must be a multiple of 64KB', ret)
def test_invalid_output_dir(self):
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('NONEXISTING_DIRECTORY', 'out.js')])
self.assertContained('specified output file (NONEXISTING_DIRECTORY%sout.js) is in a directory that does not exist' % os.path.sep, ret)
def test_binaryen_ctors(self):
# ctor order must be identical to js builds, deterministically
create_test_file('src.cpp', r'''
#include <stdio.h>
struct A {
A() { puts("constructing A!"); }
};
A a;
struct B {
B() { puts("constructing B!"); }
};
B b;
int main() {}
''')
run_process([EMCC, 'src.cpp'])
correct = run_js('a.out.js')
for args in [[], ['-s', 'RELOCATABLE=1']]:
print(args)
run_process([EMCC, 'src.cpp', '-s', 'WASM=1', '-o', 'b.out.js'] + args)
seen = run_js('b.out.js')
assert correct == seen, correct + '\n vs \n' + seen
# test debug info and debuggability of JS output
@uses_canonical_tmp
def test_binaryen_debug(self):
with env_modify({'EMCC_DEBUG': '1'}):
for args, expect_dash_g, expect_emit_text, expect_clean_js, expect_whitespace_js, expect_closured in [
(['-O0'], False, False, False, True, False),
(['-O0', '-g1'], False, False, False, True, False),
(['-O0', '-g2'], True, False, False, True, False), # in -g2+, we emit -g to asm2wasm so function names are saved
(['-O0', '-g'], True, True, False, True, False),
(['-O0', '--profiling-funcs'], True, False, False, True, False),
(['-O1'], False, False, False, True, False),
(['-O2'], False, False, True, False, False),
(['-O2', '-g1'], False, False, True, True, False),
(['-O2', '-g'], True, True, False, True, False),
(['-O2', '--closure', '1'], False, False, True, False, True),
(['-O2', '--closure', '1', '-g1'], False, False, True, True, True),
(['-O2', '--js-opts', '1'], False, False, True, False, False),
]:
print(args, expect_dash_g, expect_emit_text)
try_delete('a.out.wat')
cmd = [EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'WASM=1'] + args
print(' '.join(cmd))
err = run_process(cmd, stdout=PIPE, stderr=PIPE).stderr
if not self.is_wasm_backend():
asm2wasm_line = [x for x in err.split('\n') if 'asm2wasm' in x][0]
asm2wasm_line = asm2wasm_line.strip() + ' ' # ensure it ends with a space, for simpler searches below
print('|' + asm2wasm_line + '|')
assert expect_dash_g == (' -g ' in asm2wasm_line)
assert expect_emit_text == (' -S ' in asm2wasm_line)
if expect_emit_text:
text = open('a.out.wat').read()
assert ';;' in text, 'must see debug info comment'
assert 'hello_world.cpp:12' in text, 'must be file:line info'
js = open('a.out.js').read()
assert expect_clean_js == ('// ' not in js), 'cleaned-up js must not have comments'
assert expect_whitespace_js == ('{\n ' in js), 'whitespace-minified js must not have excess spacing'
assert expect_closured == ('var a;' in js or 'var a,' in js or 'var a=' in js or 'var a ' in js), 'closured js must have tiny variable names'
@uses_canonical_tmp
def test_binaryen_ignore_implicit_traps(self):
sizes = []
with env_modify({'EMCC_DEBUG': '1'}):
for args, expect in [
([], False),
(['-s', 'BINARYEN_IGNORE_IMPLICIT_TRAPS=1'], True),
]:
print(args, expect)
cmd = [EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'WASM=1', '-O3'] + args
print(' '.join(cmd))
err = run_process(cmd, stdout=PIPE, stderr=PIPE).stderr
self.assertContainedIf('--ignore-implicit-traps ', err, expect)
sizes.append(os.path.getsize('a.out.wasm'))
print('sizes:', sizes)
# sizes must be different, as the flag has an impact
self.assertEqual(len(set(sizes)), 2)
@no_fastcomp('BINARYEN_EXTRA_PASSES is used to optimize only in the wasm backend (fastcomp uses flags to asm2wasm)')
def test_binaryen_passes_extra(self):
def build(args=[]):
return run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-O3'] + args, stdout=PIPE).stdout
build()
base_size = os.path.getsize('a.out.wasm')
out = build(['-s', 'BINARYEN_EXTRA_PASSES="--metrics"'])
# and --metrics output appears
self.assertContained('[funcs]', out)
# adding --metrics should not affect code size
self.assertEqual(base_size, os.path.getsize('a.out.wasm'))
def assertFileContents(self, filename, contents):
contents = contents.replace('\r', '')
if os.environ.get('EMTEST_REBASELINE'):
with open(filename, 'w') as f:
f.write(contents)
return
if not os.path.exists(filename):
self.fail('Test expectation file not found: ' + filename + '.\n' +
'Run with EMTEST_REBASELINE to generate.')
expected_content = open(filename).read()
message = "Run with EMTEST_REBASELINE=1 to automatically update expectations"
self.assertTextDataIdentical(expected_content, contents, message,
filename, filename + '.new')
def run_metadce_test(self, filename, args, expected_exists, expected_not_exists, expected_size,
check_sent=True, check_imports=True, check_exports=True, check_funcs=True):
size_slack = 0.05
# in -Os, -Oz, we remove imports wasm doesn't need
print('Running metadce test: %s:' % filename, args, expected_exists,
expected_not_exists, expected_size, check_sent, check_imports, check_exports, check_funcs)
filename = path_from_root('tests', 'other', 'metadce', filename)
def clean_arg(arg):
return arg.replace('-', '')
def args_to_filename(args):
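# e.g. ['-O3', '-s', 'MAIN_MODULE=2'] -> '_O3_MAIN_MODULE_2'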
result = ''
for a in args:
if a == '-s':
continue
a = a.replace('-', '')
a = a.replace('=1', '')
a = a.replace('=[]', '_NONE')
a = a.replace('=', '_')
if a:
result += '_' + a
return result
expected_basename = os.path.splitext(filename)[0]
if not self.is_wasm_backend():
expected_basename += '_fastcomp'
expected_basename += args_to_filename(args)
run_process([EMCC, filename, '-g2'] + args)
# find the imports we send from JS
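# (the generated JS contains something like
#   asmLibraryArg = { "abort": _abort, "waka": ..., };
# we slice out the object body and collect the keys)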
js = open('a.out.js').read()
start = js.find('asmLibraryArg = ')
end = js.find('}', start) + 1
start = js.find('{', start)
relevant = js[start + 2:end - 2]
relevant = relevant.replace(' ', '').replace('"', '').replace("'", '').split(',')
sent = [x.split(':')[0].strip() for x in relevant]
sent = [x for x in sent if x]
sent.sort()
for exists in expected_exists:
self.assertIn(exists, sent)
for not_exists in expected_not_exists:
self.assertNotIn(not_exists, sent)
wasm_size = os.path.getsize('a.out.wasm')
if expected_size is not None:
ratio = abs(wasm_size - expected_size) / float(expected_size)
print(' seen wasm size: %d (expected: %d), ratio to expected: %f' % (wasm_size, expected_size, ratio))
self.assertLess(ratio, size_slack)
imports, exports, funcs = parse_wasm('a.out.wasm')
imports.sort()
exports.sort()
funcs.sort()
# filter out _NNN suffixes that can be the result of bitcode linking when
# internal symbol names collide.
def strip_numeric_suffixes(funcname):
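# e.g. 'malloc_12' -> 'malloc' (only trailing all-digit parts are dropped)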
parts = funcname.split('_')
while parts:
if parts[-1].isdigit():
parts.pop()
else:
break
return '_'.join(parts)
funcs = [strip_numeric_suffixes(f) for f in funcs]
if check_sent:
sent_file = expected_basename + '.sent'
sent_data = '\n'.join(sent) + '\n'
self.assertFileContents(sent_file, sent_data)
if check_imports:
filename = expected_basename + '.imports'
data = '\n'.join(imports) + '\n'
self.assertFileContents(filename, data)
if check_exports:
filename = expected_basename + '.exports'
data = '\n'.join(exports) + '\n'
self.assertFileContents(filename, data)
if check_funcs:
filename = expected_basename + '.funcs'
data = '\n'.join(funcs) + '\n'
self.assertFileContents(filename, data)
@parameterized({
'O0': ([], [], ['waka'], 9766), # noqa
'O1': (['-O1'], [], ['waka'], 7886), # noqa
'O2': (['-O2'], [], ['waka'], 7871), # noqa
# in -O3, -Os and -Oz we metadce, and they shrink it down to the minimal output we want
'O3': (['-O3'], [], [], 85), # noqa
'Os': (['-Os'], [], [], 85), # noqa
'Oz': (['-Oz'], [], [], 85), # noqa
'Os_mr': (['-Os', '-s', 'MINIMAL_RUNTIME'], [], [], 85), # noqa
})
@no_fastcomp()
def test_metadce_minimal(self, *args):
self.run_metadce_test('minimal.c', *args)
@parameterized({
'O0': ([], ['abort'], ['waka'], 22712), # noqa
'O1': (['-O1'], ['abort'], ['waka'], 10450), # noqa
'O2': (['-O2'], ['abort'], ['waka'], 10440), # noqa
# in -O3, -Os and -Oz we metadce, and they shrink it down to the minimal output we want
'O3': (['-O3'], [], [], 55), # noqa
'Os': (['-Os'], [], [], 55), # noqa
'Oz': (['-Oz'], [], [], 55), # noqa
})
@no_wasm_backend()
def test_metadce_minimal_fastcomp(self, *args):
self.run_metadce_test('minimal.c', *args)
@parameterized({
'noexcept': (['-O2'], [], ['waka'], 218988), # noqa
# exceptions increases code size significantly
'except': (['-O2', '-fexceptions'], [], ['waka'], 279827), # noqa
# exception catching does not pull in demangling by default; DEMANGLE_SUPPORT adds it and increases code size
'mangle': (['-O2', '-fexceptions',
'-s', 'DEMANGLE_SUPPORT'], [], ['waka'], 408028), # noqa
})
@no_fastcomp()
def test_metadce_cxx(self, *args):
self.run_metadce_test('hello_libcxx.cpp', *args)
@parameterized({
'normal': (['-O2'], ['abort'], ['waka'], 186423),
'emulated_function_pointers':
(['-O2', '-s', 'EMULATED_FUNCTION_POINTERS=1'], ['abort'], ['waka'], 188310),
})
@no_wasm_backend()
def test_metadce_cxx_fastcomp(self, *args):
# test on libc++: see effects of emulated function pointers
self.run_metadce_test('hello_libcxx.cpp', *args)
@parameterized({
'O0': ([], [], ['waka'], 22849), # noqa
'O1': (['-O1'], [], ['waka'], 10533), # noqa
'O2': (['-O2'], [], ['waka'], 10256), # noqa
'O3': (['-O3'], [], [], 1999), # noqa; in -O3, -Os and -Oz we metadce
'Os': (['-Os'], [], [], 2010), # noqa
'Oz': (['-Oz'], [], [], 2004), # noqa
# finally, check what happens when we export nothing. wasm should be almost empty
'export_nothing':
(['-Os', '-s', 'EXPORTED_FUNCTIONS=[]'], [], [], 61), # noqa
# we don't metadce with linkable code! other modules may want stuff
# don't compare the # of functions in a main module, which changes a lot
# TODO(sbc): Investigate why the number of exports is an order of magnitude
# larger for the wasm backend.
'main_module_2': (['-O3', '-s', 'MAIN_MODULE=2'], [], [], 10652, True, True, True, False), # noqa
})
@no_fastcomp()
def test_metadce_hello(self, *args):
self.run_metadce_test('hello_world.cpp', *args)
@parameterized({
'O0': ([], ['abort'], ['waka'], 42701), # noqa
'O1': (['-O1'], ['abort'], ['waka'], 13199), # noqa
'O2': (['-O2'], ['abort'], ['waka'], 12425), # noqa
'O3': (['-O3'], [], [], 2045), # noqa; in -O3, -Os and -Oz we metadce
'Os': (['-Os'], [], [], 2064), # noqa
'Oz': (['-Oz'], [], [], 2045), # noqa
# finally, check what happens when we export nothing. wasm should be almost empty
'export_nothing':
(['-Os', '-s', 'EXPORTED_FUNCTIONS=[]'], [], [], 8), # noqa; totally empty!
# we don't metadce with linkable code! other modules may want stuff
# don't compare the # of functions in a main module, which changes a lot
'main_module_2': (['-O3', '-s', 'MAIN_MODULE=2'], [], [], 10017), # noqa
})
@no_wasm_backend()
def test_metadce_hello_fastcomp(self, *args):
self.run_metadce_test('hello_world.cpp', *args)
@parameterized({
'O3': ('mem.c', ['-O3'],
[], [], 6100), # noqa
# argc/argv support code etc. is in the wasm
'O3_standalone': ('mem.c', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 6309), # noqa
# without argc/argv, no support code for them is emitted
'O3_standalone_narg': ('mem_no_argv.c', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 6309), # noqa
# without main, no support code for argc/argv is emitted either
'O3_standalone_lib': ('mem_no_main.c', ['-O3', '-s', 'STANDALONE_WASM', '--no-entry'],
[], [], 6309), # noqa
# Growth support code is in JS, no significant change in the wasm
'O3_grow': ('mem.c', ['-O3', '-s', 'ALLOW_MEMORY_GROWTH'],
[], [], 6098), # noqa
# Growth support code is in the wasm
'O3_grow_standalone': ('mem.c', ['-O3', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'STANDALONE_WASM'],
[], [], 6449), # noqa
# without argc/argv, no support code for them is emitted, even with lto
'O3_standalone_narg_flto':
('mem_no_argv.c', ['-O3', '-s', 'STANDALONE_WASM', '-flto'],
[], [], 4971), # noqa
})
@no_fastcomp()
def test_metadce_mem(self, filename, *args):
self.run_metadce_test(filename, *args)
@parameterized({
'O3': ('libcxxabi_message.cpp', ['-O3'],
[], [], 128), # noqa
# argc/argv support code etc. is in the wasm
'O3_standalone': ('libcxxabi_message.cpp', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 174), # noqa
})
@no_fastcomp()
def test_metadce_libcxxabi_message(self, filename, *args):
self.run_metadce_test(filename, *args)
# ensures runtime exports work, even with metadce
def test_extra_runtime_exports(self):
exports = ['stackSave', 'stackRestore', 'stackAlloc', 'FS']
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'WASM=1', '-Os', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=%s' % str(exports)])
js = open('a.out.js').read()
for export in exports:
assert ('Module["%s"]' % export) in js, export
def test_legalize_js_ffi(self):
# test disabling of JS FFI legalization
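# background: JS numbers cannot represent a full i64, so by default the FFI is
# 'legalized': i64 parameters are split into i32 pairs (with the high bits
# passed through a side channel) and f32 becomes f64; LEGALIZE_JS_FFI=0
# leaves the raw wasm types in imports and exports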
wasm_dis = os.path.join(building.get_binaryen_bin(), 'wasm-dis')
for (args, js_ffi) in [
(['-s', 'LEGALIZE_JS_FFI=1', '-s', 'SIDE_MODULE=1', '-O1', '-s', 'EXPORT_ALL=1'], True),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'SIDE_MODULE=1', '-O1', '-s', 'EXPORT_ALL=1'], False),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'SIDE_MODULE=1', '-O0', '-s', 'EXPORT_ALL=1'], False),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0', '-O0'], False),
]:
if self.is_wasm_backend() and 'SIDE_MODULE=1' in args:
continue
print(args)
try_delete('a.out.wasm')
try_delete('a.out.wat')
cmd = [EMCC, path_from_root('tests', 'other', 'ffi.c'), '-g', '-o', 'a.out.js'] + args
print(' '.join(cmd))
run_process(cmd)
run_process([wasm_dis, 'a.out.wasm', '-o', 'a.out.wat'])
text = open('a.out.wat').read()
# remove internal comments and extra whitespace
text = re.sub(r'\(;[^;]+;\)', '', text)
text = re.sub(r'\$var\$*.', '', text)
text = re.sub(r'param \$\d+', 'param ', text)
text = re.sub(r' +', ' ', text)
# TODO: remove the unnecessary ".*" in the e_* regexes after binaryen #2510 lands
e_add_f32 = re.search(r'func \$_?add_f .*\(param f32\) \(param f32\) \(result f32\)', text)
i_i64_i32 = re.search(r'import .*"_?import_ll" .*\(param i32 i32\) \(result i32\)', text)
i_f32_f64 = re.search(r'import .*"_?import_f" .*\(param f64\) \(result f64\)', text)
i_i64_i64 = re.search(r'import .*"_?import_ll" .*\(param i64\) \(result i64\)', text)
i_f32_f32 = re.search(r'import .*"_?import_f" .*\(param f32\) \(result f32\)', text)
e_i64_i32 = re.search(r'func \$_?add_ll .*\(param i32\) \(param i32\) \(param i32\) \(param i32\) \(result i32\)', text)
e_f32_f64 = re.search(r'func \$legalstub\$_?add_f .*\(param f64\) \(param f64\) \(result f64\)', text)
e_i64_i64 = re.search(r'func \$_?add_ll .*\(param i64\) \(param i64\) \(result i64\)', text)
assert e_add_f32, 'add_f export missing'
if js_ffi:
assert i_i64_i32, 'i64 not converted to i32 in imports'
assert i_f32_f64, 'f32 not converted to f64 in imports'
assert not i_i64_i64, 'unlegalized i64 import should not remain'
assert not i_f32_f32, 'unlegalized f32 import should not remain'
assert e_i64_i32, 'i64 not converted to i32 in exports'
assert not e_f32_f64, 'no f64 legal stub should be generated for add_f'
assert not e_i64_i64, 'unlegalized i64 export should not remain'
else:
assert not i_i64_i32, 'i64 should not be converted to i32 in imports'
assert not i_f32_f64, 'f32 should not be converted to f64 in imports'
assert i_i64_i64, 'i64 import should remain i64'
assert i_f32_f32, 'f32 import should remain f32'
assert not e_i64_i32, 'i64 should not be converted to i32 in exports'
assert not e_f32_f64, 'f32 should not be converted to f64 in exports'
assert e_i64_i64, 'i64 export should remain i64'
def test_no_legalize_js_ffi(self):
# test minimal JS FFI legalization for invoke and dyncalls
if self.is_wasm_backend():
self.skipTest('not testing legalize with main module and wasm backend')
wasm_dis = os.path.join(building.get_binaryen_bin(), 'wasm-dis')
for (args, js_ffi) in [
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'MAIN_MODULE=2', '-O3', '-s', 'DISABLE_EXCEPTION_CATCHING=0'], False),
]:
print(args)
try_delete('a.out.wasm')
try_delete('a.out.wat')
with env_modify({'EMCC_FORCE_STDLIBS': 'libc++'}):
cmd = [EMCC, path_from_root('tests', 'other', 'noffi.cpp'), '-g', '-o', 'a.out.js'] + args
print(' '.join(cmd))
run_process(cmd)
run_process([wasm_dis, 'a.out.wasm', '-o', 'a.out.wat'])
text = open('a.out.wat').read()
# remove internal comments and extra whitespace
text = re.sub(r'\(;[^;]+;\)', '', text)
text = re.sub(r'\$var\$*.', '', text)
text = re.sub(r'param \$\d+', 'param ', text)
text = re.sub(r' +', ' ', text)
# print("text: %s" % text)
i_legalimport_i64 = re.search(r'\(import.*\$legalimport\$invoke_j.*', text)
e_legalstub_i32 = re.search(r'\(func.*\$legalstub\$dyn.*\(result i32\)', text)
assert i_legalimport_i64, 'legal import not generated for invoke call'
assert e_legalstub_i32, 'legal stub not generated for dyncall'
def test_export_aliasee(self):
# build side module
args = ['-s', 'SIDE_MODULE=1']
cmd = [EMCC, path_from_root('tests', 'other', 'alias', 'side.c'), '-g', '-o', 'side.wasm'] + args
print(' '.join(cmd))
run_process(cmd)
# build main module
args = ['-s', 'EXPORTED_FUNCTIONS=["_main", "_foo"]', '-s', 'MAIN_MODULE=2', '-s', 'EXIT_RUNTIME=1', '-lnodefs.js']
cmd = [EMCC, path_from_root('tests', 'other', 'alias', 'main.c'), '-o', 'main.js'] + args
print(' '.join(cmd))
run_process(cmd)
# run the program
self.assertContained('success', run_js('main.js'))
def test_sysconf_phys_pages(self):
def run(args, expected):
if self.is_wasm_backend() and 'WASM=0' in args:
return
cmd = [EMCC, path_from_root('tests', 'unistd', 'sysconf_phys_pages.c')] + args
print(str(cmd))
run_process(cmd)
result = run_js('a.out.js').strip()
self.assertEqual(result, str(expected) + ', errno: 0')
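# expected values: sysconf reports a 16384-byte page size, so the default 16MB
# heap is 1024 pages, 32MB is 2048, and with ALLOW_MEMORY_GROWTH the 2GB
# growth limit is reported instead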
run([], 1024)
run(['-s', 'INITIAL_MEMORY=32MB'], 2048)
run(['-s', 'INITIAL_MEMORY=32MB', '-s', 'ALLOW_MEMORY_GROWTH=1'], (2 * 1024 * 1024 * 1024) // 16384)
run(['-s', 'INITIAL_MEMORY=32MB', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'WASM=0'], (2 * 1024 * 1024 * 1024) // 16384)
def test_wasm_target_and_STANDALONE_WASM(self):
# STANDALONE_WASM means we never minify imports and exports.
for opts, potentially_expect_minified_exports_and_imports in (
([], False),
(['-s', 'STANDALONE_WASM'], False),
(['-O2'], False),
(['-O3'], True),
(['-O3', '-s', 'STANDALONE_WASM'], False),
(['-Os'], True),
):
if 'STANDALONE_WASM' in opts and not self.is_wasm_backend():
continue
# targeting .wasm (without .js) means we enable STANDALONE_WASM automatically, and don't minify imports/exports
for target in ('out.js', 'out.wasm'):
expect_minified_exports_and_imports = potentially_expect_minified_exports_and_imports and target.endswith('.js')
standalone = target.endswith('.wasm') or 'STANDALONE_WASM' in opts
print(opts, potentially_expect_minified_exports_and_imports, target, ' => ', expect_minified_exports_and_imports, standalone)
self.clear()
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', target] + opts)
self.assertExists('out.wasm')
if target.endswith('.wasm'):
# only wasm requested
self.assertNotExists('out.js')
wat = run_process([os.path.join(building.get_binaryen_bin(), 'wasm-dis'), 'out.wasm'], stdout=PIPE).stdout
wat_lines = wat.split('\n')
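# export lines look like (export "name" ...) and import lines like
# (import "module" "field" ...), so the export name is token [1] and the
# import field name is token [2]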
exports = [line.strip().split(' ')[1].replace('"', '') for line in wat_lines if "(export " in line]
imports = [line.strip().split(' ')[2].replace('"', '') for line in wat_lines if "(import " in line]
exports_and_imports = exports + imports
print(' exports', exports)
print(' imports', imports)
if expect_minified_exports_and_imports:
assert 'a' in exports_and_imports
else:
assert 'a' not in exports_and_imports
assert 'memory' in exports_and_imports or 'fd_write' in exports_and_imports, 'some things are not minified anyhow'
# verify the wasm runs with the JS
if target.endswith('.js'):
self.assertContained('hello, world!', run_js('out.js'))
# verify a standalone wasm
if standalone and self.is_wasm_backend():
for engine in WASM_ENGINES:
print(engine)
self.assertContained('hello, world!', run_js('out.wasm', engine=engine))
def test_wasm_targets_side_module(self):
# side modules do allow a wasm target
for opts, target in [([], 'a.out.wasm'), (['-o', 'lib.wasm'], 'lib.wasm')]:
# specified target
print('building: ' + target)
self.clear()
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'SIDE_MODULE=1'] + opts)
for x in os.listdir('.'):
assert not x.endswith('.js'), 'we should not emit js when making a wasm side module: ' + x
self.assertIn(b'dylink', open(target, 'rb').read())
@no_fastcomp('test wasm object files')
def test_wasm_backend_lto(self):
# test building of non-wasm-object-files libraries, building with them, and running them
src = path_from_root('tests', 'hello_libcxx.cpp')
# test codegen in lto mode, and compare to normal (wasm object) mode
for args in [[], ['-O1'], ['-O2'], ['-O3'], ['-Os'], ['-Oz']]:
print(args)
print('wasm in object')
run_process([EMXX, src] + args + ['-c', '-o', 'hello_obj.o'])
self.assertTrue(building.is_wasm('hello_obj.o'))
self.assertFalse(building.is_bitcode('hello_obj.o'))
print('bitcode in object')
run_process([EMXX, src] + args + ['-c', '-o', 'hello_bitcode.o', '-flto'])
self.assertFalse(building.is_wasm('hello_bitcode.o'))
self.assertTrue(building.is_bitcode('hello_bitcode.o'))
print('use bitcode object (LTO)')
run_process([EMXX, 'hello_bitcode.o'] + args + ['-flto'])
self.assertContained('hello, world!', run_js('a.out.js'))
print('use bitcode object (non-LTO)')
run_process([EMXX, 'hello_bitcode.o'] + args)
self.assertContained('hello, world!', run_js('a.out.js'))
print('use native object (LTO)')
run_process([EMXX, 'hello_obj.o'] + args + ['-flto'])
self.assertContained('hello, world!', run_js('a.out.js'))
print('use native object (non-LTO)')
run_process([EMXX, 'hello_obj.o'] + args)
self.assertContained('hello, world!', run_js('a.out.js'))
@parameterized({
'except': [],
'noexcept': ['-s', 'DISABLE_EXCEPTION_CATCHING=0']
})
@no_fastcomp('test wasm object files')
def test_wasm_backend_lto_libcxx(self, *args):
run_process([EMXX, path_from_root('tests', 'hello_libcxx.cpp'), '-flto'] + list(args))
@no_fastcomp('wasm backend lto specific')
def test_lto_flags(self):
for flags, expect_bitcode in [
([], False),
(['-flto'], True),
(['-flto=thin'], True),
(['-s', 'WASM_OBJECT_FILES=0'], True),
(['-s', 'WASM_OBJECT_FILES=1'], False),
]:
run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + flags + ['-c', '-o', 'a.o'])
seen_bitcode = building.is_bitcode('a.o')
self.assertEqual(expect_bitcode, seen_bitcode, 'must emit LTO-capable bitcode when flags indicate so (%s)' % str(flags))
def test_wasm_nope(self):
for opts in [[], ['-O2']]:
print(opts)
# check we show a good error message if there is no wasm support
create_test_file('pre.js', 'WebAssembly = undefined;\n')
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--pre-js', 'pre.js'] + opts)
out = run_js('a.out.js', stderr=STDOUT, assert_returncode=None)
self.assertContained('no native wasm support detected', out)
def test_jsrun(self):
print(NODE_JS)
jsrun.WORKING_ENGINES = {}
# Test that engine check passes
self.assertTrue(jsrun.check_engine(NODE_JS))
# Run it a second time (cache hit)
self.assertTrue(jsrun.check_engine(NODE_JS))
# Test that engine check fails
bogus_engine = ['/fake/inline4']
self.assertFalse(jsrun.check_engine(bogus_engine))
self.assertFalse(jsrun.check_engine(bogus_engine))
# Test the other possible way (list vs string) to express an engine
if type(NODE_JS) is list:
engine2 = NODE_JS[0]
else:
engine2 = [NODE_JS]
self.assertTrue(jsrun.check_engine(engine2))
# Test that run_js requires the engine
run_js(path_from_root('tests', 'hello_world.js'), NODE_JS)
caught_exit = 0
try:
run_js(path_from_root('tests', 'hello_world.js'), bogus_engine)
except SystemExit as e:
caught_exit = e.code
self.assertEqual(1, caught_exit, 'Did not catch SystemExit with bogus JS engine')
def test_error_on_missing_libraries(self):
# -llsomenonexistingfile is an error by default
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-lsomenonexistingfile'])
if self.is_wasm_backend():
self.assertContained('wasm-ld: error: unable to find library -lsomenonexistingfile', err)
else:
self.assertContained('emcc: cannot find library "somenonexistingfile"', err)
# Tests that if user accidentally attempts to link native object code, we show an error
def test_native_link_error_message(self):
run_process([CLANG_CC, '-c', path_from_root('tests', 'hello_123.c'), '-o', 'hello_123.o'])
err = self.expect_fail([EMCC, 'hello_123.o', '-o', 'hello_123.js'])
self.assertContained('hello_123.o is not a valid input', err)
# Tests that we should give a clear error on INITIAL_MEMORY not being enough for static initialization + stack
def test_clear_error_on_massive_static_data(self):
with open('src.cpp', 'w') as f:
f.write('''
char muchData[128 * 1024];
int main() {
return (int)&muchData;
}
''')
err = self.expect_fail([EMCC, 'src.cpp', '-s', 'TOTAL_STACK=1KB', '-s', 'INITIAL_MEMORY=64KB'])
if self.is_wasm_backend():
self.assertContained('wasm-ld: error: initial memory too small', err)
else:
self.assertContained('Memory is not large enough for static data (134000) plus the stack (1024), please increase INITIAL_MEMORY (65536)', err)
def test_o_level_clamp(self):
for level in [3, 4, 20]:
err = run_process([EMCC, '-O' + str(level), path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
self.assertContainedIf("optimization level '-O" + str(level) + "' is not supported; using '-O3' instead", err, level > 3)
# Tests that if user specifies multiple -o output directives, then the last one will take precedence
def test_multiple_o_files(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.js', '-o', 'b.js'])
assert os.path.isfile('b.js')
assert not os.path.isfile('a.js')
# Tests that Emscripten-provided header files can be cleanly included in C code
def test_include_system_header_in_c(self):
for std in [[], ['-std=c89']]: # Test oldest C standard, and the default C standard
for directory, headers in [
('emscripten', ['dom_pk_codes.h', 'em_asm.h', 'emscripten.h', 'fetch.h', 'html5.h', 'key_codes.h', 'threading.h', 'trace.h', 'vr.h']), # This directory also has bind.h, val.h and wire.h, which require C++11
('AL', ['al.h', 'alc.h']),
('EGL', ['egl.h', 'eglplatform.h']),
('GL', ['freeglut_std.h', 'gl.h', 'glew.h', 'glfw.h', 'glu.h', 'glut.h']),
('GLES', ['gl.h', 'glplatform.h']),
('GLES2', ['gl2.h', 'gl2platform.h']),
('GLES3', ['gl3.h', 'gl3platform.h', 'gl31.h', 'gl32.h']),
('GLFW', ['glfw3.h']),
('KHR', ['khrplatform.h'])]:
for h in headers:
inc = '#include <' + directory + '/' + h + '>'
print(inc)
create_test_file('a.c', inc)
create_test_file('b.c', inc)
run_process([EMCC] + std + ['a.c', 'b.c'])
@is_slow_test
def test_single_file(self):
for (single_file_enabled,
meminit1_enabled,
debug_enabled,
closure_enabled,
wasm_enabled) in itertools.product([True, False], repeat=5):
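# itertools.product over five booleans enumerates all 2**5 == 32 combinations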
# skip unhelpful option combinations
if wasm_enabled and meminit1_enabled:
continue
if closure_enabled and debug_enabled:
continue
expect_wasm = wasm_enabled
expect_meminit = meminit1_enabled and not wasm_enabled
expect_wat = debug_enabled and wasm_enabled and not self.is_wasm_backend()
cmd = [EMCC, path_from_root('tests', 'hello_world.c')]
if single_file_enabled:
expect_meminit = False
expect_wasm = False
cmd += ['-s', 'SINGLE_FILE=1']
if meminit1_enabled:
cmd += ['--memory-init-file', '1']
if debug_enabled:
cmd += ['-g']
if closure_enabled:
cmd += ['--closure', '1']
if not wasm_enabled:
cmd += ['-s', 'WASM=0']
self.clear()
def do_test(cmd):
print(' '.join(cmd))
run_process(cmd)
print(os.listdir('.'))
assert expect_meminit == (os.path.exists('a.out.mem') or os.path.exists('a.out.js.mem'))
assert expect_wasm == os.path.exists('a.out.wasm')
assert expect_wat == os.path.exists('a.out.wat')
self.assertContained('hello, world!', run_js('a.out.js'))
do_test(cmd)
# additional combinations that are not part of the big product()
if self.is_wasm_backend() and debug_enabled:
separate_dwarf_cmd = cmd + ['-gseparate-dwarf']
if wasm_enabled:
do_test(separate_dwarf_cmd)
self.assertExists('a.out.wasm.debug.wasm')
else:
self.expect_fail(separate_dwarf_cmd)
def test_emar_M(self):
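# emar -M drives ar's MRI script mode; the script below merges the two
# archives into combined.a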
create_test_file('file1', ' ')
create_test_file('file2', ' ')
run_process([EMAR, 'cr', 'file1.a', 'file1'])
run_process([EMAR, 'cr', 'file2.a', 'file2'])
run_process([EMAR, '-M'], input='''create combined.a
addlib file1.a
addlib file2.a
save
end
''')
result = run_process([EMAR, 't', 'combined.a'], stdout=PIPE).stdout
self.assertContained('file1', result)
self.assertContained('file2', result)
def test_emar_duplicate_inputs(self):
# Verify that we can supply the same input multiple times without
# confusing emar.py:
# See https://github.com/emscripten-core/emscripten/issues/9733
create_test_file('file1', ' ')
run_process([EMAR, 'cr', 'file1.a', 'file1', 'file1'])
# Temporarily disabled to allow this llvm change to roll
# https://reviews.llvm.org/D69665
@no_windows('Temporarily disabled under windows')
def test_emar_response_file(self):
# Test that special characters such as single quotes in filenames survive being
# sent via response file
create_test_file("file'1", ' ')
create_test_file("file'2", ' ')
building.emar('cr', 'libfoo.a', ("file'1", "file'2"))
def test_archive_empty(self):
# This test was added because we had an issue with AUTO_ARCHIVE_INDEXES failing on empty
# archives (which inherently don't have indexes).
run_process([EMAR, 'crS', 'libfoo.a'])
run_process([EMCC, '-Werror', 'libfoo.a', path_from_root('tests', 'hello_world.c')])
def test_archive_no_index(self):
create_test_file('foo.c', 'int foo = 1;')
run_process([EMCC, '-c', 'foo.c'])
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
# The `S` flag means don't add an archive index
run_process([EMAR, 'crS', 'libfoo.a', 'foo.o'])
# The llvm backend (like GNU ld and lld) doesn't support linking archives with no index.
# However we have logic that will automatically add indexes (unless running with
# NO_AUTO_ARCHIVE_INDEXES).
if self.is_wasm_backend():
stderr = self.expect_fail([EMCC, '-s', 'NO_AUTO_ARCHIVE_INDEXES', 'libfoo.a', 'hello_world.o'])
self.assertContained('libfoo.a: archive has no index; run ranlib to add one', stderr)
# The default behavior is to add archive indexes automatically.
run_process([EMCC, 'libfoo.a', 'hello_world.o'])
@no_fastcomp('AUTO_ARCHIVE_INDEXES only applies to wasm backend')
def test_archive_non_objects(self):
create_test_file('file.txt', 'test file')
# llvm-nm has issues with files that start with two or more null bytes since it thinks they
# are COFF files. Ensure that we correctly ignore such files when we process them.
create_test_file('zeros.bin', '\0\0\0\0')
run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
# No index added.
# --format=darwin (the default on OSX) has a strange issue where it adds extra
# newlines to files: https://bugs.llvm.org/show_bug.cgi?id=42562
run_process([EMAR, 'crS', '--format=gnu', 'libfoo.a', 'file.txt', 'zeros.bin', 'hello_world.o'])
run_process([EMCC, path_from_root('tests', 'hello_world.c'), 'libfoo.a'])
def test_flag_aliases(self):
def assert_aliases_match(flag1, flag2, flagarg, extra_args=[]):
results = {}
for f in (flag1, flag2):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', f + '=' + flagarg] + extra_args)
with open('a.out.js') as out:
results[f + '.js'] = out.read()
with open('a.out.wasm', 'rb') as out:
results[f + '.wasm'] = out.read()
self.assertEqual(results[flag1 + '.js'], results[flag2 + '.js'], 'js results should be identical')
self.assertEqual(results[flag1 + '.wasm'], results[flag2 + '.wasm'], 'wasm results should be identical')
assert_aliases_match('INITIAL_MEMORY', 'TOTAL_MEMORY', '16777216')
assert_aliases_match('INITIAL_MEMORY', 'TOTAL_MEMORY', '64MB')
assert_aliases_match('MAXIMUM_MEMORY', 'WASM_MEM_MAX', '16777216', ['-s', 'ALLOW_MEMORY_GROWTH'])
assert_aliases_match('MAXIMUM_MEMORY', 'BINARYEN_MEM_MAX', '16777216', ['-s', 'ALLOW_MEMORY_GROWTH'])
def test_IGNORE_CLOSURE_COMPILER_ERRORS(self):
create_test_file('pre.js', r'''
// make closure compiler very very angry
var dupe = 1;
var dupe = 2;
function Node() {
throw 'Node is a DOM thing too, and use the ' + dupe;
}
function Node() {
throw '(duplicate) Node is a DOM thing too, and also use the ' + dupe;
}
''')
def test(check, extra=[]):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '--closure', '1', '--pre-js', 'pre.js'] + extra
proc = run_process(cmd, check=check, stderr=PIPE)
if not check:
self.assertNotEqual(proc.returncode, 0)
return proc
WARNING = 'Variable dupe declared more than once'
proc = test(check=False)
self.assertContained(WARNING, proc.stderr)
proc = test(check=True, extra=['-s', 'IGNORE_CLOSURE_COMPILER_ERRORS=1'])
self.assertNotContained(WARNING, proc.stderr)
def test_closure_full_js_library(self):
# test for closure errors in the entire JS library
# We must ignore various types of errors that are expected in this situation, as we
# are including a lot of JS without corresponding compiled code for it. This still
# lets us catch all other errors.
with env_modify({'EMCC_CLOSURE_ARGS': '--jscomp_off undefinedVars'}):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1', '--closure', '1', '-g1', '-s', 'INCLUDE_FULL_LIBRARY=1', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])
# Tests --closure-args command line flag
def test_closure_externs(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--closure', '1', '--pre-js', path_from_root('tests', 'test_closure_externs_pre_js.js'), '--closure-args', '--externs "' + path_from_root('tests', 'test_closure_externs.js') + '"'])
def test_toolchain_profiler(self):
environ = os.environ.copy()
environ['EM_PROFILE_TOOLCHAIN'] = '1'
# replaced subprocess functions should not cause errors
run_process([EMCC, path_from_root('tests', 'hello_world.c')], env=environ)
def test_noderawfs(self):
fopen_write = open(path_from_root('tests', 'asmfs', 'fopen_write.cpp')).read()
create_test_file('main.cpp', fopen_write)
run_process([EMCC, 'main.cpp', '-s', 'NODERAWFS=1'])
self.assertContained("read 11 bytes. Result: Hello data!", run_js('a.out.js'))
# NODERAWFS should directly write on OS file system
self.assertEqual("Hello data!", open('hello_file.txt').read())
def test_noderawfs_disables_embedding(self):
expected = '--preload-file and --embed-file cannot be used with NODERAWFS which disables virtual filesystem'
base = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'NODERAWFS=1']
err = self.expect_fail(base + ['--preload-file', 'somefile'])
self.assertContained(expected, err)
err = self.expect_fail(base + ['--embed-file', 'somefile'])
self.assertContained(expected, err)
def test_node_code_caching(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'NODE_CODE_CACHING',
'-s', 'WASM_ASYNC_COMPILATION=0'])
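# with NODE_CODE_CACHING the generated JS stores a serialized compiled wasm
# module (a V8 code cache) in an 'a.out.wasm.<hash>.cached' file, which is
# what the glob below looks for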
def get_cached():
cached = glob.glob('a.out.wasm.*.cached')
if not cached:
return None
self.assertEqual(len(cached), 1)
return cached[0]
# running the program makes it cache the code
self.assertFalse(get_cached())
self.assertEqual('hello, world!', run_js('a.out.js').strip())
self.assertTrue(get_cached(), 'should be a cache file')
# it's hard to test that the cache actually speeds things up, but we can at
# least test that it tries to deserialize the cache file
with open(get_cached(), 'w') as f:
f.write('waka waka')
ERROR = 'NODE_CODE_CACHING: failed to deserialize, bad cache file?'
self.assertContained(ERROR, run_js('a.out.js', stderr=PIPE, full_output=True))
# we cached proper code after showing that error
with open(get_cached(), 'rb') as f:
self.assertEqual(f.read().count(b'waka'), 0)
self.assertNotContained(ERROR, run_js('a.out.js', stderr=PIPE, full_output=True))
def test_autotools_shared_check(self):
env = os.environ.copy()
env['LC_ALL'] = 'C'
expected = ': supported targets:.* elf'
for python in [PYTHON, 'python', 'python2', 'python3']:
if not shared.which(python):
continue
if python == 'python3' and not is_python3_version_supported():
continue
print(python)
out = run_process([python, path_from_root('emcc.py'), '--help'], stdout=PIPE, env=env).stdout
assert re.search(expected, out)
def test_ioctl_window_size(self):
self.do_other_test(os.path.join('other', 'ioctl', 'window_size'))
def test_fd_closed(self):
self.do_other_test(os.path.join('other', 'fd_closed'))
def test_fflush(self):
# fflush without the full filesystem won't quite work
self.do_other_test(os.path.join('other', 'fflush'))
def test_fflush_fs(self):
# fflush with the full filesystem will flush from libc, but not the JS logging, which awaits a newline
self.do_other_test(os.path.join('other', 'fflush_fs'), emcc_args=['-s', 'FORCE_FILESYSTEM=1'])
def test_fflush_fs_exit(self):
# on exit, we can send out a newline as no more code will run
self.do_other_test(os.path.join('other', 'fflush_fs_exit'), emcc_args=['-s', 'FORCE_FILESYSTEM=1', '-s', 'EXIT_RUNTIME=1'])
def test_extern_weak(self):
self.do_other_test(os.path.join('other', 'extern_weak'))
if not self.is_wasm_backend(): # TODO: wasm backend main module
self.do_other_test(os.path.join('other', 'extern_weak'), emcc_args=['-s', 'MAIN_MODULE=1', '-DLINKABLE'])
def test_main_module_without_main(self):
create_test_file('pre.js', r'''
var Module = {
onRuntimeInitialized: function() {
Module._foo();
}
};
''')
create_test_file('src.c', r'''
#include <emscripten.h>
EMSCRIPTEN_KEEPALIVE void foo() {
EM_ASM({ console.log("bar") });
}
''')
run_process([EMCC, 'src.c', '--pre-js', 'pre.js', '-s', 'MAIN_MODULE=2'])
self.assertContained('bar', run_js('a.out.js'))
def test_js_optimizer_parse_error(self):
# check we show a proper understandable error for JS parse problems
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
var x = !<->5.; // wtf
});
}
''')
stderr = self.expect_fail([EMCC, 'src.cpp', '-O2'])
# wasm backend output doesn't have spaces in the EM_ASM function bodies
self.assertContained(('''
var ASM_CONSTS = [function() { var x = !<->5.; }];
^
''', '''
1024: function() {var x = !<->5.;}
^
'''), stderr)
@no_fastcomp('wasm2js only')
def test_js_optimizer_chunk_size_determinism(self):
def build():
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O3', '-s', 'WASM=0'])
with open('a.out.js') as f:
# FIXME: newline differences can exist, ignore for now
return f.read().replace('\n', '')
normal = build()
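# build again while forcing 1-byte and effectively-unbounded optimizer chunk
# sizes; the emitted JS must be byte-identical regardless of how work is chunked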
with env_modify({
'EMCC_JSOPT_MIN_CHUNK_SIZE': '1',
'EMCC_JSOPT_MAX_CHUNK_SIZE': '1'
}):
tiny = build()
with env_modify({
'EMCC_JSOPT_MIN_CHUNK_SIZE': '4294967296',
'EMCC_JSOPT_MAX_CHUNK_SIZE': '4294967296'
}):
huge = build()
self.assertIdentical(normal, tiny)
self.assertIdentical(normal, huge)
def test_EM_ASM_ES6(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
var x = (a, b) => 5; // valid ES6
async function y() {} // valid ES2017
out('hello!');
});
}
''')
run_process([EMCC, 'src.cpp', '-O2'])
self.assertContained('hello!', run_js('a.out.js'))
def test_check_sourcemapurl(self):
if not self.is_wasm():
self.skipTest('only supported with wasm')
run_process([EMCC, path_from_root('tests', 'hello_123.c'), '-g4', '-o', 'a.js', '--source-map-base', 'dir/'])
output = open('a.wasm', 'rb').read()
# has sourceMappingURL section content and points to 'dir/a.wasm.map' file
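# (the custom section stores two LEB128-length-prefixed strings: the section name
#  'sourceMappingURL' and then the URL itself. As a minimal sketch, an unsigned
#  LEB128 encoder like the encode_leb helper used here could look like:
#    def encode_leb(n):
#      out = b''
#      while True:
#        byte = n & 0x7f
#        n >>= 7
#        if n:
#          out += bytes([byte | 0x80])
#        else:
#          return out + bytes([byte])
#  for the short lengths used in this test that is always a single byte)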
source_mapping_url_content = encode_leb(len('sourceMappingURL')) + b'sourceMappingURL' + encode_leb(len('dir/a.wasm.map')) + b'dir/a.wasm.map'
self.assertEqual(output.count(source_mapping_url_content), 1)
# make sure no DWARF debug info sections remain - they would just waste space
self.assertNotIn(b'.debug_', output)
def test_check_source_map_args(self):
# -g4 is needed for source maps; -g is not enough
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g'])
self.assertNotExists('a.out.wasm.map')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g4'])
self.assertExists('a.out.wasm.map')
@parameterized({
'normal': [],
'profiling': ['--profiling'] # -g4 --profiling should still emit a source map; see #8584
})
def test_check_sourcemapurl_default(self, *args):
print(args)
if not self.is_wasm():
self.skipTest('only supported with wasm')
try_delete('a.wasm.map')
run_process([EMCC, path_from_root('tests', 'hello_123.c'), '-g4', '-o', 'a.js'] + list(args))
output = open('a.wasm', 'rb').read()
# has sourceMappingURL section content and points to 'a.wasm.map' file
source_mapping_url_content = encode_leb(len('sourceMappingURL')) + b'sourceMappingURL' + encode_leb(len('a.wasm.map')) + b'a.wasm.map'
self.assertIn(source_mapping_url_content, output)
def test_wasm_sourcemap(self):
# no_main.c will be read (from its relative location) due to the specified "-s" option
shutil.copyfile(path_from_root('tests', 'other', 'wasm_sourcemap', 'no_main.c'), 'no_main.c')
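# wasm-sourcemap.py builds a source map for a wasm file from its DWARF line info
# (supplied here as a pre-generated llvm-dwarfdump output via --dwarfdump-output)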
wasm_map_cmd = [PYTHON, path_from_root('tools', 'wasm-sourcemap.py'),
'--sources', '--prefix', '=wasm-src://',
'--load-prefix', '/emscripten/tests/other/wasm_sourcemap=.',
'--dwarfdump-output',
path_from_root('tests', 'other', 'wasm_sourcemap', 'foo.wasm.dump'),
'-o', 'a.out.wasm.map',
path_from_root('tests', 'other', 'wasm_sourcemap', 'foo.wasm'),
'--basepath=' + os.getcwd()]
run_process(wasm_map_cmd)
output = open('a.out.wasm.map').read()
# has "sources" entry with file (includes also `--prefix =wasm-src:///` replacement)
self.assertIn('wasm-src:///emscripten/tests/other/wasm_sourcemap/no_main.c', output)
# has "sourcesContent" entry with source code (included with `-s` option)
self.assertIn('int foo()', output)
# has some entries
self.assertRegexpMatches(output, r'"mappings":\s*"[A-Za-z0-9+/]')
def test_wasm_sourcemap_dead(self):
wasm_map_cmd = [PYTHON, path_from_root('tools', 'wasm-sourcemap.py'),
'--dwarfdump-output',
path_from_root('tests', 'other', 'wasm_sourcemap_dead', 't.wasm.dump'),
'-o', 'a.out.wasm.map',
path_from_root('tests', 'other', 'wasm_sourcemap_dead', 't.wasm'),
'--basepath=' + os.getcwd()]
run_process(wasm_map_cmd, stdout=PIPE, stderr=PIPE)
output = open('a.out.wasm.map').read()
# has only two entries
self.assertRegexpMatches(output, r'"mappings":\s*"[A-Za-z0-9+/]+,[A-Za-z0-9+/]+"')
@no_fastcomp()
def test_wasm_sourcemap_relative_paths(self):
def test(infile, source_map_added_dir=''):
expected_source_map_path = 'a.cpp'
if source_map_added_dir:
expected_source_map_path = source_map_added_dir + '/' + expected_source_map_path
print(infile, expected_source_map_path)
shutil.copyfile(path_from_root('tests', 'hello_123.c'), infile)
infiles = [
infile,
os.path.abspath(infile),
'./' + infile
]
for curr in infiles:
print(' ', curr)
run_process([EMCC, curr, '-g4'])
with open('a.out.wasm.map', 'r') as f:
self.assertIn('"%s"' % expected_source_map_path, str(f.read()))
test('a.cpp')
ensure_dir('inner')
test('inner/a.cpp', 'inner')
@no_fastcomp('dwarf')
def test_separate_dwarf(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g'])
self.assertExists('a.out.wasm')
self.assertNotExists('a.out.wasm.debug.wasm')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf'])
self.assertExists('a.out.wasm')
self.assertExists('a.out.wasm.debug.wasm')
self.assertLess(os.path.getsize('a.out.wasm'), os.path.getsize('a.out.wasm.debug.wasm'))
# the special section that refers to the side debug file should also exist
with open('a.out.wasm', 'rb') as f:
wasm = f.read()
self.assertIn(b'external_debug_info', wasm)
self.assertIn(b'a.out.wasm.debug.wasm', wasm)
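# ('external_debug_info' is a wasm custom section whose payload is the name/URL
#  of the side file containing the DWARF, which is how debuggers locate it)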
@no_fastcomp('dwarf')
def test_separate_dwarf_with_filename(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf=with_dwarf.wasm'])
self.assertNotExists('a.out.wasm.debug.wasm')
self.assertExists('with_dwarf.wasm')
# the correct notation has exactly one '=', in the right place
for invalid in ('-gseparate-dwarf=x=', '-gseparate-dwarfy=', '-gseparate-dwarf-hmm'):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), invalid])
self.assertContained('invalid -gseparate-dwarf=FILENAME notation', stderr)
def test_wasm_producers_section(self):
# no producers section by default
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
with open('a.out.wasm', 'rb') as f:
self.assertNotIn('clang', str(f.read()))
size = os.path.getsize('a.out.wasm')
if self.is_wasm_backend():
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EMIT_PRODUCERS_SECTION=1'])
with open('a.out.wasm', 'rb') as f:
self.assertIn('clang', str(f.read()))
size_with_section = os.path.getsize('a.out.wasm')
self.assertLess(size, size_with_section)
def test_html_preprocess(self):
test_file = path_from_root('tests', 'module', 'test_stdin.c')
output_file = 'test_stdin.html'
shell_file = path_from_root('tests', 'module', 'test_html_preprocess.html')
run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=0'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:(else) ASSERTIONS != 1
T2:ASSERTIONS != 1
T3:ASSERTIONS < 2
T4:(else) ASSERTIONS <= 1
T5:(else) ASSERTIONS
T6:!ASSERTIONS""", output)
run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=1'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:ASSERTIONS == 1
T2:(else) ASSERTIONS == 1
T3:ASSERTIONS < 2
T4:(else) ASSERTIONS <= 1
T5:ASSERTIONS
T6:(else) !ASSERTIONS""", output)
run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=2'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:(else) ASSERTIONS != 1
T2:ASSERTIONS != 1
T3:(else) ASSERTIONS >= 2
T4:ASSERTIONS > 1
T5:ASSERTIONS
T6:(else) !ASSERTIONS""", output)
# Tests that Emscripten-compiled applications can be run via a relative path on the node command line, from a working directory different from the one containing the script.
def test_node_js_run_from_different_directory(self):
ensure_dir('subdir')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('subdir', 'a.js'), '-O3'])
ret = run_process(NODE_JS + [os.path.join('subdir', 'a.js')], stdout=PIPE).stdout
self.assertContained('hello, world!', ret)
# Tests that a pthreads + modularize build can be run in node js
@no_fastcomp('node pthreads only supported on wasm backend')
def test_node_js_pthread_module(self):
# create module loader script
moduleLoader = 'moduleLoader.js'
moduleLoaderContents = '''
const test_module = require("./module");
test_module().then((test_module_instance) => {
test_module_instance._main();
process.exit(0);
});
'''
ensure_dir('subdir')
create_test_file(os.path.join('subdir', moduleLoader), moduleLoaderContents)
# build hello_world.c
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('subdir', 'module.js'), '-s', 'USE_PTHREADS=1', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME=test_module', '-s', 'ENVIRONMENT=worker,node'])
# run the module
ret = run_process(NODE_JS + ['--experimental-wasm-threads'] + [os.path.join('subdir', moduleLoader)], stdout=PIPE).stdout
self.assertContained('hello, world!', ret)
@no_windows('node system() does not seem to work, see https://github.com/emscripten-core/emscripten/pull/10547')
def test_node_js_system(self):
run_process([EMCC, '-DENV_NODE', path_from_root('tests', 'system.c'), '-o', 'a.js', '-O3'])
ret = run_process(NODE_JS + ['a.js'], stdout=PIPE).stdout
self.assertContained('OK', ret)
def test_is_bitcode(self):
fname = 'tmp.o'
with open(fname, 'wb') as f:
f.write(b'foo')
self.assertFalse(building.is_bitcode(fname))
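# is_bitcode recognizes two layouts: the bitcode wrapper header, whose magic is
# 0x0B17C0DE stored little-endian (the bytes written below), and a raw bitcode
# file, which begins with the magic bytes 'BC'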
with open(fname, 'wb') as f:
f.write(b'\xDE\xC0\x17\x0B')
f.write(16 * b'\x00')
f.write(b'BC')
self.assertTrue(building.is_bitcode(fname))
with open(fname, 'wb') as f:
f.write(b'BC')
self.assertTrue(building.is_bitcode(fname))
def test_is_ar(self):
fname = 'tmp.a'
with open(fname, 'wb') as f:
f.write(b'foo')
self.assertFalse(building.is_ar(fname))
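# a Unix ar archive starts with the 8-byte global header '!<arch>' plus a newline,
# which is exactly what is written below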
with open(fname, 'wb') as f:
f.write(b'!<arch>\n')
self.assertTrue(building.is_ar(fname))
def test_emcc_parsing(self):
create_test_file('src.c', r'''
#include <stdio.h>
void a() { printf("a\n"); }
void b() { printf("b\n"); }
void c() { printf("c\n"); }
void d() { printf("d\n"); }
''')
create_test_file('response', r'''[
"_a",
"_b",
"_c",
"_d"
]
''')
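# '@response' makes emcc read the setting's value from the file named 'response'
# (here, a JSON list of exported functions)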
for export_arg, expected in [
# extra space at end - should be ignored
("EXPORTED_FUNCTIONS=['_a', '_b', '_c', '_d' ]", ''),
# extra newline in response file - should be ignored
("EXPORTED_FUNCTIONS=@response", ''),
# stray slash
("EXPORTED_FUNCTIONS=['_a', '_b', \\'_c', '_d']", '''undefined exported function: "\\\\'_c'"'''),
# stray slash
("EXPORTED_FUNCTIONS=['_a', '_b',\\ '_c', '_d']", '''undefined exported function: "\\\\ '_c'"'''),
# stray slash
('EXPORTED_FUNCTIONS=["_a", "_b", \\"_c", "_d"]', 'undefined exported function: "\\\\"_c""'),
# stray slash
('EXPORTED_FUNCTIONS=["_a", "_b",\\ "_c", "_d"]', 'undefined exported function: "\\\\ "_c"'),
# missing comma
('EXPORTED_FUNCTIONS=["_a", "_b" "_c", "_d"]', 'undefined exported function: "_b" "_c"'),
]:
print(export_arg)
proc = run_process([EMCC, 'src.c', '-s', export_arg], stdout=PIPE, stderr=PIPE, check=not expected)
print(proc.stderr)
if not expected:
self.assertFalse(proc.stderr)
else:
self.assertNotEqual(proc.returncode, 0)
self.assertContained(expected, proc.stderr)
@no_fastcomp('uses new ASYNCIFY')
def test_asyncify_escaping(self):
proc = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASYNCIFY=1', '-s', "ASYNCIFY_ONLY=[DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)]"], stdout=PIPE, stderr=PIPE)
self.assertContained('emcc: ASYNCIFY list contains an item without balanced parentheses', proc.stderr)
self.assertContained(' DOS_ReadFile(unsigned short', proc.stderr)
self.assertContained('Try to quote the entire argument', proc.stderr)
@no_fastcomp('uses new ASYNCIFY')
def test_asyncify_response_file(self):
return self.skipTest(' TODO remove the support for multiple binaryen versions warning output ("function name" vs "pattern" etc).')
create_test_file('a.txt', r'''[
"DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)"
]
''')
proc = run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASYNCIFY=1', '-s', "ASYNCIFY_ONLY=@a.txt"], stdout=PIPE, stderr=PIPE)
# we should parse the response file properly, and then issue a proper warning for the missing function
self.assertContained(
'Asyncify onlylist contained a non-matching pattern: DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)',
proc.stderr)
# Sockets and networking
def test_inet(self):
self.do_run(open(path_from_root('tests', 'sha1.c')).read(), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
int main() {
printf("*%x,%x,%x,%x,%x,%x*\n", htonl(0xa1b2c3d4), htonl(0xfe3572e0), htonl(0x07abcdf0), htons(0xabcd), ntohl(0x43211234), ntohs(0xbeaf));
in_addr_t i = inet_addr("190.180.10.78");
printf("%x\n", i);
return 0;
}
'''
self.do_run(src, '*d4c3b2a1,e07235fe,f0cdab07,cdab,34122143,afbe*\n4e0ab4be\n')
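# on a little-endian target htonl/htons byte-swap, e.g. htonl(0xa1b2c3d4) ==
# 0xd4c3b2a1, which is exactly what the expected '*d4c3b2a1,...' output encodes.
# The same can be sanity-checked on a little-endian host in Python:
#   import socket
#   assert socket.htonl(0xa1b2c3d4) == 0xd4c3b2a1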
def test_inet2(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
int main() {
struct in_addr x, x2;
int *y = (int*)&x;
*y = 0x12345678;
printf("%s\n", inet_ntoa(x));
int r = inet_aton(inet_ntoa(x), &x2);
printf("%s\n", inet_ntoa(x2));
return 0;
}
'''
self.do_run(src, '120.86.52.18\n120.86.52.18\n')
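# the little-endian bytes of 0x12345678 in memory are 0x78 0x56 0x34 0x12, i.e.
# 120.86.52.18 in dotted-quad form, matching the expected output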
def test_inet3(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
#include <sys/socket.h>
int main() {
char dst[64];
struct in_addr x, x2;
int *y = (int*)&x;
*y = 0x12345678;
printf("%s\n", inet_ntop(AF_INET,&x,dst,sizeof dst));
int r = inet_aton(inet_ntoa(x), &x2);
printf("%s\n", inet_ntop(AF_INET,&x2,dst,sizeof dst));
return 0;
}
'''
self.do_run(src, '120.86.52.18\n120.86.52.18\n')
def test_inet4(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
#include <sys/socket.h>
void test(const char *test_addr, bool first=true){
char str[40];
struct in6_addr addr;
unsigned char *p = (unsigned char*)&addr;
int ret;
ret = inet_pton(AF_INET6,test_addr,&addr);
if(ret == -1) return;
if(ret == 0) return;
if(inet_ntop(AF_INET6,&addr,str,sizeof(str)) == NULL ) return;
printf("%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x - %s\n",
p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9],p[10],p[11],p[12],p[13],p[14],p[15],str);
if (first) test(str, false); // check again, on our output
}
int main(){
test("::");
test("::1");
test("::1.2.3.4");
test("::17.18.19.20");
test("::ffff:1.2.3.4");
test("1::ffff");
test("::255.255.255.255");
test("0:ff00:1::");
test("0:ff::");
test("abcd::");
test("ffff::a");
test("ffff::a:b");
test("ffff::a:b:c");
test("ffff::a:b:c:d");
test("ffff::a:b:c:d:e");
test("::1:2:0:0:0");
test("0:0:1:2:3::");
test("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
test("1::255.255.255.255");
//below should fail and not produce results..
test("1.2.3.4");
test("");
test("-");
printf("ok.\n");
}
'''
self.do_run(src, r'''0000:0000:0000:0000:0000:0000:0000:0000 - ::
0000:0000:0000:0000:0000:0000:0000:0000 - ::
0000:0000:0000:0000:0000:0000:0000:0001 - ::1
0000:0000:0000:0000:0000:0000:0000:0001 - ::1
0000:0000:0000:0000:0000:0000:0102:0304 - ::102:304
0000:0000:0000:0000:0000:0000:0102:0304 - ::102:304
0000:0000:0000:0000:0000:0000:1112:1314 - ::1112:1314
0000:0000:0000:0000:0000:0000:1112:1314 - ::1112:1314
0000:0000:0000:0000:0000:ffff:0102:0304 - ::ffff:1.2.3.4
0000:0000:0000:0000:0000:ffff:0102:0304 - ::ffff:1.2.3.4
0001:0000:0000:0000:0000:0000:0000:ffff - 1::ffff
0001:0000:0000:0000:0000:0000:0000:ffff - 1::ffff
0000:0000:0000:0000:0000:0000:ffff:ffff - ::ffff:ffff
0000:0000:0000:0000:0000:0000:ffff:ffff - ::ffff:ffff
0000:ff00:0001:0000:0000:0000:0000:0000 - 0:ff00:1::
0000:ff00:0001:0000:0000:0000:0000:0000 - 0:ff00:1::
0000:00ff:0000:0000:0000:0000:0000:0000 - 0:ff::
0000:00ff:0000:0000:0000:0000:0000:0000 - 0:ff::
abcd:0000:0000:0000:0000:0000:0000:0000 - abcd::
abcd:0000:0000:0000:0000:0000:0000:0000 - abcd::
ffff:0000:0000:0000:0000:0000:0000:000a - ffff::a
ffff:0000:0000:0000:0000:0000:0000:000a - ffff::a
ffff:0000:0000:0000:0000:0000:000a:000b - ffff::a:b
ffff:0000:0000:0000:0000:0000:000a:000b - ffff::a:b
ffff:0000:0000:0000:0000:000a:000b:000c - ffff::a:b:c
ffff:0000:0000:0000:0000:000a:000b:000c - ffff::a:b:c
ffff:0000:0000:0000:000a:000b:000c:000d - ffff::a:b:c:d
ffff:0000:0000:0000:000a:000b:000c:000d - ffff::a:b:c:d
ffff:0000:0000:000a:000b:000c:000d:000e - ffff::a:b:c:d:e
ffff:0000:0000:000a:000b:000c:000d:000e - ffff::a:b:c:d:e
0000:0000:0000:0001:0002:0000:0000:0000 - ::1:2:0:0:0
0000:0000:0000:0001:0002:0000:0000:0000 - ::1:2:0:0:0
0000:0000:0001:0002:0003:0000:0000:0000 - 0:0:1:2:3::
0000:0000:0001:0002:0003:0000:0000:0000 - 0:0:1:2:3::
ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff - ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff - ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
0001:0000:0000:0000:0000:0000:ffff:ffff - 1::ffff:ffff
0001:0000:0000:0000:0000:0000:ffff:ffff - 1::ffff:ffff
ok.
''')
def test_getsockname_unconnected_socket(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
#include <assert.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <string.h>
int main() {
int fd;
int z;
fd = socket(PF_INET, SOCK_STREAM, IPPROTO_TCP);
struct sockaddr_in adr_inet;
socklen_t len_inet = sizeof adr_inet;
z = getsockname(fd, (struct sockaddr *)&adr_inet, &len_inet);
if (z != 0) {
perror("getsockname error");
return 1;
}
char buffer[1000];
sprintf(buffer, "%s:%u", inet_ntoa(adr_inet.sin_addr), (unsigned)ntohs(adr_inet.sin_port));
const char *correct = "0.0.0.0:0";
printf("got (expected) socket: %s (%s), size %lu (%lu)\n", buffer, correct, strlen(buffer), strlen(correct));
assert(strlen(buffer) == strlen(correct));
assert(strcmp(buffer, correct) == 0);
puts("success.");
}
''', 'success.')
def test_getpeername_unconnected_socket(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
#include <assert.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <string.h>
int main() {
int fd;
int z;
fd = socket(PF_INET, SOCK_STREAM, IPPROTO_TCP);
struct sockaddr_in adr_inet;
socklen_t len_inet = sizeof adr_inet;
z = getpeername(fd, (struct sockaddr *)&adr_inet, &len_inet);
if (z != 0) {
perror("getpeername error");
return 1;
}
puts("unexpected success.");
}
''', 'getpeername error: Socket not connected', assert_returncode=None)
def test_getaddrinfo(self):
self.do_run(open(path_from_root('tests', 'sockets', 'test_getaddrinfo.c')).read(), 'success')
def test_getnameinfo(self):
self.do_run(open(path_from_root('tests', 'sockets', 'test_getnameinfo.c')).read(), 'success')
def test_gethostbyname(self):
self.do_run(open(path_from_root('tests', 'sockets', 'test_gethostbyname.c')).read(), 'success')
def test_getprotobyname(self):
self.do_run(open(path_from_root('tests', 'sockets', 'test_getprotobyname.c')).read(), 'success')
def test_socketpair(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
int main() {
int fd[2];
int err;
err = socketpair(AF_INET, SOCK_STREAM, 0, fd);
if (err != 0) {
perror("socketpair error");
return 1;
}
puts("unexpected success.");
}
''', 'socketpair error: Function not implemented', assert_returncode=None)
def test_link(self):
self.do_run(r'''
#include <netdb.h>
#include <sys/types.h>
#include <sys/socket.h>
int main () {
void* thing = gethostbyname("bing.com");
ssize_t rval = recv (0, thing, 0, 0);
rval = send (0, thing, 0, 0);
return 0;
}''', '', force_c=True)
# This test verifies that function names embedded into the build with --js-library (JS functions imported to asm.js/wasm)
# are minified when -O3 is used
def test_js_function_names_are_minified(self):
def check_size(f, expected_size):
if not os.path.isfile(f):
return # a nonexistent file passes this check
obtained_size = os.path.getsize(f)
print('size of generated ' + f + ': ' + str(obtained_size))
try_delete(f)
self.assertLess(obtained_size, expected_size)
run_process([PYTHON, path_from_root('tests', 'gen_many_js_functions.py'), 'library_long.js', 'main_long.c'])
for wasm in [['-s', 'WASM=1'], ['-s', 'WASM=0']]:
if self.is_wasm_backend() and 'WASM=0' in wasm:
continue
# Currently we rely on Closure for full minification of every appearance of JS function names.
# TODO: Add minification also for non-Closure users and add [] to this list to test minification without Closure.
for closure in [['--closure', '1']]:
args = [EMCC, '-O3', '--js-library', 'library_long.js', 'main_long.c', '-o', 'a.html'] + wasm + closure
print(' '.join(args))
run_process(args)
ret = run_process(NODE_JS + ['a.js'], stdout=PIPE).stdout
self.assertTextDataIdentical('Sum of numbers from 1 to 1000: 500500 (expected 500500)', ret.strip())
check_size('a.js', 150000)
check_size('a.wasm', 80000)
# Checks that the invoke_*() wrappers that manage C++ exceptions will not be generated if exceptions are disabled
def test_no_invoke_functions_are_generated_if_exception_catching_is_disabled(self):
self.skipTest('Skipping other.test_no_invoke_functions_are_generated_if_exception_catching_is_disabled: Enable after new version of fastcomp has been tagged')
for args in [[], ['-s', 'WASM=0']]:
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=1', '-o', 'a.html'] + args)
output = open('a.js').read()
self.assertContained('_main', output) # Smoke test that we actually compiled
self.assertNotContained('invoke_', output)
# Verifies that only the minimal needed set of invoke_*() functions will be generated when C++ exceptions are enabled
def test_no_excessive_invoke_functions_are_generated_when_exceptions_are_enabled(self):
self.skipTest('Skipping other.test_no_excessive_invoke_functions_are_generated_when_exceptions_are_enabled: Enable after new version of fastcomp has been tagged')
for args in [[], ['-s', 'WASM=0']]:
run_process([EMCC, path_from_root('tests', 'invoke_i.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=0', '-o', 'a.html'] + args)
output = open('a.js').read()
self.assertContained('invoke_i', output)
self.assertNotContained('invoke_ii', output)
self.assertNotContained('invoke_v', output)
def test_emscripten_metadata(self):
run_process([EMCC, path_from_root('tests', 'hello_world.c')])
self.assertNotIn(b'emscripten_metadata', open('a.out.wasm', 'rb').read())
run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'EMIT_EMSCRIPTEN_METADATA'])
self.assertIn(b'emscripten_metadata', open('a.out.wasm', 'rb').read())
# make sure wasm executes correctly
ret = run_process(NODE_JS + ['a.out.js'], stdout=PIPE).stdout
self.assertTextDataIdentical('hello, world!\n', ret)
@parameterized({
'O0': (False, ['-O0']), # noqa
'O0_emit': (True, ['-O0', '-s', 'EMIT_EMSCRIPTEN_LICENSE']), # noqa
'O2': (False, ['-O2']), # noqa
'O2_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE']), # noqa
'O2_js_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '-s', 'WASM=0']), # noqa
'O2_closure': (False, ['-O2', '--closure', '1']), # noqa
'O2_closure_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '--closure', '1']), # noqa
'O2_closure_js_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '--closure', '1', '-s', 'WASM=0']), # noqa
})
@no_fastcomp('EMIT_EMSCRIPTEN_LICENSE is upstream only')
def test_emscripten_license(self, expect_license, args):
# fastcomp does not support the new license flag
if not self.is_wasm_backend():
expect_license = False
run_process([EMCC, path_from_root('tests', 'hello_world.c')] + args)
with open('a.out.js') as f:
js = f.read()
licenses_found = len(re.findall('Copyright [0-9]* The Emscripten Authors', js))
if expect_license:
self.assertNotEqual(licenses_found, 0, 'Unable to find license block in output file!')
self.assertEqual(licenses_found, 1, 'Found too many license blocks in the output file!')
else:
self.assertEqual(licenses_found, 0, 'Found a license block in the output file, but it should not have been there!')
# This test verifies that the generated exports from an asm.js/wasm module
# reference the unminified exported name exactly once. (The name must appear
# once so calling code can access the export unminified, but any additional
# occurrences of the unminified name would be wasteful for size.)
def test_function_exports_are_small(self):
def test(wasm, closure, opt):
extra_args = wasm + opt + closure
print(extra_args)
args = [EMCC, path_from_root('tests', 'long_function_name_in_export.c'), '-o', 'a.html', '-s', 'ENVIRONMENT=web', '-s', 'DECLARE_ASM_MODULE_EXPORTS=0', '-Werror'] + extra_args
run_process(args)
output = open('a.js', 'r').read()
try_delete('a.js')
self.assertNotContained('asm["_thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction"]', output)
# TODO: Add stricter testing when the Wasm side is also optimized (currently
# Wasm still needs to reference exports multiple times)
if 'WASM=1' not in wasm:
num_times_export_is_referenced = output.count('thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction')
self.assertEqual(num_times_export_is_referenced, 1)
for closure in [[], ['--closure', '1']]:
for opt in [['-O2'], ['-O3'], ['-Os']]:
test(['-s', 'WASM=0'], closure, opt)
test(['-s', 'WASM=1', '-s', 'WASM_ASYNC_COMPILATION=0'], closure, opt)
def test_minimal_runtime_code_size(self):
smallest_code_size_args = ['-s', 'MINIMAL_RUNTIME=2',
'-s', 'AGGRESSIVE_VARIABLE_ELIMINATION=1',
'-s', 'ENVIRONMENT=web',
'-s', 'TEXTDECODER=2',
'-s', 'ABORTING_MALLOC=0',
'-s', 'ALLOW_MEMORY_GROWTH=0',
'-s', 'SUPPORT_ERRNO=0',
'-s', 'DECLARE_ASM_MODULE_EXPORTS=1',
'-s', 'MALLOC=emmalloc',
'-s', 'GL_EMULATE_GLES_VERSION_STRING_FORMAT=0',
'-s', 'GL_EXTENSIONS_IN_PREFIXED_FORMAT=0',
'-s', 'GL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=0',
'-s', 'GL_TRACK_ERRORS=0',
'-s', 'GL_SUPPORT_EXPLICIT_SWAP_CONTROL=0',
'-s', 'GL_POOL_TEMP_BUFFERS=0',
'-s', 'FAST_UNROLLED_MEMCPY_AND_MEMSET=0',
'-s', 'MIN_CHROME_VERSION=58',
'-s', 'NO_FILESYSTEM=1',
'--output_eol', 'linux',
'-Oz',
'--closure', '1',
'-DNDEBUG',
'-ffast-math']
asmjs = ['-s', 'WASM=0', '--separate-asm', '-s', 'ELIMINATE_DUPLICATE_FUNCTIONS=1', '--memory-init-file', '1']
wasm2js = ['-s', 'WASM=0', '--memory-init-file', '1']
hello_world_sources = [path_from_root('tests', 'small_hello_world.c'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=0',
'-s', 'ASM_PRIMITIVE_VARS=[STACKTOP]']
random_printf_sources = [path_from_root('tests', 'hello_random_printf.c'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=0',
'-s', 'ASM_PRIMITIVE_VARS=[STACKTOP]',
'-s', 'SINGLE_FILE=1']
hello_webgl_sources = [path_from_root('tests', 'minimal_webgl', 'main.cpp'),
path_from_root('tests', 'minimal_webgl', 'webgl.c'),
'--js-library', path_from_root('tests', 'minimal_webgl', 'library_js.js'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=1', '-lwebgl.js',
'-s', 'MODULARIZE=1']
hello_webgl2_sources = hello_webgl_sources + ['-s', 'MAX_WEBGL_VERSION=2']
def print_percent(actual, expected):
if actual == expected:
return ''
return ' ({:+.2f}%)'.format((actual - expected) * 100.0 / expected)
for js in [False, True]:
for sources, name in [
[hello_world_sources, 'hello_world'],
[random_printf_sources, 'random_printf'],
[hello_webgl_sources, 'hello_webgl'],
[hello_webgl2_sources, 'hello_webgl2']
]:
outputs = ['a.html', 'a.js']
test_name = name
args = smallest_code_size_args[:]
if not self.is_wasm_backend():
test_name += '_fastcomp'
if js:
outputs += ['a.mem']
if self.is_wasm_backend():
args += wasm2js
test_name += '_wasm2js'
else:
args += asmjs
outputs += ['a.asm.js']
test_name += '_asmjs'
else:
outputs += ['a.wasm']
test_name += '_wasm'
if 'SINGLE_FILE=1' in sources:
outputs = ['a.html']
results_file = path_from_root('tests', 'code_size', test_name + '.json')
print('\n-----------------------------\n' + test_name)
expected_results = {}
try:
expected_results = json.loads(open(results_file, 'r').read())
except Exception:
if not os.environ.get('EMTEST_REBASELINE'):
raise
args = [EMCC, '-o', 'a.html'] + args + sources
print('\n' + ' '.join(args))
run_process(args)
print('\n')
def get_file_gzipped_size(f):
f_gz = f + '.gz'
with gzip.open(f_gz, 'wb') as gzf:
gzf.write(open(f, 'rb').read())
size = os.path.getsize(f_gz)
try_delete(f_gz)
return size
obtained_results = {}
total_output_size = 0
total_expected_size = 0
total_output_size_gz = 0
total_expected_size_gz = 0
for f in outputs:
f_gz = f + '.gz'
expected_size = expected_results.get(f, float('inf'))
expected_size_gz = expected_results.get(f_gz, float('inf'))
size = os.path.getsize(f)
size_gz = get_file_gzipped_size(f)
obtained_results[f] = size
obtained_results[f_gz] = size_gz
if size != expected_size and (f.endswith('.js') or f.endswith('.html')):
print('Contents of ' + f + ': ')
print(open(f, 'r').read())
print('size of ' + f + ' == ' + str(size) + ', expected ' + str(expected_size) + ', delta=' + str(size - expected_size) + print_percent(size, expected_size))
print('size of ' + f_gz + ' == ' + str(size_gz) + ', expected ' + str(expected_size_gz) + ', delta=' + str(size_gz - expected_size_gz) + print_percent(size_gz, expected_size_gz))
# Hack: Generated .mem initializer files have different sizes on different
# platforms (Windows gives x, CircleCI Linux gives x-17 bytes, my home
# Linux gives x+2 bytes...). Likewise asm.js files seem to be affected by
# the LLVM IR text names, which lead to asm.js names, which lead to
# different code sizes, which lead to different relooper choices,
# ultimately producing slightly different total code sizes.
# TODO: identify what is causing this. Meanwhile allow some amount of slop
mem_slop = 10 if self.is_wasm_backend() else 50
if size <= expected_size + mem_slop and size >= expected_size - mem_slop:
size = expected_size
# N.B. even though the test code above prints out gzip compressed sizes, regression testing is done against uncompressed sizes
# this is because optimizing for compressed sizes can be unpredictable and sometimes counterproductive
total_output_size += size
total_expected_size += expected_size
total_output_size_gz += size_gz
total_expected_size_gz += expected_size_gz
obtained_results['total'] = total_output_size
obtained_results['total_gz'] = total_output_size_gz
print('Total output size=' + str(total_output_size) + ' bytes, expected total size=' + str(total_expected_size) + ', delta=' + str(total_output_size - total_expected_size) + print_percent(total_output_size, total_expected_size))
print('Total output size gzipped=' + str(total_output_size_gz) + ' bytes, expected total size gzipped=' + str(total_expected_size_gz) + ', delta=' + str(total_output_size_gz - total_expected_size_gz) + print_percent(total_output_size_gz, total_expected_size_gz))
if os.environ.get('EMTEST_REBASELINE'):
open(results_file, 'w').write(json.dumps(obtained_results, indent=2) + '\n')
else:
if total_output_size > total_expected_size:
print('Oops, overall generated code size regressed by ' + str(total_output_size - total_expected_size) + ' bytes!')
if total_output_size < total_expected_size:
print('Hey amazing, overall generated code size was improved by ' + str(total_expected_size - total_output_size) + ' bytes! Rerun test with other.test_minimal_runtime_code_size with EMTEST_REBASELINE=1 to update the expected sizes!')
self.assertEqual(total_output_size, total_expected_size)
# Test that legacy settings that have been fixed to a specific value can no longer be changed.
def test_legacy_settings_forbidden_to_change(self):
stderr = self.expect_fail([EMCC, '-s', 'MEMFS_APPEND_TO_TYPED_ARRAYS=0', path_from_root('tests', 'hello_world.c')])
self.assertContained('MEMFS_APPEND_TO_TYPED_ARRAYS=0 is no longer supported', stderr)
run_process([EMCC, '-s', 'MEMFS_APPEND_TO_TYPED_ARRAYS=1', path_from_root('tests', 'hello_world.c')])
run_process([EMCC, '-s', 'PRECISE_I64_MATH=2', path_from_root('tests', 'hello_world.c')])
@no_fastcomp('depends on wasm backend .a linking')
def test_jsmath(self):
run_process([EMCC, path_from_root('tests', 'other', 'jsmath.cpp'), '-Os', '-o', 'normal.js', '--closure', '0'])
normal_js_size = os.path.getsize('normal.js')
normal_wasm_size = os.path.getsize('normal.wasm')
run_process([EMCC, path_from_root('tests', 'other', 'jsmath.cpp'), '-Os', '-o', 'jsmath.js', '-s', 'JS_MATH', '--closure', '0'])
jsmath_js_size = os.path.getsize('jsmath.js')
jsmath_wasm_size = os.path.getsize('jsmath.wasm')
# js math increases JS size, but decreases wasm, and wins overall
# (it would win more with closure, but there is no point in making the test slower)
self.assertLess(normal_js_size, jsmath_js_size)
self.assertLess(jsmath_wasm_size, normal_wasm_size)
self.assertLess(jsmath_js_size + jsmath_wasm_size, 0.90 * (normal_js_size + normal_wasm_size))
# js math has almost identical output, but misses some corner cases, 4 out of 34
normal = run_js('normal.js').splitlines()
jsmath = run_js('jsmath.js').splitlines()
assert len(normal) == len(jsmath)
diff = 0
for i in range(len(normal)):
if normal[i] != jsmath[i]:
diff += 1
self.assertEqual(diff, 4)
def test_strict_mode_hello_world(self):
# Verify that strict mode can be used for a simple hello world program, both
# via the environment variable EMCC_STRICT=1 and the command line flag `-s STRICT`
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'STRICT=1']
run_process(cmd)
with env_modify({'EMCC_STRICT': '1'}):
self.do_run(open(path_from_root('tests', 'hello_world.c')).read(), 'hello, world!')
def test_legacy_settings(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'SPLIT_MEMORY=0']
# By default warnings are not shown
stderr = run_process(cmd, stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# Adding -Wlegacy-settings enables the warning
stderr = run_process(cmd + ['-Wlegacy-settings'], stderr=PIPE).stderr
self.assertContained('warning: use of legacy setting: SPLIT_MEMORY', stderr)
self.assertContained('[-Wlegacy-settings]', stderr)
def test_strict_mode_legacy_settings(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'SPLIT_MEMORY=0']
run_process(cmd)
stderr = self.expect_fail(cmd + ['-s', 'STRICT=1'])
self.assertContained('legacy setting used in strict mode: SPLIT_MEMORY', stderr)
with env_modify({'EMCC_STRICT': '1'}):
stderr = self.expect_fail(cmd)
self.assertContained('legacy setting used in strict mode: SPLIT_MEMORY', stderr)
def test_strict_mode_legacy_settings_runtime(self):
# Verify that legacy settings are not accessible at runtime under strict
# mode.
self.set_setting('RETAIN_COMPILER_SETTINGS', 1)
src = r'''\
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("BINARYEN_METHOD: %s\n", (char*)emscripten_get_compiler_setting("BINARYEN_METHOD"));
return 0;
}
'''
self.do_run(src, 'BINARYEN_METHOD: native-wasm')
with env_modify({'EMCC_STRICT': '1'}):
self.do_run(src, 'invalid compiler setting: BINARYEN_METHOD')
self.set_setting('STRICT', 1)
self.do_run(src, 'invalid compiler setting: BINARYEN_METHOD')
def test_renamed_setting(self):
# Verify that renamed settings are available under either name (when not in
# strict mode).
self.set_setting('RETAIN_COMPILER_SETTINGS', 1)
src = r'''\
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("%d %d\n",
emscripten_get_compiler_setting("BINARYEN_ASYNC_COMPILATION"),
emscripten_get_compiler_setting("WASM_ASYNC_COMPILATION"));
return 0;
}
'''
# Setting the new name should set both
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.do_run(src, '0 0')
self.set_setting('WASM_ASYNC_COMPILATION', 1)
self.do_run(src, '1 1')
self.clear_setting('WASM_ASYNC_COMPILATION')
# Setting the old name should set both
self.set_setting('BINARYEN_ASYNC_COMPILATION', 0)
self.do_run(src, '0 0')
self.set_setting('BINARYEN_ASYNC_COMPILATION', 1)
self.do_run(src, '1 1')
def test_strict_mode_legacy_settings_library(self):
create_test_file('lib.js', r'''
#if SPLIT_MEMORY
#endif
''')
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'out.js', '--js-library', 'lib.js']
run_process(cmd)
self.assertContained('ReferenceError: SPLIT_MEMORY is not defined', self.expect_fail(cmd + ['-s', 'STRICT=1']))
with env_modify({'EMCC_STRICT': '1'}):
self.assertContained('ReferenceError: SPLIT_MEMORY is not defined', self.expect_fail(cmd))
def test_safe_heap_log(self):
self.set_setting('SAFE_HEAP')
self.set_setting('SAFE_HEAP_LOG')
self.set_setting('EXIT_RUNTIME')
src = open(path_from_root('tests', 'hello_world.c')).read()
self.do_run(src, 'SAFE_HEAP load: ')
if not self.is_wasm_backend():
self.set_setting('WASM', 0)
self.do_run(src, 'SAFE_HEAP load: ')
@no_fastcomp('iprintf/__small_printf are wasm-backend-only features')
def test_mini_printfs(self):
def test(code):
with open('src.c', 'w') as f:
f.write('''
#include <stdio.h>
void* unknown_value;
int main() {
%s
}
''' % code)
run_process([EMCC, 'src.c', '-O1'])
return os.path.getsize('a.out.wasm')
i = test('printf("%d", *(int*)unknown_value);')
f = test('printf("%f", *(double*)unknown_value);')
lf = test('printf("%Lf", *(long double*)unknown_value);')
both = test('printf("%d", *(int*)unknown_value); printf("%Lf", *(long double*)unknown_value);')
print(i, f, lf, both)
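# the wasm backend links progressively larger printf variants depending on the
# format specifiers actually used: integer-only iprintf, __small_printf (no long
# double support), and full printf; the size deltas below check that ordering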
# iprintf is much smaller than printf with float support
self.assertGreater(i, f - 3400)
self.assertLess(i, f - 3000)
# __small_printf is somewhat smaller than printf with long double support
self.assertGreater(f, lf - 900)
self.assertLess(f, lf - 500)
# both is a little bigger still
self.assertGreater(lf, both - 100)
self.assertLess(lf, both - 50)
@parameterized({
'normal': ([], '''\
0.000051 => -5.123719529365189373493194580078e-05
0.000051 => -5.123719300544352718866300544498e-05
0.000051 => -5.123719300544352718866300544498e-05
'''),
'full_long_double': (['-s', 'PRINTF_LONG_DOUBLE'], '''\
0.000051 => -5.123719529365189373493194580078e-05
0.000051 => -5.123719300544352718866300544498e-05
0.000051 => -5.123719300544352710023893104250e-05
'''),
})
@no_fastcomp('float128 is wasm backend only')
def test_long_double_printing(self, args, expected):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main(void) {
float f = 5.123456789e-5;
double d = 5.123456789e-5;
long double ld = 5.123456789e-5;
printf("%f => %.30e\n", f, f / (f - 1));
printf("%f => %.30e\n", d, d / (d - 1));
printf("%Lf => %.30Le\n", ld, ld / (ld - 1));
}
''')
run_process([EMCC, 'src.cpp'] + args)
self.assertContained(expected, run_js('a.out.js'))
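# (without PRINTF_LONG_DOUBLE the %Le result is computed at double precision;
#  with it, full long-double/float128 precision is printed -- note that only the
#  third line of the expected output differs between the two parameterizations)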
# Tests that passing -s MALLOC=none will not include the system malloc() in the build.
def test_malloc_none(self):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'malloc_none.c'), '-s', 'MALLOC=none'])
self.assertContained('undefined symbol: malloc', stderr)
@parameterized({
'c': ['c'],
'cpp': ['cpp'],
})
@no_fastcomp('lsan not supported on fastcomp')
def test_lsan_leaks(self, ext):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1'],
assert_returncode=None, literals=[
'Direct leak of 2048 byte(s) in 1 object(s) allocated from',
'Direct leak of 1337 byte(s) in 1 object(s) allocated from',
'Direct leak of 42 byte(s) in 1 object(s) allocated from',
])
@parameterized({
'c': ['c', [
r'in malloc.*a\.out\.wasm\+0x',
r'(?im)in f (|[/a-z\.]:).*/test_lsan_leaks\.c:6:21$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:10:16$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:12:3$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:13:3$',
]],
'cpp': ['cpp', [
r'in operator new\[\]\(unsigned long\).*a\.out\.wasm\+0x',
r'(?im)in f\(\) (|[/a-z\.]:).*/test_lsan_leaks\.cpp:4:21$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:8:16$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:10:3$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:11:3$',
]],
})
@no_fastcomp('lsan not supported on fastcomp')
def test_lsan_stack_trace(self, ext, regexes):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-g4'],
assert_returncode=None, literals=[
'Direct leak of 2048 byte(s) in 1 object(s) allocated from',
'Direct leak of 1337 byte(s) in 1 object(s) allocated from',
'Direct leak of 42 byte(s) in 1 object(s) allocated from',
], regexes=regexes)
@parameterized({
'c': ['c'],
'cpp': ['cpp'],
})
@no_fastcomp('lsan not supported on fastcomp')
def test_lsan_no_leak(self, ext):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_no_leak.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'ASSERTIONS=0'],
regexes=[r'^\s*$'])
@no_fastcomp('lsan not supported on fastcomp')
def test_lsan_no_stack_trace(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.c'),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-DDISABLE_CONTEXT'],
assert_returncode=None, literals=[
'Direct leak of 3427 byte(s) in 3 object(s) allocated from:',
'SUMMARY: LeakSanitizer: 3427 byte(s) leaked in 3 allocation(s).',
])
@no_fastcomp('asan is not supported on fastcomp')
def test_asan_null_deref(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_asan_null_deref.c'),
emcc_args=['-fsanitize=address', '-s', 'ALLOW_MEMORY_GROWTH=1'],
assert_returncode=None, literals=[
'AddressSanitizer: null-pointer-dereference on address',
])
@no_fastcomp('asan is not supported on fastcomp')
def test_asan_no_stack_trace(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.c'),
emcc_args=['-fsanitize=address', '-s', 'ALLOW_MEMORY_GROWTH=1', '-DDISABLE_CONTEXT', '-s', 'EXIT_RUNTIME'],
assert_returncode=None, literals=[
'Direct leak of 3427 byte(s) in 3 object(s) allocated from:',
'SUMMARY: AddressSanitizer: 3427 byte(s) leaked in 3 allocation(s).',
])
@no_fastcomp('asan is not supported on fastcomp')
def test_asan_pthread_stubs(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_asan_pthread_stubs.c'), emcc_args=['-fsanitize=address', '-s', 'ALLOW_MEMORY_GROWTH=1'])
@parameterized({
'async': ['-s', 'WASM_ASYNC_COMPILATION=1'],
'sync': ['-s', 'WASM_ASYNC_COMPILATION=0'],
})
@no_fastcomp('offset converter is not supported on fastcomp')
def test_offset_converter(self, *args):
self.do_smart_test(path_from_root('tests', 'other', 'test_offset_converter.c'),
emcc_args=['-s', 'USE_OFFSET_CONVERTER', '-g4'] + list(args), literals=['ok'])
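# USE_OFFSET_CONVERTER provides a runtime mapping from code addresses in the
# wasm back to function names, used for symbolizing stack traces (e.g. by the
# sanitizers)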
@no_windows('ptys and select are not available on windows')
@no_fastcomp('fastcomp clang detects colors differently')
def test_build_error_color(self):
create_test_file('src.c', 'int main() {')
returncode, output = self.run_on_pty([EMCC, 'src.c'])
self.assertNotEqual(returncode, 0)
self.assertIn(b"\x1b[1msrc.c:1:13: \x1b[0m\x1b[0;1;31merror: \x1b[0m\x1b[1mexpected '}'\x1b[0m", output)
self.assertIn(b"\x1b[31merror: ", output)
@parameterized({
'fno_diagnostics_color': ['-fno-diagnostics-color'],
'fdiagnostics_color_never': ['-fdiagnostics-color=never'],
})
@no_windows('ptys and select are not available on windows')
def test_pty_no_color(self, flag):
with open('src.c', 'w') as f:
f.write('int main() {')
returncode, output = self.run_on_pty([EMCC, flag, 'src.c'])
self.assertNotEqual(returncode, 0)
self.assertNotIn(b'\x1b', output)
@no_fastcomp('sanitizers are not supported on fastcomp')
def test_sanitizer_color(self):
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
int *p = 0, q;
EM_ASM({ Module.printWithColors = true; });
q = *p;
}
''')
run_process([EMCC, '-fsanitize=null', 'src.c'])
output = run_js('a.out.js', stderr=PIPE, full_output=True)
self.assertIn('\x1b[1msrc.c', output)
@no_fastcomp('main param optimizations are upstream-only')
def test_main_reads_params(self):
create_test_file('no.c', '''
int main() {
return 42;
}
''')
run_process([EMCC, 'no.c', '-O3', '-o', 'no.js'])
no = os.path.getsize('no.js')
create_test_file('yes.c', '''
int main(int argc, char **argv) {
return argc;
}
''')
run_process([EMCC, 'yes.c', '-O3', '-o', 'yes.js'])
yes = os.path.getsize('yes.js')
# not having to set up argc/argv allows us to avoid including a
# significant amount of JS for string support (which is not needed
# otherwise in such a trivial program).
self.assertLess(no, 0.95 * yes)
@no_fastcomp('not optimized in fastcomp')
def test_INCOMING_MODULE_JS_API(self):
def test(args):
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O3', '--closure', '1'] + args)
for engine in JS_ENGINES:
self.assertContained('hello, world!', run_js('a.out.js', engine=engine))
with open('a.out.js') as f:
# ignore \r which on windows can increase the size
return len(f.read().replace('\r', ''))
normal = test([])
changed = test(['-s', 'INCOMING_MODULE_JS_API=[]'])
print('sizes', normal, changed)
# Changing this option to [] should decrease code size.
self.assertLess(changed, normal)
# Check an absolute code size as well, with some slack.
self.assertLess(abs(changed - 5795), 150)
def test_llvm_includes(self):
self.build('#include <stdatomic.h>', self.get_dir(), 'atomics.c')
def test_mmap_and_munmap(self):
emcc_args = []
for f in ['data_ro.dat', 'data_rw.dat']:
create_test_file(f, 'Test file')
emcc_args.extend(['--embed-file', f])
self.do_other_test('mmap_and_munmap', emcc_args)
def test_mmap_and_munmap_anonymous(self):
self.do_other_test('mmap_and_munmap_anonymous', emcc_args=['-s', 'NO_FILESYSTEM'])
def test_mmap_memorygrowth(self):
self.do_other_test('mmap_memorygrowth', ['-s', 'ALLOW_MEMORY_GROWTH=1'])
def test_files_and_module_assignment(self):
# a pre-js can set Module to a new object or otherwise undo file preloading/
# embedding changes to Module.preRun. we show an error to avoid confusion
create_test_file('pre.js', 'Module = {};')
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("file exists: %d\n", !!fopen("src.cpp", "rb"));
}
''')
run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '--embed-file', 'src.cpp'])
result = run_js('a.out.js', assert_returncode=None, stderr=PIPE, full_output=True)
self.assertContained('Module.preRun should exist because file support used it; did a pre-js delete it?', result)
def test_error(pre):
create_test_file('pre.js', pre)
run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '--embed-file', 'src.cpp'])
result = run_js('a.out.js', assert_returncode=None, stderr=PIPE, full_output=True)
self.assertContained('All preRun tasks that exist before user pre-js code should remain after; did you replace Module or modify Module.preRun?', result)
# error if the user replaces Module or Module.preRun
test_error('Module = { preRun: [] };')
test_error('Module.preRun = [];')
@no_fastcomp('fastcomp defines this in the backend itself, so it is always on there')
def test_EMSCRIPTEN_and_STRICT(self):
# __EMSCRIPTEN__ is the proper define; we support EMSCRIPTEN for legacy
# code, unless STRICT is enabled.
create_test_file('src.c', '''
#ifndef EMSCRIPTEN
#error "not defined"
#endif
''')
run_process([EMCC, 'src.c', '-c'])
self.expect_fail([EMCC, 'src.c', '-s', 'STRICT', '-c'])
def test_exception_settings(self):
for catching, throwing, opts in itertools.product([0, 1], repeat=3):
cmd = [EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_THROWING=%d' % (1 - throwing), '-s', 'DISABLE_EXCEPTION_CATCHING=%d' % (1 - catching), '-O%d' % opts]
print(cmd)
if not throwing and not catching:
self.assertContained('DISABLE_EXCEPTION_THROWING was set (likely due to -fno-exceptions), which means no C++ exception throwing support code is linked in, but such support is required', self.expect_fail(cmd))
elif not throwing and catching:
self.assertContained('DISABLE_EXCEPTION_THROWING was set (probably from -fno-exceptions) but is not compatible with enabling exception catching (DISABLE_EXCEPTION_CATCHING=0)', self.expect_fail(cmd))
else:
run_process(cmd)
@no_fastcomp('new clang feature')
def test_fignore_exceptions(self):
# the new clang flag -fignore-exceptions is essentially the same as -s DISABLE_EXCEPTION_CATCHING=1,
# that is, it allows throwing, but emits no support code for catching.
run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
enable_size = os.path.getsize('a.out.wasm')
run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=1'])
disable_size = os.path.getsize('a.out.wasm')
run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-fignore-exceptions'])
ignore_size = os.path.getsize('a.out.wasm')
self.assertGreater(enable_size, disable_size)
self.assertEqual(disable_size, ignore_size)
@no_fastcomp('assumes wasm object files')
def test_f_exception(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main () {
try {
throw 42;
} catch (int e) {
printf("CAUGHT: %d\n", e);
}
return 0;
}
''')
for compile_flags, link_flags, expect_caught in [
# exceptions are off by default
([], [], False),
# enabling exceptions at link and compile works
(['-fexceptions'], ['-fexceptions'], True),
# just compile isn't enough as the JS runtime lacks support
(['-fexceptions'], [], False),
# just link isn't enough as codegen didn't emit exceptions support
([], ['-fexceptions'], False),
]:
print(compile_flags, link_flags, expect_caught)
run_process([EMCC, 'src.cpp', '-c', '-o', 'src.o'] + compile_flags)
run_process([EMCC, 'src.o'] + link_flags)
result = run_js('a.out.js', assert_returncode=None, stderr=PIPE)
self.assertContainedIf('CAUGHT', result, expect_caught)
def test_assertions_on_internal_api_changes(self):
create_test_file('src.c', r'''
#include <emscripten.h>
int main(int argc, char **argv) {
EM_ASM({
try {
Module['read'];
out('it should not be there');
} catch(e) {
out('error: ' + e);
}
});
}
''')
run_process([EMCC, 'src.c', '-s', 'ASSERTIONS'])
self.assertContained('Module.read has been replaced with plain read', run_js('a.out.js'))
def test_assertions_on_incoming_module_api_changes(self):
create_test_file('pre.js', r'''
var Module = {
read: function() {}
}
''')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS', '--pre-js', 'pre.js'])
self.assertContained('Module.read option was removed', run_js('a.out.js', assert_returncode=None, stderr=PIPE))
def test_assertions_on_outgoing_module_api_changes(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
console.log();
function check(name) {
try {
Module[name];
console.log("success: " + name);
} catch(e) {
}
}
check("read");
// TODO check("setWindowTitle");
check("wasmBinary");
check("arguments");
});
}
''')
run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS'])
self.assertContained('''
Module.read has been replaced with plain read_ (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
Module.wasmBinary has been replaced with plain wasmBinary (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
Module.arguments has been replaced with plain arguments_ (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
''', run_js('a.out.js', assert_returncode=None, stderr=PIPE))
def test_assertions_on_ready_promise(self):
# check that when assertions are on we give useful error messages for
# mistakenly treating the returned Promise as an instance. That is, you could
# once do Module()._main to get an instance and the main function, but after
# the breaking change in #10697 Module() now returns a promise, and to get the
# instance you must use .then() to get a callback with the instance.
create_test_file('test.js', r'''
try {
Module()._main;
} catch(e) {
console.log(e);
}
try {
Module().onRuntimeInitialized = 42;
} catch(e) {
console.log(e);
}
''')
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE', '-s', 'ASSERTIONS', '--extern-post-js', 'test.js'])
out = run_js('a.out.js')
self.assertContained('You are getting _main on the Promise object, instead of the instance. Use .then() to get called back with the instance, see the MODULARIZE docs in src/settings.js', out)
self.assertContained('You are setting onRuntimeInitialized on the Promise object, instead of the instance. Use .then() to get called back with the instance, see the MODULARIZE docs in src/settings.js', out)
def test_em_asm_duplicate_strings(self):
# We had a regression where two different EM_ASM strings from two different
# object files were de-duplicated in wasm-emscripten-finalize. This used to
# work when we used a zero-based index to store the JS strings, but once we
# switched to absolute addresses the string needs to exist twice in the JS
# file.
create_test_file('foo.c', '''
#include <emscripten.h>
void foo() {
EM_ASM({ console.log('Hello, world!'); });
}
''')
create_test_file('main.c', '''
#include <emscripten.h>
void foo();
int main() {
foo();
EM_ASM({ console.log('Hello, world!'); });
return 0;
}
''')
run_process([EMCC, '-c', 'foo.c'])
run_process([EMCC, '-c', 'main.c'])
run_process([EMCC, 'foo.o', 'main.o'])
self.assertContained('Hello, world!\nHello, world!\n', run_js('a.out.js'))
def test_em_asm_strict_c(self):
create_test_file('src.c', '''
#include <emscripten/em_asm.h>
int main() {
EM_ASM({ console.log('Hello, world!'); });
}
''')
result = run_process([EMCC, '-std=c11', 'src.c'], stderr=PIPE, check=False)
self.assertNotEqual(result.returncode, 0)
self.assertIn('EM_ASM does not work in -std=c* modes, use -std=gnu* modes instead', result.stderr)
def test_boost_graph(self):
self.do_smart_test(path_from_root('tests', 'test_boost_graph.cpp'),
emcc_args=['-s', 'USE_BOOST_HEADERS=1'],
assert_returncode=0)
@no_fastcomp('EM_ASM and setjmp works fine on fastcomp')
def test_setjmp_em_asm(self):
create_test_file('src.c', '''
#include <emscripten.h>
#include <setjmp.h>
int main() {
jmp_buf buf;
setjmp(buf);
EM_ASM({
console.log("hello world");
});
}
''')
result = run_process([EMCC, 'src.c'], stderr=PIPE, check=False)
self.assertNotEqual(result.returncode, 0)
self.assertIn('Cannot use EM_ASM* alongside setjmp/longjmp', result.stderr)
self.assertIn('Please consider using EM_JS, or move the EM_ASM into another function.', result.stderr)
def test_missing_stdlibs(self):
# Certain standard libraries are expected to be usable via -l flags but
# don't actually exist in our standard library path. Make sure we don't
# error out when linking with these flags.
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-lm', '-ldl', '-lrt', '-lpthread'])
@no_fastcomp('lld-specific')
def test_supported_linker_flags(self):
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,--print-map'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `--print-map`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Xlinker', '--print-map'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `--print-map`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-rpath=foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-rpath=foo`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-rpath-link,foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-rpath-link`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'),
'-Wl,--no-check-features,-mllvm,-debug'], stderr=PIPE).stderr
self.assertNotContained('warning: ignoring unsupported linker flag', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-allow-shlib-undefined'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-allow-shlib-undefined`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,--allow-shlib-undefined'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `--allow-shlib-undefined`', out)
out = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-version-script,foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-version-script`', out)
@no_fastcomp('lld-specific')
def test_linker_flags_pass_through(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,--waka'])
self.assertContained('wasm-ld: error: unknown argument: --waka', err)
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Xlinker', '--waka'])
self.assertContained('wasm-ld: error: unknown argument: --waka', err)
def test_linker_flags_unused(self):
err = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-lbar'], stderr=PIPE).stderr
self.assertContained("warning: argument unused during compilation: '-lbar' [-Wunused-command-line-argument]", err)
def test_non_wasm_without_wasm_in_vm(self):
# Test that our non-wasm output does not depend on wasm support in the vm.
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'WASM=0'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write('var WebAssembly = null;\n' + js)
for engine in JS_ENGINES:
self.assertContained('hello, world!', run_js('a.out.js', engine=engine))
def test_compile_only_with_object_extension(self):
# Emscripten supports compiling to an object file when the output has an
# object extension.
# Most compilers require the `-c` to be explicit.
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-o', 'hello1.o'])
err = run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', 'hello2.o'], stderr=PIPE).stderr
self.assertContained('warning: Assuming object file output in the absence of `-c`', err)
self.assertBinaryEqual('hello1.o', 'hello2.o')
def test_empty_output_extension(self):
# Default to JS output when no extension is present
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Werror', '-o', 'hello'])
self.assertContained('hello, world!', run_js('hello'))
def test_backwards_deps_in_archive(self):
# Test that JS dependencies from deps_info.json work for code linked via
# static archives using -l<name>
run_process([EMCC, path_from_root('tests', 'sockets', 'test_gethostbyname.c'), '-o', 'a.o'])
run_process([LLVM_AR, 'cr', 'liba.a', 'a.o'])
create_test_file('empty.c', 'static int foo = 0;')
run_process([EMCC, 'empty.c', '-la', '-L.'])
self.assertContained('success', run_js('a.out.js'))
def test_warning_flags(self):
create_test_file('not_object.bc', 'some text')
run_process([EMCC, '-c', '-o', 'hello.o', path_from_root('tests', 'hello_world.c')])
cmd = [EMCC, 'hello.o', 'not_object.bc', '-o', 'a.wasm']
# warning that is enabled by default
stderr = run_process(cmd, stderr=PIPE).stderr
self.assertContained('emcc: warning: not_object.bc is not a valid input file [-Winvalid-input]', stderr)
# -w to suppress warnings
stderr = run_process(cmd + ['-w'], stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# -Wno-invalid-input to suppress just this one warning
stderr = run_process(cmd + ['-Wno-invalid-input'], stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# with -Werror should fail
stderr = self.expect_fail(cmd + ['-Werror'])
self.assertContained('emcc: error: not_object.bc is not a valid input file [-Winvalid-input] [-Werror]', stderr)
# with -Werror + -Wno-error=<type> should only warn
stderr = run_process(cmd + ['-Werror', '-Wno-error=invalid-input'], stderr=PIPE).stderr
self.assertContained('emcc: warning: not_object.bc is not a valid input file [-Winvalid-input]', stderr)
    # check that `-Werror=foo` also enables foo
stderr = self.expect_fail(cmd + ['-Werror=legacy-settings', '-s', 'TOTAL_MEMORY=1'])
self.assertContained('error: use of legacy setting: TOTAL_MEMORY (setting renamed to INITIAL_MEMORY) [-Wlegacy-settings] [-Werror]', stderr)
def test_emranlib(self):
create_test_file('foo.c', 'int foo = 1;')
create_test_file('bar.c', 'int bar = 2;')
run_process([EMCC, '-c', 'foo.c', 'bar.c'])
# Create a library with no archive map
run_process([EMAR, 'crS', 'liba.a', 'foo.o', 'bar.o'])
output = run_process([shared.LLVM_NM, '--print-armap', 'liba.a'], stdout=PIPE).stdout
self.assertNotContained('Archive map', output)
# Add an archive map
run_process([EMRANLIB, 'liba.a'])
output = run_process([shared.LLVM_NM, '--print-armap', 'liba.a'], stdout=PIPE).stdout
self.assertContained('Archive map', output)
def test_pthread_stub(self):
# Verify that programs containing pthread code can still be compiled even
    # without enabling threads. This is possible because we link in
# libpthread_stub.a
create_test_file('pthread.c', '''
#include <pthread.h>
int main() {
pthread_atfork(NULL, NULL, NULL);
return 0;
}
''')
run_process([EMCC, 'pthread.c'])
def test_stdin_preprocess(self):
create_test_file('temp.h', '#include <string>')
outputStdin = run_process([EMCC, '-x', 'c++', '-dM', '-E', '-'], input="#include <string>", stdout=PIPE).stdout
outputFile = run_process([EMCC, '-x', 'c++', '-dM', '-E', 'temp.h'], stdout=PIPE).stdout
self.assertTextDataIdentical(outputStdin, outputFile)
def test_stdin_compile_only(self):
# Should fail without -x lang specifier
with open(path_from_root('tests', 'hello_world.cpp')) as f:
err = self.expect_fail([EMCC, '-c', '-'], input=f.read())
self.assertContained('error: -E or -x required when input is from standard input', err)
with open(path_from_root('tests', 'hello_world.cpp')) as f:
run_process([EMCC, '-c', '-o', 'out.o', '-x', 'c++', '-'], input=f.read())
self.assertExists('out.o')
# Same again but without an explicit output filename
with open(path_from_root('tests', 'hello_world.cpp')) as f:
run_process([EMCC, '-c', '-x', 'c++', '-'], input=f.read())
self.assertExists('-.o')
def test_stdin_compile_and_link(self):
with open(path_from_root('tests', 'hello_world.cpp')) as f:
run_process([EMCC, '-x', 'c++', '-'], input=f.read())
self.assertContained('hello, world!', run_js('a.out.js'))
def is_object_file(self, filename):
if self.is_wasm_backend():
return building.is_wasm('-')
else:
return building.is_bitcode('-')
def test_stdout_link(self):
    # linking to stdout `-` doesn't work; we have no way to pass such an output filename
# through post-link tools such as binaryen.
err = self.expect_fail([EMCC, '-o', '-', path_from_root('tests', 'hello_world.cpp')])
self.assertContained('invalid output filename: `-`', err)
self.assertNotExists('-')
err = self.expect_fail([EMCC, '-o', '-foo', path_from_root('tests', 'hello_world.cpp')])
self.assertContained('invalid output filename: `-foo`', err)
self.assertNotExists('-foo')
def test_output_to_nowhere(self):
nowhere = 'NULL' if WINDOWS else '/dev/null'
run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', nowhere, '-c'])
  # Test that passing -s MIN_X_VERSION=-1 on the command line will result in browser X not being supported at all.
  # I.e. -s MIN_X_VERSION=-1 is equivalent to -s MIN_X_VERSION=Infinity
def test_drop_support_for_browser(self):
# Test that -1 means "not supported"
run_process([EMCC, path_from_root('tests', 'test_html5.c'), '-s', 'MIN_IE_VERSION=-1'])
self.assertContained('allowsDeferredCalls: true', open('a.out.js').read())
self.assertNotContained('allowsDeferredCalls: JSEvents.isInternetExplorer()', open('a.out.js').read())
def test_errno_type(self):
create_test_file('errno_type.c', '''
#include <errno.h>
// Use of these constants in C preprocessor comparisons should work.
#if EPERM > 0
#define DAV1D_ERR(e) (-(e))
#else
#define DAV1D_ERR(e) (e)
#endif
''')
run_process([EMCC, 'errno_type.c'])
@no_fastcomp("uses standalone mode")
def test_standalone_syscalls(self):
run_process([EMCC, path_from_root('tests', 'other', 'standalone_syscalls', 'test.cpp'), '-o', 'test.wasm'])
with open(path_from_root('tests', 'other', 'standalone_syscalls', 'test.out')) as f:
expected = f.read()
for engine in WASM_ENGINES:
self.assertContained(expected, run_js('test.wasm', engine))
@no_windows('TODO: fix setjmp.h on clang on windows on ci')
@no_fastcomp("uses standalone mode")
def test_wasm2c_reactor(self):
# test compiling an unsafe library using wasm2c, then using it from a
# main program. this shows it is easy to use wasm2c as a sandboxing
# mechanism.
# first compile the library with emcc, getting a .c and .h
run_process([EMCC,
path_from_root('tests', 'other', 'wasm2c', 'unsafe-library.c'),
'-O3', '-o', 'lib.wasm', '-s', 'WASM2C', '--no-entry'])
# compile that .c to a native object
run_process([CLANG_CC, 'lib.wasm.c', '-c', '-O3', '-o', 'lib.o'])
# compile the main program natively normally, and link with the
# unsafe library
run_process([CLANG_CC,
path_from_root('tests', 'other', 'wasm2c', 'my-code.c'),
'-O3', 'lib.o', '-o', 'program.exe'])
output = run_process([os.path.abspath('program.exe')], stdout=PIPE).stdout
with open(path_from_root('tests', 'other', 'wasm2c', 'output.txt')) as f:
self.assertEqual(output, f.read())
@parameterized({
'wasm2js': (['-s', 'WASM=0'], ''),
'modularize': (['-s', 'MODULARIZE'], 'Module()'),
})
@no_fastcomp('wasm2js only')
def test_promise_polyfill(self, constant_args, extern_post_js):
def test(args):
# legacy browsers may lack Promise, which wasm2js depends on. see what
# happens when we kill the global Promise function.
create_test_file('extern-post.js', extern_post_js)
run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + constant_args + args + ['--extern-post-js', 'extern-post.js'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write('Promise = undefined;\n' + js)
return run_js('a.out.js', stderr=PIPE, full_output=True, assert_returncode=None)
# we fail without legacy support
self.assertNotContained('hello, world!', test([]))
# but work with it
self.assertContained('hello, world!', test(['-s', 'LEGACY_VM_SUPPORT']))
# Compile-test for -s USE_WEBGPU=1 and library_webgpu.js.
def test_webgpu_compiletest(self):
for args in [[], ['-s', 'ASSERTIONS=1']]:
run_process([EMCC, path_from_root('tests', 'webgpu_dummy.cpp'), '-s', 'USE_WEBGPU=1'] + args)
@no_fastcomp('lld only')
def test_signature_mismatch(self):
create_test_file('a.c', 'void foo(); int main() { foo(); return 0; }')
create_test_file('b.c', 'int foo() { return 1; }')
stderr = run_process([EMCC, 'a.c', 'b.c'], stderr=PIPE).stderr
self.assertContained('function signature mismatch: foo', stderr)
self.expect_fail([EMCC, '-Wl,--fatal-warnings', 'a.c', 'b.c'])
self.expect_fail([EMCC, '-s', 'STRICT', 'a.c', 'b.c'])
@no_fastcomp('lld only')
def test_lld_report_undefined(self):
create_test_file('main.c', 'void foo(); int main() { foo(); return 0; }')
stderr = self.expect_fail([EMCC, '-s', 'LLD_REPORT_UNDEFINED', 'main.c'])
self.assertContained('wasm-ld: error:', stderr)
self.assertContained('main_0.o: undefined symbol: foo', stderr)
@no_fastcomp('wasm backend only')
def test_4GB(self):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=2GB'])
self.assertContained('INITIAL_MEMORY must be less than 2GB due to current spec limitations', stderr)
# Verifies that warning messages that Closure outputs are recorded to console
def test_closure_warnings(self):
proc = run_process([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=quiet'], stderr=PIPE)
self.assertNotContained('WARNING', proc.stderr)
proc = run_process([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=warn'], stderr=PIPE)
self.assertContained('WARNING - [JSC_REFERENCE_BEFORE_DECLARE] Variable referenced before declaration', proc.stderr)
self.expect_fail([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=error'])
@no_fastcomp('test wasm object files')
def test_bitcode_input(self):
# Verify that bitcode files are accepted as input
create_test_file('main.c', 'void foo(); int main() { return 0; }')
run_process([EMCC, '-emit-llvm', '-c', '-o', 'main.bc', 'main.c'])
self.assertTrue(building.is_bitcode('main.bc'))
run_process([EMCC, '-c', '-o', 'main.o', 'main.bc'])
self.assertTrue(building.is_wasm('main.o'))
def test_nostdlib(self):
# First ensure all the system libs are built
run_process([EMCC, path_from_root('tests', 'unistd', 'close.c')])
self.assertContained('undefined symbol:', self.expect_fail([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nostdlib']))
self.assertContained('undefined symbol:', self.expect_fail([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nodefaultlibs']))
    # Build again but with explicit system libraries
libs = ['-lc', '-lcompiler_rt']
if self.is_wasm_backend():
libs.append('-lc_rt_wasm')
run_process([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nostdlib'] + libs)
run_process([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nodefaultlibs'] + libs)
def test_argument_match(self):
# Verify that emcc arguments match precisely. We had a bug where only the prefix
# was matched
run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--js-opts', '10'])
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--js-optsXX'])
self.assertContained("error: unsupported option '--js-optsXX'", err)
def test_missing_argument(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--js-opts'])
self.assertContained("error: option '--js-opts' requires an argument", err)
def test_default_to_cxx(self):
create_test_file('foo.h', '#include <string.h>')
create_test_file('cxxfoo.h', '#include <string>')
    # The default behaviour is to assume C++, which means the C++ header can be
    # compiled even with emcc.
run_process([EMCC, '-c', 'cxxfoo.h'])
# But this means that C flags can't be passed (since we are assuming C++)
err = self.expect_fail([EMCC, '-std=gnu11', '-c', 'foo.h'])
self.assertContained("'-std=gnu11' not allowed with 'C++'", err)
# If we disable DEFAULT_TO_CXX the emcc can be used with cflags, but can't be used to build
# C++ headers
run_process([EMCC, '-std=gnu11', '-c', 'foo.h', '-s', 'DEFAULT_TO_CXX=0'])
err = self.expect_fail([EMCC, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0'])
self.assertContained("'string' file not found", err)
    # Using em++ should always work for C++ headers
run_process([EMXX, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0'])
# Or using emcc with `-x c++`
run_process([EMCC, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0', '-x', 'c++-header'])
@parameterized({
'': ([],),
'minimal': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_support_errno(self, args):
self.emcc_args += args
src = path_from_root('tests', 'core', 'test_support_errno.c')
output = path_from_root('tests', 'core', 'test_support_errno.out')
self.do_run_from_file(src, output)
size_default = os.path.getsize('src.c.o.js')
# Run the same test again but with SUPPORT_ERRNO disabled. This time we don't expect errno
# to be set after the failing syscall.
self.set_setting('SUPPORT_ERRNO', 0)
output = path_from_root('tests', 'core', 'test_support_errno_disabled.out')
self.do_run_from_file(src, output)
# Verify the JS output was smaller
self.assertLess(os.path.getsize('src.c.o.js'), size_default)
@no_fastcomp('no .s file support')
def test_assembly(self):
run_process([EMCC, '-c', path_from_root('tests', 'other', 'test_asm.s'), '-o', 'foo.o'])
src = path_from_root('tests', 'other', 'test_asm.c')
output = path_from_root('tests', 'other', 'test_asm.out')
self.emcc_args.append('foo.o')
self.do_run_from_file(src, output)
@no_fastcomp('no .s file support')
def test_assembly_preprocessed(self):
run_process([EMCC, '-c', path_from_root('tests', 'other', 'test_asm_cpp.S'), '-o', 'foo.o'])
src = path_from_root('tests', 'other', 'test_asm.c')
output = path_from_root('tests', 'other', 'test_asm.out')
self.emcc_args.append('foo.o')
self.do_run_from_file(src, output)
def test_export_global_address(self):
src = path_from_root('tests', 'other', 'test_export_global_address.c')
output = path_from_root('tests', 'other', 'test_export_global_address.out')
self.do_run_from_file(src, output)
@no_fastcomp('wasm-ld only')
def test_linker_version(self):
out = run_process([EMCC, '-Wl,--version'], stdout=PIPE).stdout
self.assertContained('LLD ', out)
# Tests that if a JS library function is missing, the linker will print out which function
# depended on the missing function.
def test_chained_js_error_diagnostics(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'test_chained_js_error_diagnostics.c'), '--js-library', path_from_root('tests', 'test_chained_js_error_diagnostics.js')])
self.assertContained("error: undefined symbol: nonexistent_function (referenced by bar__deps: ['nonexistent_function'], referenced by foo__deps: ['bar'], referenced by top-level compiled C/C++ code)", err)
def test_xclang_flag(self):
create_test_file('foo.h', ' ')
run_process([EMCC, '-c', '-o', 'out.o', '-Xclang', '-include', '-Xclang', 'foo.h', path_from_root('tests', 'hello_world.c')])
def test_emcc_size_parsing(self):
create_test_file('foo.h', ' ')
err = self.expect_fail([EMCC, '-s', 'TOTAL_MEMORY=X'])
self.assertContained('error: invalid byte size `X`. Valid suffixes are: kb, mb, gb, tb', err)
err = self.expect_fail([EMCC, '-s', 'TOTAL_MEMORY=11PB'])
self.assertContained('error: invalid byte size `11PB`. Valid suffixes are: kb, mb, gb, tb', err)
def test_native_call_before_init(self):
self.set_setting('ASSERTIONS')
self.set_setting('EXPORTED_FUNCTIONS', ['_foo'])
self.add_pre_run('console.log("calling foo"); Module["_foo"]();')
self.build('#include <stdio.h>\nint foo() { puts("foo called"); return 3; }', self.get_dir(), 'foo.c')
err = self.expect_fail(NODE_JS + ['foo.c.o.js'], stdout=PIPE)
self.assertContained('native function `foo` called before runtime initialization', err)
def test_native_call_after_exit(self):
self.set_setting('ASSERTIONS')
self.set_setting('EXIT_RUNTIME')
self.add_on_exit('console.log("calling main again"); Module["_main"]();')
self.build('#include <stdio.h>\nint main() { puts("foo called"); return 0; }', self.get_dir(), 'foo.c')
err = self.expect_fail(NODE_JS + ['foo.c.o.js'], stdout=PIPE)
self.assertContained('native function `main` called after runtime exit', err)
| 40.865605
| 382
| 0.622225
|
77e77629268a405100ac188230a74378bd208ddf
| 478
|
py
|
Python
|
models/Round6/base_case.py
|
gehilley/NondimensionalWeathering
|
8b59a00d59c026e1b3a3b8f7c3c6fe51a272c21a
|
[
"MIT"
] | null | null | null |
models/Round6/base_case.py
|
gehilley/NondimensionalWeathering
|
8b59a00d59c026e1b3a3b8f7c3c6fe51a272c21a
|
[
"MIT"
] | null | null | null |
models/Round6/base_case.py
|
gehilley/NondimensionalWeathering
|
8b59a00d59c026e1b3a3b8f7c3c6fe51a272c21a
|
[
"MIT"
] | null | null | null |
filename = 'models/Round6/base_case.p'
from weathering_model.weathering_model import run_weathering_model
import numpy as np
import pickle as p
# Run model:
L_star = 9.51
Y0_star = 3.56E-04
v_star = 2.88E+02
nx = 101
t_star_max = 35.8
t_star = np.linspace(0,t_star_max,num=11)
dx_star = L_star / float(nx)
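# e.g. with L_star = 9.51 and nx = 101, dx_star ~ 0.0942 (non-dimensional grid spacing)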
x, X, Y = run_weathering_model(L_star, v_star, Y0_star, t_star, dxstar=dx_star)
p.dump((x, X, Y, L_star, Y0_star, v_star, nx, t_star, dx_star), open(filename, 'wb'))
| 22.761905
| 85
| 0.73431
|
9fa5a3b7ce8b947fb031c9941a103e897e433f3a
| 3,372
|
py
|
Python
|
29_nicePitchShifter.py
|
jaakjensen/PythonDSP
|
d4f5850a5379c14d531e6f9c6d43e03f53fb888d
|
[
"MIT"
] | 1
|
2022-01-19T10:40:41.000Z
|
2022-01-19T10:40:41.000Z
|
29_nicePitchShifter.py
|
jaakjensen/PythonDSP
|
d4f5850a5379c14d531e6f9c6d43e03f53fb888d
|
[
"MIT"
] | null | null | null |
29_nicePitchShifter.py
|
jaakjensen/PythonDSP
|
d4f5850a5379c14d531e6f9c6d43e03f53fb888d
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
import scipy.io.wavfile as wav
wav_fname = 'dspfiles/BibioGuitar.wav'
fs, inputSignal = wav.read(wav_fname)
Ts = 1/fs
semitones = 7 #how many semitones do we increase by?
tr = 2**(semitones/12)
dRate = 1 - tr #Delay rate of change
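# A quick worked example (sketch): for 7 semitones up, tr = 2**(7/12) ~ 1.498
# (a perfect fifth), so dRate ~ -0.498; a negative dRate shrinks the delay each
# sample, which raises the pitch.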
#Initialize the delay buffer
maxDelay = int(0.05 * fs) #max delay is 50 ms
tau = (maxDelay/ abs(dRate)) * Ts #period of sawtooth LFO
freq = 1/tau #frequency of LFO
fade = round((tau*fs)/8)
Hz = (freq/2) * (8/7)
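# LFO math check (a sketch): the delay sweeps maxDelay samples at |dRate| samples
# per output sample, so one sweep lasts tau = (maxDelay/|dRate|) * Ts seconds and
# the sawtooth repeats at 1/tau Hz (e.g. fs=44100: maxDelay=2205, tau ~ 0.100 s).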
#Calculate cross fades (note: crossfading between the two delay lines is not implemented below)
#Conditional to handle pitch up and pitch down
if dRate > 0: #Pitch decrease
d1 = dRate * fade
d2 = maxDelay - 1
d1Temp = d1 # Used to control the length
d2Temp = d2 # of the LFO for proper amount of overlap
else : #Pitch increase
#initialize delay so LFO cycles line up with crossfade
d1 = maxDelay - maxDelay/8
d2 = 0
d1Temp = d1
d2Temp = d2
timelength = inputSignal.shape[0] / fs
samplelength = inputSignal.shape[0]
# Divide audio signal by max int value for signed 16 bit number
inputSignal = inputSignal/np.iinfo(np.int16).max
#Set up the time axis for the waveform (for plotting)
time = np.linspace(0, timelength, inputSignal.shape[0])
#initialize output signal
out = np.zeros(samplelength)
#Create two 1 x maxDelay delay buffers filled with zeros (only buffer 1 is used below)
audioBuffer1 = np.zeros(maxDelay)
audioBuffer2 = np.zeros(maxDelay)
#LFO Parameters
lfo1 = np.zeros(samplelength)
lfo2 = np.zeros(samplelength)
#Run the function through all the samples, starting from delay line 1's initial delay
d = d1
for n in range(0,samplelength):
#Determine output of delay buffer, which could be a fractional delay time
intDelay = int(np.floor(d))
nextSamp = ((intDelay + 1) % (maxDelay))
frac = d - intDelay
    if intDelay == 0: #when delay time = zero
        #"out" comes "in", not just delay buffer
        out[n] = (1-frac) * inputSignal[n,1] + frac * audioBuffer1[0]
    else:
        out[n] = (1-frac) * audioBuffer1[intDelay] + frac * audioBuffer1[nextSamp]
    audioBuffer1[1:] = audioBuffer1[0:-1]
    audioBuffer1[0] = inputSignal[n,1]
    # Store the current delay in signal for plotting
    lfo1[n] = d
d = d+dRate #Change the delay time for the next loop
#if necessary, start a new cycle in LFO
if d<0:
d = maxDelay - 1
elif d > maxDelay - 1:
d = 0
########################################################################
print("DSP complete")
#set up the graphs
#fig, axes = plt.subplots(nrows=2,ncols=1)
#plot the original waveform
#axes[0].plot(time, inputSignal, label="Original Audio Signal")
#axes[0].set_xlabel("Time [s]")
#axes[0].set_ylabel("Amplitude")
#axes[0].legend(loc= 7)
#axes[0].set_xlim([0,1])
#plot the original waveform
#axes[1].plot(time, out, label="Processed Audio Signal")
#axes[1].set_xlabel("Time [s]")
#axes[1].set_ylabel("Amplitude")
#axes[1].legend(loc= 7)
#axes[1].set_xlim([0,1])
#Normalize the audio output level to max output
amplitude = np.iinfo(np.int16).max - 10
out = out*amplitude
#Truncate any non-integer/fractional data
#If we don't do this, the wav file won't be readable
out = np.asarray(out, dtype = np.int16)
#Write the data to an output file
wav.write("dspfiles/outputfiles/nicePitchShift.wav", fs, out) #write at the input sample rate
print("Wav file written")
#plt.show()
| 26.761905
| 80
| 0.650949
|
899263acbfc0d75aa9365eb92cb3e8ada285ee7e
| 12,005
|
py
|
Python
|
src/testdir/test_channel.py
|
uakms/macvim
|
72e6bbfa9f0a436ba0c93b8dede60bc307e07d19
|
[
"Vim"
] | 1
|
2019-01-23T10:07:39.000Z
|
2019-01-23T10:07:39.000Z
|
src/testdir/test_channel.py
|
uakms/macvim
|
72e6bbfa9f0a436ba0c93b8dede60bc307e07d19
|
[
"Vim"
] | null | null | null |
src/testdir/test_channel.py
|
uakms/macvim
|
72e6bbfa9f0a436ba0c93b8dede60bc307e07d19
|
[
"Vim"
] | 1
|
2021-11-07T21:46:41.000Z
|
2021-11-07T21:46:41.000Z
|
#!/usr/bin/python
#
# Server that will accept connections from a Vim channel.
# Used by test_channel.vim.
#
# This requires Python 2.6 or later.
from __future__ import print_function
import json
import socket
import sys
import time
import threading
try:
# Python 3
import socketserver
except ImportError:
# Python 2
import SocketServer as socketserver
class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler):
def handle(self):
print("=== socket opened ===")
while True:
try:
received = self.request.recv(4096).decode('utf-8')
except socket.error:
print("=== socket error ===")
break
except IOError:
print("=== socket closed ===")
break
if received == '':
print("=== socket closed ===")
break
print("received: {0}".format(received))
# We may receive two messages at once. Take the part up to the
# newline, which should be after the matching "]".
todo = received
while todo != '':
splitidx = todo.find('\n')
if splitidx < 0:
used = todo
todo = ''
else:
used = todo[:splitidx]
todo = todo[splitidx + 1:]
if used != received:
print("using: {0}".format(used))
try:
decoded = json.loads(used)
except ValueError:
print("json decoding failed")
decoded = [-1, '']
# Send a response if the sequence number is positive.
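                # (Wire format sketch: a request arrives as JSON like
                # '[1, "hello!"]' and the reply below is '[1, "got it"]';
                # negative sequence numbers carry "eval" responses.)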
if decoded[0] >= 0:
if decoded[1] == 'hello!':
# simply send back a string
response = "got it"
elif decoded[1] == 'malformed1':
cmd = '["ex",":"]wrong!["ex","smi"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
# Need to wait for Vim to give up, otherwise it
# sometimes fails on OS X.
time.sleep(0.2)
elif decoded[1] == 'malformed2':
cmd = '"unterminated string'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
# Need to wait for Vim to give up, otherwise the double
# quote in the "ok" response terminates the string.
time.sleep(0.2)
elif decoded[1] == 'malformed3':
cmd = '["ex","missing ]"'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
# Need to wait for Vim to give up, otherwise the ]
# in the "ok" response terminates the list.
time.sleep(0.2)
elif decoded[1] == 'split':
cmd = '["ex","let '
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
time.sleep(0.01)
cmd = 'g:split = 123"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1].startswith("echo "):
# send back the argument
response = decoded[1][5:]
time.sleep(0.01)
elif decoded[1] == 'make change':
# Send two ex commands at the same time, before
# replying to the request.
cmd = '["ex","call append(\\"$\\",\\"added1\\")"]'
cmd += '["ex","call append(\\"$\\",\\"added2\\")"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'bad command':
cmd = '["ex","foo bar"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'do normal':
# Send a normal command.
cmd = '["normal","G$s more\u001b"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-works':
# Send an eval request. We ignore the response.
cmd = '["expr","\\"foo\\" . 123", -1]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-special':
# Send an eval request. We ignore the response.
cmd = '["expr","\\"foo\x7f\x10\x01bar\\"", -2]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-getline':
# Send an eval request. We ignore the response.
cmd = '["expr","getline(3)", -3]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-fails':
# Send an eval request that will fail.
cmd = '["expr","xxx", -4]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-error':
# Send an eval request that works but the result can't
# be encoded.
cmd = '["expr","function(\\"tr\\")", -5]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-bad':
# Send an eval request missing the third argument.
cmd = '["expr","xxx"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'an expr':
# Send an expr request.
cmd = '["expr","setline(\\"$\\", [\\"one\\",\\"two\\",\\"three\\"])"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'call-func':
cmd = '["call","MyFunction",[1,2,3], 0]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'redraw':
cmd = '["redraw",""]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'redraw!':
cmd = '["redraw","force"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'empty-request':
cmd = '[]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'eval-result':
# Send back the last received eval result.
response = last_eval
elif decoded[1] == 'call me':
cmd = '[0,"we called you"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "ok"
elif decoded[1] == 'call me again':
cmd = '[0,"we did call you"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = ""
elif decoded[1] == 'send zero':
cmd = '[0,"zero index"]'
print("sending: {0}".format(cmd))
self.request.sendall(cmd.encode('utf-8'))
response = "sent zero"
elif decoded[1] == 'close me':
print("closing")
self.request.close()
response = ""
elif decoded[1] == 'wait a bit':
time.sleep(0.2)
response = "waited"
elif decoded[1] == '!quit!':
# we're done
self.server.shutdown()
return
elif decoded[1] == '!crash!':
# Crash!
42 / 0
else:
response = "what?"
if response == "":
print("no response")
else:
encoded = json.dumps([decoded[0], response])
print("sending: {0}".format(encoded))
self.request.sendall(encoded.encode('utf-8'))
# Negative numbers are used for "eval" responses.
elif decoded[0] < 0:
last_eval = decoded
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
pass
def writePortInFile(port):
# Write the port number in Xportnr, so that the test knows it.
f = open("Xportnr", "w")
f.write("{0}".format(port))
f.close()
if __name__ == "__main__":
HOST, PORT = "localhost", 0
    # Wait half a second before opening the port to test waittime in ch_open().
    # We do want to report the port number, so pick that first.  We cannot open
    # the socket yet, so just assume a fixed port is free.
if len(sys.argv) >= 2 and sys.argv[1] == 'delay':
PORT = 13684
writePortInFile(PORT)
print("Wait for it...")
time.sleep(0.5)
server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
ip, port = server.server_address
# Start a thread with the server. That thread will then start a new thread
# for each connection.
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
writePortInFile(port)
print("Listening on port {0}".format(port))
# Main thread terminates, but the server continues running
# until server.shutdown() is called.
try:
while server_thread.isAlive():
server_thread.join(1)
except (KeyboardInterrupt, SystemExit):
server.shutdown()
| 44.298893
| 94
| 0.422241
|
2575ea3e862be2a50d6af889cfd57de5930d5bb9
| 2,459
|
py
|
Python
|
poptimizer/portfolio/tests/test_portfolio.py
|
ArjaraGit/poptimizer
|
0d221d964791cc387dde44529cacb19d35e5febe
|
[
"Unlicense"
] | null | null | null |
poptimizer/portfolio/tests/test_portfolio.py
|
ArjaraGit/poptimizer
|
0d221d964791cc387dde44529cacb19d35e5febe
|
[
"Unlicense"
] | null | null | null |
poptimizer/portfolio/tests/test_portfolio.py
|
ArjaraGit/poptimizer
|
0d221d964791cc387dde44529cacb19d35e5febe
|
[
"Unlicense"
] | 1
|
2021-12-02T13:32:44.000Z
|
2021-12-02T13:32:44.000Z
|
import logging
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from poptimizer.config import POptimizerError
from poptimizer.portfolio import portfolio
from poptimizer.portfolio.portfolio import CASH, PORTFOLIO
PARAMS = dict(
name=["test"],
date="2018-03-19",
cash=1000,
positions=dict(GAZP=6820, VSMO=145, TTLK=1_230_000),
value=3_699_111,
)
@pytest.fixture(scope="module", name="port")
def make_portfolio():
return portfolio.Portfolio(**PARAMS)
def test_portfolio(monkeypatch, port):
monkeypatch.setattr(portfolio, "LIQUIDITY_DAYS", 100)
assert "ПОРТФЕЛЬ [test] - 2018-03-19" in str(port)
assert port.date == pd.Timestamp("2018-03-19")
assert port.index.tolist() == ["GAZP", "TTLK", "VSMO", CASH, PORTFOLIO]
assert np.allclose(port.shares, [6820, 1_230_000, 145, 1000, 1])
assert np.allclose(port.lot_size, [10, 1000, 1, 1, 1])
assert np.allclose(port.lots, [682, 1230, 145, 1000, 1])
assert np.allclose(port.price, [139.91, 0.1525, 17630, 1, 3_699_111])
assert np.allclose(port.value, [954_186, 187_575, 2_556_350, 1000, 3_699_111])
assert np.allclose(
port.weight,
[0.257_950_085_844_95, 0.050_708_129_601_95, 0.691_071_449_329_312, 0.000_270_335_223_788, 1],
)
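    # weight is position value / total portfolio value,
    # e.g. 954_186 / 3_699_111 ~ 0.25795 for GAZP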
assert np.allclose(port.turnover_factor, [1006.594863, 0.007234170197424721, 0.844742, 1007.446839, 1007.446839])
def test_portfolio_wrong_value():
with pytest.raises(POptimizerError) as error:
PARAMS["value"] = 123
portfolio.Portfolio(**PARAMS)
assert "Введенная стоимость портфеля 123" in str(error.value)
def fake_securities_with_reg_number():
return pd.Index(["SBER", "SBERP"])
def test_load_from_yaml(monkeypatch):
monkeypatch.setattr(portfolio.config, "PORT_PATH", Path(__file__).parent)
port = portfolio.load_from_yaml("2020-06-22")
assert isinstance(port, portfolio.Portfolio)
assert port.date == pd.Timestamp("2020-06-22")
print(port.value[PORTFOLIO])
assert list(port.index[:-2]) == ["AKRN", "GMKN", "VSMO"]
assert port.shares["AKRN"] == 1
assert port.shares["GMKN"] == 5
assert port.shares["VSMO"] == 4
assert port.shares["CASH"] == 369.02
def test_load_tickers(monkeypatch):
monkeypatch.setattr(portfolio.config, "PORT_PATH", Path(__file__).parent)
tickers = portfolio.load_tickers()
assert isinstance(tickers, tuple)
assert tickers == ("AKRN", "GMKN", "VSMO")
| 32.786667
| 117
| 0.701098
|
b466637895b7bb1bb07021ad297d64b537e22894
| 393
|
py
|
Python
|
python/ql/test/library-tests/frameworks/django-v2-v3/testproj/wsgi.py
|
timoles/codeql
|
2d24387e9e300bf03be35694816b1e76ae88a50c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/library-tests/frameworks/django-v2-v3/testproj/wsgi.py
|
baby636/codeql
|
097b6e5e3364ecc7103586d6feb308861e15538e
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/library-tests/frameworks/django-v2-v3/testproj/wsgi.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
"""
WSGI config for testproj project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testproj.settings')
application = get_wsgi_application()
| 23.117647
| 78
| 0.78626
|
c49739da41ff9c2a4e8cee56862d90fab4023245
| 2,526
|
py
|
Python
|
tasks/util/release.py
|
jchesterpivotal/Faasm
|
d4e25baf0c69df7eea8614de3759792748f7b9d4
|
[
"Apache-2.0"
] | null | null | null |
tasks/util/release.py
|
jchesterpivotal/Faasm
|
d4e25baf0c69df7eea8614de3759792748f7b9d4
|
[
"Apache-2.0"
] | null | null | null |
tasks/util/release.py
|
jchesterpivotal/Faasm
|
d4e25baf0c69df7eea8614de3759792748f7b9d4
|
[
"Apache-2.0"
] | null | null | null |
from os.path import join
from subprocess import check_output
from tasks.util.env import FAASM_LOCAL_DIR, FAASM_TOOLCHAIN_FILE
from tasks.util.version import get_faasm_version
TOOLCHAIN_INSTALL = join(FAASM_LOCAL_DIR, "toolchain")
def _get_artifact_url(name, version=None):
version = version if version else get_faasm_version()
url = "https://github.com/lsds/Faasm/releases/download/v{}/{}".format(version, name)
return url
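# For example (version string shown is illustrative):
#   _get_artifact_url("faasm-toolchain-0.1.0.tar.gz", "0.1.0")
#   -> "https://github.com/lsds/Faasm/releases/download/v0.1.0/faasm-toolchain-0.1.0.tar.gz"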
def get_sysroot_tar_name(version=None):
version = version if version else get_faasm_version()
return "faasm-sysroot-{}.tar.gz".format(version)
def get_sysroot_tar_path():
tar_name = get_sysroot_tar_name()
return join(FAASM_LOCAL_DIR, tar_name)
def get_sysroot_url(version=None):
tar_name = get_sysroot_tar_name(version=version)
return _get_artifact_url(tar_name, version=version)
def get_toolchain_tar_name(version=None):
version = version if version else get_faasm_version()
return "faasm-toolchain-{}.tar.gz".format(version)
def get_toolchain_tar_path():
tar_name = get_toolchain_tar_name()
return join(FAASM_LOCAL_DIR, tar_name)
def get_toolchain_url(version=None):
tar_name = get_toolchain_tar_name(version=version)
return _get_artifact_url(tar_name, version=version)
def get_runtime_tar_name(version=None):
version = version if version else get_faasm_version()
return "faasm-runtime-root-{}.tar.gz".format(version)
def get_runtime_tar_path():
tar_name = get_runtime_tar_name()
return join(FAASM_LOCAL_DIR, tar_name)
def get_runtime_url(version=None):
tar_name = get_runtime_tar_name(version=version)
return _get_artifact_url(tar_name, version=version)
def tar_toolchain():
tar_name = get_toolchain_tar_name()
tar_path = get_toolchain_tar_path()
print("Creating archive of Faasm toolchain")
check_output("tar -cf {} toolchain".format(tar_name), shell=True, cwd=FAASM_LOCAL_DIR)
return tar_name, tar_path
def tar_runtime_root():
tar_name = get_runtime_tar_name()
tar_path = get_runtime_tar_path()
# Compress
print("Creating archive of Faasm runtime root")
check_output("tar -cf {} runtime_root".format(tar_path), shell=True, cwd=FAASM_LOCAL_DIR)
return tar_name, tar_path
def tar_sysroot():
tar_name = get_sysroot_tar_name()
tar_path = get_sysroot_tar_path()
print("Creating archive of Faasm sysroot")
check_output("tar -cf {} llvm-sysroot".format(tar_name), shell=True, cwd=FAASM_LOCAL_DIR)
return tar_name, tar_path
| 28.066667
| 93
| 0.756928
|
feca693e5c590cb0ffedab8463eae4b4ecf6d2c5
| 1,425
|
py
|
Python
|
Kai/crab/NANOv7_Fri13/2017/ElEl/crab_cfg_2017_ElEl_B.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2022-01-17T17:29:38.000Z
|
2022-01-17T17:29:38.000Z
|
Kai/crab/NANOv7_Fri13/2017/ElEl/crab_cfg_2017_ElEl_B.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | null | null | null |
Kai/crab/NANOv7_Fri13/2017/ElEl/crab_cfg_2017_ElEl_B.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2021-12-15T10:56:50.000Z
|
2021-12-15T10:56:50.000Z
|
import os
from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import config, getUsernameFromCRIC
config = Configuration()
config.section_("General")
config.General.requestName = '2017_ElEl_B'
config.General.transferOutputs = True
config.General.transferLogs = True
config.section_("JobType")
config.JobType.allowUndistributedCMSSW = True
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'PSet.py'
config.JobType.maxMemoryMB = 2000
config.JobType.maxJobRuntimeMin = 1315
config.JobType.numCores = 1
config.JobType.scriptExe = 'crab_script_2017_ElEl_B.sh'
config.JobType.inputFiles = ['crab_script_2017_ElEl_B.py',
os.path.join(os.environ['CMSSW_BASE'],'src/PhysicsTools/NanoAODTools/scripts/haddnano.py'),
]
config.JobType.outputFiles = ['hist.root']
config.JobType.sendPythonFolder = True
config.section_("Data")
config.Data.inputDataset = '/DoubleEG/Run2017B-02Apr2020-v1/NANOAOD'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
if config.Data.splitting == 'FileBased':
config.Data.unitsPerJob = 1
# config.Data.totalUnits = $TOTAL_UNITS
# config.Data.userInputFiles = []
config.Data.outLFNDirBase = '/store/user/{user}/Fri13'.format(user=getUsernameFromCRIC())
config.Data.publication = True
config.Data.outputDatasetTag = 'Fri13'
config.section_("Site")
config.Site.storageSite = 'T2_CH_CERN'
| 36.538462
| 120
| 0.762807
|
dda0c49e580be98242d3b8520e746c5991626259
| 11,404
|
py
|
Python
|
recbole/trainer/hyper_tuning.py
|
ValerieYang99/RecBole
|
19ea101b300fbf31bbc79d8efc80c65926834488
|
[
"MIT"
] | 1,773
|
2020-11-04T01:22:11.000Z
|
2022-03-31T08:05:41.000Z
|
recbole/trainer/hyper_tuning.py
|
chenyushuo/RecBole
|
f04084b8d2cffcb79eb9e4b21325f8f6c75c638e
|
[
"MIT"
] | 378
|
2020-11-05T02:42:27.000Z
|
2022-03-31T22:57:04.000Z
|
recbole/trainer/hyper_tuning.py
|
chenyushuo/RecBole
|
f04084b8d2cffcb79eb9e4b21325f8f6c75c638e
|
[
"MIT"
] | 354
|
2020-11-04T01:37:09.000Z
|
2022-03-31T10:39:32.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2020/7/19 19:06
# @Author : Shanlei Mu
# @Email : slmu@ruc.edu.cn
# @File : hyper_tuning.py
"""
recbole.trainer.hyper_tuning
############################
"""
from functools import partial
import numpy as np
from recbole.utils.utils import dict2str
def _recursiveFindNodes(root, node_type='switch'):
from hyperopt.pyll.base import Apply
nodes = []
if isinstance(root, (list, tuple)):
for node in root:
nodes.extend(_recursiveFindNodes(node, node_type))
elif isinstance(root, dict):
for node in root.values():
nodes.extend(_recursiveFindNodes(node, node_type))
elif isinstance(root, (Apply)):
if root.name == node_type:
nodes.append(root)
for node in root.pos_args:
if node.name == node_type:
nodes.append(node)
for _, node in root.named_args:
if node.name == node_type:
nodes.append(node)
return nodes
def _parameters(space):
# Analyze the domain instance to find parameters
parameters = {}
if isinstance(space, dict):
space = list(space.values())
for node in _recursiveFindNodes(space, 'switch'):
# Find the name of this parameter
paramNode = node.pos_args[0]
assert paramNode.name == 'hyperopt_param'
paramName = paramNode.pos_args[0].obj
# Find all possible choices for this parameter
values = [literal.obj for literal in node.pos_args[1:]]
parameters[paramName] = np.array(range(len(values)))
return parameters
def _spacesize(space):
# Compute the number of possible combinations
params = _parameters(space)
return np.prod([len(values) for values in params.values()])
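# For example (a sketch): a space with two 'choice' parameters offering 3 and 4
# values respectively gives _spacesize(space) == 12 exhaustive combinations.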
class ExhaustiveSearchError(Exception):
r""" ExhaustiveSearchError
"""
pass
def _validate_space_exhaustive_search(space):
from hyperopt.pyll.base import dfs, as_apply
from hyperopt.pyll.stochastic import implicit_stochastic_symbols
supported_stochastic_symbols = ['randint', 'quniform', 'qloguniform', 'qnormal', 'qlognormal', 'categorical']
for node in dfs(as_apply(space)):
if node.name in implicit_stochastic_symbols:
if node.name not in supported_stochastic_symbols:
raise ExhaustiveSearchError(
'Exhaustive search is only possible with the following stochastic symbols: '
'' + ', '.join(supported_stochastic_symbols)
)
def exhaustive_search(new_ids, domain, trials, seed, nbMaxSucessiveFailures=1000):
r""" This is for exhaustive search in HyperTuning.
"""
from hyperopt import pyll
from hyperopt.base import miscs_update_idxs_vals
# Build a hash set for previous trials
hashset = set([
hash(
frozenset([(key, value[0]) if len(value) > 0 else ((key, None))
for key, value in trial['misc']['vals'].items()])
) for trial in trials.trials
])
rng = np.random.RandomState(seed)
rval = []
for _, new_id in enumerate(new_ids):
newSample = False
nbSucessiveFailures = 0
while not newSample:
# -- sample new specs, idxs, vals
idxs, vals = pyll.rec_eval(domain.s_idxs_vals, memo={
domain.s_new_ids: [new_id],
domain.s_rng: rng,
})
new_result = domain.new_result()
new_misc = dict(tid=new_id, cmd=domain.cmd, workdir=domain.workdir)
miscs_update_idxs_vals([new_misc], idxs, vals)
# Compare with previous hashes
h = hash(frozenset([(key, value[0]) if len(value) > 0 else ((key, None)) for key, value in vals.items()]))
if h not in hashset:
newSample = True
else:
# Duplicated sample, ignore
nbSucessiveFailures += 1
if nbSucessiveFailures > nbMaxSucessiveFailures:
# No more samples to produce
return []
rval.extend(trials.new_trial_docs([new_id], [None], [new_result], [new_misc]))
return rval
class HyperTuning(object):
r"""HyperTuning Class is used to manage the parameter tuning process of recommender system models.
    Given an objective function, a parameter search space and an optimization algorithm, HyperTuning can find
the best result among these parameters
Note:
HyperTuning is based on the hyperopt (https://github.com/hyperopt/hyperopt)
Thanks to sbrodeur for the exhaustive search code.
https://github.com/hyperopt/hyperopt/issues/200
"""
def __init__(
self,
objective_function,
space=None,
params_file=None,
params_dict=None,
fixed_config_file_list=None,
algo='exhaustive',
max_evals=100
):
self.best_score = None
self.best_params = None
self.best_test_result = None
self.params2result = {}
self.objective_function = objective_function
self.max_evals = max_evals
self.fixed_config_file_list = fixed_config_file_list
if space:
self.space = space
elif params_file:
self.space = self._build_space_from_file(params_file)
elif params_dict:
self.space = self._build_space_from_dict(params_dict)
else:
            raise ValueError('at least one of `space`, `params_file` and `params_dict` must be provided')
if isinstance(algo, str):
if algo == 'exhaustive':
self.algo = partial(exhaustive_search, nbMaxSucessiveFailures=1000)
self.max_evals = _spacesize(self.space)
else:
raise ValueError('Illegal algo [{}]'.format(algo))
else:
self.algo = algo
@staticmethod
def _build_space_from_file(file):
from hyperopt import hp
space = {}
with open(file, 'r') as fp:
for line in fp:
para_list = line.strip().split(' ')
if len(para_list) < 3:
continue
para_name, para_type, para_value = para_list[0], para_list[1], "".join(para_list[2:])
if para_type == 'choice':
para_value = eval(para_value)
space[para_name] = hp.choice(para_name, para_value)
elif para_type == 'uniform':
low, high = para_value.strip().split(',')
space[para_name] = hp.uniform(para_name, float(low), float(high))
elif para_type == 'quniform':
low, high, q = para_value.strip().split(',')
space[para_name] = hp.quniform(para_name, float(low), float(high), float(q))
elif para_type == 'loguniform':
low, high = para_value.strip().split(',')
space[para_name] = hp.loguniform(para_name, float(low), float(high))
else:
raise ValueError('Illegal param type [{}]'.format(para_type))
return space
@staticmethod
def _build_space_from_dict(config_dict):
from hyperopt import hp
space = {}
for para_type in config_dict:
if para_type == 'choice':
for para_name in config_dict['choice']:
para_value = config_dict['choice'][para_name]
space[para_name] = hp.choice(para_name, para_value)
elif para_type == 'uniform':
for para_name in config_dict['uniform']:
para_value = config_dict['uniform'][para_name]
low = para_value[0]
high = para_value[1]
space[para_name] = hp.uniform(para_name, float(low), float(high))
elif para_type == 'quniform':
for para_name in config_dict['quniform']:
para_value = config_dict['quniform'][para_name]
low = para_value[0]
high = para_value[1]
q = para_value[2]
space[para_name] = hp.quniform(para_name, float(low), float(high), float(q))
elif para_type == 'loguniform':
for para_name in config_dict['loguniform']:
para_value = config_dict['loguniform'][para_name]
low = para_value[0]
high = para_value[1]
space[para_name] = hp.loguniform(para_name, float(low), float(high))
else:
raise ValueError('Illegal param type [{}]'.format(para_type))
return space
@staticmethod
def params2str(params):
r""" convert dict to str
Args:
params (dict): parameters dict
Returns:
str: parameters string
"""
params_str = ''
for param_name in params:
params_str += param_name + ':' + str(params[param_name]) + ', '
return params_str[:-2]
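    # For example (a sketch): params2str({'learning_rate': 0.01}) -> 'learning_rate:0.01'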
@staticmethod
def _print_result(result_dict: dict):
print('current best valid score: %.4f' % result_dict['best_valid_score'])
print('current best valid result:')
print(result_dict['best_valid_result'])
print('current test result:')
print(result_dict['test_result'])
print()
def export_result(self, output_file=None):
r""" Write the searched parameters and corresponding results to the file
Args:
output_file (str): the output file
"""
with open(output_file, 'w') as fp:
for params in self.params2result:
fp.write(params + '\n')
fp.write('Valid result:\n' + dict2str(self.params2result[params]['best_valid_result']) + '\n')
fp.write('Test result:\n' + dict2str(self.params2result[params]['test_result']) + '\n\n')
def trial(self, params):
r"""Given a set of parameters, return results and optimization status
Args:
params (dict): the parameter dictionary
"""
import hyperopt
config_dict = params.copy()
params_str = self.params2str(params)
print('running parameters:', config_dict)
result_dict = self.objective_function(config_dict, self.fixed_config_file_list)
self.params2result[params_str] = result_dict
score, bigger = result_dict['best_valid_score'], result_dict['valid_score_bigger']
if not self.best_score:
self.best_score = score
self.best_params = params
self._print_result(result_dict)
else:
if bigger:
if score > self.best_score:
self.best_score = score
self.best_params = params
self._print_result(result_dict)
else:
if score < self.best_score:
self.best_score = score
self.best_params = params
self._print_result(result_dict)
if bigger:
score = -score
return {'loss': score, 'status': hyperopt.STATUS_OK}
def run(self):
r""" begin to search the best parameters
"""
from hyperopt import fmin
fmin(self.trial, self.space, algo=self.algo, max_evals=self.max_evals)
| 36.787097
| 118
| 0.584532
|
579c30165d8cf07c02a7f02c78049ec4ec612d5a
| 1,713
|
py
|
Python
|
core/helper.py
|
ArcArcaman/Karnaugh-Map-Generator
|
47e17fa18164a287f2c34d6fc539b84a7a28793a
|
[
"MIT"
] | null | null | null |
core/helper.py
|
ArcArcaman/Karnaugh-Map-Generator
|
47e17fa18164a287f2c34d6fc539b84a7a28793a
|
[
"MIT"
] | null | null | null |
core/helper.py
|
ArcArcaman/Karnaugh-Map-Generator
|
47e17fa18164a287f2c34d6fc539b84a7a28793a
|
[
"MIT"
] | null | null | null |
def is_boolean(val):
if val == 0 or val == 1 or val == '0' or val == '1':
return True
return False
class KMap:
@classmethod
def find_id(cls, idx, size):
idx_int = int(idx)
_offset = [0, 1, 3, 2]
start_val = idx_int//4
offset_val = _offset[idx_int%4]
id = bin(4*start_val + offset_val)[2:]
return id.zfill(size)
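    # For example (a sketch): with size=2, find_id maps indices 0..3 to the
    # Gray-code sequence '00', '01', '11', '10' used along K-map axes.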
@classmethod
def find_idx(cls, row_id, col_id):
row_int = int(row_id, 2)
col_int = int(col_id, 2)
_offset = [0, 1, 3, 2]
row_start_val = row_int//4
row_offset = _offset[row_int%4]
row_idx = 4*row_start_val + row_offset
col_start_val = col_int//4
col_offset = _offset[col_int%4]
col_idx = 4*col_start_val + col_offset
return (row_idx, col_idx)
def __init__(self, row_count, col_count, row_name, col_name):
self.row_bit_count = row_count
self.col_bit_count = col_count
self.row_count = 2**row_count
self.col_count = 2**col_count
self.row_name = row_name
self.col_name = col_name
self._kmap = [['X' for _ in range(2**col_count)] for _ in range(2**row_count)]
def set(self, row_id, col_id, val):
row_idx, col_idx = self.find_idx(row_id, col_id)
self._kmap[row_idx][col_idx] = val
def __str__(self):
result = self.row_name+" \\ "+self.col_name+"\n"
result += "\t"+"\t".join([self.find_id(i, self.col_bit_count) for i in range(self.col_count)])+"\n"
for i in range(self.row_count):
result += self.find_id(i, self.row_bit_count)+"\t"+"\t".join(self._kmap[i])+"\n"
return result
| 27.629032
| 107
| 0.579685
|
2ed3b44b831cd6b3a45b84e6d8fb1fb271b42ad9
| 15,730
|
py
|
Python
|
docs/conf.py
|
kimpers/airflow
|
88989200a66291580088188f06a6db503ac823e2
|
[
"Apache-2.0"
] | 1
|
2020-04-22T19:37:22.000Z
|
2020-04-22T19:37:22.000Z
|
docs/conf.py
|
kimpers/airflow
|
88989200a66291580088188f06a6db503ac823e2
|
[
"Apache-2.0"
] | 2
|
2021-03-11T04:23:28.000Z
|
2021-09-29T17:44:44.000Z
|
docs/conf.py
|
kimpers/airflow
|
88989200a66291580088188f06a6db503ac823e2
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# flake8: noqa
# Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Airflow documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 20:50:01 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
"""Configuration of Airflow Docs"""
import os
import sys
from typing import Dict
import airflow
autodoc_mock_imports = [
'MySQLdb',
'adal',
'analytics',
'azure',
'azure.cosmos',
'azure.datalake',
'azure.mgmt',
'boto3',
'botocore',
'bson',
'cassandra',
'celery',
'cloudant',
'cryptography',
'cx_Oracle',
'datadog',
'distributed',
'docker',
'google',
'google_auth_httplib2',
'googleapiclient',
'grpc',
'hdfs',
'httplib2',
'jaydebeapi',
'jenkins',
'jira',
'kubernetes',
'msrestazure',
'pandas',
'pandas_gbq',
'paramiko',
'pinotdb',
'psycopg2',
'pydruid',
    'pyhive',
'pymongo',
'pymssql',
'pysftp',
'qds_sdk',
'redis',
'simple_salesforce',
'slackclient',
'smbclient',
'snowflake',
'sshtunnel',
'tenacity',
'vertica_python',
'winrm',
'zdesk',
]
# Hack to allow changing for piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.join(os.path.dirname(__file__), 'exts'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinxarg.ext',
'sphinxcontrib.httpdomain',
'sphinx.ext.intersphinx',
'autoapi.extension',
'exampleinclude',
'docroles'
]
autodoc_default_options = {
'show-inheritance': True,
'members': True
}
viewcode_follow_imported_members = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Airflow'
# copyright = ''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '1.0.0'
version = airflow.__version__
# The full version, including alpha/beta/rc tags.
# release = '1.0.0'
release = airflow.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_api/airflow/_vendor',
'_api/airflow/api',
'_api/airflow/bin',
'_api/airflow/config_templates',
'_api/airflow/configuration',
'_api/airflow/contrib/auth',
'_api/airflow/contrib/example_dags',
'_api/airflow/contrib/index.rst',
'_api/airflow/contrib/kubernetes',
'_api/airflow/contrib/task_runner',
'_api/airflow/contrib/utils',
'_api/airflow/dag',
'_api/airflow/default_login',
'_api/airflow/example_dags',
'_api/airflow/exceptions',
'_api/airflow/index.rst',
'_api/airflow/jobs',
'_api/airflow/lineage',
'_api/airflow/typing',
'_api/airflow/logging_config',
'_api/airflow/macros',
'_api/airflow/migrations',
'_api/airflow/plugins_manager',
'_api/airflow/security',
'_api/airflow/settings',
'_api/airflow/sentry',
'_api/airflow/stats',
'_api/airflow/task',
'_api/airflow/kubernetes',
'_api/airflow/ti_deps',
'_api/airflow/utils',
'_api/airflow/version',
'_api/airflow/www',
'_api/main',
'_api/airflow/gcp/index.rst',
'_api/airflow/gcp/example_dags',
'_api/airflow/gcp/utils',
'autoapi_templates',
'howto/operator/gcp/_partials',
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
intersphinx_mapping = {
'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest/', None),
'mongodb': ('https://api.mongodb.com/python/current/', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'python': ('https://docs.python.org/3/', None),
'requests': ('https://requests.readthedocs.io/en/master/', None),
'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest/', None),
'hdfs': ('https://hdfscli.readthedocs.io/en/latest/', None),
# google-cloud-python
'google-cloud-automl': ('https://googleapis.dev/python/automl/latest', None),
'google-cloud-bigquery': ('https://googleapis.dev/python/bigquery/latest', None),
'google-cloud-bigquery-datatransfer': ('https://googleapis.dev/python/bigquerydatatransfer/latest', None),
'google-cloud-bigquery-storage': ('https://googleapis.dev/python/bigquerystorage/latest', None),
'google-cloud-bigtable': ('https://googleapis.dev/python/bigtable/latest', None),
'google-cloud-container': ('https://googleapis.dev/python/container/latest', None),
'google-cloud-core': ('https://googleapis.dev/python/google-cloud-core/latest', None),
'google-cloud-datastore': ('https://googleapis.dev/python/datastore/latest', None),
'google-cloud-dlp': ('https://googleapis.dev/python/dlp/latest', None),
'google-cloud-kms': ('https://googleapis.dev/python/cloudkms/latest', None),
'google-cloud-language': ('https://googleapis.dev/python/language/latest', None),
'google-cloud-pubsub': ('https://googleapis.dev/python/pubsub/latest', None),
'google-cloud-redis': ('https://googleapis.dev/python/redis/latest', None),
'google-cloud-spanner': ('https://googleapis.dev/python/spanner/latest', None),
'google-cloud-speech': ('https://googleapis.dev/python/speech/latest', None),
'google-cloud-storage': ('https://googleapis.dev/python/storage/latest', None),
'google-cloud-tasks': ('https://googleapis.dev/python/cloudtasks/latest', None),
'google-cloud-texttospeech': ('https://googleapis.dev/python/texttospeech/latest', None),
'google-cloud-translate': ('https://googleapis.dev/python/translation/latest', None),
'google-cloud-videointelligence': ('https://googleapis.dev/python/videointelligence/latest', None),
'google-cloud-vision': ('https://googleapis.dev/python/vision/latest', None),
}
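# With the mapping above, reST sources can cross-reference objects from these
# external inventories, e.g. (illustrative references, not taken from the docs):
#   :class:`python:datetime.datetime`
#   :meth:`requests:requests.Session.get`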
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
import sphinx_rtd_theme # isort:skip pylint: disable=wrong-import-position,wrong-import-order
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Airflow Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Airflowdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
} # type: Dict[str,str]
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Airflow.tex', 'Airflow Documentation',
'Apache Airflow', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'airflow', 'Airflow Documentation',
['Apache Airflow'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [(
'index', 'Airflow', 'Airflow Documentation',
'Apache Airflow', 'Airflow',
'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
'Miscellaneous'
), ]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# sphinx-autoapi configuration
# See:
# https://sphinx-autoapi.readthedocs.io/en/latest/config.html
# Paths (relative or absolute) to the source code that you wish to generate
# your API documentation from.
autoapi_dirs = [
os.path.abspath('../airflow'),
]
# A directory that has user-defined templates to override our default templates.
autoapi_template_dir = 'autoapi_templates'
# A list of patterns to ignore when finding files
autoapi_ignore = [
# These modules are backcompat shims, don't build docs for them
'*/airflow/contrib/operators/s3_to_gcs_transfer_operator.py',
    '*/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py',
'*/airflow/kubernetes/kubernetes_request_factory/*',
'*/node_modules/*',
'*/migrations/*',
]
# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True
# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
autoapi_root = '_api'
# -- Options for example include ------------------------------------------
exampleinclude_sourceroot = os.path.abspath('..')
| 33.397028
| 110
| 0.701971
|
fb865fa4092408d9bc937c2e90853d0d8623f0a8
| 9,517
|
py
|
Python
|
coc/ext/discordlinks/__init__.py
|
notPlasticCat/coc.py
|
3b803a9c142d7554675418a6df6436dca90c5a05
|
[
"MIT"
] | 75
|
2019-04-18T18:27:24.000Z
|
2022-03-31T19:33:36.000Z
|
coc/ext/discordlinks/__init__.py
|
notPlasticCat/coc.py
|
3b803a9c142d7554675418a6df6436dca90c5a05
|
[
"MIT"
] | 69
|
2019-04-18T20:28:26.000Z
|
2022-03-10T14:06:07.000Z
|
coc/ext/discordlinks/__init__.py
|
notPlasticCat/coc.py
|
3b803a9c142d7554675418a6df6436dca90c5a05
|
[
"MIT"
] | 55
|
2019-04-17T21:11:22.000Z
|
2022-03-13T22:32:19.000Z
|
"""An extension that helps interact with the Clash of Clans Discord Junkies' Discord Links API."""
import asyncio
import base64
import logging
import typing
import json
from collections import namedtuple
from datetime import datetime
import aiohttp
from coc.http import json_or_text
from coc.utils import correct_tag
LOG = logging.getLogger(__name__)
AccessToken = namedtuple("AccessToken", ["token", "expires_at"])
def extract_expiry_from_jwt_token(token):
if isinstance(token, str):
token = token.encode("utf-8")
    elif not isinstance(token, bytes):
        # token is neither str nor bytes, so it cannot be decoded
        return None
try:
signing, _ = token.rsplit(b".", 1)
_, payload = signing.split(b".", 1)
except ValueError:
return None # not enough segments
if len(payload) % 4 > 0:
payload += b"=" * (4 - len(payload) % 4)
bytes_payload = base64.urlsafe_b64decode(payload)
dict_payload = json.loads(bytes_payload)
try:
expiry = dict_payload["exp"]
return datetime.fromtimestamp(expiry)
except KeyError:
return None
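# Illustrative example (not a real token): the helper only inspects the middle
# (payload) segment, so the header and signature segments can be arbitrary here.
#   >>> import base64, json
#   >>> payload = base64.urlsafe_b64encode(json.dumps({"exp": 1609459200}).encode())
#   >>> extract_expiry_from_jwt_token(b"header." + payload + b".signature")
#   datetime.datetime(2021, 1, 1, 0, 0)  # exact value depends on the local timezone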
def login(username: str, password: str, loop: asyncio.AbstractEventLoop = None) -> "DiscordLinkClient":
"""Eases logging into the API client.
For more information on this project, please join the discord server - <discord.gg/Eaja7gJ>
You must have your username and password as given on the server.
If unsure as to what this means, please reach out to an admin.
Parameters
-----------
username : str
Your username as given on the discord server.
password : str
Your password as given on the discord server
loop : Optional[:class:`asyncio.AbstractEventLoop`]
The :class:`asyncio.AbstractEventLoop` to use for HTTP requests.
An :func:`asyncio.get_event_loop()` will be used if ``None`` is passed
"""
if not isinstance(username, str) or not isinstance(password, str):
raise TypeError("username and password must both be a string")
if not username or not password:
raise ValueError("username or password must not be an empty string.")
if loop and not isinstance(loop, asyncio.AbstractEventLoop):
raise TypeError("loop must be of type asyncio.AbstractEventLoop, or None.")
return DiscordLinkClient(username, password, loop)
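# Illustrative usage (credentials and player tag are placeholders):
#   client = login("my-username", "my-password")
#   discord_id = await client.get_link("#2PP")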
class DiscordLinkClient:
"""An extension that helps interact with the Clash of Clans Discord Junkies' Discord Links API.
For more information on this project, please join the discord server - <discord.gg/Eaja7gJ>
You must have your username and password as given on the server.
If unsure as to what this means, please reach out to an admin.
Parameters
-----------
username : str
Your username as given on the discord server.
password : str
Your password as given on the discord server
loop : Optional[:class:`asyncio.AbstractEventLoop`]
The :class:`asyncio.AbstractEventLoop` to use for HTTP requests.
An :func:`asyncio.get_event_loop()` will be used if ``None`` is passed
"""
BASE_URL = "https://cocdiscordlink.azurewebsites.net/api"
__slots__ = ("username", "password", "loop", "key", "http_session")
def __init__(self, username: str, password: str, loop: asyncio.AbstractEventLoop = None):
self.username = username
self.password = password
self.loop = loop or asyncio.get_event_loop()
self.key = None # set in get_key()
self.http_session = aiohttp.ClientSession(loop=self.loop)
async def _request(self, method, url, *, token_request: bool = False, **kwargs):
        full_url = self.BASE_URL + url
        if not token_request:
            key = await self._get_key()
            headers = {"authorization": "Bearer {}".format(key)}
            kwargs["headers"] = headers
        async with self.http_session.request(method, full_url, **kwargs) as response:
            LOG.debug("%s (%s) has returned %s", full_url, method, response.status)
            data = await json_or_text(response)
            LOG.debug(data)
            if 200 <= response.status < 300:
                LOG.debug("%s has received %s", full_url, data)
                return data
            if response.status == 401:
                await self._refresh_key()
                # Retry with the original path so the base URL is not prefixed twice.
                return await self._request(method, url, **kwargs)
async def _get_key(self):
if not self.key or self.key.expires_at < datetime.utcnow():
await self._refresh_key()
return self.key.token
async def _refresh_key(self):
data = {
"username": self.username,
"password": self.password,
}
payload = await self._request("POST", "/login", token_request=True, json=data)
self.key = AccessToken(payload["token"], extract_expiry_from_jwt_token(payload["token"]))
async def get_link(self, player_tag: str) -> typing.Optional[int]:
"""Get a linked discord ID of a player tag.
        Player tags can be found either in game or from clan member lists.
Parameters
----------
player_tag: str
The player tag to search for.
Returns
--------
Optional[:class:`int`]
The discord ID linked to the player, or ``None`` if no link found.
"""
data = await self._request("GET", "/links/{}".format(correct_tag(player_tag, prefix="")))
try:
return int(data[0]["discordId"])
except (IndexError, KeyError, TypeError):
return None
async def get_links(self, *player_tag: str) -> typing.List[typing.Tuple[str, typing.Optional[int]]]:
r"""Get linked discord IDs for an iterable of player tags.
        Player tags can be found either in game or from clan member lists.
This is the recommended method to use when fetching links for multiple tags as it uses a different endpoint.
Parameters
----------
\*player_tag: :class:`str`
The player tags to search for.
Returns
--------
        List[Tuple[:class:`str`, Optional[:class:`int`]]]
A list of player_tag, discord_id tuple matches. Discord ID will be ``None`` if not found.
Example
--------
.. code-block:: python3
links = await client.get_links("#tag1", "#tag2", "#tag3")
for player_tag, discord_id in links:
print(player_tag, discord_id)
"""
tags = [correct_tag(tag, prefix="") for tag in player_tag]
data = await self._request("POST", "/links/batch", json=tags)
data = data or []
unclaimed_tags = set("#" + tag for tag in tags) - set(p["playerTag"] for p in data)
return [(p["playerTag"], int(p["discordId"])) for p in data] + [(tag, None) for tag in unclaimed_tags]
async def get_linked_players(self, discord_id: int) -> typing.List[str]:
"""Get a list of player tags linked to a discord ID.
Parameters
----------
discord_id: int
The discord ID to search for.
Returns
--------
List[:class:`str`]
A list of player tags attached to the discord ID. If no links found, this will be an empty list.
"""
data = await self._request("GET", "/links/{}".format(discord_id))
if not data:
return []
return [item["playerTag"] for item in data]
async def get_many_linked_players(self, *discord_id: int) -> typing.List[typing.Tuple[str, int]]:
r"""Get a linked discord ID of a player tag.
This is the recommended method to use when fetching links for multiple IDs as it uses a different endpoint.
Parameters
-----------
        \*discord_id: :class:`int`
The discord IDs to search for.
Returns
--------
        List[Tuple[:class:`str`, :class:`int`]]
            A list containing (player_tag, discord_id) matches.
Example
-------
.. code-block:: python3
links = await client.get_many_linked_players(123456789, 234567890, 345678901)
for player_tag, discord_id in links:
print("{} is linked to {}".format(discord_id, player_tag))
"""
data = await self._request("POST", "/links/batch", json=[str(n) for n in discord_id])
if not data:
return []
return [(n["playerTag"], int(n["discordId"])) for n in data]
async def add_link(self, player_tag: str, discord_id: int):
"""Creates a link between a player tag and a discord ID for the shared junkies database.
        Player tags can be found either in game or from clan member lists.
Parameters
----------
player_tag : str
The player tag to add the link to.
discord_id: int
The discord ID to add the link to.
"""
data = {"playerTag": correct_tag(player_tag, prefix=""), "discordId": str(discord_id)}
return await self._request("POST", "/links", json=data)
async def delete_link(self, player_tag: str):
"""Deletes a link between a player tag and a discord ID for the shared junkies database.
        Player tags can be found either in game or from clan member lists.
Parameters
----------
player_tag : str
The player tag to remove the link from.
"""
return await self._request("DELETE", "/links/{}".format(correct_tag(player_tag, prefix="")))
| 34.357401
| 116
| 0.618262
|
0974afc3c8794d148b13d3236c2636a05d8d74b1
| 1,702
|
py
|
Python
|
data/level/level10363.py
|
levelupai/match3-level-similarity
|
cc9b28b8741b41bea1273c8bc9b4d265d79a1dca
|
[
"Apache-2.0"
] | null | null | null |
data/level/level10363.py
|
levelupai/match3-level-similarity
|
cc9b28b8741b41bea1273c8bc9b4d265d79a1dca
|
[
"Apache-2.0"
] | 6
|
2020-07-04T02:53:08.000Z
|
2022-03-11T23:53:14.000Z
|
data/level/level10363.py
|
levelupai/match3-level-similarity
|
cc9b28b8741b41bea1273c8bc9b4d265d79a1dca
|
[
"Apache-2.0"
] | 3
|
2019-12-31T11:42:59.000Z
|
2021-03-28T20:06:13.000Z
|
data = {'level_index': 10363, 'move_count': '30',
'board_info': {(0, 8): {}, (0, 7): {}, (0, 6): {}, (0, 5): {}, (0, 4): {}, (0, 3): {}, (0, 2): {}, (0, 1): {},
(0, 0): {'fall_point': (0, -1)}, (1, 8): {}, (1, 7): {}, (1, 6): {}, (1, 5): {}, (1, 4): {},
(1, 3): {}, (1, 2): {}, (1, 1): {}, (1, 0): {'fall_point': (0, -1)}, (2, 8): {}, (2, 7): {},
(2, 6): {}, (2, 5): {}, (2, 4): {}, (2, 3): {}, (2, 1): {}, (2, 0): {'fall_point': (0, -1)},
(3, 8): {}, (3, 7): {}, (3, 6): {}, (3, 4): {}, (3, 0): {'fall_point': (0, -1)},
(4, 7): {'bg_number': 41}, (4, 5): {}, (4, 4): {}, (4, 3): {}, (4, 1): {'bg_number': 41},
(5, 6): {'bg_number': 41}, (5, 5): {'base': (13, 1), 'cover': (63, 1)},
(5, 4): {'bg_number': 41}, (5, 3): {'base': (13, 1), 'cover': (63, 1)},
(5, 2): {'bg_number': 41}, (6, 7): {'bg_number': 41}, (6, 5): {}, (6, 4): {}, (6, 3): {},
(6, 1): {'bg_number': 41}, (7, 8): {}, (7, 7): {}, (7, 6): {}, (7, 4): {},
(7, 0): {'fall_point': (0, -1)}, (8, 8): {}, (8, 7): {}, (8, 6): {}, (8, 5): {}, (8, 4): {},
(8, 3): {}, (8, 1): {}, (8, 0): {'fall_point': (0, -1)}, (9, 8): {}, (9, 7): {}, (9, 6): {},
(9, 5): {}, (9, 4): {}, (9, 3): {}, (9, 2): {}, (9, 1): {}, (9, 0): {'fall_point': (0, -1)},
(10, 8): {}, (10, 7): {}, (10, 6): {}, (10, 5): {}, (10, 4): {}, (10, 3): {}, (10, 2): {},
(10, 1): {}, (10, 0): {'fall_point': (0, -1)}}, 'trans_info': {(0, 0): {41: 70}}}
| 100.117647
| 118
| 0.249119
|
3f9483fd7423dd03a262739e5f7072b81df8877a
| 475
|
py
|
Python
|
pythontutorials/books/AutomateTheBoringStuff/Ch11/P1_mapIt.py
|
JoseALermaIII/python-tutorials
|
9d6cb78beec0bb55e27c49da1217317ba4d5f4fc
|
[
"MIT"
] | 2
|
2017-04-20T02:57:19.000Z
|
2018-10-12T20:15:47.000Z
|
pythontutorials/books/AutomateTheBoringStuff/Ch11/P1_mapIt.py
|
JoseALermaIII/python-tutorials
|
9d6cb78beec0bb55e27c49da1217317ba4d5f4fc
|
[
"MIT"
] | 8
|
2021-03-18T21:50:16.000Z
|
2022-03-11T23:38:01.000Z
|
pythontutorials/books/AutomateTheBoringStuff/Ch11/P1_mapIt.py
|
JoseALermaIII/python-tutorials
|
9d6cb78beec0bb55e27c49da1217317ba4d5f4fc
|
[
"MIT"
] | 3
|
2018-08-30T20:30:50.000Z
|
2022-01-18T13:40:51.000Z
|
#! python3
"""Map it
Launches a map in the browser using an address from the command line or clipboard.
"""
def main():
import webbrowser, sys
from pyperclip import paste
if len(sys.argv) > 1:
# Get address from command line.
address = ' '.join(sys.argv[1:])
else:
# Get address from clipboard.
address = paste()
webbrowser.open("https://www.google.com/maps/place/" + address)
if __name__ == '__main__':
main()
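# Illustrative invocation (the address is a placeholder):
#   $ python P1_mapIt.py 1600 Amphitheatre Parkway, Mountain View, CA
# With no arguments, the address currently on the clipboard is used instead.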
| 19
| 82
| 0.618947
|
e9cca667220497a295845144c207f60f61a677f3
| 1,744
|
py
|
Python
|
pose_vis.py
|
rancheng/SE3_Pose_Interp
|
14ce5db190ec34ac6c5fc013aaa0b57711427818
|
[
"MIT"
] | 1
|
2022-02-17T06:30:36.000Z
|
2022-02-17T06:30:36.000Z
|
pose_vis.py
|
rancheng/SE3_Pose_Interp
|
14ce5db190ec34ac6c5fc013aaa0b57711427818
|
[
"MIT"
] | null | null | null |
pose_vis.py
|
rancheng/SE3_Pose_Interp
|
14ce5db190ec34ac6c5fc013aaa0b57711427818
|
[
"MIT"
] | null | null | null |
from evo.tools import file_interface
from evo.tools import plot
import matplotlib.pyplot as plt
import argparse
# temporarily override some package settings
from evo.tools.settings import SETTINGS
SETTINGS.plot_usetex = False
SETTINGS.plot_axis_marker_scale = 0.1
DROP_FRAMES = True  # drop some keyframes to make the visualization easier to read
def main(kf_pose, full_pose):
traj_kf = file_interface.read_tum_trajectory_file(kf_pose)
traj_full = file_interface.read_tum_trajectory_file(full_pose)
fig = plt.figure()
ax = plot.prepare_axis(fig, plot.PlotMode.xyz)
traj_by_label = {
"keyframe pose": traj_kf,
"full-seq pose": traj_full
}
plot.traj(ax, plot.PlotMode.xyz, traj_kf,
style=SETTINGS.plot_reference_linestyle,
color=SETTINGS.plot_reference_color, label='keyframe pose',
alpha=SETTINGS.plot_reference_alpha)
plot.traj(ax, plot.PlotMode.xyz, traj_full,
style=SETTINGS.plot_trajectory_linestyle,
color='green', label='full-seq pose',
alpha=SETTINGS.plot_reference_alpha)
plot.draw_coordinate_axes(ax, traj_kf, plot.PlotMode.xyz,
SETTINGS.plot_axis_marker_scale)
plot.draw_coordinate_axes(ax, traj_full, plot.PlotMode.xyz,
SETTINGS.plot_axis_marker_scale * 0.1)
fig.axes.append(ax)
plt.show()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Visualize SE3 pose.")
parser.add_argument("--kf_pose", help="key frame pose in TUM file format.")
parser.add_argument("--full_pose", help="all-frame pose file in TUM format.")
args = parser.parse_args()
main(args.kf_pose, args.full_pose)
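# Illustrative invocation (file names are placeholders; both are TUM-format trajectories):
#   $ python pose_vis.py --kf_pose keyframe_poses.txt --full_pose full_poses.txt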
| 38.755556
| 81
| 0.699541
|
75e49f5b8e901efd7480e8383689fa28142b95a1
| 11,617
|
py
|
Python
|
zhmcclient/_hba.py
|
eaibmz/python-zhmcclient
|
901327b932cf7f0175bbb0d18638abbdb82eac04
|
[
"Apache-2.0"
] | null | null | null |
zhmcclient/_hba.py
|
eaibmz/python-zhmcclient
|
901327b932cf7f0175bbb0d18638abbdb82eac04
|
[
"Apache-2.0"
] | null | null | null |
zhmcclient/_hba.py
|
eaibmz/python-zhmcclient
|
901327b932cf7f0175bbb0d18638abbdb82eac04
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016-2021 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A :term:`HBA` (Host Bus Adapter) is a logical entity that provides a
:term:`Partition` with access to external storage area networks (SANs) through
an :term:`FCP Adapter`. More specifically, an HBA connects a Partition with an
:term:`Adapter Port` on an FCP Adapter.
HBA resources are contained in Partition resources.
HBA resources only exist in :term:`CPCs <CPC>` that are in DPM mode and when
the "dpm-storage-management" feature is not enabled. See section
:ref:`Storage Groups` for details. When the "dpm-storage-management" feature is
enabled, :term:`virtual HBAs <HBA>` are represented as
:term:`Virtual Storage Resource` resources.
"""
from __future__ import absolute_import
import copy
from ._manager import BaseManager
from ._resource import BaseResource
from ._logging import logged_api_call
from ._utils import matches_filters
__all__ = ['HbaManager', 'Hba']
class HbaManager(BaseManager):
"""
Manager providing access to the :term:`HBAs <HBA>` in a particular
:term:`Partition`.
Derived from :class:`~zhmcclient.BaseManager`; see there for common methods
and attributes.
Objects of this class are not directly created by the user; they are
accessible via the following instance variable of a
:class:`~zhmcclient.Partition` object (in DPM mode):
* :attr:`~zhmcclient.Partition.hbas`
Note that this instance variable will be `None` if the
"dpm-storage-management" feature is enabled.
"""
def __init__(self, partition):
# This function should not go into the docs.
# Parameters:
# partition (:class:`~zhmcclient.Partition`):
# Partition defining the scope for this manager.
super(HbaManager, self).__init__(
resource_class=Hba,
class_name='hba',
session=partition.manager.session,
parent=partition,
base_uri='{}/hbas'.format(partition.uri),
oid_prop='element-id',
uri_prop='element-uri',
name_prop='name',
query_props=[],
list_has_name=False)
@property
def partition(self):
"""
:class:`~zhmcclient.Partition`: :term:`Partition` defining the scope
for this manager.
"""
return self._parent
@logged_api_call
def list(self, full_properties=False, filter_args=None):
"""
List the HBAs in this Partition.
The returned HBAs have only the 'element-uri' property set.
Filtering is supported only for the 'element-uri' property.
Authorization requirements:
* Object-access permission to this Partition.
Parameters:
full_properties (bool):
Controls whether the full set of resource properties should be
retrieved, vs. only the short set as returned by the list
operation.
filter_args (dict):
Filter arguments that narrow the list of returned resources to
those that match the specified filter arguments. For details, see
:ref:`Filtering`.
`None` causes no filtering to happen, i.e. all resources are
returned.
Returns:
: A list of :class:`~zhmcclient.Hba` objects.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
resource_obj_list = []
uris = self.partition.get_property('hba-uris')
if uris:
for uri in uris:
resource_obj = self.resource_class(
manager=self,
uri=uri,
name=None,
properties=None)
if matches_filters(resource_obj, filter_args):
resource_obj_list.append(resource_obj)
if full_properties:
resource_obj.pull_full_properties()
self._name_uri_cache.update_from(resource_obj_list)
return resource_obj_list
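    # Illustrative usage (`partition` is a zhmcclient.Partition in DPM mode):
    #   hbas = partition.hbas.list(full_properties=True)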
@logged_api_call
def create(self, properties):
"""
Create and configure an HBA in this Partition.
The HBA must be backed by an adapter port on an FCP adapter.
The backing adapter port is specified in the "properties" parameter of
this method by setting the "adapter-port-uri" property to the URI of
the backing adapter port.
The value for the "adapter-port-uri" property can be determined from a
given adapter name and port index as shown in the following example
code (omitting any error handling):
.. code-block:: python
partition = ... # Partition object for the new HBA
adapter_name = 'FCP #1' # name of adapter with backing port
adapter_port_index = 0 # port index of backing port
adapter = partition.manager.cpc.adapters.find(name=adapter_name)
port = adapter.ports.find(index=adapter_port_index)
properties['adapter-port-uri'] = port.uri
Authorization requirements:
* Object-access permission to this Partition.
* Object-access permission to the backing Adapter for the new HBA.
* Task permission to the "Partition Details" task.
Parameters:
properties (dict): Initial property values.
Allowable properties are defined in section 'Request body contents'
in section 'Create HBA' in the :term:`HMC API` book.
Returns:
Hba:
The resource object for the new HBA.
The object will have its 'element-uri' property set as returned by
the HMC, and will also have the input properties set.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
result = self.session.post(self.partition.uri + '/hbas',
body=properties)
# There should not be overlaps, but just in case there are, the
# returned props should overwrite the input props:
props = copy.deepcopy(properties)
props.update(result)
name = props.get(self._name_prop, None)
uri = props[self._uri_prop]
hba = Hba(self, uri, name, props)
self._name_uri_cache.update(name, uri)
return hba
class Hba(BaseResource):
"""
Representation of an :term:`HBA`.
Derived from :class:`~zhmcclient.BaseResource`; see there for common
methods and attributes.
For the properties of an HBA resource, see section
'Data model - HBA Element Object' in section 'Partition object' in the
:term:`HMC API` book.
Objects of this class are not directly created by the user; they are
returned from creation or list functions on their manager object
(in this case, :class:`~zhmcclient.HbaManager`).
"""
def __init__(self, manager, uri, name=None, properties=None):
# This function should not go into the docs.
# Parameters:
# manager (:class:`~zhmcclient.HbaManager`):
# Manager object for this resource object.
# uri (string):
# Canonical URI path of the resource.
# name (string):
# Name of the resource.
# properties (dict):
# Properties to be set for this resource object. May be `None` or
# empty.
assert isinstance(manager, HbaManager), \
"Hba init: Expected manager type %s, got %s" % \
(HbaManager, type(manager))
super(Hba, self).__init__(manager, uri, name, properties)
@logged_api_call
def delete(self):
"""
Delete this HBA.
Authorization requirements:
* Object-access permission to the Partition containing this HBA.
* Task permission to the "Partition Details" task.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
# pylint: disable=protected-access
self.manager.session.delete(self._uri)
self.manager._name_uri_cache.delete(
self.get_properties_local(self.manager._name_prop, None))
@logged_api_call
def update_properties(self, properties):
"""
Update writeable properties of this HBA.
This method serializes with other methods that access or change
properties on the same Python object.
Authorization requirements:
* Object-access permission to the Partition containing this HBA.
* **TBD: Verify:** Object-access permission to the backing Adapter for
this HBA.
* Task permission to the "Partition Details" task.
Parameters:
properties (dict): New values for the properties to be updated.
Properties not to be updated are omitted.
Allowable properties are the properties with qualifier (w) in
section 'Data model - HBA Element Object' in the
:term:`HMC API` book.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
# pylint: disable=protected-access
self.manager.session.post(self.uri, body=properties)
is_rename = self.manager._name_prop in properties
if is_rename:
# Delete the old name from the cache
self.manager._name_uri_cache.delete(self.name)
self.update_properties_local(copy.deepcopy(properties))
if is_rename:
# Add the new name to the cache
self.manager._name_uri_cache.update(self.name, self.uri)
@logged_api_call
def reassign_port(self, port):
"""
Reassign this HBA to a new underlying :term:`FCP port`.
This method performs the HMC operation "Reassign Storage Adapter Port".
Authorization requirements:
* Object-access permission to the Partition containing this HBA.
* Object-access permission to the Adapter with the new Port.
* Task permission to the "Partition Details" task.
Parameters:
port (:class:`~zhmcclient.Port`): :term:`FCP port` to be used.
Raises:
:exc:`~zhmcclient.HTTPError`: See the HTTP status and reason codes of
operation "Reassign Storage Adapter Port" in the :term:`HMC API`
book.
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
body = {'adapter-port-uri': port.uri}
self.manager.session.post(
self._uri + '/operations/reassign-storage-adapter-port',
body=body)
self.update_properties_local(body)
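    # Illustrative usage (adapter name and port index are placeholders):
    #   adapter = partition.manager.cpc.adapters.find(name='FCP #2')
    #   new_port = adapter.ports.find(index=1)
    #   hba.reassign_port(new_port)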
| 34.369822
| 79
| 0.634329
|
99c3eb616a31b56cff5fd66f88cb213fc7fbf3f4
| 1,854
|
py
|
Python
|
src/pymodaq_plugins/daq_viewer_plugins/plugins_1D/daq_1Dviewer_Mock.py
|
jerlfan/pymodaq_plugins
|
c92d06b7766dc746a6f4b445865a5723a5fceb0d
|
[
"CECILL-B"
] | 5
|
2019-04-04T02:57:17.000Z
|
2019-10-23T18:13:57.000Z
|
src/pymodaq_plugins/daq_viewer_plugins/plugins_1D/daq_1Dviewer_Mock.py
|
jerlfan/pymodaq_plugins
|
c92d06b7766dc746a6f4b445865a5723a5fceb0d
|
[
"CECILL-B"
] | 7
|
2019-10-02T10:54:05.000Z
|
2021-03-29T13:55:39.000Z
|
src/pymodaq_plugins/daq_viewer_plugins/plugins_1D/daq_1Dviewer_Mock.py
|
jerlfan/pymodaq_plugins
|
c92d06b7766dc746a6f4b445865a5723a5fceb0d
|
[
"CECILL-B"
] | 43
|
2019-04-04T02:57:25.000Z
|
2022-02-04T15:26:32.000Z
|
from PyQt5.QtCore import QThread
from pymodaq.daq_viewer.utility_classes import DAQ_Viewer_base
import numpy as np
from easydict import EasyDict as edict
from collections import OrderedDict
from pymodaq.daq_utils.daq_utils import ThreadCommand, getLineInfo
from pymodaq.daq_utils.daq_utils import gauss1D
from pymodaq.daq_viewer.utility_classes import comon_parameters
from .daq_1Dviewer_Mock_spectro import DAQ_1DViewer_Mock_spectro
class DAQ_1DViewer_Mock(DAQ_1DViewer_Mock_spectro):
"""
Derived class from DAQ_1DViewer_Mock_spectro
Simulates a pixaleted spectrometer detector without builtin calibration of its energy axis
"""
    def __init__(self, parent=None,
                 params_state=None):  # params_state: saved state of the settings tree, used to restore parameters
super().__init__(parent, params_state)
def ini_detector(self, controller=None):
"""
Initialisation procedure of the detector updating the status dictionnary.
See Also
--------
set_Mock_data, daq_utils.ThreadCommand
"""
self.settings.child('x_axis', 'Npts').setValue(512)
self.settings.child('x_axis', 'x0').setValue(256)
self.settings.child('x_axis', 'dx').setValue(1)
super().ini_detector(controller)
# self.set_x_axis()
self.settings.child('Mock1', 'x0').setValue(125)
self.settings.child('Mock1', 'dx').setValue(20)
self.settings.child('Mock2', 'x0').setValue(325)
self.settings.child('Mock2', 'dx').setValue(20)
self.settings.child(('multi')).setValue(True)
self.settings.child(('rolling')).setValue(1)
self.settings.child(("laser_wl")).hide()
self.settings.child(('exposure_ms')).hide()
return self.status
| 37.08
| 161
| 0.695793
|
7b96648c3af8d957b55140f2c1d47a1b70d190ff
| 4,491
|
py
|
Python
|
electrum_atom/tests/test_mnemonic.py
|
bitcoin-atom/electrum-atom
|
156d4d54c5493bcda930efcb972a0c600c36a11d
|
[
"MIT"
] | 4
|
2018-11-17T23:40:04.000Z
|
2021-11-09T20:08:18.000Z
|
electrum_atom/tests/test_mnemonic.py
|
bitcoin-atom/electrum-atom
|
156d4d54c5493bcda930efcb972a0c600c36a11d
|
[
"MIT"
] | 1
|
2019-11-12T03:09:15.000Z
|
2019-11-12T03:09:15.000Z
|
electrum_atom/tests/test_mnemonic.py
|
bitcoin-atom/electrum-atom
|
156d4d54c5493bcda930efcb972a0c600c36a11d
|
[
"MIT"
] | 1
|
2018-09-11T23:30:16.000Z
|
2018-09-11T23:30:16.000Z
|
from electrum_atom import keystore
from electrum_atom import mnemonic
from electrum_atom import old_mnemonic
from electrum_atom.util import bh2u, bfh
from electrum_atom.bitcoin import is_new_seed
from electrum_atom.version import SEED_PREFIX_SW
from . import SequentialTestCase
from .test_wallet_vertical import UNICODE_HORROR
SEED_WORDS_JAPANESE = 'なのか ひろい しなん まなぶ つぶす さがす おしゃれ かわく おいかける けさき かいとう さたん'
assert bh2u(SEED_WORDS_JAPANESE.encode('utf8')) == 'e381aae381aee3818b20e381b2e3828de3818420e38197e381aae3829320e381bee381aae381b5e3829920e381a4e381b5e38299e3819920e38195e3818be38299e3819920e3818ae38197e38283e3828c20e3818be3828fe3818f20e3818ae38184e3818be38191e3828b20e38191e38195e3818d20e3818be38184e381a8e3818620e38195e3819fe38293'
SEED_WORDS_CHINESE = '眼 悲 叛 改 节 跃 衡 响 疆 股 遂 冬'
assert bh2u(SEED_WORDS_CHINESE.encode('utf8')) == 'e79cbc20e682b220e58f9b20e694b920e88a8220e8b78320e8a1a120e5938d20e7968620e882a120e9818220e586ac'
PASSPHRASE_CHINESE = '给我一些测试向量谷歌'
assert bh2u(PASSPHRASE_CHINESE.encode('utf8')) == 'e7bb99e68891e4b880e4ba9be6b58be8af95e59091e9878fe8b0b7e6ad8c'
class Test_NewMnemonic(SequentialTestCase):
def test_mnemonic_to_seed_basic(self):
seed = mnemonic.Mnemonic.mnemonic_to_seed(mnemonic='foobar', passphrase='none')
self.assertEqual('741b72fd15effece6bfe5a26a52184f66811bd2be363190e07a42cca442b1a5bb22b3ad0eb338197287e6d314866c7fba863ac65d3f156087a5052ebc7157fce',
bh2u(seed))
def test_mnemonic_to_seed_japanese(self):
words = SEED_WORDS_JAPANESE
self.assertTrue(is_new_seed(words))
m = mnemonic.Mnemonic(lang='ja')
self.assertEqual(1938439226660562861250521787963972783469, m.mnemonic_decode(words))
seed = mnemonic.Mnemonic.mnemonic_to_seed(mnemonic=words, passphrase='')
self.assertEqual('d3eaf0e44ddae3a5769cb08a26918e8b308258bcb057bb704c6f69713245c0b35cb92c03df9c9ece5eff826091b4e74041e010b701d44d610976ce8bfb66a8ad',
bh2u(seed))
def test_mnemonic_to_seed_japanese_with_unicode_horror(self):
words = SEED_WORDS_JAPANESE
self.assertTrue(is_new_seed(words))
seed = mnemonic.Mnemonic.mnemonic_to_seed(mnemonic=words, passphrase=UNICODE_HORROR)
self.assertEqual('251ee6b45b38ba0849e8f40794540f7e2c6d9d604c31d68d3ac50c034f8b64e4bc037c5e1e985a2fed8aad23560e690b03b120daf2e84dceb1d7857dda042457',
bh2u(seed))
def test_mnemonic_to_seed_chinese(self):
words = SEED_WORDS_CHINESE
self.assertTrue(is_new_seed(words, prefix=SEED_PREFIX_SW))
m = mnemonic.Mnemonic(lang='zh')
self.assertEqual(3083737086352778425940060465574397809099, m.mnemonic_decode(words))
seed = mnemonic.Mnemonic.mnemonic_to_seed(mnemonic=words, passphrase='')
self.assertEqual('0b9077db7b5a50dbb6f61821e2d35e255068a5847e221138048a20e12d80b673ce306b6fe7ac174ebc6751e11b7037be6ee9f17db8040bb44f8466d519ce2abf',
bh2u(seed))
def test_mnemonic_to_seed_chinese_with_passphrase(self):
words = SEED_WORDS_CHINESE
passphrase = PASSPHRASE_CHINESE
self.assertTrue(is_new_seed(words, prefix=SEED_PREFIX_SW))
seed = mnemonic.Mnemonic.mnemonic_to_seed(mnemonic=words, passphrase=passphrase)
self.assertEqual('6c03dd0615cf59963620c0af6840b52e867468cc64f20a1f4c8155705738e87b8edb0fc8a6cee4085776cb3a629ff88bb1a38f37085efdbf11ce9ec5a7fa5f71',
bh2u(seed))
def test_random_seeds(self):
iters = 10
m = mnemonic.Mnemonic(lang='en')
for _ in range(iters):
seed = m.make_seed()
i = m.mnemonic_decode(seed)
self.assertEqual(m.mnemonic_encode(i), seed)
class Test_OldMnemonic(SequentialTestCase):
def test(self):
seed = '8edad31a95e7d59f8837667510d75a4d'
result = old_mnemonic.mn_encode(seed)
words = 'hardly point goal hallway patience key stone difference ready caught listen fact'
self.assertEqual(result, words.split())
self.assertEqual(old_mnemonic.mn_decode(result), seed)
class Test_BIP39Checksum(SequentialTestCase):
def test(self):
mnemonic = u'gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog'
is_checksum_valid, is_wordlist_valid = keystore.bip39_is_checksum_valid(mnemonic)
self.assertTrue(is_wordlist_valid)
self.assertTrue(is_checksum_valid)
| 48.815217
| 333
| 0.77377
|
535a514b285b8823ac60881e023b0d61e08faf57
| 4,438
|
py
|
Python
|
authentication/forms.py
|
HiroshiFuu/cs-balloting
|
565eb3ee88769d88b27705828c10c7b5be964ef5
|
[
"MIT"
] | null | null | null |
authentication/forms.py
|
HiroshiFuu/cs-balloting
|
565eb3ee88769d88b27705828c10c7b5be964ef5
|
[
"MIT"
] | null | null | null |
authentication/forms.py
|
HiroshiFuu/cs-balloting
|
565eb3ee88769d88b27705828c10c7b5be964ef5
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from django import forms
from django.contrib.auth import password_validation
from django.contrib.auth.forms import UserCreationForm
from django.utils.translation import gettext as _
from django.core.exceptions import ValidationError
from .models import AuthUser
from .models import Company
from .constants import USER_TYPES
from .constants import USER_TYPE_COMPANY
from .constants import USER_TYPE_USER
class LoginForm(forms.Form):
username = forms.CharField(
widget=forms.TextInput(
attrs={
"placeholder": "Username",
"class": "form-control"
}
))
password = forms.CharField(
widget=forms.PasswordInput(
attrs={
"placeholder": "Password",
"class": "form-control"
}
))
def gen_random_password():
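    # The fixed '!2Wq' suffix is appended so the generated password is likely to
    # satisfy common complexity validators (symbol, digit, upper- and lower-case).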
return AuthUser.objects.make_random_password() + '!2Wq'
class CustomCompanyCreationForm(UserCreationForm):
error_messages = {
"password_mismatch": _("The two password fields didn’t match."),
}
random_password = gen_random_password()
username = forms.EmailField(
label=_("Username"),
widget=forms.EmailInput(
attrs={
'placeholder': 'E-mail',
'class': 'form-control'
}
),
required=True
)
company = forms.ModelChoiceField(
queryset=Company.objects.all(),
required=False
)
password1 = forms.CharField(
label=_("Password"),
strip=False,
widget=forms.PasswordInput(
attrs={
"placeholder": "Password",
"autocomplete": "off",
"class": "form-control",
},
render_value=True),
help_text=password_validation.password_validators_help_text_html(),
initial=random_password,
)
password2 = forms.CharField(
label=_("Password Confirmation"),
widget=forms.PasswordInput(
attrs={
"placeholder": "Password Confirmation",
"autocomplete": "off",
"class": "form-control"
},
render_value=True),
strip=False,
help_text=_("Enter the same password as before, for verification."),
initial=random_password,
)
is_staff = forms.BooleanField(
label="Is Company User",
initial=True,
required=False,
)
class Meta:
model = AuthUser
fields = '__all__'
class CustomUserCreationForm(UserCreationForm):
error_messages = {
"password_mismatch": _("The two password fields didn’t match."),
}
random_password = gen_random_password()
username = forms.EmailField(
label=_("Username"),
widget=forms.EmailInput(
attrs={
'placeholder': 'E-mail',
'class': 'form-control'
}
),
required=True
)
weight = forms.IntegerField(
required=True
)
# user_type = forms.ChoiceField(
# choices=USER_TYPES,
# initial=USER_TYPE_USER,
# required=True,
# disabled=True
# )
# company_user = forms.ModelChoiceField(
# queryset=AuthUser.objects.all().filter(user_type=USER_TYPE_COMPANY, is_staff=True, is_active=True),
# required=True,
# disabled=True
# )
is_staff = forms.BooleanField(
label="Is Company User",
initial=True,
required=False,
)
password1 = forms.CharField(
label=_("Password"),
strip=False,
widget=forms.PasswordInput(attrs={
"placeholder": "Password",
"autocomplete": "off",
"class": "form-control"},
render_value=True),
help_text=password_validation.password_validators_help_text_html(),
initial=random_password,
)
password2 = forms.CharField(
label=_("Password Confirmation"),
widget=forms.PasswordInput(attrs={
"placeholder": "Password Confirmation",
"autocomplete": "off",
"class": "form-control"},
render_value=True),
strip=False,
help_text=_("Enter the same password as before, for verification."),
initial=random_password,
)
class Meta:
model = AuthUser
fields = ('password1', 'password2')
| 28.632258
| 109
| 0.583146
|
3993278ce30c4911993455f7920ac78edeebc3aa
| 4,038
|
py
|
Python
|
phase2_reducer.py
|
someshdube/Crowdsourcing
|
497fd46415b4c0fc2be69d42e0661d7fe423b278
|
[
"Apache-2.0"
] | null | null | null |
phase2_reducer.py
|
someshdube/Crowdsourcing
|
497fd46415b4c0fc2be69d42e0661d7fe423b278
|
[
"Apache-2.0"
] | null | null | null |
phase2_reducer.py
|
someshdube/Crowdsourcing
|
497fd46415b4c0fc2be69d42e0661d7fe423b278
|
[
"Apache-2.0"
] | null | null | null |
"""
Script to create a list of question and single answer to them. The answers that pass a certain merge
threshold will be given only.
"""
# import spacy
from csgame.nlp_loader import nlp
import RedundancyV1
from collections import defaultdict
from operator import itemgetter
class AnswerReducer:
"""
Class to contain all methods to reduce the redundancy for answers
"""
def __init__(self, answers=None, questions=None):
"""
Class initializer.
"""
if answers is None or not isinstance(answers[0], list):
raise ValueError("No answer value is given. Or answer is not in form [[ans, id]]")
if questions is None or not isinstance(questions[0], list):
raise ValueError("No question value is given. Or question is not the form [[ques, id]]")
self.answers = answers
self.questions = questions
self.grouped_questions = {}
self.reducer = RedundancyV1.RedundancyRemover()
def grouper(self):
"""
Method to group the answers based on their ID to a given question
:return: Count of groups made
"""
for each in self.answers:
if each[1] in self.grouped_questions.keys():
self.grouped_questions[each[1]].append(each[0])
else:
# print("Came here")
self.grouped_questions.setdefault(each[1], [each[0]])
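        # Illustrative result, given answers [["a1", 1], ["a2", 1], ["b1", 2]]:
        #   self.grouped_questions == {1: ["a1", "a2"], 2: ["b1"]}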
def remove_redundant_answers(self, answers):
"""
method will return a new list of unique questions and a dict of the ID'd merged.
:param answers: The set of answers [answer, id]
:return: new list of reduced answers and id merge list
"""
# Remove taboo words from the sentence
all_new = (' '.join(RedundancyV1.remove_taboo_words(answer)) for answer in answers)
all_old = []
old_new_pairs = defaultdict(list)
docs_old = list(map(nlp, all_old))
for qid_new, q_new in zip(answers, all_new):
doc_new = nlp(q_new)
for qid_old, doc_old in zip(all_old, docs_old):
# Uncomment the Prints below to see output. Remove them for production version
val = doc_new.similarity(doc_old)
if val > 0.70:
#print(doc_old.text)
#print(doc_new.text)
#print(val)
#print("\n_______________________\n")
old_new_pairs[qid_old].append(qid_new)
break
else: # if not broken
# If code reaches this point merge the questions
all_old.append(qid_new)
docs_old.append(doc_new)
return old_new_pairs
def reduce_within_groups(self):
"""
Reduces answers in groups and returns the answers.
Example if three out of 4 answers are similar then it will make those into a group and give them
:return: common answer and its id
"""
qid_to_ans = {}
for question, answers in self.grouped_questions.items():
old_new_pairs = self.remove_redundant_answers(answers)
            lens = [(k, len(v)) for k, v in old_new_pairs.items()]
            lens.sort(key=itemgetter(1), reverse=True)  # most frequently matched answer first
if len(lens) == 0 or (len(lens) == 2 and lens[0][1] == lens[1][1]):
qid_to_ans[question] = []
else:
qid_to_ans[question] = lens[0][0]
return qid_to_ans
if __name__ == "__main__":
questions = [
["What is the color of the cat", 1],
["How many legs does the cat have", 2]
]
answers = [
["The color is blue", 1],
["the cat is blue", 1],
["The sheep is pink", 1],
["The shape is pink", 1],
["The cat has 4 legs", 2],
["The cat has four legs", 2]
]
test_obj = AnswerReducer(questions=questions, answers=answers)
test_obj.grouper()
i = test_obj.reduce_within_groups()
print(i)
| 36.709091
| 104
| 0.589896
|
856bd31638fe4b81f1665cc7133ea88f508ae415
| 46,198
|
py
|
Python
|
sdk/servicebus/azure-servicebus/azure/servicebus/_common/message.py
|
sima-zhu/azure-sdk-for-python
|
a413dc783f0df7dc65e9c2ef9762fabff1708c4e
|
[
"MIT"
] | null | null | null |
sdk/servicebus/azure-servicebus/azure/servicebus/_common/message.py
|
sima-zhu/azure-sdk-for-python
|
a413dc783f0df7dc65e9c2ef9762fabff1708c4e
|
[
"MIT"
] | null | null | null |
sdk/servicebus/azure-servicebus/azure/servicebus/_common/message.py
|
sima-zhu/azure-sdk-for-python
|
a413dc783f0df7dc65e9c2ef9762fabff1708c4e
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# -------------------------------------------------------------------------
# pylint: disable=too-many-lines
import datetime
import uuid
import functools
import logging
from typing import Optional, List, Union, Iterable, TYPE_CHECKING, Callable, Any
import uamqp.message
from uamqp.constants import MessageState
from .constants import (
_BATCH_MESSAGE_OVERHEAD_COST,
SETTLEMENT_ABANDON,
SETTLEMENT_COMPLETE,
SETTLEMENT_DEFER,
SETTLEMENT_DEADLETTER,
ReceiveMode,
_X_OPT_ENQUEUED_TIME,
_X_OPT_SEQUENCE_NUMBER,
_X_OPT_ENQUEUE_SEQUENCE_NUMBER,
_X_OPT_PARTITION_KEY,
_X_OPT_VIA_PARTITION_KEY,
_X_OPT_LOCKED_UNTIL,
_X_OPT_LOCK_TOKEN,
_X_OPT_SCHEDULED_ENQUEUE_TIME,
_X_OPT_DEAD_LETTER_SOURCE,
MGMT_RESPONSE_MESSAGE_EXPIRATION,
MGMT_REQUEST_DEAD_LETTER_REASON,
MGMT_REQUEST_DEAD_LETTER_ERROR_DESCRIPTION,
RECEIVER_LINK_DEAD_LETTER_REASON,
RECEIVER_LINK_DEAD_LETTER_ERROR_DESCRIPTION,
MESSAGE_COMPLETE,
MESSAGE_DEAD_LETTER,
MESSAGE_ABANDON,
MESSAGE_DEFER,
MESSAGE_RENEW_LOCK,
DEADLETTERNAME,
PROPERTIES_DEAD_LETTER_REASON,
PROPERTIES_DEAD_LETTER_ERROR_DESCRIPTION,
ANNOTATION_SYMBOL_PARTITION_KEY,
ANNOTATION_SYMBOL_VIA_PARTITION_KEY,
ANNOTATION_SYMBOL_SCHEDULED_ENQUEUE_TIME,
ANNOTATION_SYMBOL_KEY_MAP
)
from ..exceptions import (
MessageAlreadySettled,
MessageLockExpired,
SessionLockExpired,
MessageSettleFailed,
MessageContentTooLarge,
ServiceBusError)
from .utils import utc_from_timestamp, utc_now, transform_messages_to_sendable_if_needed
if TYPE_CHECKING:
from .._servicebus_receiver import ServiceBusReceiver
from .._servicebus_session_receiver import ServiceBusSessionReceiver
_LOGGER = logging.getLogger(__name__)
class Message(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes
"""A Service Bus Message.
:param body: The data to send in a single message.
:type body: Union[str, bytes]
:keyword dict properties: The user defined properties on the message.
:keyword str session_id: The session identifier of the message for a sessionful entity.
:keyword str message_id: The id to identify the message.
:keyword datetime.datetime scheduled_enqueue_time_utc: The utc scheduled enqueue time to the message.
:keyword datetime.timedelta time_to_live: The life duration of a message.
:keyword str content_type: The content type descriptor.
:keyword str correlation_id: The correlation identifier.
:keyword str label: The application specific label.
:keyword str partition_key: The partition key for sending a message to a partitioned entity.
:keyword str via_partition_key: The partition key for sending a message into an entity via a partitioned
transfer queue.
:keyword str to: The `to` address used for auto_forward chaining scenarios.
:keyword str reply_to: The address of an entity to send replies to.
:keyword str reply_to_session_id: The session identifier augmenting the `reply_to` address.
:keyword str encoding: The encoding for string data. Default is UTF-8.
:ivar AMQPMessage amqp_message: Advanced use only. The internal AMQP message payload that is sent or received.
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START send_complex_message]
:end-before: [END send_complex_message]
:language: python
:dedent: 4
:caption: Sending a message with additional properties
"""
def __init__(self, body, **kwargs):
# type: (Union[str, bytes], Any) -> None
# Although we might normally thread through **kwargs this causes
# problems as MessageProperties won't absorb spurious args.
self._encoding = kwargs.pop("encoding", 'UTF-8')
self._amqp_properties = uamqp.message.MessageProperties(encoding=self._encoding)
self._amqp_header = uamqp.message.MessageHeader()
if 'message' in kwargs:
# Note: This cannot be renamed until UAMQP no longer relies on this specific name.
self.message = kwargs['message']
self._amqp_properties = self.message.properties
self._amqp_header = self.message.header
else:
self._build_message(body)
self.properties = kwargs.pop("properties", None)
self.session_id = kwargs.pop("session_id", None)
self.message_id = kwargs.get("message_id", None)
self.content_type = kwargs.pop("content_type", None)
self.correlation_id = kwargs.pop("correlation_id", None)
self.to = kwargs.pop("to", None)
self.reply_to = kwargs.pop("reply_to", None)
self.reply_to_session_id = kwargs.pop("reply_to_session_id", None)
self.label = kwargs.pop("label", None)
self.scheduled_enqueue_time_utc = kwargs.pop("scheduled_enqueue_time_utc", None)
self.time_to_live = kwargs.pop("time_to_live", None)
self.partition_key = kwargs.pop("partition_key", None)
self.via_partition_key = kwargs.pop("via_partition_key", None)
# If message is the full message, amqp_message is the "public facing interface" for what we expose.
self.amqp_message = AMQPMessage(self.message)
def __str__(self):
return str(self.message)
def _build_message(self, body):
if isinstance(body, list) and body: # TODO: This only works for a list of bytes/strings
self.message = uamqp.Message(body[0], properties=self._amqp_properties, header=self._amqp_header)
for more in body[1:]:
self.message._body.append(more) # pylint: disable=protected-access
elif body is None:
raise ValueError("Message body cannot be None.")
else:
self.message = uamqp.Message(body, properties=self._amqp_properties, header=self._amqp_header)
def _set_message_annotations(self, key, value):
if not self.message.annotations:
self.message.annotations = {}
if isinstance(self, ReceivedMessage):
try:
del self.message.annotations[key]
except KeyError:
pass
if value is None:
try:
del self.message.annotations[ANNOTATION_SYMBOL_KEY_MAP[key]]
except KeyError:
pass
else:
self.message.annotations[ANNOTATION_SYMBOL_KEY_MAP[key]] = value
def _to_outgoing_message(self):
# type: () -> Message
self.message.state = MessageState.WaitingToBeSent
self.message._response = None # pylint: disable=protected-access
return self
@property
def session_id(self):
# type: () -> str
"""The session identifier of the message for a sessionful entity.
For sessionful entities, this application-defined value specifies the session affiliation of the message.
Messages with the same session identifier are subject to summary locking and enable exact in-order
processing and demultiplexing. For non-sessionful entities, this value is ignored.
See Message Sessions in `https://docs.microsoft.com/azure/service-bus-messaging/message-sessions`.
:rtype: str
"""
try:
return self._amqp_properties.group_id.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.group_id
@session_id.setter
def session_id(self, value):
# type: (str) -> None
self._amqp_properties.group_id = value
@property
def properties(self):
# type: () -> dict
"""The user defined properties on the message.
:rtype: dict
"""
return self.message.application_properties
@properties.setter
def properties(self, value):
# type: (dict) -> None
self.message.application_properties = value
@property
def partition_key(self):
# type: () -> Optional[str]
""" The partition key for sending a message to a partitioned entity.
Setting this value enables assigning related messages to the same internal partition, so that submission
sequence order is correctly recorded.
The partition is chosen by a hash function over this value and cannot be chosen directly.
See Partitioned queues and topics in
`https://docs.microsoft.com/azure/service-bus-messaging/service-bus-partitioning`.
:rtype: str
"""
p_key = None
try:
p_key = self.message.annotations.get(_X_OPT_PARTITION_KEY) or \
self.message.annotations.get(ANNOTATION_SYMBOL_PARTITION_KEY)
return p_key.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return p_key
@partition_key.setter
def partition_key(self, value):
# type: (str) -> None
self._set_message_annotations(_X_OPT_PARTITION_KEY, value)
@property
def via_partition_key(self):
# type: () -> Optional[str]
""" The partition key for sending a message into an entity via a partitioned transfer queue.
If a message is sent via a transfer queue in the scope of a transaction, this value selects the transfer
queue partition: This is functionally equivalent to `partition_key` and ensures that messages are kept
together and in order as they are transferred.
See Transfers and Send Via in
`https://docs.microsoft.com/azure/service-bus-messaging/service-bus-transactions#transfers-and-send-via`.
:rtype: str
"""
via_p_key = None
try:
via_p_key = self.message.annotations.get(_X_OPT_VIA_PARTITION_KEY) or \
self.message.annotations.get(ANNOTATION_SYMBOL_VIA_PARTITION_KEY)
return via_p_key.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return via_p_key
@via_partition_key.setter
def via_partition_key(self, value):
# type: (str) -> None
self._set_message_annotations(_X_OPT_VIA_PARTITION_KEY, value)
@property
def time_to_live(self):
# type: () -> Optional[datetime.timedelta]
"""The life duration of a message.
This value is the relative duration after which the message expires, starting from the instant the message
has been accepted and stored by the broker, as captured in `enqueued_time_utc`.
When not set explicitly, the assumed value is the DefaultTimeToLive for the respective queue or topic.
        A message-level time-to-live value cannot be longer than the entity's time-to-live setting;
        if it is, it is silently adjusted down to the entity's setting.
See Expiration in `https://docs.microsoft.com/azure/service-bus-messaging/message-expiration`
:rtype: ~datetime.timedelta
"""
if self._amqp_header and self._amqp_header.time_to_live:
return datetime.timedelta(milliseconds=self._amqp_header.time_to_live)
return None
@time_to_live.setter
def time_to_live(self, value):
# type: (datetime.timedelta) -> None
if not self._amqp_header:
self._amqp_header = uamqp.message.MessageHeader()
        if value is None:
            self._amqp_header.time_to_live = value
        elif isinstance(value, datetime.timedelta):
            # Use total_seconds() so days and sub-second parts are not silently dropped.
            self._amqp_header.time_to_live = int(value.total_seconds() * 1000)
        else:
            self._amqp_header.time_to_live = int(value) * 1000
@property
def scheduled_enqueue_time_utc(self):
# type: () -> Optional[datetime.datetime]
"""The utc scheduled enqueue time to the message.
This property can be used for scheduling when sending a message through `ServiceBusSender.send` method.
If cancelling scheduled messages is required, you should use the `ServiceBusSender.schedule` method,
which returns sequence numbers that can be used for future cancellation.
`scheduled_enqueue_time_utc` is None if not set.
:rtype: ~datetime.datetime
"""
if self.message.annotations:
timestamp = self.message.annotations.get(_X_OPT_SCHEDULED_ENQUEUE_TIME) or \
self.message.annotations.get(ANNOTATION_SYMBOL_SCHEDULED_ENQUEUE_TIME)
if timestamp:
try:
in_seconds = timestamp/1000.0
return utc_from_timestamp(in_seconds)
except TypeError:
return timestamp
return None
@scheduled_enqueue_time_utc.setter
def scheduled_enqueue_time_utc(self, value):
# type: (datetime.datetime) -> None
if not self._amqp_properties.message_id:
self._amqp_properties.message_id = str(uuid.uuid4())
self._set_message_annotations(_X_OPT_SCHEDULED_ENQUEUE_TIME, value)
@property
def body(self):
# type: () -> Union[bytes, Iterable[bytes]]
"""The body of the Message.
:rtype: bytes or Iterable[bytes]
"""
return self.message.get_data()
@property
def content_type(self):
# type: () -> str
"""The content type descriptor.
Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5,
for example "application/json".
:rtype: str
"""
try:
return self._amqp_properties.content_type.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.content_type
@content_type.setter
def content_type(self, val):
# type: (str) -> None
self._amqp_properties.content_type = val
@property
def correlation_id(self):
# type: () -> str
# pylint: disable=line-too-long
"""The correlation identifier.
Allows an application to specify a context for the message for the purposes of correlation, for example
reflecting the MessageId of a message that is being replied to.
See Message Routing and Correlation in
`https://docs.microsoft.com/azure/service-bus-messaging/service-bus-messages-payloads?#message-routing-and-correlation`.
:rtype: str
"""
try:
return self._amqp_properties.correlation_id.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.correlation_id
@correlation_id.setter
def correlation_id(self, val):
# type: (str) -> None
self._amqp_properties.correlation_id = val
@property
def label(self):
# type: () -> str
"""The application specific label.
This property enables the application to indicate the purpose of the message to the receiver in a standardized
fashion, similar to an email subject line.
:rtype: str
"""
try:
return self._amqp_properties.subject.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.subject
@label.setter
def label(self, val):
# type: (str) -> None
self._amqp_properties.subject = val
@property
def message_id(self):
# type: () -> str
"""The id to identify the message.
The message identifier is an application-defined value that uniquely identifies the message and its payload.
The identifier is a free-form string and can reflect a GUID or an identifier derived from the
application context. If enabled, the duplicate detection (see
`https://docs.microsoft.com/azure/service-bus-messaging/duplicate-detection`)
feature identifies and removes second and further submissions of messages with the same message id.
:rtype: str
"""
try:
return self._amqp_properties.message_id.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.message_id
@message_id.setter
def message_id(self, val):
# type: (str) -> None
self._amqp_properties.message_id = val
@property
def reply_to(self):
# type: () -> str
# pylint: disable=line-too-long
"""The address of an entity to send replies to.
This optional and application-defined value is a standard way to express a reply path to the receiver of
the message. When a sender expects a reply, it sets the value to the absolute or relative path of the queue
or topic it expects the reply to be sent to.
See Message Routing and Correlation in
`https://docs.microsoft.com/azure/service-bus-messaging/service-bus-messages-payloads?#message-routing-and-correlation`.
:rtype: str
"""
try:
return self._amqp_properties.reply_to.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.reply_to
@reply_to.setter
def reply_to(self, val):
# type: (str) -> None
self._amqp_properties.reply_to = val
@property
def reply_to_session_id(self):
# type: () -> str
# pylint: disable=line-too-long
"""The session identifier augmenting the `reply_to` address.
This value augments the `reply_to` information and specifies which session id should be set for the reply
when sent to the reply entity.
See Message Routing and Correlation in
`https://docs.microsoft.com/azure/service-bus-messaging/service-bus-messages-payloads?#message-routing-and-correlation`.
:rtype: str
"""
try:
return self._amqp_properties.reply_to_group_id.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.reply_to_group_id
@reply_to_session_id.setter
def reply_to_session_id(self, val):
# type: (str) -> None
self._amqp_properties.reply_to_group_id = val
@property
def to(self):
# type: () -> str
"""The `to` address.
This property is reserved for future use in routing scenarios and presently ignored by the broker itself.
Applications can use this value in rule-driven auto-forward chaining scenarios to indicate the intended
logical destination of the message.
See https://docs.microsoft.com/azure/service-bus-messaging/service-bus-auto-forwarding for more details.
:rtype: str
"""
try:
return self._amqp_properties.to.decode('UTF-8')
except (AttributeError, UnicodeDecodeError):
return self._amqp_properties.to
@to.setter
def to(self, val):
# type: (str) -> None
self._amqp_properties.to = val
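# Editor's note: a hedged usage sketch, not part of the original module. It
# illustrates how the properties above map onto the underlying AMQP fields;
# the literal values ("hello world", "session-1", etc.) are placeholders.
def _example_build_message():
    msg = Message(
        "hello world",
        session_id="session-1",                      # AMQP properties.group_id
        label="greeting",                            # AMQP properties.subject
        time_to_live=datetime.timedelta(minutes=5),  # stored as milliseconds in the header
    )
    msg.properties = {"origin": "sample"}            # AMQP application properties
    return msg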
class BatchMessage(object):
"""A batch of messages.
    Sending messages in a batch is more performant than sending individual messages.
BatchMessage helps you create the maximum allowed size batch of `Message` to improve sending performance.
Use the `add` method to add messages until the maximum batch size limit in bytes has been reached -
    at which point a `MessageContentTooLarge` error will be raised.
**Please use the create_batch method of ServiceBusSender
to create a BatchMessage object instead of instantiating a BatchMessage object directly.**
:ivar max_size_in_bytes: The maximum size of bytes data that a BatchMessage object can hold.
:vartype max_size_in_bytes: int
:ivar message: Internal AMQP BatchMessage object.
:vartype message: ~uamqp.BatchMessage
:param int max_size_in_bytes: The maximum size of bytes data that a BatchMessage object can hold.
"""
def __init__(self, max_size_in_bytes=None):
# type: (Optional[int]) -> None
self.max_size_in_bytes = max_size_in_bytes or uamqp.constants.MAX_MESSAGE_LENGTH_BYTES
self.message = uamqp.BatchMessage(data=[], multi_messages=False, properties=None)
self._size = self.message.gather()[0].get_message_encoded_size()
self._count = 0
self._messages = [] # type: List[Message]
def __repr__(self):
# type: () -> str
batch_repr = "max_size_in_bytes={}, message_count={}".format(
self.max_size_in_bytes, self._count
)
return "BatchMessage({})".format(batch_repr)
def __len__(self):
return self._count
def _from_list(self, messages):
for each in messages:
if not isinstance(each, Message):
                raise TypeError("Only Message or an iterable object containing Message objects are accepted. "
                                "Received instead: {}".format(each.__class__.__name__))
self.add(each)
@property
def size_in_bytes(self):
# type: () -> int
"""The combined size of the events in the batch, in bytes.
:rtype: int
"""
return self._size
def add(self, message):
# type: (Message) -> None
"""Try to add a single Message to the batch.
The total size of an added message is the sum of its body, properties, etc.
        If this added size results in the batch exceeding the maximum batch size, a
        `MessageContentTooLarge` error will be raised.
:param message: The Message to be added to the batch.
:type message: ~azure.servicebus.Message
:rtype: None
        :raises: :class:`~azure.servicebus.exceptions.MessageContentTooLarge`, when exceeding the size limit.
"""
message = transform_messages_to_sendable_if_needed(message)
message_size = message.message.get_message_encoded_size()
# For a BatchMessage, if the encoded_message_size of event_data is < 256, then the overhead cost to encode that
# message into the BatchMessage would be 5 bytes, if >= 256, it would be 8 bytes.
size_after_add = (
self._size
+ message_size
+ _BATCH_MESSAGE_OVERHEAD_COST[0 if (message_size < 256) else 1]
)
if size_after_add > self.max_size_in_bytes:
raise MessageContentTooLarge(
"BatchMessage has reached its size limit: {}".format(
self.max_size_in_bytes
)
)
self.message._body_gen.append(message) # pylint: disable=protected-access
self._size = size_after_add
self._count += 1
self._messages.append(message)
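# Editor's note: a hedged sketch of the batching flow described in the class
# docstring, assuming a ServiceBusSender `sender` created elsewhere; `pending`
# is an illustrative iterable of Message objects.
def _example_fill_and_send(sender, pending):
    batch = sender.create_batch()       # preferred over instantiating BatchMessage directly
    for msg in pending:
        try:
            batch.add(msg)
        except MessageContentTooLarge:
            sender.send(batch)          # flush the full batch
            batch = sender.create_batch()
            batch.add(msg)              # then retry the message in a fresh batch
    if len(batch):
        sender.send(batch)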
class PeekedMessage(Message):
"""A preview message.
This message is still on the queue, and unlocked.
A peeked message cannot be completed, abandoned, dead-lettered or deferred.
It has no lock token or expiry.
"""
def __init__(self, message):
# type: (uamqp.message.Message) -> None
super(PeekedMessage, self).__init__(None, message=message) # type: ignore
def _to_outgoing_message(self):
# type: () -> Message
amqp_message = self.message
amqp_body = amqp_message._body # pylint: disable=protected-access
if isinstance(amqp_body, uamqp.message.DataBody):
body = b''.join(amqp_body.data)
else:
# amqp_body is type of uamqp.message.ValueBody
body = amqp_body.data
return Message(
body=body,
content_type=self.content_type,
correlation_id=self.correlation_id,
label=self.label,
message_id=self.message_id,
partition_key=self.partition_key,
properties=self.properties,
reply_to=self.reply_to,
reply_to_session_id=self.reply_to_session_id,
session_id=self.session_id,
scheduled_enqueue_time_utc=self.scheduled_enqueue_time_utc,
time_to_live=self.time_to_live,
to=self.to,
via_partition_key=self.via_partition_key
)
@property
def dead_letter_error_description(self):
# type: () -> Optional[str]
"""
Dead letter error description, when the message is received from a deadletter subqueue of an entity.
:rtype: str
"""
if self.message.application_properties:
try:
return self.message.application_properties.get(PROPERTIES_DEAD_LETTER_ERROR_DESCRIPTION).decode('UTF-8')
except AttributeError:
pass
return None
@property
def dead_letter_reason(self):
# type: () -> Optional[str]
"""
Dead letter reason, when the message is received from a deadletter subqueue of an entity.
:rtype: str
"""
if self.message.application_properties:
try:
return self.message.application_properties.get(PROPERTIES_DEAD_LETTER_REASON).decode('UTF-8')
except AttributeError:
pass
return None
@property
def dead_letter_source(self):
# type: () -> Optional[str]
"""
The name of the queue or subscription that this message was enqueued on, before it was deadlettered.
This property is only set in messages that have been dead-lettered and subsequently auto-forwarded
from the dead-letter queue to another entity. Indicates the entity in which the message was dead-lettered.
:rtype: str
"""
if self.message.annotations:
try:
return self.message.annotations.get(_X_OPT_DEAD_LETTER_SOURCE).decode('UTF-8')
except AttributeError:
pass
return None
@property
def delivery_count(self):
# type: () -> Optional[int]
"""
Number of deliveries that have been attempted for this message. The count is incremented
when a message lock expires or the message is explicitly abandoned by the receiver.
:rtype: int
"""
if self._amqp_header:
return self._amqp_header.delivery_count
return None
@property
def enqueued_sequence_number(self):
# type: () -> Optional[int]
"""
For messages that have been auto-forwarded, this property reflects the sequence number that had
first been assigned to the message at its original point of submission.
:rtype: int
"""
if self.message.annotations:
return self.message.annotations.get(_X_OPT_ENQUEUE_SEQUENCE_NUMBER)
return None
@property
def enqueued_time_utc(self):
# type: () -> Optional[datetime.datetime]
"""
The UTC datetime at which the message has been accepted and stored in the entity.
:rtype: ~datetime.datetime
"""
if self.message.annotations:
timestamp = self.message.annotations.get(_X_OPT_ENQUEUED_TIME)
if timestamp:
in_seconds = timestamp/1000.0
return utc_from_timestamp(in_seconds)
return None
@property
def expires_at_utc(self):
# type: () -> Optional[datetime.datetime]
"""
The UTC datetime at which the message is marked for removal and no longer available for retrieval
from the entity due to expiration. Expiry is controlled by the `Message.time_to_live` property.
This property is computed from `Message.enqueued_time_utc` + `Message.time_to_live`.
:rtype: ~datetime.datetime
"""
if self.enqueued_time_utc and self.time_to_live:
return self.enqueued_time_utc + self.time_to_live
return None
@property
def sequence_number(self):
# type: () -> Optional[int]
"""
The unique number assigned to a message by Service Bus. The sequence number is a unique 64-bit integer
assigned to a message as it is accepted and stored by the broker and functions as its true identifier.
For partitioned entities, the topmost 16 bits reflect the partition identifier.
Sequence numbers monotonically increase. They roll over to 0 when the 48-64 bit range is exhausted.
:rtype: int
"""
if self.message.annotations:
return self.message.annotations.get(_X_OPT_SEQUENCE_NUMBER)
return None
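# Editor's note: a hedged inspection sketch, not part of the original module;
# `peeked` is assumed to be a PeekedMessage obtained from a receiver's peek
# operation (which lives outside this module).
def _example_inspect_peeked(peeked):
    # Peeked messages expose broker metadata but carry no lock token, so they
    # cannot be settled, only inspected.
    _LOGGER.info("seq=%s enqueued=%s deliveries=%s",
                 peeked.sequence_number, peeked.enqueued_time_utc, peeked.delivery_count)
    if peeked.dead_letter_reason:
        _LOGGER.info("dead-lettered: %s (%s)",
                     peeked.dead_letter_reason, peeked.dead_letter_error_description)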
class ReceivedMessageBase(PeekedMessage):
"""
A Service Bus Message received from service side.
:ivar auto_renew_error: Error when AutoLockRenew is used and it fails to renew the message lock.
:vartype auto_renew_error: ~azure.servicebus.AutoLockRenewTimeout or ~azure.servicebus.AutoLockRenewFailed
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START receive_complex_message]
:end-before: [END receive_complex_message]
:language: python
:dedent: 4
:caption: Checking the properties on a received message.
"""
def __init__(self, message, receive_mode=ReceiveMode.PeekLock, **kwargs):
# type: (uamqp.message.Message, ReceiveMode, Any) -> None
super(ReceivedMessageBase, self).__init__(message=message)
self._settled = (receive_mode == ReceiveMode.ReceiveAndDelete)
self._received_timestamp_utc = utc_now()
self._is_deferred_message = kwargs.get("is_deferred_message", False)
self.auto_renew_error = None # type: Optional[Exception]
try:
self._receiver = kwargs.pop("receiver") # type: Union[ServiceBusReceiver, ServiceBusSessionReceiver]
except KeyError:
            raise TypeError("ReceivedMessage requires a receiver to be initialized. This class should never be "
                            "initialized by a user; the Message class should be utilized instead.")
self._expiry = None # type: Optional[datetime.datetime]
def _check_live(self, action):
# pylint: disable=no-member
if not self._receiver or not self._receiver._running: # pylint: disable=protected-access
raise MessageSettleFailed(action, "Orphan message had no open connection.")
if self._settled:
raise MessageAlreadySettled(action)
try:
if self._lock_expired:
raise MessageLockExpired(inner_exception=self.auto_renew_error)
except TypeError:
pass
try:
if self._receiver.session._lock_expired: # pylint: disable=protected-access
raise SessionLockExpired(inner_exception=self._receiver.session.auto_renew_error)
except AttributeError:
pass
def _settle_via_mgmt_link(self, settle_operation, dead_letter_reason=None, dead_letter_error_description=None):
# type: (str, Optional[str], Optional[str]) -> Callable
# pylint: disable=protected-access
if settle_operation == MESSAGE_COMPLETE:
return functools.partial(
self._receiver._settle_message,
SETTLEMENT_COMPLETE,
[self.lock_token],
)
if settle_operation == MESSAGE_ABANDON:
return functools.partial(
self._receiver._settle_message,
SETTLEMENT_ABANDON,
[self.lock_token],
)
if settle_operation == MESSAGE_DEAD_LETTER:
return functools.partial(
self._receiver._settle_message,
SETTLEMENT_DEADLETTER,
[self.lock_token],
dead_letter_details={
MGMT_REQUEST_DEAD_LETTER_REASON: dead_letter_reason or "",
MGMT_REQUEST_DEAD_LETTER_ERROR_DESCRIPTION: dead_letter_error_description or ""
}
)
if settle_operation == MESSAGE_DEFER:
return functools.partial(
self._receiver._settle_message,
SETTLEMENT_DEFER,
[self.lock_token],
)
raise ValueError("Unsupported settle operation type: {}".format(settle_operation))
def _settle_via_receiver_link(self, settle_operation, dead_letter_reason=None, dead_letter_error_description=None):
# type: (str, Optional[str], Optional[str]) -> Callable
if settle_operation == MESSAGE_COMPLETE:
return functools.partial(self.message.accept)
if settle_operation == MESSAGE_ABANDON:
return functools.partial(self.message.modify, True, False)
if settle_operation == MESSAGE_DEAD_LETTER:
return functools.partial(
self.message.reject,
condition=DEADLETTERNAME,
description=dead_letter_error_description,
info={
RECEIVER_LINK_DEAD_LETTER_REASON: dead_letter_reason,
RECEIVER_LINK_DEAD_LETTER_ERROR_DESCRIPTION: dead_letter_error_description
}
)
if settle_operation == MESSAGE_DEFER:
return functools.partial(self.message.modify, True, True)
raise ValueError("Unsupported settle operation type: {}".format(settle_operation))
@property
def _lock_expired(self):
# type: () -> bool
# pylint: disable=protected-access
"""
Whether the lock on the message has expired.
:rtype: bool
"""
try:
if self._receiver.session: # type: ignore
raise TypeError("Session messages do not expire. Please use the Session expiry instead.")
except AttributeError: # Is not a session receiver
pass
if self.locked_until_utc and self.locked_until_utc <= utc_now():
return True
return False
@property
def lock_token(self):
# type: () -> Optional[Union[uuid.UUID, str]]
"""
The lock token for the current message serving as a reference to the lock that
is being held by the broker in PeekLock mode.
:rtype: ~uuid.UUID or str
"""
if self._settled:
return None
if self.message.delivery_tag:
return uuid.UUID(bytes_le=self.message.delivery_tag)
delivery_annotations = self.message.delivery_annotations
if delivery_annotations:
return delivery_annotations.get(_X_OPT_LOCK_TOKEN)
return None
@property
def locked_until_utc(self):
# type: () -> Optional[datetime.datetime]
# pylint: disable=protected-access
"""
The UTC datetime until which the message will be locked in the queue/subscription.
        When the lock expires, the delivery count of the message is incremented and the message
is again available for retrieval.
:rtype: datetime.datetime
"""
try:
if self._settled or self._receiver.session: # type: ignore
return None
except AttributeError: # not settled, and isn't session receiver.
pass
if self._expiry:
return self._expiry
if self.message.annotations and _X_OPT_LOCKED_UNTIL in self.message.annotations:
expiry_in_seconds = self.message.annotations[_X_OPT_LOCKED_UNTIL]/1000
self._expiry = utc_from_timestamp(expiry_in_seconds)
return self._expiry
class ReceivedMessage(ReceivedMessageBase):
def _settle_message(
self,
settle_operation,
dead_letter_reason=None,
dead_letter_error_description=None,
):
# type: (str, Optional[str], Optional[str]) -> None
try:
if not self._is_deferred_message:
try:
self._settle_via_receiver_link(settle_operation,
dead_letter_reason=dead_letter_reason,
dead_letter_error_description=dead_letter_error_description)()
return
except RuntimeError as exception:
_LOGGER.info(
"Message settling: %r has encountered an exception (%r)."
"Trying to settle through management link",
settle_operation,
exception
)
self._settle_via_mgmt_link(settle_operation,
dead_letter_reason=dead_letter_reason,
dead_letter_error_description=dead_letter_error_description)()
except Exception as e:
raise MessageSettleFailed(settle_operation, e)
def complete(self):
# type: () -> None
"""Complete the message.
This removes the message from the queue.
:rtype: None
:raises: ~azure.servicebus.exceptions.MessageAlreadySettled if the message has been settled.
:raises: ~azure.servicebus.exceptions.MessageLockExpired if message lock has already expired.
:raises: ~azure.servicebus.exceptions.SessionLockExpired if session lock has already expired.
:raises: ~azure.servicebus.exceptions.MessageSettleFailed if message settle operation fails.
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START receive_sync]
:end-before: [END receive_sync]
:language: python
:dedent: 4
:caption: Completing a received message to remove it from the queue.
"""
# pylint: disable=protected-access
self._check_live(MESSAGE_COMPLETE)
self._settle_message(MESSAGE_COMPLETE)
self._settled = True
def dead_letter(self, reason=None, error_description=None):
# type: (Optional[str], Optional[str]) -> None
"""Move the message to the Dead Letter queue.
The Dead Letter queue is a sub-queue that can be
used to store messages that failed to process correctly, or otherwise require further inspection
or processing. The queue can also be configured to send expired messages to the Dead Letter queue.
:param str reason: The reason for dead-lettering the message.
:param str error_description: The detailed error description for dead-lettering the message.
:rtype: None
:raises: ~azure.servicebus.exceptions.MessageAlreadySettled if the message has been settled.
:raises: ~azure.servicebus.exceptions.MessageLockExpired if message lock has already expired.
:raises: ~azure.servicebus.exceptions.SessionLockExpired if session lock has already expired.
:raises: ~azure.servicebus.exceptions.MessageSettleFailed if message settle operation fails.
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START receive_deadletter_sync]
:end-before: [END receive_deadletter_sync]
:language: python
:dedent: 4
:caption: Dead letter a message to remove it from the queue by sending it to the dead letter subqueue,
and receiving it from there.
"""
# pylint: disable=protected-access
self._check_live(MESSAGE_DEAD_LETTER)
self._settle_message(MESSAGE_DEAD_LETTER,
dead_letter_reason=reason,
dead_letter_error_description=error_description)
self._settled = True
def abandon(self):
# type: () -> None
"""Abandon the message.
This message will be returned to the queue and made available to be received again.
:rtype: None
:raises: ~azure.servicebus.exceptions.MessageAlreadySettled if the message has been settled.
:raises: ~azure.servicebus.exceptions.MessageLockExpired if message lock has already expired.
:raises: ~azure.servicebus.exceptions.SessionLockExpired if session lock has already expired.
:raises: ~azure.servicebus.exceptions.MessageSettleFailed if message settle operation fails.
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START abandon_message]
:end-before: [END abandon_message]
:language: python
:dedent: 4
:caption: Abandoning a received message to return it immediately to the queue.
"""
# pylint: disable=protected-access
self._check_live(MESSAGE_ABANDON)
self._settle_message(MESSAGE_ABANDON)
self._settled = True
def defer(self):
# type: () -> None
"""Defer the message.
This message will remain in the queue but must be requested
specifically by its sequence number in order to be received.
:rtype: None
:raises: ~azure.servicebus.exceptions.MessageAlreadySettled if the message has been settled.
:raises: ~azure.servicebus.exceptions.MessageLockExpired if message lock has already expired.
:raises: ~azure.servicebus.exceptions.SessionLockExpired if session lock has already expired.
:raises: ~azure.servicebus.exceptions.MessageSettleFailed if message settle operation fails.
.. admonition:: Example:
.. literalinclude:: ../samples/sync_samples/sample_code_servicebus.py
:start-after: [START receive_defer_sync]
:end-before: [END receive_defer_sync]
:language: python
:dedent: 4
                :caption: Deferring a received message sets it aside such that it can only be received
                    by calling receive_deferred_messages with its sequence number.
"""
self._check_live(MESSAGE_DEFER)
self._settle_message(MESSAGE_DEFER)
self._settled = True
def renew_lock(self):
# type: () -> datetime.datetime
# pylint: disable=protected-access,no-member
"""Renew the message lock.
This will maintain the lock on the message to ensure it is not returned to the queue
to be reprocessed.
In order to complete (or otherwise settle) the message, the lock must be maintained,
and cannot already have expired; an expired lock cannot be renewed.
        Messages received via ReceiveAndDelete mode are not locked, and therefore cannot be renewed.
        This operation is likewise only available for non-sessionful messages.
Lock renewal can be performed as a background task by registering the message with an
`azure.servicebus.AutoLockRenew` instance.
:returns: The utc datetime the lock is set to expire at.
:rtype: datetime.datetime
:raises: TypeError if the message is sessionful.
        :raises: ~azure.servicebus.exceptions.MessageLockExpired if the message lock has already expired.
        :raises: ~azure.servicebus.exceptions.MessageAlreadySettled if the message has already been settled.
"""
try:
if self._receiver.session: # type: ignore
raise TypeError("Session messages cannot be renewed. Please renew the Session lock instead.")
except AttributeError:
pass
self._check_live(MESSAGE_RENEW_LOCK)
token = self.lock_token
if not token:
raise ValueError("Unable to renew lock - no lock token found.")
expiry = self._receiver._renew_locks(token) # type: ignore
self._expiry = utc_from_timestamp(expiry[MGMT_RESPONSE_MESSAGE_EXPIRATION][0]/1000.0) # type: datetime.datetime
return self._expiry
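# Editor's note: a hedged sketch of the PeekLock settlement flow using the
# methods above; `process` is a hypothetical application callback, not part of
# the original module.
def _example_settle(msg, process):
    try:
        process(msg)
        msg.complete()                  # removes the message from the queue
    except ValueError:
        msg.dead_letter(reason="bad payload",
                        error_description="ValueError while processing")
    except Exception:
        msg.abandon()                   # return it to the queue for redelivery
        raise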
class AMQPMessage(object):
"""
The internal AMQP message that this ServiceBusMessage represents.
:param properties: Properties to add to the message.
:type properties: ~uamqp.message.MessageProperties
:param application_properties: Service specific application properties.
:type application_properties: dict
:param annotations: Service specific message annotations. Keys in the dictionary
must be `uamqp.types.AMQPSymbol` or `uamqp.types.AMQPuLong`.
:type annotations: dict
:param delivery_annotations: Delivery-specific non-standard properties at the head of the message.
Delivery annotations convey information from the sending peer to the receiving peer.
Keys in the dictionary must be `uamqp.types.AMQPSymbol` or `uamqp.types.AMQPuLong`.
:type delivery_annotations: dict
:param header: The message header.
:type header: ~uamqp.message.MessageHeader
:param footer: The message footer.
:type footer: dict
"""
def __init__(self, message):
# type: (uamqp.Message) -> None
self._message = message
@property
def properties(self):
return self._message.properties
@properties.setter
def properties(self, value):
self._message.properties = value
@property
def application_properties(self):
return self._message.application_properties
@application_properties.setter
def application_properties(self, value):
self._message.application_properties = value
@property
def annotations(self):
return self._message.annotations
@annotations.setter
def annotations(self, value):
self._message.annotations = value
@property
def delivery_annotations(self):
return self._message.delivery_annotations
@delivery_annotations.setter
def delivery_annotations(self, value):
self._message.delivery_annotations = value
@property
def header(self):
return self._message.header
@header.setter
def header(self, value):
self._message.header = value
@property
def footer(self):
return self._message.footer
@footer.setter
def footer(self, value):
self._message.footer = value
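# Editor's note: a hedged sketch, not part of the original module, showing how
# AMQPMessage mirrors the underlying uamqp fields; the property values are
# illustrative.
def _example_amqp_access(message):
    amqp = message.amqp_message                            # set in Message.__init__
    amqp.application_properties = {b"tenant": b"contoso"}  # raw AMQP application properties
    return amqp.header, amqp.footer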
| 39.963668
| 128
| 0.655591
|
2bb8ea9f625ea410f3c8382797f211b4f7ba5b71
| 902
|
py
|
Python
|
setup.py
|
jfjlaros/online-array
|
e946f9d8e89a77f67317eb6136f5c5d1df9d20ed
|
[
"MIT"
] | null | null | null |
setup.py
|
jfjlaros/online-array
|
e946f9d8e89a77f67317eb6136f5c5d1df9d20ed
|
[
"MIT"
] | null | null | null |
setup.py
|
jfjlaros/online-array
|
e946f9d8e89a77f67317eb6136f5c5d1df9d20ed
|
[
"MIT"
] | null | null | null |
import sys
from setuptools import setup
import online_array as distmeta
if sys.version_info < (2, 6):
raise Exception('online-array requires Python 2.6 or higher.')
setup(
name='online-array',
version=distmeta.__version__,
description='An array-like object that calls a function.',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['online_array'],
install_requires=['numpy'],
entry_points = {},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering'],
keywords='mathematics, numpy')
| 28.1875
| 66
| 0.672949
|
164fa31ee96da494e24a05d52f91833b168f084a
| 3,014
|
py
|
Python
|
scrape_mars.py
|
sadik-20/Web-Scraping-Challenge
|
d428615e43ccf893ae4f7fc3b9927fc019abb421
|
[
"ADSL"
] | null | null | null |
scrape_mars.py
|
sadik-20/Web-Scraping-Challenge
|
d428615e43ccf893ae4f7fc3b9927fc019abb421
|
[
"ADSL"
] | null | null | null |
scrape_mars.py
|
sadik-20/Web-Scraping-Challenge
|
d428615e43ccf893ae4f7fc3b9927fc019abb421
|
[
"ADSL"
] | null | null | null |
# --- dependencies and setup ---
from splinter import Browser
from bs4 import BeautifulSoup as bs
import pandas as pd
import time
import requests
#from webdriver_manager.chrome import ChromeDriverManager
# This is for debugging
def savetofile(contents):
    with open('_temporary.txt', "w", encoding="utf-8") as f:
        f.write(contents)
def scrape():
executable_path = {"executable_path": "/usr/local/bin/chromedriver"}
browser = Browser("chrome", **executable_path, headless=False)
# NASA Mars News
url = 'https://mars.nasa.gov/news/'
browser.visit(url)
time.sleep(3)
html = browser.html
soup = bs(html, 'html.parser')
slides = soup.find_all('li', class_='slide')
content_title = slides[0].find('div', class_='content_title')
news_title = content_title.text.strip()
article_teaser_body = slides[0].find('div', class_='article_teaser_body')
news_p = article_teaser_body.text.strip()
# JPL Mars Space Images
base_url = 'https://www.jpl.nasa.gov'
url = base_url + '/spaceimages/?search=&category=Mars'
browser.visit(url)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
featured_image_url = base_url + soup.find('a',class_='button fancybox')['data-fancybox-href']
# Mars facts
url = 'https://space-facts.com/mars/'
browser.visit(url) # not necessary, but added for checking the operation
time.sleep(1)
dfs = pd.read_html(url)
for df in dfs:
try:
df = df.rename(columns={0: "Description", 1: "Value"})
df = df.set_index("Description")
marsfacts_html = df.to_html().replace('\n', '')
# df.to_html('marsfacts.html') # to save to a file to test
break
        except Exception:
            # Not every table on the page matches the expected two-column shape.
            continue
# Mars Hemispheres
base_url = 'https://astrogeology.usgs.gov'
url = base_url + '/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'
browser.visit(url)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
items = soup.find_all('div', class_='item')
urls = []
titles = []
for item in items:
urls.append(base_url + item.find('a')['href'])
titles.append(item.find('h3').text.strip())
img_urls = []
for oneurl in urls:
browser.visit(oneurl)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
        oneurl = base_url + soup.find('img', class_='wide-image')['src']
img_urls.append(oneurl)
hemisphere_image_urls = []
for i in range(len(titles)):
hemisphere_image_urls.append({'title':titles[i],'img_url':img_urls[i]})
# Assigning scraped data to a page
marspage = {}
marspage["news_title"] = news_title
marspage["news_p"] = news_p
marspage["featured_image_url"] = featured_image_url
marspage["marsfacts_html"] = marsfacts_html
marspage["hemisphere_image_urls"] = hemisphere_image_urls
return marspage
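# Editor's note: a hedged sketch for running the scraper standalone; it assumes
# the chromedriver path configured above is valid on the local machine.
if __name__ == "__main__":
    results = scrape()
    for key, value in results.items():
        print(key, ":", str(value)[:80])  # truncate long HTML values for readability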
| 28.168224
| 101
| 0.637359
|
7bc2082b84ed08dc1ef6019ee65fc939d7c361c7
| 821
|
pyp
|
Python
|
LDDImport.pyp
|
gemvfx/LDDImport
|
07179ce7feb474a78df46e204a9d7589fd140811
|
[
"MIT"
] | 1
|
2019-05-08T10:50:45.000Z
|
2019-05-08T10:50:45.000Z
|
LDDImport.pyp
|
gemvfx/LDDImport
|
07179ce7feb474a78df46e204a9d7589fd140811
|
[
"MIT"
] | 2
|
2016-10-12T15:02:56.000Z
|
2019-05-17T11:46:25.000Z
|
LDDImport.pyp
|
gemvfx/LDDImport
|
07179ce7feb474a78df46e204a9d7589fd140811
|
[
"MIT"
] | null | null | null |
"""
Lego Digital Designer C4D Importer
Copyright: Gerhard Messer
Written for CINEMA 4D R16+
Version History
0.1 - 2015-05-18 - initial plugin
"""
import c4d
from c4d import gui, plugins, utils, bitmaps, storage
import os, sys
#1033677 LDDImport -- registered 25.09.2014
PLUGIN_ID = 1033677
class Olddimporter(c4d.plugins.ObjectData):
print "hihi"
pass
def main():
bmp = bitmaps.BaseBitmap()
dir, f = os.path.split(__file__)
fn = os.path.join(dir, "res", "LDDimport.png")
bmp.InitWith(fn)
    c4d.plugins.RegisterObjectPlugin(id=PLUGIN_ID, str="LDD Importer", g=Olddimporter,
                                     description="LDDImporter", info=c4d.OBJECT_GENERATOR, icon=bmp)
if __name__ == "__main__":
main()
| 22.805556
| 104
| 0.618758
|
8db9f20d7b1c5f23e3905f72b0d1cda12f6482a9
| 994
|
py
|
Python
|
sdk/servicebus/azure-servicebus/azure/servicebus/aio/__init__.py
|
LianwMS/azure-sdk-for-python
|
612d7bca9de86ee1bd1fa59291d7bf897ba9213f
|
[
"MIT"
] | 1
|
2020-08-17T14:40:09.000Z
|
2020-08-17T14:40:09.000Z
|
sdk/servicebus/azure-servicebus/azure/servicebus/aio/__init__.py
|
LianwMS/azure-sdk-for-python
|
612d7bca9de86ee1bd1fa59291d7bf897ba9213f
|
[
"MIT"
] | 2
|
2020-07-17T13:57:08.000Z
|
2020-07-21T18:30:37.000Z
|
sdk/servicebus/azure-servicebus/azure/servicebus/aio/__init__.py
|
LianwMS/azure-sdk-for-python
|
612d7bca9de86ee1bd1fa59291d7bf897ba9213f
|
[
"MIT"
] | 1
|
2020-09-18T13:20:20.000Z
|
2020-09-18T13:20:20.000Z
|
# ------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# -------------------------------------------------------------------------
from ._async_message import ReceivedMessage
from ._base_handler_async import ServiceBusSharedKeyCredential
from ._servicebus_sender_async import ServiceBusSender
from ._servicebus_receiver_async import ServiceBusReceiver
from ._servicebus_session_receiver_async import ServiceBusSessionReceiver
from ._servicebus_session_async import ServiceBusSession
from ._servicebus_client_async import ServiceBusClient
from ._async_auto_lock_renewer import AutoLockRenew
__all__ = [
'ReceivedMessage',
'ServiceBusClient',
'ServiceBusSender',
'ServiceBusReceiver',
'ServiceBusSessionReceiver',
'ServiceBusSharedKeyCredential',
'AutoLockRenew',
'ServiceBusSession'
]
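# Editor's note: a hedged usage sketch, not part of the original package; it
# assumes the preview-era client API (from_connection_string /
# get_queue_receiver), and conn_str / queue_name are caller-supplied
# placeholders.
async def _example_drain_queue(conn_str, queue_name):
    client = ServiceBusClient.from_connection_string(conn_str)
    async with client:
        receiver = client.get_queue_receiver(queue_name)
        async with receiver:
            async for message in receiver:
                await message.complete()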
| 39.76
| 75
| 0.702213
|
a7618d26e0e366be168fbc8dbea3be23f14c033f
| 17,860
|
py
|
Python
|
c4/system/deviceManager.py
|
Brewgarten/c4-system-manager
|
6fdec33ced4b1cb32d82a24cd168447a899b7e10
|
[
"MIT"
] | null | null | null |
c4/system/deviceManager.py
|
Brewgarten/c4-system-manager
|
6fdec33ced4b1cb32d82a24cd168447a899b7e10
|
[
"MIT"
] | 1
|
2017-10-17T21:51:40.000Z
|
2017-10-17T21:51:40.000Z
|
c4/system/deviceManager.py
|
Brewgarten/c4-system-manager
|
6fdec33ced4b1cb32d82a24cd168447a899b7e10
|
[
"MIT"
] | null | null | null |
"""
Copyright (c) IBM 2015-2017. All Rights Reserved.
Project name: c4-system-manager
This project is licensed under the MIT License, see LICENSE
This module provides a device manager interface to implement device managers.
In particular one should extend :py:class:`~c4.system.deviceManager.DeviceManagerImplementation`
to implement various message handlers. Note that by default the handlers follow this pattern:
``def handle<MessageType>(self, message)``
or
``def handle<MessageType>(self, message, envelope)``
and return a ``dict`` which becomes message result.
An instance of a device manager can then be created using the
:py:class:`~c4.system.deviceManager.DeviceManager` class
Example
-------
The following creates a device manager that is able to deal with
:py:class:`~c4.system.messages.Status` messages.
.. code-block:: python
import c4.system.deviceManager
class MyDM(c4.system.deviceManager.DeviceManagerImplementation):
def handleStatus(self, message):
return {"healthy": True}
It can then be instantiated using
.. code-block:: python
    deviceManager = DeviceManager(clusterInfo, "myDeviceManager", MyDM)
"""
import ctypes
import inspect
import logging
import multiprocessing
import re
import sys
import time
from c4.messaging import (DealerRouter,
RouterClient,
callMessageHandler)
from c4.system.backend import Backend
from c4.system.configuration import States
from c4.system.messages import LocalStopDeviceManager
from c4.utils.command import run
from c4.utils.jsonutil import JSONSerializable, Datetime
from c4.utils.logutil import ClassLogger
from c4.utils.util import callWithVariableArguments, getVariableArguments
from c4.system.monitoring import ClassMonitor
log = logging.getLogger(__name__)
NOT_RUNNING_ACTIONS = set([
"LocalStartDeviceManager",
"LocalStopDeviceManager",
"Status"
])
def operation(implementation):
"""
Operation decorator to be used on methods of the device manager that
should be exposed externally as operations
:param implementation: a device manager method
:returns: method decorated with additional operation information
"""
handlerArgSpec = inspect.getargspec(implementation)
if inspect.ismethod(implementation):
handlerArguments = handlerArgSpec.args[1:]
elif inspect.isfunction(implementation):
handlerArguments = handlerArgSpec.args
else:
log.error("%s needs to be a method or function", implementation)
return implementation
if "self" in handlerArguments:
handlerArguments.remove("self")
if handlerArgSpec.defaults is None:
handlerDefaults = []
else:
handlerDefaults = handlerArgSpec.defaults
lastRequiredArgumentIndex = len(handlerArguments)-len(handlerDefaults)
requiredHandlerArguments = handlerArguments[:lastRequiredArgumentIndex]
optionalHandlerArguments = handlerArguments[lastRequiredArgumentIndex:]
# add operation information to the implementation
implementation.operation = {
"name": implementation.__name__
}
if implementation.__doc__:
descriptionLines = [
line.strip()
for line in implementation.__doc__.strip().splitlines()
if line
]
implementation.operation["description"] = "\n".join(descriptionLines)
if requiredHandlerArguments:
implementation.operation["required"] = requiredHandlerArguments
if optionalHandlerArguments:
implementation.operation["optional"] = optionalHandlerArguments
return implementation
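# Editor's note: a hedged sketch, not part of the original module, of what
# @operation attaches to a plain function; the name and arguments are
# illustrative.
@operation
def _example_restart(service, force=False):
    """Restart the managed service."""
    return {"restarted": service, "forced": force}
# _example_restart.operation is now {"name": "_example_restart",
# "description": "Restart the managed service.", "required": ["service"],
# "optional": ["force"]}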
@ClassLogger
class DeviceManager(DealerRouter):
"""
Device Manager
:param clusterInfo: cluster information
:type clusterInfo: :class:`~c4.system.configuration.ClusterInfo`
:param name: name
:type name: str
:param implementation: implementation of handlers
:type implementation: :class:`~c4.system.deviceManager.DeviceManagerImplementation`
:param properties: optional properties
:type properties: dict
:raises MessagingException: if either parent device manager is not set up or device manager address is already in use
"""
def __init__(self, clusterInfo, name, implementation, properties=None):
addressParts = name.split("/")
addressParts.insert(0, clusterInfo.node)
routerAddress = "/".join(addressParts[:-1])
address = "/".join(addressParts)
if not properties:
properties = {}
super(DeviceManager, self).__init__(routerAddress, address, maxThreads=properties.get("maxThreads", 2), register=True, name="DM")
self.clusterInfo = clusterInfo
# set up device manager implementation
self.implementation = implementation(self.clusterInfo, name, properties)
self.addHandler(self.implementation.routeMessage)
@property
def node(self):
"""
Node name
:returns: str
"""
return self.clusterInfo.node
def run(self):
"""
Override DealerRouter::run in order to introduce this hack
"""
#FIXME: this is a hack to prevent etcd operations from hanging later on in this process.
Backend().configuration.getAliases()
super(DeviceManager, self).run()
def start(self, timeout=60):
"""
Start the device manager
:param timeout: timeout in seconds
:type timeout: int
:returns: whether start was successful
:rtype: bool
"""
        self.log.debug("starting device manager '%s'", self.address)
return super(DeviceManager, self).start(timeout=timeout)
def stop(self, timeout=60):
"""
Stop the device manager
:param timeout: timeout in seconds
:type timeout: int
:returns: whether stop was successful
:rtype: bool
"""
self.log.debug("stopping device manager '%s' on '%s'", self.address, self.node)
# stop child device managers, this is required, otherwise device manager processes won't stop
if self.implementation.state == States.RUNNING:
client = RouterClient(self.address)
client.sendRequest(LocalStopDeviceManager(self.routerAddress, self.address))
# give device managers and sub processes time to stop
waitTime = Backend().configuration.getPlatform().settings.get("system.timeout", 60)
end = time.time() + waitTime
while time.time() < end:
if self.implementation.state != States.REGISTERED:
self.log.debug("waiting for device manager '%s' to return to '%s', current state is '%s'",
self.address,
repr(States.REGISTERED),
repr(self.implementation.state))
time.sleep(1)
else:
break
else:
self.log.error("waiting for device manager '%s' to return to '%s' timed out",
self.address,
repr(States.REGISTERED))
return False
return super(DeviceManager, self).stop(timeout=timeout)
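# Editor's note: a hedged wiring sketch (kept as a comment because clusterInfo
# and MyDM come from elsewhere) matching the constructor signature above:
#
#     dm = DeviceManager(clusterInfo, "myDeviceManager", MyDM)
#     if dm.start(timeout=30):
#         ...  # device manager is routing messages
#         dm.stop()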
@ClassLogger
class DeviceManagerImplementation(object):
"""
Device manager implementation which provides the handlers for messages.
:param clusterInfo: cluster information
:type clusterInfo: :class:`~c4.system.configuration.ClusterInfo`
:param name: name
:type name: str
:param properties: optional properties
:type properties: dict
"""
def __init__(self, clusterInfo, name, properties=None):
super(DeviceManagerImplementation, self).__init__()
self.clusterInfo = clusterInfo
self.name = name
if properties is None:
self.properties = {}
else:
self.properties = properties
self._state = multiprocessing.Value(ctypes.c_char_p, States.REGISTERED.name) # @UndefinedVariable
@classmethod
def getOperations(cls):
"""
Get operations associated with this implementation
:returns: operations map
:rtype: dict
"""
operations = {
name: method.operation
for name, method in inspect.getmembers(cls, inspect.ismethod)
if hasattr(method, "operation")
}
return operations
@property
def node(self):
"""
Node name
:returns: str
"""
return self.clusterInfo.node
def handleLocalStartDeviceManager(self, message, envelope):
"""
Handle :class:`~c4.system.messages.LocalStartDeviceManager` messages
:param message: message
:type message: dict
:param envelope: envelope
:type envelope: :class:`~c4.system.messages.Envelope`
"""
self.log.debug("received start request")
self.state = States.RUNNING
module = sys.modules[self.__class__.__module__]
return {
"state": self.state,
"version": getattr(module, "__version__", "unknown")
}
def handleLocalStopDeviceManager(self, message, envelope):
"""
Handle :class:`~c4.system.messages.LocalStopDeviceManager` messages
:param message: message
:type message: dict
:param envelope: envelope
:type envelope: :class:`~c4.system.messages.Envelope`
"""
self.log.debug("received stop request")
self.state = States.REGISTERED
return {
"state": self.state
}
def handleOperation(self, message):
"""
Handle :class:`~c4.system.messages.Operation` messages
:param message: message
:type message: dict
"""
operations = self.getOperations()
if message.get("name", "unknown") in operations:
operationImplementation = getattr(self, message["name"])
arguments = message.get("arguments", [])
keywordArguments = message.get("keywordArguments", {})
# get information on the operation implementation
handlerArgumentMap, leftOverArguments, leftOverKeywords = getVariableArguments(operationImplementation, *arguments, **keywordArguments)
# check for missing required arguments
missingArguments = [
key
for key, value in handlerArgumentMap.items()
if value == "_notset_"
]
if missingArguments:
return {
"error": "'{0}' is missing required arguments '{1}'".format(
message["name"],
",".join(missingArguments)
)
}
response = callWithVariableArguments(operationImplementation,
*arguments,
**keywordArguments)
if response is not None:
warning = []
if leftOverArguments:
warning.append("'{0}' has left over arguments '{1}'".format(
message["name"],
",".join(str(a) for a in leftOverArguments)
))
if leftOverKeywords:
warning.append("'{0}' has left over keyword arguments '{1}'".format(
message["name"],
",".join(leftOverKeywords)
))
if warning:
response["warning"] = "\n".join(warning)
return response
else:
return {"error": "unsupported operation '{0}'".format(message.get("name", message))}
def routeMessage(self, envelopeString, envelope):
"""
Route message packaged in an WS-Addressing like envelope accordingly
:param envelopeString: envelope JSON string
:type envelopeString: str
:param envelope: envelope
:type envelope: :class:`~c4.messaging.Envelope`
:returns: response
"""
if self.state == States.RUNNING or envelope.Action in NOT_RUNNING_ACTIONS:
return callMessageHandler(self, envelope)
else:
warning = "message with action '{action}' will not be handled because it is not allowed when the device is not in 'RUNNING' state, currently '{state}'".format(
action=envelope.Action,
state=self.state.name)
self.log.warning(warning)
return {"warning": warning}
@property
def state(self):
"""
Device manager state
"""
return States.valueOf(self._state.value)
@state.setter
def state(self, state):
if isinstance(state, States):
with self._state.get_lock():
self._state.value = state.name
else:
self.log.error("'%s' does not match enum of type '%s'", state, States)
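# Editor's note: a hedged sketch of the message shape handleOperation consumes;
# the operation name and arguments are illustrative.
_EXAMPLE_OPERATION_MESSAGE = {
    "name": "restart",
    "arguments": [],
    "keywordArguments": {"service": "nginx", "force": True},
}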
@ClassLogger
@ClassMonitor
class ConfiguredDeviceManagerImplementation(DeviceManagerImplementation):
"""
Device manager implementation for services with a specified DeviceManagerConfiguration
"""
def __init__(self, clusterInfo, name, properties=None):
super(ConfiguredDeviceManagerImplementation, self).__init__(clusterInfo, name, properties=properties)
        if properties and "configuration" in properties:
            self.dmConfiguration = properties["configuration"]
        else:
            raise ValueError("No DeviceManagerConfiguration found in properties.")
self.statusWarningIssued = False
# Override the default event (class name) with the name of the managed device.
self.monitor.event = name
def handleLocalStartDeviceManager(self, message, envelope):
"""
Handle :class:`~c4.system.messages.LocalStartDeviceManager` messages
:param message: message
:type message: dict
:param envelope: envelope
:type envelope: :class:`~c4.system.messages.Envelope`
"""
self.log.info("Starting %s device manager", self.name)
self.start()
return super(ConfiguredDeviceManagerImplementation, self).handleLocalStartDeviceManager(message, envelope)
def handleLocalStopDeviceManager(self, message, envelope):
"""
Handle :class:`~c4.system.messages.LocalStopDeviceManager` messages
:param message: message
:type message: dict
:param envelope: envelope
:type envelope: :class:`~c4.system.messages.Envelope`
"""
if not self.dmConfiguration.alwaysOn:
self.stop()
return super(ConfiguredDeviceManagerImplementation, self).handleLocalStopDeviceManager(message, envelope)
def handleStatus(self):
"""
The handler for an incoming Status message.
"""
stdout, stderr, rc = run(self.dmConfiguration.statusCommand)
if self.dmConfiguration.statusRegex:
runningPattern = re.compile(self.dmConfiguration.statusRegex)
match = runningPattern.search(stdout)
status = ConfiguredDeviceManagerStatus.OK if match else ConfiguredDeviceManagerStatus.FAILED
else:
status = ConfiguredDeviceManagerStatus.OK if rc == self.dmConfiguration.rc else ConfiguredDeviceManagerStatus.FAILED
if status == ConfiguredDeviceManagerStatus.FAILED:
# Suppress the warning if 1) we aren't running or 2) we already issued the warning once
if self.state == States.RUNNING and not self.statusWarningIssued:
self.log.warning("Unexpected %s status: stdout: %s, stderr: %s, rc: %s", self.name, stdout, stderr, rc)
self.statusWarningIssued = True
else:
self.statusWarningIssued = False
return ConfiguredDeviceManagerStatus(self.state, status)
@operation
def start(self, isRecovery=False):
"""
Start the configured service.
"""
if self.state == States.STARTING:
self.log.debug("%s received start request, but state is already STARTING.", self.name)
return
# Make sure we didn't already recover to OK status
if self.handleStatus().status != ConfiguredDeviceManagerStatus.OK:
self.state = States.STARTING
stdout, stderr, rc = run(self.dmConfiguration.startCommand)
if rc != 0:
                self.log.error("Error starting %s service. stdout: %s, stderr: %s, rc: %s", self.name, stdout, stderr, rc)
if isRecovery:
            self.monitor.report(self.monitor.SUCCESS if rc == 0 else self.monitor.FAILURE)
self.state = States.RUNNING
@operation
def stop(self):
"""
Stop the configured service.
"""
stdout, stderr, rc = run(self.dmConfiguration.stopCommand)
if rc != 0:
            self.log.error("Error stopping %s service. stdout: %s, stderr: %s, rc: %s", self.name, stdout, stderr, rc)
class DeviceManagerStatus(JSONSerializable):
"""
Device manager status which can be extended to include additional details
"""
def __init__(self):
super(DeviceManagerStatus, self).__init__()
self.timestamp = Datetime.utcnow()
class ConfiguredDeviceManagerStatus(DeviceManagerStatus):
"""
    Configured device manager status
    :param state: state
    :type state: :class:`~c4.system.configuration.States`
    :param status: status, either ``OK`` or ``FAILED``
    :type status: str
"""
OK = "OK"
FAILED = "FAILED"
def __init__(self, state, status):
super(ConfiguredDeviceManagerStatus, self).__init__()
self.state = state
self.status = status
| 36.300813
| 171
| 0.633315
|