Dataset schema (one record per source file; ⌀ marks nullable fields):

| field | type | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 to 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 to 209 |
| max_stars_repo_name | string | length 5 to 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 ⌀ | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 4 to 209 |
| max_issues_repo_name | string | length 5 to 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 ⌀ | 1 to 67k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 4 to 209 |
| max_forks_repo_name | string | length 5 to 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 ⌀ | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 4 to 1.02M |
| avg_line_length | float64 | 1.07 to 66.1k |
| max_line_length | int64 | 4 to 266k |
| alphanum_fraction | float64 | 0.01 to 1 |
---
hexsha: 6b35e9df8dffac5726297f21906b18f189845d37 | size: 298 | ext: py | lang: Python
max_stars: automation/tincrepo/main/pxf/features/hdfs/readable/text/encoding/runTest.py | lchx1010/pxf @ f6e11f91fb8c01ed27fc829beb3800f3b253c209 | ["Apache-2.0"] | stars: 46 (2018-10-22T23:34:03.000Z to 2022-03-31T09:31:34.000Z)
max_issues: automation/tincrepo/main/pxf/features/hdfs/readable/text/encoding/runTest.py | lchx1010/pxf @ f6e11f91fb8c01ed27fc829beb3800f3b253c209 | ["Apache-2.0"] | issues: 317 (2018-10-05T23:51:48.000Z to 2022-03-22T17:38:52.000Z)
max_forks: automation/tincrepo/main/pxf/features/hdfs/readable/text/encoding/runTest.py | lchx1010/pxf @ f6e11f91fb8c01ed27fc829beb3800f3b253c209 | ["Apache-2.0"] | forks: 46 (2018-10-10T18:55:00.000Z to 2022-03-28T07:27:04.000Z)
content:
```python
from mpp.models import SQLTestCase
from mpp.models import SQLConcurrencyTestCase
class PxfHdfsReadSmallDataDifferentEncoding(SQLConcurrencyTestCase):
"""
@db_name pxfautomation
@concurrency 1
@gpdiff True
"""
sql_dir = 'sql'
ans_dir = 'expected'
out_dir = 'output'
```
avg_line_length: 22.923077 | max_line_length: 68 | alphanum_fraction: 0.721477
---
hexsha: 26017e793f9418a7b8a1a11eabfeaf3ebe3d6463 | size: 34 | ext: py | lang: Python
max_stars: python/pyfunc-scaffolding/{{cookiecutter.model_slug}}/src/constant.py | ashwinath/merlin @ 087a7fa6fb21e4c771d64418bd58873175226ca1 | ["Apache-2.0"] | stars: 97 (2020-10-15T08:03:56.000Z to 2022-03-31T22:30:59.000Z)
max_issues: python/pyfunc-scaffolding/{{cookiecutter.model_slug}}/src/constant.py | ibnummuhammad/merlin @ acf10a350bcacfdfe67f7020d535467b71ff1d89 | ["Apache-2.0"] | issues: 91 (2020-10-26T03:15:27.000Z to 2022-03-31T10:19:55.000Z)
max_forks: python/pyfunc-scaffolding/{{cookiecutter.model_slug}}/src/constant.py | ibnummuhammad/merlin @ acf10a350bcacfdfe67f7020d535467b71ff1d89 | ["Apache-2.0"] | forks: 26 (2020-10-21T03:53:36.000Z to 2022-03-16T06:43:15.000Z)
content:
```python
ARTIFACT_MODEL_PATH = "model_path"
```
avg_line_length: 34 | max_line_length: 34 | alphanum_fraction: 0.852941
---
hexsha: 3105ff23b9c64a55ea00bc0b0607d9b5dbb914e5 | size: 134 | ext: py | lang: Python
max_stars: toeplitzlda/classification/__init__.py | jsosulski/toeplitzlda @ ed56fb22fa70fb6c58b0c18db560eb3fd3f488c3 | ["BSD-3-Clause-Clear"] | stars: 3 (2022-02-14T12:47:33.000Z to 2022-03-17T11:48:57.000Z)
max_issues: toeplitzlda/classification/__init__.py | jsosulski/toeplitzlda @ ed56fb22fa70fb6c58b0c18db560eb3fd3f488c3 | ["BSD-3-Clause-Clear"] | issues: 3 (2022-02-11T11:05:22.000Z to 2022-02-11T11:06:51.000Z)
max_forks: toeplitzlda/classification/__init__.py | jsosulski/toeplitzlda @ ed56fb22fa70fb6c58b0c18db560eb3fd3f488c3 | ["BSD-3-Clause-Clear"] | forks: 1 (2022-03-07T08:13:03.000Z to 2022-03-07T08:13:03.000Z)
content:
```python
from . import covariance
from .toeplitzlda import (
EpochsVectorizer,
ShrinkageLinearDiscriminantAnalysis,
ToeplitzLDA,
)
```
avg_line_length: 19.142857 | max_line_length: 40 | alphanum_fraction: 0.768657
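The record above only re-exports the package's public classes. For orientation, here is a minimal usage sketch, assuming the scikit-learn estimator API that these class names suggest; the shapes, the random data, and the `n_channels` argument are illustrative assumptions, not taken from the file:

```python
# Hypothetical sketch: fit ToeplitzLDA on flattened EEG-style epochs.
# Assumes a scikit-learn-style fit/predict API; the data here is random noise.
import numpy as np
from toeplitzlda.classification import ToeplitzLDA

n_epochs, n_channels, n_times = 200, 31, 10
X = np.random.randn(n_epochs, n_channels * n_times)  # flattened (epochs, channels * times)
y = np.random.randint(0, 2, size=n_epochs)           # binary class labels

clf = ToeplitzLDA(n_channels=n_channels)  # constructor argument is an assumption
clf.fit(X, y)
print(clf.predict(X[:5]))
```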
---
hexsha: c79c71cf81c88bda220c698b29b9b2602a9616b2 | size: 19,614 | ext: py | lang: Python
max_stars: lib/python2.7/site-packages/scipy/io/tests/test_idl.py | wfehrnstrom/harmonize @ e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | ["MIT"] | stars: 6,989 (2017-07-18T06:23:18.000Z to 2022-03-31T15:58:36.000Z)
max_issues: SLpackage/private/thirdparty/pythonpkgs/scipy/scipy_0.19.1/lib/python2.7/site-packages/scipy/io/tests/test_idl.py | fanglab/6mASCOPE @ 3f1fdcb7693ff152f17623ce549526ec272698b1 | ["BSD-3-Clause"] | issues: 1,978 (2017-07-18T09:17:58.000Z to 2022-03-31T14:28:43.000Z)
max_forks: SLpackage/private/thirdparty/pythonpkgs/scipy/scipy_0.19.1/lib/python2.7/site-packages/scipy/io/tests/test_idl.py | fanglab/6mASCOPE @ 3f1fdcb7693ff152f17623ce549526ec272698b1 | ["BSD-3-Clause"] | forks: 1,228 (2017-07-18T09:03:13.000Z to 2022-03-29T05:57:40.000Z)
content:
```python
from __future__ import division, print_function, absolute_import
from os import path
import warnings
DATA_PATH = path.join(path.dirname(__file__), 'data')
import numpy as np
from numpy.testing import (assert_equal, assert_array_equal, run_module_suite,
assert_)
from scipy.io.idl import readsav
def object_array(*args):
"""Constructs a numpy array of objects"""
array = np.empty(len(args), dtype=object)
for i in range(len(args)):
array[i] = args[i]
return array
def assert_identical(a, b):
"""Assert whether value AND type are the same"""
assert_equal(a, b)
if type(b) is str:
assert_equal(type(a), type(b))
else:
assert_equal(np.asarray(a).dtype.type, np.asarray(b).dtype.type)
def assert_array_identical(a, b):
"""Assert whether values AND type are the same"""
assert_array_equal(a, b)
assert_equal(a.dtype.type, b.dtype.type)
# Define vectorized ID function for pointer arrays
vect_id = np.vectorize(id)
class TestIdict:
def test_idict(self):
custom_dict = {'a': np.int16(999)}
original_id = id(custom_dict)
s = readsav(path.join(DATA_PATH, 'scalar_byte.sav'), idict=custom_dict, verbose=False)
assert_equal(original_id, id(s))
assert_('a' in s)
assert_identical(s['a'], np.int16(999))
assert_identical(s['i8u'], np.uint8(234))
class TestScalars:
# Test that scalar values are read in with the correct value and type
def test_byte(self):
s = readsav(path.join(DATA_PATH, 'scalar_byte.sav'), verbose=False)
assert_identical(s.i8u, np.uint8(234))
def test_int16(self):
s = readsav(path.join(DATA_PATH, 'scalar_int16.sav'), verbose=False)
assert_identical(s.i16s, np.int16(-23456))
def test_int32(self):
s = readsav(path.join(DATA_PATH, 'scalar_int32.sav'), verbose=False)
assert_identical(s.i32s, np.int32(-1234567890))
def test_float32(self):
s = readsav(path.join(DATA_PATH, 'scalar_float32.sav'), verbose=False)
assert_identical(s.f32, np.float32(-3.1234567e+37))
def test_float64(self):
s = readsav(path.join(DATA_PATH, 'scalar_float64.sav'), verbose=False)
assert_identical(s.f64, np.float64(-1.1976931348623157e+307))
def test_complex32(self):
s = readsav(path.join(DATA_PATH, 'scalar_complex32.sav'), verbose=False)
assert_identical(s.c32, np.complex64(3.124442e13-2.312442e31j))
def test_bytes(self):
s = readsav(path.join(DATA_PATH, 'scalar_string.sav'), verbose=False)
assert_identical(s.s, np.bytes_("The quick brown fox jumps over the lazy python"))
def test_structure(self):
pass
def test_complex64(self):
s = readsav(path.join(DATA_PATH, 'scalar_complex64.sav'), verbose=False)
assert_identical(s.c64, np.complex128(1.1987253647623157e+112-5.1987258887729157e+307j))
def test_heap_pointer(self):
pass
def test_object_reference(self):
pass
def test_uint16(self):
s = readsav(path.join(DATA_PATH, 'scalar_uint16.sav'), verbose=False)
assert_identical(s.i16u, np.uint16(65511))
def test_uint32(self):
s = readsav(path.join(DATA_PATH, 'scalar_uint32.sav'), verbose=False)
assert_identical(s.i32u, np.uint32(4294967233))
def test_int64(self):
s = readsav(path.join(DATA_PATH, 'scalar_int64.sav'), verbose=False)
assert_identical(s.i64s, np.int64(-9223372036854774567))
def test_uint64(self):
s = readsav(path.join(DATA_PATH, 'scalar_uint64.sav'), verbose=False)
assert_identical(s.i64u, np.uint64(18446744073709529285))
class TestCompressed(TestScalars):
# Test that compressed .sav files can be read in
def test_compressed(self):
s = readsav(path.join(DATA_PATH, 'various_compressed.sav'), verbose=False)
assert_identical(s.i8u, np.uint8(234))
assert_identical(s.f32, np.float32(-3.1234567e+37))
assert_identical(s.c64, np.complex128(1.1987253647623157e+112-5.1987258887729157e+307j))
assert_equal(s.array5d.shape, (4, 3, 4, 6, 5))
assert_identical(s.arrays.a[0], np.array([1, 2, 3], dtype=np.int16))
assert_identical(s.arrays.b[0], np.array([4., 5., 6., 7.], dtype=np.float32))
assert_identical(s.arrays.c[0], np.array([np.complex64(1+2j), np.complex64(7+8j)]))
assert_identical(s.arrays.d[0], np.array([b"cheese", b"bacon", b"spam"], dtype=object))
class TestArrayDimensions:
# Test that multi-dimensional arrays are read in with the correct dimensions
def test_1d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_1d.sav'), verbose=False)
assert_equal(s.array1d.shape, (123, ))
def test_2d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_2d.sav'), verbose=False)
assert_equal(s.array2d.shape, (22, 12))
def test_3d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_3d.sav'), verbose=False)
assert_equal(s.array3d.shape, (11, 22, 12))
def test_4d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_4d.sav'), verbose=False)
assert_equal(s.array4d.shape, (4, 5, 8, 7))
def test_5d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_5d.sav'), verbose=False)
assert_equal(s.array5d.shape, (4, 3, 4, 6, 5))
def test_6d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_6d.sav'), verbose=False)
assert_equal(s.array6d.shape, (3, 6, 4, 5, 3, 4))
def test_7d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_7d.sav'), verbose=False)
assert_equal(s.array7d.shape, (2, 1, 2, 3, 4, 3, 2))
def test_8d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_8d.sav'), verbose=False)
assert_equal(s.array8d.shape, (4, 3, 2, 1, 2, 3, 5, 4))
class TestStructures:
def test_scalars(self):
s = readsav(path.join(DATA_PATH, 'struct_scalars.sav'), verbose=False)
assert_identical(s.scalars.a, np.array(np.int16(1)))
assert_identical(s.scalars.b, np.array(np.int32(2)))
assert_identical(s.scalars.c, np.array(np.float32(3.)))
assert_identical(s.scalars.d, np.array(np.float64(4.)))
assert_identical(s.scalars.e, np.array([b"spam"], dtype=object))
assert_identical(s.scalars.f, np.array(np.complex64(-1.+3j)))
def test_scalars_replicated(self):
s = readsav(path.join(DATA_PATH, 'struct_scalars_replicated.sav'), verbose=False)
assert_identical(s.scalars_rep.a, np.repeat(np.int16(1), 5))
assert_identical(s.scalars_rep.b, np.repeat(np.int32(2), 5))
assert_identical(s.scalars_rep.c, np.repeat(np.float32(3.), 5))
assert_identical(s.scalars_rep.d, np.repeat(np.float64(4.), 5))
assert_identical(s.scalars_rep.e, np.repeat(b"spam", 5).astype(object))
assert_identical(s.scalars_rep.f, np.repeat(np.complex64(-1.+3j), 5))
def test_scalars_replicated_3d(self):
s = readsav(path.join(DATA_PATH, 'struct_scalars_replicated_3d.sav'), verbose=False)
assert_identical(s.scalars_rep.a, np.repeat(np.int16(1), 24).reshape(4, 3, 2))
assert_identical(s.scalars_rep.b, np.repeat(np.int32(2), 24).reshape(4, 3, 2))
assert_identical(s.scalars_rep.c, np.repeat(np.float32(3.), 24).reshape(4, 3, 2))
assert_identical(s.scalars_rep.d, np.repeat(np.float64(4.), 24).reshape(4, 3, 2))
assert_identical(s.scalars_rep.e, np.repeat(b"spam", 24).reshape(4, 3, 2).astype(object))
assert_identical(s.scalars_rep.f, np.repeat(np.complex64(-1.+3j), 24).reshape(4, 3, 2))
def test_arrays(self):
s = readsav(path.join(DATA_PATH, 'struct_arrays.sav'), verbose=False)
assert_array_identical(s.arrays.a[0], np.array([1, 2, 3], dtype=np.int16))
assert_array_identical(s.arrays.b[0], np.array([4., 5., 6., 7.], dtype=np.float32))
assert_array_identical(s.arrays.c[0], np.array([np.complex64(1+2j), np.complex64(7+8j)]))
assert_array_identical(s.arrays.d[0], np.array([b"cheese", b"bacon", b"spam"], dtype=object))
def test_arrays_replicated(self):
s = readsav(path.join(DATA_PATH, 'struct_arrays_replicated.sav'), verbose=False)
# Check column types
assert_(s.arrays_rep.a.dtype.type is np.object_)
assert_(s.arrays_rep.b.dtype.type is np.object_)
assert_(s.arrays_rep.c.dtype.type is np.object_)
assert_(s.arrays_rep.d.dtype.type is np.object_)
# Check column shapes
assert_equal(s.arrays_rep.a.shape, (5, ))
assert_equal(s.arrays_rep.b.shape, (5, ))
assert_equal(s.arrays_rep.c.shape, (5, ))
assert_equal(s.arrays_rep.d.shape, (5, ))
# Check values
for i in range(5):
assert_array_identical(s.arrays_rep.a[i],
np.array([1, 2, 3], dtype=np.int16))
assert_array_identical(s.arrays_rep.b[i],
np.array([4., 5., 6., 7.], dtype=np.float32))
assert_array_identical(s.arrays_rep.c[i],
np.array([np.complex64(1+2j),
np.complex64(7+8j)]))
assert_array_identical(s.arrays_rep.d[i],
np.array([b"cheese", b"bacon", b"spam"],
dtype=object))
def test_arrays_replicated_3d(self):
s = readsav(path.join(DATA_PATH, 'struct_arrays_replicated_3d.sav'), verbose=False)
# Check column types
assert_(s.arrays_rep.a.dtype.type is np.object_)
assert_(s.arrays_rep.b.dtype.type is np.object_)
assert_(s.arrays_rep.c.dtype.type is np.object_)
assert_(s.arrays_rep.d.dtype.type is np.object_)
# Check column shapes
assert_equal(s.arrays_rep.a.shape, (4, 3, 2))
assert_equal(s.arrays_rep.b.shape, (4, 3, 2))
assert_equal(s.arrays_rep.c.shape, (4, 3, 2))
assert_equal(s.arrays_rep.d.shape, (4, 3, 2))
# Check values
for i in range(4):
for j in range(3):
for k in range(2):
assert_array_identical(s.arrays_rep.a[i, j, k],
np.array([1, 2, 3], dtype=np.int16))
assert_array_identical(s.arrays_rep.b[i, j, k],
np.array([4., 5., 6., 7.],
dtype=np.float32))
assert_array_identical(s.arrays_rep.c[i, j, k],
np.array([np.complex64(1+2j),
np.complex64(7+8j)]))
assert_array_identical(s.arrays_rep.d[i, j, k],
np.array([b"cheese", b"bacon", b"spam"],
dtype=object))
def test_inheritance(self):
s = readsav(path.join(DATA_PATH, 'struct_inherit.sav'), verbose=False)
assert_identical(s.fc.x, np.array([0], dtype=np.int16))
assert_identical(s.fc.y, np.array([0], dtype=np.int16))
assert_identical(s.fc.r, np.array([0], dtype=np.int16))
assert_identical(s.fc.c, np.array([4], dtype=np.int16))
def test_arrays_corrupt_idl80(self):
# test byte arrays with missing nbyte information from IDL 8.0 .sav file
with warnings.catch_warnings():
warnings.simplefilter('ignore')
s = readsav(path.join(DATA_PATH,'struct_arrays_byte_idl80.sav'),
verbose=False)
assert_identical(s.y.x[0], np.array([55,66], dtype=np.uint8))
class TestPointers:
# Check that pointers in .sav files produce references to the same object in Python
def test_pointers(self):
s = readsav(path.join(DATA_PATH, 'scalar_heap_pointer.sav'), verbose=False)
assert_identical(s.c64_pointer1, np.complex128(1.1987253647623157e+112-5.1987258887729157e+307j))
assert_identical(s.c64_pointer2, np.complex128(1.1987253647623157e+112-5.1987258887729157e+307j))
assert_(s.c64_pointer1 is s.c64_pointer2)
class TestPointerArray:
# Test that pointers in arrays are correctly read in
def test_1d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_1d.sav'), verbose=False)
assert_equal(s.array1d.shape, (123, ))
assert_(np.all(s.array1d == np.float32(4.)))
assert_(np.all(vect_id(s.array1d) == id(s.array1d[0])))
def test_2d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_2d.sav'), verbose=False)
assert_equal(s.array2d.shape, (22, 12))
assert_(np.all(s.array2d == np.float32(4.)))
assert_(np.all(vect_id(s.array2d) == id(s.array2d[0,0])))
def test_3d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_3d.sav'), verbose=False)
assert_equal(s.array3d.shape, (11, 22, 12))
assert_(np.all(s.array3d == np.float32(4.)))
assert_(np.all(vect_id(s.array3d) == id(s.array3d[0,0,0])))
def test_4d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_4d.sav'), verbose=False)
assert_equal(s.array4d.shape, (4, 5, 8, 7))
assert_(np.all(s.array4d == np.float32(4.)))
assert_(np.all(vect_id(s.array4d) == id(s.array4d[0,0,0,0])))
def test_5d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_5d.sav'), verbose=False)
assert_equal(s.array5d.shape, (4, 3, 4, 6, 5))
assert_(np.all(s.array5d == np.float32(4.)))
assert_(np.all(vect_id(s.array5d) == id(s.array5d[0,0,0,0,0])))
def test_6d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_6d.sav'), verbose=False)
assert_equal(s.array6d.shape, (3, 6, 4, 5, 3, 4))
assert_(np.all(s.array6d == np.float32(4.)))
assert_(np.all(vect_id(s.array6d) == id(s.array6d[0,0,0,0,0,0])))
def test_7d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_7d.sav'), verbose=False)
assert_equal(s.array7d.shape, (2, 1, 2, 3, 4, 3, 2))
assert_(np.all(s.array7d == np.float32(4.)))
assert_(np.all(vect_id(s.array7d) == id(s.array7d[0,0,0,0,0,0,0])))
def test_8d(self):
s = readsav(path.join(DATA_PATH, 'array_float32_pointer_8d.sav'), verbose=False)
assert_equal(s.array8d.shape, (4, 3, 2, 1, 2, 3, 5, 4))
assert_(np.all(s.array8d == np.float32(4.)))
assert_(np.all(vect_id(s.array8d) == id(s.array8d[0,0,0,0,0,0,0,0])))
class TestPointerStructures:
# Test that structures are correctly read in
def test_scalars(self):
s = readsav(path.join(DATA_PATH, 'struct_pointers.sav'), verbose=False)
assert_identical(s.pointers.g, np.array(np.float32(4.), dtype=np.object_))
assert_identical(s.pointers.h, np.array(np.float32(4.), dtype=np.object_))
assert_(id(s.pointers.g[0]) == id(s.pointers.h[0]))
def test_pointers_replicated(self):
s = readsav(path.join(DATA_PATH, 'struct_pointers_replicated.sav'), verbose=False)
assert_identical(s.pointers_rep.g, np.repeat(np.float32(4.), 5).astype(np.object_))
assert_identical(s.pointers_rep.h, np.repeat(np.float32(4.), 5).astype(np.object_))
assert_(np.all(vect_id(s.pointers_rep.g) == vect_id(s.pointers_rep.h)))
def test_pointers_replicated_3d(self):
s = readsav(path.join(DATA_PATH, 'struct_pointers_replicated_3d.sav'), verbose=False)
s_expect = np.repeat(np.float32(4.), 24).reshape(4, 3, 2).astype(np.object_)
assert_identical(s.pointers_rep.g, s_expect)
assert_identical(s.pointers_rep.h, s_expect)
assert_(np.all(vect_id(s.pointers_rep.g) == vect_id(s.pointers_rep.h)))
def test_arrays(self):
s = readsav(path.join(DATA_PATH, 'struct_pointer_arrays.sav'), verbose=False)
assert_array_identical(s.arrays.g[0], np.repeat(np.float32(4.), 2).astype(np.object_))
assert_array_identical(s.arrays.h[0], np.repeat(np.float32(4.), 3).astype(np.object_))
assert_(np.all(vect_id(s.arrays.g[0]) == id(s.arrays.g[0][0])))
assert_(np.all(vect_id(s.arrays.h[0]) == id(s.arrays.h[0][0])))
assert_(id(s.arrays.g[0][0]) == id(s.arrays.h[0][0]))
def test_arrays_replicated(self):
s = readsav(path.join(DATA_PATH, 'struct_pointer_arrays_replicated.sav'), verbose=False)
# Check column types
assert_(s.arrays_rep.g.dtype.type is np.object_)
assert_(s.arrays_rep.h.dtype.type is np.object_)
# Check column shapes
assert_equal(s.arrays_rep.g.shape, (5, ))
assert_equal(s.arrays_rep.h.shape, (5, ))
# Check values
for i in range(5):
assert_array_identical(s.arrays_rep.g[i], np.repeat(np.float32(4.), 2).astype(np.object_))
assert_array_identical(s.arrays_rep.h[i], np.repeat(np.float32(4.), 3).astype(np.object_))
assert_(np.all(vect_id(s.arrays_rep.g[i]) == id(s.arrays_rep.g[0][0])))
assert_(np.all(vect_id(s.arrays_rep.h[i]) == id(s.arrays_rep.h[0][0])))
def test_arrays_replicated_3d(self):
pth = path.join(DATA_PATH, 'struct_pointer_arrays_replicated_3d.sav')
s = readsav(pth, verbose=False)
# Check column types
assert_(s.arrays_rep.g.dtype.type is np.object_)
assert_(s.arrays_rep.h.dtype.type is np.object_)
# Check column shapes
assert_equal(s.arrays_rep.g.shape, (4, 3, 2))
assert_equal(s.arrays_rep.h.shape, (4, 3, 2))
# Check values
for i in range(4):
for j in range(3):
for k in range(2):
assert_array_identical(s.arrays_rep.g[i, j, k],
np.repeat(np.float32(4.), 2).astype(np.object_))
assert_array_identical(s.arrays_rep.h[i, j, k],
np.repeat(np.float32(4.), 3).astype(np.object_))
assert_(np.all(vect_id(s.arrays_rep.g[i, j, k]) == id(s.arrays_rep.g[0, 0, 0][0])))
assert_(np.all(vect_id(s.arrays_rep.h[i, j, k]) == id(s.arrays_rep.h[0, 0, 0][0])))
class TestTags:
'''Test that sav files with description tag read at all'''
def test_description(self):
s = readsav(path.join(DATA_PATH, 'scalar_byte_descr.sav'), verbose=False)
assert_identical(s.i8u, np.uint8(234))
def test_null_pointer():
# Regression test for null pointers.
s = readsav(path.join(DATA_PATH, 'null_pointer.sav'), verbose=False)
assert_identical(s.point, None)
assert_identical(s.check, np.int16(5))
def test_invalid_pointer():
# Regression test for invalid pointers (gh-4613).
# In some files in the wild, pointers can sometimes refer to a heap
# variable that does not exist. In that case, we now gracefully fail for
# that variable and replace the variable with None and emit a warning.
# Since it's difficult to artificially produce such files, the file used
# here has been edited to force the pointer reference to be invalid.
with warnings.catch_warnings(record=True) as w:
s = readsav(path.join(DATA_PATH, 'invalid_pointer.sav'), verbose=False)
assert_(len(w) == 1)
assert_(str(w[0].message) == ("Variable referenced by pointer not found in "
"heap: variable will be set to None"))
assert_identical(s['a'], np.array([None, None]))
if __name__ == "__main__":
run_module_suite()
```
avg_line_length: 44.175676 | max_line_length: 105 | alphanum_fraction: 0.632456
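All of the tests in the record above funnel through `scipy.io.idl.readsav` (also exported as `scipy.io.readsav`). A minimal reading session looks like this sketch; `example.sav` is a placeholder filename:

```python
# Minimal sketch of reading an IDL .sav file; 'example.sav' is a placeholder.
from scipy.io import readsav

s = readsav("example.sav", verbose=False)  # dict-like: variable name -> NumPy value
for name, value in s.items():
    print(name, type(value))
```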
---
hexsha: 84178a0ece7f70b8a176b3ad1fcbafd91f457b19 | size: 422 | ext: py | lang: Python
max_stars: kubails/conftest.py | DevinSit/kubails @ b3b2f9487d815868f0fbe9fae649789a40b50ad8 | ["MIT"] | stars: 2 (2019-05-28T00:26:52.000Z to 2019-08-02T23:02:19.000Z)
max_issues: kubails/conftest.py | DevinSit/kubails @ b3b2f9487d815868f0fbe9fae649789a40b50ad8 | ["MIT"] | issues: 51 (2019-12-23T04:34:40.000Z to 2022-02-12T02:28:44.000Z)
max_forks: kubails/conftest.py | DevinSit/kubails @ b3b2f9487d815868f0fbe9fae649789a40b50ad8 | ["MIT"] | forks: 1 (2019-09-11T20:12:18.000Z to 2019-09-11T20:12:18.000Z)
content:
```python
"""
The first-run configuration file for PyTest. PyTest runs the code in this file before any tests.
So far, this is just used to set a flag so that code can check whether or not it's running in a test
(used to disable the file logger when testing).
For more information about conftest.py, see https://docs.pytest.org/en/2.7.3/plugins.html.
"""
import sys
def pytest_configure(config):
sys._called_from_test = True
```
avg_line_length: 32.461538 | max_line_length: 100 | alphanum_fraction: 0.755924
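The `conftest.py` above only sets the flag; the consuming side lives elsewhere in the project. A sketch of what that check plausibly looks like, assuming the same `sys._called_from_test` attribute (the logger name and file path are placeholders):

```python
# Sketch of the consuming side: skip the file logger while pytest is running.
import sys
import logging

def make_logger() -> logging.Logger:
    logger = logging.getLogger("kubails")  # name is an illustrative assumption
    if not getattr(sys, "_called_from_test", False):
        logger.addHandler(logging.FileHandler("kubails.log"))  # placeholder path
    return logger
```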
---
hexsha: ebbec2b31f51b42977f815252637c6fdbe6791a0 | size: 539 | ext: py | lang: Python
max_stars: src/the_pitch/indicators/ema.py | dpasse/the_pitch @ 8b36cf9a4c798fc22feda55d23ed06218be30965 | ["MIT"] | stars: null
max_issues: src/the_pitch/indicators/ema.py | dpasse/the_pitch @ 8b36cf9a4c798fc22feda55d23ed06218be30965 | ["MIT"] | issues: null
max_forks: src/the_pitch/indicators/ema.py | dpasse/the_pitch @ 8b36cf9a4c798fc22feda55d23ed06218be30965 | ["MIT"] | forks: null
content:
```python
import pandas as pd
from .abstract_indicator import AbstractIndicator
from ..converters import utils
class EMA(AbstractIndicator):
def __init__(self, column: str = 'close', period: int = 20):
super().__init__(f'ema_{column}_{period}')
self.column = column
self.period = period
def compute(self, df: pd.DataFrame, **kwargs) -> pd.DataFrame:
df[self.name] = df[self.column].transform(
lambda x: x.ewm(span=self.period).mean()
).map(utils.decimal_from_float)
return df
```
avg_line_length: 28.368421 | max_line_length: 66 | alphanum_fraction: 0.653061
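Stripped of the project's `AbstractIndicator` base class and `decimal_from_float` conversion, the `compute` method in the record above reduces to pandas' exponentially weighted mean. A self-contained sketch of that core calculation (the sample data is illustrative):

```python
# Core of the EMA indicator: pandas' exponentially weighted mean over a column.
import pandas as pd

df = pd.DataFrame({"close": [10.0, 10.5, 11.0, 10.8, 11.2]})
period = 3
df[f"ema_close_{period}"] = df["close"].ewm(span=period).mean()
print(df)
```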
---
hexsha: 3f464d4ec6ce8060b10d6baec0d84a36a683ccf0 | size: 590 | ext: py | lang: Python
max_stars: experiment_scripts/read_pickle.py | anilesec/attention-op @ 7a20650e3ae42318cdde6be138b020f7f161bfce | ["MIT"] | stars: null
max_issues: experiment_scripts/read_pickle.py | anilesec/attention-op @ 7a20650e3ae42318cdde6be138b020f7f161bfce | ["MIT"] | issues: null
max_forks: experiment_scripts/read_pickle.py | anilesec/attention-op @ 7a20650e3ae42318cdde6be138b020f7f161bfce | ["MIT"] | forks: null
content:
```python
import pickle
import numpy as np
objects = []
with open("./results/tsp/tsp100_test_seed1234/tsp100_test_seed1234-pretrained_tsp_100-greedy-t1-0-10000.pkl", "rb") as openfile:
while True:
try:
objects.append(pickle.load(openfile))
except EOFError:
break
#print(isinstance(objects, list))
#print(type(objects))
#print(objects[0])
temp = []
for x in objects[0][0]:
temp.append(x[0])
# print(x[0])
print(np.mean(np.array(temp)))
#for t in len(temp):
# print(t)
#for t in range(len(objects)):
# if t < 5:
# print(objects[t])
```
avg_line_length: 21.071429 | max_line_length: 129 | alphanum_fraction: 0.632203
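A single pickle file can hold several objects back to back, which is why the script above loops on `pickle.load` until `EOFError`. The same idiom reads naturally as a generator; a sketch (the path is a placeholder):

```python
# Generator form of the read-until-EOFError idiom used in read_pickle.py.
import pickle

def iter_pickles(path):
    with open(path, "rb") as f:
        while True:
            try:
                yield pickle.load(f)
            except EOFError:
                return

# objects = list(iter_pickles("results.pkl"))  # placeholder path
```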
---
hexsha: 372421a4cd7755ff3c4ed104baf281d5a6f29ede | size: 4,969 | ext: py | lang: Python
max_stars: Polynomials.py | DatHydroGuy/QRCode @ 3145ad7b5b64bb5561c772c1b4d8875f9858dc37 | ["MIT"] | stars: null
max_issues: Polynomials.py | DatHydroGuy/QRCode @ 3145ad7b5b64bb5561c772c1b4d8875f9858dc37 | ["MIT"] | issues: null
max_forks: Polynomials.py | DatHydroGuy/QRCode @ 3145ad7b5b64bb5561c772c1b4d8875f9858dc37 | ["MIT"] | forks: null
content:
```python
class Polynomials:
"""
All calculations must be performed within Galois field GF(256).
Polynomials are represented in alpha notation.
    For example: x^10 + (α^251)x^9 + (α^67)x^8 + (α^46)x^7 + (α^61)x^6 + (α^118)x^5 + (α^70)x^4 + (α^64)x^3 + (α^94)x^2 + (α^32)x + α^45
    where α represents the number 2 in GF(256)
"""
exponents = [1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152, 45, 90, 180, 117, 234, 201,
143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78, 156, 37, 74, 148, 53, 106, 212, 181, 119, 238, 193, 159,
35, 70, 140, 5, 10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111, 222, 161, 95, 190, 97, 194, 153, 47,
94, 188, 101, 202, 137, 15, 30, 60, 120, 240, 253, 231, 211, 187, 107, 214, 177, 127, 254, 225, 223,
163, 91, 182, 113, 226, 217, 175, 67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103, 206, 129,
31, 62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133, 23, 46, 92, 184, 109, 218,
169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77, 154, 41, 82, 164, 85, 170, 73, 146, 57, 114, 228, 213,
183, 115, 230, 209, 191, 99, 198, 145, 63, 126, 252, 229, 215, 179, 123, 246, 241, 255, 227, 219, 171,
75, 150, 49, 98, 196, 149, 55, 110, 220, 165, 87, 174, 65, 130, 25, 50, 100, 200, 141, 7, 14, 28, 56,
112, 224, 221, 167, 83, 166, 81, 162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9,
18, 36, 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22, 44, 88, 176, 125, 250, 233,
207, 131, 27, 54, 108, 216, 173, 71, 142, 1]
@staticmethod
def fix_exponent(exponent):
return (exponent % 256) + (exponent // 256)
def add_factors(self, factor1, factor2):
val1 = self.exponents[factor1]
val2 = self.exponents[factor2]
new_val = val1 ^ val2
return self.exponents.index(new_val)
def generator(self, number_of_terms, in_alpha_format=True):
alpha_polynomial = [0, 0] # our initial representation of αx + α
for i in range(number_of_terms):
high = []
low = []
new_poly = [0, i + 1]
for j in alpha_polynomial:
new_high = self.fix_exponent(j + new_poly[0]) if j + new_poly[0] > 255 else j + new_poly[0]
high.append(new_high)
new_low = self.fix_exponent(j + new_poly[1]) if j + new_poly[1] > 255 else j + new_poly[1]
low.append(new_low)
alpha_polynomial = [high[0]] + [self.add_factors(low[k], high[k + 1]) for k in range(len(high) - 1)] +\
[low[-1]]
return alpha_polynomial if in_alpha_format else self.convert_alpha_to_polynomial(alpha_polynomial)
def convert_value_to_alpha(self, value):
return self.exponents.index(value)
def convert_alpha_to_value(self, value):
return self.exponents[value]
def convert_polynomial_to_alpha(self, polynomial):
return [self.convert_value_to_alpha(i) for i in polynomial]
def convert_alpha_to_polynomial(self, alpha_polynomial):
return [self.convert_alpha_to_value(i) for i in alpha_polynomial]
def divide_polynomials(self, dividend, divisor):
num_divisor_poly_terms = len(divisor)
num_dividend_poly_terms = len(dividend)
result = [x for x in dividend] + [0 for _ in range(num_divisor_poly_terms - 1)]
divisor_alpha = self.convert_polynomial_to_alpha(divisor)
multiply_poly = [x for x in divisor_alpha] + [0 for _ in range(num_dividend_poly_terms - 1)]
xor_poly = [x for x in result]
i = 0
while i < num_dividend_poly_terms:
# multiply divisor polynomial by lead term of the dividend polynomial
dividend_lead_term = self.convert_value_to_alpha(result[0])
for i2 in range(num_divisor_poly_terms):
multiply_poly[i2] = (divisor_alpha[i2] + dividend_lead_term) % 255
multiply_poly[i2] = self.convert_alpha_to_value(multiply_poly[i2])
result = [multiply_poly[i] ^ msg for i, msg in enumerate(xor_poly)]
result = result[1:] + [0]
while result[0] == 0:
result = result[1:] + [0] # a leading zero means we can divide again
i += 1
xor_poly = [x for x in result]
i += 1
result = result[:(num_divisor_poly_terms - 1)]
return result
#
# if __name__ == '__main__':
# poly = Polynomials()
# divid = [32, 91, 11, 120, 209, 114, 220, 77, 67, 64, 236, 17, 236, 17, 236, 17]
# divis = [1, 216, 194, 159, 111, 199, 94, 95, 113, 157, 193]
# a = poly.divide_polynomials(divid, divis)
# print(divid)
# print(divis)
# print(a)
```
avg_line_length: 52.861702 | max_line_length: 119 | alphanum_fraction: 0.565104
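All arithmetic in `Polynomials` runs over GF(256) with generator α = 2: the `exponents` table maps α^i to its byte value, addition is XOR, and multiplication adds exponents modulo 255. A small self-contained check of that multiplication rule, rebuilding the antilog table from the QR reduction polynomial 0x11D (independent of the class above):

```python
# GF(256) multiplication via log/antilog tables, the rule Polynomials relies on.
exp = [0] * 255
x = 1
for i in range(255):
    exp[i] = x
    x <<= 1
    if x & 0x100:      # reduce modulo x^8 + x^4 + x^3 + x^2 + 1 (0x11D)
        x ^= 0x11D
log = {v: i for i, v in enumerate(exp)}

def gf_mul(a: int, b: int) -> int:
    if a == 0 or b == 0:
        return 0
    return exp[(log[a] + log[b]) % 255]

assert exp[8] == 29          # matches exponents[8] in the class above
assert gf_mul(16, 16) == 29  # α^4 * α^4 = α^8
```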
---
hexsha: a2d46520e252237a11d580d6876fd50c3c16c705 | size: 2,684 | ext: py | lang: Python
max_stars: kobart/pytorch_kobart.py | swoook/KoBART-cloned @ 7d0812c8e94ded728b329070105940cbd59ab115 | ["MIT"] | stars: null
max_issues: kobart/pytorch_kobart.py | swoook/KoBART-cloned @ 7d0812c8e94ded728b329070105940cbd59ab115 | ["MIT"] | issues: null
max_forks: kobart/pytorch_kobart.py | swoook/KoBART-cloned @ 7d0812c8e94ded728b329070105940cbd59ab115 | ["MIT"] | forks: null
content:
```python
# coding=utf-8
# Modified MIT License
# Software Copyright (c) 2020 SK telecom
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# The above copyright notice and this permission notice need not be included
# with content created by the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
import os
import shutil
from zipfile import ZipFile
from kobart.utils import download as _download
pytorch_kobart = {
"url": "https://kobert.blob.core.windows.net/models/kobart/kobart_base_cased_ff4bda5738.zip",
"fname": "kobart_base_cased_ff4bda5738.zip",
"chksum": "ff4bda5738",
}
def get_pytorch_kobart_model(ctx="cpu", cachedir=".cache"):
# download model
global pytorch_kobart
model_info = pytorch_kobart
model_zip, is_cached = _download(
model_info["url"], model_info["fname"], model_info["chksum"], cachedir=cachedir
)
cachedir_full = os.path.expanduser(cachedir)
model_path = os.path.join(cachedir_full, "kobart_from_pretrained")
if not os.path.exists(model_path) or not is_cached:
if not is_cached:
shutil.rmtree(model_path, ignore_errors=True)
zipf = ZipFile(os.path.expanduser(model_zip))
zipf.extractall(path=cachedir_full)
return model_path
if __name__ == "__main__":
# pip install git+https://github.com/SKT-AI/KoBART#egg=kobart
from transformers import BartModel
from kobart import get_pytorch_kobart_model, get_kobart_tokenizer
kobart_tokenizer = get_kobart_tokenizer()
print(kobart_tokenizer.tokenize("안녕하세요. 한국어 BART 입니다.🤣:)l^o"))
model = BartModel.from_pretrained(get_pytorch_kobart_model())
inputs = kobart_tokenizer(["안녕하세요."], return_tensors="pt")
print(model(inputs["input_ids"]))
```
avg_line_length: 41.292308 | max_line_length: 106 | alphanum_fraction: 0.753353
---
hexsha: d6a830bcd4955a2908f85722e9da8e7db1d12076 | size: 9,581 | ext: py | lang: Python
max_stars: Examples/TestZMQ.py | robmakie/dash.py @ 5b0e4515be96224e0b34c1126eee9b17789d38d1 | ["MIT"] | stars: null
max_issues: Examples/TestZMQ.py | robmakie/dash.py @ 5b0e4515be96224e0b34c1126eee9b17789d38d1 | ["MIT"] | issues: null
max_forks: Examples/TestZMQ.py | robmakie/dash.py @ 5b0e4515be96224e0b34c1126eee9b17789d38d1 | ["MIT"] | forks: null
content:
```python
#!/bin/python3
import time
import random
import argparse
import signal
import dashio
import logging
import platform
class TestControls:
def signal_cntrl_c(self, os_signal, os_frame):
self.shutdown = True
def init_logging(self, logfilename, level):
log_level = logging.WARN
if level == 1:
log_level = logging.INFO
elif level == 2:
log_level = logging.DEBUG
if not logfilename:
formatter = logging.Formatter("%(asctime)s, %(message)s")
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger = logging.getLogger()
logger.addHandler(handler)
logger.setLevel(log_level)
else:
logging.basicConfig(
filename=logfilename,
level=log_level,
format="%(asctime)s, %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
logging.info("==== Started ====")
def parse_commandline_arguments(self):
parser = argparse.ArgumentParser()
parser.add_argument(
"-v",
"--verbose",
const=1,
default=1,
type=int,
nargs="?",
help="""increase verbosity:
0 = only warnings, 1 = info, 2 = debug.
No number means info. Default is no verbosity.""",
)
parser.add_argument("-u", "--url", help="Host URL.", dest="url", default="tcp://*")
parser.add_argument(
"-c", "--connection_name", dest="connection", default="TestTCP", help="IotDashboard Connection name"
)
parser.add_argument("-d", "--device_id", dest="device_id", default="00001", help="IotDashboard Device ID.")
parser.add_argument("-s", "--sub_port", dest="sub_port", type=int, default=5556, help="Port number")
parser.add_argument("-p", "--pub_port", dest="pub_port", type=int, default=5555, help="Port number")
parser.add_argument(
"-n", "--device_name", dest="device_name", default="TCPTest", help="Alias name for device."
)
parser.add_argument("-l", "--logfile", dest="logfilename", default="", help="logfile location", metavar="FILE")
args = parser.parse_args()
return args
def up_btn_event_handler(self, msg):
if self.sldr_cntrl.bar1_value < self.sldr_cntrl.max:
self.sldr_cntrl.bar1_value += 1
self.sldr_dbl_cntrl.bar1_value += 1
def down_btn_event_handler(self, msg):
if self.sldr_cntrl.bar1_value > self.sldr_cntrl.min:
self.sldr_cntrl.bar1_value -= 1
self.sldr_dbl_cntrl.bar1_value -= 1
def slider_event_handler(self, msg):
self.sldr_cntrl.slider_value = float(msg[0])
self.knb_control.knob_dial_value = float(msg[0])
def slider_dbl_event_handler(self, msg):
self.sldr_dbl_cntrl.slider_value = float(msg[0])
self.selector_ctrl.position = int(float(msg[0]))
def knob_event_handler(self, msg):
self.knb_control.knob_value = float(msg[0])
self.dl_control.dial_value = float(msg[0])
self.sldr_dbl_cntrl.bar2_value = float(msg[0])
def text_cntrl_message_handler(self, msg):
self.device.send_popup_message("TCPTest", "Text Box message", msg[0])
self.text_cntrl.text = "Popup sent: " + msg[0]
logging.info(msg)
def selector_ctrl_handler(self, msg):
print(self.selector_ctrl.selection_list[int(msg[0])])
def __init__(self):
        # Catch CTRL-C signal
signal.signal(signal.SIGINT, self.signal_cntrl_c)
self.shutdown = False
args = self.parse_commandline_arguments()
self.init_logging(args.logfilename, args.verbose)
logging.info(" Serving on: ZMQ")
logging.info("Connection ID: %s", args.connection)
logging.info(" Device ID: %s", args.device_id)
logging.info(" Device Name: %s", args.device_name)
self.device = dashio.dashDevice(args.connection, args.device_id, args.device_name)
time.sleep(1)
self.zmq_con = dashio.zmqConnection(pub_port=args.pub_port, sub_port=args.sub_port)
time.sleep(1)
self.zmq_con.add_device(self.device)
self.connection = args.connection
self.page_name = "TestZMQ: " + platform.node()
self.page_test = dashio.Page("TestZMQ", self.page_name, 1)
self.up_btn = dashio.Button("UP_BTN", control_position=dashio.ControlPosition(0.02, 0.01, 0.22, 0.12))
self.up_btn.btn_state = dashio.ButtonState.OFF
self.up_btn.icon_name = dashio.Icon.UP
self.up_btn.on_color = dashio.Color.GREEN
self.up_btn.text = ""
self.up_btn.text_color = dashio.Color.WHITE
self.up_btn.title = "Up"
self.up_btn.message_rx_event += self.up_btn_event_handler
self.page_test.add_control(self.up_btn)
self.down_btn = dashio.Button(
"DOWN_BTN", control_position=dashio.ControlPosition(0.02, 0.78, 0.22, 0.12)
)
self.down_btn.btn_state = dashio.ButtonState.OFF
self.down_btn.icon_name = dashio.Icon.DOWN
self.down_btn.on_color = dashio.Color.GREEN
self.down_btn.text = ""
self.down_btn.text_color = dashio.Color.WHITE
self.down_btn.title = "Down"
self.down_btn.message_rx_event += self.down_btn_event_handler
self.page_test.add_control(self.down_btn)
self.sldr_cntrl = dashio.SliderSingleBar(
"SLDR", control_position=dashio.ControlPosition(0.02, 0.13, 0.22, 0.65)
)
self.sldr_cntrl.title = "Slider"
self.sldr_cntrl.max = 10
self.sldr_cntrl.slider_enabled = True
        self.sldr_cntrl.red_value  # note: bare attribute access, no value is assigned
self.sldr_cntrl.message_rx_event += self.slider_event_handler
self.page_test.add_control(self.sldr_cntrl)
self.sldr_dbl_cntrl = dashio.SliderDoubleBar(
"SLDR_DBL", control_position=dashio.ControlPosition(0.78, 0.01, 0.2, 0.89)
)
self.sldr_dbl_cntrl.title = "Slider Double"
self.sldr_dbl_cntrl.max = 5
self.sldr_dbl_cntrl.slider_enabled = True
        self.sldr_dbl_cntrl.red_value  # note: bare attribute access, no value is assigned
self.sldr_dbl_cntrl.message_rx_event += self.slider_dbl_event_handler
self.page_test.add_control(self.sldr_dbl_cntrl)
self.knb_control = dashio.Knob("KNB", control_position=dashio.ControlPosition(0.24, 0.14, 0.54, 0.21))
self.knb_control.title = "A Knob"
self.knb_control.max = 10
self.knb_control.red_value = 10
self.knb_control.message_rx_event += self.knob_event_handler
self.page_test.add_control(self.knb_control)
self.dl_control = dashio.Dial("DIAL1", control_position=dashio.ControlPosition(0.24, 0.57, 0.54, 0.21))
self.dl_control.title = "A Dial"
self.dl_control.max = 10
self.page_test.add_control(self.dl_control)
self.text_cntrl = dashio.TextBox(
"TXT1", control_position=dashio.ControlPosition(0.24, 0.78, 0.54, 0.12)
)
self.text_cntrl.text = "Hello"
self.text_cntrl.title = "A text control"
self.text_cntrl.keyboard_type = dashio.Keyboard.ALL_CHARS
self.text_cntrl.close_key_board_on_send = True
self.text_cntrl.message_rx_event += self.text_cntrl_message_handler
self.page_test.add_control(self.text_cntrl)
self.alarm_ctrl = dashio.Alarm("TestingAlarms", "Test Alarms", "Hello", "Test of Shared Alarms")
self.device.add_control(self.alarm_ctrl)
self.comp_control = dashio.Compass("COMP1", control_position=dashio.ControlPosition(0.24, 0.35, 0.54, 0.22))
self.comp_control.title = "A compass"
self.page_test.add_control(self.comp_control)
self.selector_ctrl = dashio.Selector(
"TestSelector", "A Selector", control_position=dashio.ControlPosition(0.24, 0.01, 0.54, 0.13)
)
self.selector_ctrl.message_rx_event += self.selector_ctrl_handler
self.selector_ctrl.add_selection("First")
self.selector_ctrl.add_selection("Second")
self.selector_ctrl.add_selection("Third")
self.selector_ctrl.add_selection("Forth")
self.selector_ctrl.add_selection("Fifth")
self.page_test.add_control(self.selector_ctrl)
self.label_ctrl = dashio.Label(
"LabelID",
"A label",
style=dashio.LabelStyle.GROUP,
color=dashio.Color.BLUE,
control_position=dashio.ControlPosition(0.0, 0.0, 1.0, 0.93),
)
self.page_test.add_control(self.label_ctrl)
self.device.add_control(self.label_ctrl)
self.device.add_control(self.page_test)
self.device.add_control(self.selector_ctrl)
self.device.add_control(self.comp_control)
self.device.add_control(self.text_cntrl)
self.device.add_control(self.dl_control)
self.device.add_control(self.knb_control)
self.device.add_control(self.sldr_dbl_cntrl)
self.device.add_control(self.sldr_cntrl)
self.device.add_control(self.down_btn)
self.device.add_control(self.up_btn)
while not self.shutdown:
time.sleep(5)
self.comp_control.direction_value = random.random() * 360
self.device.send_popup_message("TestControls", "Shutting down", "Goodbye")
time.sleep(1)
self.device.close()
self.zmq_con.close()
def main():
tc = TestControls()
if __name__ == "__main__":
main()
```
avg_line_length: 40.256303 | max_line_length: 119 | alphanum_fraction: 0.640539
---
hexsha: 94682429f2262ba13928af9a5ccf26bb6339b548 | size: 23,215 | ext: py | lang: Python
max_stars: tests/test_ssm/test_ssm_boto3.py | Joeskyyy/moto @ 46bf7f01767bdc745067dbd8c19f68758b5f7ef0 | ["Apache-2.0"] | stars: null
max_issues: tests/test_ssm/test_ssm_boto3.py | Joeskyyy/moto @ 46bf7f01767bdc745067dbd8c19f68758b5f7ef0 | ["Apache-2.0"] | issues: null
max_forks: tests/test_ssm/test_ssm_boto3.py | Joeskyyy/moto @ 46bf7f01767bdc745067dbd8c19f68758b5f7ef0 | ["Apache-2.0"] | forks: null
content:
```python
from __future__ import unicode_literals
import boto3
import botocore.exceptions
import sure # noqa
import datetime
import uuid
import json
from botocore.exceptions import ClientError
from nose.tools import assert_raises
from moto import mock_ssm, mock_cloudformation
@mock_ssm
def test_delete_parameter():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='String')
response = client.get_parameters(Names=['test'])
len(response['Parameters']).should.equal(1)
client.delete_parameter(Name='test')
response = client.get_parameters(Names=['test'])
len(response['Parameters']).should.equal(0)
@mock_ssm
def test_delete_parameters():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='String')
response = client.get_parameters(Names=['test'])
len(response['Parameters']).should.equal(1)
result = client.delete_parameters(Names=['test', 'invalid'])
len(result['DeletedParameters']).should.equal(1)
len(result['InvalidParameters']).should.equal(1)
response = client.get_parameters(Names=['test'])
len(response['Parameters']).should.equal(0)
@mock_ssm
def test_get_parameters_by_path():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='/foo/name1',
Description='A test parameter',
Value='value1',
Type='String')
client.put_parameter(
Name='/foo/name2',
Description='A test parameter',
Value='value2',
Type='String')
client.put_parameter(
Name='/bar/name3',
Description='A test parameter',
Value='value3',
Type='String')
client.put_parameter(
Name='/bar/name3/name4',
Description='A test parameter',
Value='value4',
Type='String')
client.put_parameter(
Name='/baz/name1',
Description='A test parameter (list)',
Value='value1,value2,value3',
Type='StringList')
client.put_parameter(
Name='/baz/name2',
Description='A test parameter',
Value='value1',
Type='String')
client.put_parameter(
Name='/baz/pwd',
Description='A secure test parameter',
Value='my_secret',
Type='SecureString',
KeyId='alias/aws/ssm')
client.put_parameter(
Name='foo',
Description='A test parameter',
Value='bar',
Type='String')
client.put_parameter(
Name='baz',
Description='A test parameter',
Value='qux',
Type='String')
response = client.get_parameters_by_path(Path='/', Recursive=False)
len(response['Parameters']).should.equal(2)
{p['Value'] for p in response['Parameters']}.should.equal(
set(['bar', 'qux'])
)
response = client.get_parameters_by_path(Path='/', Recursive=True)
len(response['Parameters']).should.equal(9)
response = client.get_parameters_by_path(Path='/foo')
len(response['Parameters']).should.equal(2)
{p['Value'] for p in response['Parameters']}.should.equal(
set(['value1', 'value2'])
)
response = client.get_parameters_by_path(Path='/bar', Recursive=False)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Value'].should.equal('value3')
response = client.get_parameters_by_path(Path='/bar', Recursive=True)
len(response['Parameters']).should.equal(2)
{p['Value'] for p in response['Parameters']}.should.equal(
set(['value3', 'value4'])
)
response = client.get_parameters_by_path(Path='/baz')
len(response['Parameters']).should.equal(3)
filters = [{
'Key': 'Type',
'Option': 'Equals',
'Values': ['StringList'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(1)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/name1'])
)
# note: 'Option' is optional (default: 'Equals')
filters = [{
'Key': 'Type',
'Values': ['StringList'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(1)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/name1'])
)
filters = [{
'Key': 'Type',
'Option': 'Equals',
'Values': ['String'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(1)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/name2'])
)
filters = [{
'Key': 'Type',
'Option': 'Equals',
'Values': ['String', 'SecureString'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(2)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/name2', '/baz/pwd'])
)
filters = [{
'Key': 'Type',
'Option': 'BeginsWith',
'Values': ['String'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(2)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/name1', '/baz/name2'])
)
filters = [{
'Key': 'KeyId',
'Option': 'Equals',
'Values': ['alias/aws/ssm'],
}]
response = client.get_parameters_by_path(Path='/baz', ParameterFilters=filters)
len(response['Parameters']).should.equal(1)
{p['Name'] for p in response['Parameters']}.should.equal(
set(['/baz/pwd'])
)
@mock_ssm
def test_put_parameter():
client = boto3.client('ssm', region_name='us-east-1')
response = client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='String')
response['Version'].should.equal(1)
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=False)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('value')
response['Parameters'][0]['Type'].should.equal('String')
response['Parameters'][0]['Version'].should.equal(1)
try:
client.put_parameter(
Name='test',
Description='desc 2',
Value='value 2',
Type='String')
raise RuntimeError('Should fail')
except botocore.exceptions.ClientError as err:
err.operation_name.should.equal('PutParameter')
err.response['Error']['Message'].should.equal('Parameter test already exists.')
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=False)
# without overwrite nothing change
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('value')
response['Parameters'][0]['Type'].should.equal('String')
response['Parameters'][0]['Version'].should.equal(1)
response = client.put_parameter(
Name='test',
Description='desc 3',
Value='value 3',
Type='String',
Overwrite=True)
response['Version'].should.equal(2)
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=False)
# without overwrite nothing change
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('value 3')
response['Parameters'][0]['Type'].should.equal('String')
response['Parameters'][0]['Version'].should.equal(2)
@mock_ssm
def test_get_parameter():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='String')
response = client.get_parameter(
Name='test',
WithDecryption=False)
response['Parameter']['Name'].should.equal('test')
response['Parameter']['Value'].should.equal('value')
response['Parameter']['Type'].should.equal('String')
@mock_ssm
def test_get_nonexistant_parameter():
client = boto3.client('ssm', region_name='us-east-1')
try:
client.get_parameter(
Name='test_noexist',
WithDecryption=False)
        raise RuntimeError('Should have failed')
except botocore.exceptions.ClientError as err:
err.operation_name.should.equal('GetParameter')
err.response['Error']['Message'].should.equal('Parameter test_noexist not found.')
@mock_ssm
def test_describe_parameters():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='String',
AllowedPattern=r'.*')
response = client.describe_parameters()
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Type'].should.equal('String')
response['Parameters'][0]['AllowedPattern'].should.equal(r'.*')
@mock_ssm
def test_describe_parameters_paging():
client = boto3.client('ssm', region_name='us-east-1')
for i in range(50):
client.put_parameter(
Name="param-%d" % i,
Value="value-%d" % i,
Type="String"
)
response = client.describe_parameters()
len(response['Parameters']).should.equal(10)
response['NextToken'].should.equal('10')
response = client.describe_parameters(NextToken=response['NextToken'])
len(response['Parameters']).should.equal(10)
response['NextToken'].should.equal('20')
response = client.describe_parameters(NextToken=response['NextToken'])
len(response['Parameters']).should.equal(10)
response['NextToken'].should.equal('30')
response = client.describe_parameters(NextToken=response['NextToken'])
len(response['Parameters']).should.equal(10)
response['NextToken'].should.equal('40')
response = client.describe_parameters(NextToken=response['NextToken'])
len(response['Parameters']).should.equal(10)
response['NextToken'].should.equal('50')
response = client.describe_parameters(NextToken=response['NextToken'])
len(response['Parameters']).should.equal(0)
''.should.equal(response.get('NextToken', ''))
@mock_ssm
def test_describe_parameters_filter_names():
client = boto3.client('ssm', region_name='us-east-1')
for i in range(50):
p = {
'Name': "param-%d" % i,
'Value': "value-%d" % i,
'Type': "String"
}
if i % 5 == 0:
p['Type'] = 'SecureString'
p['KeyId'] = 'a key'
client.put_parameter(**p)
response = client.describe_parameters(Filters=[
{
'Key': 'Name',
'Values': ['param-22']
},
])
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('param-22')
response['Parameters'][0]['Type'].should.equal('String')
''.should.equal(response.get('NextToken', ''))
@mock_ssm
def test_describe_parameters_filter_type():
client = boto3.client('ssm', region_name='us-east-1')
for i in range(50):
p = {
'Name': "param-%d" % i,
'Value': "value-%d" % i,
'Type': "String"
}
if i % 5 == 0:
p['Type'] = 'SecureString'
p['KeyId'] = 'a key'
client.put_parameter(**p)
response = client.describe_parameters(Filters=[
{
'Key': 'Type',
'Values': ['SecureString']
},
])
len(response['Parameters']).should.equal(10)
response['Parameters'][0]['Type'].should.equal('SecureString')
'10'.should.equal(response.get('NextToken', ''))
@mock_ssm
def test_describe_parameters_filter_keyid():
client = boto3.client('ssm', region_name='us-east-1')
for i in range(50):
p = {
'Name': "param-%d" % i,
'Value': "value-%d" % i,
'Type': "String"
}
if i % 5 == 0:
p['Type'] = 'SecureString'
p['KeyId'] = "key:%d" % i
client.put_parameter(**p)
response = client.describe_parameters(Filters=[
{
'Key': 'KeyId',
'Values': ['key:10']
},
])
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('param-10')
response['Parameters'][0]['Type'].should.equal('SecureString')
''.should.equal(response.get('NextToken', ''))
@mock_ssm
def test_describe_parameters_attributes():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='aa',
Value='11',
Type='String',
Description='my description'
)
client.put_parameter(
Name='bb',
Value='22',
Type='String'
)
response = client.describe_parameters()
len(response['Parameters']).should.equal(2)
response['Parameters'][0]['Description'].should.equal('my description')
response['Parameters'][0]['Version'].should.equal(1)
response['Parameters'][0]['LastModifiedDate'].should.be.a(datetime.date)
response['Parameters'][0]['LastModifiedUser'].should.equal('N/A')
response['Parameters'][1].get('Description').should.be.none
response['Parameters'][1]['Version'].should.equal(1)
@mock_ssm
def test_get_parameter_invalid():
    client = boto3.client('ssm', region_name='us-east-1')
response = client.get_parameters(
Names=[
'invalid'
],
WithDecryption=False)
len(response['Parameters']).should.equal(0)
len(response['InvalidParameters']).should.equal(1)
response['InvalidParameters'][0].should.equal('invalid')
@mock_ssm
def test_put_parameter_secure_default_kms():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='SecureString')
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=False)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('kms:default:value')
response['Parameters'][0]['Type'].should.equal('SecureString')
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=True)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('value')
response['Parameters'][0]['Type'].should.equal('SecureString')
@mock_ssm
def test_put_parameter_secure_custom_kms():
client = boto3.client('ssm', region_name='us-east-1')
client.put_parameter(
Name='test',
Description='A test parameter',
Value='value',
Type='SecureString',
KeyId='foo')
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=False)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('kms:foo:value')
response['Parameters'][0]['Type'].should.equal('SecureString')
response = client.get_parameters(
Names=[
'test'
],
WithDecryption=True)
len(response['Parameters']).should.equal(1)
response['Parameters'][0]['Name'].should.equal('test')
response['Parameters'][0]['Value'].should.equal('value')
response['Parameters'][0]['Type'].should.equal('SecureString')
@mock_ssm
def test_add_remove_list_tags_for_resource():
client = boto3.client('ssm', region_name='us-east-1')
client.add_tags_to_resource(
ResourceId='test',
ResourceType='Parameter',
Tags=[{'Key': 'test-key', 'Value': 'test-value'}]
)
response = client.list_tags_for_resource(
ResourceId='test',
ResourceType='Parameter'
)
len(response['TagList']).should.equal(1)
response['TagList'][0]['Key'].should.equal('test-key')
response['TagList'][0]['Value'].should.equal('test-value')
client.remove_tags_from_resource(
ResourceId='test',
ResourceType='Parameter',
TagKeys=['test-key']
)
response = client.list_tags_for_resource(
ResourceId='test',
ResourceType='Parameter'
)
len(response['TagList']).should.equal(0)
@mock_ssm
def test_send_command():
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
client = boto3.client('ssm', region_name='us-east-1')
# note the timeout is determined server side, so this is a simpler check.
before = datetime.datetime.now()
response = client.send_command(
InstanceIds=['i-123456'],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref'
)
cmd = response['Command']
cmd['CommandId'].should_not.be(None)
cmd['DocumentName'].should.equal(ssm_document)
cmd['Parameters'].should.equal(params)
cmd['OutputS3Region'].should.equal('us-east-2')
cmd['OutputS3BucketName'].should.equal('the-bucket')
cmd['OutputS3KeyPrefix'].should.equal('pref')
cmd['ExpiresAfter'].should.be.greater_than(before)
# test sending a command without any optional parameters
response = client.send_command(
DocumentName=ssm_document)
cmd = response['Command']
cmd['CommandId'].should_not.be(None)
cmd['DocumentName'].should.equal(ssm_document)
@mock_ssm
def test_list_commands():
client = boto3.client('ssm', region_name='us-east-1')
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
response = client.send_command(
InstanceIds=['i-123456'],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref')
cmd = response['Command']
cmd_id = cmd['CommandId']
# get the command by id
response = client.list_commands(
CommandId=cmd_id)
cmds = response['Commands']
len(cmds).should.equal(1)
cmds[0]['CommandId'].should.equal(cmd_id)
# add another command with the same instance id to test listing by
# instance id
client.send_command(
InstanceIds=['i-123456'],
DocumentName=ssm_document)
response = client.list_commands(
InstanceId='i-123456')
cmds = response['Commands']
len(cmds).should.equal(2)
for cmd in cmds:
cmd['InstanceIds'].should.contain('i-123456')
# test the error case for an invalid command id
with assert_raises(ClientError):
response = client.list_commands(
CommandId=str(uuid.uuid4()))
@mock_ssm
def test_get_command_invocation():
client = boto3.client('ssm', region_name='us-east-1')
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
response = client.send_command(
InstanceIds=['i-123456', 'i-234567', 'i-345678'],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref')
cmd = response['Command']
cmd_id = cmd['CommandId']
instance_id = 'i-345678'
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_id,
PluginName='aws:runShellScript')
invocation_response['CommandId'].should.equal(cmd_id)
invocation_response['InstanceId'].should.equal(instance_id)
# test the error case for an invalid instance id
with assert_raises(ClientError):
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId='i-FAKE')
# test the error case for an invalid plugin name
with assert_raises(ClientError):
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_id,
PluginName='FAKE')
@mock_ssm
@mock_cloudformation
def test_get_command_invocations_from_stack():
stack_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "Test Stack",
"Resources": {
"EC2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "ami-test-image-id",
"KeyName": "test",
"InstanceType": "t2.micro",
"Tags": [
{
"Key": "Test Description",
"Value": "Test tag"
},
{
"Key": "Test Name",
"Value": "Name tag for tests"
}
]
}
}
},
"Outputs": {
"test": {
"Description": "Test Output",
"Value": "Test output value",
"Export": {
"Name": "Test value to export"
}
},
"PublicIP": {
"Value": "Test public ip"
}
}
}
cloudformation_client = boto3.client(
'cloudformation',
region_name='us-east-1')
stack_template_str = json.dumps(stack_template)
response = cloudformation_client.create_stack(
StackName='test_stack',
TemplateBody=stack_template_str,
Capabilities=('CAPABILITY_IAM', ))
client = boto3.client('ssm', region_name='us-east-1')
ssm_document = 'AWS-RunShellScript'
params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
response = client.send_command(
Targets=[{
'Key': 'tag:aws:cloudformation:stack-name',
'Values': ('test_stack', )}],
DocumentName=ssm_document,
Parameters=params,
OutputS3Region='us-east-2',
OutputS3BucketName='the-bucket',
OutputS3KeyPrefix='pref')
cmd = response['Command']
cmd_id = cmd['CommandId']
instance_ids = cmd['InstanceIds']
invocation_response = client.get_command_invocation(
CommandId=cmd_id,
InstanceId=instance_ids[0],
PluginName='aws:runShellScript')
| 29.423321
| 90
| 0.605643
|
5712bfd11bb2dd248b66269c69d910fa87d5bf2a
| 10,826
|
py
|
Python
|
cross3d/maya/mayasceneviewport.py
|
vedantirb/cross3d
|
e27c2dc80bf607ef7ccf3970b713bfea8211228b
|
[
"MIT"
] | 129
|
2016-07-26T16:00:56.000Z
|
2021-08-07T03:44:41.000Z
|
cross3d/maya/mayasceneviewport.py
|
vedantirb/cross3d
|
e27c2dc80bf607ef7ccf3970b713bfea8211228b
|
[
"MIT"
] | 3
|
2016-08-26T01:37:03.000Z
|
2019-08-13T01:46:01.000Z
|
cross3d/maya/mayasceneviewport.py
|
vedantirb/cross3d
|
e27c2dc80bf607ef7ccf3970b713bfea8211228b
|
[
"MIT"
] | 33
|
2016-07-30T00:26:00.000Z
|
2022-03-09T07:10:54.000Z
|
import glob
import os
from functools import partial
import maya.cmds as cmds
import maya.OpenMaya as om
import maya.OpenMayaUI as omUI
import cross3d
from cross3d import Exceptions, ExceptionRouter
from cross3d.classes import FrameRange
from cross3d.abstract.abstractsceneviewport import AbstractSceneViewport
#------------------------------------------------------------------------------------------------------------------------
class MayaSceneViewport(AbstractSceneViewport):
# From the Docs:
# To determine which settings are available on your system, use the `playblast -options`
# command. This will display a system-specific dialog with supported compression formats.
_validPlayblastFormats = ['gif', 'si', 'rla', 'tif', 'tifu', 'sgi', 'als', 'maya', 'jpg',
'eps', 'cin', 'yuv', 'tga', 'bmp', 'psd', 'png', 'dds', 'psdLayered', 'avi', 'mov']
def __init__( self, scene, viewportID=None ):
super(MayaSceneViewport, self).__init__(scene, viewportID)
		if viewportID is None:
self._nativePointer = omUI.M3dView.active3dView()
else:
self._nativePointer = omUI.M3dView()
omUI.M3dView.get3dView(viewportID, self._nativePointer)
self._name = cross3d.SceneWrapper._mObjName(self._nativeCamera())
#--------------------------------------------------------------------------------
# Private Methods
#--------------------------------------------------------------------------------
def _nativeCamera(self):
undocumentedPythonFunctionRequirement = om.MDagPath()
with ExceptionRouter():
self._nativePointer.getCamera(undocumentedPythonFunctionRequirement)
return undocumentedPythonFunctionRequirement.node()
def _setNativeCamera(self, nativeCamera):
nativeCamera = cross3d.SceneWrapper._asMOBject(nativeCamera)
with ExceptionRouter():
dagPath = om.MDagPath.getAPathTo(nativeCamera)
self._nativePointer.setCamera(dagPath)
# Ensure the viewport is refreshed
cross3d.application.refresh()
return True
#--------------------------------------------------------------------------------
# Public Methods
#--------------------------------------------------------------------------------
def cameraName(self):
""" Return the viewport's camera name """
return self.camera().path()
def createCamera(self, name='Camera', type='Standard'):
""" Creates a camera that matches that viewport. """
camera = self._scene.createCamera(name, type)
camera.matchCamera(self.camera())
# Default cameras are hidden. Force the camera visible
camera.setHidden(False)
return camera
def generatePlayblast(
self,
fileName,
frameRange=None,
resolution=None,
slate=None,
effects=True,
geometryOnly=True,
pathFormat=r'{basePath}\{fileName}.{frame}.{ext}'):
fileName, ext = os.path.splitext(fileName)
		# Make sure an invalid file format was not requested
if ext.replace('.', '').lower() not in self._validPlayblastFormats:
raise Exceptions.FileFormatNotSupported('The file format {ext} is not supported by Maya'.format(ext=ext))
playblastFormat = 'image'
compression = ext.replace('.', '')
quality = 100
if ext.lower() == '.mov':
playblastFormat = 'qt'
elif ext.lower() == '.avi':
playblastFormat = 'avi'
compression = None
if isinstance(frameRange, int):
frameRange = FrameRange([frameRange, frameRange])
if not frameRange:
frameRange = self._scene.animationRange()
# TODO: Make generating movies not require setting frame padding to 1
padding = 1
if not resolution:
resolution = self._scene.renderSize()
# TODO: Add support for these arguments
		if slate is not None:
# Note: this is probably how we can handle slate
#cmds.headsUpDisplay( 'blurBurnin', section=8, block=0, blockAlignment='right', dw=50, label='This is my burnin')
cross3d.logger.debug('slate is not implemented in Maya')
if pathFormat != r'{basePath}\{fileName}.{frame}.{ext}':
cross3d.logger.debug('pathFormat is not implemented in Maya')
# Prepare to detect if the playblast was canceled
formatter = '{fileName}.{frame:0%i}{ext}' % padding
lastFrameFileName = formatter.format(fileName=fileName, frame=frameRange[1], ext=ext)
try:
lastFrameStartTime = os.path.getmtime(lastFrameFileName)
except os.error:
lastFrameStartTime = 0
# to properly generate a playblast
# pass the width/height to the playblast command
# set the camera displayOptions
# set overscan to 1.0 and lock it
# uncheck all options
# set camera\Film Back
# Fit Resolution Gate to overscan
# set proper film aspect ratio?
# set the render resolution?
# MCH 10/16/14 NOTE: Info on parsing playblast Display Menu if we decide to add support for that later
#--------------------------------------------------------------------------------
#for i in cmds.optionVar(list=True):
# if i.startswith('playblastShow'):
# print cmds.optionVar(query=i), i
# # Set the value
# cmds.optionVar( intValue=(i, False)
# # Update the playblast menus
# maya.mel.eval('updatePlayblastPluginMenus()')
#--------------------------------------------------------------------------------
cam = self.camera()
name = cam.path()
overscanLocked = cmds.getAttr("{name}.overscan".format(name=cam.path()), lock=True)
if overscanLocked:
# unlock overscan if it is locked
cmds.setAttr("{name}.overscan".format(name=name), lock=False)
# create a StateLocker object to backup the current values before setting them
from blur3d.lib.statelockerlib import StateLocker
with StateLocker() as stateLocker:
			# Currently the state locker isn't the most convenient to use
def setPropertyLocker(obj, key, value):
stateLocker.setMethodArgs(obj, obj.setProperty, partial(obj.property, key), key, value)
# Set FilmBack.FitResolutionGate to Overscan
setPropertyLocker(cam, 'filmFit', 3)
# uncheck Display Film Gate
setPropertyLocker(cam, 'displayFilmGate', 0)
# uncheck Display Resolution
setPropertyLocker(cam, 'displayResolution', 0)
# Set overscan to 1.0
setPropertyLocker(cam, 'overscan', 1.0)
# Store and restore these settings using modelEditor
# The key is the property to query/edit, the value is the value used while playblasting
modelEditorOverrides = {'sel':False}
# Find the current viewport so we can apply the viewport settings
panel = cmds.getPanel(withFocus=True)
			# Check if a non-viewport panel is active
			if panel not in cmds.getPanel(type='modelPanel'):
panel = 'modelPanel4'
if geometryOnly:
modelEditorOverrides['nurbsSurfaces'] = True
modelEditorOverrides['polymeshes'] = True
modelEditorOverrides['subdivSurfaces'] = True
# HACK: This records the viewport show options, sets them to playblast options, then
# restores them
# TODO: Make this load the settings from the playblast overrides
attrs = ['nurbsCurves', 'nurbsSurfaces', 'cv', 'hulls', 'polymeshes',
'subdivSurfaces', 'planes', 'lights', 'cameras', 'imagePlane', 'joints',
'ikHandles', 'dynamics', 'deformers', 'fluids', 'hairSystems', 'follicles',
'nCloths', 'nParticles', 'nRigids', 'dynamicConstraints', 'locators',
'dimensions', 'pivots', 'handles', 'textures', 'strokes', 'motionTrails',
'pluginShapes', 'clipGhosts', 'greasePencils', 'manipulators', 'grid', 'hud']
				# Disable display of all of these options as long as modelEditorOverrides doesn't
				# already contain a setting key
updateDict = dict([(attr, False) for attr in attrs if attr not in modelEditorOverrides])
modelEditorOverrides.update(updateDict)
# New features in 2015
if cross3d.application.version() > 2014 and 'particleInstancers' not in modelEditorOverrides:
modelEditorOverrides.update(particleInstancers=False)
if effects == True:
modelEditorOverrides.update(displayTextures=True, displayLights='all')
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.ssaoEnable', 1)
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.motionBlurEnable', 1)
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.multiSampleEnable', True)
# TODO: Add Camera.setDeptOfField to cross3d
ntp = cam._nativeTypePointer
stateLocker.setMethod(ntp, ntp.setDepthOfField, ntp.isDepthOfField, True)
if effects == False:
modelEditorOverrides.update(displayTextures=False, displayLights='default')
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.ssaoEnable', 0)
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.motionBlurEnable', 0)
setPropertyLocker(self._scene, 'hardwareRenderingGlobals.multiSampleEnable', False)
# TODO: Add Camera.setDeptOfField to cross3d
ntp = cam._nativeTypePointer
stateLocker.setMethod(ntp, ntp.setDepthOfField, ntp.isDepthOfField, False)
# Store the current values
modelEditorStates = {}
			for option, value in modelEditorOverrides.items():
# Store the current value
modelEditorStates[option] = cmds.modelEditor(panel, query=True, **{option: True})
# Set the playblast value
cmds.modelEditor(panel, edit=True, **{option: value})
# # Uncomment this code to update the ui so you can see what options get disabled in the toolbar
# from PyQt4.QtGui import QApplication, QMessageBox
# QApplication.processEvents()
# QMessageBox.question(None, 'Temp', 'update')
# generate playblast
cmds.playblast(
width=resolution.width(),
height=resolution.height(),
startTime=frameRange.start(),
endTime=frameRange.end(),
percent=100,
filename=fileName,
showOrnaments=False,
format=playblastFormat,
compression=compression,
quality=quality,
framePadding=padding,
viewer=False)
# Restore the modelEditor options to their previous value
		for option, value in modelEditorStates.items():
cmds.modelEditor(panel, edit=True, **{option: value})
if overscanLocked:
# relock overscan
cmds.setAttr("{name}.overscan".format(name=name), lock=True)
		# No way to detect if an avi or quicktime was canceled
if ext.lower() in ('.mov', '.avi'):
return True
# If the capture was not completed we just return False.
try:
lastFrameEndTime = os.path.getmtime(lastFrameFileName)
if not lastFrameStartTime < lastFrameEndTime:
return False
except os.error:
return False
return True
def refresh(self):
self._nativePointer.refresh(False, True)
# register the symbol
cross3d.registerSymbol('SceneViewport', MayaSceneViewport)
| 41.478927
| 122
| 0.664234
|
64205aef13b3e88988f6d87e5edfd2b143f7f3cc
| 10,151
|
py
|
Python
|
neural_networks_tutorial.py
|
niumeng07/tutorials
|
4784b50e75424aa0289411912ac4f4c4e49a6cbc
|
[
"BSD-3-Clause"
] | 1
|
2019-10-28T07:37:04.000Z
|
2019-10-28T07:37:04.000Z
|
neural_networks_tutorial.py
|
niumeng07/tutorials
|
4784b50e75424aa0289411912ac4f4c4e49a6cbc
|
[
"BSD-3-Clause"
] | null | null | null |
neural_networks_tutorial.py
|
niumeng07/tutorials
|
4784b50e75424aa0289411912ac4f4c4e49a6cbc
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Neural Networks
===============
Neural networks can be constructed using the ``torch.nn`` package.
Now that you had a glimpse of ``autograd``, ``nn`` depends on
``autograd`` to define models and differentiate them.
An ``nn.Module`` contains layers, and a method ``forward(input)``\ that
returns the ``output``.
For example, look at this network that classifies digit images:
.. figure:: /_static/img/mnist.png
:alt: convnet
convnet
It is a simple feed-forward network. It takes the input, feeds it
through several layers one after the other, and then finally gives the
output.
A typical training procedure for a neural network is as follows:
- Define the neural network that has some learnable parameters (or
weights)
- Iterate over a dataset of inputs
- Process input through the network
- Compute the loss (how far is the output from being correct)
- Propagate gradients back into the network’s parameters
- Update the weights of the network, typically using a simple update rule:
``weight = weight - learning_rate * gradient``
Define the network
------------------
Let’s define this network:
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
def LOG(Name, *args):
    SEGMENT()
    print(Name, end=' ')
    for item in args:
        print(type(item), item, end=' ')
    print()  # terminate the line

def SEGMENT():
    print("-" * 50)
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
# 1 input image channel, 6 output channels, 5x5 square convolution
# kernel
self.conv1 = nn.Conv2d(1, 6, 5)
self.conv2 = nn.Conv2d(6, 16, 5)
# an affine operation: y = Wx + b
self.fc1 = nn.Linear(16 * 5 * 5, 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
def forward(self, x):
LOG(x.size()) #(1, 1, 32, 32)
# Max pooling over a (2, 2) window
x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
LOG(x.size()) #(1, 6, 14, 14)
# If the size is a square you can only specify a single number
x = F.max_pool2d(F.relu(self.conv2(x)), 2)
LOG("x.size", x.size()) # (1, 16, 5, 5)
x = x.view(-1, self.num_flat_features(x)) # 16 * 5 * 5
LOG("x.size", x.size()) # 1 * 400
x = F.relu(self.fc1(x))
LOG("x.size", x.size()) # 1 * 120
x = F.relu(self.fc2(x))
LOG("x.size", x.size()) # 1 * 84
x = self.fc3(x)
LOG("x.size", x.size()) # 1 * 10
return x
def num_flat_features(self, x):
size = x.size()[1:] # all dimensions except the batch dimension
num_features = 1
for s in size:
num_features *= s
        # print(num_features)  # 400
return num_features
net = Net()
print(net)
########################################################################
# You just have to define the ``forward`` function, and the ``backward``
# function (where gradients are computed) is automatically defined for you
# using ``autograd``.
# You can use any of the Tensor operations in the ``forward`` function.
#
# The learnable parameters of a model are returned by ``net.parameters()``
params = list(net.parameters())
#print(params)
print(len(params))
print(params[0].size()) # conv1's .weight
########################################################################
# Let's try a random 32x32 input.
# Note: expected input size of this net (LeNet) is 32x32. To use this net on
# MNIST dataset, please resize the images from the dataset to 32x32.
input = torch.randn(1, 1, 32, 32)
out = net(input)
print(type(out)) # <class 'torch.Tensor'>
print(out) # tensor([[ 0.0991, 0.0591, 0.0331, -0.0140, 0.0211, -0.1913, 0.0070, -0.0370, -0.0814, -0.1437]], grad_fn=<ThAddmmBackward>)
########################################################################
# Zero the gradient buffers of all parameters and backprops with random
# gradients:
net.zero_grad()  # clear / initialize the gradient buffers
out.backward(torch.randn(1, 10)) #backprops with random gradients
########################################################################
# .. note::
#
# ``torch.nn`` only supports mini-batches. The entire ``torch.nn``
# package only supports inputs that are a mini-batch of samples, and not
# a single sample.
#
# For example, ``nn.Conv2d`` will take in a 4D Tensor of
# ``nSamples x nChannels x Height x Width``.
#
# If you have a single sample, just use ``input.unsqueeze(0)`` to add
# a fake batch dimension.
#
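# (Added sketch for illustration; not part of the original tutorial text.)
# ``unsqueeze(0)`` turns a single ``nChannels x Height x Width`` sample into
# a ``1 x nChannels x Height x Width`` mini-batch that ``nn.Conv2d`` accepts:

single_sample = torch.randn(1, 32, 32)   # one sample: nChannels x Height x Width
mini_batch = single_sample.unsqueeze(0)  # add a fake batch dimension
print(mini_batch.size())                 # torch.Size([1, 1, 32, 32])

########################################################################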
# Before proceeding further, let's recap all the classes you’ve seen so far.
#
# **Recap:**
# - ``torch.Tensor`` - A *multi-dimensional array* with support for autograd
# operations like ``backward()``. Also *holds the gradient* w.r.t. the
# tensor.
# - ``nn.Module`` - Neural network module. *Convenient way of
# encapsulating parameters*, with helpers for moving them to GPU,
# exporting, loading, etc.
# - ``nn.Parameter`` - A kind of Tensor, that is *automatically
# registered as a parameter when assigned as an attribute to a*
# ``Module``.
# - ``autograd.Function`` - Implements *forward and backward definitions
# of an autograd operation*. Every ``Tensor`` operation creates at
# least a single ``Function`` node that connects to functions that
# created a ``Tensor`` and *encodes its history*.
#
# **At this point, we covered:**
# - Defining a neural network
# - Processing inputs and calling backward
#
# **Still Left:**
# - Computing the loss
# - Updating the weights of the network
#
# Loss Function
# -------------
# A loss function takes the (output, target) pair of inputs, and computes a
# value that estimates how far away the output is from the target.
#
# There are several different
# `loss functions <https://pytorch.org/docs/nn.html#loss-functions>`_ under the
# nn package .
# A simple loss is: ``nn.MSELoss`` which computes the mean-squared error
# between the input and the target.
#
# For example:
def LOG(Name, *args):
    SEGMENT()
    print(Name)
    for item in args:
        print(type(item), item)
    SEGMENT()

def SEGMENT():
    print("-" * 50)
LOG("input", input.size(), input) #input: (1, 1, 32, 32)
output = net(input)  # the value returned by forward
LOG("output", output.size(), output) # 1 * 10
target = torch.randn(10) # a dummy target, for example
LOG("target", target) # 1 * 10
target = target.view(1, -1) # make it the same shape as output
LOG("target", target) # 1 * 10
criterion = nn.MSELoss()
LOG("criterion", criterion) ## Loss Function
loss = criterion(output, target)
LOG("loss", loss)
########################################################################
# Now, if you follow ``loss`` in the backward direction, using its
# ``.grad_fn`` attribute, you will see a graph of computations that looks
# like this:
#
# ::
#
# input -> conv2d -> relu -> maxpool2d -> conv2d -> relu -> maxpool2d
# -> view -> linear -> relu -> linear -> relu -> linear
# -> MSELoss
# -> loss
#
# So, when we call ``loss.backward()``, the whole graph is differentiated
# w.r.t. the loss, and all Tensors in the graph that has ``requires_grad=True``
# will have their ``.grad`` Tensor accumulated with the gradient.
#
# For illustration, let us follow a few steps backward:
LOG("loss.grad_fn", loss.grad_fn) # MSELoss
LOG("loss.grad_fn.next_functions[0][0]", loss.grad_fn.next_functions[0][0]) # Linear
LOG("loss.grad_fn.next_functions[0][0].next_functions[0][0]", loss.grad_fn.next_functions[0][0].next_functions[0][0]) # ReLU
########################################################################
# Backprop
# --------
# To backpropagate the error all we have to do is to ``loss.backward()``.
# You need to clear the existing gradients though, else gradients will be
# accumulated to existing gradients.
#
#
# Now we shall call ``loss.backward()``, and have a look at conv1's bias
# gradients before and after the backward.
net.zero_grad() # zeroes the gradient buffers of all parameters
print('conv1.bias.grad before backward')
print(net.conv1.bias.grad)
loss.backward()
print('conv1.bias.grad after backward')
print(net.conv1.bias.grad)
########################################################################
# Now, we have seen how to use loss functions.
#
# **Read Later:**
#
# The neural network package contains various modules and loss functions
# that form the building blocks of deep neural networks. A full list with
# documentation is `here <https://pytorch.org/docs/nn>`_.
#
# **The only thing left to learn is:**
#
# - Updating the weights of the network
#
# Update the weights
# ------------------
# The simplest update rule used in practice is the Stochastic Gradient
# Descent (SGD):
#
# ``weight = weight - learning_rate * gradient``
#
# We can implement this using simple python code:
#
# .. code:: python
#
# learning_rate = 0.01
# for f in net.parameters():
# f.data.sub_(f.grad.data * learning_rate)
#
# However, as you use neural networks, you want to use various different
# update rules such as SGD, Nesterov-SGD, Adam, RMSProp, etc.
# To enable this, we built a small package: ``torch.optim`` that
# implements all these methods. Using it is very simple:
LOG("optmizer learning" + "-" * 30)
import torch.optim as optim
# create your optimizer
optimizer = optim.SGD(net.parameters(), lr=0.001)
# in your training loop:
# Observe how gradient buffers had to be manually set to zero using optimizer.zero_grad(). This is because gradients are accumulated as explained in Backprop section.
input = torch.randn(1, 1, 32, 32)
target = torch.randn(10).view(1, -1)
for i in range(100):
    optimizer.zero_grad()   # zero the gradient buffers
    output = net(input)
    loss = criterion(output, target)
    loss.backward()
    optimizer.step()        # Does the update
    print(loss)
###############################################################
# .. Note::
#
# Observe how gradient buffers had to be manually set to zero using
# ``optimizer.zero_grad()``. This is because gradients are accumulated
# as explained in `Backprop`_ section.
| 33.391447
| 166
| 0.622796
|
d4c0348365e2546951925353723928d8b8f88f04
| 1,182
|
py
|
Python
|
books/PythonCleanCode/ch5_decorator/decorator_side_effect_1.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonCleanCode/ch5_decorator/decorator_side_effect_1.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonCleanCode/ch5_decorator/decorator_side_effect_1.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
"""Clean Code in Python - Chapter 5: Decorators
> Undesired side effects on decorators
"""
import time
from functools import wraps
from log import logger
def traced_function_wrong(function):
"""An example of a badly defined decorator."""
logger.debug("started execution of %s", function)
start_time = time.time()
@wraps(function)
def wrapped(*args, **kwargs):
result = function(*args, **kwargs)
logger.info("function %s took %.2fs", function, time.time() - start_time)
return result
return wrapped
@traced_function_wrong
def process_with_delay(callback, delay=0):
logger.info("sleep(%d)", delay)
return callback
def traced_function(function):
@wraps(function)
def wrapped(*args, **kwargs):
logger.info("started execution of %s", function)
start_time = time.time()
result = function(*args, **kwargs)
logger.info("function %s took %.2fs", function, time.time() - start_time)
return result
return wrapped
@traced_function
def call_with_delay(callback, delay=0):
logger.info("sleep(%s)", delay)
return callback
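# Illustrative usage sketch (added; not in the original file). It assumes the
# repo's ``log.logger`` is configured to emit INFO records. With
# ``traced_function_wrong``, "started execution" is logged and ``start_time``
# is captured once, at decoration (import) time, so every call reports the
# time elapsed since import; ``traced_function`` times each call correctly.
if __name__ == "__main__":
    time.sleep(1)
    process_with_delay(lambda: None)  # reports >= 1s although the call is instant
    call_with_delay(lambda: None)     # reports the actual (near-zero) duration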
| 25.148936
| 82
| 0.645516
|
2ff99af4a56c934c0bb6b2bb59d902012579c3a5
| 1,826
|
py
|
Python
|
Cream/tools/generate_subImageNet.py
|
wkcn/AutoML
|
106cb0fbc19fa116c4ef5caf232acaaea85822c8
|
[
"MIT"
] | 307
|
2020-10-29T13:17:02.000Z
|
2022-03-30T09:55:49.000Z
|
Cream/tools/generate_subImageNet.py
|
QPC-database/AutoML
|
75d86a5e3366a6c9af6c2001ee1ba4fc41c5d6cb
|
[
"MIT"
] | 42
|
2020-10-30T07:09:48.000Z
|
2022-03-29T13:54:56.000Z
|
Cream/tools/generate_subImageNet.py
|
QPC-database/AutoML
|
75d86a5e3366a6c9af6c2001ee1ba4fc41c5d6cb
|
[
"MIT"
] | 64
|
2020-10-30T10:08:48.000Z
|
2022-03-30T06:51:01.000Z
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# Written by Hao Du and Houwen Peng
# email: haodu8-c@my.cityu.edu.hk and houwen.peng@microsoft.com
# This file demonstrates how to generate subImageNet.
import os
data_path = './data'
ImageNet_train_path = os.path.join(data_path, 'imagenet/train')
subImageNet_name = 'subImageNet'
class_idx_txt_path = os.path.join(data_path, subImageNet_name)
# train
classes = sorted(os.listdir(ImageNet_train_path))
if not os.path.exists(os.path.join(data_path, subImageNet_name)):
os.mkdir(os.path.join(data_path, subImageNet_name))
subImageNet = dict()
with open(os.path.join(class_idx_txt_path, 'subimages_list.txt'), 'w') as f:
subImageNet_class = classes[:100]
for iclass in subImageNet_class:
class_path = os.path.join(ImageNet_train_path, iclass)
if not os.path.exists(
os.path.join(
data_path,
subImageNet_name,
iclass)):
os.mkdir(os.path.join(data_path, subImageNet_name, iclass))
images = sorted(os.listdir(class_path))
subImages = images[:350]
# print("{}\n".format(subImages))
f.write("{}\n".format(subImages))
subImageNet[iclass] = subImages
for image in subImages:
raw_path = os.path.join(ImageNet_train_path, iclass, image)
new_ipath = os.path.join(
data_path, subImageNet_name, iclass, image)
os.system('cp {} {}'.format(raw_path, new_ipath))
sub_classes = sorted(subImageNet.keys())
with open(os.path.join(class_idx_txt_path, 'info.txt'), 'w') as f:
class_idx = 0
for key in sub_classes:
images = sorted((subImageNet[key]))
# print(len(images))
f.write("{}\n".format(key))
class_idx = class_idx + 1
| 37.265306
| 76
| 0.659912
|
c2f4f20073b0bce76dd88d83f072cce959ea2199
| 45,792
|
py
|
Python
|
src/sardana/taurus/core/tango/sardana/macroserver.py
|
tiagocoutinho/sardana
|
d8689755d7e11d60138f1e601a4d177cd412fdb1
|
[
"CC-BY-3.0"
] | null | null | null |
src/sardana/taurus/core/tango/sardana/macroserver.py
|
tiagocoutinho/sardana
|
d8689755d7e11d60138f1e601a4d177cd412fdb1
|
[
"CC-BY-3.0"
] | null | null | null |
src/sardana/taurus/core/tango/sardana/macroserver.py
|
tiagocoutinho/sardana
|
d8689755d7e11d60138f1e601a4d177cd412fdb1
|
[
"CC-BY-3.0"
] | null | null | null |
#!/usr/bin/env python
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""The macroserver submodule. It contains specific part of macroserver"""
__all__ = ['BaseInputHandler', 'BaseDoor', 'BaseMacroServer',
'registerExtensions']
__docformat__ = 'restructuredtext'
import sys
import time
import uuid
import math
import weakref
import threading
import os.path as osp
import os
from lxml import etree
import PyTango
from taurus import Device, Factory
from taurus.core.taurusmanager import TaurusManager
from taurus.core.taurusbasetypes import TaurusEventType, TaurusSWDevState, \
TaurusSerializationMode
from taurus.core import TaurusDevState
from taurus.core.util.log import Logger
from taurus.core.util.containers import CaselessDict
from taurus.core.util.codecs import CodecFactory
from taurus.core.util.event import EventGenerator, AttributeEventWait
from taurus.core.tango import TangoDevice
from sardana.sardanautils import recur_map
from .macro import MacroInfo, Macro, MacroNode, ParamFactory, \
SingleParamNode, ParamNode, createMacroNode
from .sardana import BaseSardanaElementContainer, BaseSardanaElement
from .pool import getChannelConfigs
from itertools import zip_longest
CHANGE_EVT_TYPES = TaurusEventType.Change, TaurusEventType.Periodic
def _get_console_width():
try:
width = int(os.popen('stty size', 'r').read().split()[1])
except Exception:
width = float('inf')
return width
def _get_nb_lines(nb_chrs, max_chrs):
return int(math.ceil(float(nb_chrs)/max_chrs))
class Attr(Logger, EventGenerator):
def __init__(self, dev, name, obj_class, attr):
self._dev = weakref.ref(dev)
self._obj_class = obj_class
self._attr = attr
self.call__init__(Logger, name)
event_name = '%s %s' % (dev.getNormalName(), name)
self.call__init__(EventGenerator, event_name)
self._attr.addListener(self)
def eventReceived(self, src, type, evt_value):
if type == TaurusEventType.Error:
self.fireEvent(None)
elif type != TaurusEventType.Config:
if evt_value:
self.fireEvent(evt_value.rvalue)
else:
self.fireEvent(None)
def getTaurusAttribute(self):
return self._attr
def __getattr__(self, name):
return getattr(self._attr, name)
class LogAttr(Attr):
def __init__(self, dev, name, obj_class, attr, max_buff_size=4096):
self._log_buffer = []
self._max_buff_size = max_buff_size
self.call__init__(Attr, dev, name, obj_class, attr)
def getLogBuffer(self):
return self._log_buffer
def clearLogBuffer(self):
self._log_buffer = []
def eventReceived(self, src, type, evt_value):
if type == TaurusEventType.Change:
if evt_value is None or evt_value.rvalue is None:
self.fireEvent(None)
else:
self._log_buffer.extend(evt_value.rvalue)
while len(self._log_buffer) > self._max_buff_size:
self._log_buffer.pop(0)
if evt_value:
self.fireEvent(evt_value.rvalue)
class BaseInputHandler(object):
def __init__(self):
try:
self._input = raw_input
except NameError:
self._input = input
def input(self, input_data=None):
if input_data is None:
input_data = {}
prompt = input_data.get('prompt')
ret = dict(input=None, cancel=False)
try:
if prompt is None:
ret['input'] = self._input()
else:
ret['input'] = self._input(prompt)
except:
ret['cancel'] = True
return ret
def input_timeout(self, input_data):
print("input timeout")
class MacroServerDevice(TangoDevice):
"""A class encapsulating a generic macro server device (usually a
    MacroServer or a Door)"""
def _getEventWait(self):
if not hasattr(self, '_evt_wait'):
# create an object that waits for attribute events.
# each time we use it we have to connect and disconnect to an
# attribute
self._evt_wait = AttributeEventWait()
return self._evt_wait
class ExperimentConfiguration(object):
def __init__(self, door):
self._door = door
def get(self, cache=False):
door = self._door
macro_server = door.macro_server
env = door.getEnvironment()
ret = dict(ScanDir=env.get('ScanDir'),
DataCompressionRank=env.get('DataCompressionRank', 1),
PreScanSnapshot=env.get('PreScanSnapshot', []))
scan_file = env.get('ScanFile')
if scan_file is None:
scan_file = []
elif isinstance(scan_file, str):
scan_file = [scan_file]
ret['ScanFile'] = scan_file
mnt_grps = macro_server.getElementsOfType("MeasurementGroup")
mnt_grps_names = [mnt_grp.name for mnt_grp in list(mnt_grps.values())]
mnt_grps_full_names = list(mnt_grps.keys())
active_mnt_grp = env.get('ActiveMntGrp')
if active_mnt_grp is None and len(mnt_grps):
active_mnt_grp = mnt_grps_names[0]
door.putEnvironment('ActiveMntGrp', active_mnt_grp)
ret['ActiveMntGrp'] = active_mnt_grp
ret['MntGrpConfigs'] = mnt_grp_configs = CaselessDict()
if len(mnt_grps) == 0:
return ret
mnt_grp_grps = PyTango.Group("grp")
        # use full names because we may be using a different Tango database
mnt_grp_grps.add(mnt_grps_full_names)
codec = CodecFactory().getCodec('json')
replies = mnt_grp_grps.read_attribute("configuration")
for mnt_grp, reply in zip(mnt_grps_names, replies):
try:
mnt_grp_configs[mnt_grp] = \
codec.decode(('json', reply.get_data().value))[1]
except Exception as e:
from taurus.core.util.log import warning
warning('Cannot load Measurement group "%s": %s',
repr(mnt_grp), repr(e))
return ret
def set(self, conf, mnt_grps=None):
"""Sets the ExperimentConfiguration dictionary."""
if mnt_grps is None:
mnt_grps = list(conf['MntGrpConfigs'].keys())
codec = CodecFactory().getCodec('json')
msg_error = ''
for mnt_grp in mnt_grps:
try:
mnt_grp_cfg = conf['MntGrpConfigs'][mnt_grp]
if mnt_grp_cfg is None: # a mntGrp to be deleted
pool = self._getPoolOfElement(mnt_grp)
pool.DeleteElement(mnt_grp)
else:
try:
# TODO: Fix incorrect implementation. It must check if
# the measurement group is part of the Pools
# controlled by the MacroServer. Otherwise,
# it must raise an exception.
mnt_grp_dev = Device(mnt_grp)
except Exception:
# if the mnt_grp did not already exist, create it now
chconfigs = getChannelConfigs(mnt_grp_cfg)
chnames, chinfos = list(zip(*chconfigs)) # unzipping
# We assume that all the channels belong to the same
# pool!
pool = self._getPoolOfElement(chnames[0])
pool.createMeasurementGroup([mnt_grp] + list(chnames))
mnt_grp_dev = Device(mnt_grp)
# TODO when we start using measurement group extension
# change the code below with the following:
# mnt_grp.setConfiguration(mnt_grp_cfg)
data = codec.encode(('', mnt_grp_cfg))[1]
mnt_grp_dev.write_attribute('configuration', data)
except PyTango.DevFailed as df:
# Take the description of the first exception.
desc = df.args[0].desc
desc = desc.replace('\r', '')
desc = desc.replace('\n', '')
msg_error += 'Measurement Group {0}:\n'\
'{1}\n\n'.format(mnt_grp, desc)
if len(msg_error) > 0:
raise RuntimeError(msg_error)
# Send the environment changes
env = dict(ScanDir=conf.get('ScanDir'),
ScanFile=conf.get('ScanFile'),
DataCompressionRank=conf.get('DataCompressionRank', -1),
ActiveMntGrp=conf.get('ActiveMntGrp'),
PreScanSnapshot=conf.get('PreScanSnapshot'))
self._door.putEnvironments(env)
def _getPoolOfElement(self, elementname):
ms = self._door.macro_server
einfo = ms.getElementInfo(elementname)
poolname = einfo.pool
return ms.getElementInfo(poolname)
# @property
# def _pool(self):
# pooldict = self._door.macro_server.getElementsOfType('Pool')
# if len(pooldict)==0:
# raise ValueError('Cannot access the Pool')
# elif len(pooldict)>1:
# raise ValueError('Multiple pools are not supported')
# poolinfo = pooldict.values()[0]
# return poolinfo
class BaseDoor(MacroServerDevice):
""" Class encapsulating Door device functionality."""
On = PyTango.DevState.ON
Running = PyTango.DevState.RUNNING
Paused = PyTango.DevState.STANDBY
Critical = 'Critical'
Error = 'Error'
Warning = 'Warning'
Info = 'Info'
Output = 'Output'
Debug = 'Debug'
Result = 'Result'
RecordData = 'RecordData'
BlockStart = '<BLOCK>'
BlockFinish = '</BLOCK>'
log_streams = (Error, Warning, Info, Output, Debug, Result)
# maximum execution time without user interruption
# this also means a time window within door state events must arrive
# 0.1 s was not enough on Windows (see sardana-ord/sardana#725)
InteractiveTimeout = .3
def __init__(self, name, **kw):
self._log_attr = CaselessDict()
self._block_lines = 0
self._in_block = False
self._macro_server = None
self._running_macros = None
self._running_macro = None
self._last_running_macro = None
self._user_xml = None
self._ignore_logs = kw.get("ignore_logs", False)
self._silent = kw.get("silent", True)
self._debug = kw.get("debug", False)
self._output_stream = kw.get("output", sys.stdout)
self._writeLock = threading.Lock()
self._input_handler = self.create_input_handler()
self._len_last_data_line = 1
self.call__init__(MacroServerDevice, name, **kw)
self._old_door_state = PyTango.DevState.UNKNOWN
self._old_sw_door_state = TaurusDevState.Undefined
self.stateObj.addListener(self.stateChanged)
for log_name in self.log_streams:
tg_attr = self.getAttribute(log_name)
attr = LogAttr(self, log_name, None, tg_attr)
if log_name == 'Result':
attr.subscribeEvent(self.resultReceived, log_name)
else:
attr.subscribeEvent(self.logReceived, log_name)
self._log_attr[log_name] = attr
self.__input_attr = self.getAttribute("Input")
self.__input_attr.addListener(self.inputReceived)
self.__record_data_attr = self.getAttribute('RecordData')
self.__record_data_attr.addListener(self.recordDataReceived)
self.__macro_status_attr = self.getAttribute('MacroStatus')
self.__macro_status_attr.addListener(self.macroStatusReceived)
self._experiment_configuration = ExperimentConfiguration(self)
def create_input_handler(self):
return BaseInputHandler()
def get_input_handler(self):
return self._input_handler
def get_color_mode(self):
return "NoColor"
# def macrosChanged(self, s, v, t):
# pass
@property
def log_start(self):
if not hasattr(self, "_log_start"):
import taurus.core.util.console
if self.get_color_mode() == "NoColor":
kls = taurus.core.util.console.NoColors
else:
kls = taurus.core.util.console.TermColors
self._log_start = {BaseDoor.Critical: kls.LightRed,
BaseDoor.Error: kls.Red,
BaseDoor.Info: kls.LightBlue,
BaseDoor.Warning: kls.Brown,
BaseDoor.Output: kls.Normal,
BaseDoor.Debug: kls.DarkGray,
BaseDoor.Result: kls.LightGreen}
return self._log_start
@property
def log_stop(self):
if not hasattr(self, "_log_stop"):
import taurus.core.util.console
if self.get_color_mode() == "NoColor":
kls = taurus.core.util.console.NoColors
else:
kls = taurus.core.util.console.TermColors
self._log_stop = {BaseDoor.Critical: kls.Normal,
BaseDoor.Error: kls.Normal,
BaseDoor.Info: kls.Normal,
BaseDoor.Warning: kls.Normal,
BaseDoor.Output: kls.Normal,
BaseDoor.Debug: kls.Normal,
BaseDoor.Result: kls.Normal}
return self._log_stop
def getStateAttr(self):
return self._state_attr
@property
def macro_server(self):
if self._macro_server is None:
self._macro_server = self._get_macroserver_for_door()
return self._macro_server
def _get_macroserver_for_door(self):
"""Returns the MacroServer device object in the same DeviceServer as
this door"""
db = self.factory().getDatabase()
door_name = self.dev_name()
server_list = list(db.get_server_list('MacroServer/*'))
server_list += list(db.get_server_list('Sardana/*'))
server_devs = None
for server in server_list:
server_devs = db.get_device_class_list(server)
devs, klasses = server_devs[0::2], server_devs[1::2]
for dev in devs:
if dev.lower() == door_name:
for i, klass in enumerate(klasses):
if klass == 'MacroServer':
return self.factory().getDevice(devs[i])
else:
return None
def setDebugMode(self, state):
self._debug = state
def getDebugMode(self):
return self._debug
def setSilent(self, yesno):
self._silent = yesno
def isSilent(self):
return self._silent
def getLogObj(self, log_name='Debug'):
return self._log_attr.get(log_name, None)
def getRunningXML(self):
return self._user_xml
def getRunningMacro(self):
return self._running_macro
def getLastRunningMacro(self):
return self._last_running_macro
def abort(self, synch=True):
if not synch:
self.command_inout("AbortMacro")
return
evt_wait = AttributeEventWait(self.getAttribute("state"))
evt_wait.lock()
try:
time_stamp = time.time()
self.command_inout("AbortMacro")
evt_wait.waitEvent(self.Running, equal=False, after=time_stamp,
timeout=self.InteractiveTimeout)
finally:
evt_wait.unlock()
evt_wait.disconnect()
def stop(self, synch=True):
if not synch:
self.command_inout("StopMacro")
return
evt_wait = AttributeEventWait(self.getAttribute("state"))
evt_wait.lock()
try:
time_stamp = time.time()
self.command_inout("StopMacro")
evt_wait.waitEvent(self.Running, equal=False, after=time_stamp,
timeout=self.InteractiveTimeout)
finally:
evt_wait.unlock()
evt_wait.disconnect()
def _clearRunMacro(self):
# Clear the log buffer
list(map(LogAttr.clearLogBuffer, list(self._log_attr.values())))
self._running_macros = None
self._running_macro = None
self._user_xml = None
self._block_lines = 0
def _createMacroXml(self, macro_name, macro_params):
"""Creation of the macro XML object.
:param macro_name: (str) macro name
:param macro_params: (sequence[str]) list of parameter values,
if repeat parameters are used parameter values may be sequences
itself.
:return (lxml.etree._Element) macro XML element
"""
macro_info = self.macro_server.getMacroInfoObj(macro_name)
params_def = macro_info.parameters
macro_node = createMacroNode(macro_name, params_def, macro_params)
return macro_node.toXml()
def preRunMacro(self, obj, parameters):
self._clearRunMacro()
xml_root = None
if isinstance(obj, str):
if obj.startswith('<') and not parameters:
xml_root = etree.fromstring(obj)
else:
macros = []
if len(parameters) == 0:
macros_strs = obj.split('\n')
for m in macros_strs:
pars = m.split()
macros.append((pars[0], pars[1:]))
else:
parameters = recur_map(str, parameters)
macros.append((obj, parameters))
xml_root = xml_seq = etree.Element('sequence')
for m in macros:
macro_name = m[0]
macro_params = m[1]
xml_macro = self._createMacroXml(macro_name, macro_params)
xml_macro.set('id', str(uuid.uuid1()))
xml_seq.append(xml_macro)
elif etree.iselement(obj):
xml_root = obj
else:
raise TypeError('obj must be a string or a etree.Element')
self._running_macros = {}
for macro_xml in xml_root.xpath('//macro'):
id, name = macro_xml.get('id'), macro_xml.get('name')
self._running_macros[id] = Macro(self, name, id, macro_xml)
return xml_root
def postRunMacro(self, result, synch):
pass
def runMacro(self, obj, parameters=[], synch=False):
self._user_xml = self.preRunMacro(obj, parameters)
result = self._runMacro(self._user_xml, synch=synch)
return self.postRunMacro(result, synch)
def _runMacro(self, xml, synch=False):
if not synch:
return self.command_inout("RunMacro",
[etree.tostring(xml,
encoding='unicode')])
timeout = self.InteractiveTimeout
evt_wait = self._getEventWait()
evt_wait.connect(self.getAttribute("state"))
evt_wait.lock()
try:
evt_wait.waitEvent(self.Running, equal=False, timeout=timeout)
# Clear event set to not confuse the value coming from the
            # connection with the event of the end of the macro execution
# in the next wait event. This was observed on Windows where
# the time stamp resolution is not better than 1 ms.
evt_wait.clearEventSet()
ts = time.time()
result = self.command_inout("RunMacro",
[etree.tostring(xml,
encoding='unicode')])
evt_wait.waitEvent(self.Running, after=ts, timeout=timeout)
if synch:
evt_wait.waitEvent(self.Running, equal=False, after=ts,
timeout=timeout)
finally:
self._clearRunMacro()
evt_wait.unlock()
evt_wait.disconnect()
return result
def stateChanged(self, s, t, v):
        # Contrary to Taurus3, Taurus4 raises exceptions when the
        # device server is going down and we try to retrieve the state.
# In this case provide the same behavior as Taurus3 - assign None to
# the old state
try:
self._old_door_state = self.stateObj.rvalue
except PyTango.DevFailed:
self._old_door_state = None
self._old_sw_door_state = self.state
def resultReceived(self, log_name, result):
"""Method invoked by the arrival of a change event on the Result
attribute"""
if self._ignore_logs or self._running_macro is None:
return
self._running_macro.setResult(result)
return result
def putEnvironment(self, name, value):
self.macro_server.putEnvironment(name, value)
def putEnvironments(self, obj):
self.macro_server.putEnvironments(obj)
setEnvironment = putEnvironment
setEnvironments = putEnvironments
def getEnvironment(self, name=None):
return self.macro_server.getEnvironment(name=name)
def inputReceived(self, s, t, v):
if t not in CHANGE_EVT_TYPES:
return
if v is None or self._running_macros is None:
return
input_data = CodecFactory().decode(('json', v.value))
self.processInput(input_data)
def processInput(self, input_data):
TaurusManager().addJob(self._processInput, None, input_data)
def _processInput(self, input_data):
input_type = input_data['type']
if input_type == 'input':
result = self._input_handler.input(input_data)
            if result['input'] == '' and 'default_value' in input_data:
result['input'] = input_data['default_value']
result = CodecFactory().encode('json', ('', result))[1]
self.write_attribute('Input', result)
elif input_type == 'timeout':
self._input_handler.input_timeout(input_data)
def recordDataReceived(self, s, t, v):
if t not in CHANGE_EVT_TYPES:
return
return self._processRecordData(v)
def _processRecordData(self, data):
if data is None or data.rvalue is None:
return
data = data.rvalue
size = len(data[1])
if size == 0:
return
format = data[0]
codec = CodecFactory().getCodec(format)
data = codec.decode(data)
return data
def processRecordData(self, data):
pass
def macroStatusReceived(self, s, t, v):
if v is None or self._running_macros is None:
return
if t not in CHANGE_EVT_TYPES:
return
v = v.value
if not len(v[1]):
return
format = v[0]
codec = CodecFactory().getCodec(format)
fmt, data = codec.decode(v)
for macro_status in data:
id = macro_status.get('id')
macro = self._running_macros.get(id)
self._last_running_macro = self._running_macro = macro
            # if we don't have the ID it's because the macro is running a
            # submacro, or another client is connected to the same door (shame
            # on him!) and is executing a macro, so we discard this event
if macro is not None:
macro.__dict__.update(macro_status)
return data
def logReceived(self, log_name, output):
max_chrs = _get_console_width()
if not output or self._silent or self._ignore_logs:
return
if log_name == self.Debug and not self._debug:
return
o = self.log_start[log_name]
for line in output:
if not self._debug:
if line == self.BlockStart:
self._in_block = True
for i in range(self._block_lines):
if max_chrs == float('inf'):
nb_lines = 1
else:
nb_lines = _get_nb_lines(
self._len_last_data_line,
max_chrs)
# per each line: erase current line,
# go up one line and erase current line
o += '\x1b[2K\x1b[1A\x1b[2K' * nb_lines
self._block_lines = 0
continue
elif line == self.BlockFinish:
self._in_block = False
continue
else:
self._len_last_data_line = len(line)
if self._in_block:
self._block_lines += 1
else:
self._block_lines = 0
o += "%s\n" % line
o += self.log_stop[log_name]
self.write(o)
def write(self, msg, stream=None):
if self.isSilent():
return
self._output_stream = sys.stdout
out = self._output_stream
if stream is not None:
start, stop = self.log_start.get(stream), self.log_stop.get(stream)
if start is not None and stop is not None:
out.write(start)
out.write(msg)
out.write(stop)
out.flush()
return
out.write(msg)
out.flush()
def writeln(self, msg='', stream=None):
self.write("%s\n" % msg, stream=stream)
def getExperimentConfigurationObj(self):
return self._experiment_configuration
def getExperimentConfiguration(self):
return self._experiment_configuration.get()
def setExperimentConfiguration(self, config, mnt_grps=None):
self._experiment_configuration.set(config, mnt_grps=mnt_grps)
class UnknownMacroServerElementFormat(Exception):
pass
class MacroPath(object):
def __init__(self, ms):
self._ms = weakref.ref(ms)
self.refresh()
def refresh(self):
self.macro_path = mp = self._ms().get_property("MacroPath")[
"MacroPath"]
self.base_macro_path = osp.commonprefix(self.macro_path)
        # each entry expressed relative to the common base path
        self.rel_macro_path = [osp.relpath(p, self.base_macro_path) for p in mp]
class Environment(dict):
def __init__(self, macro_server):
dict.__setattr__(self, "_macro_server_", weakref.ref(macro_server))
def __setattr__(self, key, value):
ms = self._macro_server_()
if ms is not None:
ms.putEnvironment(key, value)
def __getattr__(self, key):
return self[key]
def __delattr__(self, key):
ms = self._macro_server_()
if ms is not None:
ms.removeEnvironment(key)
def __dir__(self):
return [key for key in list(self.keys()) if not key.startswith("_")]
class BaseMacroServer(MacroServerDevice):
"""Class encapsulating Macro Server device functionality."""
def __init__(self, name, **kw):
self._env = Environment(self)
self._elements = BaseSardanaElementContainer()
self.call__init__(MacroServerDevice, name, **kw)
self.__elems_attr = self.getAttribute("Elements")
try:
serialization_mode = TaurusSerializationMode.TangoSerial
except AttributeError:
serialization_mode = TaurusSerializationMode.Serial
self.__elems_attr.setSerializationMode(serialization_mode)
self.__elems_attr.addListener(self.on_elements_changed)
self.__elems_attr.setSerializationMode(
TaurusSerializationMode.Concurrent)
self.__env_attr = self.getAttribute('Environment')
try:
serialization_mode = TaurusSerializationMode.TangoSerial
except AttributeError:
serialization_mode = TaurusSerializationMode.Serial
self.__env_attr.setSerializationMode(serialization_mode)
self.__env_attr.addListener(self.on_environment_changed)
self.__env_attr.setSerializationMode(
TaurusSerializationMode.Concurrent)
NO_CLASS_TYPES = 'ControllerClass', 'ControllerLibrary', \
'MacroLibrary', 'Instrument', 'Meta', 'ParameterType'
def on_environment_changed(self, evt_src, evt_type, evt_value):
try:
return self._on_environment_changed(evt_src, evt_type, evt_value)
except Exception:
self.error("Exception occurred processing environment")
self.error("Details:", exc_info=1)
return set(), set(), set()
def _on_environment_changed(self, evt_src, evt_type, evt_value):
ret = added, removed, changed = set(), set(), set()
if evt_type not in CHANGE_EVT_TYPES:
return ret
env = CodecFactory().decode(evt_value.rvalue)
for key, value in list(env.get('new', {}).items()):
self._addEnvironment(key, value)
added.add(key)
for key in env.get('del', []):
self._removeEnvironment(key)
removed.add(key)
for key, value in list(env.get('change', {}).items()):
self._removeEnvironment(key)
self._addEnvironment(key, value)
changed.add(key)
return ret
def _addEnvironment(self, key, value):
self._env[key] = value
def _removeEnvironment(self, key):
try:
self._env.pop(key)
except KeyError:
pass
def putEnvironment(self, name, value):
self.putEnvironments({name: value})
def putEnvironments(self, obj):
obj = dict(new=obj)
codec = CodecFactory().getCodec('pickle')
self.write_attribute('Environment', codec.encode(('', obj)))
setEnvironment = putEnvironment
setEnvironments = putEnvironments
def getEnvironment(self, name=None):
if name is None:
return self._env
else:
return self._env[name]
def removeEnvironment(self, key):
keys = key,
return self.removeEnvironments(keys)
def removeEnvironments(self, keys):
obj = {'del': keys}
codec = CodecFactory().getCodec('pickle')
self.write_attribute('Environment', codec.encode(('', obj)))
def getObject(self, element_info):
elem_type = element_info.getType()
if elem_type in self.NO_CLASS_TYPES:
obj = object()
elif "MacroCode" in element_info.interfaces:
obj = self._createMacroClassObject(element_info)
else:
obj = self._createDeviceObject(element_info)
return obj
def _createMacroClassObject(self, element_info):
return MacroInfo(from_json=element_info._data)
def _createDeviceObject(self, element_info):
return Factory().getDevice(element_info.full_name)
def on_elements_changed(self, evt_src, evt_type, evt_value):
try:
return self._on_elements_changed(evt_src, evt_type, evt_value)
except Exception:
self.error("Exception occurred processing elements")
self.error("Details:", exc_info=1)
return set(), set(), set()
def _on_elements_changed(self, evt_src, evt_type, evt_value):
ret = added, removed, changed = set(), set(), set()
if evt_type not in CHANGE_EVT_TYPES:
return ret
try:
elems = CodecFactory().decode(evt_value.rvalue)
        except Exception:
self.error("Could not decode element info format=%s len=%s",
evt_value.rvalue[0], len(evt_value.rvalue[1]))
return ret
for element_data in elems.get('new', ()):
element_data['manager'] = self
element = self._addElement(element_data)
added.add(element)
for element_data in elems.get('del', ()):
element = self._removeElement(element_data)
removed.add(element)
for element_data in elems.get('change', ()):
element = self._removeElement(element_data)
element_data['manager'] = self
element = self._addElement(element_data)
changed.add(element)
return ret
def _addElement(self, element_data):
element = BaseSardanaElement(**element_data)
self.getElementsInfo().addElement(element)
return element
def _removeElement(self, element_data):
full_name = element_data['full_name']
element = self.getElementInfo(full_name)
self.getElementsInfo().removeElement(element)
return element
def getElementsInfo(self):
return self._elements
def getElements(self):
return self.getElementsInfo().getElements()
def getElementInfo(self, name):
return self.getElementsInfo().getElement(name)
def getElementNamesOfType(self, elem_type):
return self.getElementsInfo().getElementNamesOfType(elem_type)
def getElementNamesWithInterface(self, interface):
return self.getElementsInfo().getElementNamesWithInterface(interface)
def getElementsWithInterface(self, interface):
return self.getElementsInfo().getElementsWithInterface(interface)
def getElementsWithInterfaces(self, interfaces):
return self.getElementsInfo().getElementsWithInterfaces(interfaces)
def getElementsOfType(self, elem_type):
return self.getElementsInfo().getElementsOfType(elem_type)
def getElementsOfTypes(self, elem_types):
elems = CaselessDict()
for elem_type in elem_types:
elems.update(self.getElementsOfType(elem_type))
return elems
def getInterfaces(self):
return self.getElementsInfo().getInterfaces()
def getExpChannelElements(self):
channel_types = "CTExpChannel", "ZeroDExpChannel", "OneDExpChannel", \
"TwoDExpChannel", "PseudoCounter"
return self.getElementsOfTypes(channel_types)
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
# Macro API
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
def getMacros(self):
iname = 'MacroCode'
return dict(self.getElementsInfo().getElementsWithInterface(iname))
def getMacroInfoObj(self, macro_name):
iname = 'MacroCode'
return self.getElementsInfo().getElementWithInterface(macro_name,
iname)
def getMacroStrList(self):
return self.getElementNamesWithInterface('MacroCode')
def getMacroNodeObj(self, macro_name):
"""
This method retrieves information about macro from MacroServer
and creates MacroNode object, filled with all information about
parameters.
:param macro_name: (str) macro name
:return: (MacroNode)
        See Also: fillMacroNodeAdditionalInfos
"""
macroInfoObj = self.getMacroInfoObj(macro_name)
if macroInfoObj is None:
return
# fill macro parameters
paramsInfo = macroInfoObj.parameters
macroNode = MacroNode(name=macro_name, params_def=paramsInfo)
hasParams = bool(len(paramsInfo))
macroNode.setHasParams(hasParams)
# fill allowed hook places
allowedHookPlaces = []
hints = macroInfoObj.hints
if hints is not None:
for hook in hints.get('allowsHooks', []):
allowedHookPlaces.append(str(hook))
macroNode.setAllowedHookPlaces(allowedHookPlaces)
return macroNode
def validateMacroName(self, macroName):
macroInfo = self.getElementInfo(macroName)
if macroInfo is None:
raise Exception(
"%s macro does not exist in this sardana system." % macroName)
elif macroInfo.type != 'MacroClass':
raise Exception("%s element is not a macro." % macroName)
return True
def validateMacroNode(self, macroNode):
paramNodes = macroNode.children()
for paramNode in paramNodes:
self.validateParamNode(paramNode)
return True
def validateParamNode(self, paramNode):
assert isinstance(paramNode, ParamNode)
if isinstance(paramNode, SingleParamNode):
self.validateSingleParam(paramNode)
else:
self.validateRepeatParam(paramNode)
return True
def validateSingleParam(self, singleParamNode):
name = singleParamNode.name()
type = singleParamNode.type()
value = singleParamNode.value()
if type == "Boolean":
pass
elif type == "Env":
pass
elif type == "File":
pass
elif type == "Filename":
pass
elif type == "MotorParam":
pass
elif type == "String":
pass
elif type == "User":
pass
elif type == "MotorParam":
pass
elif type == "Integer":
int(value)
min = singleParamNode.min()
max = singleParamNode.max()
if min is not None and value < min:
raise Exception(
"%s parameter value: %s is below minimum allowed value."
% (name, value))
if max is not None and value > max:
raise Exception(
"%s parameter value: %s is above maximum allowed value."
% (name, value))
elif type == "Float":
            value = float(value)  # also makes the range checks below type-safe
min = singleParamNode.min()
max = singleParamNode.max()
if min is not None and value < min:
raise Exception(
"%s parameter value: %s is below minimum allowed value."
% (name, value))
if max is not None and value > max:
raise Exception(
"%s parameter value: %s is above maximum allowed value."
% (name, value))
else:
allowedInterfaces = list(self.getInterfaces().keys())
if type not in allowedInterfaces:
raise Exception(
"No element with %s interface exist in this sardana "
"system." % type)
allowedValues = self.getElementNamesWithInterface(type)
if value not in allowedValues:
raise Exception(
"%s element with %s interface does not exist in this "
"sardana system." % (value, type))
return True
def validateRepeatParam(self, repeatParamNode):
paramName = repeatParamNode.name()
        if repeatParamNode.isBelowMin():
            raise Exception(
                "%s parameter has too few repeats." % (paramName))
        if repeatParamNode.isAboveMax():
            raise Exception(
                "%s parameter has too many repeats." % (paramName))
repetitions = repeatParamNode.children()
for repeat in repetitions:
params = repeat.children()
for param in params:
if isinstance(param, SingleParamNode):
self.validateSingleParam(param)
else:
self.validateRepeatParam(param)
return True
def fillMacroNodeAdditionalInfos(self, macroNode):
"""
This method fills macroNode information which couldn't be stored
in XML file.
:param macroNode: (MacroNode) macro node obj populated from XML
information
See also: getMacroNodeObj
"""
macroName = macroNode.name()
macroInfoObj = self.getMacroInfoObj(macroName)
if macroInfoObj is None:
msg = "It was not possible to get information about {0} " \
"macro. Check if MacroServer is alive and if this macro " \
"exist.".format(macroName)
self.info(msg)
raise Exception("no info about macro {0}".format(macroName))
allowedHookPlaces = []
hints = macroInfoObj.hints or {}
for hook in hints.get("allowsHooks", []):
allowedHookPlaces.append(str(hook))
macroNode.setAllowedHookPlaces(allowedHookPlaces)
hasParams = macroInfoObj.hasParams()
macroNode.setHasParams(hasParams)
if hasParams:
paramList = macroInfoObj.getParamList()
for paramNode, paramInfo in zip(macroNode.params(), paramList):
self.__fillParamNodeAdditionalInfos(paramNode, paramInfo)
def __fillParamNodeAdditionalInfos(self, paramNode, paramInfo):
"""
This is a protected method foreseen to use only internally by
fillMacroNodeAdditionaInfos, to be called for every param node obj."""
type = paramInfo.get('type')
paramNode.setDescription(str(paramInfo.get("description")))
min = paramInfo.get("min")
paramNode.setMin(min)
max = paramInfo.get("max")
paramNode.setMax(max)
if isinstance(type, list):
paramNode.setParamsInfo(type)
for repeatNode in paramNode.children():
for internalParamNode, internalParamInfo in zip(
repeatNode.children(), type):
self.__fillParamNodeAdditionalInfos(
internalParamNode, internalParamInfo)
else:
paramNode.setType(str(type))
paramNode.setDefValue(str(paramInfo.get("default_value")))
def __fillParamNodesValues(self, paramInfo, paramNode):
"""
This is a protected method foreseen to use only internally by
__fillParamNodesValues, to be called for every param node obj.
:param paramInfo, paramNode:
:return:
"""
paramType = paramInfo.get('type')
paramNode.setDescription(str(paramInfo.get("description")))
min = paramInfo.get("min")
paramNode.setMin(min)
max = paramInfo.get("max")
paramNode.setMax(max)
paramNode.setName(paramInfo['name'])
if isinstance(paramType, list):
for repeatNode in paramNode.children():
children = repeatNode.children()
for child, paramT in zip_longest(children, paramType):
if child is None:
node = ParamFactory(paramT, repeatNode)
repeatNode.insertChild(node)
else:
self.__fillParamNodesValues(paramT, child)
else:
paramNode.setType(str(paramType))
paramNode.setDefValue(str(paramInfo.get("default_value")))
def printTree(self, nodes, tabs=0):
tabs = tabs + 1
for node in nodes:
print(('\t'*tabs) + str(type(node)) + str(node))
if isinstance(node, SingleParamNode):
pass
else:
nodes = node.children()
self.printTree(nodes, tabs)
def __recreateParamNodeAdditionalInfos(self, paramNode, paramInfo):
"""
This is a protected method foreseen to use only internally by
fillMacroNodeAdditionaInfos, to be called for every param node obj."""
paramType = paramInfo.get('type')
min = paramInfo.get("min")
max = paramInfo.get("max")
paramNode.setMin(min)
paramNode.setMax(max)
paramNode.setDescription(str(paramInfo.get("description")))
if isinstance(paramType, list):
paramNode.setParamsInfo(paramType)
for repeatNode in paramNode.children():
for internalParamNode, internalParamInfo in zip(
repeatNode.children(), paramType):
self.__recreateParamNodeAdditionalInfos(
internalParamNode, internalParamInfo)
else:
paramNode.setType(paramType)
paramNode.setDefValue(str(paramInfo.get("default_value")))
def getMacroPathObj(self, cache=False):
if not hasattr(self, "_macro_path"):
self._macro_path = MacroPath(self)
elif not cache:
self._macro_path.refresh()
return self._macro_path
def registerExtensions():
"""Registers the macroserver extensions in the
:class:`taurus.core.tango.TangoFactory`"""
factory = Factory('tango')
factory.registerDeviceClass('MacroServer', BaseMacroServer)
factory.registerDeviceClass('Door', BaseDoor)
def unregisterExtensions():
"""Registers the macroserver extensions in the
:class:`taurus.core.tango.TangoFactory`"""
factory = Factory('tango')
factory.unregisterDeviceClass('MacroServer')
factory.unregisterDeviceClass('Door')
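# Usage sketch (the device name is an assumption): after calling
# registerExtensions(), device proxies are created as BaseMacroServer/BaseDoor:
#
#     registerExtensions()
#     ms = Factory('tango').getDevice('macroserver/demo/1')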
| 36.085106
| 79
| 0.595453
|
37e4e76f9340b0eb091e5620895c2a0a1b725ced
| 1,938
|
py
|
Python
|
reader/image_consumer.py
|
Commin/SINETStream-videostreaming
|
ca64d229697c7f939156b3d014c13af67a20d2e6
|
[
"Apache-2.0"
] | 1
|
2021-02-14T04:23:35.000Z
|
2021-02-14T04:23:35.000Z
|
reader/image_consumer.py
|
Commin/SINETStream-videostreaming
|
ca64d229697c7f939156b3d014c13af67a20d2e6
|
[
"Apache-2.0"
] | null | null | null |
reader/image_consumer.py
|
Commin/SINETStream-videostreaming
|
ca64d229697c7f939156b3d014c13af67a20d2e6
|
[
"Apache-2.0"
] | 1
|
2021-01-21T05:06:53.000Z
|
2021-01-21T05:06:53.000Z
|
#!/usr/bin/env python3
# Copyright (C) 2020 National Institute of Informatics
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from argparse import ArgumentParser
import os
import cv2
from sinetstream import MessageReader
logging.basicConfig(level=logging.INFO)
def consumer(service, output):
with MessageReader(service, value_type='image') as reader:
for message in reader:
if show_image(message):
break
    if output is not None:
        cv2.imwrite(output, message.value)
        print("Wrote file.")
def show_image(message):
window_name = message.topic
image = message.value
cv2.imshow(window_name, image)
# Hit 'q' to stop
return cv2.waitKey(25) & 0xFF == ord("q")
if __name__ == '__main__':
parser = ArgumentParser(description="SINETStream Consumer")
parser.add_argument("-s", "--service", metavar="SERVICE_NAME", required=True)
parser.add_argument("-o", "--output", metavar="OUTPUT_FILE_NAME", default=None)
args = parser.parse_args()
print(f": service={args.service}")
try:
consumer(args.service, args.output)
except KeyboardInterrupt:
pass
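# Example invocation (the service name is an assumption; it must match an
# entry in the SINETStream configuration file):
#
#     python3 image_consumer.py -s image-service -o last_frame.png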
| 31.258065
| 83
| 0.707946
|
54827a07e265b6f4ebe09cba9578197d6214fe1a
| 94
|
py
|
Python
|
inbm/telemetry-agent/telemetry/__init__.py
|
ahameedx/intel-inb-manageability
|
aca445fa4cef0b608e6e88e74476547e10c06073
|
[
"Apache-2.0"
] | 5
|
2021-12-13T21:19:31.000Z
|
2022-01-18T18:29:43.000Z
|
inbm/telemetry-agent/telemetry/__init__.py
|
ahameedx/intel-inb-manageability
|
aca445fa4cef0b608e6e88e74476547e10c06073
|
[
"Apache-2.0"
] | 45
|
2021-12-30T17:21:09.000Z
|
2022-03-29T22:47:32.000Z
|
inbm/telemetry-agent/telemetry/__init__.py
|
ahameedx/intel-inb-manageability
|
aca445fa4cef0b608e6e88e74476547e10c06073
|
[
"Apache-2.0"
] | 4
|
2022-01-26T17:42:54.000Z
|
2022-03-30T04:48:04.000Z
|
"""
Copyright (C) 2017-2022 Intel Corporation
SPDX-License-Identifier: Apache-2.0
"""
| 18.8
| 45
| 0.670213
|
c4c87550b2f0f6db9984b33d9b95ef201d64242e
| 1,272
|
py
|
Python
|
pre_commit_hooks/generic_crlf_remove.py
|
beyse/pre-commit-hooks
|
249c3329da8734afa49629b66a192e0fb264399e
|
[
"MIT"
] | 3
|
2022-01-06T02:53:57.000Z
|
2022-02-22T22:12:53.000Z
|
hooks/python/generic_crlf_remove.py
|
jphppd/pre-commit-hooks
|
8acd752a70690be39e60f811958b71a18709ec16
|
[
"MIT"
] | null | null | null |
hooks/python/generic_crlf_remove.py
|
jphppd/pre-commit-hooks
|
8acd752a70690be39e60f811958b71a18709ec16
|
[
"MIT"
] | 1
|
2022-03-11T15:11:18.000Z
|
2022-03-11T15:11:18.000Z
|
import argparse
import sys
def contains_crlf(filename):
with open(filename, mode='rb') as file_checked:
for line in file_checked.readlines():
if line.endswith(b'\r\n'):
return True
return False
def removes_crlf_in_file(filename):
with open(filename, mode='rb') as file_processed:
lines = file_processed.readlines()
lines = [line.replace(b'\r\n', b'\n') for line in lines]
with open(filename, mode='wb') as file_processed:
for line in lines:
file_processed.write(line)
def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='filenames to check')
args = parser.parse_args(argv)
files_with_crlf = list(filter(contains_crlf, args.filenames))
for file_with_crlf in files_with_crlf:
print('Removing CRLF end-lines in: {}'.format(file_with_crlf))
removes_crlf_in_file(file_with_crlf)
if files_with_crlf:
print('')
print('CRLF end-lines have been successfully removed. Now aborting the commit.')
print('You can check the changes made. Then simply "git add --update ." and re-commit')
return 1
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
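# Example invocation (file names are hypothetical); the exit status 1 tells
# pre-commit that files were modified:
#
#     python generic_crlf_remove.py src/module.py README.md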
| 31.8
| 95
| 0.662736
|
6f4024ea80751856de1352ad99d6cf9a8cac3c60
| 2,701
|
py
|
Python
|
chroma/g4daenode/daesubtree.py
|
NuTufts/chroma_lartpc
|
ea6d1a62d22eeeaac069efdef1068a56be683fcc
|
[
"BSD-3-Clause"
] | null | null | null |
chroma/g4daenode/daesubtree.py
|
NuTufts/chroma_lartpc
|
ea6d1a62d22eeeaac069efdef1068a56be683fcc
|
[
"BSD-3-Clause"
] | null | null | null |
chroma/g4daenode/daesubtree.py
|
NuTufts/chroma_lartpc
|
ea6d1a62d22eeeaac069efdef1068a56be683fcc
|
[
"BSD-3-Clause"
] | null | null | null |
import collada
class DAESubTree(list):
"""
Flattens all or part of a tree of nodes into this list.
Only requires node instances to have a children attribute
which lists other nodes. The list is composed of either:
#. a string representation of the node
#. a tuple of (node, depth, sibdex, indent)
"""
def __init__(self, top, maxdepth=-1, text=True, maxsibling = 5):
"""
:param top: root node instance such as `DAENode`
:param maxdepth: integer maximum recursion depth, default of -1 for unlimited
:param text: when True makes makes list-of-strings representation of tree, otherwise
make list of tuples incoporating the node in first slot of each tuple
:param maxsibling: siblings are skipped when the number of children
of a node exceeds 2*maxsibling,
the first and last `maxsiblings` are incorporated into this list
"""
list.__init__(self)
self.maxdepth = maxdepth
self.text = text
self.cut = maxsibling
self( top )
__str__ = lambda _:"\n".join(_)
def __call__(self, node, depth=0, sibdex=-1, nsibling=-1 ):
"""
:param node:
:param depth:
:param sibdex: sibling index from 0:nsibling-1
:param nsibling:
"""
if not hasattr(node,'children'):
nchildren = 0
else:
nchildren = len(node.children)
pass
        elided = isinstance(node, str)
indent = " " * depth # done here as difficult to do in a webpy template
if self.text:
if elided:
obj = "..."
else:
nodelabel = "%-2d %-5d %-3d" % (depth, node.index, nchildren )
obj = "[%s] %s %3d/%3d : %s " % (nodelabel, indent, sibdex, nsibling, node)
else:
obj = (node, depth, sibdex, indent)
pass
self.append( obj )
if nchildren == 0:# leaf
pass
else:
if depth == self.maxdepth:
pass
else:
shorten = nchildren > self.cut*2
for sibdex, child in enumerate(node.children):
if shorten:
if sibdex < self.cut or sibdex > nchildren - self.cut:
pass
elif sibdex == self.cut:
child = "..."
else:
continue
pass
self(child, depth + 1, sibdex, nchildren )
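# Minimal usage sketch (the Node class is hypothetical; any object exposing
# `children` and -- for text mode -- an integer `index` attribute works):
#
#     class Node:
#         def __init__(self, index, children=()):
#             self.index, self.children = index, list(children)
#         def __str__(self):
#             return "node-%d" % self.index
#
#     root = Node(0, [Node(1), Node(2, [Node(3)])])
#     print(DAESubTree(root))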
| 35.077922
| 92
| 0.500555
|
1fb70c0fab9c4f2f0e6676ad0301943a87030503
| 14,707
|
py
|
Python
|
espnet/bin/asr_recog.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | null | null | null |
espnet/bin/asr_recog.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | null | null | null |
espnet/bin/asr_recog.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# encoding: utf-8
# Copyright 2017 Johns Hopkins University (Shinji Watanabe)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""End-to-end speech recognition model decoding script."""
import configargparse
import logging
import os
import random
import sys
import yaml
import numpy as np
from espnet.utils.cli_utils import strtobool
# NOTE: you need this func to generate our sphinx doc
def get_parser():
"""Get default arguments."""
parser = configargparse.ArgumentParser(
description="Transcribe text from speech using "
"a speech recognition model on one CPU or GPU",
config_file_parser_class=configargparse.YAMLConfigFileParser,
formatter_class=configargparse.ArgumentDefaultsHelpFormatter,
)
# general configuration
parser.add("--config", is_config_file=True, help="Config file path")
parser.add("--train-langs", action="append", help="training languages")
parser.add(
"--config2",
is_config_file=True,
help="Second config file path that overwrites the settings in `--config`",
)
parser.add(
"--config3",
is_config_file=True,
help="Third config file path that overwrites the settings "
"in `--config` and `--config2`",
)
parser.add_argument("--ngpu", type=int, default=0, help="Number of GPUs")
parser.add_argument(
"--dtype",
choices=("float16", "float32", "float64"),
default="float32",
help="Float precision (only available in --api v2)",
)
parser.add_argument(
"--backend",
type=str,
default="chainer",
choices=["chainer", "pytorch"],
help="Backend library",
)
parser.add_argument("--debugmode", type=int, default=1, help="Debugmode")
parser.add_argument("--seed", type=int, default=1, help="Random seed")
parser.add_argument("--verbose", "-V", type=int, default=1, help="Verbose option")
parser.add_argument(
"--batchsize",
type=int,
default=1,
help="Batch size for beam search (0: means no batch processing)",
)
parser.add_argument(
"--preprocess-conf",
type=str,
default=None,
help="The configuration file for the pre-processing",
)
parser.add_argument(
"--api",
default="v1",
choices=["v1", "v2"],
help="Beam search APIs "
"v1: Default API. It only supports the ASRInterface.recognize method "
"and DefaultRNNLM. "
"v2: Experimental API. It supports any models that implements ScorerInterface.",
)
# task related
parser.add_argument(
"--recog-json", type=str, help="Filename of recognition data (json)"
)
parser.add_argument(
"--result-label",
type=str,
required=True,
help="Filename of result label data (json)",
)
# model (parameter) related
parser.add_argument(
"--model", type=str, required=True, help="Model file parameters to read"
)
parser.add_argument(
"--model-conf", type=str, default=None, help="Model config file"
)
parser.add_argument(
"--num-spkrs",
type=int,
default=1,
choices=[1, 2],
help="Number of speakers in the speech",
)
parser.add_argument(
"--num-encs", default=1, type=int, help="Number of encoders in the model."
)
# search related
parser.add_argument("--nbest", type=int, default=1, help="Output N-best hypotheses")
parser.add_argument("--beam-size", type=int, default=1, help="Beam size")
parser.add_argument("--penalty", type=float, default=0.0, help="Incertion penalty")
parser.add_argument(
"--maxlenratio",
type=float,
default=0.0,
help="""Input length ratio to obtain max output length.
If maxlenratio=0.0 (default), it uses a end-detect function
to automatically find maximum hypothesis lengths""",
)
parser.add_argument(
"--minlenratio",
type=float,
default=0.0,
help="Input length ratio to obtain min output length",
)
parser.add_argument(
"--ctc-weight", type=float, default=0.0, help="CTC weight in joint decoding"
)
parser.add_argument(
"--weights-ctc-dec",
type=float,
action="append",
help="ctc weight assigned to each encoder during decoding."
"[in multi-encoder mode only]",
)
parser.add_argument(
"--ctc-window-margin",
type=int,
default=0,
help="""Use CTC window with margin parameter to accelerate
CTC/attention decoding especially on GPU. Smaller magin
makes decoding faster, but may increase search errors.
If margin=0 (default), this function is disabled""",
)
# transducer related
parser.add_argument(
"--score-norm-transducer",
type=strtobool,
nargs="?",
default=True,
help="Normalize transducer scores by length",
)
parser.add_argument(
"--search-type",
type=str,
default="default",
choices=["default", "nsc", "tsd", "alsd"],
help="""Type of beam search implementation to use during inference.
Can be either: default beam search, n-step constrained beam search ("nsc"),
time-synchronous decoding ("tsd") or alignment-length synchronous decoding
("alsd").
Additional associated parameters: "nstep" + "prefix-alpha" (for nsc),
"max-sym-exp" (for tsd) and "u-max" (for alsd)""",
)
parser.add_argument(
"--nstep",
type=int,
default=1,
help="Number of expansion steps allowed in NSC beam search.",
)
parser.add_argument(
"--prefix-alpha",
type=int,
default=2,
help="Length prefix difference allowed in NSC beam search.",
)
parser.add_argument(
"--max-sym-exp",
type=int,
default=2,
help="Number of symbol expansions allowed in TSD decoding.",
)
parser.add_argument(
"--u-max",
type=int,
default=400,
help="Length prefix difference allowed in ALSD beam search.",
)
parser.add_argument(
"--score-norm",
type=strtobool,
nargs="?",
default=True,
help="Normalize transducer scores by length",
)
# rnnlm related
parser.add_argument(
"--rnnlm", type=str, default=None, help="RNNLM model file to read"
)
parser.add_argument(
"--rnnlm-conf", type=str, default=None, help="RNNLM model config file to read"
)
parser.add_argument(
"--word-rnnlm", type=str, default=None, help="Word RNNLM model file to read"
)
parser.add_argument(
"--word-rnnlm-conf",
type=str,
default=None,
help="Word RNNLM model config file to read",
)
parser.add_argument("--word-dict", type=str, default=None, help="Word list to read")
parser.add_argument("--lm-weight", type=float, default=0.1, help="RNNLM weight")
# ngram related
parser.add_argument(
"--ngram-model", type=str, default=None, help="ngram model file to read"
)
parser.add_argument("--ngram-weight", type=float, default=0.1, help="ngram weight")
parser.add_argument(
"--ngram-scorer",
type=str,
default="part",
choices=("full", "part"),
help="""if the ngram is set as a part scorer, similar with CTC scorer,
ngram scorer only scores topK hypethesis.
if the ngram is set as full scorer, ngram scorer scores all hypthesis
the decoding speed of part scorer is musch faster than full one""",
)
# streaming related
parser.add_argument(
"--streaming-mode",
type=str,
default=None,
choices=["window", "segment"],
help="""Use streaming recognizer for inference.
`--batchsize` must be set to 0 to enable this mode""",
)
parser.add_argument("--streaming-window", type=int, default=10, help="Window size")
parser.add_argument(
"--streaming-min-blank-dur",
type=int,
default=10,
help="Minimum blank duration threshold",
)
parser.add_argument(
"--streaming-onset-margin", type=int, default=1, help="Onset margin"
)
parser.add_argument(
"--streaming-offset-margin", type=int, default=1, help="Offset margin"
)
# non-autoregressive related
# Mask CTC related. See https://arxiv.org/abs/2005.08700 for the detail.
parser.add_argument(
"--maskctc-n-iterations",
type=int,
default=10,
help="Number of decoding iterations."
"For Mask CTC, set 0 to predict 1 mask/iter.",
)
parser.add_argument(
"--maskctc-probability-threshold",
type=float,
default=0.999,
help="Threshold probability for CTC output",
)
parser.add_argument(
"--recog-function", type=str, default="recog", help="Recognition function"
)
parser.add_argument(
"--embedding-save-dir",
type=str,
default=None,
help="Filename to save the neural embeddings"
)
parser.add_argument(
"--lang2ph",
type=str,
default=None,
help="Filename of language to phoneme dictionary"
)
parser.add_argument(
"--recog-size",
type=int,
default=None,
help="Recognition set size",
)
parser.add_argument(
"--mask-phoneme",
type=strtobool,
default=False,
help="Filename of language to phoneme dictionary"
)
parser.add_argument(
"--lang-label",
type=strtobool,
default=False,
help="Filename of language to phoneme dictionary"
)
parser.add_argument(
"--fake-lang-label",
type=str,
default=None,
help="Filename of language to phoneme dictionary"
)
parser.add_argument(
"--lang-model",
type=str,
default=None, nargs="?",
help="Filename of language to phoneme dictionary"
)
parser.add_argument(
"--lang-model-weight",
type=float,
default=0, nargs="?",
help="Filename of language to phoneme dictionary"
)
return parser
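# Example (file names are hypothetical; --model and --result-label are the
# only required options):
#
#     parser = get_parser()
#     args = parser.parse_args(["--model", "model.acc.best",
#                               "--result-label", "result.json"])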
def main(args):
"""Run the main decoding function."""
parser = get_parser()
args = parser.parse_args(args)
if args.ngpu == 0 and args.dtype == "float16":
raise ValueError(f"--dtype {args.dtype} does not support the CPU backend.")
# logging info
if args.verbose == 1:
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
)
elif args.verbose == 2:
logging.basicConfig(
level=logging.DEBUG,
format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
)
else:
logging.basicConfig(
level=logging.WARN,
format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
)
logging.warning("Skip DEBUG/INFO messages")
# check CUDA_VISIBLE_DEVICES
if args.ngpu > 0:
cvd = os.environ.get("CUDA_VISIBLE_DEVICES")
if cvd is None:
logging.warning("CUDA_VISIBLE_DEVICES is not set.")
elif args.ngpu != len(cvd.split(",")):
logging.error("#gpus is not matched with CUDA_VISIBLE_DEVICES.")
sys.exit(1)
# TODO(mn5k): support of multiple GPUs
if args.ngpu > 1:
logging.error("The program only supports ngpu=1.")
sys.exit(1)
# display PYTHONPATH
logging.info("python path = " + os.environ.get("PYTHONPATH", "(None)"))
# seed setting
random.seed(args.seed)
np.random.seed(args.seed)
logging.info("set random seed = %d" % args.seed)
# validate rnn options
if args.rnnlm is not None and args.word_rnnlm is not None:
logging.error(
"It seems that both --rnnlm and --word-rnnlm are specified. "
"Please use either option."
)
sys.exit(1)
# recog
logging.info("backend = " + args.backend)
if args.num_spkrs == 1:
if args.backend == "chainer":
from espnet.asr.chainer_backend.asr import recog
recog(args)
elif args.backend == "pytorch":
if args.num_encs == 1:
# Experimental API that supports custom LMs
if args.api == "v2":
if args.recog_function == 'recog_ctconly':
from espnet.asr.pytorch_backend.recog import recog_ctconly
recog_ctconly(args)
elif args.recog_function == 'recog_ctconly_lang':
from espnet.asr.pytorch_backend.recog import recog_ctconly_lang
recog_ctconly_lang(args)
elif args.recog_function == 'recog_v2':
from espnet.asr.pytorch_backend.recog import recog_v2
recog_v2(args)
elif args.recog_function == 'recog_seg':
logging.warning(f'recog size {args.recog_size}')
from espnet.asr.pytorch_backend.recog import recog_seg
recog_seg(args)
else:
raise NotImplementedError('Recognition function is not found.')
else:
from espnet.asr.pytorch_backend.asr import recog
if args.dtype != "float32":
raise NotImplementedError(
f"`--dtype {args.dtype}` is only available with `--api v2`"
)
recog(args)
else:
if args.api == "v2":
raise NotImplementedError(
f"--num-encs {args.num_encs} > 1 is not supported in --api v2"
)
else:
from espnet.asr.pytorch_backend.asr import recog
recog(args)
else:
raise ValueError("Only chainer and pytorch are supported.")
elif args.num_spkrs == 2:
if args.backend == "pytorch":
from espnet.asr.pytorch_backend.asr_mix import recog
recog(args)
else:
raise ValueError("Only pytorch is supported.")
if __name__ == "__main__":
main(sys.argv[1:])
| 33.273756
| 88
| 0.581832
|
9630fe8a8dc1983a69192320430211bc9d5b41a5
| 9,098
|
py
|
Python
|
rManifolds/rUnifKnot.py
|
mauriciogtec/rManifolds
|
07e45871df5f356dca2fae16cb7769c8f865e892
|
[
"MIT"
] | 3
|
2017-10-21T15:24:56.000Z
|
2021-07-27T20:32:20.000Z
|
rManifolds/rUnifKnot.py
|
mauriciogtec/rManifolds
|
07e45871df5f356dca2fae16cb7769c8f865e892
|
[
"MIT"
] | 1
|
2017-10-21T15:25:48.000Z
|
2017-10-21T18:34:09.000Z
|
rManifolds/rUnifKnot.py
|
mauriciogtec/rManifolds
|
07e45871df5f356dca2fae16cb7769c8f865e892
|
[
"MIT"
] | 3
|
2018-03-27T14:16:14.000Z
|
2021-07-27T20:32:22.000Z
|
import random, operator, math, spherogram, numpy
class rUnifKnot():
""
def __init__(self,n,filename=""):
# Author: Mauricio B. Garcia Tec.
# Date: Februrary 2014.
# This function generates a random knot in SnapPy using the module Spherogram
# In: int n.-the number of desired random vertices.
# Out: a object of type Link in SnapPy
random.seed() # iniatilize random number generator
self.num_components = 1
# Generation of n random vertices.
const = 500 # So that the knot can be visualized in the PLink editor witout automatic scaling
x = [const*random.random() for i in range(n)]
y = [const*random.random() for i in range(n)]
z = [const*random.random() for i in range(n)]
# 1. This information shows that a knot is to be saved (a one-component link).
output = '% Link Projection \n1 \n 0 0'
# 2. Prints vertices information.
output += '\n'+str(n)
for i in range(n):
output += '\n '+str(x[i])+' '+str(y[i])
# 3. Connectivity information in standard form.
output += '\n'+str(n)
for i in range(n):
output += '\n '+ str((i+1) % n) +' '+str(i)
# 1. Crossings:
# CHALLENGE: change from vertex information to crossings information.
# We shall add vertex by vertex, looking for crossings with the ones already added.
crossinfo = [] # used for storing crossing information. A list with 6-tuples containing
# (a) an edge, (b) another edge (each crossing pair appears twice)
# (c) 1 if first edge goes above, -1 if second index goes above
# (d) relative distance from the crossing point to the origin of the first-edge
# (e) orientation of the crossing: 1 if the edge passing above is going to the right else -1
# (f) (pot) the coordinates of the point
# (g) crossid (see below)
crossid = 0 # an id for crossings since each crossing appears twice.
# The following two will be used when printing the .lnk file
under = [] #
above = [] #
for i in range(n):
# We add the i-th edge and then check for crossings with the ones already added. We use %n% since it is a circular list.
a0,b0,c0,a1,b1,c1 = x[i],y[i],z[i],x[(i+1)%n],y[(i+1)%n],z[(i+1)%n]
for j in range(i-1):
# Projection onto the x an y coordinates and use a parametric model to verify there is a crossing.
# The formula in use comes from solving the system of two linear equations on t and u:
# (x0,y0)+t(x1-x0,y1-y0) = (a0,b0)+u(a1-a0,b1-b0).
                # If 0 <= t,u <=1, then there is a crossing between two line segments.
x0,y0,z0,x1,y1,z1 = x[j],y[j],z[j],x[(j+1)%n],y[(j+1)%n],z[(j+1)%n]
A,B,C,D = a1-a0,-(x1-x0),b1-b0,-(y1-y0)
det = A*D-B*C
t = (D*(x0-a0)-B*(y0-b0))/det
u = (-C*(x0-a0)+A*(y0-b0))/det
if 0<t and t<1 and 0<u and u<1:
if c0+t*(c1-c0) > z0+u*(z1-z0):
aux = 1 # first index goes above.
under += [j]
above += [i]
else:
aux = -1 # second-index goes above.
under += [i]
above += [j]
# To know whether the edge above is going to the right or to the left with respect to edge below
# we will use the determinant.
# convention: 1 = the above edge is to the right with respect to the edge below
# -1 = the above edge is to the left with respect to the edge below
coords = [x0+u*(x1-x0),y0+u*(y1-y0),z0+u*(z1-z0)] # crossing coordinates (if crossing)
signum = math.copysign(1,(x1-x0)*(b1-b0)-(a1-a0)*(y1-y0))
crossinfo += [(j,i,aux,u,aux*signum,coords,crossid)] # the smaller the u, the closest it is to the vertex vec[j]
crossinfo += [(i,j,-aux,t,aux*signum,coords,crossid)] # the smaller the t, the closest it is to the vertex vec[i]
# Saving the t and u allows us to recreate the crossing information while going around the knot.
crossid += 1 # one more crossing
N = len(crossinfo) # vector length.
        totcross = N // 2 # total number of crossings found (each crossing appears twice in crossinfo).
        # We want to sort the crossings as if we were traveling from one vertex to the next in order and going around the knot.
        crossinfo.sort(key=operator.itemgetter(0,3)) # (Nice stuff!).
        crossid = [row[6] for row in crossinfo] # we extract the crossid column for simplicity
##############
        # With the generated knot we will I) build a .lnk file and store it there II) create an object using the spherogram module
# I) Print the .lnk file for the PLink editor.
output += '\n'+str(totcross)
for i in range(totcross):
output += '\n '+str(under[i])+' '+str(above[i])
output += '\n'+str(-1)
if (filename!=""):
namelen = len(filename)
if (namelen >4):
if (filename[(namelen-4):namelen]!=".lnk"):
filename = filename + ".lnk"
else:
filename = filename + ".lnk"
self.filename = filename
try:
with open(filename,'wt') as file_output:
file_output.write(output)
if ("/" not in filename):
print('A file named "'+filename+'" was created in the current working directory.')
else:
print('A file in \n'+filename+'\nwas created')
except ValueError:
print('A file in the specified directory could not be created!')
# II) Use the Spherogram module instead.
        crossings = [spherogram.links.links.Crossing(j) for j in range(totcross)] # one Crossing object per crossing found above
# We need to mark each crossing
k = 0
while k < N:
# We will proceed as follows:
# For each crossing in visit order (crossinfo) we will have two cases: whether the flows is going above or below.
# In both cases we look at the previous and the following knot and with that determine how to tie the crossings.
# The convention is that each crossing has entries 0,1,2,3 where 0 corresponds to the edge incoming from below
# having the lowest index. Entries 1,2,3 and assigned moving counterclockwise.
# There will be 3 interesting cases. If the flow goes below, if the flow goes above in standard direction (the above
# edge passes to the right) and if it goes above in skew direction (to the left).
# FIRST CASE
# We know in this case that the previous crossing in the list will be connected to crossings[k][0] and
# the one that follows to crossings[k][2]. We need now to check the crossing type of the previous and
# the following to know to which of their entries to connect.
if crossinfo[k][2]==-1:
prev = 0
proc = 2
# SECOND CASE
# If above then the sign will determine which entry to connect.
else:
if crossinfo[k][4]==1:
prev = 3
proc = 1
# THIRD CASE
else:
prev = 1
proc = 3
# Now we connect to the previous and next crossings again depending on their type.
if crossinfo[(k-1)%N][2]==-1: # Case a) The previous crossing's flow is below.
crossings[crossid[k]][prev] = crossings[crossid[(k-1)%N]][2]
else: # Case b) If the flow is going above, we need to know we must look at the signum (left or right).
if crossinfo[(k-1)%N][4] == 1: # Case i) standard direction, the above edge passes to the right.
crossings[crossid[k]][prev] = crossings[crossid[(k-1)%N]][1]
else: # Case ii) skew direction
crossings[crossid[k]][prev] = crossings[crossid[(k-1)%N]][3]
# The instructions are similar for the following cross.
            if crossinfo[(k+1)%N][2]==-1: # Case a) The following crossing's flow is below.
crossings[crossid[k]][proc] = crossings[crossid[(k+1)%N]][0]
else: # Case b) Flow goes above, we look at signum
if crossinfo[(k+1)%N][4] == 1: # Case i) standard direction, the above edge passes to the right.
crossings[crossid[k]][proc] = crossings[crossid[(k+1)%N]][3]
else: # Case ii) skew direction
crossings[crossid[k]][proc] = crossings[crossid[(k+1)%N]][1]
k += 1
        # We now store the complement of the knot in a manifold object.
self.manifold = spherogram.links.links.Link(crossings).exterior()
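# Usage sketch (hypothetical; requires spherogram/SnapPy to be installed):
#
#     k = rUnifKnot(30, filename="random_knot")  # also writes random_knot.lnk
#     print(k.manifold.num_cusps())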
| 56.509317
| 133
| 0.562102
|
594fc3bceb0011a3f8cfcbdf5531a9ed9e5e8039
| 498
|
py
|
Python
|
tests/api_root.py
|
talavis/DataGraphics
|
d2be4e83b9ce6aeb89a3021ec9ed61793f24cb87
|
[
"MIT"
] | null | null | null |
tests/api_root.py
|
talavis/DataGraphics
|
d2be4e83b9ce6aeb89a3021ec9ed61793f24cb87
|
[
"MIT"
] | null | null | null |
tests/api_root.py
|
talavis/DataGraphics
|
d2be4e83b9ce6aeb89a3021ec9ed61793f24cb87
|
[
"MIT"
] | null | null | null |
"Test the API Root resource."
import http.client
import unittest
import requests
import utils
class Root(utils.ApiMixin, unittest.TestCase):
"Test the API Root resource."
def test_root_data(self):
"Get root information."
url = f"{self.settings['BASE_URL']}api"
response = requests.get(url, headers=self.headers)
self.assertEqual(response.status_code, http.client.OK)
self.check_schema(response)
if __name__ == "__main__":
unittest.main()
| 20.75
| 62
| 0.688755
|
6e93ad89b1c4232026ec37e2384e9ef0528f82d2
| 10,391
|
py
|
Python
|
mule/python/mule/JobParallelization.py
|
valentinaschueller/sweet
|
27e99c7a110c99deeadee70688c186d82b39ac90
|
[
"MIT"
] | 6
|
2017-11-20T08:12:46.000Z
|
2021-03-11T15:32:36.000Z
|
mule/python/mule/JobParallelization.py
|
valentinaschueller/sweet
|
27e99c7a110c99deeadee70688c186d82b39ac90
|
[
"MIT"
] | 4
|
2018-02-02T21:46:33.000Z
|
2022-01-11T11:10:27.000Z
|
mule/python/mule/JobParallelization.py
|
valentinaschueller/sweet
|
27e99c7a110c99deeadee70688c186d82b39ac90
|
[
"MIT"
] | 12
|
2016-03-01T18:33:34.000Z
|
2022-02-08T22:20:31.000Z
|
import math
import sys
from functools import reduce
import operator
from mule.InfoError import *
from mule.JobPlatformResources import *
from mule.JobParallelizationDimOptions import *
__all__ = ['JobParallelization']
def _prod(iterable):
return reduce(operator.mul, iterable, 1)
class JobParallelization(InfoError):
"""
    This class stores information on how each program should be executed on a platform.
The application has to initialize the variables in a sufficient way so that the
final configuration to execute the program on a cluster can be inferred from this.
Terminology:
------------
'num_threads':
Number of running threads within one MPI rank
'num_cores':
Number of physical processing cores
'rank':
MPI rank
"""
def __init__(self, dummy_init = False):
self.init_phase = True
InfoError.__init__(self, "JobParallelization")
self.reset(dummy_init)
#
# WARNING:
# Leave these variables here to ensure being not influenced by reset()
#
#
# Disable utilization of `mpiexec` to run job
# This is required to run e.g. the validation scripts should be
# (currently) executed on a single node and without MPI support
self.mpiexec_disabled = False
# Force disabling of turbo mode (if supported)
self.force_turbo_off = False
# Qualitative settings
# Allow oversubscription (aka Hyperthreading)
self.core_oversubscription = False
# affinities:
# compact, scatter
self.core_affinity = None
# max wallclock time, default: 1h
self.max_wallclock_seconds = 60*60
self.init_phase = False
def __setattr__(self, name, value):
if name != 'init_phase':
if not self.init_phase:
if not name in self.__dict__:
raise Exception("Attribute '"+name+"' does not exist!")
self.__dict__[name] = value
def reset(self, dummy_init = False):
"""
Reset functionality for a fresh configuration in case that a new setup is triggered
"""
# Number of cores per rank
self.num_cores_per_rank : int = None
# Number of threads per rank
self.num_threads_per_rank = None
# Number of ranks per node
self.num_ranks_per_node = None
# Number of cores per node
self.num_cores_per_node : int = None
# Number of total ranks
self.num_ranks = None
# Number of total nodes
self.num_nodes = None
# Number of total cores
self.num_cores = None
# List with parallelization information in each dimension
# Note, that space dimension can and should be treated as a single dimension
self.pardims = None
self.pardims_dict = {}
def get_max_wallclock_seconds_hh_mm_ss(self):
"""
Return properly formatted self.max_wallclock_seconds usable for job scripts
"""
secs = self.max_wallclock_seconds
# seconds
s = int(secs)
m = s // 60
s = s % 60
h = m // 60
m = m % 60
stest = h*60*60 + m*60 + s
if int(secs) != stest:
print(secs)
print(stest)
raise Exception("Internal error!")
return str(h).zfill(2)+":"+str(m).zfill(2)+":"+str(s).zfill(2)
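    # e.g. max_wallclock_seconds = 3661 is formatted as "01:01:01"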
def print(self):
        if self.pardims is None:
print("No dimension-wise parallelization information specified")
else:
for i in self.pardims:
i.hline()
i.print()
self.hline()
self.hline()
self.info("num_cores_per_rank: "+str(self.num_cores_per_rank))
self.info("num_threads_per_rank: "+str(self.num_threads_per_rank))
self.info("num_ranks_per_node: "+str(self.num_ranks_per_node))
self.info("num_cores_per_node: "+str(self.num_cores_per_node))
self.info("num_ranks: "+str(self.num_ranks))
self.info("num_nodes: "+str(self.num_nodes))
self.info("num_cores: "+str(self.num_cores))
self.info("max_wallclock_seconds: "+str(self.max_wallclock_seconds))
self.info("mpiexec_disabled: "+str(self.mpiexec_disabled))
self.info("force_turbo_off: "+str(self.force_turbo_off))
def dummy_setup_if_no_setup(self, platform_resources : JobPlatformResources):
"""
Setup a dummy parallelization dimension to use one rank on one node and all cores on the node
"""
        if self.pardims is None:
dummy = JobParallelizationDimOptions("dummy")
dummy.num_cores = platform_resources.num_cores_per_node
dummy.num_cores_per_rank = dummy.num_cores
dummy.num_threads_per_rank = dummy.num_cores
dummy.num_ranks = 1
self.setup([dummy], platform_resources)
self.print()
def setup(self, list_pardims, platform_resources : JobPlatformResources):
"""
Setup data which is required by the platform specific scripts to
generate the job scripts
Parameters
----------
list_pardims: JobParallelizationDimOptions
List with options for parallelization in each dimension
platform_resources
reference to jobgeneration class
#mode : string
# 'serial': No parallelization
"""
self.reset()
self.pardims = list_pardims
        # Support space-only parallelization without a list
if not isinstance(self.pardims, list):
self.pardims = [self.pardims]
# First, we setup each dimension
# This also runs a validation checker over it
dim_id = 0
self.pardims_dict = {}
for i in self.pardims:
i.setup(dim_id)
dim_id += 1
self.pardims_dict[i.dim_name] = i
# Compute total number of resources over all dimensions
self.num_cores_per_rank = _prod(i.num_cores_per_rank for i in self.pardims)
# Check if number of cores per rank exceeds the available number of cores per node
if self.num_cores_per_rank > platform_resources.num_cores_per_node:
self.print()
self.error("Invalid config for parallelization: self.num_cores_per_rank >= platform_resources.num_cores_per_node")
# Number of ranks
self.num_ranks = _prod(i.num_ranks for i in self.pardims)
if self.num_ranks <= 0:
self.error("self.num_ranks <= 0")
# Check how many ranks we can run on each node
        self.num_ranks_per_node = platform_resources.num_cores_per_node // self.num_cores_per_rank
if self.num_ranks_per_node <= 0:
self.error("self.num_ranks_per_node <= 0")
# Reduce ranks per node if only a single node is used with all ranks on this particular node
if self.num_ranks_per_node > self.num_ranks:
self.num_ranks_per_node = self.num_ranks
# Compute number of cores per node
        if self.num_cores_per_node is None:
self.num_cores_per_node = self.num_cores_per_rank*self.num_ranks_per_node
#
# Compute raw numbers and compare to new number
# The new number must be always \leq than the raw number
# due to additional restrictions
#
# We do this mainly for debugging restrictions
#
# VALIDATION for inconsistencies
raw_num_ranks = _prod(i.num_ranks for i in self.pardims)
if self.num_ranks < raw_num_ranks:
self.print()
self.error("Internal error: self.num_ranks < raw_num_ranks")
# Number of nodes
self.num_nodes = int(math.ceil(self.num_ranks / self.num_ranks_per_node))
if self.num_nodes <= 0:
self.error("self.num_nodes <= 0")
# VALIDATION for inconsistencies
if self.num_nodes * self.num_ranks_per_node != self.num_ranks:
self.print()
self.error("Error: self.num_nodes * self.num_ranks_per_node != self.num_ranks\n******* Please change your job settings to avoid this *******")
self.num_cores = self.num_nodes * platform_resources.num_cores_per_node
#
# VALIDATION for hardware restrictions
#
# Enough computing cores?
if self.num_ranks*self.num_cores_per_rank > platform_resources.num_cores:
self.print()
self.error("Invalid config for parallelization: self.num_ranks*self.num_cores_per_rank > platform_resources.num_cores")
if self.num_cores > platform_resources.num_cores:
self.print()
self.error("Invalid config for parallelization: self.num_cores > platform_resources.num_cores")
#
# Finally, setup variables without any restrictions
#
# Number of total (e.g. OpenMP) threads per rank (There are no restrictions for logical threading)
self.num_threads_per_rank = _prod(i.num_threads_per_rank for i in self.pardims)
def getUniqueID(self, i_filters):
"""
Return a unique ID including *all* string and number attributes of this class
i_filter:
list of filter names to filter out from unique ID generation
"""
retval = ''
if not 'parallelization' in i_filters:
if not 'parallelization.mpi_ranks' in i_filters:
# mpi ranks
retval += "_r"+str(self.num_ranks).zfill(5)
if not 'parallelization.cores_per_rank' in i_filters:
# cores per rank
retval += "_cpr"+str(self.num_cores_per_rank).zfill(3)
if not 'parallelization.threads_per_rank' in i_filters:
# threads per rank
retval += "_tpr"+str(self.num_threads_per_rank).zfill(3)
if not 'parallelization.dims' in i_filters:
retval += "_DIMS"
for i in self.pardims:
retval += '_'+i.dim_name+str(i.num_cores).zfill(3)
if retval != '':
retval = 'PAR'+retval
return retval
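    # Example: with num_ranks=4, num_cores_per_rank=2, num_threads_per_rank=2
    # and no filters, the ID starts with "PAR_r00004_cpr002_tpr002_DIMS";
    # the dimension suffixes depend on the configured pardims.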
if __name__ == "__main__":
p = JobParallelization()
    # pardims is not configured here, so exclude the dimension part of the ID
    s = p.getUniqueID(['parallelization.dims'])
p.info(s)
p.print()
p.info("FIN")
| 31.776758
| 154
| 0.621114
|
1e1263a21dd51f5c5511e5ff651ffe529655b923
| 31
|
py
|
Python
|
mmterm/__init__.py
|
sbliven/mmterm
|
042b2280fc58932870ef7f079d8743e4fc420ee7
|
[
"MIT"
] | null | null | null |
mmterm/__init__.py
|
sbliven/mmterm
|
042b2280fc58932870ef7f079d8743e4fc420ee7
|
[
"MIT"
] | null | null | null |
mmterm/__init__.py
|
sbliven/mmterm
|
042b2280fc58932870ef7f079d8743e4fc420ee7
|
[
"MIT"
] | null | null | null |
from .view import view_protein
| 15.5
| 30
| 0.83871
|
ae90dcdbab32670e3ffa719957d44c887763814d
| 4,295
|
py
|
Python
|
social/actions.py
|
dongweiming/flask_reveal
|
739a0c49d2fbf5a36c7b9cb11ec1303b9c8b0e83
|
[
"BSD-3-Clause"
] | 41
|
2015-01-31T08:23:04.000Z
|
2021-03-20T08:55:23.000Z
|
social/actions.py
|
dongweiming/flask_reveal
|
739a0c49d2fbf5a36c7b9cb11ec1303b9c8b0e83
|
[
"BSD-3-Clause"
] | null | null | null |
social/actions.py
|
dongweiming/flask_reveal
|
739a0c49d2fbf5a36c7b9cb11ec1303b9c8b0e83
|
[
"BSD-3-Clause"
] | 19
|
2015-01-17T11:50:35.000Z
|
2019-05-01T22:49:02.000Z
|
from social.p3 import quote
from social.utils import sanitize_redirect, user_is_authenticated, \
user_is_active
def do_auth(strategy, redirect_name='next'):
# Save any defined next value into session
data = strategy.request_data(merge=False)
# Save extra data into session.
for field_name in strategy.setting('FIELDS_STORED_IN_SESSION', []):
if field_name in data:
strategy.session_set(field_name, data[field_name])
if redirect_name in data:
# Check and sanitize a user-defined GET/POST next field value
redirect_uri = data[redirect_name]
if strategy.setting('SANITIZE_REDIRECTS', True):
redirect_uri = sanitize_redirect(strategy.request_host(),
redirect_uri)
strategy.session_set(
redirect_name,
redirect_uri or strategy.setting('LOGIN_REDIRECT_URL')
)
return strategy.start()
def do_complete(strategy, login, user=None, redirect_name='next',
*args, **kwargs):
# pop redirect value before the session is trashed on login()
data = strategy.request_data()
redirect_value = strategy.session_get(redirect_name, '') or \
data.get(redirect_name, '')
is_authenticated = user_is_authenticated(user)
user = is_authenticated and user or None
default_redirect = strategy.setting('LOGIN_REDIRECT_URL')
url = default_redirect
login_error_url = strategy.setting('LOGIN_ERROR_URL') or \
strategy.setting('LOGIN_URL')
if strategy.session_get('partial_pipeline'):
idx, backend, xargs, xkwargs = strategy.from_session(
strategy.session_pop('partial_pipeline')
)
if backend == strategy.backend_name:
kwargs = kwargs.copy()
kwargs.setdefault('user', user)
kwargs.update(xkwargs)
            # pass the merged kwargs built above (otherwise they are unused)
            user = strategy.continue_pipeline(pipeline_index=idx,
                                              *xargs, **kwargs)
else:
strategy.clean_partial_pipeline()
user = strategy.complete(user=user, request=strategy.request,
*args, **kwargs)
else:
user = strategy.complete(user=user, request=strategy.request,
*args, **kwargs)
if strategy.is_response(user):
return user
if is_authenticated:
if not user:
url = redirect_value or default_redirect
else:
url = redirect_value or \
strategy.setting('NEW_ASSOCIATION_REDIRECT_URL') or \
default_redirect
elif user:
if user_is_active(user):
# catch is_new/social_user in case login() resets the instance
is_new = getattr(user, 'is_new', False)
social_user = user.social_user
login(strategy, user)
# store last login backend name in session
strategy.session_set('social_auth_last_login_backend',
social_user.provider)
# Remove possible redirect URL from session, if this is a new
# account, send him to the new-users-page if defined.
new_user_redirect = strategy.setting('NEW_USER_REDIRECT_URL')
if new_user_redirect and is_new:
url = new_user_redirect
else:
url = redirect_value or default_redirect
else:
url = strategy.setting('INACTIVE_USER_URL', login_error_url)
else:
url = login_error_url
if redirect_value and redirect_value != url:
redirect_value = quote(redirect_value)
url += ('?' in url and '&' or '?') + \
'%s=%s' % (redirect_name, redirect_value)
if url == '/':
url = '/' + user.username
return strategy.redirect(url)
def do_disconnect(strategy, user, association_id=None, redirect_name='next'):
strategy.disconnect(user=user, association_id=association_id)
data = strategy.request_data()
return strategy.redirect(data.get(redirect_name, '') or
strategy.setting('DISCONNECT_REDIRECT_URL') or
strategy.setting('LOGIN_REDIRECT_URL'))
| 40.904762
| 77
| 0.609546
|
9b16d505de142a48ac819e9b52132bcf642b945d
| 765
|
py
|
Python
|
river/optim/__init__.py
|
online-ml/creme
|
60872844e6052b5ef20e4075aea30f9031377136
|
[
"BSD-3-Clause"
] | 1,105
|
2019-01-24T15:15:30.000Z
|
2020-11-10T18:27:00.000Z
|
river/optim/__init__.py
|
online-ml/creme
|
60872844e6052b5ef20e4075aea30f9031377136
|
[
"BSD-3-Clause"
] | 328
|
2019-01-25T13:48:43.000Z
|
2020-11-11T11:41:44.000Z
|
river/optim/__init__.py
|
online-ml/creme
|
60872844e6052b5ef20e4075aea30f9031377136
|
[
"BSD-3-Clause"
] | 150
|
2019-01-29T19:05:21.000Z
|
2020-11-11T11:50:14.000Z
|
"""Stochastic optimization."""
from . import base, initializers, losses, schedulers
from .ada_bound import AdaBound
from .ada_delta import AdaDelta
from .ada_grad import AdaGrad
from .ada_max import AdaMax
from .adam import Adam
from .ams_grad import AMSGrad
from .average import Averager
from .ftrl import FTRLProximal
from .momentum import Momentum
from .nadam import Nadam
from .nesterov import NesterovMomentum
from .rms_prop import RMSProp
from .sgd import SGD
from .base import Optimizer  # re-export so that "Optimizer" in __all__ below resolves
__all__ = [
"base",
"AdaBound",
"AdaDelta",
"AdaGrad",
"Adam",
"AMSGrad",
"AdaMax",
"Averager",
"FTRLProximal",
"initializers",
"losses",
"Momentum",
"Nadam",
"NesterovMomentum",
"Optimizer",
"RMSProp",
"schedulers",
"SGD",
]
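# Usage sketch (assuming the river package is installed): optimizers are
# typically handed to a model, e.g.
#
#     from river import linear_model, optim
#     model = linear_model.LinearRegression(optimizer=optim.SGD(0.01))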
| 20.675676
| 52
| 0.690196
|
7b415d09fc99ae10960e90f37cbe2ebc258a9265
| 20
|
py
|
Python
|
catalog/apps/__init__.py
|
sonico999/django-model
|
8e2d1136573adbcf6fe3e2b7bf2b192c0b63207a
|
[
"MIT"
] | null | null | null |
catalog/apps/__init__.py
|
sonico999/django-model
|
8e2d1136573adbcf6fe3e2b7bf2b192c0b63207a
|
[
"MIT"
] | null | null | null |
catalog/apps/__init__.py
|
sonico999/django-model
|
8e2d1136573adbcf6fe3e2b7bf2b192c0b63207a
|
[
"MIT"
] | null | null | null |
__author__ = 'eveR'
| 10
| 19
| 0.7
|
f71d91b6162a24bdd995d0f0381a0643a85de953
| 4,535
|
py
|
Python
|
Chapter03/Chapter_3/musegen/musegen.py
|
YMandCL/Hands-On-Deep-Learning-for-Games
|
0225661409c3bf59ae6b7996c254bb485ebd10cb
|
[
"MIT"
] | 33
|
2018-12-29T15:39:20.000Z
|
2022-03-18T14:36:11.000Z
|
Chapter03/Chapter_3/musegen/musegen.py
|
YMandCL/Hands-On-Deep-Learning-for-Games
|
0225661409c3bf59ae6b7996c254bb485ebd10cb
|
[
"MIT"
] | 4
|
2019-05-01T08:30:47.000Z
|
2020-08-14T21:13:53.000Z
|
Chapter03/Chapter_3/musegen/musegen.py
|
YMandCL/Hands-On-Deep-Learning-for-Games
|
0225661409c3bf59ae6b7996c254bb485ebd10cb
|
[
"MIT"
] | 14
|
2019-01-13T15:52:08.000Z
|
2021-10-10T06:14:39.000Z
|
# Currently this script is configured to use the note-generator model.
from config import sequence_length, output_dir, note_generator_dir
from helper import loadChorales, loadModelAndWeights, createPitchSpecificVocabularies, createDurationVocabularySpecific
from music21 import note, instrument, stream, duration
import numpy as np
import os
# disable GPU processing
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# ----------------------------------------------
from keras.utils import to_categorical
# select the epoch to use when loading the weights of the model generator
generator_epoch = 43
# how many notes to generate ('end' marks are created along the way and the result is split into pieces)
number_of_notes = 200
# load chorales to create the vocabularies
print('loading chorales...')
notes = loadChorales()
# create the vocabulary
note_vocab, note_names_vocab, note_vocab_categorical = createPitchSpecificVocabularies([x[0] for (x, _) in notes])
duration_vocab = createDurationVocabularySpecific([d for (_, d) in notes])
duration_vocab_categorical = to_categorical(range(len(duration_vocab)))
note_to_int = dict((note, number) for number, note in enumerate(note_vocab))
int_to_note = dict((number, note) for number, note in enumerate(note_vocab))
duration_to_int = dict((dur, number) for number, dur in enumerate(duration_vocab))
duration_dim = duration_vocab.shape[0]
pitch_dim = np.array(note_vocab).shape[0]
print('loading networks...')
dir_path = os.path.dirname(os.path.realpath(__file__))
generator = loadModelAndWeights(os.path.join(dir_path, note_generator_dir, 'model.json'),
os.path.join(dir_path, note_generator_dir, 'weights-{:02d}.hdf5'.format(generator_epoch)))
# make a melody!!!
pitch_input = np.eye(pitch_dim)[np.random.choice(pitch_dim, size=sequence_length)]
duration_input = np.eye(duration_dim)[np.random.choice(duration_dim, size=sequence_length)]
print('generating output...')
# generate notes
generator_output = []
for _ in range(number_of_notes):
# reshape inputs
pi = np.reshape(pitch_input, (1, sequence_length, pitch_dim))
di = np.reshape(duration_input, (1, sequence_length, duration_dim))
# make prediction
pitch_pred, dur_pred = generator.predict({'pitches_input': pi, 'durations_input': di}, verbose=0)
generator_output.append((pitch_pred, dur_pred))
pitch_input = np.vstack([pitch_input, pitch_pred])
pitch_input = pitch_input[1:len(pitch_input)]
duration_input = np.vstack([duration_input, dur_pred])
duration_input = duration_input[1:len(duration_input)]
output_notes = [(int_to_note[np.argmax(n)], duration_vocab[np.argmax(d)]) for (n, d) in generator_output]
output_notes = np.array(output_notes)
output_notes = np.reshape(output_notes, (-1, 2))
# output_notes contains: pitch values in midi format (integers), 'rest' marks, 'end' marks
# split the generated notes into pieces based on 'end' marks
indices = []
for (ind, (n, _)) in enumerate(output_notes):
if n == 'end':
indices.append(ind)
indices = np.insert(np.reshape(indices, (-1)), 0, 0)
pieces = [output_notes]
if len(indices) > 1:
pieces = ([ output_notes[(indices[j] + 1):indices[j + 1] ] for j in range(len(indices) - 1)])
print('writing output to disk...')
os.makedirs(os.path.join(dir_path, output_dir, 'note-generator'), exist_ok=True)
# output pieces to midi files
for index, notes in enumerate(pieces):
midi_notes = []
offset = 0
for n, d in notes:
# since a duration of 0 is included in the vocabulary (for the 'end' marks), the network may generate a 0 duration for other notes
# naively correct and report this erroneous behaviour
if abs(float(d)) < 0.001:
print('found zero duration')
d = '1.0'
if n == 'rest':
new_note = note.Rest()
new_note.duration = duration.Duration(float(d))
new_note.offset = offset
new_note.storedInstrument = instrument.Piano()
midi_notes.append(new_note)
else:
new_note = note.Note(int(n))
new_note.duration = duration.Duration(float(d))
new_note.offset = offset
new_note.storedInstrument = instrument.Piano()
midi_notes.append(new_note)
offset += float(d)
midi_stream = stream.Stream(midi_notes)
midi_stream.write('midi', fp=os.path.join(dir_path, output_dir, 'note-generator', 'sample-{}.mid'.format(index)))
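# Example invocation (assumes trained note-generator weights, e.g.
# weights-43.hdf5, are available under note_generator_dir from config.py):
#
#     python musegen.py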
| 39.094828
| 138
| 0.704961
|
277d248697293716404d4dde831b68f4c36842bf
| 203
|
py
|
Python
|
Unit2/P2.4.4_A.py
|
capravictoriae/Space-Mission-Design-and-Operations-EE585x-MOOC
|
a5f6aaaae06969eaa5e3d0847093bdb7433083b8
|
[
"Apache-2.0"
] | null | null | null |
Unit2/P2.4.4_A.py
|
capravictoriae/Space-Mission-Design-and-Operations-EE585x-MOOC
|
a5f6aaaae06969eaa5e3d0847093bdb7433083b8
|
[
"Apache-2.0"
] | null | null | null |
Unit2/P2.4.4_A.py
|
capravictoriae/Space-Mission-Design-and-Operations-EE585x-MOOC
|
a5f6aaaae06969eaa5e3d0847093bdb7433083b8
|
[
"Apache-2.0"
] | 2
|
2021-08-20T18:47:26.000Z
|
2021-08-23T16:39:59.000Z
|
earth_mu = 3.986e14
earth_r = 6.378e6 # m
speed = 7.76 * 1000 # m/s
# V = sqrt(mu/r) => V^2 = mu/r => V^2/mu = 1/r
# mu / V^2 = r
dist = earth_mu / (speed**2)
dist = dist - earth_r
print(dist/1000)
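# expected output: roughly 241 (circular-orbit altitude in km at 7.76 km/s)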
| 16.916667
| 46
| 0.55665
|
eeb06ce8f69c574aa71090d7b36ba6533a8aee54
| 7,723
|
py
|
Python
|
youtube_comment_downloader/downloader.py
|
leostormrage/youtube-comment-downloader
|
d82419d0653cb1cbd7d3c873dc9f9d70bbc3b0e2
|
[
"MIT"
] | 1
|
2021-05-08T04:30:58.000Z
|
2021-05-08T04:30:58.000Z
|
youtube_comment_downloader/downloader.py
|
leostormrage/youtube-comment-downloader
|
d82419d0653cb1cbd7d3c873dc9f9d70bbc3b0e2
|
[
"MIT"
] | null | null | null |
youtube_comment_downloader/downloader.py
|
leostormrage/youtube-comment-downloader
|
d82419d0653cb1cbd7d3c873dc9f9d70bbc3b0e2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
import argparse
import io
import json
import os
import sys
import time
import requests
YOUTUBE_VIDEO_URL = 'https://www.youtube.com/watch?v={youtube_id}'
YOUTUBE_COMMENTS_AJAX_URL = 'https://www.youtube.com/comment_service_ajax'
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36'
SORT_BY_POPULAR = 0
SORT_BY_RECENT = 1
def find_value(html, key, num_chars=2, separator='"'):
pos_begin = html.find(key) + len(key) + num_chars
pos_end = html.find(separator, pos_begin)
return html[pos_begin: pos_end]
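# Example: find_value('"XSRF_TOKEN":"abc",', 'XSRF_TOKEN', 3) returns 'abc';
# the three skipped characters are the closing quote, the colon and the
# opening quote.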
def ajax_request(session, url, params=None, data=None, headers=None, retries=5, sleep=20):
for _ in range(retries):
response = session.post(url, params=params, data=data, headers=headers)
if response.status_code == 200:
return response.json()
if response.status_code in [403, 413]:
return {}
else:
time.sleep(sleep)
def download_comments(youtube_id, sort_by=SORT_BY_RECENT, sleep=.1):
session = requests.Session()
session.headers['User-Agent'] = USER_AGENT
response = session.get(YOUTUBE_VIDEO_URL.format(youtube_id=youtube_id))
if 'uxe=' in response.request.url:
session.cookies.set('CONSENT', 'YES+cb', domain='.youtube.com')
response = session.get(YOUTUBE_VIDEO_URL.format(youtube_id=youtube_id))
html = response.text
session_token = find_value(html, 'XSRF_TOKEN', 3)
session_token = session_token.encode('ascii').decode('unicode-escape')
data = json.loads(find_value(html, 'var ytInitialData = ', 0, '};') + '}')
for renderer in search_dict(data, 'itemSectionRenderer'):
ncd = next(search_dict(renderer, 'nextContinuationData'), None)
if ncd:
break
if not ncd:
# Comments disabled?
return
needs_sorting = sort_by != SORT_BY_POPULAR
continuations = [(ncd['continuation'], ncd['clickTrackingParams'], 'action_get_comments')]
while continuations:
continuation, itct, action = continuations.pop()
response = ajax_request(session, YOUTUBE_COMMENTS_AJAX_URL,
params={action: 1,
'pbj': 1,
'ctoken': continuation,
'continuation': continuation,
'itct': itct},
data={'session_token': session_token},
headers={'X-YouTube-Client-Name': '1',
'X-YouTube-Client-Version': '2.20201202.06.01'})
if not response:
break
if list(search_dict(response, 'externalErrorMessage')):
raise RuntimeError('Error returned from server: ' + next(search_dict(response, 'externalErrorMessage')))
if needs_sorting:
sort_menu = next(search_dict(response, 'sortFilterSubMenuRenderer'), {}).get('subMenuItems', [])
if sort_by < len(sort_menu):
ncd = sort_menu[sort_by]['continuation']['reloadContinuationData']
continuations = [(ncd['continuation'], ncd['clickTrackingParams'], 'action_get_comments')]
needs_sorting = False
continue
raise RuntimeError('Failed to set sorting')
if action == 'action_get_comments':
section = next(search_dict(response, 'itemSectionContinuation'), {})
for continuation in section.get('continuations', []):
ncd = continuation['nextContinuationData']
continuations.append((ncd['continuation'], ncd['clickTrackingParams'], 'action_get_comments'))
for item in section.get('contents', []):
continuations.extend([(ncd['continuation'], ncd['clickTrackingParams'], 'action_get_comment_replies')
for ncd in search_dict(item, 'nextContinuationData')])
elif action == 'action_get_comment_replies':
continuations.extend([(ncd['continuation'], ncd['clickTrackingParams'], 'action_get_comment_replies')
for ncd in search_dict(response, 'nextContinuationData')])
for comment in search_dict(response, 'commentRenderer'):
yield {'cid': comment['commentId'],
'text': ''.join([c['text'] for c in comment['contentText'].get('runs', [])]),
'time': comment['publishedTimeText']['runs'][0]['text'],
'author': comment.get('authorText', {}).get('simpleText', ''),
'channel': comment['authorEndpoint']['browseEndpoint']['browseId'],
'votes': comment.get('voteCount', {}).get('simpleText', '0'),
'photo': comment['authorThumbnail']['thumbnails'][-1]['url'],
'heart': next(search_dict(comment, 'isHearted'), False)}
time.sleep(sleep)
def search_dict(partial, search_key):
stack = [partial]
while stack:
current_item = stack.pop()
if isinstance(current_item, dict):
for key, value in current_item.items():
if key == search_key:
yield value
else:
stack.append(value)
elif isinstance(current_item, list):
for value in current_item:
stack.append(value)
def main(argv = None):
parser = argparse.ArgumentParser(add_help=False, description=('Download Youtube comments without using the Youtube API'))
parser.add_argument('--help', '-h', action='help', default=argparse.SUPPRESS, help='Show this help message and exit')
parser.add_argument('--youtubeid', '-y', help='ID of Youtube video for which to download the comments')
parser.add_argument('--output', '-o', help='Output filename (output format is line delimited JSON)')
parser.add_argument('--limit', '-l', type=int, help='Limit the number of comments')
parser.add_argument('--sort', '-s', type=int, default=SORT_BY_RECENT,
help='Whether to download popular (0) or recent comments (1). Defaults to 1')
try:
args = parser.parse_args() if argv is None else parser.parse_args(argv)
youtube_id = args.youtubeid
output = args.output
limit = args.limit
if not youtube_id or not output:
parser.print_usage()
raise ValueError('you need to specify a Youtube ID and an output filename')
if os.sep in output:
outdir = os.path.dirname(output)
if not os.path.exists(outdir):
os.makedirs(outdir)
print('Downloading Youtube comments for video:', youtube_id)
count = 0
with io.open(output, 'w', encoding='utf8') as fp:
sys.stdout.write('Downloaded %d comment(s)\r' % count)
sys.stdout.flush()
start_time = time.time()
for comment in download_comments(youtube_id, args.sort):
comment_json = json.dumps(comment, ensure_ascii=False)
print(comment_json.decode('utf-8') if isinstance(comment_json, bytes) else comment_json, file=fp)
count += 1
sys.stdout.write('Downloaded %d comment(s)\r' % count)
sys.stdout.flush()
if limit and count >= limit:
break
print('\n[{:.2f} seconds] Done!'.format(time.time() - start_time))
except Exception as e:
print('Error:', str(e))
sys.exit(1)
if __name__ == "__main__":
main(sys.argv[1:])
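# Illustrative programmatic use of download_comments; the video ID below is a
# placeholder, not something from the original project.
def _example_usage(video_id='jNQXAC9IVRw', limit=10):
    for index, comment in enumerate(download_comments(video_id)):
        print(comment['author'], comment['text'])
        if index + 1 >= limit:
            break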
| 42.668508
| 130
| 0.600673
|
89ef2fd432ef2d4d7327050deb24591d4f209e84
| 4,273
|
py
|
Python
|
beta_rec/experiment/experiment.py
|
mengzaiqiao/TVBR
|
cdac86a753c41f8f3c55a025be8d88dd305325f5
|
[
"MIT"
] | 126
|
2020-03-19T02:30:23.000Z
|
2022-03-15T11:10:46.000Z
|
beta_rec/experiment/experiment.py
|
mengzaiqiao/TVBR
|
cdac86a753c41f8f3c55a025be8d88dd305325f5
|
[
"MIT"
] | 174
|
2020-03-15T17:28:10.000Z
|
2022-03-15T22:38:51.000Z
|
beta_rec/experiment/experiment.py
|
mengzaiqiao/TVBR
|
cdac86a753c41f8f3c55a025be8d88dd305325f5
|
[
"MIT"
] | 38
|
2020-03-19T00:38:47.000Z
|
2022-02-24T11:03:12.000Z
|
# coding=utf-8
"""
This is the implementation of experimental pipeline.
This class is still under development.
"""
import pandas as pd
from tabulate import tabulate
def print_result_as_table(results, tag=None):
"""Print results as a table."""
eval_infos = set()
for result in results:
eval_infos.update(result.keys())
eval_infos = list(eval_infos)
print("-" * 80)
if tag is not None:
print(tag)
    for result in results:
        for eval_info in eval_infos:
            if eval_info not in result:
                # Fill missing metrics so every model reports the same columns.
                result[eval_info] = "--"
df = pd.DataFrame(results)
df = df.set_index("model")
df = df.T
print(tabulate(df, headers=df.columns, tablefmt="psql"))
print("-" * 80)
class Experiment:
"""This enables the flow of an experiment with the beta-rec platform.
Args:
        datasets: array of :obj:`<beta_rec.datasets>`, required.
            The experimental datasets (e.g. MovieLens).
        models: array of :obj:`<beta_rec.recommenders>`, required.
            A collection of recommender models to evaluate, e.g., [MF, GCN].
        metrics: array of string, default: None, in which case every model keeps
            the evaluation metrics from its own configuration file.
            A collection of metrics used to evaluate all the recommender
            models, e.g., ['ndcg', 'precision', 'recall'].
        eval_scopes: array of integer, default: None, in which case every model
            keeps the evaluation scope from its own configuration file.
            A list of integer cut-off values to evaluate on, e.g., [1, 10, 20].
        model_dir: str, optional, default: None.
            Path to a directory for loading a pretrained model.
        result_file: str, optional, default: None, in which case every model is
            saved to the result file indicated in its configuration.
            The affix of the result file name: the saved name starts with the
            model name, followed by this string.
        save_dir: str, optional, default: None.
            Path to a directory for storing trained models and logs. If None,
            models will NOT be stored and logs are saved in the current
            working directory.
"""
def __init__(
self,
datasets,
models,
metrics=None,
eval_scopes=None,
model_dir=None,
result_file=None,
save_dir=None,
):
"""Initialise required inputs for the expriment pipeline."""
self.datasets = datasets
self.models = models
self.metrics = metrics
self.eval_scopes = eval_scopes
self.result_file = result_file
self.save_dir = save_dir
self.update_config()
def run(self):
"""Run the experiment."""
results = []
for data in self.datasets:
for model in self.models:
model.train(data)
result = model.test(data.test[0])
results.extend(result)
print_result_as_table(results)
def load_pretrained_model(self):
"""Load the pretrained model."""
for data in self.datasets:
for model in self.models:
model.init_engine(data)
model.load(model_dir=self.model_dir)
model.predict(data.test[0])
def update_config(self):
"""Update the configuration of models."""
if self.metrics is not None:
for model in self.models:
model.config["system"]["metrics"] = self.metrics
if self.eval_scopes is not None:
for model in self.models:
model.config["system"]["k"] = self.eval_scopes
        if self.result_file is not None:
            for idx, model in enumerate(self.models):
                model.config["system"]["result_file"] = (
                    "model_"
                    + str(idx)
                    + "_"
                    + model.config["model"]["model"]
                    + "_"
                    + self.result_file
                )
if self.save_dir is not None:
for model in self.models:
model.config["system"]["result_dir"] = self.save_dir
| 35.608333
| 79
| 0.598877
|
790ed6be71e6a2011d907c0c08f1e39bd4f7c044
| 3,586
|
py
|
Python
|
CoffeeAPI/CoffeeAPI/settings.py
|
Mohammed-abdelawal/coffee_api
|
2939efdd048c7ee392c4afab368e3d01aab994f7
|
[
"MIT"
] | 1
|
2020-09-21T17:17:52.000Z
|
2020-09-21T17:17:52.000Z
|
CoffeeAPI/CoffeeAPI/settings.py
|
Mohammed-abdelawal/coffee_api
|
2939efdd048c7ee392c4afab368e3d01aab994f7
|
[
"MIT"
] | null | null | null |
CoffeeAPI/CoffeeAPI/settings.py
|
Mohammed-abdelawal/coffee_api
|
2939efdd048c7ee392c4afab368e3d01aab994f7
|
[
"MIT"
] | null | null | null |
"""
Django settings for CoffeeAPI project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
import urllib
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2z$9iq)q+$an2fm4gj271_*z-r#x86pcc976)^eh@8kuc*#@7h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'core',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'CoffeeAPI.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'CoffeeAPI.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
"""
DATABASES = {
'default': {
'ENGINE': 'djongo',
"NAME": 'mongodb+srv://mohammed-mongo:iF7MzKLgXvgL57ve@coffee-api.u2crw.mongodb.net/test?retryWrites=true&w=majority'
}
}
"""
DATABASES = {
"default": {
"ENGINE": "djongo",
"CLIENT": {
"host": "mongodb+srv://mohammed-mongo:iF7MzKLgXvgL57ve@coffee-api.u2crw.mongodb.net/?retryWrites=true&w=majority",
"username": "mohammed-mongo",
"password": "iF7MzKLgXvgL57ve",
"name": "test",
"authMechanism": "SCRAM-SHA-1",
},
}}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
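# Hardening sketch (the environment variable names below are assumptions, not
# part of this project): prefer environment variables over secrets committed
# to source control. Behaviour is unchanged when the variables are unset.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
DATABASES['default']['CLIENT']['password'] = os.environ.get(
    'MONGO_PASSWORD', DATABASES['default']['CLIENT']['password'])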
| 26.367647
| 126
| 0.678751
|
842b049144b3ec0fa4c40189d9bac14f6de3b23f
| 709
|
py
|
Python
|
logger.py
|
Term-inator/Brain-Tumor-Detection
|
b59715092cca7a17b589b5d906983eb42ee4ad87
|
[
"MIT"
] | null | null | null |
logger.py
|
Term-inator/Brain-Tumor-Detection
|
b59715092cca7a17b589b5d906983eb42ee4ad87
|
[
"MIT"
] | null | null | null |
logger.py
|
Term-inator/Brain-Tumor-Detection
|
b59715092cca7a17b589b5d906983eb42ee4ad87
|
[
"MIT"
] | null | null | null |
from logging import getLogger, INFO, FileHandler, Formatter, StreamHandler
LOGGER = None
handlers = []
def init_logger(log_file='./train.log'):
logger = getLogger(__name__)
logger.setLevel(INFO)
handler1 = StreamHandler()
handler1.setFormatter(Formatter("%(message)s"))
handler2 = FileHandler(filename=log_file)
handler2.setFormatter(Formatter("%(message)s"))
logger.addHandler(handler1)
logger.addHandler(handler2)
handlers.append(handler1)
handlers.append(handler2)
global LOGGER
LOGGER = logger
def close_logger():
    global LOGGER
    for handler in handlers:
        LOGGER.removeHandler(handler)
    handlers.clear()  # drop stale references so a later init/close cycle starts clean
def Logger():
global LOGGER
return LOGGER
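# Illustrative lifecycle (the log file path is an assumption):
if __name__ == '__main__':
    init_logger('./train.log')
    Logger().info('training started')
    close_logger()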
| 22.15625
| 74
| 0.71086
|
4b6017f697f2f6b05c63c0471a71fa057d69bbbb
| 2,191
|
py
|
Python
|
src/oci/cloud_guard/models/resource_profile_collection.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/cloud_guard/models/resource_profile_collection.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/cloud_guard/models/resource_profile_collection.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ResourceProfileCollection(object):
"""
Collection of resource profile summaries.
"""
def __init__(self, **kwargs):
"""
Initializes a new ResourceProfileCollection object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param items:
The value to assign to the items property of this ResourceProfileCollection.
:type items: list[oci.cloud_guard.models.ResourceProfileSummary]
"""
self.swagger_types = {
'items': 'list[ResourceProfileSummary]'
}
self.attribute_map = {
'items': 'items'
}
self._items = None
@property
def items(self):
"""
**[Required]** Gets the items of this ResourceProfileCollection.
List of resource profiles
:return: The items of this ResourceProfileCollection.
:rtype: list[oci.cloud_guard.models.ResourceProfileSummary]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this ResourceProfileCollection.
List of resource profiles
:param items: The items of this ResourceProfileCollection.
:type: list[oci.cloud_guard.models.ResourceProfileSummary]
"""
self._items = items
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
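# Illustrative construction (a smoke test only; real items would be
# ResourceProfileSummary models from the same SDK):
#
#   collection = ResourceProfileCollection(items=[])
#   print(collection)   # rendered via formatted_flat_dict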
| 30.859155
| 245
| 0.668644
|
a3496a6c9421b2a3a494864d3a6770b58bd5f503
| 4,622
|
py
|
Python
|
clang-tidy-to-junit.py
|
brel-ge/clang-tidy-to-codeclimate
|
b3182da22cd3fef7eec0064e673c0fa426726e46
|
[
"MIT"
] | null | null | null |
clang-tidy-to-junit.py
|
brel-ge/clang-tidy-to-codeclimate
|
b3182da22cd3fef7eec0064e673c0fa426726e46
|
[
"MIT"
] | null | null | null |
clang-tidy-to-junit.py
|
brel-ge/clang-tidy-to-codeclimate
|
b3182da22cd3fef7eec0064e673c0fa426726e46
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
import collections
import re
import logging
import itertools
from xml.sax.saxutils import escape
# Create a `ErrorDescription` tuple with all the information we want to keep.
ErrorDescription = collections.namedtuple(
'ErrorDescription', 'file line column error error_identifier description')
class ClangTidyConverter:
# Parses the error.
# Group 1: file path
# Group 2: line
# Group 3: column
# Group 4: error message
# Group 5: error identifier
error_regex = re.compile(
r"^([\w\/\.\-\ ]+):(\d+):(\d+): (.+) (\[[\w\-,\.]+\])$")
# This identifies the main error line (it has a [the-warning-type] at the end)
# We only create a new error when we encounter one of those.
main_error_identifier = re.compile(r'\[[\w\-,\.]+\]$')
    def __init__(self, basename):
        self.basename = basename
        # All the errors encountered, kept per instance (a class-level list
        # would be shared across converter objects).
        self.errors = []
def print_junit_file(self, output_file):
# Write the header.
output_file.write("""<?xml version="1.0" encoding="UTF-8" ?>
<testsuites id="1" name="Clang-Tidy" tests="{error_count}" errors="{error_count}" failures="0" time="0">""".format(error_count=len(self.errors)))
sorted_errors = sorted(self.errors, key=lambda x: x.file)
# Iterate through the errors, grouped by file.
for file, errorIterator in itertools.groupby(sorted_errors, key=lambda x: x.file):
errors = list(errorIterator)
error_count = len(errors)
# Each file gets a test-suite
output_file.write("""\n <testsuite errors="{error_count}" name="{file}" tests="{error_count}" failures="0" time="0">\n"""
.format(error_count=error_count, file=file))
for error in errors:
# Write each error as a test case.
output_file.write("""
<testcase id="{id}" name="{id}" time="0">
<failure message="{message}">
{htmldata}
</failure>
</testcase>""".format(id="[{}/{}] {}".format(error.line, error.column, error.error_identifier), message=escape(error.error),
htmldata=escape(error.description)))
output_file.write("\n </testsuite>\n")
output_file.write("</testsuites>\n")
def process_error(self, error_array):
if len(error_array) == 0:
return
result = self.error_regex.match(error_array[0])
if result is None:
logging.warning(
'Could not match error_array to regex: %s', error_array)
return
# We remove the `basename` from the `file_path` to make prettier filenames in the JUnit file.
file_path = result.group(1).replace(self.basename, "")
error = ErrorDescription(file_path, int(result.group(2)), int(
result.group(3)), result.group(4), result.group(5), "\n".join(error_array[1:]))
self.errors.append(error)
def convert(self, input_file, output_file):
# Collect all lines related to one error.
current_error = []
for line in input_file:
# If the line starts with a `/`, it is a line about a file.
if line[0] == '/':
# Look if it is the start of a error
            if self.main_error_identifier.search(line):  # flags belong in re.compile(); search()'s second argument is a start position
# If so, process any `current_error` we might have
self.process_error(current_error)
# Initialize `current_error` with the first line of the error.
current_error = [line]
else:
# Otherwise, append the line to the error.
current_error.append(line)
elif len(current_error) > 0:
# If the line didn't start with a `/` and we have a `current_error`, we simply append
# the line as additional information.
current_error.append(line)
else:
pass
# If we still have any current_error after we read all the lines,
# process it.
if len(current_error) > 0:
self.process_error(current_error)
# Print the junit file.
self.print_junit_file(output_file)
if __name__ == "__main__":
if len(sys.argv) < 2:
logging.error("Usage: %s base-filename-path", sys.argv[0])
logging.error(
" base-filename-path: Removed from the filenames to make nicer paths.")
sys.exit(1)
converter = ClangTidyConverter(sys.argv[1])
converter.convert(sys.stdin, sys.stdout)
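# Illustrative shell invocation (paths are placeholders): the script reads
# clang-tidy output on stdin and writes JUnit XML to stdout.
#
#   clang-tidy -p build/ src/*.cpp 2>&1 | \
#       python3 clang-tidy-to-junit.py "$PWD/" > clang-tidy-junit.xml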
| 39.504274
| 145
| 0.59325
|
c88a7e1c12856b619c7082dd4e79420019e12747
| 15,571
|
py
|
Python
|
lib/third_party/google/bigtable/admin/v2/table_pb2.py
|
bopopescu/Google-Cloud-SDK-1
|
c4683bacb2f6192d8a816932e438a0493085469b
|
[
"Apache-2.0"
] | null | null | null |
lib/third_party/google/bigtable/admin/v2/table_pb2.py
|
bopopescu/Google-Cloud-SDK-1
|
c4683bacb2f6192d8a816932e438a0493085469b
|
[
"Apache-2.0"
] | null | null | null |
lib/third_party/google/bigtable/admin/v2/table_pb2.py
|
bopopescu/Google-Cloud-SDK-1
|
c4683bacb2f6192d8a816932e438a0493085469b
|
[
"Apache-2.0"
] | 1
|
2020-07-24T20:13:29.000Z
|
2020-07-24T20:13:29.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/bigtable/admin/v2/table.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/bigtable/admin/v2/table.proto',
package='google.bigtable.admin.v2',
syntax='proto3',
serialized_pb=_b('\n$google/bigtable/admin/v2/table.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa0\x03\n\x05Table\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x0f\x63olumn_families\x18\x03 \x03(\x0b\x32\x33.google.bigtable.admin.v2.Table.ColumnFamiliesEntry\x12I\n\x0bgranularity\x18\x04 \x01(\x0e\x32\x34.google.bigtable.admin.v2.Table.TimestampGranularity\x1a]\n\x13\x43olumnFamiliesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.bigtable.admin.v2.ColumnFamily:\x02\x38\x01\"I\n\x14TimestampGranularity\x12%\n!TIMESTAMP_GRANULARITY_UNSPECIFIED\x10\x00\x12\n\n\x06MILLIS\x10\x01\"F\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\r\n\tNAME_ONLY\x10\x01\x12\x0f\n\x0bSCHEMA_VIEW\x10\x02\x12\x08\n\x04\x46ULL\x10\x04\"A\n\x0c\x43olumnFamily\x12\x31\n\x07gc_rule\x18\x01 \x01(\x0b\x32 .google.bigtable.admin.v2.GcRule\"\xd5\x02\n\x06GcRule\x12\x1a\n\x10max_num_versions\x18\x01 \x01(\x05H\x00\x12,\n\x07max_age\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x45\n\x0cintersection\x18\x03 \x01(\x0b\x32-.google.bigtable.admin.v2.GcRule.IntersectionH\x00\x12\x37\n\x05union\x18\x04 \x01(\x0b\x32&.google.bigtable.admin.v2.GcRule.UnionH\x00\x1a?\n\x0cIntersection\x12/\n\x05rules\x18\x01 \x03(\x0b\x32 .google.bigtable.admin.v2.GcRule\x1a\x38\n\x05Union\x12/\n\x05rules\x18\x01 \x03(\x0b\x32 .google.bigtable.admin.v2.GcRuleB\x06\n\x04ruleBk\n\x1c\x63om.google.bigtable.admin.v2B\nTableProtoP\x01Z=google.golang.org/genproto/googleapis/bigtable/admin/v2;adminb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
_TABLE_TIMESTAMPGRANULARITY = _descriptor.EnumDescriptor(
name='TimestampGranularity',
full_name='google.bigtable.admin.v2.Table.TimestampGranularity',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='TIMESTAMP_GRANULARITY_UNSPECIFIED', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MILLIS', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=433,
serialized_end=506,
)
_sym_db.RegisterEnumDescriptor(_TABLE_TIMESTAMPGRANULARITY)
_TABLE_VIEW = _descriptor.EnumDescriptor(
name='View',
full_name='google.bigtable.admin.v2.Table.View',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='VIEW_UNSPECIFIED', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NAME_ONLY', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SCHEMA_VIEW', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FULL', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=508,
serialized_end=578,
)
_sym_db.RegisterEnumDescriptor(_TABLE_VIEW)
_TABLE_COLUMNFAMILIESENTRY = _descriptor.Descriptor(
name='ColumnFamiliesEntry',
full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=338,
serialized_end=431,
)
_TABLE = _descriptor.Descriptor(
name='Table',
full_name='google.bigtable.admin.v2.Table',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.v2.Table.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='column_families', full_name='google.bigtable.admin.v2.Table.column_families', index=1,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='granularity', full_name='google.bigtable.admin.v2.Table.granularity', index=2,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_TABLE_COLUMNFAMILIESENTRY, ],
enum_types=[
_TABLE_TIMESTAMPGRANULARITY,
_TABLE_VIEW,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=162,
serialized_end=578,
)
_COLUMNFAMILY = _descriptor.Descriptor(
name='ColumnFamily',
full_name='google.bigtable.admin.v2.ColumnFamily',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='gc_rule', full_name='google.bigtable.admin.v2.ColumnFamily.gc_rule', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=580,
serialized_end=645,
)
_GCRULE_INTERSECTION = _descriptor.Descriptor(
name='Intersection',
full_name='google.bigtable.admin.v2.GcRule.Intersection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rules', full_name='google.bigtable.admin.v2.GcRule.Intersection.rules', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=860,
serialized_end=923,
)
_GCRULE_UNION = _descriptor.Descriptor(
name='Union',
full_name='google.bigtable.admin.v2.GcRule.Union',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rules', full_name='google.bigtable.admin.v2.GcRule.Union.rules', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=925,
serialized_end=981,
)
_GCRULE = _descriptor.Descriptor(
name='GcRule',
full_name='google.bigtable.admin.v2.GcRule',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max_num_versions', full_name='google.bigtable.admin.v2.GcRule.max_num_versions', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_age', full_name='google.bigtable.admin.v2.GcRule.max_age', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='intersection', full_name='google.bigtable.admin.v2.GcRule.intersection', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='union', full_name='google.bigtable.admin.v2.GcRule.union', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='rule', full_name='google.bigtable.admin.v2.GcRule.rule',
index=0, containing_type=None, fields=[]),
],
serialized_start=648,
serialized_end=989,
)
_TABLE_COLUMNFAMILIESENTRY.fields_by_name['value'].message_type = _COLUMNFAMILY
_TABLE_COLUMNFAMILIESENTRY.containing_type = _TABLE
_TABLE.fields_by_name['column_families'].message_type = _TABLE_COLUMNFAMILIESENTRY
_TABLE.fields_by_name['granularity'].enum_type = _TABLE_TIMESTAMPGRANULARITY
_TABLE_TIMESTAMPGRANULARITY.containing_type = _TABLE
_TABLE_VIEW.containing_type = _TABLE
_COLUMNFAMILY.fields_by_name['gc_rule'].message_type = _GCRULE
_GCRULE_INTERSECTION.fields_by_name['rules'].message_type = _GCRULE
_GCRULE_INTERSECTION.containing_type = _GCRULE
_GCRULE_UNION.fields_by_name['rules'].message_type = _GCRULE
_GCRULE_UNION.containing_type = _GCRULE
_GCRULE.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_GCRULE.fields_by_name['intersection'].message_type = _GCRULE_INTERSECTION
_GCRULE.fields_by_name['union'].message_type = _GCRULE_UNION
_GCRULE.oneofs_by_name['rule'].fields.append(
_GCRULE.fields_by_name['max_num_versions'])
_GCRULE.fields_by_name['max_num_versions'].containing_oneof = _GCRULE.oneofs_by_name['rule']
_GCRULE.oneofs_by_name['rule'].fields.append(
_GCRULE.fields_by_name['max_age'])
_GCRULE.fields_by_name['max_age'].containing_oneof = _GCRULE.oneofs_by_name['rule']
_GCRULE.oneofs_by_name['rule'].fields.append(
_GCRULE.fields_by_name['intersection'])
_GCRULE.fields_by_name['intersection'].containing_oneof = _GCRULE.oneofs_by_name['rule']
_GCRULE.oneofs_by_name['rule'].fields.append(
_GCRULE.fields_by_name['union'])
_GCRULE.fields_by_name['union'].containing_oneof = _GCRULE.oneofs_by_name['rule']
DESCRIPTOR.message_types_by_name['Table'] = _TABLE
DESCRIPTOR.message_types_by_name['ColumnFamily'] = _COLUMNFAMILY
DESCRIPTOR.message_types_by_name['GcRule'] = _GCRULE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Table = _reflection.GeneratedProtocolMessageType('Table', (_message.Message,), dict(
ColumnFamiliesEntry = _reflection.GeneratedProtocolMessageType('ColumnFamiliesEntry', (_message.Message,), dict(
DESCRIPTOR = _TABLE_COLUMNFAMILIESENTRY,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table.ColumnFamiliesEntry)
))
,
DESCRIPTOR = _TABLE,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table)
))
_sym_db.RegisterMessage(Table)
_sym_db.RegisterMessage(Table.ColumnFamiliesEntry)
ColumnFamily = _reflection.GeneratedProtocolMessageType('ColumnFamily', (_message.Message,), dict(
DESCRIPTOR = _COLUMNFAMILY,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ColumnFamily)
))
_sym_db.RegisterMessage(ColumnFamily)
GcRule = _reflection.GeneratedProtocolMessageType('GcRule', (_message.Message,), dict(
Intersection = _reflection.GeneratedProtocolMessageType('Intersection', (_message.Message,), dict(
DESCRIPTOR = _GCRULE_INTERSECTION,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Intersection)
))
,
Union = _reflection.GeneratedProtocolMessageType('Union', (_message.Message,), dict(
DESCRIPTOR = _GCRULE_UNION,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Union)
))
,
DESCRIPTOR = _GCRULE,
__module__ = 'google.bigtable.admin.v2.table_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule)
))
_sym_db.RegisterMessage(GcRule)
_sym_db.RegisterMessage(GcRule.Intersection)
_sym_db.RegisterMessage(GcRule.Union)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\nTableProtoP\001Z=google.golang.org/genproto/googleapis/bigtable/admin/v2;admin'))
_TABLE_COLUMNFAMILIESENTRY.has_options = True
_TABLE_COLUMNFAMILIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
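# Illustrative use of the generated messages (the table name and GC-rule value
# below are made up for the sketch):
#
#   table = Table(name='projects/p/instances/i/tables/t')
#   table.column_families['cf1'].gc_rule.max_num_versions = 3
#   print(table)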
| 38.446914
| 1,633
| 0.758397
|
5230cc3d85fac014944581d9d21b50b4614e3f8b
| 454
|
py
|
Python
|
custom_components/edgeos/models/storage_data.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | 17
|
2020-07-17T13:50:48.000Z
|
2022-03-25T22:10:17.000Z
|
custom_components/edgeos/models/storage_data.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | 17
|
2020-07-14T00:09:42.000Z
|
2021-12-11T22:19:58.000Z
|
custom_components/edgeos/models/storage_data.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | 7
|
2020-07-31T10:39:09.000Z
|
2021-12-11T13:54:25.000Z
|
from typing import Optional
class StorageData:
key: Optional[str]
def __init__(self):
self.key = None
@staticmethod
def from_dict(obj: dict):
data = StorageData()
if obj is not None:
data.key = obj.get("key")
return data
def to_dict(self):
obj = {"key": self.key}
return obj
def __repr__(self):
to_string = f"{self.to_dict()}"
return to_string
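# Round-trip sketch (the key value is illustrative):
if __name__ == "__main__":
    data = StorageData.from_dict({"key": "edgeos-main"})
    assert data.to_dict() == {"key": "edgeos-main"}
    print(data)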
| 16.214286
| 39
| 0.557269
|
a5c1fa949d642f013abe4b39a61b33b0f94038ae
| 1,345
|
py
|
Python
|
example_applications/composite_median/convert_to_geotiff.py
|
fangfy/radar
|
6bb8a10396513711a507f8298c22df0e0565d4a7
|
[
"Apache-2.0"
] | null | null | null |
example_applications/composite_median/convert_to_geotiff.py
|
fangfy/radar
|
6bb8a10396513711a507f8298c22df0e0565d4a7
|
[
"Apache-2.0"
] | null | null | null |
example_applications/composite_median/convert_to_geotiff.py
|
fangfy/radar
|
6bb8a10396513711a507f8298c22df0e0565d4a7
|
[
"Apache-2.0"
] | null | null | null |
import glob, os
from datacube import helpers
from datacube.utils import geometry
import xarray as xr
import subprocess
def convert_to_tiff(filename, var=None, outputdir = 'geotiff'):
if var is None:
ds = xr.open_dataset(filename)
varnames = list(ds.data_vars)
if None in varnames:
raise ValueError(varnames)
else:
ds = xr.open_dataset(filename)
varnames = [var]
for var in varnames:
outputname = '%s/%s'%(outputdir, filename.split('/')[-1].replace('.nc','_%s.tif'%var.lower()))
if os.path.exists(outputname): continue
#print(outputname)
        try:
            ds_output = ds[var].to_dataset(name=var)
        except KeyError:
            # The variable is missing from this file; report and skip it rather
            # than fall through to an undefined ds_output below.
            print(ds.data_vars)
            continue
#ds_output = ds_output.sortby('y', ascending=False)
#ds = ds.astype('float64')
ds_output.attrs['crs'] = geometry.CRS('EPSG:3577')
#print(ds)
helpers.write_geotiff(outputname, ds_output)
return varnames
filenames = glob.glob('s1_median/*.nc')
outputdir = 's1_median_geotiff'
for filename in filenames:
varnames = convert_to_tiff(filename, outputdir = outputdir)
for var in varnames:
    # Build one VRT per variable; a single fixed name would be overwritten on
    # every pass through this loop.
    vrtname = 's1_median_%s.vrt' % var.lower()
    #if not os.path.exists(vrtname):
    cmd = 'gdalbuildvrt %s %s/*_%s.tif' % (vrtname, outputdir, var.lower())
    subprocess.call(cmd, shell=True)
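# Optional output sanity check (the rasterio dependency and the 'vv' band
# name below are assumptions, not used elsewhere in this script):
#
#   import rasterio
#   with rasterio.open('s1_median_vv.vrt') as src:
#       print(src.crs, src.width, src.height)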
| 29.888889
| 102
| 0.646097
|
556ccb15c204a36291b07953e73172d93171cbe1
| 40,046
|
py
|
Python
|
transformers/modeling_tf_xlm.py
|
Tarpelite/UniNLP
|
176c2a0f88c8054bf69e1f92693d353737367c34
|
[
"MIT"
] | 72
|
2021-05-27T06:32:20.000Z
|
2022-03-31T03:04:58.000Z
|
transformers/modeling_tf_xlm.py
|
DaDaMrX/ReaLiSe
|
25843e0c2c32b3a364cee857b2e4f5ba8b2764e9
|
[
"MIT"
] | 10
|
2021-05-27T05:13:50.000Z
|
2022-03-27T09:10:47.000Z
|
transformers/modeling_tf_xlm.py
|
DaDaMrX/ReaLiSe
|
25843e0c2c32b3a364cee857b2e4f5ba8b2764e9
|
[
"MIT"
] | 13
|
2021-06-24T04:44:14.000Z
|
2022-03-03T12:57:23.000Z
|
# coding=utf-8
# Copyright 2019-present, Facebook, Inc and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" TF 2.0 XLM model.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import math
import os
import itertools
import numpy as np
import tensorflow as tf
from .configuration_xlm import XLMConfig
from .modeling_tf_utils import TFPreTrainedModel, TFSharedEmbeddings, TFSequenceSummary, shape_list, get_initializer, DUMMY_INPUTS
from .file_utils import add_start_docstrings
logger = logging.getLogger(__name__)
TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP = {
'xlm-mlm-en-2048': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-en-2048-tf_model.h5",
'xlm-mlm-ende-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-ende-1024-tf_model.h5",
'xlm-mlm-enfr-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-enfr-1024-tf_model.h5",
'xlm-mlm-enro-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-enro-1024-tf_model.h5",
'xlm-mlm-tlm-xnli15-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-tlm-xnli15-1024-tf_model.h5",
'xlm-mlm-xnli15-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-xnli15-1024-tf_model.h5",
'xlm-clm-enfr-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-clm-enfr-1024-tf_model.h5",
'xlm-clm-ende-1024': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-clm-ende-1024-tf_model.h5",
'xlm-mlm-17-1280': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-17-1280-tf_model.h5",
'xlm-mlm-100-1280': "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-100-1280-tf_model.h5",
}
def create_sinusoidal_embeddings(n_pos, dim, out):
position_enc = np.array([
[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)]
for pos in range(n_pos)
])
out[:, 0::2] = tf.constant(np.sin(position_enc[:, 0::2]))
out[:, 1::2] = tf.constant(np.cos(position_enc[:, 1::2]))
def gelu(x):
""" Gaussian Error Linear Unit.
Original Implementation of the gelu activation function in Google Bert repo when initially created.
For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
Also see https://arxiv.org/abs/1606.08415
"""
cdf = 0.5 * (1.0 + tf.math.erf(x / tf.math.sqrt(2.0)))
return x * cdf
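# Numerical sanity sketch (eager mode, values rounded): gelu approaches the
# identity for large positive inputs and zero for large negative inputs, e.g.
#   gelu(tf.constant([-3.0, 0.0, 3.0]))  ->  [-0.004, 0.0, 2.996]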
def get_masks(slen, lengths, causal, padding_mask=None, dtype=tf.float32):
"""
Generate hidden states mask, and optionally an attention mask.
"""
bs = shape_list(lengths)[0]
if padding_mask is not None:
mask = padding_mask
else:
# assert lengths.max().item() <= slen
alen = tf.range(slen)
mask = tf.math.less(alen, lengths[:, tf.newaxis])
# attention mask is the same as mask, or triangular inferior attention (causal)
if causal:
attn_mask = tf.less_equal(tf.tile(alen[tf.newaxis, tf.newaxis, :], (bs, slen, 1)),
alen[tf.newaxis, :, tf.newaxis])
else:
attn_mask = mask
# sanity check
# assert shape_list(mask) == [bs, slen]
tf.debugging.assert_equal(shape_list(mask), [bs, slen])
assert causal is False or shape_list(attn_mask) == [bs, slen, slen]
mask = tf.cast(mask, dtype=dtype)
attn_mask = tf.cast(attn_mask, dtype=dtype)
return mask, attn_mask
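# Shape-check sketch (toy values, eager mode assumed):
#   mask, attn_mask = get_masks(5, tf.constant([3, 5]), causal=True)
#   -> mask: (2, 5), attn_mask: (2, 5, 5)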
class TFMultiHeadAttention(tf.keras.layers.Layer):
NEW_ID = itertools.count()
def __init__(self, n_heads, dim, config, **kwargs):
super(TFMultiHeadAttention, self).__init__(**kwargs)
self.layer_id = next(TFMultiHeadAttention.NEW_ID)
self.output_attentions = config.output_attentions
self.dim = dim
self.n_heads = n_heads
assert self.dim % self.n_heads == 0
self.q_lin = tf.keras.layers.Dense(dim, kernel_initializer=get_initializer(config.init_std), name='q_lin')
self.k_lin = tf.keras.layers.Dense(dim, kernel_initializer=get_initializer(config.init_std), name='k_lin')
self.v_lin = tf.keras.layers.Dense(dim, kernel_initializer=get_initializer(config.init_std), name='v_lin')
self.out_lin = tf.keras.layers.Dense(dim, kernel_initializer=get_initializer(config.init_std), name='out_lin')
self.dropout = tf.keras.layers.Dropout(config.attention_dropout)
self.pruned_heads = set()
def prune_heads(self, heads):
raise NotImplementedError
def call(self, inputs, training=False):
"""
Self-attention (if kv is None) or attention over source sentence (provided by kv).
"""
input, mask, kv, cache, head_mask = inputs
# Input is (bs, qlen, dim)
# Mask is (bs, klen) (non-causal) or (bs, klen, klen)
bs, qlen, dim = shape_list(input)
if kv is None:
klen = qlen if cache is None else cache['slen'] + qlen
else:
klen = shape_list(kv)[1]
# assert dim == self.dim, 'Dimensions do not match: %s input vs %s configured' % (dim, self.dim)
n_heads = self.n_heads
dim_per_head = self.dim // n_heads
mask_reshape = (bs, 1, qlen, klen) if len(shape_list(mask)) == 3 else (bs, 1, 1, klen)
def shape(x):
""" projection """
return tf.transpose(tf.reshape(x, (bs, -1, self.n_heads, dim_per_head)), perm=(0, 2, 1, 3))
def unshape(x):
""" compute context """
return tf.reshape(tf.transpose(x, perm=(0, 2, 1, 3)), (bs, -1, self.n_heads * dim_per_head))
q = shape(self.q_lin(input)) # (bs, n_heads, qlen, dim_per_head)
if kv is None:
k = shape(self.k_lin(input)) # (bs, n_heads, qlen, dim_per_head)
v = shape(self.v_lin(input)) # (bs, n_heads, qlen, dim_per_head)
elif cache is None or self.layer_id not in cache:
k = v = kv
k = shape(self.k_lin(k)) # (bs, n_heads, qlen, dim_per_head)
v = shape(self.v_lin(v)) # (bs, n_heads, qlen, dim_per_head)
if cache is not None:
if self.layer_id in cache:
if kv is None:
k_, v_ = cache[self.layer_id]
k = tf.concat([k_, k], axis=2) # (bs, n_heads, klen, dim_per_head)
v = tf.concat([v_, v], axis=2) # (bs, n_heads, klen, dim_per_head)
else:
k, v = cache[self.layer_id]
cache[self.layer_id] = (k, v)
q = q / math.sqrt(dim_per_head) # (bs, n_heads, qlen, dim_per_head)
scores = tf.matmul(q, k, transpose_b=True) # (bs, n_heads, qlen, klen)
mask = tf.reshape(mask, mask_reshape) # (bs, n_heads, qlen, klen)
# scores.masked_fill_(mask, -float('inf')) # (bs, n_heads, qlen, klen)
scores = scores - 1e30 * (1.0 - mask)
weights = tf.nn.softmax(scores, axis=-1) # (bs, n_heads, qlen, klen)
weights = self.dropout(weights, training=training) # (bs, n_heads, qlen, klen)
# Mask heads if we want to
if head_mask is not None:
weights = weights * head_mask
context = tf.matmul(weights, v) # (bs, n_heads, qlen, dim_per_head)
context = unshape(context) # (bs, qlen, dim)
outputs = (self.out_lin(context),)
if self.output_attentions:
outputs = outputs + (weights,)
return outputs
class TFTransformerFFN(tf.keras.layers.Layer):
def __init__(self, in_dim, dim_hidden, out_dim, config, **kwargs):
super(TFTransformerFFN, self).__init__(**kwargs)
self.lin1 = tf.keras.layers.Dense(dim_hidden, kernel_initializer=get_initializer(config.init_std), name='lin1')
self.lin2 = tf.keras.layers.Dense(out_dim, kernel_initializer=get_initializer(config.init_std), name='lin2')
self.act = tf.keras.layers.Activation(gelu) if config.gelu_activation else tf.keras.activations.relu
self.dropout = tf.keras.layers.Dropout(config.dropout)
def call(self, input, training=False):
x = self.lin1(input)
x = self.act(x)
x = self.lin2(x)
x = self.dropout(x, training=training)
return x
class TFXLMMainLayer(tf.keras.layers.Layer):
def __init__(self, config, **kwargs):
super(TFXLMMainLayer, self).__init__(**kwargs)
self.output_attentions = config.output_attentions
self.output_hidden_states = config.output_hidden_states
# encoder / decoder, output layer
self.is_encoder = config.is_encoder
self.is_decoder = not config.is_encoder
if self.is_decoder:
raise NotImplementedError("Currently XLM can only be used as an encoder")
# self.with_output = with_output
self.causal = config.causal
# dictionary / languages
self.n_langs = config.n_langs
self.use_lang_emb = config.use_lang_emb
self.n_words = config.n_words
self.eos_index = config.eos_index
self.pad_index = config.pad_index
# self.dico = dico
# self.id2lang = config.id2lang
# self.lang2id = config.lang2id
# assert len(self.dico) == self.n_words
# assert len(self.id2lang) == len(self.lang2id) == self.n_langs
# model parameters
self.dim = config.emb_dim # 512 by default
self.hidden_dim = self.dim * 4 # 2048 by default
self.n_heads = config.n_heads # 8 by default
self.n_layers = config.n_layers
assert self.dim % self.n_heads == 0, 'transformer dim must be a multiple of n_heads'
# embeddings
self.dropout = tf.keras.layers.Dropout(config.dropout)
self.attention_dropout = tf.keras.layers.Dropout(config.attention_dropout)
self.position_embeddings = tf.keras.layers.Embedding(config.max_position_embeddings,
self.dim,
embeddings_initializer=get_initializer(config.embed_init_std),
name='position_embeddings')
if config.sinusoidal_embeddings:
raise NotImplementedError
# create_sinusoidal_embeddings(config.max_position_embeddings, self.dim, out=self.position_embeddings.weight)
if config.n_langs > 1 and config.use_lang_emb:
self.lang_embeddings = tf.keras.layers.Embedding(self.n_langs,
self.dim,
embeddings_initializer=get_initializer(config.embed_init_std),
name='lang_embeddings')
self.embeddings = TFSharedEmbeddings(self.n_words, self.dim, initializer_range=config.embed_init_std, name='embeddings') # padding_idx=self.pad_index)
self.layer_norm_emb = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='layer_norm_emb')
# transformer layers
self.attentions = []
self.layer_norm1 = []
self.ffns = []
self.layer_norm2 = []
# if self.is_decoder:
# self.layer_norm15 = []
# self.encoder_attn = []
for i in range(self.n_layers):
self.attentions.append(TFMultiHeadAttention(self.n_heads, self.dim, config=config, name='attentions_._{}'.format(i)))
self.layer_norm1.append(tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='layer_norm1_._{}'.format(i)))
# if self.is_decoder:
# self.layer_norm15.append(nn.LayerNorm(self.dim, eps=config.layer_norm_eps))
# self.encoder_attn.append(MultiHeadAttention(self.n_heads, self.dim, dropout=self.attention_dropout))
self.ffns.append(TFTransformerFFN(self.dim, self.hidden_dim, self.dim, config=config, name='ffns_._{}'.format(i)))
self.layer_norm2.append(tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='layer_norm2_._{}'.format(i)))
if hasattr(config, "pruned_heads"):
pruned_heads = config.pruned_heads.copy().items()
config.pruned_heads = {}
for layer, heads in pruned_heads:
if self.attentions[int(layer)].n_heads == config.n_heads:
self.prune_heads({int(layer): list(map(int, heads))})
def get_input_embeddings(self):
return self.embeddings
def _resize_token_embeddings(self, new_num_tokens):
raise NotImplementedError
def _prune_heads(self, heads_to_prune):
""" Prunes heads of the model.
heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
See base class PreTrainedModel
"""
raise NotImplementedError
def call(self, inputs, attention_mask=None, langs=None, token_type_ids=None,
position_ids=None, lengths=None, cache=None, head_mask=None, inputs_embeds=None,
training=False): # removed: src_enc=None, src_len=None
if isinstance(inputs, (tuple, list)):
input_ids = inputs[0]
attention_mask = inputs[1] if len(inputs) > 1 else attention_mask
langs = inputs[2] if len(inputs) > 2 else langs
token_type_ids = inputs[3] if len(inputs) > 3 else token_type_ids
position_ids = inputs[4] if len(inputs) > 4 else position_ids
lengths = inputs[5] if len(inputs) > 5 else lengths
cache = inputs[6] if len(inputs) > 6 else cache
head_mask = inputs[7] if len(inputs) > 7 else head_mask
inputs_embeds = inputs[8] if len(inputs) > 8 else inputs_embeds
assert len(inputs) <= 9, "Too many inputs."
elif isinstance(inputs, dict):
input_ids = inputs.get('input_ids')
attention_mask = inputs.get('attention_mask', attention_mask)
langs = inputs.get('langs', langs)
token_type_ids = inputs.get('token_type_ids', token_type_ids)
position_ids = inputs.get('position_ids', position_ids)
lengths = inputs.get('lengths', lengths)
cache = inputs.get('cache', cache)
head_mask = inputs.get('head_mask', head_mask)
inputs_embeds = inputs.get('inputs_embeds', inputs_embeds)
assert len(inputs) <= 9, "Too many inputs."
else:
input_ids = inputs
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
bs, slen = shape_list(input_ids)
elif inputs_embeds is not None:
bs, slen = shape_list(inputs_embeds)[:2]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
if lengths is None:
if input_ids is not None:
lengths = tf.reduce_sum(tf.cast(tf.not_equal(input_ids, self.pad_index), dtype=tf.int32), axis=1)
else:
lengths = tf.convert_to_tensor([slen]*bs, tf.int32)
# mask = input_ids != self.pad_index
# check inputs
# assert shape_list(lengths)[0] == bs
tf.debugging.assert_equal(shape_list(lengths)[0], bs)
# assert lengths.max().item() <= slen
# input_ids = input_ids.transpose(0, 1) # batch size as dimension 0
# assert (src_enc is None) == (src_len is None)
# if src_enc is not None:
# assert self.is_decoder
# assert src_enc.size(0) == bs
# generate masks
mask, attn_mask = get_masks(slen, lengths, self.causal, padding_mask=attention_mask)
# if self.is_decoder and src_enc is not None:
# src_mask = torch.arange(src_len.max(), dtype=torch.long, device=lengths.device) < src_len[:, None]
# position_ids
if position_ids is None:
position_ids = tf.expand_dims(tf.range(slen), axis=0)
else:
# assert shape_list(position_ids) == [bs, slen] # (slen, bs)
tf.debugging.assert_equal(shape_list(position_ids), [bs, slen])
# position_ids = position_ids.transpose(0, 1)
# langs
if langs is not None:
# assert shape_list(langs) == [bs, slen] # (slen, bs)
tf.debugging.assert_equal(shape_list(langs), [bs, slen])
# langs = langs.transpose(0, 1)
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
# and head_mask is converted to shape [num_hidden_layers x batch x num_heads x qlen x klen]
if head_mask is not None:
raise NotImplementedError
else:
head_mask = [None] * self.n_layers
# do not recompute cached elements
if cache is not None and input_ids is not None:
_slen = slen - cache['slen']
input_ids = input_ids[:, -_slen:]
position_ids = position_ids[:, -_slen:]
if langs is not None:
langs = langs[:, -_slen:]
mask = mask[:, -_slen:]
attn_mask = attn_mask[:, -_slen:]
# embeddings
if inputs_embeds is None:
inputs_embeds = self.embeddings(input_ids)
tensor = inputs_embeds + self.position_embeddings(position_ids)
if langs is not None and self.use_lang_emb:
tensor = tensor + self.lang_embeddings(langs)
if token_type_ids is not None:
tensor = tensor + self.embeddings(token_type_ids)
tensor = self.layer_norm_emb(tensor)
tensor = self.dropout(tensor, training=training)
tensor = tensor * mask[..., tf.newaxis]
# transformer layers
hidden_states = ()
attentions = ()
for i in range(self.n_layers):
if self.output_hidden_states:
hidden_states = hidden_states + (tensor,)
# self attention
attn_outputs = self.attentions[i]([tensor, attn_mask, None, cache, head_mask[i]], training=training)
attn = attn_outputs[0]
if self.output_attentions:
attentions = attentions + (attn_outputs[1],)
attn = self.dropout(attn, training=training)
tensor = tensor + attn
tensor = self.layer_norm1[i](tensor)
# encoder attention (for decoder only)
# if self.is_decoder and src_enc is not None:
# attn = self.encoder_attn[i](tensor, src_mask, kv=src_enc, cache=cache)
# attn = F.dropout(attn, p=self.dropout, training=self.training)
# tensor = tensor + attn
# tensor = self.layer_norm15[i](tensor)
# FFN
tensor = tensor + self.ffns[i](tensor)
tensor = self.layer_norm2[i](tensor)
tensor = tensor * mask[..., tf.newaxis]
# Add last hidden state
if self.output_hidden_states:
hidden_states = hidden_states + (tensor,)
# update cache length
if cache is not None:
            cache['slen'] += shape_list(tensor)[1]  # tf.Tensor has no .size() method; use the shape helper
# move back sequence length to dimension 0
# tensor = tensor.transpose(0, 1)
outputs = (tensor,)
if self.output_hidden_states:
outputs = outputs + (hidden_states,)
if self.output_attentions:
outputs = outputs + (attentions,)
return outputs # outputs, (hidden_states), (attentions)
class TFXLMPreTrainedModel(TFPreTrainedModel):
""" An abstract class to handle weights initialization and
a simple interface for downloading and loading pretrained models.
"""
config_class = XLMConfig
pretrained_model_archive_map = TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP
base_model_prefix = "transformer"
@property
def dummy_inputs(self):
# Sometimes XLM has language embeddings so don't forget to build them as well if needed
inputs_list = tf.constant([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]])
attns_list = tf.constant([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0], [1, 0, 0, 1, 1]])
if self.config.use_lang_emb and self.config.n_langs > 1:
langs_list = tf.constant([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0], [1, 0, 0, 1, 1]])
else:
langs_list = None
return [inputs_list, attns_list, langs_list]
XLM_START_DOCSTRING = r""" The XLM model was proposed in
`Cross-lingual Language Model Pretraining`_
by Guillaume Lample*, Alexis Conneau*. It's a transformer pre-trained using one of the following objectives:
- a causal language modeling (CLM) objective (next token prediction),
- a masked language modeling (MLM) objective (Bert-like), or
- a Translation Language Modeling (TLM) objective (extension of Bert's MLM to multiple language inputs)
Original code can be found `here`_.
This model is a tf.keras.Model `tf.keras.Model`_ sub-class. Use it as a regular TF 2.0 Keras Model and
refer to the TF 2.0 documentation for all matter related to general usage and behavior.
.. _`Cross-lingual Language Model Pretraining`:
https://arxiv.org/abs/1901.07291
.. _`here`:
https://github.com/facebookresearch/XLM
.. _`tf.keras.Model`:
https://www.tensorflow.org/versions/r2.0/api_docs/python/tf/keras/Model
Note on the model inputs:
TF 2.0 models accept two formats as inputs:
- having all inputs as keyword arguments (like PyTorch models), or
- having all inputs as a list, tuple or dict in the first positional arguments.
This second option is usefull when using `tf.keras.Model.fit()` method which currently requires having all the tensors in the first argument of the model call function: `model(inputs)`.
If you choose this second option, there are three possibilities you can use to gather all the input Tensors in the first positional argument :
- a single Tensor with input_ids only and nothing else: `model(inputs_ids)
- a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
`model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])`
- a dictionary with one or several input Tensors associaed to the input names given in the docstring:
`model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
Parameters:
config (:class:`~transformers.XLMConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the configuration.
Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""
XLM_INPUTS_DOCSTRING = r"""
Inputs:
**input_ids**: ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
Indices of input sequence tokens in the vocabulary.
XLM is a model with absolute position embeddings so it's usually advised to pad the inputs on
the right rather than the left.
Indices can be obtained using :class:`transformers.XLMTokenizer`.
See :func:`transformers.PreTrainedTokenizer.encode` and
:func:`transformers.PreTrainedTokenizer.convert_tokens_to_ids` for details.
**attention_mask**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
Mask to avoid performing attention on padding token indices.
Mask values selected in ``[0, 1]``:
``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens.
**langs**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
A parallel sequence of tokens to be used to indicate the language of each token in the input.
Indices are languages ids which can be obtained from the language names by using two conversion mappings
provided in the configuration of the model (only provided for multilingual models).
More precisely, the `language name -> language id` mapping is in `model.config.lang2id` (dict str -> int) and
the `language id -> language name` mapping is `model.config.id2lang` (dict int -> str).
**token_type_ids**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
A parallel sequence of tokens (can be used to indicate various portions of the inputs).
The embeddings from these tokens will be summed with the respective token embeddings.
Indices are selected in the vocabulary (unlike BERT which has a specific vocabulary for segment indices).
**position_ids**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
Indices of positions of each input sequence tokens in the position embeddings.
Selected in the range ``[0, config.max_position_embeddings - 1]``.
**lengths**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size,)``:
Length of each sentence that can be used to avoid performing attention on padding token indices.
You can also use `attention_mask` for the same result (see above), kept here for compatibility.
Indices selected in ``[0, ..., input_ids.size(-1)]``.
**cache**:
dictionary with ``Numpy array`` or ``tf.Tensor`` that contains pre-computed
hidden-states (key and values in the attention blocks) as computed by the model
(see `cache` output below). Can be used to speed up sequential decoding.
The dictionary object will be modified in-place during the forward pass to add newly computed hidden-states.
**head_mask**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``:
Mask to nullify selected heads of the self-attention modules.
Mask values selected in ``[0, 1]``:
``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**.
**inputs_embeds**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length, embedding_dim)``:
Optionally, instead of passing ``input_ids`` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert `input_ids` indices into associated vectors
than the model's internal embedding lookup matrix.
"""
@add_start_docstrings("The bare XLM Model transformer outputing raw hidden-states without any specific head on top.",
XLM_START_DOCSTRING, XLM_INPUTS_DOCSTRING)
class TFXLMModel(TFXLMPreTrainedModel):
r"""
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**last_hidden_state**: ``tf.Tensor`` of shape ``(batch_size, sequence_length, hidden_size)``
Sequence of hidden-states at the last layer of the model.
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
import tensorflow as tf
from transformers import XLMTokenizer, TFXLMModel
tokenizer = XLMTokenizer.from_pretrained('xlm-mlm-en-2048')
model = TFXLMModel.from_pretrained('xlm-mlm-en-2048')
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
outputs = model(input_ids)
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
"""
def __init__(self, config, *inputs, **kwargs):
super(TFXLMModel, self).__init__(config, *inputs, **kwargs)
self.transformer = TFXLMMainLayer(config, name='transformer')
def call(self, inputs, **kwargs):
outputs = self.transformer(inputs, **kwargs)
return outputs
class TFXLMPredLayer(tf.keras.layers.Layer):
"""
Prediction layer (cross_entropy or adaptive_softmax).
"""
def __init__(self, config, input_embeddings, **kwargs):
super(TFXLMPredLayer, self).__init__(**kwargs)
self.asm = config.asm
self.n_words = config.n_words
self.pad_index = config.pad_index
if config.asm is False:
self.input_embeddings = input_embeddings
else:
raise NotImplementedError
# self.proj = nn.AdaptiveLogSoftmaxWithLoss(
# in_features=dim,
# n_classes=config.n_words,
# cutoffs=config.asm_cutoffs,
# div_value=config.asm_div_value,
# head_bias=True, # default is False
# )
def build(self, input_shape):
# The output weights are the same as the input embeddings, but there is an output-only bias for each token.
self.bias = self.add_weight(shape=(self.n_words,),
initializer='zeros',
trainable=True,
name='bias')
super(TFXLMPredLayer, self).build(input_shape)
def call(self, hidden_states):
hidden_states = self.input_embeddings(hidden_states, mode="linear")
hidden_states = hidden_states + self.bias
return hidden_states
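# Note: because the projection reuses the input embedding matrix, the scores above are
# equivalent to tf.matmul(hidden_states, embedding_matrix, transpose_b=True) + bias,
# assuming the shared embedding's "linear" mode performs exactly that projection.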
@add_start_docstrings("""The XLM Model transformer with a language modeling head on top
(linear layer with weights tied to the input embeddings). """,
XLM_START_DOCSTRING, XLM_INPUTS_DOCSTRING)
class TFXLMWithLMHeadModel(TFXLMPreTrainedModel):
r"""
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**prediction_scores**: ``tf.Tensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
import tensorflow as tf
from transformers import XLMTokenizer, TFXLMWithLMHeadModel
tokenizer = XLMTokenizer.from_pretrained('xlm-mlm-en-2048')
model = TFXLMWithLMHeadModel.from_pretrained('xlm-mlm-en-2048')
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
outputs = model(input_ids)
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
"""
def __init__(self, config, *inputs, **kwargs):
super(TFXLMWithLMHeadModel, self).__init__(config, *inputs, **kwargs)
self.transformer = TFXLMMainLayer(config, name='transformer')
self.pred_layer = TFXLMPredLayer(config, self.transformer.embeddings, name='pred_layer_._proj')
def get_output_embeddings(self):
return self.pred_layer.input_embeddings
def call(self, inputs, **kwargs):
transformer_outputs = self.transformer(inputs, **kwargs)
output = transformer_outputs[0]
outputs = self.pred_layer(output)
outputs = (outputs,) + transformer_outputs[1:] # Keep new_mems and attention/hidden states if they are here
return outputs
@add_start_docstrings("""XLM Model with a sequence classification/regression head on top (a linear layer on top of
the pooled output) e.g. for GLUE tasks. """,
XLM_START_DOCSTRING, XLM_INPUTS_DOCSTRING)
class TFXLMForSequenceClassification(TFXLMPreTrainedModel):
r"""
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**logits**: ``tf.Tensor`` of shape ``(batch_size, config.num_labels)``
Classification (or regression if config.num_labels==1) scores (before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
import tensorflow as tf
from transformers import XLMTokenizer, TFXLMForSequenceClassification
tokenizer = XLMTokenizer.from_pretrained('xlm-mlm-en-2048')
model = TFXLMForSequenceClassification.from_pretrained('xlm-mlm-en-2048')
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
labels = tf.constant([1])[None, :] # Batch size 1
outputs = model(input_ids)
logits = outputs[0]
"""
def __init__(self, config, *inputs, **kwargs):
super(TFXLMForSequenceClassification, self).__init__(config, *inputs, **kwargs)
self.num_labels = config.num_labels
self.transformer = TFXLMMainLayer(config, name='transformer')
self.sequence_summary = TFSequenceSummary(config, initializer_range=config.init_std, name='sequence_summary')
def call(self, inputs, **kwargs):
transformer_outputs = self.transformer(inputs, **kwargs)
output = transformer_outputs[0]
logits = self.sequence_summary(output)
outputs = (logits,) + transformer_outputs[1:] # Keep new_mems and attention/hidden states if they are here
return outputs
@add_start_docstrings("""XLM Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of
the hidden-states output to compute `span start logits` and `span end logits`). """,
XLM_START_DOCSTRING, XLM_INPUTS_DOCSTRING)
class TFXLMForQuestionAnsweringSimple(TFXLMPreTrainedModel):
r"""
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**start_scores**: ``tf.Tensor`` of shape ``(batch_size, sequence_length,)``
Span-start scores (before SoftMax).
**end_scores**: ``tf.Tensor`` of shape ``(batch_size, sequence_length,)``
Span-end scores (before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
import tensorflow as tf
from transformers import XLMTokenizer, TFXLMForQuestionAnsweringSimple
tokenizer = XLMTokenizer.from_pretrained('xlm-mlm-en-2048')
model = TFXLMForQuestionAnsweringSimple.from_pretrained('xlm-mlm-en-2048')
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
outputs = model(input_ids)
start_scores, end_scores = outputs[:2]
"""
def __init__(self, config, *inputs, **kwargs):
super(TFXLMForQuestionAnsweringSimple, self).__init__(config, *inputs, **kwargs)
self.transformer = TFXLMMainLayer(config, name='transformer')
self.qa_outputs = tf.keras.layers.Dense(config.num_labels,
kernel_initializer=get_initializer(config.init_std),
name='qa_outputs')
def call(self, inputs, **kwargs):
transformer_outputs = self.transformer(inputs, **kwargs)
sequence_output = transformer_outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = tf.split(logits, 2, axis=-1)
start_logits = tf.squeeze(start_logits, axis=-1)
end_logits = tf.squeeze(end_logits, axis=-1)
outputs = (start_logits, end_logits,) + transformer_outputs[1:] # Keep mems, hidden states and attentions if they are present
return outputs # start_logits, end_logits, (hidden_states), (attentions)
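# A minimal sketch of greedy span decoding from the start/end logits above
# (illustrative only; assumes batch size 1 and the matching XLMTokenizer):
def _example_decode_span(start_logits, end_logits, input_ids, tokenizer):
    start = int(tf.argmax(start_logits, axis=-1)[0])
    end = int(tf.argmax(end_logits, axis=-1)[0])
    return tokenizer.decode(input_ids[0, start:end + 1].numpy().tolist())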
| 51.940337
| 194
| 0.626979
|
a1f720621798865ff07c968196102e0f3e462be4
| 3,324
|
py
|
Python
|
src/CryptoPlus/Cipher/python_DES3.py
|
voytecPL/pycryptoplus
|
86905bbb8661e00cfb2afdc4461d4a79b6429d8a
|
[
"MIT"
] | 1
|
2022-02-27T17:46:18.000Z
|
2022-02-27T17:46:18.000Z
|
src/CryptoPlus/Cipher/python_DES3.py
|
voytecPL/pycryptoplus
|
86905bbb8661e00cfb2afdc4461d4a79b6429d8a
|
[
"MIT"
] | null | null | null |
src/CryptoPlus/Cipher/python_DES3.py
|
voytecPL/pycryptoplus
|
86905bbb8661e00cfb2afdc4461d4a79b6429d8a
|
[
"MIT"
] | null | null | null |
from .blockcipher import *
from . import pyDes
def new(key,mode=MODE_ECB,IV=None,counter=None,segment_size=None):
"""Create a DES-EDE3 or DES-EDE2 cipher object
wrapper for pure python 3DES implementation pyDes.py
key = raw string containing the 2/3 keys
- DES-EDE2: supply 2 keys as 1 single concatenated 16byte key= key1|key2
- DES-EDE3: supply 3 keys as 1 single concatenated 24byte key= key1|key2|key3
mode = python_DES3.MODE_ECB/CBC/CFB/OFB/CTR/CMAC, default is ECB
IV = IV as a raw string, default is "all zero" IV
-> only needed for CBC mode
counter = counter object (CryptoPlus.Util.util.Counter)
-> only needed for CTR mode
segment_size = amount of bits to use from the keystream in each chain part
-> supported values: multiple of 8 between 8 and the blocksize
of the cipher (only per byte access possible), default is 8
-> only needed for CFB mode
EXAMPLES:
**********
IMPORTING:
-----------
>>> import codecs
>>> from CryptoPlus.Cipher import python_DES3
CBC TDES-EDE3 EXAMPLE: (using test vectors from http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf)
------------
>>> key = codecs.decode('37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae', 'hex')
>>> IV = codecs.decode('3d1de3cc132e3b65', 'hex')
>>> cipher = python_DES3.new(key, python_DES3.MODE_CBC, IV)
>>> ciphertext = cipher.encrypt(codecs.decode('84401f78fe6c10876d8ea23094ea5309', 'hex'))
>>> codecs.encode(ciphertext, 'hex')
b'7b1f7c7e3b1c948ebd04a75ffba7d2f5'
>>> decipher = python_DES3.new(key, python_DES3.MODE_CBC, IV)
>>> plaintext = decipher.decrypt(ciphertext)
>>> codecs.encode(plaintext, 'hex')
b'84401f78fe6c10876d8ea23094ea5309'
CMAC TDES-EDE3 EXAMPLE:
-------------
testvector: http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf
>>> key = codecs.decode('8aa83bf8cbda10620bc1bf19fbb6cd58bc313d4a371ca8b5', 'hex')
>>> plaintext = codecs.decode('6bc1bee22e409f96e93d7e117393172aae2d8a57', 'hex')
>>> cipher = python_DES3.new(key, python_DES3.MODE_CMAC)
>>> codecs.encode(cipher.encrypt(plaintext), 'hex')
b'743ddbe0ce2dc2ed'
CMAC TDES-EDE2 EXAMPLE:
-----------------------
testvector: http://csrc.nist.gov/groups/STM/cavp/documents/mac/cmactestvectors.zip
>>> key1 = codecs.decode("5104f2c76180c1d3", 'hex')
>>> key2 = codecs.decode("b9df763e31ada716", 'hex')
>>> key = key1 + key2
>>> plaintext = codecs.decode('a6866be2fa6678f264a19c4474968e3f4eec24f5086d', 'hex')
>>> cipher = python_DES3.new(key, python_DES3.MODE_CMAC)
>>> codecs.encode(cipher.encrypt(plaintext), 'hex')
b'32e7758f3f614dbf'"""
return python_DES3(key,mode,IV,counter,segment_size)
class python_DES3(BlockCipher):
key_error_message = "Key should be 128 or 192 bits"
def __init__(self,key,mode,IV,counter,segment_size):
cipher_module = pyDes.triple_des
self.blocksize = 8
BlockCipher.__init__(self,key,mode,IV,counter,cipher_module,segment_size)
def keylen_valid(self,key):
return len(key) in (16,24)
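# Key layout note: with a 16-byte key, pyDes.triple_des runs DES-EDE2, i.e. key3 is
# assumed equal to key1, so k1|k2 behaves like the 24-byte key k1|k2|k1.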
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
| 40.536585
| 114
| 0.672383
|
0a6d7a88bf9f59da10422af70cfacd7ecc61abd4
| 4,186
|
py
|
Python
|
seller_app/serializers/store_serializers.py
|
syz247179876/e_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 7
|
2021-04-10T13:20:56.000Z
|
2022-03-29T15:00:29.000Z
|
seller_app/serializers/store_serializers.py
|
syz247179876/E_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 9
|
2021-05-11T03:53:31.000Z
|
2022-03-12T00:58:03.000Z
|
seller_app/serializers/store_serializers.py
|
syz247179876/E_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 2
|
2020-11-24T08:59:22.000Z
|
2020-11-24T14:10:59.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2021/2/18 下午4:49
# @Author : 司云中
# @File : store_serializers.py
# @Software: Pycharm
from django.contrib.auth import get_user_model
from django.db import DatabaseError, transaction
from rest_framework import serializers
from Emall.exceptions import DataFormatError, SqlServerError, DataExisted, DataNotExist
from manager_app.models import Role
from seller_app.models import Store, Seller
User = get_user_model()
class SellerStoreSerializer(serializers.ModelSerializer):
class Meta:
model = Store
seller_model = Seller
role_model = Role
fields = ('id', 'name', 'intro')
read_only_fields = ('id',)
extra_kwargs = {
'intro': {'required': False}
}
def create_store(self):
"""创建店铺"""
intro = self.validated_data.pop('intro', None)
if not intro or len(intro) > 128:
raise DataFormatError()
credential = {
'name': self.validated_data.pop('name'),
'intro': intro
}
try:
role = self.Meta.role_model.objects.get(role_name="商家角色")  # "商家角色" = "seller role"
user = self.context.get('request').user
# check whether this user has already opened a store
if self.Meta.seller_model.objects.filter(user=user).exists():
raise DataExisted()
with transaction.atomic():
store = self.Meta.model.objects.create(**credential)  # create the store
self.Meta.seller_model.objects.create(user=user, store=store, role=role)  # create the seller record
user.is_seller = True  # promote the user to a seller, granting seller permissions
user.save(force_update=True)
except self.Meta.role_model.DoesNotExist:
raise DataNotExist()
except DatabaseError:
raise SqlServerError()
class SellerUserDisplaySerializer(serializers.ModelSerializer):
"""商家个人信息序列化器"""
class Meta:
model = User
fields = ('username', 'full_name', 'email', 'phone', 'is_seller', 'is_active',
'birthday', 'sex', 'head_image', 'date_joined')
class SellerStoreDisplaySerializer(serializers.ModelSerializer):
"""商家店铺信息序列化器"""
type = serializers.CharField(source='get_type_display', read_only=True)
rank = serializers.CharField(source='get_rank_display', read_only=True)
class Meta:
model = Store
fields = '__all__'
class SellerRoleDisplaySerializer(serializers.ModelSerializer):
class Meta:
model = Role
exclude = ['permission']
class SellerDisplaySerializer(serializers.ModelSerializer):
"""商家个人信息+店铺信息序列化器"""
user = SellerUserDisplaySerializer()
store = SellerStoreDisplaySerializer()
role = SellerRoleDisplaySerializer()
class Meta:
model = Seller
fields = '__all__'
class SellerUpdateInfoSerializer(serializers.ModelSerializer):
"""
Serializer for a seller updating their profile information
"""
class Meta:
model = User
fields = ('username', 'email', 'phone', 'birthday', 'sex')
def modify(self):
user = self.context.get('request').user
user.username = self.validated_data.get('username', None) or user.username
user.email = self.validated_data.get('email', None) or user.email
user.phone = self.validated_data.get('phone', None) or user.phone
user.birthday = self.validated_data.get('birthday', None) or user.birthday
user.sex = self.validated_data.get('sex', None) or user.sex
user.save(update_fields=['username', 'email', 'phone', 'birthday', 'sex'])
class SellerUpdateStoreSerializer(serializers.ModelSerializer):
"""
Serializer for a seller updating store information
"""
class Meta:
model = Store
seller_model = Seller
fields = ('pk', 'name', 'intro', 'province')
extra_kwargs = {
'province': {
'required': True
}
}
def modify(self):
pk = self.context.get('request').data.get('pk', None)
if not pk:
raise DataFormatError('missing data')
user = self.context.get('request').user
queryset = self.Meta.model.objects.select_related('seller__user').filter(pk=pk, seller__user=user)
if queryset.count() == 0:
raise DataNotExist()
return queryset.update(**self.validated_data)
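# A minimal usage sketch for SellerStoreSerializer inside a DRF view
# (`request` is the incoming rest_framework request; the function name is illustrative):
def example_create_store(request):
    serializer = SellerStoreSerializer(data=request.data, context={'request': request})
    serializer.is_valid(raise_exception=True)
    serializer.create_store()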
| 30.115108
| 106
| 0.624224
|
6f987472ae44a7be32f3429977356ff14a3a1112
| 390
|
py
|
Python
|
414. Third Maximum Number/other.py
|
hotheat/LeetCode
|
c37f44f71a0e266aa8078c95506e6aa54ce4660c
|
[
"MIT"
] | 2
|
2019-03-05T08:31:14.000Z
|
2019-03-21T15:11:43.000Z
|
414. Third Maximum Number/other.py
|
hotheat/LeetCode
|
c37f44f71a0e266aa8078c95506e6aa54ce4660c
|
[
"MIT"
] | null | null | null |
414. Third Maximum Number/other.py
|
hotheat/LeetCode
|
c37f44f71a0e266aa8078c95506e6aa54ce4660c
|
[
"MIT"
] | null | null | null |
class Solution(object):
def thirdMax(self, nums):
v = [float('-inf'), float('-inf'), float('-inf')]
for num in nums:
if num not in v:
if num > v[0]: v = [num, v[0], v[1]]
elif num > v[1]: v = [v[0], num, v[1]]
elif num > v[2]: v = [v[0], v[1], num]
return max(nums) if v[2] == float('-inf') else v[2]
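# e.g. Solution().thirdMax([3, 2, 1]) -> 1 and Solution().thirdMax([1, 2]) -> 2,
# since the maximum is returned when fewer than three distinct values exist.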
| 43.333333
| 59
| 0.425641
|
51cddac32a5b7d11a3d2fbfcafc4d90da10b215f
| 1,330
|
py
|
Python
|
sfc_models/examples/scripts/intro_3_03_hello_world_1.py
|
MachineLP/SFC_models
|
d438a4e3e88534a206c761cda7a3f6a58ac3a0ac
|
[
"Apache-2.0"
] | 21
|
2016-11-03T12:30:50.000Z
|
2022-03-24T06:54:14.000Z
|
sfc_models/examples/scripts/intro_3_03_hello_world_1.py
|
MachineLP/SFC_models
|
d438a4e3e88534a206c761cda7a3f6a58ac3a0ac
|
[
"Apache-2.0"
] | 1
|
2019-04-02T02:01:27.000Z
|
2019-04-07T21:07:10.000Z
|
sfc_models/examples/scripts/intro_3_03_hello_world_1.py
|
MachineLP/SFC_models
|
d438a4e3e88534a206c761cda7a3f6a58ac3a0ac
|
[
"Apache-2.0"
] | 12
|
2016-11-03T12:30:57.000Z
|
2021-09-14T23:08:23.000Z
|
"""
intro_3_03_hello_world_1.py
Example code from Section 3.3 of "Introduction to SFC Models Using Python."
This example does not create a useful model, but it and following examples demonstrate
how models are built up by examining the log file.
Copyright 2017 Brian Romanchuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# This next line looks bizarre, but is needed for backwards compatibility with Python 2.7.
from __future__ import print_function
import sfc_models
from sfc_models.models import Model
print('*Starting up logging*')
# Log files are based on name of this module, which is given by: __file__
sfc_models.register_standard_logs(output_dir='output',
base_file_name=__file__)
print('*Build Model*')
mod = Model()
print('*Running main()*')
print('*(This will cause a warning...)*')
mod.main()
| 34.102564
| 90
| 0.762406
|
98f6e5eebae81baa9cf19ba3faa9c93d2d474df5
| 913
|
bzl
|
Python
|
third_party/hexagon/workspace.bzl
|
vixadd/tensorflow
|
8c624204eb686a91779149dc500e6c8c60096074
|
[
"Apache-2.0"
] | 190,993
|
2015-11-09T13:17:30.000Z
|
2022-03-31T23:05:27.000Z
|
third_party/hexagon/workspace.bzl
|
vixadd/tensorflow
|
8c624204eb686a91779149dc500e6c8c60096074
|
[
"Apache-2.0"
] | 48,461
|
2015-11-09T14:21:11.000Z
|
2022-03-31T23:17:33.000Z
|
third_party/hexagon/workspace.bzl
|
vixadd/tensorflow
|
8c624204eb686a91779149dc500e6c8c60096074
|
[
"Apache-2.0"
] | 104,981
|
2015-11-09T13:40:17.000Z
|
2022-03-31T19:51:54.000Z
|
"""Loads the Hexagon NN Header files library, used by TF Lite."""
load("//third_party:repo.bzl", "tf_http_archive")
# Note: use libhexagon_nn_skel version 1.20 only with the current version.
# This comment will be updated with compatible version.
def repo():
tf_http_archive(
name = "hexagon_nn",
sha256 = "6eaf6d8eabfcb3486753c68f22fd6c1eabf8fae28bf52e4ebea815e9daf67257",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/storage.cloud.google.com/download.tensorflow.org/tflite/hexagon_nn_headers_v1.20.0.3.tgz",
# Repeated to bypass 'at least two urls' check. TODO(karimnosseir): add original source of this package.
"https://storage.googleapis.com/mirror.tensorflow.org/storage.cloud.google.com/download.tensorflow.org/tflite/hexagon_nn_headers_v1.20.0.3.tgz",
],
build_file = "//third_party/hexagon:BUILD",
)
| 50.722222
| 156
| 0.715225
|
8a63f5cedc0db5f4d141dd8af7b72332589197d8
| 7,984
|
py
|
Python
|
utils/solver.py
|
tings0802/LdG-sim
|
46dc04c9f91fd0a3afdd8e83509c70692656e1ea
|
[
"MIT"
] | 5
|
2020-03-04T06:38:43.000Z
|
2021-05-19T10:36:04.000Z
|
utils/solver.py
|
tings0802/LdG-sim
|
46dc04c9f91fd0a3afdd8e83509c70692656e1ea
|
[
"MIT"
] | null | null | null |
utils/solver.py
|
tings0802/LdG-sim
|
46dc04c9f91fd0a3afdd8e83509c70692656e1ea
|
[
"MIT"
] | 1
|
2020-03-24T05:54:26.000Z
|
2020-03-24T05:54:26.000Z
|
import numpy as np
from utils import utility as u
from utils import param as p
from utils import cond as c
from utils import mesh as m
def println(Q):
''' print for debug '''
for i in Q:
for j in i:
print(j, end=' ')
print()
print()
""" transform n and S into Q """
def Q_tensor(n, S=1, P=0):
    ''' calculate the Q tensor of a certain position, which evaluates the liquid crystal molecule's orientation, degree of order and biaxiality '''
    Q = np.zeros((3, 3))
    for row in range(3):
        for col in range(3):
            if row == col:
                Q[row, col] = (3 * n[row] * n[col] - 1) * (S / 2)
            else:
                Q[row, col] = (3 * n[row] * n[col] - 0) * (S / 2)
    Q -= np.trace(Q) * np.eye(3) / 3  # subtract the trace once, after Q is fully built
    return Q
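# Sanity check: for a unit director along z with S = 1, Q_tensor((0, 0, 1))
# gives diag(-0.5, -0.5, 1.0), which is traceless as required.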
def Q_init(n, S=1, P=0):
''' calculate the Q tensor of a certain position, which evaluates the liquid crystal molecule's orientation, degree of order and biaxiality '''
Q = np.zeros((3, 3))
for row in range(3):
for col in range(3):
if row == col:
Q[row, col] = (3 * n[row] * n[col] - 1) * (S / 2)
else:
Q[row, col] = (3 * n[row] * n[col] - 0) * (S / 2)
return Q
def all_Q(mesh):
for layer in mesh:
for line in layer:
for grid in line:
Q = Q_init(grid.n, grid.S, grid.P)
grid.Q = Q
# deprecated
def tensor_Q(n, S=1, P=0):
''' (Deprecated) calculate the Q tensor of a certain position, which evaluates the liquid crystal molecule's orientation, degree of order and biaxiality '''
n = np.array(n)
Q = (np.outer(n, n) * 3 - np.eye(3)) * (S / 2)
Q -= np.trace(Q) * np.eye(3) / 3
return Q
""" solve n and S field from Q """
def eigen(grid):
''' find the max eigenvalue and the corresponding normalized eigvector of Q '''
eigval, eigvec = np.linalg.eig(grid.Q)
idx = np.argmax(eigval)
S = eigval[idx]
n = eigvec[:, idx]
grid.S = S
grid.n = n
return S, n
Eigen = np.vectorize(eigen)
""" iteration """
# deprecated
def retrive_Q(mesh):
''' (Deprecated) retrieve the tensorial order parameter Q from mesh into a (3, 3, *mesh.shape) array '''
all_Q = np.vectorize(lambda grid, i, j: grid.Q[i, j])
Qs = np.empty((3, 3, *mesh.shape))
for i in range(3):
for j in range(3):
Qs[i, j] = all_Q(mesh, i, j)
return Qs # shape = (3, 3, 27, 27, 17)
# deprecated
def laplace(Qs, i, j):
    ''' (Deprecated) finite difference discrete laplacian of Q_ij of all the points in the mesh '''
    lap_Q = np.empty((p.x_nog, p.y_nog, p.z_nog))
    for x in range(p.x_nog):
        for y in range(p.y_nog):
            for z in range(p.z_nog):
                lap_Q[x, y, z] = np.mean([Qs[i, j, x-1, y, z],
                                          Qs[i, j, x+1, y, z],
                                          Qs[i, j, x, y-1, z],
                                          Qs[i, j, x, y+1, z],
                                          Qs[i, j, x, y, z-1],
                                          Qs[i, j, x, y, z+1]]) - Qs[i, j, x, y, z]
    return lap_Q  # shape = (27, 27, 17)
# deprecated
def normal_dot_gradient(Qs, i, j, dr=p.dr_lap):
    ''' (Deprecated) inner product of gradient of Q_ij and the surface normal of all the points in the mesh '''
    # surface normal = normalized r field (shape = 27 * 27 * 17)
    grad_Q = np.empty((p.x_nog, p.y_nog, p.z_nog))
    for x in range(p.x_nog):
        for y in range(p.y_nog):
            for z in range(p.z_nog):
                normal = np.array((x, y, z), dtype=float)  # direction from the origin; undefined at the origin itself
                normal /= np.linalg.norm(normal)
                grad_Q[x, y, z] = (normal[0] * (Qs[i, j, x, y, z] - Qs[i, j, x-1, y, z]) / dr +
                                   normal[1] * (Qs[i, j, x, y, z] - Qs[i, j, x, y-1, z]) / dr +
                                   normal[2] * (Qs[i, j, x, y, z] - Qs[i, j, x, y, z-1]) / dr)
    return grad_Q  # shape = (27, 27, 17)
def laplacian(mesh):
''' finite difference discrete laplacian of Q_ij of all the points in the mesh '''
lap_Qs = np.empty((*mesh.shape, 3, 3))
for x in range(p.x_nog):
for y in range(p.y_nog):
for z in range(1, p.z_nog-1):
                # periodic neighbours in x and y (modular indexing wraps both edges)
                q1 = mesh[(x-1) % p.x_nog, y, z].Q
                q2 = mesh[(x+1) % p.x_nog, y, z].Q
                q3 = mesh[x, (y-1) % p.y_nog, z].Q
                q4 = mesh[x, (y+1) % p.y_nog, z].Q
                q5 = mesh[x, y, z-1].Q
                q6 = mesh[x, y, z+1].Q
temp = np.array([q1, q2, q3, q4, q5, q6])
lap_Qs[x, y, z] = np.average(temp, axis=0) - mesh[x, y, z].Q
return lap_Qs # shape = (27, 27, 17, 3, 3)
def gradient(mesh, dx=p.dr_lap, dy=p.dr_lap, dz=p.dr_lap):
''' gradient of Q_ij of all the points in the mesh '''
grad_Qs = np.empty((*mesh.shape, 3, 3, 3))
for x in range(p.x_nog):
for y in range(p.y_nog):
for z in range(p.z_nog):
grad_Qs[x, y, z] = np.array([(mesh[x, y, z].Q - mesh[x-1, y, z].Q) / dx,
(mesh[x, y, z].Q - mesh[x, y-1, z].Q) / dy,
(mesh[x, y, z].Q - mesh[x, y, z-1].Q) / dz])
return grad_Qs # shape = (27, 27, 17, 3, 3, 3)
def h_bulk(Q, lap_Q, L=p.L, A=p.A, B=p.B, C=p.C):
''' solve the molecular field on the bulk area '''
h = np.empty((3, 3))
for i in range(3):
for j in range(3):
h[i, j] = (L * lap_Q[i, j] -
A * Q[i, j] -
B * np.sum(np.multiply(Q[i], Q.T[j])) -
C * Q[i, j] * np.sum(np.multiply(Q, Q.T)))
return h
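# In index-free form the bulk field above reads (up to the sign conventions used here)
#   h = L * lap(Q) - A * Q - B * (Q @ Q) - C * Q * tr(Q @ Q),
# i.e. the variational derivative of the Landau-de Gennes free energy density.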
def h_surf(Q, grad_Q, Q_bound, surf_normal, W=p.W_sub, L=p.L):
''' solve the molecular field on the surface of substrate or sphere '''
h = np.empty((3, 3))
for i in range(3):
for j in range(3):
h[i, j] = (L * grad_Q[i, j].dot(surf_normal)) + W * (Q[i, j] - Q_bound[i, j])
return h
def evolute(mesh, L=p.L, A=p.A, B=p.B, C=p.C, W_subs=p.W_sub, W_shel=p.W_she, dt=p.dt, gamma=p.gamma):
lap_Qs = laplacian(mesh)
grad_Qs = gradient(mesh)
for x in range(p.x_nog):
for y in range(p.y_nog):
for z in range(p.z_nog):
grid = mesh[x, y, z]
lap_Q = lap_Qs[x, y, z]
grad_Q = grad_Qs[x, y, z]
if c.is_top(grid) or c.is_bot(grid): # h_surf of substrate
Q_bound = Q_tensor(p.n_subs, p.S_subs)
grid.h = h_surf(grid.Q, grad_Q, Q_bound=Q_bound, surf_normal=np.array([0, 0, 1]), W=W_subs)
elif c.is_osh(grid) or c.is_ish(grid): # h_surf of shell
Q_bound = Q_tensor(c.envelope(grid, p.n_shel), p.S_subs)
grid.h = h_surf(grid.Q, grad_Q, Q_bound=Q_bound, surf_normal=u.cartesian(grid.r), W=W_shel)
else: # h_bulk
grid.h = h_bulk(grid.Q, lap_Q)
newQ = grid.Q + grid.h * dt / gamma
newQ -= np.trace(newQ) * np.eye(3) / 3 # EL modification
symmetric = (abs(np.transpose(newQ) - newQ) <= np.full((3, 3), 2e-8)).all()
if not symmetric:
print(f'\nnewQ =\n{newQ}\n')
print(f'\nnp.transpose(newQ) =\n{np.transpose(newQ)}\n')
print(f'\nnewQ - np.transpose(newQ) =\n{newQ - np.transpose(newQ)}\n')
grid.Q = newQ
if __name__ == "__main__":
a = np.arange(1, 28).reshape([3, 3, 3])
print(a)
b = np.array([1, 2, 3])
print(b)
c = np.sum(np.multiply(a, b), axis=0)
print(c)
# TODO: XZ-periodic boundary
'''
# XZ-periodic boundary
F_bulk =
F_subs =
F_shel =
F_total = F_bulk + F_subs + F_shel
'''
| 37.308411
| 157
| 0.482715
|
293e99aa89dafb98cb814fd532a9cd9c3951e83c
| 1,935
|
py
|
Python
|
src/Current Models/Misc/vase.py
|
PharaohCola13/Geotesimal
|
45de6fb9a587ae8eb3c85d0acd6b93c36fa7bf24
|
[
"MIT"
] | 3
|
2018-12-13T20:11:18.000Z
|
2022-01-13T13:51:19.000Z
|
src/Current Models/Misc/vase.py
|
PharaohCola13/geometric-models
|
45de6fb9a587ae8eb3c85d0acd6b93c36fa7bf24
|
[
"MIT"
] | 5
|
2018-10-19T18:18:05.000Z
|
2021-06-10T00:20:52.000Z
|
src/Current Models/Misc/vase.py
|
PharaohCola13/geometric-models
|
45de6fb9a587ae8eb3c85d0acd6b93c36fa7bf24
|
[
"MIT"
] | 1
|
2018-10-17T05:32:26.000Z
|
2018-10-17T05:32:26.000Z
|
#{u, Sin[v]*(u^3+2u^2-2u+2)/5, Cos[v]*(u^3+2u^2-2u+2)/5}
# A Vase, brought to you by PharaohCola13
import mpl_toolkits.mplot3d.axes3d as p3
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.art3d import *
from matplotlib.animation import *
from matplotlib import *
from numpy import *
import time
name = "Vase"
def shape(fig, alpha, color, edge_c, edge_w, grid, sides, edges, multi_pi, figcolor, rotation, rotmagt, rotmagp, save):
# Definition of x
def x_(u, v):
x = cos(v) * (u**3 + 2 * u**2 - 2 * u + 2)/5
return x
# Definition of y
def y_(u, v):
y = sin(v) * (u**3 + 2 * u**2 - 2 * u + 2)/5
return y
# Definition of z
def z_(u, v):
z = u
return z
# Value of the angles
u = linspace(-2.3, 1.3, edges)
v = linspace(0, 2 * pi, sides + 1)
u, v = meshgrid(u, v)
# Symbolic representation
x = x_(u, v)
y = y_(u, v)
z = z_(u, v)
# Figure Properties
ax = p3.Axes3D(fig)
ax.set_facecolor(figcolor) # Figure background turns black
# Axis Properties
plt.axis(grid) # Turns off the axis grid
# Surface Plot
vase = ax.plot_surface(x, y, z)
vase.set_alpha(alpha) # Transparency of figure
vase.set_edgecolor(edge_c) # Edge color of the lines on the figure
vase.set_linewidth(edge_w) # Line width of the edges
vase.set_facecolor(color) # General color of the figure
def rot_on():
def animate(i):
ax.view_init(azim=rotmagt * i, elev=rotmagp * i)
if save == "MP4":
# Animate
ani = FuncAnimation(fig, animate, frames=500,
interval=100, save_count=50) # frames=100)#, repeat=True)
Writer = writers['ffmpeg']
writer = Writer(fps=30, bitrate=1800)
ani.save('{}.mp4'.format(name), writer=writer)
else:
# save = None
# Animate
ani = FuncAnimation(fig, animate,
interval=1, save_count=50) # frames=100)#, repeat=True)
pass
plt.ion()
plt.show()
time.sleep(0)
plt.close()
if rotation == "On":
rot_on()
elif rotation == "Off":
pass
| 22.5
| 119
| 0.643411
|
5970e7dd60eaedc92a93ddde799b8b0305bb3d7f
| 8,999
|
py
|
Python
|
sscanss/core/io/writer.py
|
ISISNeutronMuon/SScanSS-2
|
aa70107c8771e0ed0c1202a683ad94de84dff411
|
[
"BSD-3-Clause"
] | 9
|
2018-09-09T13:45:38.000Z
|
2022-01-04T09:02:37.000Z
|
sscanss/core/io/writer.py
|
ISISNeutronMuon/SScanSS-2
|
aa70107c8771e0ed0c1202a683ad94de84dff411
|
[
"BSD-3-Clause"
] | 21
|
2020-10-07T14:37:01.000Z
|
2022-03-18T09:14:35.000Z
|
sscanss/core/io/writer.py
|
ISISNeutronMuon/SScanSS-2
|
aa70107c8771e0ed0c1202a683ad94de84dff411
|
[
"BSD-3-Clause"
] | 8
|
2019-11-18T12:00:35.000Z
|
2022-02-17T13:35:15.000Z
|
"""
A collection of functions for writing data
"""
import csv
import datetime as dt
import h5py
import numpy as np
from ...config import __version__, settings
def write_project_hdf(data, filename):
"""Writes the project data dictionary to a hdf file
:param data: A dictionary containing the project data
:type data: dict
:param filename: path of the hdf file
:type filename: str
"""
with h5py.File(filename, 'w') as hdf_file:
hdf_file.attrs['name'] = data['name']
hdf_file.attrs['version'] = __version__
hdf_file.attrs['instrument_version'] = data['instrument_version']
date_created = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
hdf_file.attrs['date_created'] = date_created
if settings.local:
setting_group = hdf_file.create_group('settings')
for key, value in settings.local.items():
setting_group.attrs[key] = value
samples = data['sample']
sample_group = hdf_file.create_group('sample', track_order=True)
for key, sample in samples.items():
sample_group.create_group(key)
sample_group[key]['vertices'] = sample.vertices
sample_group[key]['indices'] = sample.indices
fiducials = data['fiducials']
fiducial_group = hdf_file.create_group('fiducials')
fiducial_group['points'] = fiducials.points
fiducial_group['enabled'] = fiducials.enabled
measurements = data['measurement_points']
measurement_group = hdf_file.create_group('measurement_points')
measurement_group['points'] = measurements.points
measurement_group['enabled'] = measurements.enabled
vectors = data['measurement_vectors']
hdf_file.create_dataset('measurement_vectors', data=vectors)
alignment = data['alignment']
if alignment is not None:
hdf_file.create_dataset('alignment', data=alignment)
instrument = data['instrument']
hdf_file.attrs['instrument_name'] = instrument.name
_write_instrument(hdf_file, instrument)
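# Keys the `data` dictionary above is expected to carry, as read by write_project_hdf:
# 'name', 'instrument_version', 'sample', 'fiducials', 'measurement_points',
# 'measurement_vectors', 'alignment' (may be None) and 'instrument'.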
def _write_instrument(hdf_file, instrument):
instrument_group = hdf_file.create_group('instrument')
instrument_group.attrs['name'] = instrument.name
instrument_group['gauge_volume'] = instrument.gauge_volume
instrument_group.attrs['script_template'] = instrument.script.template
positioners_group = instrument_group.create_group('positioners')
for key, positioner in instrument.positioners.items():
group = positioners_group.create_group(key)
group.attrs['name'] = positioner.name
group['default_base'] = positioner.default_base
group['tool'] = positioner.tool
if positioner.base_mesh is not None:
group['base_mesh_vertices'] = positioner.base_mesh.vertices
group['base_mesh_indices'] = positioner.base_mesh.indices
group['base_mesh_colour'] = positioner.base_mesh.colour.rgbaf
group['order'] = positioner.order
group = group.create_group('links', track_order=True)
for link in positioner.links:
sub_group = group.create_group(link.name)
sub_group['axis'] = link.joint_axis
sub_group['point'] = link.home
sub_group.attrs['type'] = link.type.value
sub_group.attrs['lower_limit'] = link.lower_limit
sub_group.attrs['upper_limit'] = link.upper_limit
sub_group.attrs['default_offset'] = link.default_offset
if link.mesh is not None:
sub_group['mesh_vertices'] = link.mesh.vertices
sub_group['mesh_indices'] = link.mesh.indices
sub_group['mesh_colour'] = link.mesh.colour.rgbaf
stacks_group = instrument_group.create_group('stacks')
for key, value in instrument.positioning_stacks.items():
stacks_group.attrs[key] = value
active_stack_group = stacks_group.create_group('active')
active_stack_group.attrs['name'] = instrument.positioning_stack.name
active_stack_group['set_points'] = instrument.positioning_stack.set_points
active_stack_group['lock_state'] = [link.locked for link in instrument.positioning_stack.links]
active_stack_group['limit_state'] = [link.ignore_limits for link in instrument.positioning_stack.links]
for index, positioner in enumerate(instrument.positioning_stack.auxiliary):
if positioner.base is positioner.default_base:
continue
base_group = active_stack_group.get('base')
if base_group is None:
base_group = active_stack_group.create_group('base')
base_group[positioner.name] = positioner.base
group = instrument_group.create_group('jaws')
group.attrs['name'] = instrument.jaws.name
if instrument.jaws.positioner is not None:
group.attrs['positioner_name'] = instrument.jaws.positioner.name
group['positioner_set_points'] = instrument.jaws.positioner.set_points
group['positioner_lock_state'] = [link.locked for link in instrument.jaws.positioner.links]
group['positioner_limit_state'] = [link.ignore_limits for link in instrument.jaws.positioner.links]
group['aperture'] = instrument.jaws.aperture
group['initial_source'] = instrument.jaws.initial_source
group['initial_direction'] = instrument.jaws.initial_direction
group['aperture_lower_limit'] = instrument.jaws.aperture_lower_limit
group['aperture_upper_limit'] = instrument.jaws.aperture_upper_limit
group['mesh_vertices'] = instrument.jaws.mesh.vertices
group['mesh_indices'] = instrument.jaws.mesh.indices
group['mesh_colour'] = instrument.jaws.mesh.colour.rgbaf
detectors_group = instrument_group.create_group('detectors')
for key, detector in instrument.detectors.items():
group = detectors_group.create_group(key)
group.attrs['name'] = detector.name
if detector.current_collimator is not None:
group.attrs['current_collimator'] = detector.current_collimator.name
if detector.positioner is not None:
group.attrs['positioner_name'] = detector.positioner.name
group['positioner_set_points'] = detector.positioner.set_points
group['positioner_lock_state'] = [link.locked for link in detector.positioner.links]
group['positioner_limit_state'] = [link.ignore_limits for link in detector.positioner.links]
group['initial_beam'] = detector.initial_beam
group = group.create_group('collimators')
for c_key, collimator in detector.collimators.items():
sub_group = group.create_group(c_key)
sub_group.attrs['name'] = collimator.name
sub_group['aperture'] = collimator.aperture
sub_group['mesh_vertices'] = collimator.mesh.vertices
sub_group['mesh_indices'] = collimator.mesh.indices
sub_group['mesh_colour'] = collimator.mesh.colour.rgbaf
fixed_hardware_group = instrument_group.create_group('fixed_hardware')
for key, mesh in instrument.fixed_hardware.items():
group = fixed_hardware_group.create_group(key)
group['mesh_vertices'] = mesh.vertices
group['mesh_indices'] = mesh.indices
group['mesh_colour'] = mesh.colour.rgbaf
def write_binary_stl(filename, mesh):
"""Writes a 3D mesh to a binary STL file. The binary STL format only
supports face normals while the Mesh object stores vertex normals
therefore the first vertex normal for each face is written.
:param filename: path of the stl file
:type filename: str
:param mesh: The vertices, normals and index array of the mesh
:type mesh: Mesh
"""
record_dtype = np.dtype([
('normals', np.float32, (3,)),
('vertices', np.float32, (3, 3)),
('attr', '<i2', (1,)),
])
face_count = mesh.indices.size // 3
data = np.recarray(face_count, dtype=record_dtype)
data.normals = mesh.normals[mesh.indices, :][::3]
data.attr = np.zeros((face_count, 1), dtype=np.uint32)
data.vertices = mesh.vertices[mesh.indices, :].reshape((-1, 3, 3))
with open(filename, 'wb') as stl_file:
stl_file.seek(80)
np.array(face_count, dtype=np.uint32).tofile(stl_file)
data.tofile(stl_file)
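# Resulting file layout: an 80-byte header (left zeroed by the seek above), a 4-byte
# little-endian triangle count, then one 50-byte record per triangle
# (normal + three vertices as twelve float32 values, plus a 2-byte attribute field).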
def write_points(filename, data):
"""Writes point data and enabled status to tab delimited file.
:param filename: path of the file
:type filename: str
:param data: 3D points and enabled status
:type data: numpy.recarray
"""
with open(filename, 'w', newline='') as csv_file:
writer = csv.writer(csv_file, delimiter='\t')
write_enabled = data.enabled.all()
for i in range(data.size):
p0, p1, p2 = data[i].points
if write_enabled:
writer.writerow([f'{p0:.7f}', f'{p1:.7f}', f'{p2:.7f}'])
else:
writer.writerow([f'{p0:.7f}', f'{p1:.7f}', f'{p2:.7f}', data[i].enabled])
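# A minimal usage sketch for write_points (dtype field names taken from the function above):
def _example_write_points(filename='points.tab'):
    data = np.recarray(2, dtype=[('points', np.float32, (3,)), ('enabled', bool)])
    data.points = [[0.0, 0.0, 0.0], [1.0, 2.0, 3.0]]
    data.enabled = [True, False]
    write_points(filename, data)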
| 44.549505
| 107
| 0.678186
|
c2fd10aa3966e77e2299c6fb8de1c35ae03bdee0
| 233,750
|
py
|
Python
|
sdks/python/appcenter_sdk/api/analytics_api.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | null | null | null |
sdks/python/appcenter_sdk/api/analytics_api.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | 6
|
2019-10-23T06:38:53.000Z
|
2022-01-22T07:57:58.000Z
|
sdks/python/appcenter_sdk/api/analytics_api.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | 2
|
2019-10-23T06:31:05.000Z
|
2021-08-21T17:32:47.000Z
|
# coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from appcenter_sdk.api_client import ApiClient
class analyticsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def Devices_BlockLogs(self, install_id, owner_name, app_name, **kwargs): # noqa: E501
"""Devices_BlockLogs # noqa: E501
**Warning, this operation is not reversible.**
A successful call to this API will permanently stop ingesting any logs received via SDK for the given installation ID, and cannot be restored. We advise caution when using this API, it is designed to permanently disable collection from a specific installation of the app on a device, usually following the request from a user.
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Devices_BlockLogs(install_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string install_id: The id of the device (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: string
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Devices_BlockLogs_with_http_info(install_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Devices_BlockLogs_with_http_info(install_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def Devices_BlockLogs_with_http_info(self, install_id, owner_name, app_name, **kwargs): # noqa: E501
"""Devices_BlockLogs # noqa: E501
**Warning, this operation is not reversible.**
A successful call to this API will permanently stop ingesting any logs received via SDK for the given installation ID, and cannot be restored. We advise caution when using this API, it is designed to permanently disable collection from a specific installation of the app on a device, usually following the request from a user.
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Devices_BlockLogs_with_http_info(install_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string install_id: The id of the device (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: string
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['install_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Devices_BlockLogs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'install_id' is set
if ('install_id' not in params or
params['install_id'] is None):
raise ValueError("Missing the required parameter `install_id` when calling `Devices_BlockLogs`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Devices_BlockLogs`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Devices_BlockLogs`") # noqa: E501
collection_formats = {}
path_params = {}
if 'install_id' in params:
path_params['install_id'] = params['install_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/devices/block_logs/{install_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='string', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
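    # A minimal usage sketch (placeholder values; assumes a configured ApiClient):
    #   api = analyticsApi()
    #   result = api.Devices_BlockLogs('<install_id>', '<owner_name>', '<app_name>')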
def App_BlockLogs(self, owner_name, app_name, **kwargs): # noqa: E501
"""App_BlockLogs # noqa: E501
**Warning, this operation is not reversible.**
A successful call to this API will permanently stop ingesting any logs received via SDK by app_id, and cannot be restored. We advise caution when using this API, it is designed to permanently disable an app_id.
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.App_BlockLogs(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: string
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.App_BlockLogs_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.App_BlockLogs_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def App_BlockLogs_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""App_BlockLogs # noqa: E501
**Warning, this operation is not reversible.**
A successful call to this API will permanently stop ingesting any logs received via SDK by app_id, and cannot be restored. We advise caution when using this API, it is designed to permanently disable an app_id.
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.App_BlockLogs_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: string
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method App_BlockLogs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `App_BlockLogs`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `App_BlockLogs`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/devices/block_logs', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='string', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Crashes_ListSessionLogs(self, crash_id, owner_name, app_name, **kwargs): # noqa: E501
"""Crashes_ListSessionLogs # noqa: E501
Get session logs by crash ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Crashes_ListSessionLogs(crash_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_id: The id of the a crash (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string date: Date of data requested (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Crashes_ListSessionLogs_with_http_info(crash_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Crashes_ListSessionLogs_with_http_info(crash_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def Crashes_ListSessionLogs_with_http_info(self, crash_id, owner_name, app_name, **kwargs): # noqa: E501
"""Crashes_ListSessionLogs # noqa: E501
Get session logs by crash ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Crashes_ListSessionLogs_with_http_info(crash_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_id: The id of the a crash (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string date: Date of data requested (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['crash_id', 'owner_name', 'app_name', 'date'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Crashes_ListSessionLogs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'crash_id' is set
if ('crash_id' not in params or
params['crash_id'] is None):
raise ValueError("Missing the required parameter `crash_id` when calling `Crashes_ListSessionLogs`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Crashes_ListSessionLogs`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Crashes_ListSessionLogs`") # noqa: E501
collection_formats = {}
path_params = {}
if 'crash_id' in params:
path_params['crash_id'] = params['crash_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'date' in params:
query_params.append(('date', params['date'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/crashes/{crash_id}/session_logs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_Versions(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_Versions # noqa: E501
Count of active versions in the time range ordered by version. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_Versions(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_Versions_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_Versions_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
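# Usage sketch (not generated code): `$top` contains a `$`, so it can never
# be written as a literal Python keyword argument; unpack a dict instead.
# `api` and all values below are placeholders.
#
#   versions = api.Analytics_Versions(
#       '2018-01-01T00:00:00Z', 'my-org', 'my-app',
#       end='2018-01-31T23:59:59Z',
#       **{'$top': 10})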
def Analytics_Versions_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_Versions # noqa: E501
Count of active versions in the time range ordered by version. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_Versions_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', '$top', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_Versions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_Versions`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_Versions`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_Versions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/versions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_PerDeviceCounts(self, start, interval, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_PerDeviceCounts # noqa: E501
Count of sessions per device in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_PerDeviceCounts(start, interval, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string interval: Size of interval in ISO 8601 duration format. (PnYnMnDTnHnMnS|PnW|P<date>T<time>). The valid durations are 1 day (P1D), 1 week (P1W), and 30 days (P30D). (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_PerDeviceCounts_with_http_info(start, interval, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_PerDeviceCounts_with_http_info(start, interval, owner_name, app_name, **kwargs) # noqa: E501
return data
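# Usage sketch (not generated code): `interval` only accepts the ISO 8601
# durations P1D, P1W and P30D documented above. Values are placeholders.
#
#   per_device = api.Analytics_PerDeviceCounts(
#       start='2018-01-01T00:00:00Z',
#       interval='P1W',
#       owner_name='my-org',
#       app_name='my-app')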
def Analytics_PerDeviceCounts_with_http_info(self, start, interval, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_PerDeviceCounts # noqa: E501
Count of sessions per device in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_PerDeviceCounts_with_http_info(start, interval, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string interval: Size of interval in ISO 8601 duration format. (PnYnMnDTnHnMnS|PnW|P<date>T<time>). The valid durations are 1 day (P1D), 1 week (P1W), and 30 days (P30D). (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'interval', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_PerDeviceCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_PerDeviceCounts`") # noqa: E501
# verify the required parameter 'interval' is set
if ('interval' not in params or
params['interval'] is None):
raise ValueError("Missing the required parameter `interval` when calling `Analytics_PerDeviceCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_PerDeviceCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_PerDeviceCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'interval' in params:
query_params.append(('interval', params['interval'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/sessions_per_device', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_SessionDurationsDistribution(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_SessionDurationsDistribution # noqa: E501
Gets the session durations distribution. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_SessionDurationsDistribution(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_SessionDurationsDistribution_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_SessionDurationsDistribution_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
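# Usage sketch (not generated code): bound the window with `end` and narrow
# it to specific app versions. Values below are placeholders.
#
#   dist = api.Analytics_SessionDurationsDistribution(
#       '2018-01-01T00:00:00Z', 'my-org', 'my-app',
#       end='2018-01-31T23:59:59Z',
#       versions=['1.0.0', '1.1.0'])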
def Analytics_SessionDurationsDistribution_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_SessionDurationsDistribution # noqa: E501
Gets the session durations distribution. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_SessionDurationsDistribution_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_SessionDurationsDistribution" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_SessionDurationsDistribution`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_SessionDurationsDistribution`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_SessionDurationsDistribution`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/session_durations_distribution', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_SessionCounts(self, start, interval, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_SessionCounts # noqa: E501
Count of sessions in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_SessionCounts(start, interval, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string interval: Size of interval in ISO 8601 duration format. (PnYnMnDTnHnMnS|PnW|P<date>T<time>). The valid durations are 1 day (P1D), 1 week (P1W), and 30 days (P30D). (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_SessionCounts_with_http_info(start, interval, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_SessionCounts_with_http_info(start, interval, owner_name, app_name, **kwargs) # noqa: E501
return data
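# Usage sketch (not generated code): this module keeps the old
# swagger-codegen `async` flag. Since `async` is a reserved word from
# Python 3.7 onward it cannot appear as a literal keyword argument there;
# unpacking a dict keeps the call valid on any interpreter. Values are
# placeholders.
#
#   thread = api.Analytics_SessionCounts(
#       '2018-01-01T00:00:00Z', 'P1D', 'my-org', 'my-app',
#       **{'async': True})
#   counts = thread.get()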
def Analytics_SessionCounts_with_http_info(self, start, interval, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_SessionCounts # noqa: E501
Count of sessions in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_SessionCounts_with_http_info(start, interval, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string interval: Size of interval in ISO 8601 duration format. (PnYnMnDTnHnMnS|PnW|P<date>T<time>). The valid durations are 1 day (P1D), 1 week (P1W), and 30 days (P30D). (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'interval', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_SessionCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_SessionCounts`") # noqa: E501
# verify the required parameter 'interval' is set
if ('interval' not in params or
params['interval'] is None):
raise ValueError("Missing the required parameter `interval` when calling `Analytics_SessionCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_SessionCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_SessionCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'interval' in params:
query_params.append(('interval', params['interval'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/session_counts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_PlaceCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_PlaceCounts # noqa: E501
Places in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_PlaceCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_PlaceCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_PlaceCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
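# Usage sketch (not generated code): swagger-codegen clients conventionally
# raise an ApiException on non-2xx responses; the import path below reflects
# that convention and should be verified against this package's layout.
#
#   from .rest import ApiException  # assumed module layout
#   try:
#       places = api.Analytics_PlaceCounts(
#           '2018-01-01T00:00:00Z', 'my-org', 'my-app')
#   except ApiException as exc:
#       print('places call failed: %s' % exc)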
def Analytics_PlaceCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_PlaceCounts # noqa: E501
Places in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_PlaceCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', '$top', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_PlaceCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_PlaceCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_PlaceCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_PlaceCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/places', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_OperatingSystemCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_OperatingSystemCounts # noqa: E501
OSes in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_OperatingSystemCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_OperatingSystemCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_OperatingSystemCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_OperatingSystemCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_OperatingSystemCounts # noqa: E501
OSes in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_OperatingSystemCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', '$top', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_OperatingSystemCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_OperatingSystemCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_OperatingSystemCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_OperatingSystemCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/oses', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_ModelCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ModelCounts # noqa: E501
Models in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ModelCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_ModelCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_ModelCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
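# Usage sketch (not generated code): the plain wrapper above forces
# `_return_http_data_only`, so call the `_with_http_info` variant directly
# when the status code and headers are needed as well. The 3-tuple return
# shape is standard swagger-codegen behaviour and should be verified against
# this package's ApiClient. Values are placeholders.
#
#   data, status, headers = api.Analytics_ModelCounts_with_http_info(
#       '2018-01-01T00:00:00Z', 'my-org', 'my-app')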
def Analytics_ModelCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ModelCounts # noqa: E501
Models in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ModelCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', '$top', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_ModelCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_ModelCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_ModelCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_ModelCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/models', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_LogFlow(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_LogFlow # noqa: E501
Logs received between the specified start time and the current time. The API will return a maximum of 100 logs per call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_LogFlow(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string start: Start date time in data in ISO 8601 date time format. It must be within the current day in the UTC timezone. The default value is the start time of the current day in UTC timezone. (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_LogFlow_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_LogFlow_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
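# Usage sketch (not generated code): `start` must fall within the current
# UTC day, so derive it from the clock instead of hard-coding it. Names are
# placeholders.
#
#   import datetime
#   day_start = datetime.datetime.utcnow().replace(
#       hour=0, minute=0, second=0, microsecond=0).isoformat() + 'Z'
#   logs = api.Analytics_LogFlow('my-org', 'my-app', start=day_start)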
def Analytics_LogFlow_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_LogFlow # noqa: E501
Logs received between the specified start time and the current time. The API will return a maximum of 100 logs per call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_LogFlow_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string start: Start date time in data in ISO 8601 date time format. It must be within the current day in the UTC timezone. The default value is the start time of the current day in UTC timezone. (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'start'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_LogFlow" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_LogFlow`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_LogFlow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/log_flow', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_LanguageCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_LanguageCounts # noqa: E501
Languages in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_LanguageCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_LanguageCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_LanguageCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_LanguageCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_LanguageCounts # noqa: E501
Languages in the time range # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_LanguageCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', '$top', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_LanguageCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_LanguageCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_LanguageCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_LanguageCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/languages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_GenericLogFlow(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_GenericLogFlow # noqa: E501
Logs received between the specified start time and the current time. The API will return a maximum of 100 logs per call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_GenericLogFlow(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string start: Start date time in data in ISO 8601 date time format. It must be within the current day in the UTC timezone. The default value is the start time of the current day in UTC timezone. (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_GenericLogFlow_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_GenericLogFlow_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
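# Usage sketch (not generated code): the client-level switches threaded
# through above are accepted here as keyword arguments. `_request_timeout`
# bounds the HTTP round trip, and `_preload_content=False` conventionally
# yields the raw urllib3 response instead of a deserialized object (verify
# against this package's ApiClient). Values are placeholders.
#
#   raw = api.Analytics_GenericLogFlow(
#       'my-org', 'my-app',
#       _preload_content=False,
#       _request_timeout=30)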
def Analytics_GenericLogFlow_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_GenericLogFlow # noqa: E501
Logs received between the specified start time and the current time. The API will return a maximum of 100 logs per call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_GenericLogFlow_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string start: Start date time in data in ISO 8601 date time format. It must be within the current day in the UTC timezone. The default value is the start time of the current day in UTC timezone. (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'start'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_GenericLogFlow" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_GenericLogFlow`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_GenericLogFlow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/generic_log_flow', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventPropertyCounts(self, event_name, event_property_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPropertyCounts # noqa: E501
Event property value counts during the time range, in descending order. Limited to at most 5 values. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPropertyCounts(event_name, event_property_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string event_property_name: The id of the event property (required)
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:param integer $top: The number of property values to return (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventPropertyCounts_with_http_info(event_name, event_property_name, start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventPropertyCounts_with_http_info(event_name, event_property_name, start, owner_name, app_name, **kwargs) # noqa: E501
return data
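# Usage sketch (not generated code): drill into one property of one event;
# the response is capped at 5 values per the summary above. The event and
# property names below are placeholders.
#
#   counts = api.Analytics_EventPropertyCounts(
#       event_name='checkout_completed',
#       event_property_name='payment_method',
#       start='2018-01-01T00:00:00Z',
#       owner_name='my-org',
#       app_name='my-app')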
def Analytics_EventPropertyCounts_with_http_info(self, event_name, event_property_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPropertyCounts # noqa: E501
Event property value counts during the time range, in descending order. Limited to at most 5 values. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPropertyCounts_with_http_info(event_name, event_property_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string event_property_name: The id of the event property (required)
:param string start: Start date time in data in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string end: Last date time in data in ISO 8601 date time format (optional)
:param array versions: (optional)
:param integer $top: The number of property values to return (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'event_property_name', 'start', 'owner_name', 'app_name', 'end', 'versions', '$top'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventPropertyCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventPropertyCounts`") # noqa: E501
# verify the required parameter 'event_property_name' is set
if ('event_property_name' not in params or
params['event_property_name'] is None):
raise ValueError("Missing the required parameter `event_property_name` when calling `Analytics_EventPropertyCounts`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_EventPropertyCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventPropertyCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventPropertyCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'event_property_name' in params:
path_params['event_property_name'] = params['event_property_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/properties/{event_property_name}/counts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventProperties(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventProperties # noqa: E501
Event properties. Up to the first 5 received properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventProperties(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventProperties_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventProperties_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
return data
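    # A minimal usage sketch, not part of the generated client: it assumes an
    # `AnalyticsApi` instance named `api` created from a configured `ApiClient`,
    # with placeholder owner/app identifiers. Note that this module predates
    # Python 3.7, where `async` became a reserved word; the `async=True` flag
    # shown in the docstrings is only valid on older interpreters.
    #
    #   props = api.Analytics_EventProperties(
    #       event_name='video_played',  # hypothetical event name
    #       owner_name='my-org',
    #       app_name='my-app')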
def Analytics_EventProperties_with_http_info(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventProperties # noqa: E501
Event properties. Up to the first 5 received properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventProperties_with_http_info(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventProperties" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventProperties`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventProperties`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventProperties`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventCount(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventCount # noqa: E501
Count of events by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventCount(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
return data
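    # Usage sketch (illustrative only; the identifiers and ISO 8601 timestamps
    # below are placeholder assumptions):
    #
    #   counts = api.Analytics_EventCount(
    #       event_name='video_played',
    #       start='2019-01-01T00:00:00Z',
    #       owner_name='my-org',
    #       app_name='my-app',
    #       end='2019-01-31T00:00:00Z')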
def Analytics_EventCount_with_http_info(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventCount # noqa: E501
Count of events by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventCount_with_http_info(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventCount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventCount`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_EventCount`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventCount`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventCount`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/event_count', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventDeviceCount(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventDeviceCount # noqa: E501
Count of devices for an event by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventDeviceCount(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventDeviceCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventDeviceCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
return data
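    # Usage sketch (illustrative; `versions` is assumed to be a list of app
    # version strings to filter by):
    #
    #   device_counts = api.Analytics_EventDeviceCount(
    #       event_name='video_played',
    #       start='2019-01-01T00:00:00Z',
    #       owner_name='my-org',
    #       app_name='my-app',
    #       versions=['1.0.0', '1.1.0'])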
def Analytics_EventDeviceCount_with_http_info(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventDeviceCount # noqa: E501
Count of devices for an event by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventDeviceCount_with_http_info(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventDeviceCount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventDeviceCount`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_EventDeviceCount`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventDeviceCount`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventDeviceCount`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/device_count', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventPerSessionCount(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPerSessionCount # noqa: E501
Count of events per session by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPerSessionCount(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventPerSessionCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventPerSessionCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
return data
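    # Usage sketch (illustrative placeholder values):
    #
    #   per_session = api.Analytics_EventPerSessionCount(
    #       event_name='video_played',
    #       start='2019-01-01T00:00:00Z',
    #       owner_name='my-org',
    #       app_name='my-app')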
def Analytics_EventPerSessionCount_with_http_info(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPerSessionCount # noqa: E501
Count of events per session by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPerSessionCount_with_http_info(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventPerSessionCount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventPerSessionCount`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_EventPerSessionCount`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventPerSessionCount`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventPerSessionCount`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/count_per_session', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventPerDeviceCount(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPerDeviceCount # noqa: E501
Count of events per device by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPerDeviceCount(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventPerDeviceCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventPerDeviceCount_with_http_info(event_name, start, owner_name, app_name, **kwargs) # noqa: E501
return data
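    # Usage sketch (illustrative placeholder values):
    #
    #   per_device = api.Analytics_EventPerDeviceCount(
    #       event_name='video_played',
    #       start='2019-01-01T00:00:00Z',
    #       owner_name='my-org',
    #       app_name='my-app')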
def Analytics_EventPerDeviceCount_with_http_info(self, event_name, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventPerDeviceCount # noqa: E501
Count of events per device by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventPerDeviceCount_with_http_info(event_name, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventPerDeviceCount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventPerDeviceCount`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_EventPerDeviceCount`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventPerDeviceCount`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventPerDeviceCount`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}/count_per_device', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventsDelete(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventsDelete # noqa: E501
        Delete the event with the specified event name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventsDelete(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventsDelete_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventsDelete_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
return data
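    # Usage sketch (illustrative placeholders; deletes a single event by name):
    #
    #   api.Analytics_EventsDelete(
    #       event_name='obsolete_event',
    #       owner_name='my-org',
    #       app_name='my-app')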
def Analytics_EventsDelete_with_http_info(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventsDelete # noqa: E501
        Delete the event with the specified event name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventsDelete_with_http_info(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventsDelete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventsDelete`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventsDelete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventsDelete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events/{event_name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_Events(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_Events # noqa: E501
Count of active events in the time range ordered by event. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_Events(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
        :param array event_name: The specific event names to select (optional)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
        :param integer $skip: The offset (starting at 0) of the first result to return. This parameter, along with $top, is used to perform pagination. (optional)
        :param string $inlinecount: Controls whether or not to include a count of all the items across all pages. (optional)
        :param string $orderby: Controls the sorting order and the column to sort by (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_Events_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_Events_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
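    # Usage sketch (illustrative placeholders). The OData-style parameters such
    # as `$top` and `$orderby` are not valid Python identifiers, so they have to
    # be supplied through dictionary unpacking:
    #
    #   events = api.Analytics_Events(
    #       start='2019-01-01T00:00:00Z',
    #       owner_name='my-org',
    #       app_name='my-app',
    #       **{'$top': 25, '$orderby': 'count desc'})  # hypothetical values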
def Analytics_Events_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_Events # noqa: E501
Count of active events in the time range ordered by event. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_Events_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
        :param array event_name: The specific event names to select (optional)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
        :param integer $skip: The offset (starting at 0) of the first result to return. This parameter, along with $top, is used to perform pagination. (optional)
        :param string $inlinecount: Controls whether or not to include a count of all the items across all pages. (optional)
        :param string $orderby: Controls the sorting order and the column to sort by (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', 'versions', 'event_name', '$top', '$skip', '$inlinecount', '$orderby'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_Events" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_Events`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_Events`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_Events`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
if 'event_name' in params:
query_params.append(('event_name', params['event_name'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
if '$skip' in params:
query_params.append(('$skip', params['$skip'])) # noqa: E501
if '$inlinecount' in params:
query_params.append(('$inlinecount', params['$inlinecount'])) # noqa: E501
if '$orderby' in params:
query_params.append(('$orderby', params['$orderby'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/events', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_EventsDeleteLogs(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventsDeleteLogs # noqa: E501
        Delete the event logs with the specified event name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventsDeleteLogs(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_EventsDeleteLogs_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_EventsDeleteLogs_with_http_info(event_name, owner_name, app_name, **kwargs) # noqa: E501
return data
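    # Usage sketch (illustrative placeholders; removes the logs recorded for a
    # single event name):
    #
    #   api.Analytics_EventsDeleteLogs(
    #       event_name='obsolete_event',
    #       owner_name='my-org',
    #       app_name='my-app')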
def Analytics_EventsDeleteLogs_with_http_info(self, event_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_EventsDeleteLogs # noqa: E501
        Delete the event logs with the specified event name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_EventsDeleteLogs_with_http_info(event_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string event_name: The id of the event (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_EventsDeleteLogs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_name' is set
if ('event_name' not in params or
params['event_name'] is None):
raise ValueError("Missing the required parameter `event_name` when calling `Analytics_EventsDeleteLogs`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_EventsDeleteLogs`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_EventsDeleteLogs`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_name' in params:
path_params['event_name'] = params['event_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/event_logs/{event_name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_DistributionReleaseCounts(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_DistributionReleaseCounts # noqa: E501
Count of total downloads for the provided distribution releases. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DistributionReleaseCounts(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The releases to retrieve. (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_DistributionReleaseCounts_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.Analytics_DistributionReleaseCounts_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
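    # Usage sketch; the payload shape below is an assumption for illustration
    # only, since `body` is typed as a plain object in this client:
    #
    #   release_counts = api.Analytics_DistributionReleaseCounts(
    #       owner_name='my-org',
    #       app_name='my-app',
    #       body={'releases': [{'release_id': 1,
    #                           'distribution_group': 'beta'}]})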
def Analytics_DistributionReleaseCounts_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_DistributionReleaseCounts # noqa: E501
Count of total downloads for the provided distribution releases. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DistributionReleaseCounts_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The releases to retrieve. (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_DistributionReleaseCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_DistributionReleaseCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_DistributionReleaseCounts`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `Analytics_DistributionReleaseCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/distribution/release_counts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_CrashFreeDevicePercentages(self, start, version, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_CrashFreeDevicePercentages # noqa: E501
        Percentage of crash-free devices by day in the time range, based on the selected versions. The API will return -1 if the count of crashed devices is greater than the count of active devices. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashFreeDevicePercentages(start, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string version: (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashFreeDevicePercentages_with_http_info(start, version, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_CrashFreeDevicePercentages_with_http_info(start, version, owner_name, app_name, **kwargs) # noqa: E501
return data
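    # Usage sketch (illustrative placeholders; per the docstring, a value of -1
    # in the response signals more crashing devices than active devices):
    #
    #   crash_free = api.Analytics_CrashFreeDevicePercentages(
    #       start='2019-01-01T00:00:00Z',
    #       version='1.2.0',
    #       owner_name='my-org',
    #       app_name='my-app')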
def Analytics_CrashFreeDevicePercentages_with_http_info(self, start, version, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_CrashFreeDevicePercentages # noqa: E501
        Percentage of crash-free devices by day in the time range, based on the selected versions. The API will return -1 if the count of crashed devices is greater than the count of active devices. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashFreeDevicePercentages_with_http_info(start, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
        :param string version: (required)
        :param string owner_name: The name of the owner (required)
        :param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'version', 'owner_name', 'app_name', 'end'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashFreeDevicePercentages" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_CrashFreeDevicePercentages`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `Analytics_CrashFreeDevicePercentages`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashFreeDevicePercentages`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashFreeDevicePercentages`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crashfree_device_percentages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_CrashGroupTotals(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Overall crashes and affected users count of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupTotals(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashGroupTotals_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_CrashGroupTotals_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
return data
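    # Usage sketch (illustrative placeholders; the crash group id is a made-up
    # UUID):
    #
    #   totals = api.Analytics_CrashGroupTotals(
    #       crash_group_id='123e4567-e89b-12d3-a456-426614174000',
    #       version='1.2.0',
    #       owner_name='my-org',
    #       app_name='my-app')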
def Analytics_CrashGroupTotals_with_http_info(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Overall crashes and affected users count of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupTotals_with_http_info(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['crash_group_id', 'version', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashGroupTotals" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'crash_group_id' is set
if ('crash_group_id' not in params or
params['crash_group_id'] is None):
raise ValueError("Missing the required parameter `crash_group_id` when calling `Analytics_CrashGroupTotals`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `Analytics_CrashGroupTotals`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashGroupTotals`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashGroupTotals`") # noqa: E501
collection_formats = {}
path_params = {}
if 'crash_group_id' in params:
path_params['crash_group_id'] = params['crash_group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_groups/{crash_group_id}/overall', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_CrashGroupOperatingSystemCounts(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
top OSes of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupOperatingSystemCounts(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashGroupOperatingSystemCounts_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_CrashGroupOperatingSystemCounts_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
return data
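    # Usage sketch (illustrative placeholders; `$top` again goes through
    # dictionary unpacking because it is not a valid Python identifier):
    #
    #   top_oses = api.Analytics_CrashGroupOperatingSystemCounts(
    #       crash_group_id='123e4567-e89b-12d3-a456-426614174000',
    #       version='1.2.0',
    #       owner_name='my-org',
    #       app_name='my-app',
    #       **{'$top': 5})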
def Analytics_CrashGroupOperatingSystemCounts_with_http_info(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
top OSes of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupOperatingSystemCounts_with_http_info(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['crash_group_id', 'version', 'owner_name', 'app_name', '$top'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashGroupOperatingSystemCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'crash_group_id' is set
if ('crash_group_id' not in params or
params['crash_group_id'] is None):
raise ValueError("Missing the required parameter `crash_group_id` when calling `Analytics_CrashGroupOperatingSystemCounts`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `Analytics_CrashGroupOperatingSystemCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashGroupOperatingSystemCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashGroupOperatingSystemCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'crash_group_id' in params:
path_params['crash_group_id'] = params['crash_group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_groups/{crash_group_id}/operating_systems', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
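    # Usage sketch (illustrative, not part of the generated client): because
    # "$top" is not a valid Python identifier, it cannot be written as a normal
    # keyword argument and has to be splatted in from a dict. The names used
    # below (api, crash_group_id, ...) are assumed to be defined by the caller.
    #
    #   top_oses = api.Analytics_CrashGroupOperatingSystemCounts(
    #       crash_group_id, version, owner_name, app_name, **{'$top': 10})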
def Analytics_CrashGroupModelCounts(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
top models of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupModelCounts(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashGroupModelCounts_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_CrashGroupModelCounts_with_http_info(crash_group_id, version, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_CrashGroupModelCounts_with_http_info(self, crash_group_id, version, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
top models of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupModelCounts_with_http_info(crash_group_id, version, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param integer $top: The maximum number of results to return. (0 will fetch all results) (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['crash_group_id', 'version', 'owner_name', 'app_name', '$top'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashGroupModelCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'crash_group_id' is set
if ('crash_group_id' not in params or
params['crash_group_id'] is None):
raise ValueError("Missing the required parameter `crash_group_id` when calling `Analytics_CrashGroupModelCounts`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `Analytics_CrashGroupModelCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashGroupModelCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashGroupModelCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'crash_group_id' in params:
path_params['crash_group_id'] = params['crash_group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
if '$top' in params:
query_params.append(('$top', params['$top'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_groups/{crash_group_id}/models', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
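    # Async usage sketch (illustrative): passing async=True makes the call
    # return the request thread instead of the result, as the docstrings above
    # show. Note that "async" became a reserved word in Python 3.7, so this
    # module (and the async=True spelling) only runs on older interpreters.
    #
    #   thread = api.Analytics_CrashGroupModelCounts(
    #       crash_group_id, version, owner_name, app_name, async=True)
    #   result = thread.get()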
def Analytics_CrashGroupCounts(self, crash_group_id, version, start, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Count of crashes by day in the time range of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupCounts(crash_group_id, version, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashGroupCounts_with_http_info(crash_group_id, version, start, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_CrashGroupCounts_with_http_info(crash_group_id, version, start, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_CrashGroupCounts_with_http_info(self, crash_group_id, version, start, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Count of crashes by day in the time range of the selected crash group with selected version. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupCounts_with_http_info(crash_group_id, version, start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string crash_group_id: The id of the crash group (required)
:param string version: (required)
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['crash_group_id', 'version', 'start', 'owner_name', 'app_name', 'end'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashGroupCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'crash_group_id' is set
if ('crash_group_id' not in params or
params['crash_group_id'] is None):
raise ValueError("Missing the required parameter `crash_group_id` when calling `Analytics_CrashGroupCounts`") # noqa: E501
# verify the required parameter 'version' is set
if ('version' not in params or
params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `Analytics_CrashGroupCounts`") # noqa: E501
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_CrashGroupCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashGroupCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashGroupCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'crash_group_id' in params:
path_params['crash_group_id'] = params['crash_group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_groups/{crash_group_id}/crash_counts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
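    # Usage sketch (illustrative): start is required and end is optional; both
    # are ISO 8601 date-time strings passed through as query parameters.
    #
    #   counts = api.Analytics_CrashGroupCounts(
    #       crash_group_id, version, '2018-01-01T00:00:00Z',
    #       owner_name, app_name, end='2018-02-01T00:00:00Z')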
def Analytics_CrashGroupsTotals(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_CrashGroupsTotals # noqa: E501
        Overall crash and affected-user counts for the selected crash groups with the selected versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupsTotals(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashGroupsTotals_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
            data = self.Analytics_CrashGroupsTotals_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def Analytics_CrashGroupsTotals_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_CrashGroupsTotals # noqa: E501
        Overall crash and affected-user counts for the selected crash groups with the selected versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashGroupsTotals_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashGroupsTotals" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashGroupsTotals`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashGroupsTotals`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `Analytics_CrashGroupsTotals`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
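    # Usage sketch (illustrative): this operation POSTs a selection of crash
    # groups. The body schema is not declared in this module, so the shape
    # below is an assumption based on the operation's description.
    #
    #   totals = api.Analytics_CrashGroupsTotals(
    #       owner_name, app_name,
    #       body={'crash_groups': [{'crash_group_id': crash_group_id,
    #                               'app_version': version}]})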
def Analytics_CrashCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Count of crashes by day in the time range based the selected versions. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CrashCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_CrashCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_CrashCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
""" # noqa: E501
Count of crashes by day in the time range based the selected versions. Available for UWP apps only. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CrashCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date time of the data, in ISO 8601 date time format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date time of the data, in ISO 8601 date time format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CrashCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_CrashCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CrashCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CrashCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/crash_counts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
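    # Usage sketch (illustrative): versions is an array parameter, so a Python
    # list is expected; since collection_formats is empty here, serialization
    # of the list is left to the api_client defaults.
    #
    #   counts = api.Analytics_CrashCounts(
    #       '2018-01-01T00:00:00Z', owner_name, app_name,
    #       versions=['1.0.0', '1.1.0'])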
def Analytics_AudienceNameExists(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_AudienceNameExists # noqa: E501
        Returns whether the audience definition exists. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_AudienceNameExists(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_AudienceNameExists_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_AudienceNameExists_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_AudienceNameExists_with_http_info(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_AudienceNameExists # noqa: E501
        Returns whether the audience definition exists. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_AudienceNameExists_with_http_info(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['audience_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_AudienceNameExists" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'audience_name' is set
if ('audience_name' not in params or
params['audience_name'] is None):
raise ValueError("Missing the required parameter `audience_name` when calling `Analytics_AudienceNameExists`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_AudienceNameExists`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_AudienceNameExists`") # noqa: E501
collection_formats = {}
path_params = {}
if 'audience_name' in params:
path_params['audience_name'] = params['audience_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/{audience_name}', 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
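    # Usage sketch (illustrative): this is a HEAD request, so existence is
    # signalled by the HTTP status rather than a response body. Assuming the
    # usual swagger-codegen behaviour of raising ApiException on non-2xx
    # responses, a caller could check:
    #
    #   try:
    #       api.Analytics_AudienceNameExists(audience_name, owner_name, app_name)
    #       exists = True
    #   except ApiException as e:
    #       if e.status != 404:
    #           raise
    #       exists = False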
def Analytics_DeleteAudience(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_DeleteAudience # noqa: E501
        Deletes the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DeleteAudience(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_DeleteAudience_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_DeleteAudience_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_DeleteAudience_with_http_info(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_DeleteAudience # noqa: E501
        Deletes the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DeleteAudience_with_http_info(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['audience_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_DeleteAudience" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'audience_name' is set
if ('audience_name' not in params or
params['audience_name'] is None):
raise ValueError("Missing the required parameter `audience_name` when calling `Analytics_DeleteAudience`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_DeleteAudience`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_DeleteAudience`") # noqa: E501
collection_formats = {}
path_params = {}
if 'audience_name' in params:
path_params['audience_name'] = params['audience_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/{audience_name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_GetAudience(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_GetAudience # noqa: E501
        Gets the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_GetAudience(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_GetAudience_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_GetAudience_with_http_info(audience_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_GetAudience_with_http_info(self, audience_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_GetAudience # noqa: E501
        Gets the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_GetAudience_with_http_info(audience_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['audience_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_GetAudience" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'audience_name' is set
if ('audience_name' not in params or
params['audience_name'] is None):
raise ValueError("Missing the required parameter `audience_name` when calling `Analytics_GetAudience`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_GetAudience`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_GetAudience`") # noqa: E501
collection_formats = {}
path_params = {}
if 'audience_name' in params:
path_params['audience_name'] = params['audience_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/{audience_name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_CreateOrUpdateAudience(self, audience_name, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_CreateOrUpdateAudience # noqa: E501
        Creates or updates the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CreateOrUpdateAudience(audience_name, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Audience definition (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_CreateOrUpdateAudience_with_http_info(audience_name, owner_name, app_name, body, **kwargs) # noqa: E501
else:
            data = self.Analytics_CreateOrUpdateAudience_with_http_info(audience_name, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def Analytics_CreateOrUpdateAudience_with_http_info(self, audience_name, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_CreateOrUpdateAudience # noqa: E501
        Creates or updates the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_CreateOrUpdateAudience_with_http_info(audience_name, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string audience_name: The name of the audience (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Audience definition (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['audience_name', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_CreateOrUpdateAudience" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'audience_name' is set
if ('audience_name' not in params or
params['audience_name'] is None):
raise ValueError("Missing the required parameter `audience_name` when calling `Analytics_CreateOrUpdateAudience`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_CreateOrUpdateAudience`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_CreateOrUpdateAudience`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `Analytics_CreateOrUpdateAudience`") # noqa: E501
collection_formats = {}
path_params = {}
if 'audience_name' in params:
path_params['audience_name'] = params['audience_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/{audience_name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
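    # Usage sketch (illustrative): PUT makes this a create-or-update keyed by
    # audience_name. The audience definition schema is not declared in this
    # module; the body below is an assumed example only.
    #
    #   api.Analytics_CreateOrUpdateAudience(
    #       'german-users', owner_name, app_name,
    #       body={'description': 'Users with a German locale',
    #             'definition': "carrier_country eq 'de'"})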
def Analytics_ListDevicePropertyValues(self, property_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListDevicePropertyValues # noqa: E501
Get list of device property values. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListDevicePropertyValues(property_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string property_name: Device property (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string contains: Only return values that contain this string (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_ListDevicePropertyValues_with_http_info(property_name, owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_ListDevicePropertyValues_with_http_info(property_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_ListDevicePropertyValues_with_http_info(self, property_name, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListDevicePropertyValues # noqa: E501
Get list of device property values. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListDevicePropertyValues_with_http_info(property_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string property_name: Device property (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string contains: Only return values that contain this string (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['property_name', 'owner_name', 'app_name', 'contains'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_ListDevicePropertyValues" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'property_name' is set
if ('property_name' not in params or
params['property_name'] is None):
raise ValueError("Missing the required parameter `property_name` when calling `Analytics_ListDevicePropertyValues`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_ListDevicePropertyValues`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_ListDevicePropertyValues`") # noqa: E501
collection_formats = {}
path_params = {}
if 'property_name' in params:
path_params['property_name'] = params['property_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'contains' in params:
query_params.append(('contains', params['contains'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/metadata/device_properties/{property_name}/values', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
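    # Usage sketch (illustrative): contains narrows the returned values to
    # those containing the given substring; the property name used below is an
    # assumed example.
    #
    #   values = api.Analytics_ListDevicePropertyValues(
    #       'os_name', owner_name, app_name, contains='iOS')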
def Analytics_ListDeviceProperties(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListDeviceProperties # noqa: E501
Get list of device properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListDeviceProperties(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_ListDeviceProperties_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_ListDeviceProperties_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_ListDeviceProperties_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListDeviceProperties # noqa: E501
Get list of device properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListDeviceProperties_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_ListDeviceProperties" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_ListDeviceProperties`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_ListDeviceProperties`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/metadata/device_properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_ListCustomProperties(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListCustomProperties # noqa: E501
Get list of custom properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListCustomProperties(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_ListCustomProperties_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_ListCustomProperties_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_ListCustomProperties_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListCustomProperties # noqa: E501
Get list of custom properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListCustomProperties_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_ListCustomProperties" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_ListCustomProperties`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_ListCustomProperties`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/metadata/custom_properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_TestAudience(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_TestAudience # noqa: E501
        Tests the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_TestAudience(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Audience definition (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_TestAudience_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
            data = self.Analytics_TestAudience_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def Analytics_TestAudience_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""Analytics_TestAudience # noqa: E501
        Tests the audience definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_TestAudience_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Audience definition (required)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_TestAudience" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_TestAudience`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_TestAudience`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `Analytics_TestAudience`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences/definition/test', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
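    # Usage sketch (illustrative): testing a definition does not persist it.
    # The same assumed body shape as in the Analytics_CreateOrUpdateAudience
    # sketch above applies.
    #
    #   api.Analytics_TestAudience(
    #       owner_name, app_name,
    #       body={'definition': "carrier_country eq 'de'"})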
def Analytics_ListAudiences(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListAudiences # noqa: E501
Get list of audiences. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListAudiences(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param boolean include_disabled: Include disabled audience definitions (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_ListAudiences_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
            data = self.Analytics_ListAudiences_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def Analytics_ListAudiences_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_ListAudiences # noqa: E501
Get list of audiences. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_ListAudiences_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param boolean include_disabled: Include disabled audience definitions (optional)
:return: ErrorResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'include_disabled'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_ListAudiences" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_ListAudiences`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_ListAudiences`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'include_disabled' in params:
query_params.append(('include_disabled', params['include_disabled'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/audiences', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def Analytics_DeviceCounts(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_DeviceCounts # noqa: E501
Count of active devices by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DeviceCounts(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date-time of the data in ISO 8601 format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date-time of the data in ISO 8601 format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.Analytics_DeviceCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.Analytics_DeviceCounts_with_http_info(start, owner_name, app_name, **kwargs) # noqa: E501
return data
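    # Hedged usage sketch (date and names are illustrative placeholders):
    #
    #   >>> counts = api.Analytics_DeviceCounts('2019-01-01T00:00:00Z',
    #   ...                                     'example-org', 'example-app',
    #   ...                                     end='2019-02-01T00:00:00Z')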
def Analytics_DeviceCounts_with_http_info(self, start, owner_name, app_name, **kwargs): # noqa: E501
"""Analytics_DeviceCounts # noqa: E501
Count of active devices by interval in the time range. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.Analytics_DeviceCounts_with_http_info(start, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
        :param string start: Start date-time of the data in ISO 8601 format (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
        :param string end: Last date-time of the data in ISO 8601 format (optional)
        :param array versions: (optional)
:return: Error
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'owner_name', 'app_name', 'end', 'versions'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method Analytics_DeviceCounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start' is set
if ('start' not in params or
params['start'] is None):
raise ValueError("Missing the required parameter `start` when calling `Analytics_DeviceCounts`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `Analytics_DeviceCounts`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `Analytics_DeviceCounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'versions' in params:
query_params.append(('versions', params['versions'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/analytics/active_device_counts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Error', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 47.452294
| 327
| 0.630922
|
0e19821473afa27108d7802475b24ae03113c378
| 8,354
|
py
|
Python
|
fwc_edu/fwc_education/report/fwc_salary_register/fwc_salary_register.py
|
mohsinalimat/fwc_edu
|
d3e2e43b5cd5001a0b8845206e85b1870dc9e53d
|
[
"MIT"
] | null | null | null |
fwc_edu/fwc_education/report/fwc_salary_register/fwc_salary_register.py
|
mohsinalimat/fwc_edu
|
d3e2e43b5cd5001a0b8845206e85b1870dc9e53d
|
[
"MIT"
] | null | null | null |
fwc_edu/fwc_education/report/fwc_salary_register/fwc_salary_register.py
|
mohsinalimat/fwc_edu
|
d3e2e43b5cd5001a0b8845206e85b1870dc9e53d
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
from frappe.utils import flt
from frappe import msgprint, _
def execute(filters=None):
if not filters: filters = {}
currency = None
if filters.get('currency'):
currency = filters.get('currency')
company_currency = erpnext.get_company_currency(filters.get("company"))
salary_slips = get_salary_slips(filters, company_currency)
if not salary_slips: return [], []
columns, earning_types, ded_types = get_columns(salary_slips, filters)
ss_earning_map = get_ss_earning_map(salary_slips, currency, company_currency)
	ss_ded_map = get_ss_ded_map(salary_slips, currency, company_currency)
	doj_map = get_employee_doj_map()  # note: fetched but currently unused below
basic_annual = get_employee_annual_basic()
mycompany = filters.get("company")
data = []
for ss in salary_slips:
		if mycompany in ("FWC Education", "Mailefihi Siuilikutapu College"):
row = [ss.branch,ss.employee,ss.employee_name, basic_annual.get(ss.employee)]
else:
row = [ss.employee,ss.employee_name, basic_annual.get(ss.employee)]
# row = [ss.name, ss.employee, ss.employee_name, basic_annual.get(ss.employee), ss.branch, ss.department, ss.designation,
# ss.company, ss.start_date, ss.end_date, ss.leave_without_pay, ss.payment_days]
# if ss.branch is not None: columns[3] = columns[3].replace('-1','120')
# if ss.department is not None: columns[4] = columns[4].replace('-1','120')
# if ss.designation is not None: columns[5] = columns[5].replace('-1','120')
# if ss.leave_without_pay is not None: columns[9] = columns[9].replace('-1','130')
for e in earning_types:
row.append(ss_earning_map.get(ss.name, {}).get(e))
if currency == company_currency:
row += [flt(ss.gross_pay) * flt(ss.exchange_rate)]
else:
row += [ss.gross_pay]
# for d in ded_types:
# row.append(ss_ded_map.get(ss.name, {}).get(d))
# row.append(ss.total_loan_repayment)
# if currency == company_currency:
# row += [flt(ss.total_deduction) * flt(ss.exchange_rate), flt(ss.net_pay) * flt(ss.exchange_rate)]
# else:
# row += [ss.total_deduction, ss.net_pay]
for d in ded_types:
row.append(ss_ded_map.get(ss.name, {}).get(d))
# row.append(ss.total_loan_repayment)
# if currency == company_currency:
# row += [flt(ss.total_deduction) * flt(ss.exchange_rate), flt(ss.net_pay) * flt(ss.exchange_rate)]
# else:
row += [ss.net_pay]
# row.append(currency or company_currency)
data.append(row)
return columns, data
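# Sketch of a row produced above for an FWC/Mailefihi company (values are
# illustrative; the earning/deduction columns depend on the components found
# in the slips):
#   [branch, employee, employee_name, basic_salary,
#    <earning amounts...>, gross_pay, <deduction amounts...>, net_pay]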
def get_columns(salary_slips, filters):
mycompany = filters.get("company")
	if mycompany in ("FWC Education", "Mailefihi Siuilikutapu College"):
columns = [
_("Branch") + ":Link/Branch:100",
_("Employee") + ":Link/Employee:120",
_("Employee Name") + "::140",
_("Basic Salary") + ":Currency:120",
]
else:
columns = [
_("Employee") + ":Link/Employee:120",
_("Employee Name") + "::140",
_("Basic Salary") + ":Currency:120",
]
salary_components = {_("Earning"): [], _("Deduction"): []}
for component in frappe.db.sql("""select distinct sc.type,
IF(sd.salary_component IN ('MBF', 'MBF02','MBF03','MBF04', 'MBF05'),'MBF',
IF(sd.salary_component IN ('BSP', 'BSP02','BSP03','BSP04', 'BSP05'),'BSP',
IF(sd.salary_component IN ('TDB', 'TDB02','TDB03','TDB04', 'TDB05'), 'TDB',
IF(sd.salary_component IN ('Retirement Fund', 'Retirement Fund - Voluntary'),'Retirement Fund', sd.salary_component)))) as salary_component
from `tabSalary Detail` sd, `tabSalary Component` sc
where sc.name=sd.salary_component and sd.amount != 0 and sd.parent in (%s)""" %
(', '.join(['%s']*len(salary_slips))), tuple([d.name for d in salary_slips]), as_dict=1):
salary_components[_(component.type)].append(component.salary_component)
columns = columns + [(e + ":Currency:120") for e in salary_components[_("Earning")]] + \
[_("Gross Pay") + ":Currency:120"] + [(d + ":Currency:120") for d in salary_components[_("Deduction")]] + \
[_("Net Pay") + ":Currency:120"]
# columns = columns + [(e + ":Currency:120") for e in salary_components[_("Earning")]] + \
# [_("Gross Pay") + ":Currency:120"] + [(d + ":Currency:120") for d in salary_components[_("Deduction")]] + \
# [_("Loan Repayment") + ":Currency:120", _("Total Deduction") + ":Currency:120", _("Net Pay") + ":Currency:120"]
return columns, salary_components[_("Earning")], salary_components[_("Deduction")]
def get_salary_slips(filters, company_currency):
	filters.update({"from_date": filters.get("from_date"), "to_date": filters.get("to_date")})
conditions, filters = get_conditions(filters, company_currency)
salary_slips = frappe.db.sql("""select * from `tabSalary Slip` where %s
order by reports_group""" % conditions, filters, as_dict=1)
# addtional_salary = frappe.db.sql("""select * from `tabFWC Additional Deduction` where %s
# order by employee""" % conditions, filters, as_dict=1)
# msgprint(_("Salary Slip {0}").format(addtional_salary))
# salary_slips.append(salaryslips)
return salary_slips or []
def get_conditions(filters, company_currency):
	conditions = "1=1"  # seed clause so the appended " and ..." filters always form valid SQL
	doc_status = {"Draft": 0, "Submitted": 1, "Cancelled": 2}
	if filters.get("docstatus"):
		conditions += " and docstatus = {0}".format(doc_status[filters.get("docstatus")])
if filters.get("from_date"): conditions += " and start_date >= %(from_date)s"
if filters.get("to_date"): conditions += " and end_date <= %(to_date)s"
if filters.get("company"): conditions += " and company = %(company)s"
if filters.get("employee"): conditions += " and employee = %(employee)s"
if filters.get("currency") and filters.get("currency") != company_currency:
conditions += " and currency = %(currency)s"
return conditions, filters
def get_employee_annual_basic():
return frappe._dict(frappe.db.sql("""
SELECT
employee,
basic_salary
FROM `tabEmployee`
"""))
def get_employee_doj_map():
return frappe._dict(frappe.db.sql("""
SELECT
employee,
date_of_joining
FROM `tabEmployee`
"""))
def get_ss_earning_map(salary_slips, currency, company_currency):
ss_earnings = frappe.db.sql("""select sd.parent, sd.salary_component,
sum(sd.amount) as amount, ss.exchange_rate, ss.name
from `tabSalary Detail` sd, `tabSalary Slip` ss where sd.parent=ss.name and sd.parent in (%s) group by sd.parent, sd.salary_component""" %
(', '.join(['%s']*len(salary_slips))), tuple([d.name for d in salary_slips]), as_dict=1)
ss_earning_map = {}
for d in ss_earnings:
ss_earning_map.setdefault(d.parent, frappe._dict()).setdefault(d.salary_component, [])
if currency == company_currency:
ss_earning_map[d.parent][d.salary_component] = flt(d.amount) * flt(d.exchange_rate if d.exchange_rate else 1)
else:
ss_earning_map[d.parent][d.salary_component] = flt(d.amount)
return ss_earning_map
def get_ss_ded_map(salary_slips, currency, company_currency):
ss_deductions = frappe.db.sql("""select distinct sd.parent,
IF(sd.salary_component IN ('MBF', 'MBF02','MBF03','MBF04', 'MBF05'),'MBF',
IF(sd.salary_component IN ('BSP', 'BSP02','BSP03','BSP04', 'BSP05'),'BSP',
IF(sd.salary_component IN ('TDB', 'TDB02','TDB03','TDB04', 'TDB05'), 'TDB',
IF(sd.salary_component IN ('Retirement Fund', 'Retirement Fund - Voluntary'),'Retirement Fund', sd.salary_component)))) as salary_component,
IF(sd.salary_component IN ('MBF', 'MBF02','MBF03','MBF04', 'MBF05'),'MBF',
IF(sd.salary_component IN ('BSP', 'BSP02','BSP03','BSP04', 'BSP05'),'BSP',
IF(sd.salary_component IN ('TDB', 'TDB02','TDB03','TDB04', 'TDB05'), 'TDB',
IF(sd.salary_component IN ('Retirement Fund', 'Retirement Fund - Voluntary'),'Retirement Fund', sd.salary_component)))) as salarycomponent,
sum(sd.amount) as amount, ss.name
from `tabSalary Detail` sd, `tabSalary Slip` ss
where sd.parent=ss.name and sd.parent in (%s) group by salarycomponent, sd.parent""" %
(', '.join(['%s']*len(salary_slips))), tuple([d.name for d in salary_slips]) , as_dict=1)
ss_ded_map = {}
# msgprint(_("Deduction {0}").format(ss_deductions))
for d in ss_deductions:
ss_ded_map.setdefault(d.parent, frappe._dict()).setdefault(d.salary_component, [])
ss_ded_map[d.parent][d.salary_component] = flt(d.amount)
return ss_ded_map
| 41.77
| 159
| 0.692961
|
95c694930ab2d2f0305d5cd3cf7bf6bde062ff23
| 792
|
py
|
Python
|
applications/escola_curso/models/tables.py
|
BetinRibeiro/web2py_crediario
|
d7b0aef4579870922c6d87b4b0322b427b2bef98
|
[
"BSD-3-Clause"
] | 2
|
2019-10-18T23:04:22.000Z
|
2019-10-24T04:03:10.000Z
|
applications/escola_curso/models/tables.py
|
BetinRibeiro/web2py_crediario
|
d7b0aef4579870922c6d87b4b0322b427b2bef98
|
[
"BSD-3-Clause"
] | null | null | null |
applications/escola_curso/models/tables.py
|
BetinRibeiro/web2py_crediario
|
d7b0aef4579870922c6d87b4b0322b427b2bef98
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
Notas = db.define_table('notas',
Field('nota', 'float', default=0, label="Nota"),
Field('aluno', 'reference auth_user', notnull=True, label="Aluno"),
Field('professor', 'reference auth_user', ondelete='SET NULL', label="Professor")
)
Biblioteca = db.define_table('biblioteca',
Field('arquivo', 'upload', notnull=True, label="Arquivo"),
Field('professor', 'reference auth_user', ondelete='SET NULL', label="Professor")
)
Forum = db.define_table('forum',
Field('mensagem', 'text', notnull=True, label="Mensagem", widget=ckeditor.widget),
auth.signature
)
Comentarios = db.define_table('comentarios',
Field('mensagem', 'text', notnull=True, label="Mensagem", widget=ckeditor.widget),
Field('postagem', 'reference forum', label="Postagem"),
auth.signature
)
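# Hedged web2py DAL usage sketch for the tables above (ids are illustrative):
#   db.notas.insert(nota=8.5, aluno=1, professor=2)
#   rows = db(db.notas.aluno == 1).select()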
| 33
| 83
| 0.705808
|
2e391ac7336b4c3437062139c7f01e76be893aad
| 18,747
|
py
|
Python
|
opts.py
|
wrn7777/MscProject
|
0c11cd7e00c830bbf7a0e02455cb86fbb5ed1b4b
|
[
"MIT"
] | null | null | null |
opts.py
|
wrn7777/MscProject
|
0c11cd7e00c830bbf7a0e02455cb86fbb5ed1b4b
|
[
"MIT"
] | null | null | null |
opts.py
|
wrn7777/MscProject
|
0c11cd7e00c830bbf7a0e02455cb86fbb5ed1b4b
|
[
"MIT"
] | null | null | null |
import argparse
def parse_opts():
parser = argparse.ArgumentParser()
parser.add_argument('--root_path', default='/root/data/ActivityNet', type=str, help='Root directory path of data')
parser.add_argument('--video_path', default='video_kinetics_jpg', type=str, help='Directory path of Videos')
parser.add_argument('--annotation_path', default='kinetics.json', type=str, help='Annotation file path')
parser.add_argument('--result_path', default='results', type=str, help='Result directory path')
parser.add_argument('--store_name', default='model', type=str, help='Name to store checkpoints')
parser.add_argument('--modality', default='RGB', type=str, help='Modality of generated model. RGB, Flow or RGBFlow')
parser.add_argument('--pretrain_modality', default='RGB', type=str, help='Modality of the pretrain model. RGB, Flow or RGBFlow')
parser.add_argument('--dataset', default='kinetics', type=str, help='Used dataset (activitynet | kinetics | ucf101 | hmdb51)')
parser.add_argument('--n_classes', default=400, type=int, help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)')
parser.add_argument('--n_finetune_classes', default=400, type=int, help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.')
parser.add_argument('--sample_size', default=112, type=int, help='Height and width of inputs')
parser.add_argument('--sample_duration', default=16, type=int, help='Temporal duration of inputs')
parser.add_argument('--downsample', default=1, type=int, help='Downsampling. Selecting 1 frame out of N')
parser.add_argument('--initial_scale', default=1.0, type=float, help='Initial scale for multiscale cropping')
parser.add_argument('--n_scales', default=5, type=int, help='Number of scales for multiscale cropping')
parser.add_argument('--scale_step', default=0.84089641525, type=float, help='Scale step for multiscale cropping')
parser.add_argument('--train_crop', default='corner', type=str, help='Spatial cropping method in training. random is uniform. corner is selection from 4 corners and 1 center. (random | corner | center)')
parser.add_argument('--learning_rate', default=0.04, type=float, help='Initial learning rate (divided by 10 while training by lr scheduler)')
parser.add_argument('--lr_steps', default=[15, 25, 35, 45, 60, 50, 200, 250], type=float, nargs="+", metavar='LRSteps', help='epochs to decay learning rate by 10') # [15, 30, 37, 50, 200, 250]
parser.add_argument('--momentum', default=0.9, type=float, help='Momentum')
parser.add_argument('--dampening', default=0.9, type=float, help='dampening of SGD')
parser.add_argument('--weight_decay', default=1e-3, type=float, help='Weight Decay')
parser.add_argument('--ft_begin_index', default=0, type=int, help='Begin block index of fine-tuning')
parser.add_argument('--mean_dataset', default='activitynet', type=str, help='dataset for mean values of mean subtraction (activitynet | kinetics)')
parser.add_argument('--no_mean_norm', action='store_true', help='If true, inputs are not normalized by mean.')
parser.set_defaults(no_mean_norm=False)
parser.add_argument('--std_norm', action='store_true', help='If true, inputs are normalized by standard deviation.')
parser.set_defaults(std_norm=False)
parser.add_argument('--nesterov', action='store_true', help='Nesterov momentum')
parser.set_defaults(nesterov=False)
parser.add_argument('--optimizer', default='sgd', type=str, help='Currently only support SGD')
parser.add_argument('--lr_patience', default=10, type=int, help='Patience of LR scheduler. See documentation of ReduceLROnPlateau.')
parser.add_argument('--batch_size', default=128, type=int, help='Batch Size')
parser.add_argument('--n_epochs', default=250, type=int, help='Number of total epochs to run')
parser.add_argument('--begin_epoch', default=1, type=int, help='Training begins at this epoch. Previous trained model indicated by resume_path is loaded.')
parser.add_argument('--n_val_samples', default=3, type=int, help='Number of validation samples for each activity')
parser.add_argument('--resume_path', default='', type=str, help='Save data (.pth) of previous training')
parser.add_argument('--pretrain_path', default='', type=str, help='Pretrained model (.pth)')
parser.add_argument('--ft_portion', default='complete', type=str, help='The portion of the model to apply fine tuning, either complete or last_layer')
parser.add_argument('--no_train', action='store_true', help='If true, training is not performed.')
parser.set_defaults(no_train=False)
parser.add_argument('--no_val', action='store_true', help='If true, validation is not performed.')
parser.set_defaults(no_val=False)
parser.add_argument('--test', action='store_true', help='If true, test is performed.')
parser.set_defaults(test=False)
parser.add_argument('--test_subset', default='val', type=str, help='Used subset in test (val | test)')
parser.add_argument('--scale_in_test', default=1.0, type=float, help='Spatial scale in test')
parser.add_argument('--crop_position_in_test', default='c', type=str, help='Cropping method (c | tl | tr | bl | br) in test')
parser.add_argument('--no_softmax_in_test', action='store_true', help='If true, output for each clip is not normalized using softmax.')
parser.set_defaults(no_softmax_in_test=False)
parser.add_argument('--no_cuda', action='store_true', help='If true, cuda is not used.')
parser.set_defaults(no_cuda=False)
parser.add_argument('--n_threads', default=16, type=int, help='Number of threads for multi-thread loading')
parser.add_argument('--checkpoint', default=10, type=int, help='Trained model is saved at every this epochs.')
    parser.add_argument('--no_hflip', action='store_true', help='If true, horizontal flipping is not performed.')
parser.set_defaults(no_hflip=False)
parser.add_argument('--norm_value', default=1, type=int, help='If 1, range of inputs is [0-255]. If 255, range of inputs is [0-1].')
    parser.add_argument('--model', default='resnet', type=str, help='(resnet | preresnet | wideresnet | resnext | densenet)')
parser.add_argument('--version', default=1.1, type=float, help='Version of the model')
parser.add_argument('--model_depth', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)')
parser.add_argument('--resnet_shortcut', default='B', type=str, help='Shortcut type of resnet (A | B)')
parser.add_argument('--wide_resnet_k', default=2, type=int, help='Wide resnet k')
parser.add_argument('--resnext_cardinality', default=32, type=int, help='ResNeXt cardinality')
parser.add_argument('--groups', default=3, type=int, help='The number of groups at group convolutions at conv layers')
parser.add_argument('--width_mult', default=1.0, type=float, help='The applied width multiplier to scale number of filters')
parser.add_argument('--manual_seed', default=1, type=int, help='Manually set random seed')
parser.add_argument('--train_validate', action='store_true', help='If true, test is performed.')
parser.set_defaults(train_validate=False)
args = parser.parse_args()
return args
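# Hedged CLI sketch (the entry-point script name and flag values are
# assumptions, not part of this file):
#
#   python main.py --root_path /data --dataset ucf101 --n_classes 101 \
#       --model resnext --model_depth 101 --batch_size 64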
def parse_opts_online():
# Real-time test arguments with detector and classifier architecture
parser = argparse.ArgumentParser()
parser.add_argument('--root_path', default='/root/data/ActivityNet', type=str, help='Root directory path of data')
parser.add_argument('--video_path', default='video_kinetics_jpg', type=str, help='Directory path of Videos')
parser.add_argument('--video', default='data2/EgoGesture/videos/Subject02/Scene1/Color/rgb1.avi', type=str, help='Directory path of test Videos')
parser.add_argument('--whole_path', default='video_kinetics_jpg', type=str, help='The whole path of Videos')
parser.add_argument('--annotation_path', default='kinetics.json', type=str, help='Annotation file path')
parser.add_argument('--result_path', default='results', type=str, help='Result directory path')
parser.add_argument('--store_name', default='model', type=str, help='Name to store checkpoints')
parser.add_argument('--modality', default='RGB', type=str, help='Modality of input data. RGB, Flow or RGBFlow')
parser.add_argument('--modality_det', default='RGB', type=str, help='Modality of input data. RGB, Flow or RGBFlow')
parser.add_argument('--modality_clf', default='RGB', type=str, help='Modality of input data. RGB, Flow or RGBFlow')
parser.add_argument('--dataset', default='kinetics', type=str,
help='Used dataset (activitynet | kinetics | ucf101 | hmdb51)')
parser.add_argument('--n_classes_det', default=400, type=int,
help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)')
parser.add_argument('--n_finetune_classes_det', default=400, type=int,
help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.')
parser.add_argument('--n_classes_clf', default=400, type=int,
help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)')
parser.add_argument('--n_finetune_classes_clf', default=400, type=int,
help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.')
parser.add_argument('--n_classes', default=400, type=int,
help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)')
parser.add_argument('--n_finetune_classes', default=400, type=int,
help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.')
parser.add_argument('--sample_size', default=112, type=int, help='Height and width of inputs')
parser.add_argument('--sample_duration_det', default=16, type=int, help='Temporal duration of inputs')
parser.add_argument('--sample_duration_clf', default=16, type=int, help='Temporal duration of inputs')
parser.add_argument('--sample_duration', default=16, type=int, help='Temporal duration of inputs')
parser.add_argument('--initial_scale', default=1.0, type=float, help='Initial scale for multiscale cropping')
parser.add_argument('--n_scales', default=5, type=int, help='Number of scales for multiscale cropping')
parser.add_argument('--scale_step', default=0.84089641525, type=float, help='Scale step for multiscale cropping')
parser.add_argument('--train_crop', default='corner', type=str,
help='Spatial cropping method in training. random is uniform. corner is selection from 4 corners and 1 center. (random | corner | center)')
parser.add_argument('--learning_rate', default=0.1, type=float,
help='Initial learning rate (divided by 10 while training by lr scheduler)')
parser.add_argument('--lr_steps', default=[10, 20, 30, 40, 100], type=float, nargs="+", metavar='LRSteps',
help='epochs to decay learning rate by 10')
parser.add_argument('--momentum', default=0.9, type=float, help='Momentum')
parser.add_argument('--dampening', default=0.9, type=float, help='dampening of SGD')
parser.add_argument('--weight_decay', default=1e-3, type=float, help='Weight Decay')
parser.add_argument('--mean_dataset', default='activitynet', type=str,
help='dataset for mean values of mean subtraction (activitynet | kinetics)')
parser.add_argument('--no_mean_norm', action='store_true', help='If true, inputs are not normalized by mean.')
parser.set_defaults(no_mean_norm=False)
parser.add_argument('--std_norm', action='store_true', help='If true, inputs are normalized by standard deviation.')
parser.set_defaults(std_norm=False)
parser.add_argument('--nesterov', action='store_true', help='Nesterov momentum')
parser.set_defaults(nesterov=False)
parser.add_argument('--optimizer', default='sgd', type=str, help='Currently only support SGD')
parser.add_argument('--lr_patience', default=10, type=int,
help='Patience of LR scheduler. See documentation of ReduceLROnPlateau.')
parser.add_argument('--batch_size', default=128, type=int, help='Batch Size')
parser.add_argument('--n_epochs', default=200, type=int, help='Number of total epochs to run')
parser.add_argument('--begin_epoch', default=1, type=int,
help='Training begins at this epoch. Previous trained model indicated by resume_path is loaded.')
parser.add_argument('--n_val_samples', default=3, type=int, help='Number of validation samples for each activity')
parser.add_argument('--resume_path_det', default='', type=str, help='Save data (.pth) of previous training')
parser.add_argument('--resume_path_clf', default='', type=str, help='Save data (.pth) of previous training')
parser.add_argument('--resume_path', default='', type=str, help='Save data (.pth) of previous training')
parser.add_argument('--pretrain_path_det', default='', type=str, help='Pretrained model (.pth)')
parser.add_argument('--pretrain_path_clf', default='', type=str, help='Pretrained model (.pth)')
parser.add_argument('--pretrain_path', default='', type=str, help='Pretrained model (.pth)')
parser.add_argument('--ft_begin_index', default=0, type=int, help='Begin block index of fine-tuning')
parser.add_argument('--no_train', action='store_true', help='If true, training is not performed.')
parser.set_defaults(no_train=False)
parser.add_argument('--no_val', action='store_true', help='If true, validation is not performed.')
parser.set_defaults(no_val=False)
parser.add_argument('--test', action='store_true', help='If true, test is performed.')
parser.set_defaults(test=True)
parser.add_argument('--test_subset', default='val', type=str, help='Used subset in test (val | test)')
parser.add_argument('--scale_in_test', default=1.0, type=float, help='Spatial scale in test')
parser.add_argument('--crop_position_in_test', default='c', type=str,
help='Cropping method (c | tl | tr | bl | br) in test')
parser.add_argument('--no_softmax_in_test', action='store_true',
help='If true, output for each clip is not normalized using softmax.')
parser.set_defaults(no_softmax_in_test=False)
parser.add_argument('--no_cuda', action='store_true', help='If true, cuda is not used.')
parser.set_defaults(no_cuda=False)
parser.add_argument('--n_threads', default=4, type=int, help='Number of threads for multi-thread loading')
parser.add_argument('--checkpoint', default=10, type=int, help='Trained model is saved at every this epochs.')
    parser.add_argument('--no_hflip', action='store_true', help='If true, horizontal flipping is not performed.')
parser.set_defaults(no_hflip=False)
parser.add_argument('--norm_value', default=1, type=int,
help='If 1, range of inputs is [0-255]. If 255, range of inputs is [0-1].')
parser.add_argument('--model_det', default='resnet', type=str,
                        help='(resnet | preresnet | wideresnet | resnext | densenet)')
parser.add_argument('--model_depth_det', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)')
parser.add_argument('--resnet_shortcut_det', default='B', type=str, help='Shortcut type of resnet (A | B)')
parser.add_argument('--wide_resnet_k_det', default=2, type=int, help='Wide resnet k')
parser.add_argument('--resnext_cardinality_det', default=32, type=int, help='ResNeXt cardinality')
parser.add_argument('--model', default='resnet', type=str,
                        help='(resnet | preresnet | wideresnet | resnext | densenet)')
parser.add_argument('--model_depth', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)')
parser.add_argument('--resnet_shortcut', default='B', type=str, help='Shortcut type of resnet (A | B)')
parser.add_argument('--wide_resnet_k', default=2, type=int, help='Wide resnet k')
parser.add_argument('--resnext_cardinality', default=32, type=int, help='ResNeXt cardinality')
parser.add_argument('--model_clf', default='resnet', type=str,
                        help='(resnet | preresnet | wideresnet | resnext | densenet)')
parser.add_argument('--model_depth_clf', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)')
parser.add_argument('--resnet_shortcut_clf', default='B', type=str, help='Shortcut type of resnet (A | B)')
parser.add_argument('--wide_resnet_k_clf', default=2, type=int, help='Wide resnet k')
parser.add_argument('--resnext_cardinality_clf', default=32, type=int, help='ResNeXt cardinality')
parser.add_argument('--width_mult', default=1.0, type=float, help='The applied width multiplier to scale number of filters')
parser.add_argument('--width_mult_det', default=1.0, type=float, help='The applied width multiplier to scale number of filters')
parser.add_argument('--width_mult_clf', default=1.0, type=float, help='The applied width multiplier to scale number of filters')
parser.add_argument('--manual_seed', default=1, type=int, help='Manually set random seed')
parser.add_argument('--det_strategy', default='raw', type=str, help='Detector filter (raw | median | ma | ewma)')
parser.add_argument('--det_queue_size', default=1, type=int, help='Detector queue size')
    parser.add_argument('--det_counter', default=1, type=float, help='Number of consecutive detections')
parser.add_argument('--clf_strategy', default='raw', type=str, help='Classifier filter (raw | median | ma | ewma)')
parser.add_argument('--clf_queue_size', default=1, type=int, help='Classifier queue size')
parser.add_argument('--clf_threshold_pre', default=1, type=float, help='Cumulative sum threshold to prepredict')
parser.add_argument('--clf_threshold_final', default=1, type=float,
help='Cumulative sum threshold to predict at the end')
    parser.add_argument('--stride_len', default=1, type=int, help='Stride length of the video loader window')
parser.add_argument('--ft_portion', default='complete', type=str, help='The portion of the model to apply fine tuning, either complete or last_layer')
parser.add_argument('--groups', default=3, type=int, help='The number of groups at group convolutions at conv layers')
parser.add_argument('--downsample', default=1, type=int, help='Downsampling. Selecting 1 frame out of N')
args = parser.parse_args()
return args
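# Hedged CLI sketch for the online (detector + classifier) setup; the script
# name and checkpoint paths are assumptions:
#
#   python online_test.py --video path/to/clip.avi \
#       --resume_path_det det.pth --resume_path_clf clf.pth \
#       --det_queue_size 4 --clf_queue_size 16 --clf_threshold_final 0.15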
| 88.429245
| 208
| 0.70854
|
716d7b434e10d735b6becfc3a403525dbf557bd1
| 489
|
py
|
Python
|
tests/test_user.py
|
iyerikuzwe/pers-blog
|
ee773d6b3cde282b0807b119970ecd981ce52f62
|
[
"Unlicense"
] | null | null | null |
tests/test_user.py
|
iyerikuzwe/pers-blog
|
ee773d6b3cde282b0807b119970ecd981ce52f62
|
[
"Unlicense"
] | null | null | null |
tests/test_user.py
|
iyerikuzwe/pers-blog
|
ee773d6b3cde282b0807b119970ecd981ce52f62
|
[
"Unlicense"
] | null | null | null |
import unittest
from app.models import User
class UserModelTest(unittest.TestCase):
def setUp(self):
        self.new_user = User(password='banana')
def test_password_setter(self):
self.assertTrue(self.new_user.pass_secure is not None)
def test_no_access_password(self):
with self.assertRaises(AttributeError):
self.new_user.password
def test_password_verification(self):
self.assertTrue(self.new_user.verify_password('banana'))
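# Run sketch (module path is an assumption based on the file location):
#   python -m unittest tests.test_user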
| 25.736842
| 64
| 0.715746
|
b7156e02358fd82916750860a15bcc78ad74b2b8
| 18,223
|
py
|
Python
|
qiskit/quantum_info/operators/pauli.py
|
tareqdandachi/qiskit-terra
|
5221fe330adba5529bfa22dc25262ac8e6291aaf
|
[
"Apache-2.0"
] | 3
|
2019-05-19T17:39:38.000Z
|
2020-01-28T19:59:18.000Z
|
qiskit/quantum_info/operators/pauli.py
|
tareqdandachi/qiskit-terra
|
5221fe330adba5529bfa22dc25262ac8e6291aaf
|
[
"Apache-2.0"
] | 4
|
2019-05-13T15:28:46.000Z
|
2019-12-19T20:47:02.000Z
|
qiskit/quantum_info/operators/pauli.py
|
tareqdandachi/qiskit-terra
|
5221fe330adba5529bfa22dc25262ac8e6291aaf
|
[
"Apache-2.0"
] | 1
|
2021-07-07T16:55:41.000Z
|
2021-07-07T16:55:41.000Z
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name,assignment-from-no-return
"""
Tools for working with Pauli Operators.
A simple pauli class and some tools.
"""
import numpy as np
from scipy import sparse
from qiskit.exceptions import QiskitError
def _make_np_bool(arr):
if not isinstance(arr, (list, np.ndarray, tuple)):
arr = [arr]
arr = np.asarray(arr).astype(np.bool)
return arr
def _count_set_bits(i):
"""
Counts the number of set bits in a uint (or a numpy array of uints).
"""
i = i - ((i >> 1) & 0x55555555)
i = (i & 0x33333333) + ((i >> 2) & 0x33333333)
return (((i + (i >> 4) & 0xF0F0F0F) * 0x1010101) & 0xffffffff) >> 24
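# Quick sanity sketch for the SWAR popcount above (illustrative values):
# _count_set_bits(0b1011) returns 3, and it works elementwise on numpy
# uint arrays, e.g. [0, 1, 255] -> [0, 1, 8].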
class Pauli:
"""A simple class representing Pauli Operators.
The form is P_zx = (-i)^dot(z,x) Z^z X^x where z and x are elements of Z_2^n.
That is, there are 4^n elements (no phases in this group).
For example, for 1 qubit
P_00 = Z^0 X^0 = I
P_01 = X
P_10 = Z
P_11 = -iZX = (-i) iY = Y
The overload __mul__ does not track the sign: P1*P2 = Z^(z1+z2) X^(x1+x2) but
sgn_prod does __mul__ and track the phase: P1*P2 = (-i)^dot(z1+z2,x1+x2) Z^(z1+z2) X^(x1+x2)
where the sums are taken modulo 2.
Pauli vectors z and x are supposed to be defined as boolean numpy arrays.
Ref.
Jeroen Dehaene and Bart De Moor
Clifford group, stabilizer states, and linear and quadratic operations
over GF(2)
Phys. Rev. A 68, 042318 – Published 20 October 2003
"""
def __init__(self, z=None, x=None, label=None):
r"""Make the Pauli object.
Note that, for the qubit index:
- Order of z, x vectors is q_0 ... q_{n-1},
- Order of pauli label is q_{n-1} ... q_0
E.g.,
- z and x vectors: z = [z_0 ... z_{n-1}], x = [x_0 ... x_{n-1}]
- a pauli is $P_{n-1} \otimes ... \otimes P_0$
Args:
z (numpy.ndarray): boolean, z vector
x (numpy.ndarray): boolean, x vector
label (str): pauli label
"""
if label is not None:
a = Pauli.from_label(label)
self._z = a.z
self._x = a.x
else:
self._init_from_bool(z, x)
@classmethod
def from_label(cls, label):
r"""Take pauli string to construct pauli.
The qubit index of pauli label is q_{n-1} ... q_0.
E.g., a pauli is $P_{n-1} \otimes ... \otimes P_0$
Args:
label (str): pauli label
Returns:
Pauli: the constructed pauli
Raises:
QiskitError: invalid character in the label
"""
z = np.zeros(len(label), dtype=np.bool)
x = np.zeros(len(label), dtype=np.bool)
for i, char in enumerate(label):
if char == 'X':
x[-i - 1] = True
elif char == 'Z':
z[-i - 1] = True
elif char == 'Y':
z[-i - 1] = True
x[-i - 1] = True
elif char != 'I':
raise QiskitError("Pauli string must be only consisted of 'I', 'X', "
"'Y' or 'Z' but you have {}.".format(char))
return cls(z=z, x=x)
def _init_from_bool(self, z, x):
"""Construct pauli from boolean array.
Args:
z (numpy.ndarray): boolean, z vector
x (numpy.ndarray): boolean, x vector
Returns:
Pauli: self
Raises:
QiskitError: if z or x are None or the length of z and x are different.
"""
if z is None:
raise QiskitError("z vector must not be None.")
if x is None:
raise QiskitError("x vector must not be None.")
if len(z) != len(x):
raise QiskitError("length of z and x vectors must be "
"the same. (z: {} vs x: {})".format(len(z), len(x)))
z = _make_np_bool(z)
x = _make_np_bool(x)
self._z = z
self._x = x
return self
def __len__(self):
"""Return number of qubits."""
return len(self._z)
def __repr__(self):
"""Return the representation of self."""
        z = list(self._z)
        x = list(self._x)
        return self.__class__.__name__ + "(z={}, x={})".format(z, x)
def __str__(self):
"""Output the Pauli label."""
label = ''
for z, x in zip(self._z[::-1], self._x[::-1]):
if not z and not x:
label = ''.join([label, 'I'])
elif not z and x:
label = ''.join([label, 'X'])
elif z and not x:
label = ''.join([label, 'Z'])
else:
label = ''.join([label, 'Y'])
return label
def __eq__(self, other):
"""Return True if all Pauli terms are equal.
Args:
other (Pauli): other pauli
Returns:
bool: are self and other equal.
"""
res = False
if len(self) == len(other):
if np.all(self._z == other.z) and np.all(self._x == other.x):
res = True
return res
def __mul__(self, other):
"""Multiply two Paulis.
Returns:
Pauli: the multiplied pauli.
Raises:
QiskitError: if the number of qubits of two paulis are different.
"""
if len(self) != len(other):
raise QiskitError("These Paulis cannot be multiplied - different "
"number of qubits. ({} vs {})".format(len(self), len(other)))
z_new = np.logical_xor(self._z, other.z)
x_new = np.logical_xor(self._x, other.x)
return Pauli(z_new, x_new)
def __imul__(self, other):
"""Multiply two Paulis.
Returns:
Pauli: the multiplied pauli and save to itself, in-place computation.
Raises:
QiskitError: if the number of qubits of two paulis are different.
"""
if len(self) != len(other):
raise QiskitError("These Paulis cannot be multiplied - different "
"number of qubits. ({} vs {})".format(len(self), len(other)))
self._z = np.logical_xor(self._z, other.z)
self._x = np.logical_xor(self._x, other.x)
return self
def __hash__(self):
"""Make object is hashable, based on the pauli label to hash."""
return hash(str(self))
@property
def z(self):
"""Getter of z."""
return self._z
@property
def x(self):
"""Getter of x."""
return self._x
@staticmethod
def sgn_prod(p1, p2):
r"""
Multiply two Paulis and track the phase.
$P_3 = P_1 \otimes P_2$: X*Y
Args:
p1 (Pauli): pauli 1
p2 (Pauli): pauli 2
Returns:
Pauli: the multiplied pauli
complex: the sign of the multiplication, 1, -1, 1j or -1j
"""
phase = Pauli._prod_phase(p1, p2)
new_pauli = p1 * p2
return new_pauli, phase
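    # Worked example of the phase tracking above (consistent with X*Y = iZ):
    #
    #   >>> new_pauli, phase = Pauli.sgn_prod(Pauli(label='X'), Pauli(label='Y'))
    #   >>> str(new_pauli), phase
    #   ('Z', 1j)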
@property
def numberofqubits(self):
"""Number of qubits."""
return len(self)
def to_label(self):
"""Present the pauli labels in I, X, Y, Z format.
Order is $q_{n-1} .... q_0$
Returns:
str: pauli label
"""
return str(self)
def to_matrix(self):
r"""
Convert Pauli to a matrix representation.
Order is q_{n-1} .... q_0, i.e., $P_{n-1} \otimes ... P_0$
Returns:
numpy.array: a matrix that represents the pauli.
"""
mat = self.to_spmatrix()
return mat.toarray()
def to_spmatrix(self):
r"""
Convert Pauli to a sparse matrix representation (CSR format).
Order is q_{n-1} .... q_0, i.e., $P_{n-1} \otimes ... P_0$
Returns:
scipy.sparse.csr_matrix: a sparse matrix with CSR format that
represents the pauli.
"""
        _x, _z = self._x, self._z
        n = 2**len(_x)
        # Each Pauli matrix has exactly one non-zero entry per row: the X
        # part permutes the columns (row -> row ^ xs) and the Z part sets
        # the sign from the parity of the bits shared by zs and the row.
        twos_array = 1 << np.arange(len(_x))
        xs = np.array(_x).dot(twos_array)
        zs = np.array(_z).dot(twos_array)
        rows = np.arange(n+1, dtype=np.uint)
        columns = rows ^ xs
        # (-i)^(x.z) accounts for the Y = -iZX factors in P = (-i)^(z.x) Z^z X^x.
        global_factor = (-1j)**np.dot(np.array(_x, dtype=np.uint), _z)
        data = global_factor*(-1)**np.mod(_count_set_bits(zs & rows), 2)
        # `rows` doubles as the CSR indptr (one entry per row); scipy reads
        # nnz from indptr[-1] = n, so the trailing extra element of
        # data/columns is ignored.
        return sparse.csr_matrix((data, columns, rows), shape=(n, n))
def to_operator(self):
"""Convert to Operator object."""
# Place import here to avoid cyclic import from circuit visualization
from qiskit.quantum_info.operators.operator import Operator
return Operator(self.to_matrix())
def to_instruction(self):
"""Convert to Pauli circuit instruction."""
from qiskit.circuit import QuantumCircuit, QuantumRegister
from qiskit.extensions.standard import IdGate, XGate, YGate, ZGate
gates = {'I': IdGate(), 'X': XGate(), 'Y': YGate(), 'Z': ZGate()}
label = self.to_label()
n_qubits = self.numberofqubits
qreg = QuantumRegister(n_qubits)
circuit = QuantumCircuit(qreg, name='Pauli:{}'.format(label))
for i, pauli in enumerate(reversed(label)):
circuit.append(gates[pauli], [qreg[i]])
return circuit.to_instruction()
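    # Sketch: Pauli.from_label('XZ').to_instruction() appends Z to qreg[0]
    # and X to qreg[1], since the label order q_{n-1} ... q_0 is reversed
    # before appending.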
def update_z(self, z, indices=None):
"""
Update partial or entire z.
Args:
z (numpy.ndarray or list): to-be-updated z
indices (numpy.ndarray or list or optional): to-be-updated qubit indices
Returns:
Pauli: self
Raises:
QiskitError: when updating whole z, the number of qubits must be the same.
"""
z = _make_np_bool(z)
if indices is None:
if len(self._z) != len(z):
raise QiskitError("During updating whole z, you can not "
"change the number of qubits.")
self._z = z
else:
if not isinstance(indices, list) and not isinstance(indices, np.ndarray):
indices = [indices]
for p, idx in enumerate(indices):
self._z[idx] = z[p]
return self
def update_x(self, x, indices=None):
"""
Update partial or entire x.
Args:
x (numpy.ndarray or list): to-be-updated x
indices (numpy.ndarray or list or optional): to-be-updated qubit indices
Returns:
Pauli: self
Raises:
QiskitError: when updating whole x, the number of qubits must be the same.
"""
x = _make_np_bool(x)
if indices is None:
if len(self._x) != len(x):
raise QiskitError("During updating whole x, you can not change "
"the number of qubits.")
self._x = x
else:
if not isinstance(indices, list) and not isinstance(indices, np.ndarray):
indices = [indices]
for p, idx in enumerate(indices):
self._x[idx] = x[p]
return self
def insert_paulis(self, indices=None, paulis=None, pauli_labels=None):
"""
Insert or append pauli to the targeted indices.
If indices is None, it means append at the end.
Args:
indices (list[int]): the qubit indices to be inserted
paulis (Pauli): the to-be-inserted or appended pauli
pauli_labels (list[str]): the to-be-inserted or appended pauli label
Note:
the indices refers to the location of original paulis,
e.g. if indices = [0, 2], pauli_labels = ['Z', 'I'] and original pauli = 'ZYXI'
the pauli will be updated to ZY'I'XI'Z'
'Z' and 'I' are inserted before the qubit at 0 and 2.
Returns:
Pauli: self
Raises:
QiskitError: provide both `paulis` and `pauli_labels` at the same time
"""
if pauli_labels is not None:
if paulis is not None:
raise QiskitError("Please only provide either `paulis` or `pauli_labels`")
if isinstance(pauli_labels, str):
pauli_labels = list(pauli_labels)
# since pauli label is in reversed order.
paulis = Pauli.from_label(pauli_labels[::-1])
if indices is None: # append
self._z = np.concatenate((self._z, paulis.z))
self._x = np.concatenate((self._x, paulis.x))
else:
if not isinstance(indices, list):
indices = [indices]
self._z = np.insert(self._z, indices, paulis.z)
self._x = np.insert(self._x, indices, paulis.x)
return self
def append_paulis(self, paulis=None, pauli_labels=None):
"""
Append pauli at the end.
Args:
paulis (Pauli): the to-be-inserted or appended pauli
pauli_labels (list[str]): the to-be-inserted or appended pauli label
Returns:
Pauli: self
"""
return self.insert_paulis(None, paulis=paulis, pauli_labels=pauli_labels)
def delete_qubits(self, indices):
"""
Delete pauli at the indices.
Args:
indices(list[int]): the indices of to-be-deleted paulis
Returns:
Pauli: self
"""
if not isinstance(indices, list):
indices = [indices]
self._z = np.delete(self._z, indices)
self._x = np.delete(self._x, indices)
return self
@classmethod
def random(cls, num_qubits, seed=None):
"""Return a random Pauli on number of qubits.
Args:
num_qubits (int): the number of qubits
seed (int): Optional. To set a random seed.
Returns:
Pauli: the random pauli
"""
rng = np.random.RandomState(seed)
z = rng.randint(2, size=num_qubits).astype(np.bool)
x = rng.randint(2, size=num_qubits).astype(np.bool)
return cls(z, x)
@classmethod
def pauli_single(cls, num_qubits, index, pauli_label):
"""
Generate single qubit pauli at index with pauli_label with length num_qubits.
Args:
num_qubits (int): the length of pauli
index (int): the qubit index to insert the single qubit
pauli_label (str): pauli
Returns:
Pauli: single qubit pauli
"""
tmp = Pauli.from_label(pauli_label)
z = np.zeros(num_qubits, dtype=np.bool)
x = np.zeros(num_qubits, dtype=np.bool)
z[index] = tmp.z[0]
x[index] = tmp.x[0]
return cls(z, x)
def kron(self, other):
r"""Kronecker product of two paulis.
Order is $P_2 (other) \otimes P_1 (self)$
Args:
other (Pauli): P2
Returns:
Pauli: self
"""
self.insert_paulis(indices=None, paulis=other)
return self
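    # Order sketch for kron (other is stacked on the high-order side):
    #
    #   >>> str(Pauli(label='X').kron(Pauli(label='Z')))
    #   'ZX'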
@staticmethod
def _prod_phase(p1, p2):
phase_changes = 0
for z1, x1, z2, x2 in zip(p1.z, p1.x, p2.z, p2.x):
if z1 and not x1: # Z
if x2:
phase_changes = phase_changes - 1 if z2 else phase_changes + 1
elif not z1 and x1: # X
if z2:
phase_changes = phase_changes + 1 if x2 else phase_changes - 1
elif z1 and x1: # Y
if not z2 and x2: # X
phase_changes -= 1
elif z2 and not x2: # Z
phase_changes += 1
phase = (1j) ** (phase_changes % 4)
return phase
def pauli_group(number_of_qubits, case='weight'):
"""Return the Pauli group with 4^n elements.
The phases have been removed.
case 'weight' is ordered by Pauli weights and
case 'tensor' is ordered by I,X,Y,Z counting lowest qubit fastest.
Args:
number_of_qubits (int): number of qubits
case (str): determines ordering of group elements ('weight' or 'tensor')
Returns:
list: list of Pauli objects
Raises:
QiskitError: case is not 'weight' or 'tensor'
QiskitError: number_of_qubits is larger than 4
"""
if number_of_qubits < 5:
temp_set = []
if case == 'weight':
tmp = pauli_group(number_of_qubits, case='tensor')
# sort on the weight of the Pauli operator
return sorted(tmp, key=lambda x: -np.count_nonzero(
np.array(x.to_label(), 'c') == b'I'))
elif case == 'tensor':
# the Pauli set is in tensor order II IX IY IZ XI ...
for k in range(4 ** number_of_qubits):
z = np.zeros(number_of_qubits, dtype=np.bool)
x = np.zeros(number_of_qubits, dtype=np.bool)
# looping over all the qubits
for j in range(number_of_qubits):
# making the Pauli for each j fill it in from the
# end first
element = (k // (4 ** j)) % 4
if element == 1:
x[j] = True
elif element == 2:
z[j] = True
x[j] = True
elif element == 3:
z[j] = True
temp_set.append(Pauli(z, x))
return temp_set
else:
raise QiskitError("Only support 'weight' or 'tensor' cases "
"but you have {}.".format(case))
raise QiskitError("Only support number of qubits is less than 5")
| 31.692174
| 96
| 0.541239
|
755e029388eb6e0af6f9aa285c6436b0a379736f
| 3,931
|
py
|
Python
|
models/networks/normalization.py
|
ustato/sber-swap
|
1140e085e165ed14e1098d81b7abd63feafedecf
|
[
"Apache-2.0"
] | 210
|
2022-01-24T12:58:08.000Z
|
2022-03-31T07:54:22.000Z
|
models/networks/normalization.py
|
ustato/sber-swap
|
1140e085e165ed14e1098d81b7abd63feafedecf
|
[
"Apache-2.0"
] | 16
|
2022-01-24T23:45:04.000Z
|
2022-03-30T22:53:46.000Z
|
models/networks/normalization.py
|
ustato/sber-swap
|
1140e085e165ed14e1098d81b7abd63feafedecf
|
[
"Apache-2.0"
] | 35
|
2022-01-25T00:55:36.000Z
|
2022-03-30T22:45:27.000Z
|
"""
Copyright (C) 2019 NVIDIA Corporation. All rights reserved.
Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode).
"""
import re
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.networks.sync_batchnorm import SynchronizedBatchNorm2d
import torch.nn.utils.spectral_norm as spectral_norm
def get_nonspade_norm_layer(opt, norm_type='instance'):
# helper function to get # output channels of the previous layer
def get_out_channel(layer):
if hasattr(layer, 'out_channels'):
return getattr(layer, 'out_channels')
return layer.weight.size(0)
# this function will be returned
def add_norm_layer(layer):
nonlocal norm_type
if norm_type.startswith('spectral'):
layer = spectral_norm(layer)
subnorm_type = norm_type[len('spectral'):]
if subnorm_type == 'none' or len(subnorm_type) == 0:
return layer
# remove bias in the previous layer, which is meaningless
# since it has no effect after normalization
if getattr(layer, 'bias', None) is not None:
delattr(layer, 'bias')
layer.register_parameter('bias', None)
if subnorm_type == 'batch':
norm_layer = nn.BatchNorm2d(get_out_channel(layer), affine=True)
elif subnorm_type == 'sync_batch':
norm_layer = SynchronizedBatchNorm2d(get_out_channel(layer), affine=True)
elif subnorm_type == 'instance':
norm_layer = nn.InstanceNorm2d(get_out_channel(layer), affine=False)
else:
raise ValueError('normalization layer %s is not recognized' % subnorm_type)
return nn.Sequential(layer, norm_layer)
return add_norm_layer
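# Hedged usage sketch (the conv layer is illustrative):
#   norm_layer = get_nonspade_norm_layer(opt, 'spectralinstance')
#   block = norm_layer(nn.Conv2d(3, 64, 3))  # spectral norm + InstanceNorm2d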
class InstanceNorm2d(nn.Module):
    def __init__(self, num_features=None, epsilon=1e-8, **kwargs):
        # num_features is accepted (and ignored) so callers can pass a
        # channel count as with nn.InstanceNorm2d; this norm is
        # channel-agnostic and only needs epsilon.
        super().__init__(**kwargs)
        self.epsilon = epsilon
    def forward(self, x):
        # mean subtraction is left disabled (see the commented line), so this
        # is an RMS-style normalization over the spatial dimensions
        # x = x - torch.mean(x, (2, 3), True)
        tmp = torch.mul(x, x)  # x ** 2
        tmp = torch.rsqrt(torch.mean(tmp, (2, 3), True) + self.epsilon)
        return x * tmp
class SPADE(nn.Module):
def __init__(self, config_text, norm_nc, label_nc):
super().__init__()
assert config_text.startswith('spade')
        parsed = re.search(r'spade(\D+)(\d)x\d', config_text)
param_free_norm_type = str(parsed.group(1))
ks = int(parsed.group(2))
if param_free_norm_type == 'instance':
self.param_free_norm = InstanceNorm2d(norm_nc)
elif param_free_norm_type == 'syncbatch':
self.param_free_norm = SynchronizedBatchNorm2d(norm_nc, affine=False)
elif param_free_norm_type == 'batch':
self.param_free_norm = nn.BatchNorm2d(norm_nc, affine=False)
else:
raise ValueError('%s is not a recognized param-free norm type in SPADE'
% param_free_norm_type)
# The dimension of the intermediate embedding space. Yes, hardcoded.
        nhidden = 128 if norm_nc > 128 else norm_nc
pw = ks // 2
self.mlp_shared = nn.Sequential(
nn.Conv2d(label_nc, nhidden, kernel_size=ks, padding=pw),
nn.ReLU()
)
self.mlp_gamma = nn.Conv2d(nhidden, norm_nc, kernel_size=ks, padding=pw, bias=False)
self.mlp_beta = nn.Conv2d(nhidden, norm_nc, kernel_size=ks, padding=pw, bias=False)
def forward(self, x, segmap):
# Part 1. generate parameter-free normalized activations
normalized = self.param_free_norm(x)
# Part 2. produce scaling and bias conditioned on semantic map
segmap = F.interpolate(segmap, size=x.size()[2:], mode='nearest')
actv = self.mlp_shared(segmap)
gamma = self.mlp_gamma(actv)
beta = self.mlp_beta(actv)
# apply scale and bias
out = normalized * gamma + beta
return out
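# Shape sketch for SPADE.forward (sizes are illustrative): with
# x of shape (N, norm_nc, H, W) and segmap of shape (N, label_nc, H', W'),
# segmap is resized to (H, W), gamma/beta come out as (N, norm_nc, H, W),
# and out = normalized * gamma + beta keeps the shape of x.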
| 36.398148
| 105
| 0.643348
|
25cc16a9f51ad037fe1ee19f962220f54cc9b727
| 3,193
|
py
|
Python
|
data/preprocess_ljspeech.py
|
jefflai108/VGNSL
|
0edc3db3691abbad2a505b2165bd99e7a62d784f
|
[
"MIT"
] | 1
|
2022-03-08T16:09:03.000Z
|
2022-03-08T16:09:03.000Z
|
data/preprocess_ljspeech.py
|
jefflai108/VGNSL
|
0edc3db3691abbad2a505b2165bd99e7a62d784f
|
[
"MIT"
] | null | null | null |
data/preprocess_ljspeech.py
|
jefflai108/VGNSL
|
0edc3db3691abbad2a505b2165bd99e7a62d784f
|
[
"MIT"
] | null | null | null |
import os
import string
import shutil
import tqdm
import benepar
import spacy
from nltk import Tree
def replace_leaves(tree, leaves):
if isinstance(tree, str):
return leaves[0]
left = 0
new_children = list()
for child in tree:
n_leaves = 1 if isinstance(child, str) else len(child.leaves())
new_child = replace_leaves(child, leaves[left:left+n_leaves])
new_children.append(new_child)
left += n_leaves
return Tree(tree.label(), new_children)
def remove_label(tree):
if len(tree) == 1:
if len(tree.leaves()) == 1:
return tree.leaves()[0]
return remove_label(tree[0])
new_children = list()
for child in tree:
new_child = remove_label(child)
new_children.append(new_child)
return Tree('', new_children)
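# Sketch of the tree helpers above (parse string is illustrative):
#   t = Tree.fromstring('(S (NP (DT the) (NN dog)) (VP (VBZ barks)))')
#   remove_label(t)  # -> roughly ( ( the dog) barks): labels stripped and
#                    #    unary chains collapsed to their leaf tokens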
def _read_file(scp):
with open(scp, 'r') as f:
content = f.readlines()
content = [x.strip('\n') for x in content]
return content
def _write_to_file(string, fpath):
f = open(fpath, 'w')
f.write(string)
f.close()
def prep_for_mfa(target_data_dir, wav_scp, text, ljspeech_download_dir):
# create directory contains {utt_id}.wav/{utt_id}.txt/{utt_id}-tree.txt for MFA
utt2wavpath = _read_file(wav_scp)
    utt2wavpath = {x.split()[0]: x.split()[1] for x in utt2wavpath}
utt2transcript = _read_file(text)
    utt2transcript = {x.split()[0]: ' '.join(x.split()[1:]) for x in utt2transcript}
exclude = set(string.punctuation)
# setup pre-trained English parser
nlp = spacy.load('en_core_web_md')
if spacy.__version__.startswith('2'):
nlp.add_pipe(benepar.BeneparComponent("benepar_en3"))
else:
nlp.add_pipe("benepar", config={"model": "benepar_en3"})
for utt in tqdm.tqdm(utt2wavpath.keys()):
full_wavpath = os.path.join(ljspeech_download_dir, utt2wavpath[utt])
assert os.path.exists(full_wavpath)
transcript = utt2transcript[utt].lower() # lower case
transcript = ''.join(ch for ch in transcript if ch not in exclude) # remove puncs
transcript = ' '.join(transcript.split()) # remove extra spaces
doc = nlp(transcript)
sent = list(doc.sents)[0]
tree = Tree.fromstring(sent._.parse_string)
tree = remove_label(tree)
tree = ' '.join(str(tree).replace('(', ' ( ').replace(')', ' ) ').split())
shutil.copyfile(full_wavpath, os.path.join(target_data_dir, utt + '.wav'))
_write_to_file(transcript, os.path.join(target_data_dir, utt + '.txt'))
_write_to_file(tree, os.path.join(target_data_dir, utt + '-tree.txt'))
if __name__ == '__main__':
split = 'eval1'
#split = 'dev'
orig_data_dir = '/data/sls/scratch/clai24/syntax/VGNSL-feature/data/LJspeech/' + split
target_data_dir = '/data/sls/scratch/clai24/syntax/VGNSL-feature/data/LJspeech/' + split + '-speaker'
ljspeech_download_dir = '/data/sls/temp/clai24/lottery-ticket/espnet/egs2/ljspeech/tts1'
prep_for_mfa(target_data_dir,
os.path.join(orig_data_dir, 'wav.scp'),
os.path.join(orig_data_dir, 'text'),
ljspeech_download_dir)
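# The per-utterance text cleanup performed above, as a standalone sketch:
#
#   import string
#   exclude = set(string.punctuation)
#   s = 'Hello,  World!'.lower()                      # 'hello,  world!'
#   s = ''.join(ch for ch in s if ch not in exclude)  # 'hello  world'
#   s = ' '.join(s.split())                           # 'hello world'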
| 35.087912
| 105
| 0.645474
|
3b0f8f8a55dd9fa85dd51be9ad69db0cd346df09
| 976
|
py
|
Python
|
tests/builder/executors/exception_handling_executor_test.py
|
sharp-pixel/opensearch-benchmark
|
32b2a68c3672f680fbc90a591f6c15b46701142e
|
[
"Apache-2.0"
] | 26
|
2021-12-09T06:58:53.000Z
|
2022-03-29T15:01:37.000Z
|
tests/builder/executors/exception_handling_executor_test.py
|
sharp-pixel/opensearch-benchmark
|
32b2a68c3672f680fbc90a591f6c15b46701142e
|
[
"Apache-2.0"
] | 63
|
2021-12-08T20:47:17.000Z
|
2022-03-31T18:21:31.000Z
|
tests/builder/executors/exception_handling_executor_test.py
|
sharp-pixel/opensearch-benchmark
|
32b2a68c3672f680fbc90a591f6c15b46701142e
|
[
"Apache-2.0"
] | 5
|
2021-12-09T10:17:30.000Z
|
2022-03-03T05:31:12.000Z
|
import unittest.mock as mock
from unittest import TestCase
from osbenchmark.builder.executors.exception_handling_shell_executor import ExceptionHandlingShellExecutor
from osbenchmark.exceptions import ExecutorError
class ExceptionHandlingShellExecutorTests(TestCase):
def setUp(self):
self.executor_impl = mock.Mock()
self.executor_impl.execute.return_value = None
self.executor_impl.copy.return_value = None
self.executor = ExceptionHandlingShellExecutor(self.executor_impl)
self.host = None
self.command = None
self.source = "/path/to/source"
self.destination = "/path/to/dest"
def test_success_executing_command(self):
self.executor.execute(self.host, self.command)
def test_failure_executing_command(self):
self.executor_impl.execute.side_effect = Exception("error")
with self.assertRaises(ExecutorError):
self.executor.execute(self.host, self.command)
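    # A sketch of the matching copy tests; the copy() signature below is an
    # assumption inferred from the mocked fields in setUp, not confirmed here:
    #
    #   def test_failure_copying_file(self):
    #       self.executor_impl.copy.side_effect = Exception("error")
    #       with self.assertRaises(ExecutorError):
    #           self.executor.copy(self.host, self.source, self.destination)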
| 34.857143
| 106
| 0.73873
|
dc450fbb522a1d3a54a25a0a05ac6f5c204cb0a7
| 9,106
|
py
|
Python
|
rpython/rtyper/raisingops.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 34
|
2015-07-09T04:53:27.000Z
|
2021-07-19T05:22:27.000Z
|
rpython/rtyper/raisingops.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 6
|
2015-05-30T17:20:45.000Z
|
2017-06-12T14:29:23.000Z
|
rpython/rtyper/raisingops.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 11
|
2015-09-07T14:26:08.000Z
|
2020-04-10T07:20:41.000Z
|
import sys
from rpython.rlib.rarithmetic import r_longlong, r_uint, intmask
from rpython.rtyper.lltypesystem.lloperation import llop
from rpython.rtyper.lltypesystem.lltype import Signed, SignedLongLong, \
UnsignedLongLong
#XXX original SIGNED_RIGHT_SHIFT_ZERO_FILLS not taken into account
#XXX assuming HAVE_LONG_LONG (int_mul_ovf)
#XXX should int_mod and int_floordiv return an intmask(...) instead?
LONG_MAX = sys.maxint
LONG_MIN = -sys.maxint-1
LLONG_MAX = r_longlong(2 ** (r_longlong.BITS-1) - 1)
LLONG_MIN = -LLONG_MAX-1
def int_floordiv_zer(x, y):
'''#define OP_INT_FLOORDIV_ZER(x,y,r,err) \
if ((y)) { OP_INT_FLOORDIV(x,y,r,err); } \
else FAIL_ZER(err, "integer division")
'''
if y:
return llop.int_floordiv(Signed, x, y)
else:
raise ZeroDivisionError("integer division")
def uint_floordiv_zer(x, y):
'''#define OP_UINT_FLOORDIV_ZER(x,y,r,err) \
if ((y)) { OP_UINT_FLOORDIV(x,y,r,err); } \
else FAIL_ZER(err, "unsigned integer division")
'''
if y:
return x / y
else:
raise ZeroDivisionError("unsigned integer division")
def llong_floordiv_zer(x, y):
'''#define OP_LLONG_FLOORDIV_ZER(x,y,r) \
if ((y)) { OP_LLONG_FLOORDIV(x,y,r); } \
else FAIL_ZER("integer division")
'''
if y:
return llop.llong_floordiv(SignedLongLong, x, y)
else:
raise ZeroDivisionError("integer division")
def ullong_floordiv_zer(x, y):
'''#define OP_ULLONG_FLOORDIV_ZER(x,y,r) \
if ((y)) { OP_ULLONG_FLOORDIV(x,y,r); } \
else FAIL_ZER("unsigned integer division")
'''
if y:
return llop.llong_floordiv(UnsignedLongLong, x, y)
else:
raise ZeroDivisionError("unsigned integer division")
def int_neg_ovf(x):
if x == LONG_MIN:
raise OverflowError("integer negate")
return -x
def llong_neg_ovf(x):
if x == LLONG_MIN:
raise OverflowError("integer negate")
return -x
def int_abs_ovf(x):
if x == LONG_MIN:
raise OverflowError("integer absolute")
if x < 0:
return -x
else:
return x
def llong_abs_ovf(x):
if x == LLONG_MIN:
raise OverflowError("integer absolute")
if x < 0:
return -x
else:
return x
def int_add_ovf(x, y):
'''#define OP_INT_ADD_OVF(x,y,r,err) \
OP_INT_ADD(x,y,r,err); \
if ((r^(x)) >= 0 || (r^(y)) >= 0); \
else FAIL_OVF(err, "integer addition")
'''
r = intmask(r_uint(x) + r_uint(y))
if r^x >= 0 or r^y >= 0:
return r
else:
raise OverflowError("integer addition")
def int_add_nonneg_ovf(x, y):
'''
OP_INT_ADD(x,y,r); \
if (r >= (x)); \
else FAIL_OVF("integer addition")
'''
r = intmask(r_uint(x) + r_uint(y))
if r >= x:
return r
else:
raise OverflowError("integer addition")
def int_sub_ovf(x, y):
'''#define OP_INT_SUB_OVF(x,y,r,err) \
OP_INT_SUB(x,y,r,err); \
if ((r^(x)) >= 0 || (r^~(y)) >= 0); \
else FAIL_OVF(err, "integer subtraction")
'''
r = intmask(r_uint(x) - r_uint(y))
if r^x >= 0 or r^~y >= 0:
return r
else:
raise OverflowError("integer subtraction")
def int_lshift_ovf(x, y):
'''#define OP_INT_LSHIFT_OVF(x,y,r,err) \
OP_INT_LSHIFT(x,y,r,err); \
if ((x) != Py_ARITHMETIC_RIGHT_SHIFT(long, r, (y))) \
FAIL_OVF(err, "x<<y losing bits or changing sign")
'''
r = x << y
if x != _Py_ARITHMETIC_RIGHT_SHIFT(r, y):
raise OverflowError("x<<y losing bits or changing sign")
else:
return r
def int_rshift_val(x, y):
'''#define OP_INT_RSHIFT_VAL(x,y,r,err) \
if ((y) >= 0) { OP_INT_RSHIFT(x,y,r,err); } \
else FAIL_VAL(err, "negative shift count")
'''
if y >= 0:
return _Py_ARITHMETIC_RIGHT_SHIFT(x, y)
else:
raise ValueError("negative shift count")
def int_lshift_val(x, y):
'''#define OP_INT_LSHIFT_VAL(x,y,r,err) \
if ((y) >= 0) { OP_INT_LSHIFT(x,y,r,err); } \
else FAIL_VAL(err, "negative shift count")
'''
if y >= 0:
return x << y
else:
raise ValueError("negative shift count")
def int_lshift_ovf_val(x, y):
'''#define OP_INT_LSHIFT_OVF_VAL(x,y,r,err) \
if ((y) >= 0) { OP_INT_LSHIFT_OVF(x,y,r,err); } \
else FAIL_VAL(err, "negative shift count")
'''
if y >= 0:
return int_lshift_ovf(x, y)
else:
raise ValueError("negative shift count")
def int_floordiv_ovf(x, y):
'''#define OP_INT_FLOORDIV_OVF(x,y,r,err) \
if ((y) == -1 && (x) < 0 && ((unsigned long)(x) << 1) == 0) \
FAIL_OVF(err, "integer division"); \
OP_INT_FLOORDIV(x,y,r,err)
'''
if y == -1 and x < 0 and (r_uint(x) << 1) == 0:
raise OverflowError("integer division")
else:
return llop.int_floordiv(Signed, x, y)
def int_floordiv_ovf_zer(x, y):
'''#define OP_INT_FLOORDIV_OVF_ZER(x,y,r,err) \
if ((y)) { OP_INT_FLOORDIV_OVF(x,y,r,err); } \
else FAIL_ZER(err, "integer division")
'''
if y:
return int_floordiv_ovf(x, y)
else:
raise ZeroDivisionError("integer division")
def int_mod_ovf(x, y):
'''#define OP_INT_MOD_OVF(x,y,r,err) \
if ((y) == -1 && (x) < 0 && ((unsigned long)(x) << 1) == 0) \
FAIL_OVF(err, "integer modulo"); \
OP_INT_MOD(x,y,r,err)
'''
if y == -1 and x < 0 and (r_uint(x) << 1) == 0:
raise OverflowError("integer modulo")
else:
return llop.int_mod(Signed, x, y)
def int_mod_zer(x, y):
'''#define OP_INT_MOD_ZER(x,y,r,err) \
if ((y)) { OP_INT_MOD(x,y,r,err); } \
else FAIL_ZER(err, "integer modulo")
'''
if y:
return llop.int_mod(Signed, x, y)
else:
raise ZeroDivisionError("integer modulo")
def uint_mod_zer(x, y):
'''#define OP_UINT_MOD_ZER(x,y,r,err) \
if ((y)) { OP_UINT_MOD(x,y,r,err); } \
else FAIL_ZER(err, "unsigned integer modulo")
'''
if y:
return x % y
else:
raise ZeroDivisionError("unsigned integer modulo")
def int_mod_ovf_zer(x, y):
'''#define OP_INT_MOD_OVF_ZER(x,y,r,err) \
if ((y)) { OP_INT_MOD_OVF(x,y,r,err); } \
else FAIL_ZER(err, "integer modulo")
'''
if y:
return int_mod_ovf(x, y)
else:
raise ZeroDivisionError("integer modulo")
def llong_mod_zer(x, y):
'''#define OP_LLONG_MOD_ZER(x,y,r) \
if ((y)) { OP_LLONG_MOD(x,y,r); } \
else FAIL_ZER("integer modulo")
'''
if y:
return llop.int_mod(SignedLongLong, x, y)
else:
raise ZeroDivisionError("integer modulo")
# Helpers...
def _Py_ARITHMETIC_RIGHT_SHIFT(i, j):
'''
// Py_ARITHMETIC_RIGHT_SHIFT
// C doesn't define whether a right-shift of a signed integer sign-extends
// or zero-fills. Here a macro to force sign extension:
// Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J)
// Return I >> J, forcing sign extension.
// Requirements:
// I is of basic signed type TYPE (char, short, int, long, or long long).
// TYPE is one of char, short, int, long, or long long, although long long
// must not be used except on platforms that support it.
// J is an integer >= 0 and strictly less than the number of bits in TYPE
// (because C doesn't define what happens for J outside that range either).
// Caution:
// I may be evaluated more than once.
#ifdef SIGNED_RIGHT_SHIFT_ZERO_FILLS
#define Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J) \
((I) < 0 ? ~((~(unsigned TYPE)(I)) >> (J)) : (I) >> (J))
#else
#define Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J) ((I) >> (J))
#endif
'''
return i >> j
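# Python's >> on ints is already arithmetic (it sign-extends), which is why
# the helper above reduces to `i >> j`:
#
#   -8 >> 1    # -> -4
#   -1 >> 5    # -> -1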
#XXX some code from src/int.h seems missing
#def int_mul_ovf(x, y): #HAVE_LONG_LONG version
# '''{ \
# PY_LONG_LONG lr = (PY_LONG_LONG)(x) * (PY_LONG_LONG)(y); \
# r = (long)lr; \
# if ((PY_LONG_LONG)r == lr); \
# else FAIL_OVF(err, "integer multiplication"); \
# }
# '''
# lr = r_longlong(x) * r_longlong(y);
# r = intmask(lr)
# if r_longlong(r) == lr:
# return r
# else:
# raise OverflowError("integer multiplication")
#not HAVE_LONG_LONG version
def int_mul_ovf(a, b): #long a, long b, long *longprod):
longprod = a * b
doubleprod = float(a) * float(b)
doubled_longprod = float(longprod)
# Fast path for normal case: small multiplicands, and no info is lost in either method.
if doubled_longprod == doubleprod:
return longprod
# Somebody somewhere lost info. Close enough, or way off? Note
# that a != 0 and b != 0 (else doubled_longprod == doubleprod == 0).
# The difference either is or isn't significant compared to the
# true value (of which doubleprod is a good approximation).
# absdiff/absprod <= 1/32 iff 32 * absdiff <= absprod -- 5 good bits is "close enough"
if 32.0 * abs(doubled_longprod - doubleprod) <= abs(doubleprod):
return longprod
raise OverflowError("integer multiplication")
| 30.763514
| 92
| 0.59664
|
a6f31e3a6c49aff4cb4ea4bc4e246cc8f81fba99
| 35,569
|
py
|
Python
|
parser/fase2/team06/TablaDeSimbolos.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | null | null | null |
parser/fase2/team06/TablaDeSimbolos.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | null | null | null |
parser/fase2/team06/TablaDeSimbolos.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | null | null | null |
from enum import Enum
import pandas as pd
import reportes as h
class TIPO_DE_DATO(Enum) :
NUMERO = 1
FLOTANTE=2
CARACTER=3
    # keep adding the missing types for type checking in operations
class Simbolo() :
    'This class represents a symbol in our symbol table'
    def __init__(self, id, nombre, tipo, tamanoCadena, BD, tabla, obligatorio,
                 pk, FK, referenciaTablaFK, referenciaCampoFK, unique, idUnique,
                 check, condicionCheck, idCheck, valor, default, idConstraintFK,
                 idConstraintPK, tipoIndex, sortIndex, ambito, rol):
self.id = id
self.nombre = nombre
self.tipo = tipo
self.tamanoCadena = tamanoCadena
self.BD = BD
self.tabla = tabla
self.obligatorio = obligatorio
self.pk = pk
self.FK = FK
self.referenciaTablaFK = referenciaTablaFK
self.referenciaCampoFK = referenciaCampoFK
self.unique = unique
self.idUnique = idUnique
self.check = check
self.condicionCheck = condicionCheck
self.idCheck = idCheck
self.valor = valor
self.default = default
self.idConstraintFK = idConstraintFK
self.idConstraintPK = idConstraintPK
self.tipoIndex = tipoIndex
self.sortIndex = sortIndex
self.ambito = ambito
self.rol = rol
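# A sketch of one column symbol; the 24 positional fields follow the signature
# above, and the values are illustrative (an INTEGER primary key 'id' in
# database 'db1', table 't1'):
#
#   col = Simbolo(0, 'id', 'integer', None, 'db1', 't1', 1, 1, 0,
#                 None, None, 1, None, 0, None, None, None, None,
#                 None, None, None, None, None, None)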
class TablaDeSimbolos() :
    'This class represents the symbol table'
    def __init__(self, simbolos=None):
        # a mutable default argument would be shared across every instance;
        # create a fresh dict per table instead
        self.simbolos = simbolos if simbolos is not None else {}
def agregar(self, simbolo) :
self.simbolos[simbolo.nombre] = simbolo
def obtener(self, id) :
print("a este entra")
if not id in self.simbolos :
print('Error1: variable ', id, ' no definida.')
return("no definida")
return self.simbolos[id]
def obtener2(self, nombre) :
print("a este entra")
if not nombre in self.simbolos :
print('Error1: variable ', nombre, ' no definida.')
return 0
return self.simbolos[nombre]
def actualizar(self, simbolo) :
if not simbolo.nombre in self.simbolos :
print('Error2: variable ', simbolo.nombre, ' no definida.')
else :
self.simbolos[simbolo.nombre] = simbolo
def mostrar(self,var):
print(str(var))
for x in self.simbolos:
print(x)
def destruir(self,simbolo):
print("########################### simbolos>",str(simbolo.id))
if not simbolo.id in self.simbolos :
print('Error3: variable ', simbolo.id, ' no definida.')
else :
self.simbolos[simbolo.id] = simbolo
del self.simbolos[simbolo.id]
print("si lo elimina")
def destruirColumna(self,nombre,BD,tabla):
clave = str(nombre)+str(BD)+str(tabla)
print(clave)
for simb in self.simbolos:
print (simb)
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla and self.simbolos[simb].tipo != None:
del self.simbolos[simb]
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def obtenerColumnas(self,tabla,BD):
#print("EMPIEZO A BORRAR LA TABLA: ",tabla)
print("DE MOMENTO IMPRIMIRÉ ACÁ ABAJO CUALES SON LAS COLUMNAS QUE PERTENECEN A LA TABLA")
listaColumnas = []
for simb in self.simbolos:
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD and self.simbolos[simb].tipo != None:
listaColumnas.append(self.simbolos[simb].nombre)
#print(self.simbolos[simb].nombre)
return listaColumnas
def destruirTabla(self,nombre,BD):
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
del self.simbolos[simb]
return
'''claveTabla = str(tabla)+str(BD)
for simb in self.simbolos:
if simb == claveTabla:
del self.simbolos[simb]
print("SE ACABARON LAS COLUMNAS DE LA TABLA: ",tabla)
return 0 '''
def destruirConstraint(self,nombre,BD,tabla):
print("aca estoy meeeeen!")
print(nombre)
print(BD)
print(tabla)
for simb in self.simbolos:
print("xdddx")
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD:
print("encontre una entrada posible")
print(self.simbolos[simb].idConstraintFK)
print(self.simbolos[simb].idConstraintPK)
if self.simbolos[simb].idConstraintFK == nombre:
print("ENCONTRE EL CONSTRAINTFK, KEMOSION")
self.simbolos[simb].idConstraintFK=None
self.simbolos[simb].FK = 0
self.simbolos[simb].referenciaTablaFK=None
self.simbolos[simb].referenciaCampoFK=None
elif self.simbolos[simb].idConstraintPK == nombre:
print("ENCONTRE EL CONSTRAINTPK, KEMOSION")
self.simbolos[simb].idConstraintPK=None
self.simbolos[simb].pk = 0
#-------------------------------------------------------------------
'''print("########################### simbolos>",str(simbolo.id))
if not simbolo.id in self.simbolos :
print('Error3: variable ', simbolo.id, ' no definida.')
else :
self.simbolos[simbolo.id] = simbolo
del self.simbolos[simbolo.id]
print("si lo elimina")'''
#-----------------------------------------------------------------------------------------------------------------------
def obtenerDato(self, nombre):
print("a este entra")
if not nombre in self.simbolos :
print('Error1: variable ', nombre, ' no definida.')
return("no definida")
return self.simbolos[nombre]
#-----------------------------------------------------------------------------------------------------------------------
    # Functions
def agregarSimbolo(self,simbolo):
clave = str(simbolo.nombre)+str(simbolo.BD)
self.simbolos[clave] = simbolo
def agregarVariable(self, simbolo):
clave = str(simbolo.nombre)+str(simbolo.BD)+str(simbolo.ambito)
self.simbolos[clave] = simbolo
def verificarFuncion(self,nombre,BD):
clave = str(nombre)+str(BD)
if not clave in self.simbolos:
return 0
return 1
    def eliminarVariablesFuncion(self,BD,ambito):
        # iterate over a copy so entries can be deleted safely, and remove
        # every variable of the scope instead of stopping at the first match
        eliminado = 0
        for simb in list(self.simbolos):
            if self.simbolos[simb].BD == BD and self.simbolos[simb].ambito == ambito:
                del self.simbolos[simb]
                eliminado = 1
        return eliminado
def contVariablesFunction(self,BD,ambito):
contt=0
for simb in self.simbolos:
if self.simbolos[simb].BD == BD and self.simbolos[simb].ambito == ambito:
contt+=1
return contt
    def eliminarFunction(self,nombre,BD):
        clave = str(nombre)+str(BD)
        if clave in self.simbolos:
            del self.simbolos[clave]
            return 1
        return 0
#-----------------------------------------------------------------------------------------------------------------------
def agregarnuevTablaBD(self,simbolo):
clave = str(simbolo.nombre)+str(simbolo.BD)
self.simbolos[clave] = simbolo
def validarTabla(self,nombre,BD):
clave = str(nombre)+str(BD)
if not clave in self.simbolos:
return 0
return 1
def obtenerTablaBD(self, nombre):
print("a este entra")
if not nombre in self.simbolos :
print('Error: La tabla: ', nombre, ' no definida.')
return 0
return self.simbolos[nombre]
#-----------------------------------------------------------------------------------------------------------------------
    # Table creation starts here
def agregarnuevaColumna(self,simbolo):
clave = str(simbolo.nombre) + str(simbolo.BD) + str(simbolo.tabla)
self.simbolos[clave] = simbolo
def verificarcolumnaBD(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
if not clave in self.simbolos :
print('Error: La columna: ', nombre, ' no definida.')
return 0
return 1
def verificarcolumnaBDAT(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
if not clave in self.simbolos :
print('Error: La tabla: ', nombre, ' no definida.')
return 0
return self.simbolos[clave]
def actualizauniqueColumna(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].unique = 1
print("se actualizao restriccion unique en columna")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizauniqueColumnaAT(self,nombre,BD,tabla,idConstraint):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].unique = 1
self.simbolos[simb].idConstraintFK = idConstraint
print("**********************************")
print(self.simbolos[simb].idConstraintFK)
print("**********************************")
print("se actualizao restriccion unique en columna")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizarcheckColumna(self,nombre,BD,tabla,idchk,condchk):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].check = 1
                    self.simbolos[simb].condicionCheck = condchk
                    self.simbolos[simb].idCheck = idchk
                    print("se actualizo restriccion check en columna")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizapkcolumna(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].pk = 1
print("se actualizo restricion llave primaria en columna")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizapkcolumnaAT(self,nombre,BD,tabla,idConstraint):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].pk = 1
self.simbolos[simb].unique = 1
self.simbolos[simb].obligatorio = 0
self.simbolos[simb].idConstraintPK = idConstraint
print("se actualizo restricion llave primaria en columna")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizafkcolumna(self,nombre,BD,tabla,idrefcolumna,idreftabla):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].FK = 1
self.simbolos[simb].referenciaCampoFK = idrefcolumna
self.simbolos[simb].referenciaTablaFK = idreftabla
print("se actualizo columna como llave foranea")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def actualizafkcolumnaAT(self,nombre,BD,tabla,idrefcolumna,idreftabla,idConstraint):
clave = str(nombre) + str(BD) + str(tabla)
print(clave)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
self.simbolos[simb].FK = 1
self.simbolos[simb].referenciaCampoFK = idrefcolumna
self.simbolos[simb].referenciaTablaFK = idreftabla
self.simbolos[simb].idConstraintFK = idConstraint
print("se actualizo columna como llave foranea")
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def numerodeColumnas(self,BD,tabla):
cont = 0
for simb in self.simbolos:
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD and self.simbolos[simb].tipo != None:
cont=cont+1
return cont
def numerodeDatosenColumna(self,nombre,BD,tabla):
clave = str(nombre)+str(BD)+str(tabla)
if self.simbolos[clave].valor == None:
return 0
return len(self.simbolos[clave].valor)
def numerodeDatosenprimeraColumna(self,tabla,BD):
for simb in self.simbolos:
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD and self.simbolos[simb].id == 0 and self.simbolos[simb].tipo != None:
if self.simbolos[simb].valor == None:
return 0
return len(self.simbolos[simb].valor)
return 0
def actualizandoDefaultColumna(self,nombre,BD,tabla):
clave = str(nombre)+str(BD)+str(tabla)
if self.simbolos[clave].valor == None:
if self.simbolos[clave].default != None:
self.simbolos[clave].valor = [self.simbolos[clave].default]
else:
self.simbolos[clave].valor = ["NULL"]
else:
if self.simbolos[clave].default != None:
                self.simbolos[clave].valor.append(self.simbolos[clave].default)
else:
self.simbolos[clave].valor.append("NULL")
#-----------------------------------------------------------------------------------------------------------------------
    # Table INSERT starts here
    # called when the INSERT only provides the values to store, without naming columns
def obtenersinNombreColumna(self,nombre,BD,id):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].id == id and self.simbolos[simb].tipo != None:
return self.simbolos[simb]
return 0
    # called when the INSERT specifies the column id
def obtenerconNombreColumna(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla and self.simbolos[simb].tipo != None:
return self.simbolos[simb]
return 0
    # used to update the data held in the symbol table
def actualizarValorColumna(self,nombre,BD,tabla,dato):
clave = str(nombre) + str(BD) + str(tabla)
for simb in self.simbolos:
if simb == clave:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla and self.simbolos[simb].tipo != None:
if self.simbolos[simb].valor == None:
self.simbolos[simb].valor = [dato]
else:
self.simbolos[simb].valor.append(dato)
print("se agrego un dato a la columna: ",nombre," en tabla: ",tabla)
return
#print(self.simbolos[simb].id," ",self.simbolos[simb].nombre," ",self.simbolos[simb].BD," ",self.simbolos[simb].tabla)
print("la columna no existe")
return 0
def columnasPrimaria(self,BD,tabla):
listpk = []
for simb in self.simbolos:
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD and self.simbolos[simb].pk == 1 and self.simbolos[simb].tipo != None:
listpk.append(self.simbolos[simb].id)
return listpk
    # -------------- Row DELETE
def eliminarRegistroTabla(self,BD,tabla,posvalor):
for simb in self.simbolos:
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == BD and self.simbolos[simb].tipo != None:
self.simbolos[simb].valor.pop(posvalor)
return 0
    # --------------- Row UPDATE
def UpdateRegistro(self,nombre,BD,tabla,dato,pos):
clave = str(nombre) + str(BD) + str(tabla)
if not clave in self.simbolos :
print('Error: La tabla: ', nombre, ' no definida.')
return 0
self.simbolos[clave].valor[pos] = dato
return 1
def printcontsimbolos(self):
tm = 0
for simb in self.simbolos:
print("----------Columna ",tm,"----------")
print(self.simbolos[simb].id)
print(self.simbolos[simb].nombre)
print(self.simbolos[simb].tipo)
print(self.simbolos[simb].tamanoCadena)
print(self.simbolos[simb].BD)
print(self.simbolos[simb].tabla)
print(self.simbolos[simb].obligatorio)
print(self.simbolos[simb].pk)
print(self.simbolos[simb].FK)
print(self.simbolos[simb].referenciaTablaFK)
print(self.simbolos[simb].referenciaCampoFK)
print(self.simbolos[simb].unique)
print(self.simbolos[simb].idUnique)
print(self.simbolos[simb].check)
print(self.simbolos[simb].condicionCheck)
print(self.simbolos[simb].idCheck)
print(self.simbolos[simb].valor)
print(self.simbolos[simb].default)
print(self.simbolos[simb].idConstraintFK)
print(self.simbolos[simb].idConstraintPK)
tm=tm+1
return 0
    # -------------------- CREATE, ALTER, USE AND DROP DATABASE --------------------
def agregarCrearBD(self, simbolo) :
self.simbolos[simbolo.nombre] = simbolo
def verificacionCrearBD(self, nombre) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
print('Error1: base de datos ', nombre, ' ya definida.')
return 1
return 0
def verificacionUseBD(self, nombre) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
print('BD ', nombre, ' existente.')
return 1
return 0
def verificacionAlterBD(self, nombre) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
return 1
return 0
def verificacionAlterBD_2(self, nombre) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
return 1
return 0
def actualizarAlterBD(self, old, alter) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == old and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
print("SIMB",self.simbolos[simb])
self.simbolos[alter] = self.simbolos.pop(simb)
self.simbolos[alter].nombre = alter
return 2
return 1
def destruirBD(self,nombre):
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
print('Se elimino ', nombre)
self.simbolos.pop(simb)
return 1
return 0
def verificacionShowBD(self) :
bd = []
for simb in self.simbolos:
print("entro a for")
if self.simbolos[simb].nombre != None and self.simbolos[simb].BD == None and self.simbolos[simb].tabla == None:
bd.append(self.simbolos[simb].nombre)
return bd
    # fetches the data when one table and all of its columns are requested
def obtenerSelect1A(self, tabla, bd) :
print("a este entra metodo")
print("la bd: ",bd)
if tabla=="" or bd=="": return 0
a=""
columnas=[]
datos={}
for simb in self.simbolos:
print(simb)
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == bd:
print("res: ",self.simbolos[simb].valor)
print( simb," = ",self.simbolos[simb].valor)
a+=str(simb)+" = "+str(self.simbolos[simb].valor)+"\n"
datos[simb]=self.simbolos[simb].valor
#columnas.append(simb)
if a=="":
print("A va vacio")
return "0"
else:
print("vera si genera el dataframe")
df=pd.DataFrame(datos)
print(df)
print("si termino")
print("A es: ",a)
return df
    # fetches data when several columns of one table are requested
def obtenerSelect2B(self, tabla, bd, campos) :
print("a este entra metodo")
print("la bd: ",bd)
print("la tabla: ",tabla)
print("campos: ",campos)
if tabla=="" or bd=="" or len(campos)==0: return 0
a=""
columnas=[]
datos={}
for x in range(0,len(campos)):
for simb in self.simbolos:
print(simb)
key=str(self.simbolos[simb].nombre)+str(self.simbolos[simb].BD)+str(self.simbolos[simb].tabla)
print("el nombre sera ====",key)
if self.simbolos[simb].tabla == tabla and self.simbolos[simb].BD == bd and (self.simbolos[simb].nombre+self.simbolos[simb].BD+self.simbolos[simb].tabla)==campos[x]:
print("res: ",self.simbolos[simb].valor)
print( simb," = ",self.simbolos[simb].valor)
a+=str(simb)+" = "+str(self.simbolos[simb].valor)+"\n"
datos[simb]=self.simbolos[simb].valor
#columnas.append(simb)
if a=="":
print("A va vacio")
return "0"
else:
print("vera si genera el dataframe")
df=pd.DataFrame(datos)
print(df)
print("si termino")
print("A es: ",a)
return df
def obtenerSelect2E(self, identificador):
if len(identificador)==0: return "no se encontro la variable"
a=""
for x in range(0,len(identificador)):
for simb in self.simbolos:
if self.simbolos[simb].nombre == identificador[x]:
a+= str(self.simbolos[simb].nombre)+" = "+ str(self.simbolos[simb].valor)+"\n"
if a=="":
print("A va vacio")
return "0"
else:
return a
    # fetches data when several columns of one table are requested
def obtenerSelect4(self, tabla, bd, campos) :
print("a este entra metodo----------------------")
print("la bd: ",bd)
print("la tabla: ",tabla)
print("campos: ",campos)
if tabla=="" or bd=="" or len(campos)==0: return 0
a=""
columnas=[]
datos={}
for x in range(0,len(tabla)):
for y in range(0,len(campos)):
for simb in self.simbolos:
print(simb)
key=str(self.simbolos[simb].nombre)+str(self.simbolos[simb].BD)+str(self.simbolos[simb].tabla)
print("el nombre sera ====",key)
if self.simbolos[simb].tabla == tabla[x] and self.simbolos[simb].BD == bd and (self.simbolos[simb].nombre+self.simbolos[simb].BD+self.simbolos[simb].tabla)==campos[y]:
print("res: ",self.simbolos[simb].valor)
print( simb," = ",self.simbolos[simb].valor)
a+=str(simb)+" = "+str(self.simbolos[simb].valor)+"\n"
datos[simb]=self.simbolos[simb].valor
#columnas.append(simb)
if a=="":
print("A va vacio")
return "0"
else:
print("vera si genera el dataframe")
df=pd.DataFrame(datos)
print(df)
print("si termino")
print("A es: ",a)
return df
    # fetches data when several tables and all their data are requested
def obtenerSelect5Todo(self, tabla, bd) :
print("a este entra metodo----------------------")
print("la bd: ",bd)
print("la tabla: ",tabla)
if bd=="" or len(tabla)==0: return 0
a=""
columnas=[]
datos={}
for x in range(0,len(tabla)):
for simb in self.simbolos:
print(simb)
key=str(self.simbolos[simb].nombre)+str(self.simbolos[simb].BD)+str(self.simbolos[simb].tabla)
print("el nombre sera ====",key)
if self.simbolos[simb].tabla == tabla[x] and self.simbolos[simb].BD == bd:
print("res: ",self.simbolos[simb].valor)
print( simb," = ",self.simbolos[simb].valor)
a+=str(simb)+" = "+str(self.simbolos[simb].valor)+"\n"
datos[simb]=self.simbolos[simb].valor
#columnas.append(simb)
if a=="":
print("A va vacio")
return "0"
else:
print("vera si genera el dataframe")
df=pd.DataFrame(datos)
print(df)
print("si termino")
print("A es: ",a)
return df
def agregarnuevoIndex(self,simbolo):
clave = str(simbolo.nombre) + str(simbolo.BD) + str(simbolo.tabla)
self.simbolos[clave] = simbolo
# --------------------------------------------ALTER INDEX COLUMN ----------------------------------------------------------
def verificacionAlterColumnIndex(self, nombre, BD,column) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
print(self.simbolos[simb].tabla)
return self.simbolos[simb].tabla
return 0
def obtenerTablasIndex(self,nombre,BD,column):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].id == column:
print(self.simbolos[simb].nombre)
return self.simbolos[simb].nombre
return 0
def verificacionColumnaIndex(self,nombre,BD,tabla,old_column,new_column):
clave = str(nombre) + str(BD) + str(tabla)
print("CLAVE: ",clave)
if clave in self.simbolos :
for simb in self.simbolos:
if self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
print("VALOR:",self.simbolos[simb].valor)
if self.simbolos[simb].valor != None:
print("VALOR CON VALOR:",self.simbolos[simb].valor)
y = self.simbolos[simb].valor
y = [new_column if x==old_column else x for x in y]
print("NUEVA TABLA: ",y)
tipo = self.simbolos[simb].tipoIndex
sort = self.simbolos[simb].sortIndex
tabla = self.simbolos[simb].tabla
valores = y
BDatos = BD
simbolo = Simbolo(None,nombre,None,None,BDatos,tabla,None,None,None,None,None,None,None,None,None,None,valores,None,None,None,tipo,sort,None,None)
print(simbolo)
self.simbolos[clave] = simbolo
return 0
else:
return 1
def obtenerIndex(self,nombre,BD,old_column):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].id == old_column:
print(self.simbolos[simb].nombre)
return self.simbolos[simb].nombre
return 0
def verificacionAlterStringColumIndex(self, nombre, BD,idcolumn) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
print(self.simbolos[simb].tabla)
return self.simbolos[simb].tabla
return 0
def obtenerTablasStringIndex(self,nombre,BD,idcolumn):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].nombre == idcolumn:
print(self.simbolos[simb].nombre)
return self.simbolos[simb].nombre
return 0
def actualizarAlterColumnIndex(self, nombre, nombreColumna, BD) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
print("SIMB",self.simbolos[simb])
clave = nombre + BD + self.simbolos[simb].tabla
tipo = self.simbolos[simb].tipoIndex
sort = self.simbolos[simb].sortIndex
tabla = self.simbolos[simb].tabla
valores = [nombreColumna]
BDatos = BD
simbolo = Simbolo(None,nombre,None,None,BDatos,tabla,None,None,None,None,None,None,None,None,None,None,valores,None,None,None,tipo,sort,None,None)
print(simbolo)
self.simbolos[clave] = simbolo
#del self.simbolos[simb]
return 2
return 1
# --------------------------------------------ALTER INDEX COLUMN ----------------------------------------------------------
def verificarIndex(self,nombre,BD,tabla):
clave = str(nombre) + str(BD) + str(tabla)
if not clave in self.simbolos :
for simb in self.simbolos:
if self.simbolos[simb].BD == BD and self.simbolos[simb].tabla == tabla:
return 0
else:
return 1
def verificarTablaIndex(self, nombre, BD, idcolumn):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD:
print("TABLA:",self.simbolos[simb].tabla)
return self.simbolos[simb].tabla
return 0
def obtenerColumnaIndex(self,nombre,BD,idcolumn):
print("COLL:",idcolumn)
y = []
for simb in self.simbolos:
for col in idcolumn:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].nombre == col:
y.append(self.simbolos[simb].nombre)
print("Y:",len(y))
print("ID:",len(idcolumn))
if len(y) != len(idcolumn):
print("NO ES COLUMNA")
return 1
else:
print("SI ES COLUMNA")
return 0
def obtenerColumnaUnicaIndex(self,nombre,BD,idcolumn):
for simb in self.simbolos:
if self.simbolos[simb].tabla == nombre and self.simbolos[simb].BD == BD and self.simbolos[simb].nombre == idcolumn:
print(self.simbolos[simb].nombre)
return 0
return 1
def verificacionAlterIndex(self, nombre, BD) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
return 1
return 0
def deleteAlterIndex(self, nombre, BD) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == nombre and self.simbolos[simb].BD == BD:
print("SIMB",self.simbolos[simb])
del self.simbolos[simb]
return 2
return 1
def actualizarAlterIndex(self, old, alter, BD) :
for simb in self.simbolos:
if self.simbolos[simb].nombre == old and self.simbolos[simb].BD == BD:
print("SIMB",self.simbolos[simb])
clave = alter + BD + self.simbolos[simb].tabla
tipo = self.simbolos[simb].tipoIndex
sort = self.simbolos[simb].sortIndex
tabla = self.simbolos[simb].tabla
valores = self.simbolos[simb].valor
BDatos = BD
simbolo = Simbolo(None,alter,None,None,BDatos,tabla,None,None,None,None,None,None,None,None,None,None,valores,None,None,None,tipo,sort,None,None)
print(simbolo)
self.simbolos[clave] = simbolo
del self.simbolos[simb]
return 2
return 1
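# A minimal usage sketch of the table (column values as in the Simbolo example
# above; illustrative only):
#
#   ts = TablaDeSimbolos()
#   col = Simbolo(0, 'id', 'integer', None, 'db1', 't1', 1, 1, 0,
#                 None, None, 1, None, 0, None, None, None, None,
#                 None, None, None, None, None, None)
#   ts.agregarnuevaColumna(col)                      # keyed as 'iddb1t1'
#   ts.verificarcolumnaBD('id', 'db1', 't1')         # -> 1
#   ts.actualizarValorColumna('id', 'db1', 't1', 42)
#   ts.numerodeDatosenColumna('id', 'db1', 't1')     # -> 1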
| 43.061743
| 257
| 0.540836
|
c4385c2300996a48f028585f17f796c2fa926374
| 612
|
py
|
Python
|
complaints/migrations/0008_auto_20180330_0512.py
|
shashank-sharma/smart-odisha-hackathon
|
e73b9d8ec8671aaaa72d3c14241bbecde56fe10f
|
[
"MIT"
] | 1
|
2018-06-18T15:23:07.000Z
|
2018-06-18T15:23:07.000Z
|
complaints/migrations/0008_auto_20180330_0512.py
|
shashank-sharma/smart-odisha-hackathon
|
e73b9d8ec8671aaaa72d3c14241bbecde56fe10f
|
[
"MIT"
] | null | null | null |
complaints/migrations/0008_auto_20180330_0512.py
|
shashank-sharma/smart-odisha-hackathon
|
e73b9d8ec8671aaaa72d3c14241bbecde56fe10f
|
[
"MIT"
] | 1
|
2020-01-31T13:21:38.000Z
|
2020-01-31T13:21:38.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-30 05:12
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('complaints', '0007_auto_20180330_0510'),
]
operations = [
migrations.AlterField(
model_name='complaints',
name='complaint_taken_by',
field=models.OneToOneField(default=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 26.608696
| 128
| 0.684641
|
1ad0618118e9ee6a1348a01f2b02fe1f6dd2e12a
| 3,466
|
py
|
Python
|
repos/system_upgrade/el7toel8/actors/dnfshellrpmupgrade/actor.py
|
Jakuje/leapp-repository
|
580540f68bd4f89152c28935f775f660c2db0839
|
[
"Apache-2.0"
] | null | null | null |
repos/system_upgrade/el7toel8/actors/dnfshellrpmupgrade/actor.py
|
Jakuje/leapp-repository
|
580540f68bd4f89152c28935f775f660c2db0839
|
[
"Apache-2.0"
] | 1
|
2020-06-15T18:32:21.000Z
|
2020-06-15T18:32:21.000Z
|
repos/system_upgrade/el7toel8/actors/dnfshellrpmupgrade/actor.py
|
kubco2/leapp-repository
|
ad98ad91d06d0adfe945566a414c95df862c4172
|
[
"Apache-2.0"
] | null | null | null |
import json
import os
import shutil
import sys
from tempfile import NamedTemporaryFile
from leapp.actors import Actor
from leapp.libraries.stdlib import run
from leapp.libraries.stdlib.call import STDOUT
from leapp.libraries.stdlib.config import is_debug
from leapp.models import FilteredRpmTransactionTasks, UsedTargetRepositories, TransactionCompleted
from leapp.tags import RPMUpgradePhaseTag, IPUWorkflowTag
def _logging_handler(fd_info, buffer):
'''Custom log handler to always show DNF stdout to console and stderr only in DEBUG mode'''
(_unused, fd_type) = fd_info
if fd_type == STDOUT:
sys.stdout.write(buffer)
else:
if is_debug():
sys.stderr.write(buffer)
class DnfShellRpmUpgrade(Actor):
"""
Setup and call DNF upgrade command
Based on previously calculated RPM transaction data, this actor will setup and call
rhel-upgrade DNF plugin with necessary parameters
"""
name = 'dnf_shell_rpm_upgrade'
consumes = (FilteredRpmTransactionTasks, UsedTargetRepositories)
produces = (TransactionCompleted,)
tags = (RPMUpgradePhaseTag, IPUWorkflowTag)
def process(self):
# FIXME: we hitting issue now because the network is down and rhsm
# # is trying to connect to the server. Commenting this out for now
# # so people will not be affected in case they do not have set a
# # release and we will have time to fix it properly.
# Make sure Subscription Manager OS Release is unset
# cmd = ['subscription-manager', 'release', '--unset']
# run(cmd)
shutil.copyfile(
self.get_file_path('rhel_upgrade.py'), '/lib/python2.7/site-packages/dnf-plugins/rhel_upgrade.py')
dnf_command = ['/usr/bin/dnf', 'rhel-upgrade', 'upgrade']
target_repoids = []
for target_repos in self.consume(UsedTargetRepositories):
for repo in target_repos.repos:
target_repoids.append(repo.repoid)
        debugsolver = os.environ.get('LEAPP_DEBUG', '0') == '1'
shutil.copyfile(
'/etc/yum.repos.d/redhat.repo.upgrade',
'/etc/yum.repos.d/redhat.repo'
)
# FIXME: that's ugly hack, we should get info which file remove and
# + do it more nicely..
cmd = ['rm', '-f', '/etc/pki/product/69.pem']
run(cmd)
data = next(self.consume(FilteredRpmTransactionTasks), FilteredRpmTransactionTasks())
plugin_data = {
'pkgs_info':
{
'local_rpms': [pkg for pkg in data.local_rpms],
'to_install': [pkg for pkg in data.to_install],
'to_remove': [pkg for pkg in data.to_remove]
},
'dnf_conf':
{
'allow_erasing': True,
'best': True,
'debugsolver': debugsolver,
'disable_repos': True,
'enable_repos': target_repoids,
'gpgcheck': False,
'platform_id': 'platform:el8',
'releasever': '8',
'test_flag': False,
}
}
with NamedTemporaryFile() as data:
json.dump(plugin_data, data)
data.flush()
run(dnf_command + [data.name], callback_raw=_logging_handler)
self.produce(TransactionCompleted())
| 35.010101
| 110
| 0.608771
|
ae79aeceba95f40d5f9fcc2e4d5369b09e8012c1
| 1,880
|
py
|
Python
|
imagepy/menus/Kit3D/Viewer 3D/demo_plgs.py
|
pengguanjun/imagepy
|
d96ef98c2c3e93d368131fd2753bce164e1247cd
|
[
"BSD-4-Clause"
] | 1
|
2020-08-17T04:18:35.000Z
|
2020-08-17T04:18:35.000Z
|
imagepy/menus/Kit3D/Viewer 3D/demo_plgs.py
|
cycleuser/imagepy
|
5dc1a9a8137280c5215287392ba1b23d368bd7e9
|
[
"BSD-4-Clause"
] | null | null | null |
imagepy/menus/Kit3D/Viewer 3D/demo_plgs.py
|
cycleuser/imagepy
|
5dc1a9a8137280c5215287392ba1b23d368bd7e9
|
[
"BSD-4-Clause"
] | null | null | null |
from imagepy.core.engine import Free
from sciapp.object import Surface, MarkText
from sciapp.util import surfutil
import numpy as np
class Decoration(Free):
title = 'Decoration Demo'
def run(self, para=None):
dphi, dtheta = np.pi/20.0, np.pi/20.0
[phi,theta] = np.mgrid[0:np.pi+dphi*1.5:dphi,0:2*np.pi+dtheta*1.5:dtheta]
m0 = 4; m1 = 3; m2 = 2; m3 = 3; m4 = 6; m5 = 2; m6 = 6; m7 = 4;
r = np.sin(m0*phi)**m1 + np.cos(m2*phi)**m3 + np.sin(m4*theta)**m5 + np.cos(m6*theta)**m7
x = r*np.sin(phi)*np.cos(theta)
y = r*np.cos(phi)
z = r*np.sin(phi)*np.sin(theta)
vts, fs, ns, cs = surfutil.build_mesh(x, y, z)
cs[:] = surfutil.auto_lookup(vts[:,2], surfutil.linear_color('jet'))/255
self.app.show_mesh(Surface(vts, fs, ns, cs), 'decoration')
class Lines(Free):
title = 'Lines Demo'
def run(self, para=None):
vts = np.array([(0,0,0),(1,1,0),(2,1,0),(1,0,0)], dtype=np.float32)
fs = np.array([(0,1,2),(1,2,3)], dtype=np.uint32)
ns = np.ones((4,3), dtype=np.float32)
n_mer, n_long = 6, 11
pi = np.pi
dphi = pi / 1000.0
phi = np.arange(0.0, 2 * pi + 0.5 * dphi, dphi)
mu = phi * n_mer
x = np.cos(mu) * (1 + np.cos(n_long * mu / n_mer) * 0.5)
y = np.sin(mu) * (1 + np.cos(n_long * mu / n_mer) * 0.5)
z = np.sin(n_long * mu / n_mer) * 0.5
vts, fs, ns, cs = surfutil.build_line(x, y, z, (1, 0, 0))
cs[:] = surfutil.auto_lookup(vts[:,2], surfutil.linear_color('jet'))/255
self.app.show_mesh(Surface(vts, fs, ns, cs, mode='grid'), 'line')
class Balls(Free):
title = 'Random Balls Demo'
def run(self, para=None):
os = np.random.rand(30).reshape((-1,3))
rs = np.random.rand(10)/5
cs = (np.random.rand(10)*255).astype(np.uint8)
cs = surfutil.linear_color('jet')[cs]/255
vts, fs, ns, cs = surfutil.build_balls(os, rs, cs)
self.app.show_mesh(Surface(vts, fs, ns, cs), 'balls')
plgs = [Lines, Balls, Decoration]
| 34.814815
| 93
| 0.611702
|
122589d66b8200e6e43afd2895cbaee889046018
| 20,706
|
py
|
Python
|
openstack/tests/unit/object_store/v1/test_proxy.py
|
jlyheden/openstacksdk
|
7e0dcaaa4a69b17b97e746ce8de104689c60becc
|
[
"Apache-2.0"
] | null | null | null |
openstack/tests/unit/object_store/v1/test_proxy.py
|
jlyheden/openstacksdk
|
7e0dcaaa4a69b17b97e746ce8de104689c60becc
|
[
"Apache-2.0"
] | null | null | null |
openstack/tests/unit/object_store/v1/test_proxy.py
|
jlyheden/openstacksdk
|
7e0dcaaa4a69b17b97e746ce8de104689c60becc
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testscenarios import load_tests_apply_scenarios as load_tests # noqa
from hashlib import sha1
import mock
import random
import string
import tempfile
import time
from openstack.object_store.v1 import account
from openstack.object_store.v1 import container
from openstack.object_store.v1 import obj
from openstack.tests.unit.cloud import test_object as base_test_object
from openstack.tests.unit import test_proxy_base
class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
kwargs_to_path_args = False
def setUp(self):
super(TestObjectStoreProxy, self).setUp()
self.proxy = self.cloud.object_store
self.container = self.getUniqueString()
self.endpoint = self.cloud.object_store.get_endpoint() + '/'
self.container_endpoint = '{endpoint}{container}'.format(
endpoint=self.endpoint, container=self.container)
def test_account_metadata_get(self):
self.verify_head(self.proxy.get_account_metadata, account.Account)
def test_container_metadata_get(self):
self.verify_head(self.proxy.get_container_metadata,
container.Container, value="container")
def test_container_delete(self):
self.verify_delete(self.proxy.delete_container,
container.Container, False)
def test_container_delete_ignore(self):
self.verify_delete(self.proxy.delete_container,
container.Container, True)
def test_container_create_attrs(self):
self.verify_create(
self.proxy.create_container,
container.Container,
method_args=['container_name'],
expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3})
def test_object_metadata_get(self):
self._verify2("openstack.proxy.Proxy._head",
self.proxy.get_object_metadata,
method_args=['object'],
method_kwargs={'container': 'container'},
expected_args=[obj.Object, 'object'],
expected_kwargs={'container': 'container'})
def _test_object_delete(self, ignore):
expected_kwargs = {
"ignore_missing": ignore,
"container": "name",
}
self._verify2("openstack.proxy.Proxy._delete",
self.proxy.delete_object,
method_args=["resource"],
method_kwargs=expected_kwargs,
expected_args=[obj.Object, "resource"],
expected_kwargs=expected_kwargs)
def test_object_delete(self):
self._test_object_delete(False)
def test_object_delete_ignore(self):
self._test_object_delete(True)
def test_object_create_attrs(self):
kwargs = {"name": "test", "data": "data", "container": "name"}
self._verify2("openstack.proxy.Proxy._create",
self.proxy.upload_object,
method_kwargs=kwargs,
expected_args=[obj.Object],
expected_kwargs=kwargs)
def test_object_create_no_container(self):
self.assertRaises(TypeError, self.proxy.upload_object)
def test_object_get(self):
kwargs = dict(container="container")
self.verify_get(
self.proxy.get_object, obj.Object,
value=["object"],
method_kwargs=kwargs,
expected_kwargs=kwargs)
def test_set_temp_url_key(self):
key = 'super-secure-key'
self.register_uris([
dict(method='POST', uri=self.endpoint,
status_code=204,
validate=dict(
headers={
'x-account-meta-temp-url-key': key})),
dict(method='HEAD', uri=self.endpoint,
headers={
'x-account-meta-temp-url-key': key}),
])
self.proxy.set_account_temp_url_key(key)
self.assert_calls()
def test_set_account_temp_url_key_second(self):
key = 'super-secure-key'
self.register_uris([
dict(method='POST', uri=self.endpoint,
status_code=204,
validate=dict(
headers={
'x-account-meta-temp-url-key-2': key})),
dict(method='HEAD', uri=self.endpoint,
headers={
'x-account-meta-temp-url-key-2': key}),
])
self.proxy.set_account_temp_url_key(key, secondary=True)
self.assert_calls()
def test_set_container_temp_url_key(self):
key = 'super-secure-key'
self.register_uris([
dict(method='POST', uri=self.container_endpoint,
status_code=204,
validate=dict(
headers={
'x-container-meta-temp-url-key': key})),
dict(method='HEAD', uri=self.container_endpoint,
headers={
'x-container-meta-temp-url-key': key}),
])
self.proxy.set_container_temp_url_key(self.container, key)
self.assert_calls()
def test_set_container_temp_url_key_second(self):
key = 'super-secure-key'
self.register_uris([
dict(method='POST', uri=self.container_endpoint,
status_code=204,
validate=dict(
headers={
'x-container-meta-temp-url-key-2': key})),
dict(method='HEAD', uri=self.container_endpoint,
headers={
'x-container-meta-temp-url-key-2': key}),
])
self.proxy.set_container_temp_url_key(
self.container, key, secondary=True)
self.assert_calls()
def test_copy_object(self):
self.assertRaises(NotImplementedError, self.proxy.copy_object)
def test_file_segment(self):
file_size = 4200
content = ''.join(random.SystemRandom().choice(
string.ascii_uppercase + string.digits)
for _ in range(file_size)).encode('latin-1')
self.imagefile = tempfile.NamedTemporaryFile(delete=False)
self.imagefile.write(content)
self.imagefile.close()
segments = self.proxy._get_file_segments(
endpoint='test_container/test_image',
filename=self.imagefile.name,
file_size=file_size,
segment_size=1000)
self.assertEqual(len(segments), 5)
segment_content = b''
for (index, (name, segment)) in enumerate(segments.items()):
self.assertEqual(
'test_container/test_image/{index:0>6}'.format(index=index),
name)
segment_content += segment.read()
self.assertEqual(content, segment_content)
class TestDownloadObject(base_test_object.BaseTestObject):
def setUp(self):
super(TestDownloadObject, self).setUp()
self.the_data = b'test body'
self.register_uris([
dict(method='GET', uri=self.object_endpoint,
headers={
'Content-Length': str(len(self.the_data)),
'Content-Type': 'application/octet-stream',
'Accept-Ranges': 'bytes',
'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
'X-Timestamp': '1481808853.65009',
'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
'X-Static-Large-Object': 'True',
'X-Object-Meta-Mtime': '1481513709.168512',
},
content=self.the_data)])
def test_download(self):
data = self.cloud.object_store.download_object(
self.object, container=self.container)
self.assertEqual(data, self.the_data)
self.assert_calls()
def test_stream(self):
chunk_size = 2
for index, chunk in enumerate(self.cloud.object_store.stream_object(
self.object, container=self.container,
chunk_size=chunk_size)):
chunk_len = len(chunk)
start = index * chunk_size
end = start + chunk_len
self.assertLessEqual(chunk_len, chunk_size)
self.assertEqual(chunk, self.the_data[start:end])
self.assert_calls()
class TestExtractName(TestObjectStoreProxy):
scenarios = [
('discovery', dict(url='/', parts=['account'])),
('endpoints', dict(url='/endpoints', parts=['endpoints'])),
('container', dict(url='/AUTH_123/container_name',
parts=['container'])),
('object', dict(url='/container_name/object_name',
parts=['object'])),
('object_long', dict(url='/v1/AUTH_123/cnt/path/deep/object_name',
parts=['object']))
]
def test_extract_name(self):
results = self.proxy._extract_name(self.url, project_id='123')
self.assertEqual(self.parts, results)
class TestTempURL(TestObjectStoreProxy):
expires_iso8601_format = '%Y-%m-%dT%H:%M:%SZ'
short_expires_iso8601_format = '%Y-%m-%d'
time_errmsg = ('time must either be a whole number or in specific '
'ISO 8601 format.')
path_errmsg = 'path must be full path to an object e.g. /v1/a/c/o'
url = '/v1/AUTH_account/c/o'
seconds = 3600
key = 'correcthorsebatterystaple'
method = 'GET'
expected_url = url + ('?temp_url_sig=temp_url_signature'
'&temp_url_expires=1400003600')
expected_body = '\n'.join([
method,
'1400003600',
url,
]).encode('utf-8')
@mock.patch('hmac.HMAC')
@mock.patch('time.time', return_value=1400000000)
def test_generate_temp_url(self, time_mock, hmac_mock):
hmac_mock().hexdigest.return_value = 'temp_url_signature'
url = self.proxy.generate_temp_url(
self.url, self.seconds, self.method, temp_url_key=self.key)
key = self.key
if not isinstance(key, bytes):
key = key.encode('utf-8')
self.assertEqual(url, self.expected_url)
self.assertEqual(hmac_mock.mock_calls, [
mock.call(),
mock.call(key, self.expected_body, sha1),
mock.call().hexdigest(),
])
self.assertIsInstance(url, type(self.url))
@mock.patch('hmac.HMAC')
@mock.patch('time.time', return_value=1400000000)
def test_generate_temp_url_ip_range(self, time_mock, hmac_mock):
hmac_mock().hexdigest.return_value = 'temp_url_signature'
ip_ranges = [
'1.2.3.4', '1.2.3.4/24', '2001:db8::',
b'1.2.3.4', b'1.2.3.4/24', b'2001:db8::',
]
path = '/v1/AUTH_account/c/o/'
expected_url = path + ('?temp_url_sig=temp_url_signature'
'&temp_url_expires=1400003600'
'&temp_url_ip_range=')
for ip_range in ip_ranges:
hmac_mock.reset_mock()
url = self.proxy.generate_temp_url(
path, self.seconds, self.method,
temp_url_key=self.key, ip_range=ip_range)
key = self.key
if not isinstance(key, bytes):
key = key.encode('utf-8')
if isinstance(ip_range, bytes):
ip_range_expected_url = (
expected_url + ip_range.decode('utf-8')
)
expected_body = '\n'.join([
'ip=' + ip_range.decode('utf-8'),
self.method,
'1400003600',
path,
]).encode('utf-8')
else:
ip_range_expected_url = expected_url + ip_range
expected_body = '\n'.join([
'ip=' + ip_range,
self.method,
'1400003600',
path,
]).encode('utf-8')
self.assertEqual(url, ip_range_expected_url)
self.assertEqual(hmac_mock.mock_calls, [
mock.call(key, expected_body, sha1),
mock.call().hexdigest(),
])
self.assertIsInstance(url, type(path))
@mock.patch('hmac.HMAC')
def test_generate_temp_url_iso8601_argument(self, hmac_mock):
hmac_mock().hexdigest.return_value = 'temp_url_signature'
url = self.proxy.generate_temp_url(
self.url, '2014-05-13T17:53:20Z', self.method,
temp_url_key=self.key)
self.assertEqual(url, self.expected_url)
# Don't care about absolute arg.
url = self.proxy.generate_temp_url(self.url, '2014-05-13T17:53:20Z',
self.method,
temp_url_key=self.key,
absolute=True)
self.assertEqual(url, self.expected_url)
lt = time.localtime()
expires = time.strftime(self.expires_iso8601_format[:-1], lt)
if not isinstance(self.expected_url, str):
expected_url = self.expected_url.replace(
b'1400003600', bytes(str(int(time.mktime(lt))),
encoding='ascii'))
else:
expected_url = self.expected_url.replace(
'1400003600', str(int(time.mktime(lt))))
url = self.proxy.generate_temp_url(self.url, expires,
self.method,
temp_url_key=self.key)
self.assertEqual(url, expected_url)
expires = time.strftime(self.short_expires_iso8601_format, lt)
lt = time.strptime(expires, self.short_expires_iso8601_format)
if not isinstance(self.expected_url, str):
expected_url = self.expected_url.replace(
b'1400003600', bytes(str(int(time.mktime(lt))),
encoding='ascii'))
else:
expected_url = self.expected_url.replace(
'1400003600', str(int(time.mktime(lt))))
url = self.proxy.generate_temp_url(self.url, expires,
self.method,
temp_url_key=self.key)
self.assertEqual(url, expected_url)
@mock.patch('hmac.HMAC')
@mock.patch('time.time', return_value=1400000000)
def test_generate_temp_url_iso8601_output(self, time_mock, hmac_mock):
hmac_mock().hexdigest.return_value = 'temp_url_signature'
url = self.proxy.generate_temp_url(self.url, self.seconds,
self.method,
temp_url_key=self.key,
iso8601=True)
key = self.key
if not isinstance(key, bytes):
key = key.encode('utf-8')
expires = time.strftime(self.expires_iso8601_format,
time.gmtime(1400003600))
if not isinstance(self.url, str):
self.assertTrue(url.endswith(bytes(expires, 'utf-8')))
else:
self.assertTrue(url.endswith(expires))
self.assertEqual(hmac_mock.mock_calls, [
mock.call(),
mock.call(key, self.expected_body, sha1),
mock.call().hexdigest(),
])
self.assertIsInstance(url, type(self.url))
@mock.patch('hmac.HMAC')
@mock.patch('time.time', return_value=1400000000)
def test_generate_temp_url_prefix(self, time_mock, hmac_mock):
hmac_mock().hexdigest.return_value = 'temp_url_signature'
prefixes = ['', 'o', 'p0/p1/']
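        # an empty prefix signs every object in the container; longer
        # prefixes narrow what the signature grants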
for p in prefixes:
hmac_mock.reset_mock()
path = '/v1/AUTH_account/c/' + p
expected_url = path + ('?temp_url_sig=temp_url_signature'
'&temp_url_expires=1400003600'
'&temp_url_prefix=' + p)
expected_body = '\n'.join([
self.method,
'1400003600',
'prefix:' + path,
]).encode('utf-8')
url = self.proxy.generate_temp_url(
path, self.seconds, self.method, prefix=True,
temp_url_key=self.key)
key = self.key
if not isinstance(key, bytes):
key = key.encode('utf-8')
self.assertEqual(url, expected_url)
self.assertEqual(hmac_mock.mock_calls, [
mock.call(key, expected_body, sha1),
mock.call().hexdigest(),
])
self.assertIsInstance(url, type(path))
def test_generate_temp_url_invalid_path(self):
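        # b'\xff' cannot be decoded as UTF-8, so the path cannot be signed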
self.assertRaisesRegex(
ValueError,
'path must be representable as UTF-8',
self.proxy.generate_temp_url, b'/v1/a/c/\xff', self.seconds,
self.method, temp_url_key=self.key)
@mock.patch('hmac.HMAC.hexdigest', return_value="temp_url_signature")
def test_generate_absolute_expiry_temp_url(self, hmac_mock):
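        # 2146636800 is an absolute Unix timestamp (early 2038), not a
        # delta from now, hence absolute=True below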
if isinstance(self.expected_url, bytes):
expected_url = self.expected_url.replace(
b'1400003600', b'2146636800')
else:
expected_url = self.expected_url.replace(
u'1400003600', u'2146636800')
url = self.proxy.generate_temp_url(
self.url, 2146636800, self.method, absolute=True,
temp_url_key=self.key)
self.assertEqual(url, expected_url)
def test_generate_temp_url_bad_time(self):
for bad_time in ['not_an_int', -1, 1.1, '-1', '1.1', '2015-05',
'2015-05-01T01:00']:
self.assertRaisesRegex(
ValueError, self.time_errmsg,
self.proxy.generate_temp_url, self.url, bad_time,
self.method, temp_url_key=self.key)
def test_generate_temp_url_bad_path(self):
for bad_path in ['/v1/a/c', 'v1/a/c/o', 'blah/v1/a/c/o', '/v1//c/o',
'/v1/a/c/', '/v1/a/c']:
self.assertRaisesRegex(
ValueError, self.path_errmsg,
self.proxy.generate_temp_url, bad_path, 60, self.method,
temp_url_key=self.key)
class TestTempURLUnicodePathAndKey(TestTempURL):
url = u'/v1/\u00e4/c/\u00f3'
key = u'k\u00e9y'
expected_url = (u'%s?temp_url_sig=temp_url_signature'
u'&temp_url_expires=1400003600') % url
expected_body = u'\n'.join([
u'GET',
u'1400003600',
url,
]).encode('utf-8')
class TestTempURLUnicodePathBytesKey(TestTempURL):
url = u'/v1/\u00e4/c/\u00f3'
key = u'k\u00e9y'.encode('utf-8')
expected_url = (u'%s?temp_url_sig=temp_url_signature'
u'&temp_url_expires=1400003600') % url
expected_body = '\n'.join([
u'GET',
u'1400003600',
url,
]).encode('utf-8')
class TestTempURLBytesPathUnicodeKey(TestTempURL):
url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
key = u'k\u00e9y'
expected_url = url + (b'?temp_url_sig=temp_url_signature'
b'&temp_url_expires=1400003600')
expected_body = b'\n'.join([
b'GET',
b'1400003600',
url,
])
class TestTempURLBytesPathAndKey(TestTempURL):
url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
key = u'k\u00e9y'.encode('utf-8')
expected_url = url + (b'?temp_url_sig=temp_url_signature'
b'&temp_url_expires=1400003600')
expected_body = b'\n'.join([
b'GET',
b'1400003600',
url,
])
class TestTempURLBytesPathAndNonUtf8Key(TestTempURL):
url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
key = b'k\xffy'
expected_url = url + (b'?temp_url_sig=temp_url_signature'
b'&temp_url_expires=1400003600')
expected_body = b'\n'.join([
b'GET',
b'1400003600',
url,
])
| 38.132597
| 79
| 0.569931
|
c9863b17cfea0fe5768b4af2497e1fa271531beb
| 19,586
|
py
|
Python
|
src/env_cylc.py
|
NCAR/CESM-WF
|
7a04ece110dbb3c84bcbd823e66f5d866a83720d
|
[
"Unlicense"
] | 5
|
2017-09-15T18:43:27.000Z
|
2020-08-14T04:04:34.000Z
|
src/env_cylc.py
|
NCAR/CESM-WF
|
7a04ece110dbb3c84bcbd823e66f5d866a83720d
|
[
"Unlicense"
] | 2
|
2016-11-18T20:54:30.000Z
|
2016-11-29T22:57:58.000Z
|
src/env_cylc.py
|
NCAR/CESM-WF
|
7a04ece110dbb3c84bcbd823e66f5d866a83720d
|
[
"Unlicense"
] | 7
|
2017-03-19T07:39:45.000Z
|
2020-11-05T20:16:44.000Z
|
import os, sys, subprocess, glob
import math
from standard_script_setup import *
from CIME.case import Case
from CIME.utils import transform_vars
from CIME.XML.batch import Batch
class EnvCylc():
def __init__(self):
self.env = {}
self.ptile = None
self.total_tasks = None
self.tasks_per_node = None
def get_date(self, runDir):
dates = {}
rpointers = glob.glob(str(runDir)+'/rpointer.*')
if len(rpointers) < 1:
print 'Could not find any rpointer files in: ',runDir
print 'You need to have rpointer files and the corresponding restart files if you have CONTINUE_RUN set to TRUE.'
sys.exit(1)
for rp in rpointers:
f = open(rp,'r')
for line in f:
if '.nc' in line:
dates[rp] = {}
if './' in line:
dates[rp]['fn'] = (str(runDir)+'/'+line[2:]).strip()
else:
dates[rp]['fn'] = (str(runDir)+'/'+line).strip()
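                # e.g. 'case.cpl.r.0001-01-01-00000.nc': take the
                # second-to-last dot field and drop the trailing '-SSSSS'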
dates[rp]['date'] = line.split('.')[-2][:-6]
f.close()
sd = 'null'
for d,v in dates.iteritems():
if not os.path.isfile(v['fn']):
print 'Restart file does not exist: ',v['fn']
print 'This was pointed to by: ',d
print 'Check rpointer files for errors.'
sys.exit(1)
if sd == 'null':
sd = v['date']
else:
if sd != v['date']:
print 'Check rpointer files, detected an inconsistency.'
print 'No Cylc workflow will be created.'
sys.exit(1)
return sd
def get_tseries_info(self,pp_dir,stop_n, stop_option):
import xml.etree.ElementTree as ET
xml_tree = ET.ElementTree()
tpers = ['hour', 'day', 'month', 'year']
sizes = [1, 24, 720, 8760]
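        # approximate hours per tper unit, so intervals can be compared on
        # a common hourly scale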
i_tper = len(tpers)-1
i_size = 999
s_size = i_size * 365 * 24
xml_tree.parse(pp_dir+'/env_timeseries.xml')
for comp_archive_spec in xml_tree.findall("components/comp_archive_spec"):
for file_spec in comp_archive_spec.findall("files/file_extension"):
if file_spec.find("tseries_create") is not None:
tseries_create = file_spec.find("tseries_create").text
if tseries_create.upper() in ["T","TRUE"]:
if file_spec.find("tseries_filecat_tper") is not None:
tper = file_spec.find("tseries_filecat_tper").text
if file_spec.find("tseries_filecat_n") is not None:
size = file_spec.find("tseries_filecat_n").text
s = size.split(',')
t = tper.split(',')
for it in range(0,len(t)):
for i in range(0,len(tpers)):
if tpers[i] in t[it]:
c_size = int(sizes[i])*int(s[it])
if c_size < s_size:
i_tper = i
i_size = s[it]
s_size = c_size
        # Don't let this interval be shorter than the CESM run length; if it is,
        # fall back to the CESM stop_n and stop_option
for i in range(0,len(tpers)):
if tpers[i] in stop_option:
c_size = int(sizes[i])*int(stop_n)
if c_size > s_size:
i_size = stop_n
i_tper = i
return tpers[i_tper],i_size
def get_tseries_resubmit(self, ts_tper, ts_n, stop_n, stop_option):
tpers = ['hour', 'day', 'month', 'year']
sizes = [1, 24, 720, 8760]
ts = 0
cesm = 0
if ts_tper not in stop_option and stop_option not in ts_tper:
for i in range(0,len(tpers)):
if tpers[i] in ts_tper:
ts = int(ts_n) * sizes[i]
if tpers[i] in stop_option:
cesm = int(stop_n) * sizes[i]
else:
ts = ts_n
cesm = stop_n
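        # ceiling division, e.g. ts=24 and cesm=10 -> resubmit every 3 runs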
if int(ts)%int(cesm) > 0:
freq = (int(ts)/int(cesm))+1
else:
freq = (int(ts)/int(cesm))
return freq
def get_env(self, my_case, debug):
case = Case(my_case, read_only=False)
cwd = os.getcwd()
os.chdir(my_case)
machine_name = case.get_value('MACH')
print 'Running on ',machine_name
batch_system = case.get_value("BATCH_SYSTEM")
batch = Batch(batch_system=batch_system, machine=machine_name)
env_batch = case.get_env("batch")
os.chdir(cwd)
directives = {}
num_nodes = case.num_nodes
bjobs = batch.get_batch_jobs()
# for job, jsect in bjobs:
# job_ = str.replace(job,'.','_')
# directives[job_] = []
#task_count = jsect["task_count"]
#task_count = env_batch.get_value("task_count", subgroup=job)
# models = case.get_values("COMP_CLASSES")
# env_mach_pes = case.get_env("mach_pes")
# #task_count = env_mach_pes.get_total_tasks(models)
# ptile = case.get_value("PES_PER_NODE")
# self.num_nodes = case.num_nodes
# self.thread_count = case.thread_count
#task_count = jsect["task_count"] if "task_count" in jsect else env_mach_pes.get_total_tasks(models)
# task_count = case.get_value("TOTALPES")*int(case.thread_count)
# if task_count == "default":
# models = case.get_values("COMP_CLASSES")
# env_mach_pes = case.get_env("mach_pes")
# task_count = env_mach_pes.get_total_tasks(models)
# ptile = case.get_value("PES_PER_NODE")
# self.num_nodes = case.num_nodes
# self.thread_count = case.thread_count
# else:
# ptile = 4
# self.num_nodes = 1
# self.thread_count = 1
# self.ptile = ptile
# self.total_tasks = task_count
# self.tasks_per_node = ptile
# queue = env_batch.select_best_queue(int(task_count),job=job)
# if queue is None:
# queue = env_batch.select_best_queue(task_count,job)
# all_queue = []
# all_queue.append(env_batch.get_default_queue())
# all_queue = all_queue + env_batch.get_all_queues()
# queue = None
# Add back in when cime is frozen
# for q in all_queue:
# if q is not None:
# if queue is None:
# queue = q.xml_element.text
# wall_time=None
#wall_time = env_batch.get_max_walltime(queue) if wall_time is None else wall_time
# wall_time = env_batch.get_queue_specs(queue)[3] if wall_time is None else wall_time
# env_batch.set_value("JOB_WALLCLOCK_TIME", wall_time, subgroup=job)
# env_batch.set_value("JOB_QUEUE", queue, subgroup=job)
#direct = ''
#ds = env_batch.get_batch_directives(case, job, raw=True)
# overrides = {"total_tasks": int(task_count),"num_nodes":int(math.ceil(float(task_count)/float(case.tasks_per_node)))}
# overrides["job_id"] = case.get_value("CASE") + os.path.splitext(job)[1]
# overrides["batchdirectives"] = env_batch.get_batch_directives(case, job, overrides=overrides)
# ds = env_batch.get_batch_directives(case, job, overrides=overrides)
# dss = ds.split('\n')
# for d in dss:
# direct = direct + transform_vars(d, case=case, subgroup=job)
#direct = direct + transform_vars(d, case=case, subgroup=job, check_members=self)
# s = env_batch.get_submit_args(case, job)
# bd = env_batch.get_batch_directives(case, job, overrides=overrides)
# Add this back in when cime is more stable
# if "run" not in job_:
# direct = direct.replace(bd,'')
# direct = direct + s
# direct = direct.replace('-', '\n-')
# direct = direct.split('\n')
# for d in direct:
# d.lstrip()
# d.strip()
# if '#PBS' in d:
# d=d.replace("#PBS",'')
# d = d.split(' ')
# d=' '.join(d).split()
# if len(d) == 2:
# if ' ' not in d[0] and ' ' not in d[1] and 'walltime' not in d[1]:
# directives[job_].append(d[0]+' = '+d[1])
#### Start temp code to get pbs directives from case.run
#directives[job_] = []
# if 'st_archive' in job_:
# directives[job_].append("-A = "+os.getenv('PROJECT'))
# directives[job_].append("-q = regular")
# with open(my_case+"/case.st_archive") as f:
# for l in f:
# if '#PBS' in l:
# pbs_split = l.split()
# if len(pbs_split) == 3:
# directives[job_].append(pbs_split[1]+" = "+pbs_split[2])
# else:
# print '***************************'
# print 'Opening '+my_case+"/.case.run"
# print '***************************'
# with open(my_case+"/.case.run") as f:
# directives[job_].append("-A = "+os.getenv('PROJECT'))
# directives[job_].append("-q = regular")
# for l in f:
# if '#PBS' in l:
# pbs_split = l.split()
# if len(pbs_split) == 3:
# directives[job_].append(pbs_split[1]+" = "+pbs_split[2])
directives['case_st_archive'] = []
directives['case_st_archive'].append("-A = "+os.getenv('PROJECT'))
directives['case_st_archive'].append("-q = regular")
with open(my_case+"/case.st_archive") as f:
for l in f:
if '#PBS' in l:
pbs_split = l.split()
if len(pbs_split) == 3:
directives['case_st_archive'].append(pbs_split[1]+" = "+pbs_split[2])
directives['case_run'] = []
with open(my_case+"/.case.run") as f:
directives['case_run'].append("-A = "+os.getenv('PROJECT'))
directives['case_run'].append("-q = regular")
for l in f:
if '#PBS' in l:
pbs_split = l.split()
if len(pbs_split) == 3:
directives['case_run'].append(pbs_split[1]+" = "+pbs_split[2])
#### End temp code to get pbs directives from case.run
self.env['machine_name'] = machine_name
self.env['batch_type'] = env_batch.get_batch_system_type()
self.env['directives'] = directives
self.env['STOP_N'] = case.get_value("STOP_N")
self.env['RESUBMIT'] = case.get_value("RESUBMIT")
self.env['STOP_OPTION'] = case.get_value('STOP_OPTION')
self.env['DOUT_S'] = case.get_value('DOUT_S')
self.env['DOUT_L_MS'] = case.get_value('DOUT_L_MS')
self.env['CASEROOT'] = case.get_value('CASEROOT')
self.env['CASE'] = case.get_value('CASE')
self.env['RUNDIR'] = case.get_value('RUNDIR')
self.env['CESMSCRATCHROOT'] = case.get_value('CIME_OUTPUT_ROOT')
self.env['USER'] = case.get_value('USER')
cont_run = case.get_value('CONTINUE_RUN')
if not cont_run:
start = case.get_value('RUN_STARTDATE')
else:
start = self.get_date(self.env['RUNDIR'])
if debug is True:
valid = True
self.env['RUN_STARTDATE'] = start
else:
valid = False
while not valid:
choice = str(raw_input("Use start date "+start+"? y/n \n"))
if choice == 'Y' or choice == 'y':
valid = True
self.env['RUN_STARTDATE'] = start
elif choice == 'N' or choice == 'n':
valid = True
user_date = str(raw_input("Enter new date (format yyyy-mm-dd):\n"))
self.env['RUN_STARTDATE'] = user_date
#case.set_value("RUN_WITH_SUBMIT", True)
if os.path.isdir(my_case+'/postprocess/'):
pp_dir = my_case+'/postprocess/'
os.chdir(pp_dir)
# get pp directives
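            # pp_config emits one 'flag value' pair per line; walltime
            # entries are filtered out below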
comps = ['atm', 'ocn', 'lnd', 'ice']
diag_t = ['diagnostics', 'averages']
for c in comps:
for d in diag_t:
job = c+"_"+d
directives[job] = []
output = subprocess.check_output('./pp_config --getbatch '+d+' --machine '+machine_name+' -comp '+c, shell=True)
output_s = output.split('\n')
for o in output_s:
o_s = o.split()
if len(o_s) > 1:
if 'walltime' not in o_s[1]:
directives[job].append(o_s[0]+' = '+o_s[1])
# get pp for timeseries and xconform
tools = ['xconform', 'timeseries']
for t in tools:
directives[t]=[]
output = subprocess.check_output('./pp_config --getbatch '+t+' --machine '+machine_name, shell=True)
output_s = output.split('\n')
for o in output_s:
o_s = o.split()
if len(o_s) > 1:
if 'walltime' not in o_s[1]:
directives[t].append(o_s[0]+' = '+o_s[1])
self.env['GENERATE_TIMESERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_TIMESERIES', shell=True)
self.env['TIMESERIES_TPER'],self.env['TIMESERIES_N'] = self.get_tseries_info(pp_dir,self.env['STOP_N'],self.env['STOP_OPTION'])
self.env['TIMESERIES_RESUBMIT'] = self.get_tseries_resubmit(self.env['TIMESERIES_TPER'],self.env['TIMESERIES_N'],
self.env['STOP_N'],self.env['STOP_OPTION'])
self.env['STANDARDIZE_TIMESERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get STANDARDIZE_TIMESERIES', shell=True)
self.env['GENERATE_AVGS_ATM'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_AVGS_ATM', shell=True)
self.env['GENERATE_DIAGS_ATM'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_DIAGS_ATM', shell=True)
self.env['ATMDIAG_test_first_yr'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ATMDIAG_test_first_yr', shell=True)
self.env['ATMDIAG_test_nyrs'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ATMDIAG_test_nyrs', shell=True)
self.env['ATMDIAG_TEST_TIMESERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ATMDIAG_TEST_TIMESERIES', shell=True)
self.env['GENERATE_AVGS_OCN'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_AVGS_OCN', shell=True)
self.env['GENERATE_DIAGS_OCN'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_DIAGS_OCN', shell=True)
self.env['OCNDIAG_YEAR0'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get OCNDIAG_YEAR0', shell=True)
self.env['OCNDIAG_YEAR1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get OCNDIAG_YEAR1', shell=True)
self.env['OCNDIAG_TSERIES_YEAR0'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get OCNDIAG_TSERIES_YEAR0', shell=True)
self.env['OCNDIAG_TSERIES_YEAR1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get OCNDIAG_TSERIES_YEAR1', shell=True)
self.env['OCNDIAG_MODELCASE_INPUT_TSERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get OCNDIAG_MODELCASE_INPUT_TSERIES', shell=True)
self.env['GENERATE_AVGS_LND'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_AVGS_LND', shell=True)
self.env['GENERATE_DIAGS_LND'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_DIAGS_LND', shell=True)
self.env['LNDDIAG_clim_first_yr_1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get LNDDIAG_clim_first_yr_1', shell=True)
self.env['LNDDIAG_clim_num_yrs_1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get LNDDIAG_clim_num_yrs_1', shell=True)
self.env['LNDDIAG_trends_first_yr_1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get LNDDIAG_trends_first_yr_1', shell=True)
self.env['LNDDIAG_trends_num_yrs_1'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get LNDDIAG_trends_num_yrs_1', shell=True)
self.env['LNDDIAG_CASE1_TIMESERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get LNDDIAG_CASE1_TIMESERIES', shell=True)
self.env['GENERATE_AVGS_ICE'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_AVGS_ICE', shell=True)
self.env['GENERATE_DIAGS_ICE'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get GENERATE_DIAGS_ICE', shell=True)
self.env['ICEDIAG_BEGYR_DIFF'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_BEGYR_DIFF', shell=True)
self.env['ICEDIAG_ENDYR_DIFF'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_ENDYR_DIFF', shell=True)
self.env['ICEDIAG_DIFF_TIMESERIES'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_DIFF_TIMESERIES', shell=True)
self.env['ICEDIAG_BEGYR_CONT'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_BEGYR_CONT', shell=True)
self.env['ICEDIAG_ENDYR_CONT'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_ENDYR_CONT', shell=True)
self.env['ICEDIAG_YRS_TO_AVG'] = subprocess.check_output('./pp_config -value -caseroot '+pp_dir+' --get ICEDIAG_YRS_TO_AVG', shell=True)
os.chdir(cwd)
        # Capitalize all true/false values
        for k, v in self.env.iteritems():
            if 'directive' not in k:
                v = str(v).strip()
                self.env[k] = v
                if v.lower() in ('true', 'false'):
                    self.env[k] = v.upper()
return self.env
def get_env(case_dir, debug=False):
cylc = EnvCylc()
env = cylc.get_env(case_dir, debug)
return env
#for k in env.keys():
# print k,': ',env[k]
| 50.220513
| 174
| 0.542632
|
d8898dda91e4636fcae4c67323c078a3abbb400d
| 842
|
py
|
Python
|
python/getImages.py
|
Short-T/CS3103
|
6b5b42c2347be013ef255d6f021170d9419088b2
|
[
"Apache-2.0"
] | null | null | null |
python/getImages.py
|
Short-T/CS3103
|
6b5b42c2347be013ef255d6f021170d9419088b2
|
[
"Apache-2.0"
] | null | null | null |
python/getImages.py
|
Short-T/CS3103
|
6b5b42c2347be013ef255d6f021170d9419088b2
|
[
"Apache-2.0"
] | 1
|
2022-03-22T17:01:05.000Z
|
2022-03-22T17:01:05.000Z
|
#!/bin/env python3
#
# Improved demonstration of pymysql to call a stored procedure
# Logan Davidson, March 19 2022
#
import pymysql.cursors
import settings
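# settings is expected to define DBHOST, DBUSER, DBPASSWD and DBDATABASE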
# Make the connection (keyword arguments; newer PyMySQL releases reject
# positional connect() arguments)
dbConnection = pymysql.connect(host=settings.DBHOST,
                               user=settings.DBUSER,
                               password=settings.DBPASSWD,
                               database=settings.DBDATABASE,
                               charset='utf8mb4',
                               cursorclass=pymysql.cursors.DictCursor)
sqlProcName = 'getImages'
# Run query and get result
try:
cursor = dbConnection.cursor()
cursor.callproc(sqlProcName)
dbConnection.commit()
# We get all of the results and then iterate through them.
results = cursor.fetchall()
    # TODO: figure out what to do for images; for now just print path and name
for row in results:
print ("%s, %s" % (row["ImageFilePath"], row["ImageFileName"]) )
except pymysql.MySQLError as e:
# failure
print(e)
finally:
    # Close the connection
dbConnection.close()
# End.
| 22.157895
| 66
| 0.731591
|
7376d4e6bdda291944233e705bad47fb9cdaac51
| 375,826
|
py
|
Python
|
tests/test_datasets.py
|
davesteps/kluster
|
35ffd9120cf46f01cc706f1fe8b466ef3801382d
|
[
"CC0-1.0"
] | null | null | null |
tests/test_datasets.py
|
davesteps/kluster
|
35ffd9120cf46f01cc706f1fe8b466ef3801382d
|
[
"CC0-1.0"
] | null | null | null |
tests/test_datasets.py
|
davesteps/kluster
|
35ffd9120cf46f01cc706f1fe8b466ef3801382d
|
[
"CC0-1.0"
] | null | null | null |
import logging
import numpy as np
import xarray as xr
from HSTB.kluster import xarray_conversion
class SyntheticFqpr:
"""
DEPRECATED - moved to just copying and pasting records to get a test dataset.
Class holding the synthetic data that I came up with. You can pass in new attitude or mounting angle values
to see the result. I tried to have it encompass multiple sectors and times to test out the interp and rotation
stuff.
"""
def __init__(self, synth_time=0, synth_heave=0, synth_roll=0, synth_pitch=0, synth_yaw=90, synth_tx_mountroll=0,
synth_tx_mountpitch=0, synth_tx_mountyaw=0, synth_rx_mountroll=0, synth_rx_mountpitch=0,
synth_rx_mountyaw=0, synth_tx_x=0, synth_tx_y=0, synth_tx_z=0, synth_rx_x=0, synth_rx_y=0,
synth_rx_z=0, secs=('999',)):
self.client = None
self.synth_time = synth_time
self.synth_heave = synth_heave
self.synth_roll = synth_roll
self.synth_pitch = synth_pitch
self.synth_yaw = synth_yaw
self.synth_tx_mountroll = synth_tx_mountroll
self.synth_tx_mountpitch = synth_tx_mountpitch
self.synth_tx_mountyaw = synth_tx_mountyaw
self.synth_rx_mountroll = synth_rx_mountroll
self.synth_rx_mountpitch = synth_rx_mountpitch
self.synth_rx_mountyaw = synth_rx_mountyaw
self.secs = secs
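        # xyzrph holds installation parameters (lever arms x/y/z, mount
        # angles r/p/h) keyed by the timestamp they take effect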
self.xyzrph = {'antenna_x': {str(synth_time): '0.000'}, 'antenna_y': {str(synth_time): '0.000'},
'antenna_z': {str(synth_time): '0.000'}, 'imu_h': {str(synth_time): '0.000'},
'latency': {str(synth_time): '0.000'}, 'imu_p': {str(synth_time): '0.000'},
'imu_r': {str(synth_time): '0.000'}, 'imu_x': {str(synth_time): '0.000'},
'imu_y': {str(synth_time): '0.000'}, 'imu_z': {str(synth_time): '0.000'},
'rx_r': {str(synth_time): synth_rx_mountroll}, 'rx_p': {str(synth_time): synth_rx_mountpitch},
'rx_h': {str(synth_time): synth_rx_mountyaw}, 'rx_x': {str(synth_time): synth_rx_x},
'rx_y': {str(synth_time): synth_rx_y}, 'rx_z': {str(synth_time): synth_rx_z},
'rx_x_0': {str(synth_time): '0.204'}, 'rx_x_1': {str(synth_time): '0.204'},
'rx_x_2': {str(synth_time): '0.204'}, 'rx_y_0': {str(synth_time): '0.0'},
'rx_y_1': {str(synth_time): '0.0'}, 'rx_y_2': {str(synth_time): '0.0'},
'rx_z_0': {str(synth_time): '-0.0315'}, 'rx_z_1': {str(synth_time): '-0.0315'},
'rx_z_2': {str(synth_time): '-0.0315'}, 'tx_r': {str(synth_time): synth_tx_mountroll},
'tx_p': {str(synth_time): synth_tx_mountpitch}, 'tx_h': {str(synth_time): synth_tx_mountyaw},
'tx_x': {str(synth_time): synth_tx_x}, 'tx_y': {str(synth_time): synth_tx_y},
'tx_z': {str(synth_time): synth_tx_z}, 'tx_x_0': {str(synth_time): '0.002'},
'tx_x_1': {str(synth_time): '0.002'}, 'tx_x_2': {str(synth_time): '0.002'},
'tx_y_0': {str(synth_time): '-0.1042'}, 'tx_y_1': {str(synth_time): '0.0'},
'tx_y_2': {str(synth_time): '0.1042'}, 'tx_z_0': {str(synth_time): '-0.0149'},
'tx_z_1': {str(synth_time): '-0.006'}, 'tx_z_2': {str(synth_time): '-0.0149'},
'waterline': {str(synth_time): '0.200'}}
self.raw_ping = self.construct_raw_ping()
self.raw_att = self.construct_rawattitude()
def construct_raw_ping(self):
"""
        Take the provided synthetic data built into this class and generate new raw_ping data. Values will be
        distributed evenly across beam angle/travel time.
Returns
-------
dataset: list of xarray DataSet objects that represent the raw_ping data you would get when running
xarray_conversion normally.
"""
tme_vals = [self.synth_time + 1, self.synth_time + 3]
sec_vals = self.secs
bm_vals = [i for i in range(9)]
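        # two pings, nine beams: angles sweep 80..-80 deg with travel
        # times symmetric about nadir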
tme_coord = xr.DataArray(np.array(tme_vals), dims=['time'], coords=np.array([tme_vals]))
beam_coord = xr.DataArray(np.array(bm_vals), dims=['beam'],
coords=np.array([bm_vals]))
dataset = []
for sec in sec_vals:
delay_data = [0] * len(tme_vals)
delay = xr.DataArray(np.array(delay_data), dims=['time'], coords={'time': tme_coord})
ntx_data = [1] * len(tme_vals)
ntx = xr.DataArray(np.array(ntx_data), dims=['time'], coords={'time': tme_coord})
tiltangle_data = [0.5] * len(tme_vals)
tiltangle = xr.DataArray(np.array(tiltangle_data), dims=['time'], coords={'time': tme_coord})
twoway_travel_time_data = [[0.5, 0.4, 0.3, 0.2, 0.1, 0.2, 0.3, 0.4, 0.5]] * len(tme_vals)
twoway_travel_time = xr.DataArray(np.array(twoway_travel_time_data), dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
beam_pointing_angle_data = [[80, 60, 40, 20, 0, -20, -40, -60, -80]] * len(tme_vals)
beam_pointing_angle = xr.DataArray(np.array(beam_pointing_angle_data), dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
dataset.append(xr.Dataset({'delay': (['time'], delay.data), 'ntx': (['time'], ntx.data), # use the underlying numpy array to avoid problems in xarray 0.19.0
'tiltangle': (['time'], tiltangle.data),
'traveltime': (['time', 'beam'], twoway_travel_time.data),
'beampointingangle': (['time', 'beam'], beam_pointing_angle.data)},
coords={'time': tme_coord.data, 'beam': beam_coord.data},
attrs={'system_serial_number': ['999'], 'sector_identifier': sec}).chunk())
return dataset
def construct_rawattitude(self):
"""
Take the provided synthetic data built into this class and generate new attitude data.
Returns
-------
dataset: xarray DataSet object that represents the attitude data you would get when running
xarray_conversion normally.
"""
tme_vals = [self.synth_time, self.synth_time + 2, self.synth_time + 4]
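        # attitude samples bracket the ping times (t+1, t+3) so time
        # interpolation is exercised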
tme_coord = xr.DataArray(np.array(tme_vals), dims=['time'], coords=np.array([tme_vals]))
heading = xr.DataArray(np.array([self.synth_yaw, self.synth_yaw + 2, self.synth_yaw + 3]), dims=['time'],
coords={'time': tme_coord})
heave = xr.DataArray(np.array([self.synth_heave, self.synth_heave + 2, self.synth_heave + 3]), dims=['time'],
coords={'time': tme_coord})
pitch = xr.DataArray(np.array([self.synth_pitch, self.synth_pitch + 2, self.synth_pitch + 3]), dims=['time'],
coords={'time': tme_coord})
roll = xr.DataArray(np.array([self.synth_roll, self.synth_roll + 2, self.synth_roll + 3]), dims=['time'],
coords={'time': tme_coord})
return xr.Dataset({'heading': (['time'], heading.data), 'heave': (['time'], heave.data), 'pitch': (['time'], pitch.data),
'roll': (['time'], roll.data)}, coords={'time': tme_coord.data}).chunk()
class RealDualheadFqpr:
"""
Class holding the real data that I took from a Hassler EM2040 dual head .all file (0022_20190716_232128_S250.all).
Covers a ping and the attitude values associated with them.
float(fq.multibeam.raw_ping[0].time[3853])
Out[11]: 1563319483.706
float(fq.multibeam.raw_ping[1].time[3826])
Out[12]: 1563319483.706001
rec389 = ad.getrecord(88, 7681)
rec394 = ad.getrecord(88, 7682)
"""
def __init__(self,
synth_ra_time=(1563319483.706, 1563319483.706001),
synth_att_time=(1563319483.506, 1563319483.516, 1563319483.526, 1563319483.536, 1563319483.546,
1563319483.556, 1563319483.566, 1563319483.576, 1563319483.586, 1563319483.596,
1563319483.606, 1563319483.616, 1563319483.626, 1563319483.636, 1563319483.646,
1563319483.656, 1563319483.666, 1563319483.6759999, 1563319483.6859999,
1563319483.6959999, 1563319483.7059999, 1563319483.7159998, 1563319483.726,
1563319483.736, 1563319483.746, 1563319483.756, 1563319483.766, 1563319483.776,
1563319483.786, 1563319483.796, 1563319483.806, 1563319483.816, 1563319483.826,
1563319483.836, 1563319483.846, 1563319483.856, 1563319483.866, 1563319483.876,
1563319483.886, 1563319483.896),
synth_nav_time=(1563319483.506, 1563319483.516, 1563319483.526, 1563319483.536, 1563319483.546,
1563319483.555, 1563319483.565, 1563319483.575, 1563319483.585, 1563319483.595,
1563319483.605, 1563319483.615, 1563319483.625, 1563319483.635, 1563319483.645,
1563319483.656, 1563319483.666, 1563319483.6759999, 1563319483.6859999,
1563319483.6959999, 1563319483.7059999, 1563319483.7159998, 1563319483.7259998,
1563319483.7359998, 1563319483.7459998, 1563319483.755, 1563319483.765, 1563319483.775,
1563319483.785, 1563319483.795, 1563319483.805, 1563319483.815, 1563319483.825,
                                 1563319483.835, 1563319483.845, 1563319483.856, 1563319483.866, 1563319483.876,
1563319483.886, 1563319483.896),
synth_txsector_beam=((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)),
synth_frequency=((290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000),
(290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000)),
synth_delay=((5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08),
(5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08,
5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08, 5.5501005e-08)),
synth_beampointingangle=((64.79, 64.729996, 64.67, 64.61, 64.549995, 64.49, 64.43, 64.369995, 64.31, 64.24,
64.18, 64.119995, 64.06, 64.0, 63.93, 63.87, 63.809998, 63.739998, 63.68, 63.609997,
63.55, 63.48, 63.41, 63.35, 63.28, 63.21, 63.149998, 63.079998, 63.01, 62.94,
62.87, 62.8, 62.73, 62.66, 62.59, 62.52, 62.449997, 62.379997, 62.309998, 62.23,
62.16, 62.09, 62.01, 61.94, 61.859997, 61.789997, 61.71, 61.64, 61.559998, 61.48,
61.399998, 61.329998, 61.25, 61.17, 61.09, 61.01, 60.93, 60.85, 60.77, 60.68,
60.6, 60.52, 60.43, 60.35, 60.27, 60.18, 60.09, 60.01, 59.92, 59.829998,
59.75, 59.66, 59.57, 59.48, 59.39, 59.3, 59.21, 59.12, 59.02, 58.93,
58.84, 58.739998, 58.649998, 58.55, 58.46, 58.359997, 58.26, 58.17, 58.07, 57.969997,
57.87, 57.77, 57.67, 57.559998, 57.46, 57.36, 57.25, 57.149998, 57.039997, 56.94,
56.829998, 56.719997, 56.62, 56.51, 56.399998, 56.289997, 56.18, 56.059998, 55.949997, 55.84,
55.719997, 55.61, 55.489998, 55.379997, 55.26, 55.14, 55.02, 54.899998, 54.78, 54.66,
54.539997, 54.41, 54.289997, 54.16, 54.039997, 53.91, 53.78, 53.649998, 53.52, 53.39,
53.26, 53.129997, 53.0, 52.86, 52.73, 52.59, 52.449997, 52.309998, 52.17, 52.03,
51.89, 51.75, 51.6, 51.46, 51.309998, 51.17, 51.02, 50.87, 50.719997, 50.57,
50.42, 50.26, 50.11, 49.95, 49.789997, 49.64, 49.48, 49.309998, 49.149998, 48.989998,
48.82, 48.66, 48.489998, 48.32, 48.149998, 47.98, 47.809998, 47.64, 47.46, 47.289997,
47.11, 46.93, 46.75, 46.57, 46.379997, 46.2, 46.01, 45.82, 45.629997, 45.44,
45.25, 45.059998, 44.86, 44.66, 44.469997, 44.27, 44.059998, 43.86, 43.66, 43.45,
43.239998, 43.03, 42.82, 42.61, 42.39, 42.18, 41.96, 41.739998, 41.52, 41.29,
41.07, 40.84, 40.61, 40.379997, 40.149998, 39.91, 39.68, 39.44, 39.2, 38.96,
38.71, 38.469997, 38.219997, 37.969997, 37.719997, 37.46, 37.21, 36.95, 36.69, 36.43,
36.16, 35.899998, 35.63, 35.36, 35.079998, 34.809998, 34.53, 34.25, 33.969997, 33.69,
33.399998, 33.12, 32.829998, 32.53, 32.239998, 31.939999, 31.64, 31.34, 31.039999, 30.73,
30.42, 30.109999, 29.8, 29.49, 29.17, 28.849998, 28.529999, 28.199999, 27.869999, 27.55,
27.21, 26.88, 26.539999, 26.21, 25.859999, 25.519999, 25.18, 24.83, 24.48, 24.119999,
23.769999, 23.41, 23.05, 22.689999, 22.32, 21.96, 21.59, 21.22, 20.84, 20.47,
20.09, 19.71, 19.32, 18.939999, 18.55, 18.16, 17.77, 17.38, 16.98, 16.58,
16.18, 15.78, 15.37, 14.969999, 14.559999, 14.15, 13.74, 13.32, 12.9, 12.49,
12.07, 11.639999, 11.219999, 10.79, 10.37, 9.94, 9.51, 9.08, 8.639999, 8.21,
7.77, 7.33, 6.89, 6.45, 6.0099998, 5.5699997, 5.12, 4.68, 4.23, 3.78,
3.33, 2.8799999, 2.4299998, 1.9799999, 1.53, 1.0799999, 0.62, 0.17, -0.28, -0.74,
-1.1899999, -1.65, -2.11, -2.56, -3.02, -3.48, -3.9299998, -4.39, -4.85, -5.2999997,
-5.7599998, -6.22, -6.67, -7.1299996, -7.5899997, -8.04, -8.5, -8.95, -9.4, -9.86,
-10.309999, -10.76, -11.21, -11.66, -12.11, -12.559999, -13.0, -13.45, -13.889999, -14.34,
-14.78, -15.219999, -15.66, -16.1, -16.539999, -16.97, -17.41, -17.84, -18.27, -18.699999,
-19.13, -19.55, -19.98, -20.4, -20.82, -21.24, -21.65, -22.07, -22.48, -22.89,
-23.3, -23.71, -24.109999, -24.519999, -24.92, -25.32, -25.71, -26.109999, -26.5, -26.89,
-27.279999, -27.66, -28.05, -28.43, -28.81, -29.179998, -29.56, -29.929998, -30.3, -30.67,
-31.029999, -31.4, -31.76, -32.12, -32.469997, -32.82, -33.18, -33.52, -33.87, -34.21),
(33.51, 33.18, 32.85, 32.51, 32.18, 31.84, 31.5, 31.15, 30.81, 30.46,
30.109999, 29.76, 29.41, 29.05, 28.689999, 28.33, 27.97, 27.609999, 27.24, 26.869999,
26.5, 26.13, 25.75, 25.38, 25.0, 24.619999, 24.24, 23.85, 23.47, 23.08,
22.689999, 22.3, 21.9, 21.51, 21.109999, 20.71, 20.31, 19.91, 19.5, 19.1,
18.69, 18.279999, 17.869999, 17.46, 17.039999, 16.63, 16.21, 15.79, 15.38, 14.96,
14.53, 14.11, 13.69, 13.259999, 12.83, 12.41, 11.98, 11.55, 11.12, 10.69,
10.26, 9.82, 9.389999, 8.96, 8.5199995, 8.09, 7.6499996, 7.21, 6.7799997, 6.3399997,
5.9, 5.47, 5.0299997, 4.5899997, 4.15, 3.7099998, 3.28, 2.84, 2.3999999, 1.9599999,
1.52, 1.0799999, 0.65, 0.21, -0.22999999, -0.65999997, -1.1, -1.53, -1.9699999, -2.3999999,
-2.84, -3.27, -3.6999998, -4.13, -4.56, -4.99, -5.42, -5.85, -6.27, -6.7,
-7.12, -7.54, -7.97, -8.389999, -8.8, -9.22, -9.639999, -10.05, -10.469999, -10.88,
-11.29, -11.7, -12.11, -12.509999, -12.92, -13.32, -13.719999, -14.12, -14.5199995, -14.91,
-15.299999, -15.7, -16.09, -16.47, -16.859999, -17.25, -17.63, -18.01, -18.39, -18.76,
-19.14, -19.51, -19.88, -20.25, -20.609999, -20.98, -21.34, -21.699999, -22.06, -22.41,
-22.769999, -23.119999, -23.47, -23.81, -24.16, -24.5, -24.84, -25.18, -25.519999, -25.849998,
-26.18, -26.51, -26.84, -27.17, -27.49, -27.81, -28.13, -28.449999, -28.76, -29.07,
-29.38, -29.689999, -30.0, -30.3, -30.599998, -30.9, -31.199999, -31.49, -31.789999, -32.079998,
-32.37, -32.649998, -32.94, -33.219997, -33.5, -33.78, -34.059998, -34.329998, -34.6, -34.87,
-35.14, -35.41, -35.67, -35.93, -36.19, -36.45, -36.71, -36.96, -37.219997, -37.469997,
-37.719997, -37.96, -38.21, -38.45, -38.69, -38.93, -39.17, -39.41, -39.64, -39.87,
-40.1, -40.329998, -40.559998, -40.79, -41.01, -41.23, -41.45, -41.67, -41.89, -42.1,
-42.32, -42.53, -42.739998, -42.95, -43.149998, -43.36, -43.559998, -43.77, -43.969997, -44.17,
-44.37, -44.559998, -44.76, -44.95, -45.14, -45.329998, -45.52, -45.71, -45.899998, -46.079998,
-46.26, -46.45, -46.629997, -46.809998, -46.989998, -47.16, -47.34, -47.51, -47.68, -47.86,
-48.03, -48.19, -48.36, -48.53, -48.69, -48.86, -49.02, -49.18, -49.34, -49.5,
-49.66, -49.82, -49.969997, -50.129997, -50.28, -50.43, -50.59, -50.739998, -50.879997, -51.03,
-51.18, -51.329998, -51.469997, -51.61, -51.76, -51.899998, -52.039997, -52.18, -52.32, -52.46,
-52.59, -52.73, -52.86, -53.0, -53.129997, -53.26, -53.39, -53.52, -53.649998, -53.78,
-53.91, -54.039997, -54.16, -54.289997, -54.41, -54.53, -54.66, -54.78, -54.899998, -55.02,
-55.14, -55.26, -55.37, -55.489998, -55.61, -55.719997, -55.84, -55.949997, -56.059998, -56.17,
-56.289997, -56.399998, -56.51, -56.61, -56.719997, -56.829998, -56.94, -57.039997, -57.149998, -57.25,
-57.36, -57.46, -57.57, -57.67, -57.77, -57.87, -57.969997, -58.07, -58.17, -58.27,
-58.37, -58.46, -58.559998, -58.649998, -58.75, -58.84, -58.94, -59.03, -59.129997, -59.219997,
-59.309998, -59.399998, -59.489998, -59.579998, -59.67, -59.76, -59.85, -59.94, -60.02, -60.109997,
-60.199997, -60.28, -60.37, -60.449997, -60.539997, -60.62, -60.699997, -60.789997, -60.87, -60.949997,
-61.03, -61.109997, -61.19, -61.27, -61.35, -61.43, -61.51, -61.59, -61.66, -61.739998,
-61.82, -61.89, -61.969997, -62.039997, -62.12, -62.19, -62.27, -62.34, -62.41, -62.489998,
-62.559998, -62.629997, -62.699997, -62.77, -62.85, -62.92, -62.989998, -63.059998, -63.12, -63.19,
-63.26, -63.329998, -63.399998, -63.46, -63.53, -63.6, -63.66, -63.73, -63.8, -63.859997,
-63.92, -63.989998, -64.049995, -64.119995, -64.18, -64.24, -64.31, -64.369995, -64.43, -64.49)),
synth_soundspeed=(1541.5999755859375, 1541.5999755859375),
synth_counter=(35352, 35352),
synth_traveltime=((0.06522449, 0.06518704, 0.06507684, 0.065071255, 0.064642794, 0.064505786, 0.06434554, 0.06416631, 0.063962236, 0.06377582,
0.0635625, 0.06340408, 0.06328276, 0.0631804, 0.06307749, 0.06280551, 0.062687226, 0.06258843, 0.06258736, 0.06253909,
0.06246798, 0.062533304, 0.062135007, 0.062001966, 0.0619417, 0.061624885, 0.061449982, 0.06100908, 0.0609155, 0.060836654,
0.060686, 0.060577974, 0.060436483, 0.06017909, 0.059903726, 0.059593327, 0.05961624, 0.059536032, 0.059413526, 0.05915733,
0.058976103, 0.058743443, 0.058609582, 0.058437146, 0.0583026, 0.05813677, 0.057953823, 0.057874046, 0.05773502, 0.057653327,
0.057192303, 0.05707479, 0.057043985, 0.05692095, 0.05691686, 0.056565855, 0.05642815, 0.056190096, 0.05600257, 0.05578149,
0.05570252, 0.055485103, 0.05529015, 0.055106144, 0.054920398, 0.054755036, 0.054668132, 0.054615926, 0.054494556, 0.054432377,
0.054161485, 0.05399137, 0.053796716, 0.053626675, 0.053485643, 0.053344276, 0.053240005, 0.05310789, 0.052983914, 0.05280533,
0.052616112, 0.052513927, 0.052311942, 0.05214153, 0.05201989, 0.051904447, 0.051806886, 0.051698662, 0.0511948, 0.050998855,
0.050825115, 0.050497606, 0.050418604, 0.050333034, 0.050229885, 0.05013128, 0.050065372, 0.049987655, 0.04986936, 0.049747612,
0.049612574, 0.04936429, 0.04912361, 0.048920497, 0.048735674, 0.048601624, 0.04847881, 0.0483425, 0.04822244, 0.048108548,
0.04795933, 0.04778951, 0.04764674, 0.047454994, 0.047208834, 0.0469862, 0.047033556, 0.046873834, 0.04691157, 0.04671424,
0.04644442, 0.04624085, 0.04611523, 0.04588836, 0.04578103, 0.045665614, 0.045577113, 0.045611817, 0.04522572, 0.045074847,
0.044934355, 0.044838343, 0.044644285, 0.04448069, 0.044326458, 0.044175576, 0.04401584, 0.04388153, 0.04374801, 0.043601375,
0.043467492, 0.043320697, 0.043149393, 0.0430222, 0.042833645, 0.042648237, 0.042483058, 0.042383272, 0.042239156, 0.042139858,
0.042027947, 0.041908782, 0.041744523, 0.041616105, 0.04151694, 0.041373696, 0.041251116, 0.041057285, 0.040898778, 0.040764626,
0.040633954, 0.04051557, 0.040407497, 0.040280275, 0.040128462, 0.039896782, 0.039810136, 0.03962978, 0.039490405, 0.039437298,
0.039251424, 0.03903844, 0.038855065, 0.038736027, 0.038593363, 0.03847536, 0.03838772, 0.038259927, 0.03811045, 0.037954308,
0.037783723, 0.037620768, 0.03744685, 0.037308894, 0.03713024, 0.036985103, 0.036887284, 0.03681795, 0.036761448, 0.03655517,
0.036370464, 0.036215004, 0.036048945, 0.03594251, 0.03582423, 0.035668287, 0.03555743, 0.035394955, 0.03524771, 0.03509924,
0.03492953, 0.034781773, 0.034602158, 0.034490712, 0.034394506, 0.034273535, 0.034173444, 0.034011576, 0.033872608, 0.03372753,
0.033607963, 0.033479813, 0.033324625, 0.03322643, 0.03311384, 0.032966346, 0.03284337, 0.032699086, 0.03254835, 0.032423094,
0.032333124, 0.032218125, 0.03212301, 0.032028582, 0.031913806, 0.03174077, 0.031655036, 0.03148415, 0.031378485, 0.03127493,
0.031162292, 0.031048689, 0.030914722, 0.03078834, 0.030664947, 0.030533018, 0.030373689, 0.030267736, 0.030149909, 0.030047255,
0.029957393, 0.02982482, 0.029724898, 0.029619483, 0.029430812, 0.029349215, 0.02930378, 0.02921781, 0.029064978, 0.028962802,
0.028828954, 0.028735913, 0.028656164, 0.028525708, 0.028417978, 0.02837464, 0.028294982, 0.028085144, 0.028014475, 0.0278881,
0.027792554, 0.027711269, 0.027650138, 0.027599972, 0.027530676, 0.027436728, 0.02734857, 0.027308816, 0.027275976, 0.027089471,
0.026979135, 0.026879309, 0.026838332, 0.026807435, 0.026790207, 0.026758457, 0.026675122, 0.026512671, 0.026411993, 0.026344003,
0.026275361, 0.026225492, 0.026193999, 0.02616346, 0.026104024, 0.025933256, 0.026011482, 0.02580203, 0.025773056, 0.025746878,
0.025719209, 0.025703037, 0.025742559, 0.02562818, 0.025484039, 0.025463546, 0.025443735, 0.025397873, 0.025359608, 0.02536266,
0.025172435, 0.025252314, 0.025224654, 0.025370404, 0.02508849, 0.02495524, 0.024871388, 0.024826415, 0.024842858, 0.024835419,
0.024810242, 0.024783982, 0.024775004, 0.024785873, 0.024774877, 0.024676148, 0.024662785, 0.024611581, 0.024604993, 0.024610542,
0.02461692, 0.024627423, 0.02462869, 0.024617245, 0.024603054, 0.024534967, 0.024535242, 0.024539262, 0.02453834, 0.0245391,
0.024541842, 0.024541361, 0.024534907, 0.024533238, 0.024535734, 0.024542985, 0.02455123, 0.024558408, 0.02455526, 0.02456441,
0.024643479, 0.024654316, 0.024641989, 0.024689602, 0.0247348, 0.024758905, 0.024773423, 0.024780592, 0.024790682, 0.024837008,
0.024887824, 0.024908548, 0.025075123, 0.025001861, 0.02511, 0.025119135, 0.025131602, 0.025152188, 0.025205202, 0.025332384,
0.025369521, 0.02518933, 0.025490778, 0.025523953, 0.02556849, 0.025615312, 0.025657328, 0.02570217, 0.026048128, 0.02589172,
0.025936496, 0.026053024, 0.026104333, 0.026139332, 0.026173145, 0.026220541, 0.026291238, 0.026371716, 0.02644942, 0.026507504,
0.026553351, 0.026664851, 0.026784983, 0.026868131, 0.026909685, 0.027035134, 0.02708057, 0.027252892, 0.027315978, 0.027363624,
0.027421255, 0.027476111, 0.027543077, 0.027667334, 0.027811557, 0.027955748, 0.028007679, 0.02805615, 0.028225068, 0.028321648),
(0.028341256, 0.028267216, 0.02819169, 0.028065993, 0.027978903, 0.027861916, 0.027814543, 0.027771937, 0.027729565, 0.027561279,
0.027490498, 0.02729614, 0.02726445, 0.027200872, 0.027102495, 0.027024996, 0.02699118, 0.026959093, 0.026888823, 0.026795097,
0.026690306, 0.026583338, 0.02651464, 0.026463535, 0.02645002, 0.026434481, 0.026363837, 0.026230454, 0.026140213, 0.0260415,
0.025975566, 0.025957426, 0.025928603, 0.025897559, 0.025876988, 0.025860658, 0.025824694, 0.025677681, 0.025578134, 0.025459323,
0.025400503, 0.025399707, 0.02536268, 0.02531868, 0.025290577, 0.025289783, 0.025275948, 0.025186384, 0.025190832, 0.025170509,
0.025071217, 0.025020227, 0.025022427, 0.025006438, 0.024977453, 0.024928058, 0.024893843, 0.024826892, 0.024837537, 0.024835454,
0.024825364, 0.024787355, 0.024754863, 0.024744669, 0.024764579, 0.024738109, 0.024707051, 0.024730662, 0.024720922, 0.024714136,
0.024676956, 0.024663521, 0.024663089, 0.024665415, 0.024678243, 0.02468989, 0.024685243, 0.024678735, 0.024679454, 0.024678046,
0.02468035, 0.024739003, 0.0247674, 0.024743253, 0.02475427, 0.024770709, 0.02477101, 0.024807306, 0.024867848, 0.024864934,
0.024871062, 0.024886938, 0.02493612, 0.024929078, 0.024955736, 0.025016543, 0.02509175, 0.025257425, 0.024946557, 0.025135297,
0.02520516, 0.025218878, 0.025242163, 0.025273249, 0.025356248, 0.025437266, 0.025472641, 0.025534341, 0.025595607, 0.025626272,
0.025656361, 0.025710078, 0.025788054, 0.025852202, 0.025898213, 0.025922868, 0.0259505, 0.026012402, 0.026169363, 0.026297735,
0.026431158, 0.026404101, 0.026456883, 0.026458915, 0.026464188, 0.026485236, 0.02661925, 0.02682908, 0.026906397, 0.026959669,
0.026995962, 0.027061315, 0.027137168, 0.027199881, 0.027286468, 0.027385924, 0.027481344, 0.027530583, 0.02756527, 0.027678994,
0.027827643, 0.027907362, 0.02802151, 0.02815224, 0.02823538, 0.028284255, 0.028361235, 0.028440803, 0.028515894, 0.028592901,
0.028685875, 0.028806537, 0.028895397, 0.02903401, 0.029117582, 0.0292258, 0.029362388, 0.02945047, 0.029508533, 0.029661555,
0.029748157, 0.029810004, 0.029906625, 0.030132703, 0.030240707, 0.030349977, 0.03049707, 0.030586315, 0.030724024, 0.030790819,
0.030910674, 0.031025346, 0.03112967, 0.031266525, 0.031475887, 0.031604774, 0.031740934, 0.031883735, 0.03198466, 0.03209943,
0.03221525, 0.03237489, 0.032533713, 0.032645825, 0.032803643, 0.03293376, 0.033045284, 0.033189557, 0.033310354, 0.033401527,
0.03358435, 0.033739153, 0.03387751, 0.034037918, 0.034241587, 0.03433783, 0.03443083, 0.034539144, 0.0347603, 0.034945622,
0.03515317, 0.035204496, 0.03526165, 0.035343625, 0.035493907, 0.035635896, 0.035740998, 0.03591008, 0.036083657, 0.036146555,
0.036284827, 0.036392953, 0.03647947, 0.03667351, 0.036812864, 0.036966283, 0.03714145, 0.037323304, 0.037457727, 0.03759276,
0.037717864, 0.037836023, 0.037980728, 0.03815581, 0.038314465, 0.03847739, 0.03861899, 0.03876452, 0.038911708, 0.039040655,
0.039172463, 0.039286606, 0.039412994, 0.039588615, 0.039779164, 0.03993379, 0.040120576, 0.040384363, 0.040468775, 0.0405913,
0.040750254, 0.04086353, 0.040963642, 0.041103657, 0.04109384, 0.041407086, 0.04166521, 0.041842952, 0.04197794, 0.04213145,
0.042288613, 0.04242097, 0.042609476, 0.042733584, 0.042926807, 0.04311394, 0.043245636, 0.043367323, 0.043578003, 0.043753672,
0.04394945, 0.04410044, 0.044267166, 0.04447169, 0.044612218, 0.044772495, 0.04494207, 0.04508513, 0.045267407, 0.045419555,
0.04557632, 0.045704264, 0.045839746, 0.046018355, 0.046210222, 0.046378676, 0.046535775, 0.046723336, 0.046886057, 0.047069807,
0.047225844, 0.047359616, 0.047403686, 0.047668457, 0.04803036, 0.048128698, 0.04828162, 0.04841474, 0.048525117, 0.048644245,
0.048799768, 0.049006276, 0.04916146, 0.049396362, 0.0495242, 0.04969117, 0.049882345, 0.050049912, 0.050235257, 0.050373737,
0.0506082, 0.050737403, 0.050908424, 0.050865155, 0.051170506, 0.051403362, 0.051608514, 0.05182332, 0.051980477, 0.052123256,
0.052259997, 0.052400805, 0.052551176, 0.05277073, 0.052929755, 0.053136323, 0.053348947, 0.053459987, 0.053639263, 0.053794358,
0.053898446, 0.054098476, 0.054298148, 0.05446773, 0.054665692, 0.054816477, 0.05495632, 0.055100735, 0.05537986, 0.055488206,
0.055743963, 0.055838246, 0.055895437, 0.055975545, 0.05617462, 0.05650928, 0.056859788, 0.056863766, 0.057010952, 0.057240766,
0.05721194, 0.057756644, 0.05786516, 0.058000784, 0.05812295, 0.05830852, 0.058492575, 0.05863681, 0.0587849, 0.058969487,
0.059138488, 0.05937737, 0.05944659, 0.059682943, 0.05978295, 0.05989475, 0.06009527, 0.060301565, 0.06056904, 0.06072526,
0.060803007, 0.061006345, 0.06103696, 0.06139422, 0.061568066, 0.061686378, 0.061937917, 0.062169164, 0.062290505, 0.062400684,
0.06252082, 0.06266932, 0.06287615, 0.06303492, 0.06320577, 0.0632127, 0.06370442, 0.06375613, 0.063881114, 0.0639308,
0.06399956, 0.06428089, 0.06443866, 0.06453087, 0.06478732, 0.06479099, 0.065225035, 0.06541945, 0.06561232, 0.06591677,
0.06605095, 0.06611706, 0.06627636, 0.06657195, 0.0667452, 0.06690033, 0.066922285, 0.0672966, 0.06734647, 0.06770231)),
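                 # per-beam detection quality factor for each head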
synth_qualityfactor=((3, 3, 5, 15, 15, 7, 8, 16, 2, 16, 9, 9, 16, 16, 15, 1, 2, 2, 5, 3,
3, 10, 13, 14, 12, 6, 13, 13, 6, 2, 4, 5, 7, 10, 13, 14, 6, 4, 5, 14,
15, 8, 3, 8, 32, 24, 11, 18, 6, 20, 5, 5, 3, 4, 8, 8, 9, 12, 6, 6,
5, 6, 9, 12, 6, 6, 5, 5, 3, 10, 9, 109, 6, 4, 5, 6, 2, 3, 4, 6,
10, 11, 14, 10, 4, 2, 11, 91, 14, 10, 9, 16, 14, 7, 4, 3, 2, 2, 2, 5,
8, 13, 18, 3, 2, 2, 2, 1, 3, 2, 3, 2, 3, 4, 8, 16, 8, 9, 24, 13,
6, 8, 11, 9, 5, 6, 14, 30, 8, 3, 6, 7, 5, 5, 4, 5, 2, 2, 3, 2,
2, 1, 9, 8, 11, 27, 14, 6, 6, 9, 8, 5, 4, 4, 2, 3, 4, 4, 5, 4,
2, 2, 2, 4, 5, 11, 7, 5, 5, 5, 3, 3, 17, 11, 15, 16, 21, 12, 11, 6,
6, 3, 3, 4, 4, 14, 13, 16, 27, 53, 5, 3, 8, 8, 9, 7, 7, 3, 2, 2,
2, 3, 9, 9, 8, 9, 7, 3, 2, 4, 4, 4, 5, 5, 4, 4, 4, 3, 1, 2,
4, 3, 4, 5, 3, 3, 18, 5, 4, 4, 5, 4, 6, 4, 5, 5, 9, 8, 8, 16,
6, 6, 3, 3, 12, 13, 15, 14, 5, 4, 15, 17, 5, 5, 6, 17, 17, 5, 3, 14,
17, 16, 14, 14, 17, 18, 15, 12, 14, 9, 19, 14, 14, 13, 12, 15, 19, 20, 16, 14,
14, 13, 13, 16, 19, 30, 21, 14, 13, 13, 12, 12, 27, 30, 17, 12, 14, 15, 14, 14,
16, 18, 24, 33, 11, 19, 19, 13, 11, 12, 12, 11, 11, 11, 16, 11, 12, 10, 10, 10,
10, 10, 15, 17, 15, 11, 10, 11, 11, 11, 11, 11, 11, 11, 11, 10, 10, 10, 10, 11,
15, 12, 11, 14, 12, 11, 11, 12, 12, 11, 12, 13, 14, 15, 11, 11, 11, 13, 17, 22,
21, 15, 14, 13, 12, 11, 12, 13, 26, 17, 18, 15, 13, 12, 13, 14, 14, 14, 14, 14,
15, 19, 17, 12, 13, 10, 10, 13, 14, 15, 15, 15, 15, 13, 7, 17, 14, 16, 7, 6),
(3, 17, 7, 151, 20, 15, 4, 10, 13, 12, 6, 8, 14, 9, 3, 4, 4, 4, 4, 3,
5, 9, 9, 6, 2, 13, 12, 10, 5, 12, 16, 13, 13, 8, 6, 12, 11, 6, 4, 18,
11, 30, 3, 3, 13, 8, 6, 3, 4, 4, 13, 15, 8, 7, 40, 13, 13, 19, 11, 9,
13, 13, 5, 5, 10, 7, 9, 11, 13, 6, 6, 5, 7, 12, 8, 6, 10, 11, 11, 4,
6, 12, 9, 14, 8, 8, 11, 10, 13, 14, 9, 15, 12, 8, 6, 10, 8, 7, 3, 3,
2, 1, 1, 4, 6, 8, 6, 8, 17, 10, 6, 5, 6, 14, 28, 10, 4, 4, 8, 21,
10, 7, 2, 2, 2, 4, 7, 16, 9, 4, 6, 9, 4, 4, 3, 4, 3, 2, 1, 3,
6, 22, 14, 15, 51, 24, 7, 6, 6, 5, 8, 7, 4, 5, 6, 12, 38, 26, 8, 19,
8, 8, 9, 8, 3, 3, 4, 3, 4, 3, 3, 3, 3, 5, 5, 7, 6, 4, 4, 3,
3, 4, 4, 5, 13, 15, 11, 10, 5, 2, 4, 4, 7, 6, 7, 7, 7, 7, 4, 5,
16, 10, 8, 8, 6, 5, 6, 6, 4, 7, 8, 16, 14, 12, 11, 12, 9, 5, 5, 6,
7, 7, 6, 4, 8, 8, 9, 8, 7, 3, 8, 9, 9, 7, 11, 4, 4, 4, 6, 5,
4, 3, 4, 4, 4, 16, 15, 6, 7, 7, 6, 16, 18, 14, 9, 9, 14, 14, 15, 17,
18, 16, 14, 13, 14, 16, 17, 17, 14, 13, 15, 20, 19, 14, 14, 12, 13, 13, 15, 15,
13, 12, 13, 14, 13, 16, 23, 13, 12, 14, 13, 12, 13, 14, 14, 13, 14, 16, 17, 13,
11, 11, 18, 13, 12, 13, 16, 12, 10, 10, 12, 13, 14, 13, 14, 12, 11, 12, 12, 10,
11, 11, 11, 11, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 10, 10, 10, 10,
11, 12, 14, 13, 10, 11, 15, 18, 14, 13, 10, 13, 14, 14, 15, 13, 13, 13, 12, 12,
13, 7, 12, 12, 12, 12, 12, 13, 16, 7, 12, 12, 13, 19, 18, 12, 13, 20, 15, 13,
14, 15, 18, 16, 13, 12, 17, 10, 17, 18, 17, 15, 14, 15, 14, 6, 6, 18, 17, 18)),
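                 # transmit tilt angle per beam, constant within each head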
synth_tiltangle=((-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999,
-1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999, -1.3199999),
(-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77,
-0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77, -0.77)),
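                 # number of transmit sectors (ntx) per head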
synth_ntx=(1, 1),
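                 # attitude samples (heave, roll, pitch, yaw) used to build the raw attitude dataset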
synth_heave=(0.10999999940395355, 0.10999999940395355, 0.10999999940395355, 0.09999999403953552,
0.09999999403953552, 0.09999999403953552, 0.09999999403953552, 0.09999999403953552,
0.09999999403953552, 0.09999999403953552, 0.09999999403953552, 0.09999999403953552,
0.09999999403953552, 0.09999999403953552, 0.08999999612569809, 0.08999999612569809,
0.08999999612569809, 0.08999999612569809, 0.07999999821186066, 0.07999999821186066,
0.07999999821186066, 0.07999999821186066, 0.07999999821186066, 0.07999999821186066,
0.07999999821186066, 0.07999999821186066, 0.07999999821186066, 0.07999999821186066,
0.07999999821186066, 0.07999999821186066, 0.07999999821186066, 0.07999999821186066,
0.07999999821186066, 0.07000000029802322, 0.07000000029802322, 0.07000000029802322,
0.07000000029802322, 0.07000000029802322, 0.07000000029802322, 0.07000000029802322),
synth_roll=(-0.07999999821186066, -0.07999999821186066, -0.07999999821186066, -0.07999999821186066,
-0.07999999821186066, -0.07000000029802322, -0.07000000029802322, -0.07000000029802322,
-0.07000000029802322, -0.07000000029802322, -0.07000000029802322, -0.07000000029802322,
-0.07000000029802322, -0.07000000029802322, -0.05999999865889549, -0.05999999865889549,
-0.05999999865889549, -0.05999999865889549, -0.05999999865889549, -0.05999999865889549,
-0.05999999865889549, -0.05999999865889549, -0.05999999865889549, -0.05999999865889549,
-0.04999999701976776, -0.04999999701976776, -0.04999999701976776, -0.04999999701976776,
-0.04999999701976776, -0.04999999701976776, -0.04999999701976776, -0.04999999701976776,
-0.04999999701976776, -0.04999999701976776, -0.04999999701976776, -0.04999999701976776,
-0.04999999701976776, -0.04999999701976776, -0.04999999701976776, -0.04999999701976776),
synth_pitch=(0.9599999785423279, 0.9599999785423279, 0.9599999785423279, 0.949999988079071,
0.949999988079071, 0.949999988079071, 0.949999988079071, 0.9399999976158142,
0.9399999976158142, 0.9399999976158142, 0.9399999976158142, 0.9300000071525574,
0.9300000071525574, 0.9300000071525574, 0.9300000071525574, 0.9199999570846558,
0.9199999570846558, 0.9199999570846558, 0.9199999570846558, 0.9199999570846558,
0.9099999666213989, 0.9099999666213989, 0.9099999666213989, 0.9099999666213989,
0.8999999761581421, 0.8999999761581421, 0.8999999761581421, 0.8999999761581421,
0.8999999761581421, 0.8899999856948853, 0.8899999856948853, 0.8899999856948853,
0.8899999856948853, 0.8899999856948853, 0.8799999952316284, 0.8799999952316284,
0.8799999952316284, 0.8799999952316284, 0.8799999952316284, 0.8700000047683716),
synth_yaw=(214.1199951171875, 214.1199951171875, 214.1199951171875, 214.1199951171875, 214.1199951171875,
214.1199951171875, 214.1199951171875, 214.1199951171875, 214.1199951171875, 214.11000061035156,
214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156,
214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156,
214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156, 214.11000061035156,
214.11000061035156, 214.11000061035156, 214.09999084472656, 214.09999084472656, 214.09999084472656,
214.09999084472656, 214.09999084472656, 214.09999084472656, 214.09999084472656, 214.09999084472656,
214.09999084472656, 214.09999084472656, 214.09999084472656, 214.09999084472656, 214.09999084472656),
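                 # per-ping navigation: altitude, latitude, longitude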
synth_altitude=(-32.73036529849195, -32.711789940262086),
synth_latitude=(30.41797070292914, 30.41796516450258),
synth_longitude=(-81.07355341266269, -81.07355899578471),
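                 # installation record: xyz timestamp, waterline and per-head transducer mount angles / lever arms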
synth_xyztime=1563319288, synth_waterline=-2.383,
synth_tx_port_mountroll=4.374, synth_tx_port_mountpitch=0.363, synth_tx_port_mountyaw=1.045,
synth_tx_stbd_mountroll=-3.398, synth_tx_stbd_mountpitch=-0.166, synth_tx_stbd_mountyaw=0.988,
synth_rx_port_mountroll=4.274, synth_rx_port_mountpitch=0.363, synth_rx_port_mountyaw=0.899,
synth_rx_stbd_mountroll=-3.448, synth_rx_stbd_mountpitch=-0.166, synth_rx_stbd_mountyaw=1.035,
synth_tx_port_x=0.595, synth_tx_port_y=-13.292, synth_tx_port_z=1.319,
synth_tx_stbd_x=0.427, synth_tx_stbd_y=1.559, synth_tx_stbd_z=1.381,
synth_rx_port_x=0.495, synth_rx_port_y=-13.598, synth_rx_port_z=1.282,
synth_rx_stbd_x=0.331, synth_rx_stbd_y=1.251, synth_rx_stbd_z=1.385,
synth_tx_x_0=0.00, synth_tx_x_1=0.00, synth_tx_x_2=0.00,
synth_tx_y_0=-0.0554, synth_tx_y_1=0.0131, synth_tx_y_2=0.0554,
synth_tx_z_0=-0.012, synth_tx_z_1=-0.006, synth_tx_z_2=-0.012,
synth_rx_x_0=0.011, synth_rx_x_1=0.011, synth_rx_x_2=0.011,
synth_rx_y_0=0.00, synth_rx_y_1=0.00, synth_rx_y_2=0.00,
synth_rx_z_0=-0.006, synth_rx_z_1=-0.006, synth_rx_z_2=-0.006,
serialnum='389', secondary_serialnum='394',
profile=None,
secs=('389', '394'),
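                 # xyz88 record: depth, across-track and along-track solution per beam for each head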
xyz88_depth=((18.247137, 18.284613, 18.30119, 18.347609, 18.272295, 18.280445, 18.281658, 18.277119, 18.265053, 18.265673,
18.250294, 18.25067, 18.261683, 18.278084, 18.301876, 18.267723, 18.27876, 18.303057, 18.348646, 18.387753,
18.412275, 18.485277, 18.418695, 18.42399, 18.45862, 18.41526, 18.407068, 18.324894, 18.348211, 18.375885,
18.38131, 18.399605, 18.407343, 18.378876, 18.344366, 18.29845, 18.356182, 18.381601, 18.393568, 18.370342,
18.363163, 18.339361, 18.353611, 18.34822, 18.361656, 18.357765, 18.35506, 18.378246, 18.38909, 18.41821,
18.323885, 18.333736, 18.378532, 18.393019, 18.446316, 18.385263, 18.393997, 18.369184, 18.360678, 18.347275,
18.374224, 18.35458, 18.348644, 18.33937, 18.329128, 18.331938, 18.361048, 18.395273, 18.412182, 18.449108,
18.407572, 18.406677, 18.396883, 18.395185, 18.40319, 18.410738, 18.430944, 18.441103, 18.460056, 18.453123,
18.442003, 18.467583, 18.45109, 18.451557, 18.462988, 18.48244, 18.508032, 18.523424, 18.400745, 18.389153,
18.385176, 18.324312, 18.354128, 18.387142, 18.407513, 18.42928, 18.46877, 18.497807, 18.517164, 18.529099,
18.541418, 18.510418, 18.475948, 18.460838, 18.452152, 18.462425, 18.476622, 18.490784, 18.505266, 18.52178,
18.52965, 18.523504, 18.532934, 18.51718, 18.484753, 18.460917, 18.543884, 18.544184, 18.623112, 18.60753,
18.562096, 18.547897, 18.55949, 18.534569, 18.552656, 18.572187, 18.602379, 18.683146, 18.589382, 18.592316,
18.599058, 18.623943, 18.607233, 18.607668, 18.60654, 18.611195, 18.6115, 18.622093, 18.632504, 18.636744,
18.645945, 18.649023, 18.645597, 18.656076, 18.643814, 18.627558, 18.62419, 18.649172, 18.65397, 18.678205,
18.696339, 18.715242, 18.70873, 18.722284, 18.74857, 18.749817, 18.764355, 18.749931, 18.746782, 18.754082,
18.766708, 18.780169, 18.80218, 18.814634, 18.814863, 18.777143, 18.806839, 18.791317, 18.79875, 18.842909,
18.82667, 18.796417, 18.780203, 18.793968, 18.799484, 18.81246, 18.843737, 18.854555, 18.853884, 18.849678,
18.836855, 18.827023, 18.814583, 18.819485, 18.799175, 18.799362, 18.826147, 18.863333, 18.906664, 18.875338,
18.854172, 18.84732, 18.834019, 18.85179, 18.865532, 18.855045, 18.871157, 18.858871, 18.85383, 18.85057,
18.83207, 18.827303, 18.804195, 18.817423, 18.838276, 18.847902, 18.865116, 18.850313, 18.847778, 18.840397,
18.849487, 18.849813, 18.836897, 18.85545, 18.864975, 18.85697, 18.858706, 18.850075, 18.83666, 18.837065,
18.86, 18.864618, 18.882832, 18.900608, 18.908566, 18.877625, 18.900496, 18.871256, 18.880213, 18.889523,
18.894926, 18.89623, 18.886883, 18.883259, 18.877949, 18.868711, 18.841284, 18.845982, 18.842236, 18.849298,
18.863426, 18.849493, 18.855202, 18.856401, 18.805351, 18.821774, 18.860605, 18.874468, 18.843811, 18.84287,
18.824175, 18.82905, 18.843626, 18.821676, 18.817451, 18.85308, 18.863573, 18.787539, 18.80358, 18.78276,
18.779793, 18.787142, 18.807154, 18.833673, 18.847767, 18.842182, 18.841072, 18.872355, 18.909054, 18.83615,
18.816458, 18.802864, 18.830883, 18.863562, 18.906368, 18.937946, 18.931847, 18.868143, 18.848423, 18.85085,
18.851646, 18.864815, 18.891363, 18.91637, 18.920523, 18.842499, 18.944555, 18.836668, 18.859327, 18.881937,
18.903399, 18.933208, 19.00214, 18.957134, 18.887781, 18.909777, 18.931173, 18.932032, 18.938168, 18.97328,
18.863155, 18.954313, 18.963669, 19.102472, 18.917904, 18.843931, 18.806519, 18.796698, 18.832767, 18.849594,
18.851778, 18.851952, 18.864088, 18.89018, 18.898462, 18.838629, 18.843023, 18.81701, 18.823908, 18.83916,
18.853647, 18.870316, 18.878712, 18.87604, 18.870201, 18.821854, 18.824633, 18.829145, 18.828667, 18.828306,
18.828247, 18.824514, 18.815115, 18.808086, 18.803041, 18.800615, 18.797571, 18.792744, 18.778873, 18.772938,
18.819387, 18.81263, 18.787083, 18.805948, 18.821753, 18.820328, 18.810923, 18.794321, 18.779285, 18.789915,
18.803255, 18.792738, 18.890642, 18.80715, 18.858574, 18.835312, 18.81272, 18.795795, 18.801876, 18.861805,
18.853603, 18.684414, 18.869247, 18.855486, 18.848957, 18.842949, 18.83326, 18.823507, 19.032911, 18.875208,
18.862885, 18.901308, 18.89247, 18.86961, 18.846008, 18.831081, 18.832943, 18.839338, 18.843843, 18.833282,
18.812979, 18.839214, 18.868813, 18.872604, 18.846205, 18.878653, 18.852777, 18.915152, 18.900711, 18.874603,
18.855965, 18.832785, 18.818491, 18.84202, 18.879484, 18.91559, 18.886587, 18.857903, 18.90687, 18.908127),
(18.805456, 18.818306, 18.829233, 18.80737, 18.808516, 18.79026, 18.818037, 18.84993, 18.879406, 18.823614,
18.833326, 18.757017, 18.791496, 18.804665, 18.792683, 18.794113, 18.82501, 18.856256, 18.861294, 18.848764,
18.82729, 18.803102, 18.806263, 18.819544, 18.859999, 18.898142, 18.89589, 18.848696, 18.830057, 18.805355,
18.803196, 18.83465, 18.858524, 18.878693, 18.90662, 18.936682, 18.951376, 18.883383, 18.849981, 18.800179,
18.794424, 18.830566, 18.838852, 18.840902, 18.854546, 18.886719, 18.90895, 18.873432, 18.906507, 18.920721,
18.875145, 18.864058, 18.891993, 18.90578, 18.90866, 18.894444, 18.891161, 18.86193, 18.890488, 18.908335,
18.919035, 18.907791, 18.899242, 18.906612, 18.936218, 18.929022, 18.917505, 18.946669, 18.949013, 18.952747,
18.932102, 18.92838, 18.933674, 18.939978, 18.953222, 18.964458, 18.962088, 18.957222, 18.95679, 18.953619,
18.952185, 18.992785, 19.009226, 18.984228, 18.985027, 18.989063, 18.979462, 18.996489, 19.03063, 19.01553,
19.005919, 19.002968, 19.02426, 19.001707, 19.003675, 19.030424, 19.06691, 19.170614, 18.913528, 19.032352,
19.060768, 19.045782, 19.036297, 19.032207, 19.066603, 19.097633, 19.093374, 19.10839, 19.121159, 19.110836,
19.09903, 19.103647, 19.125051, 19.135984, 19.131514, 19.111225, 19.092113, 19.097033, 19.170143, 19.222088,
19.276394, 19.212006, 19.205616, 19.162754, 19.120173, 19.088005, 19.136944, 19.238665, 19.244448, 19.233364,
19.207901, 19.203358, 19.205158, 19.196638, 19.20525, 19.220268, 19.232786, 19.212101, 19.180386, 19.20391,
19.248678, 19.246416, 19.266657, 19.298704, 19.295454, 19.269655, 19.26196, 19.255, 19.244015, 19.235151,
19.235973, 19.254166, 19.250116, 19.2779, 19.270176, 19.277657, 19.302612, 19.29475, 19.26847, 19.302849,
19.293137, 19.266558, 19.261417, 19.340137, 19.342205, 19.344065, 19.368801, 19.358244, 19.37486, 19.34836,
19.354116, 19.358244, 19.352365, 19.368034, 19.42716, 19.435656, 19.447542, 19.464972, 19.456001, 19.454798,
19.452995, 19.476475, 19.501043, 19.496763, 19.518656, 19.523115, 19.515984, 19.529675, 19.525803, 19.506674,
19.539848, 19.558655, 19.56403, 19.584396, 19.628096, 19.609524, 19.588362, 19.575094, 19.627586, 19.658777,
19.701633, 19.656124, 19.613306, 19.583626, 19.594118, 19.59917, 19.583231, 19.601349, 19.621286, 19.583576,
19.58242, 19.567736, 19.54083, 19.570461, 19.573652, 19.579914, 19.600721, 19.620094, 19.617388, 19.61424,
19.605186, 19.595558, 19.595163, 19.613712, 19.622604, 19.632856, 19.63148, 19.631338, 19.631264, 19.625647,
19.620378, 19.601667, 19.592382, 19.60675, 19.627626, 19.634111, 19.651913, 19.710016, 19.680069, 19.663862,
19.668768, 19.655298, 19.63074, 19.62477, 19.551254, 19.626007, 19.677914, 19.69108, 19.683502, 19.683956,
19.685436, 19.674759, 19.69391, 19.678175, 19.698097, 19.714495, 19.700312, 19.685665, 19.715364, 19.723837,
19.740627, 19.73665, 19.743864, 19.767233, 19.756708, 19.759182, 19.765139, 19.758862, 19.769138, 19.76565,
19.7686, 19.753525, 19.746246, 19.751886, 19.767647, 19.772789, 19.772533, 19.784615, 19.785587, 19.79483,
19.791842, 19.779015, 19.73371, 19.77437, 19.85969, 19.835402, 19.827738, 19.816893, 19.796331, 19.778946,
19.775902, 19.792942, 19.794268, 19.821522, 19.805244, 19.809793, 19.817791, 19.821552, 19.831848, 19.823166,
19.845934, 19.832651, 19.835276, 19.759981, 19.81395, 19.839087, 19.85291, 19.875832, 19.87014, 19.864529,
19.850185, 19.843042, 19.833036, 19.854761, 19.853195, 19.869005, 19.886543, 19.865734, 19.869926, 19.864704,
19.84026, 19.857107, 19.867006, 19.871775, 19.880077, 19.877163, 19.86347, 19.85752, 19.893219, 19.873465,
19.906199, 19.880722, 19.841803, 19.810846, 19.821762, 19.88004, 19.943148, 19.88396, 19.881542, 19.900843,
19.829699, 19.964218, 19.939976, 19.931698, 19.911583, 19.919834, 19.927183, 19.913584, 19.907955, 19.914383,
19.915161, 19.939138, 19.905598, 19.927917, 19.904175, 19.88414, 19.893347, 19.904055, 19.941832, 19.93515,
19.902332, 19.917835, 19.86913, 19.93424, 19.931635, 19.918087, 19.939964, 19.96227, 19.948801, 19.923983,
19.909658, 19.904167, 19.916973, 19.91413, 19.90718, 19.855768, 19.957058, 19.919233, 19.91201, 19.873272,
19.84037, 19.87343, 19.86768, 19.849201, 19.873407, 19.819332, 19.905298, 19.909233, 19.912344, 19.95714,
19.949686, 19.913317, 19.91308, 19.945606, 19.948954, 19.94659, 19.8959, 19.958593, 19.924044, 19.980381)),
xyz88_acrosstrack=((-60.08836, -60.04249, -59.944885, -59.921623, -59.598003, -59.48148, -59.348495, -59.202095, -59.038147, -58.883575,
-58.71321, -58.581806, -58.4767, -58.38504, -58.289795, -58.078274, -57.975456, -57.88322, -57.863186, -57.80659,
-57.73708, -57.76051, -57.457184, -57.344105, -57.279064, -57.03351, -56.891132, -56.558628, -56.47039, -56.3925,
-56.264214, -56.16588, -56.044098, -55.84115, -55.625744, -55.38596, -55.37953, -55.300922, -55.192753, -54.98802,
-54.838974, -54.65412, -54.53507, -54.392387, -54.272957, -54.135002, -53.982002, -53.90403, -53.78167, -53.699173,
-53.353428, -53.24946, -53.202415, -53.091442, -53.062843, -52.794178, -52.673218, -52.48298, -52.32777, -52.1463,
-52.066154, -51.890614, -51.72746, -51.575157, -51.421753, -51.279255, -51.19071, -51.12919, -51.017002, -50.94541,
-50.734135, -50.588776, -50.426735, -50.281624, -50.15641, -50.03103, -49.930996, -49.81203, -49.695435, -49.544987,
-49.38743, -49.285835, -49.119816, -48.972103, -48.860737, -48.750385, -48.652153, -48.549976, -48.17773, -48.013523,
-47.864403, -47.611965, -47.526848, -47.434113, -47.332798, -47.23456, -47.155018, -47.07078, -46.956203, -46.84261,
-46.716976, -46.515995, -46.323532, -46.153038, -45.994865, -45.870514, -45.753685, -45.62477, -45.509903, -45.399166,
-45.26196, -45.114513, -44.981777, -44.820187, -44.619827, -44.435158, -44.427494, -44.284214, -44.270023, -44.10231,
-43.887577, -43.713036, -43.592583, -43.403282, -43.294933, -43.17817, -43.07886, -43.059067, -42.767933, -42.628708,
-42.49629, -42.392525, -42.225986, -42.075916, -41.935234, -41.793594, -41.646465, -41.515667, -41.38548, -41.247097,
-41.11693, -40.97872, -40.82197, -40.696426, -40.529198, -40.367428, -40.215324, -40.10428, -39.965633, -39.85505,
-39.73668, -39.610638, -39.460045, -39.328545, -39.21521, -39.078068, -38.95052, -38.776165, -38.62687, -38.492752,
-38.357643, -38.233414, -38.112278, -37.97965, -37.832325, -37.636703, -37.529057, -37.36529, -37.223057, -37.135727,
-36.96608, -36.780674, -36.612793, -36.483646, -36.33746, -36.20924, -36.09567, -35.95869, -35.809258, -35.65578,
-35.494637, -35.338223, -35.17253, -35.027832, -34.86334, -34.714645, -34.590282, -34.485428, -34.387825, -34.202522,
-34.029827, -33.873928, -33.712444, -33.58403, -33.44635, -33.29136, -33.15822, -32.99697, -32.844418, -32.688244,
-32.523754, -32.368687, -32.196846, -32.06196, -31.935387, -31.79253, -31.664219, -31.500288, -31.34836, -31.194014,
-31.050005, -30.905092, -30.743307, -30.611122, -30.471773, -30.311274, -30.16726, -30.009583, -29.849106, -29.701733,
-29.568834, -29.427067, -29.292019, -29.157482, -29.009592, -28.837763, -28.704922, -28.531862, -28.39017, -28.249731,
-28.10205, -27.957523, -27.80038, -27.644201, -27.493078, -27.33525, -27.165672, -27.020403, -26.870234, -26.72382,
-26.583258, -26.424576, -26.280338, -26.134092, -25.950022, -25.811419, -25.687923, -25.544537, -25.37433, -25.228455,
-25.06383, -24.919157, -24.776718, -24.61821, -24.462471, -24.334799, -24.1935, -24.001019, -23.860773, -23.697302,
-23.548779, -23.402443, -23.263445, -23.12836, -22.98354, -22.833921, -22.683392, -22.5491, -22.413734, -22.23266,
-22.073523, -21.918268, -21.778212, -21.644455, -21.511375, -21.373873, -21.22159, -21.047558, -20.888494, -20.738909,
-20.589472, -20.445263, -20.302624, -20.163485, -20.01394, -19.837936, -19.721876, -19.535303, -19.391117, -19.250814,
-19.106878, -18.962036, -18.831656, -18.666864, -18.500612, -18.355423, -18.210304, -18.060566, -17.909067, -17.76781,
-17.592363, -17.460062, -17.310808, -17.186111, -17.001617, -16.839422, -16.6814, -16.532015, -16.386572, -16.238209,
-16.08798, -15.937776, -15.7892475, -15.642308, -15.493425, -15.338358, -15.186566, -15.035888, -14.888052, -14.737481,
-14.590066, -14.43935, -14.288173, -14.139923, -13.988426, -13.836522, -13.688956, -13.5380745, -13.387167, -13.239521,
-13.088548, -12.937637, -12.79023, -12.639466, -12.488578, -12.340746, -12.18955, -12.041668, -11.8944845, -11.743233,
-11.589597, -11.441035, -11.294603, -11.142532, -10.99028, -10.83996, -10.693901, -10.545443, -10.399969, -10.246428,
-10.0951805, -9.947906, -9.778569, -9.64511, -9.483239, -9.340288, -9.193593, -9.0488205, -8.898062, -8.732843,
-8.584109, -8.482261, -8.279783, -8.134508, -7.9866805, -7.838151, -7.6937976, -7.545345, -7.3281755, -7.230167,
-7.0840106, -6.9194818, -6.7742825, -6.6298947, -6.4891, -6.344575, -6.196705, -6.042412, -5.89177, -5.7464805,
-5.604633, -5.446101, -5.280927, -5.1298327, -4.9915085, -4.829499, -4.6894913, -4.510823, -4.3675575, -4.229341,
-4.090947, -3.9500942, -3.8081741, -3.6456454, -3.478547, -3.3106425, -3.1726325, -3.042469, -2.86362, -2.714639),
(-9.552668, -9.417009, -9.281006, -9.123426, -8.983848, -8.830525, -8.70268, -8.57328, -8.447218, -8.274377,
-8.135853, -7.955903, -7.831391, -7.69306, -7.5436354, -7.401393, -7.2732673, -7.145634, -7.002958, -6.853476,
-6.7012033, -6.548905, -6.4050345, -6.2697215, -6.14179, -6.0130987, -5.8693566, -5.70606, -5.5582194, -5.405504,
-5.261649, -5.1298685, -4.992121, -4.857103, -4.721237, -4.586236, -4.44678, -4.2834926, -4.128134, -3.9728482,
-3.827243, -3.6932695, -3.552186, -3.4099548, -3.2675066, -3.1331117, -2.9930189, -2.8405483, -2.7068458, -2.5658417,
-2.4100373, -2.2651758, -2.1278462, -1.9846623, -1.8398554, -1.6960595, -1.5510652, -1.4027891, -1.262954, -1.1216264,
-0.97945434, -0.8316235, -0.68780917, -0.54585814, -0.40263104, -0.25951865, -0.11306588, 0.030420648, 0.1724333, 0.3175933,
0.46372032, 0.605566, 0.75033444, 0.89510524, 1.0398505, 1.1847801, 1.3265246, 1.4715093, 1.6165286, 1.7615105,
1.9065621, 2.053426, 2.196496, 2.3408742, 2.4866633, 2.6295245, 2.7748344, 2.919177, 3.068923, 3.2110102,
3.3570378, 3.5006015, 3.6474135, 3.788989, 3.933956, 4.0829287, 4.234148, 4.3972516, 4.4971795, 4.6641026,
4.812971, 4.9542522, 5.1002645, 5.2442517, 5.3934865, 5.5464053, 5.692098, 5.839326, 5.990242, 6.132469,
6.2746773, 6.421679, 6.573969, 6.7205844, 6.86704, 7.0056715, 7.144962, 7.29216, 7.4620957, 7.6231065,
7.7863417, 7.9137917, 8.057705, 8.185293, 8.316804, 8.452311, 8.615385, 8.800579, 8.949817, 9.089166,
9.227064, 9.370271, 9.516917, 9.659891, 9.807173, 9.962299, 10.113322, 10.249869, 10.381722, 10.536483,
10.706986, 10.851526, 11.008273, 11.168046, 11.315083, 11.446831, 11.588781, 11.731939, 11.873719, 12.013091,
12.158728, 12.3152685, 12.460139, 12.624648, 12.764892, 12.91515, 13.07717, 13.22028, 13.348098, 13.514901,
13.654964, 13.784958, 13.929636, 14.126093, 14.273302, 14.421423, 14.586308, 14.72316, 14.884936, 15.0124445,
15.163703, 15.309366, 15.454383, 15.611126, 15.801578, 15.955822, 16.113863, 16.272068, 16.411083, 16.556494,
16.70311, 16.870998, 17.035646, 17.178238, 17.343334, 17.495405, 17.638308, 17.794638, 17.943142, 18.073746,
18.250479, 18.410269, 18.565594, 18.728695, 18.914776, 19.04633, 19.176361, 19.31446, 19.508091, 19.683685,
19.87109, 19.977144, 20.086216, 20.208439, 20.36377, 20.515007, 20.646717, 20.813131, 20.982008, 21.088066,
21.23868, 21.369633, 21.488857, 21.667486, 21.812895, 21.9698, 22.13533, 22.308805, 22.45265, 22.59707,
22.736115, 22.867992, 23.018541, 23.18284, 23.338493, 23.496887, 23.643326, 23.792261, 23.942392, 24.078373,
24.216587, 24.34812, 24.483511, 24.647911, 24.82139, 24.970638, 25.142157, 25.356527, 25.465038, 25.59968,
25.752737, 25.875303, 25.993532, 26.135677, 26.184547, 26.431377, 26.64211, 26.804651, 26.941511, 27.08979,
27.24049, 27.376247, 27.54306, 27.674109, 27.844288, 28.010998, 28.14723, 28.273962, 28.452442, 28.61296,
28.786196, 28.931826, 29.083979, 29.259932, 29.399578, 29.548283, 29.703, 29.841234, 30.004307, 30.148586,
30.292479, 30.421741, 30.55243, 30.714062, 30.880867, 31.03298, 31.178022, 31.342669, 31.491629, 31.654215,
31.799215, 31.93005, 31.99982, 32.215157, 32.490017, 32.59512, 32.739025, 32.866764, 32.979843, 33.098656,
33.24119, 33.417046, 33.556187, 33.750935, 33.875973, 34.02322, 34.189922, 34.337845, 34.49757, 34.626656,
34.822445, 34.94566, 35.09652, 35.102585, 35.34232, 35.534416, 35.708378, 35.885445, 36.027905, 36.157444,
36.286522, 36.414913, 36.55322, 36.734295, 36.875134, 37.047848, 37.22477, 37.33384, 37.488674, 37.62741,
37.731983, 37.897594, 38.066574, 38.211964, 38.38005, 38.512985, 38.642086, 38.770866, 38.994278, 39.09886,
39.303314, 39.398506, 39.46855, 39.554127, 39.720528, 39.979248, 40.249023, 40.283077, 40.41118, 40.599174,
40.610783, 41.010563, 41.116123, 41.236763, 41.351727, 41.506676, 41.660686, 41.790962, 41.920456, 42.07508,
42.219086, 42.41124, 42.48675, 42.67737, 42.774147, 42.879078, 43.045254, 43.215527, 43.42469, 43.560577,
43.642235, 43.807343, 43.85639, 44.12831, 44.27674, 44.38322, 44.58572, 44.77077, 44.879536, 44.984055,
45.092033, 45.219795, 45.38824, 45.523285, 45.670288, 45.699486, 46.067112, 46.12765, 46.2359, 46.295013,
46.367443, 46.588657, 46.72348, 46.808937, 47.013012, 47.03991, 47.36529, 47.52615, 47.685997, 47.92084,
48.036118, 48.10704, 48.240036, 48.47253, 48.61549, 48.74574, 48.785637, 49.070538, 49.12667, 49.398777)),
xyz88_alongtrack=((0.7486924, 0.74938226, 0.7499608, 0.75069356, 0.75078005, 0.7513034, 0.7517863, 0.7522347, 0.75263816, 0.75318384,
0.7535609, 0.7540199, 0.754534, 0.75507486, 0.7557283, 0.7559811, 0.75648236, 0.7571278, 0.7578185, 0.7585428,
0.75911033, 0.76002413, 0.76013374, 0.7605818, 0.7612655, 0.7614923, 0.7618511, 0.76183826, 0.7624396, 0.7630641,
0.7635533, 0.7641156, 0.76461124, 0.76488507, 0.76511693, 0.7652739, 0.7660567, 0.7666406, 0.7671391, 0.767477,
0.767849, 0.7681136, 0.7686688, 0.76903933, 0.7695812, 0.76995325, 0.77038485, 0.7709209, 0.7714305, 0.772053,
0.77187926, 0.7723153, 0.7730268, 0.77353925, 0.77430063, 0.7743128, 0.7747755, 0.7750136, 0.775354, 0.7757061,
0.77627236, 0.7765251, 0.7769116, 0.7772238, 0.7775251, 0.77795565, 0.7785595, 0.7791499, 0.77966386, 0.7803105,
0.7803729, 0.7807519, 0.7810647, 0.78142846, 0.7818548, 0.7822737, 0.78277665, 0.7832056, 0.7837313, 0.78403157,
0.7842977, 0.7848562, 0.78507495, 0.785446, 0.78585404, 0.7863512, 0.78688806, 0.7873128, 0.7867737, 0.78702736,
0.78733087, 0.78721654, 0.787755, 0.7883446, 0.7888063, 0.78927433, 0.7898988, 0.7904124, 0.7908804, 0.7912598,
0.79166657, 0.79174656, 0.791767, 0.79195356, 0.7921825, 0.79254806, 0.7929384, 0.793349, 0.79373205, 0.7941259,
0.7944738, 0.79468673, 0.7950359, 0.7951657, 0.7951853, 0.79526424, 0.7961542, 0.7964075, 0.79725915, 0.79738086,
0.797267, 0.7974069, 0.7977212, 0.7977663, 0.79812074, 0.79849917, 0.7989552, 0.7998001, 0.7992803, 0.799508,
0.7997601, 0.80014896, 0.8002069, 0.80040765, 0.8005767, 0.8007991, 0.80098134, 0.8012389, 0.8014894, 0.80168563,
0.80191547, 0.80209136, 0.80222034, 0.8024436, 0.80249035, 0.80249, 0.80259526, 0.8029203, 0.80307883, 0.8033865,
0.80364, 0.8039007, 0.8039457, 0.8041514, 0.8044528, 0.8045425, 0.804738, 0.8046997, 0.8047411, 0.80486006,
0.8050191, 0.8051754, 0.8053971, 0.8055357, 0.8055697, 0.80529284, 0.8055508, 0.80543953, 0.8055067, 0.80586207,
0.80572605, 0.8054709, 0.80532175, 0.80540687, 0.8054175, 0.8054831, 0.8056865, 0.80571884, 0.8056522, 0.80555004,
0.8053728, 0.805213, 0.8050211, 0.8049608, 0.8046977, 0.8045853, 0.8046724, 0.80484104, 0.80505115, 0.8046538,
0.80433035, 0.80411345, 0.8038386, 0.8038013, 0.80371714, 0.8034435, 0.803363, 0.8030516, 0.8027901, 0.8025248,
0.8021421, 0.80185026, 0.80140805, 0.8012416, 0.80112636, 0.8009036, 0.8007441, 0.8003169, 0.7999747, 0.79958963,
0.79930913, 0.79896766, 0.7985031, 0.79827, 0.7979598, 0.7974909, 0.7971056, 0.79661834, 0.7960875, 0.7956521,
0.7953586, 0.7949374, 0.79459023, 0.7942304, 0.7937679, 0.79303396, 0.79266506, 0.79190904, 0.7914211, 0.7909266,
0.7903744, 0.7898044, 0.7891271, 0.7884625, 0.7877985, 0.78707665, 0.7862199, 0.7855774, 0.78486735, 0.78419876,
0.7835678, 0.78273696, 0.78202915, 0.78128123, 0.78015345, 0.77946323, 0.77890766, 0.77815115, 0.77709657, 0.7762504,
0.77522874, 0.774373, 0.77353966, 0.7724957, 0.77149653, 0.77075696, 0.7698513, 0.76838833, 0.76746696, 0.7662871,
0.76523113, 0.76419336, 0.76321477, 0.7622586, 0.761187, 0.7600247, 0.7588437, 0.75782424, 0.75678426, 0.75518596,
0.75382656, 0.7524879, 0.75131315, 0.75017804, 0.7490398, 0.74782753, 0.74641854, 0.7447286, 0.7431997, 0.7417588,
0.74029577, 0.7388716, 0.73744977, 0.7360397, 0.734487, 0.7325896, 0.7313941, 0.7293232, 0.72774214, 0.7261805,
0.724554, 0.7228943, 0.7213843, 0.7194303, 0.71743274, 0.7156686, 0.7138803, 0.7120106, 0.7100928, 0.70827585,
0.70601773, 0.70425487, 0.7022617, 0.7005341, 0.69807315, 0.6958541, 0.69365287, 0.6915356, 0.6894273, 0.6872592,
0.68504435, 0.68280417, 0.6805554, 0.67829555, 0.67598987, 0.6736049, 0.6712019, 0.6688099, 0.6664141, 0.6639404,
0.6614918, 0.65895927, 0.6563971, 0.6538657, 0.65125364, 0.6486398, 0.6460347, 0.64334214, 0.640625, 0.6379395,
0.6351655, 0.6323676, 0.62961215, 0.6267643, 0.62388444, 0.62103385, 0.6180911, 0.61518705, 0.61227673, 0.609252,
0.6061139, 0.60308915, 0.60009396, 0.5969255, 0.593728, 0.5905548, 0.5874501, 0.58427167, 0.5811302, 0.5777731,
0.57443804, 0.5711758, 0.56734765, 0.564373, 0.5606783, 0.5574146, 0.5540389, 0.55068034, 0.54715145, 0.54324687,
0.539717, 0.5372979, 0.5324143, 0.52889264, 0.5252844, 0.52163434, 0.5180627, 0.51436526, 0.5089714, 0.5064557,
0.5027428, 0.49855632, 0.49482208, 0.4910789, 0.4874051, 0.48361602, 0.47972724, 0.47564957, 0.47164467, 0.46774855,
0.46391433, 0.45965084, 0.45518857, 0.45106167, 0.44723046, 0.44279376, 0.43887442, 0.43397096, 0.4299339, 0.4260018,
0.4220557, 0.41801274, 0.4139335, 0.40930724, 0.40455434, 0.3997567, 0.39568597, 0.39182585, 0.38667685, 0.3822889),
(0.6291171, 0.62772375, 0.62631464, 0.6246244, 0.6231454, 0.62148553, 0.62014633, 0.6187852, 0.6174472, 0.61549115,
0.6139743, 0.6118972, 0.6105411, 0.6089985, 0.6072959, 0.6056764, 0.60423356, 0.6027859, 0.6011316, 0.59937304,
0.5975639, 0.5957414, 0.5940261, 0.59240824, 0.5908843, 0.58933806, 0.58757436, 0.5855354, 0.58368963, 0.58176845,
0.57995564, 0.57829595, 0.5765451, 0.57481563, 0.57306486, 0.5713131, 0.56948674, 0.5673175, 0.5652464, 0.56316113,
0.56119835, 0.5593834, 0.5574566, 0.5555006, 0.55352837, 0.5516543, 0.5496873, 0.5475355, 0.54563093, 0.5436106,
0.5413708, 0.5392705, 0.5372598, 0.53515184, 0.53300774, 0.53086835, 0.5286946, 0.52646375, 0.52433103, 0.5221642,
0.5199723, 0.5176852, 0.5154451, 0.5132145, 0.5109415, 0.5086689, 0.5063301, 0.5040085, 0.5017064, 0.49933812,
0.49694887, 0.4946086, 0.49220166, 0.4897793, 0.48733947, 0.48488238, 0.48247057, 0.4799899, 0.47749174, 0.47498047,
0.47245228, 0.46985924, 0.4673285, 0.46477753, 0.46217558, 0.45960996, 0.45699114, 0.4543643, 0.45161742, 0.44901514,
0.44632345, 0.44366008, 0.4409129, 0.43826467, 0.4355297, 0.43269625, 0.42980227, 0.42664585, 0.4247934, 0.42153,
0.41862586, 0.41586617, 0.4129981, 0.4101544, 0.40718567, 0.40413025, 0.40121037, 0.3982432, 0.39518782, 0.3922967,
0.38939247, 0.38637567, 0.38323602, 0.38020006, 0.37715346, 0.37425566, 0.37133098, 0.3682294, 0.36464328, 0.36122987,
0.3577566, 0.35500997, 0.35191092, 0.34914145, 0.34627476, 0.34331197, 0.3397645, 0.3357408, 0.33245447, 0.3293666,
0.32629177, 0.3230977, 0.31981853, 0.3166037, 0.313291, 0.30979386, 0.3063762, 0.303252, 0.30021513, 0.2966869,
0.29280362, 0.28946632, 0.28585625, 0.28217635, 0.27874744, 0.2756417, 0.2723057, 0.26893163, 0.26557496, 0.26226738,
0.258813, 0.25511092, 0.2516485, 0.24774839, 0.24437195, 0.24076639, 0.23689266, 0.23341803, 0.23027684, 0.22627309,
0.22284164, 0.21961954, 0.21606436, 0.21137191, 0.20774901, 0.20409445, 0.20005819, 0.19664256, 0.19265069, 0.18942001,
0.18565197, 0.18201254, 0.17836066, 0.17445107, 0.16977741, 0.1658994, 0.16192484, 0.15795086, 0.15439619, 0.15068942,
0.14694293, 0.14270622, 0.1385478, 0.13487618, 0.1306852, 0.12677771, 0.12306937, 0.11906212, 0.1152034, 0.11176047,
0.10725646, 0.10314671, 0.09910878, 0.09490419, 0.09016004, 0.086654656, 0.083175145, 0.079501025, 0.074559815, 0.070022225,
0.06520324, 0.062249944, 0.05921938, 0.05587649, 0.05178005, 0.047768828, 0.044202, 0.039818905, 0.035370674, 0.032398425,
0.028354695, 0.024785351, 0.021482285, 0.016779698, 0.012870749, 0.008657896, 0.0042611156, -0.00036098997, -0.0042694476, -0.008200066,
-0.012012364, -0.015636798, -0.019737698, -0.024142781, -0.028355144, -0.03264135, -0.03665254, -0.040731534, -0.044847555, -0.048602838,
-0.052422058, -0.056118865, -0.05988595, -0.064354576, -0.06904957, -0.07314254, -0.07780495, -0.08347805, -0.08661729, -0.090422854,
-0.09464767, -0.09811337, -0.10151302, -0.10549583, -0.10719968, -0.113728896, -0.1193629, -0.1238389, -0.12770006, -0.13184538,
-0.13605702, -0.1399128, -0.14449644, -0.14825188, -0.15293269, -0.15753673, -0.16144039, -0.1650824, -0.16995998, -0.17444256,
-0.17924443, -0.18337731, -0.1876395, -0.1924932, -0.19650042, -0.20069866, -0.20505166, -0.2090054, -0.21357813, -0.21769422,
-0.22176722, -0.22552502, -0.22928347, -0.23385458, -0.23852004, -0.24282813, -0.24696718, -0.25159925, -0.25584844, -0.26044282,
-0.26460642, -0.26842344, -0.2706808, -0.2766156, -0.28400102, -0.28715652, -0.29133096, -0.29506233, -0.298432, -0.30194992,
-0.30606747, -0.31102633, -0.31502455, -0.32047084, -0.32417277, -0.3283914, -0.33315226, -0.33739933, -0.3419492, -0.34573233,
-0.35125187, -0.35489786, -0.359247, -0.3598921, -0.36649784, -0.37190637, -0.37686262, -0.3818532, -0.3860231, -0.38981938,
-0.39365986, -0.39743608, -0.40152088, -0.40664652, -0.41075423, -0.41567838, -0.42071563, -0.42402893, -0.42851368, -0.43259278,
-0.43580425, -0.44052467, -0.4453912, -0.4496048, -0.45445943, -0.4583636, -0.46222985, -0.46603617, -0.4723307, -0.47552574,
-0.48128885, -0.48425078, -0.48656937, -0.48928997, -0.49409366, -0.5012796, -0.5087581, -0.51016855, -0.5139464, -0.51933324,
-0.52017206, -0.5309779, -0.5342519, -0.53785825, -0.5413826, -0.5458841, -0.55036557, -0.5542987, -0.55815333, -0.56266344,
-0.56690234, -0.5723951, -0.5748641, -0.5803261, -0.5833541, -0.5865971, -0.5914368, -0.59638774, -0.602291, -0.6063558,
-0.6090109, -0.6137789, -0.61558926, -0.6231522, -0.6275642, -0.63081694, -0.63664985, -0.64196414, -0.6452871, -0.64857054,
-0.65187883, -0.65570825, -0.660607, -0.6646344, -0.6690489, -0.6703059, -0.68044853, -0.68253475, -0.68580496, -0.6878576,
-0.69026226, -0.69658315, -0.7006356, -0.7033193, -0.709202, -0.710421, -0.7194361, -0.72419316, -0.72892773, -0.735571,
-0.7390627, -0.74146056, -0.74542516, -0.7520985, -0.75633377, -0.76023597, -0.76182663, -0.7698284, -0.7717773, -0.77944946))):
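        # default sound velocity profile: key is 'profile_<utc seconds>', value is a stringified list of
        # [depth, sound velocity] pairs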
if profile is None:
profile = {'profile_1563319288': '[[0.0, 1541.800048828125], [3.06, 1541.800048828125], [3.13, 1542.0], '
'[3.17, 1541.9000244140625], [3.44, 1542.0], [4.4, 1541.7000732421875], '
'[5.22, 1540.5], [5.64, 1540.300048828125], [7.68, 1539.5999755859375], '
'[8.64, 1539.0], [9.78, 1538.800048828125], [12.3, 1537.5], '
'[12.34, 1537.300048828125], [13.1, 1536.800048828125], '
'[13.46, 1536.300048828125], [13.58, 1536.0], [14.01, 1535.300048828125], '
'[14.1, 1534.800048828125], [14.44, 1534.2000732421875], '
'[14.71, 1533.800048828125], [14.8, 1533.5999755859375], '
'[15.15, 1533.300048828125], [16.88, 1532.7000732421875], [20.07, 1532.5], '
'[12000.0, 1675.800048828125]]'}
self.client = None
self.synth_ra_time = synth_ra_time
self.synth_att_time = synth_att_time
self.synth_nav_time = synth_nav_time
self.synth_delay = synth_delay
self.synth_frequency = synth_frequency
self.synth_txsector_beam = synth_txsector_beam
self.synth_beampointingangle = synth_beampointingangle
self.synth_qualityfactor = synth_qualityfactor
self.synth_counter = synth_counter
self.synth_soundspeed = synth_soundspeed
self.synth_tiltangle = synth_tiltangle
self.synth_traveltime = synth_traveltime
self.synth_ntx = synth_ntx
self.synth_heave = synth_heave
self.synth_roll = synth_roll
self.synth_pitch = synth_pitch
self.synth_yaw = synth_yaw
self.synth_altitude = synth_altitude
self.synth_latitude = synth_latitude
self.synth_longitude = synth_longitude
self.xyz88_depth = xyz88_depth
self.xyz88_alongtrack = xyz88_alongtrack
self.xyz88_acrosstrack = xyz88_acrosstrack
self.secs = secs
self.serialnum = serialnum
self.secondary_serialnum = secondary_serialnum
self.profile = profile
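        # installation parameters (mount angles, lever arms) and uncertainty estimates, each keyed by the
        # string timestamp of the xyz record, mimicking the timestamped entries of a converted dataset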
self.xyzrph = {'beam_opening_angle': {str(synth_xyztime): 1.0},
'heading_patch_error': {str(synth_xyztime): 0.5},
'heading_sensor_error': {str(synth_xyztime): 0.02},
'heave_error': {str(synth_xyztime): 0.05},
'horizontal_positioning_error': {str(synth_xyztime): 1.5},
                       'imu_h': {str(synth_xyztime): 0.0},
                       'imu_p': {str(synth_xyztime): 0.0},
                       'imu_r': {str(synth_xyztime): 0.0},
                       'imu_x': {str(synth_xyztime): 0.0},
                       'imu_y': {str(synth_xyztime): 0.0},
                       'imu_z': {str(synth_xyztime): 0.0},
                       'latency': {str(synth_xyztime): 0.0},
                       'latency_patch_error': {str(synth_xyztime): 0.0},
'pitch_patch_error': {str(synth_xyztime): 0.1},
'pitch_sensor_error': {str(synth_xyztime): 0.0005},
'roll_patch_error': {str(synth_xyztime): 0.1},
'roll_sensor_error': {str(synth_xyztime): 0.0005},
'tx_to_antenna_x': {str(synth_xyztime): 0.0},
'tx_to_antenna_y': {str(synth_xyztime): 0.0},
'tx_to_antenna_z': {str(synth_xyztime): 0.0},
'separation_model_error': {str(synth_xyztime): 0.0},
'surface_sv_error': {str(synth_xyztime): 0.5},
'timing_latency_error': {str(synth_xyztime): 0.001},
'vertical_positioning_error': {str(synth_xyztime): 1.0},
'vessel_speed_error': {str(synth_xyztime): 0.1},
'waterline_error': {str(synth_xyztime): -0.640},
'x_offset_error': {str(synth_xyztime): 0.2},
'y_offset_error': {str(synth_xyztime): 0.2},
'z_offset_error': {str(synth_xyztime): 0.2},
'rx_port_r': {str(synth_xyztime): synth_rx_port_mountroll},
'rx_port_p': {str(synth_xyztime): synth_rx_port_mountpitch},
'rx_port_h': {str(synth_xyztime): synth_rx_port_mountyaw},
'rx_stbd_r': {str(synth_xyztime): synth_rx_stbd_mountroll},
'rx_stbd_p': {str(synth_xyztime): synth_rx_stbd_mountpitch},
'rx_stbd_h': {str(synth_xyztime): synth_rx_stbd_mountyaw},
'tx_port_r': {str(synth_xyztime): synth_tx_port_mountroll},
'tx_port_p': {str(synth_xyztime): synth_tx_port_mountpitch},
'tx_port_h': {str(synth_xyztime): synth_tx_port_mountyaw},
'tx_stbd_r': {str(synth_xyztime): synth_tx_stbd_mountroll},
'tx_stbd_p': {str(synth_xyztime): synth_tx_stbd_mountpitch},
'tx_stbd_h': {str(synth_xyztime): synth_tx_stbd_mountyaw},
'tx_port_x': {str(synth_xyztime): synth_tx_port_x},
'tx_port_y': {str(synth_xyztime): synth_tx_port_y},
'tx_port_z': {str(synth_xyztime): synth_tx_port_z},
'tx_stbd_x': {str(synth_xyztime): synth_tx_stbd_x},
'tx_stbd_y': {str(synth_xyztime): synth_tx_stbd_y},
'tx_stbd_z': {str(synth_xyztime): synth_tx_stbd_z},
'tx_port_x_0': {str(synth_xyztime): synth_tx_x_0},
'tx_port_x_1': {str(synth_xyztime): synth_tx_x_1},
'tx_port_x_2': {str(synth_xyztime): synth_tx_x_2},
'tx_port_y_0': {str(synth_xyztime): synth_tx_y_0},
'tx_port_y_1': {str(synth_xyztime): synth_tx_y_1},
'tx_port_y_2': {str(synth_xyztime): synth_tx_y_2},
'tx_port_z_0': {str(synth_xyztime): synth_tx_z_0},
'tx_port_z_1': {str(synth_xyztime): synth_tx_z_1},
'tx_port_z_2': {str(synth_xyztime): synth_tx_z_2},
'tx_stbd_x_0': {str(synth_xyztime): synth_tx_x_0},
'tx_stbd_x_1': {str(synth_xyztime): synth_tx_x_1},
'tx_stbd_x_2': {str(synth_xyztime): synth_tx_x_2},
'tx_stbd_y_0': {str(synth_xyztime): synth_tx_y_0},
'tx_stbd_y_1': {str(synth_xyztime): synth_tx_y_1},
'tx_stbd_y_2': {str(synth_xyztime): synth_tx_y_2},
'tx_stbd_z_0': {str(synth_xyztime): synth_tx_z_0},
'tx_stbd_z_1': {str(synth_xyztime): synth_tx_z_1},
'tx_stbd_z_2': {str(synth_xyztime): synth_tx_z_2},
'rx_port_x': {str(synth_xyztime): synth_rx_port_x},
'rx_port_y': {str(synth_xyztime): synth_rx_port_y},
'rx_port_z': {str(synth_xyztime): synth_rx_port_z},
'rx_stbd_x': {str(synth_xyztime): synth_rx_stbd_x},
'rx_stbd_y': {str(synth_xyztime): synth_rx_stbd_y},
'rx_stbd_z': {str(synth_xyztime): synth_rx_stbd_z},
'rx_port_x_0': {str(synth_xyztime): synth_rx_x_0},
'rx_port_x_1': {str(synth_xyztime): synth_rx_x_1},
'rx_port_x_2': {str(synth_xyztime): synth_rx_x_2},
'rx_port_y_0': {str(synth_xyztime): synth_rx_y_0},
'rx_port_y_1': {str(synth_xyztime): synth_rx_y_1},
'rx_port_y_2': {str(synth_xyztime): synth_rx_y_2},
'rx_port_z_0': {str(synth_xyztime): synth_rx_z_0},
'rx_port_z_1': {str(synth_xyztime): synth_rx_z_1},
'rx_port_z_2': {str(synth_xyztime): synth_rx_z_2},
'rx_stbd_x_0': {str(synth_xyztime): synth_rx_x_0},
'rx_stbd_x_1': {str(synth_xyztime): synth_rx_x_1},
'rx_stbd_x_2': {str(synth_xyztime): synth_rx_x_2},
'rx_stbd_y_0': {str(synth_xyztime): synth_rx_y_0},
'rx_stbd_y_1': {str(synth_xyztime): synth_rx_y_1},
'rx_stbd_y_2': {str(synth_xyztime): synth_rx_y_2},
'rx_stbd_z_0': {str(synth_xyztime): synth_rx_z_0},
'rx_stbd_z_1': {str(synth_xyztime): synth_rx_z_1},
'rx_stbd_z_2': {str(synth_xyztime): synth_rx_z_2},
'waterline': {str(synth_xyztime): synth_waterline},
}
self.raw_ping = self.construct_raw_ping()
self.raw_att = self.construct_rawattitude()

    def construct_raw_ping(self):
        """
        Take the provided real data built into this class and generate new raw_ping data.

        Returns
        -------
        dataset: list of xarray Dataset objects that represent the raw_ping data you would get when running
            xarray_conversion normally.
        """
sec_vals = self.secs
dataset = []
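        # one Dataset is generated per entry in self.secs, i.e. one per system identifier / sonar head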
for cnt, sec in enumerate(sec_vals):
tme_vals = self.synth_ra_time[cnt]
            num_beams = len(self.synth_beampointingangle[cnt])
            bm_vals = list(range(num_beams))
tme_coord = xr.DataArray(np.array([tme_vals]), dims=['time'], coords={'time': np.array([tme_vals])})
beam_coord = xr.DataArray(np.array(bm_vals), dims=['beam'], coords={'beam': np.array(bm_vals)})
ntx = xr.DataArray(np.array([self.synth_ntx[cnt]]), dims=['time'], coords={'time': tme_coord})
counter = xr.DataArray(np.array([self.synth_counter[cnt]]), dims=['time'], coords={'time': tme_coord})
soundspeed = xr.DataArray(np.array([self.synth_soundspeed[cnt]]), dims=['time'], coords={'time': tme_coord})
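            # each beam-wise record may be stored as a flat per-ping tuple; np.expand_dims promotes a
            # 1d array to shape (1, beam) so every variable below aligns with the (time, beam) coords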
tiltangle_data = np.array(list(self.synth_tiltangle[cnt]))
if tiltangle_data.ndim == 1:
tiltangle_data = np.expand_dims(tiltangle_data, axis=0)
tiltangle = xr.DataArray(tiltangle_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
twtt_data = np.array(list(self.synth_traveltime[cnt]))
if twtt_data.ndim == 1:
twtt_data = np.expand_dims(twtt_data, axis=0)
twoway_travel_time = xr.DataArray(twtt_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
bpa_data = np.array(list(self.synth_beampointingangle[cnt]))
if bpa_data.ndim == 1:
bpa_data = np.expand_dims(bpa_data, axis=0)
beam_pointing_angle = xr.DataArray(bpa_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
del_data = np.array(list(self.synth_delay[cnt]))
if del_data.ndim == 1:
del_data = np.expand_dims(del_data, axis=0)
delay = xr.DataArray(del_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
frequency_data = np.array(list(self.synth_frequency[cnt]))
if frequency_data.ndim == 1:
frequency_data = np.expand_dims(frequency_data, axis=0)
frequency = xr.DataArray(frequency_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
txsector_data = np.array(list(self.synth_txsector_beam[cnt]))
if txsector_data.ndim == 1:
txsector_data = np.expand_dims(txsector_data, axis=0)
txsector = xr.DataArray(txsector_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
qf_data = np.array(list(self.synth_qualityfactor[cnt]))
if qf_data.ndim == 1:
qf_data = np.expand_dims(qf_data, axis=0)
quality_factor = xr.DataArray(qf_data, dims=['time', 'beam'], coords={'time': tme_coord, 'beam': bm_vals})
altitude = xr.DataArray(np.array([self.synth_altitude[cnt]]), dims=['time'], coords={'time': tme_coord})
latitude = xr.DataArray(np.array([self.synth_latitude[cnt]]), dims=['time'], coords={'time': tme_coord})
longitude = xr.DataArray(np.array([self.synth_longitude[cnt]]), dims=['time'], coords={'time': tme_coord})
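            # the per-head Dataset carries the sound velocity profile, serial numbers and geographic
            # extents as attributes, matching the attribute set of normally converted data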
dataset.append(xr.Dataset({'ntx': (['time'], ntx.data), # use the underlying numpy array to avoid problems in xarray 0.19.0
'counter': (['time'], counter.data),
'soundspeed': (['time'], soundspeed.data),
'tiltangle': (['time', 'beam'], tiltangle.data),
'txsector_beam': (['time', 'beam'], txsector.data),
'delay': (['time', 'beam'], delay.data),
'frequency': (['time', 'beam'], frequency.data),
'traveltime': (['time', 'beam'], twoway_travel_time.data),
'beampointingangle': (['time', 'beam'], beam_pointing_angle.data),
'qualityfactor': (['time', 'beam'], quality_factor.data),
'altitude': (['time'], altitude.data),
'latitude': (['time'], latitude.data),
'longitude': (['time'], longitude.data)},
coords={'time': tme_coord, 'beam': beam_coord},
attrs={list(self.profile.keys())[0]: self.profile[list(self.profile.keys())[0]],
'system_serial_number': [self.serialnum],
'secondary_system_serial_number': [self.secondary_serialnum],
'system_identifier': sec,
'min_lon': float(np.min(longitude)),
'min_lat': float(np.min(latitude)),
'max_lon': float(np.max(longitude)),
'max_lat': float(np.max(latitude))}).chunk())
return dataset

    def construct_rawattitude(self):
        """
        Take the provided real data built into this class and generate new xarray attitude data.

        Returns
        -------
        dataset: xarray Dataset object that represents the attitude data you would get when running
            xarray_conversion normally.
        """
tme_vals = self.synth_att_time
tme_coord = xr.DataArray(np.array(tme_vals), dims=['time'], coords={'time': np.array(tme_vals)})
heading = xr.DataArray(np.array(self.synth_yaw), dims=['time'], coords={'time': tme_coord})
heave = xr.DataArray(np.array(self.synth_heave), dims=['time'], coords={'time': tme_coord})
pitch = xr.DataArray(np.array(self.synth_pitch), dims=['time'], coords={'time': tme_coord})
roll = xr.DataArray(np.array(self.synth_roll), dims=['time'], coords={'time': tme_coord})
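        # .chunk() converts the backing numpy arrays to dask arrays, matching the lazily-evaluated
        # form that real converted attitude data carries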
return xr.Dataset({'heading': (['time'], heading.data), 'heave': (['time'], heave.data), 'pitch': (['time'], pitch.data),
'roll': (['time'], roll.data)}, coords={'time': tme_coord.data}).chunk()


class RealFqpr:
    """
    Class holding the real data that I took from a .all file (0009_20170523_181119_FA2806.all). Covers a few pings and
    the attitude values associated with them.

    fq.multibeam.raw_ping[0].time.values[20]
    Out[88]: 1495563084.457
    fq.multibeam.raw_ping[0].time.values[21]
    Out[89]: 1495563084.457001
    fq.multibeam.raw_ping[0].time.values[22]
    Out[90]: 1495563084.948
    fq.multibeam.raw_ping[0].time.values[23]
    Out[91]: 1495563084.948001

    recone = ad.getrecord(88, 20)
    rectwo = ad.getrecord(88, 21)
    recthree = ad.getrecord(88, 22)
    recfour = ad.getrecord(88, 23)
    """
def __init__(self,
synth_ra_time=(1495563084.457, 1495563084.457001, 1495563084.948, 1495563084.948001),
synth_att_time=(1495563084.440, 1495563084.451, 1495563084.461, 1495563084.47, 1495563084.481,
1495563084.941, 1495563084.951, 1495563084.961, 1495563084.971, 1495563084.981,
1495563084.991),
synth_nav_time=(1495563084.455, 1495563084.461, 1495563084.471, 1495563084.481, 1495563084.491,
1495563084.945, 1495563084.951, 1495563084.96, 1495563084.971, 1495563084.981),
synth_delay=((3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08),
(0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038),
(3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08,
3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08, 3.7951395e-08),
(0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038,
0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038, 0.002206038)),
synth_frequency=((265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000),
(275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000),
(265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000, 265000,
265000, 265000, 265000, 265000, 265000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000, 285000,
285000, 285000, 285000, 285000, 285000, 285000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000,
270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000, 270000),
(275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000, 275000,
275000, 275000, 275000, 275000, 275000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000, 290000,
290000, 290000, 290000, 290000, 290000, 290000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000,
280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000, 280000)),
synth_txsector_beam=((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2),
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)),
synth_beampointingangle=((72.06, 71.95, 71.84, 71.729996, 71.61, 71.5, 71.38, 71.159996, 71.04, 70.92,
70.799995, 70.68, 70.549995, 70.15, 70.02, 69.89, 69.77, 69.63, 69.5, 69.369995,
69.229996, 69.1, 68.96, 68.82, 68.68, 68.54, 68.39, 68.25, 68.1, 67.95,
67.799995, 67.54, 67.39, 67.229996, 67.08, 66.909996, 66.75, 66.59, 66.32, 66.159996,
65.99, 65.81, 65.64, 65.46, 65.18, 65.0, 64.82, 64.64, 64.04, 63.85,
63.66, 63.46, 63.27, 63.07, 62.859997, 62.66, 62.449997, 62.239998, 62.03, 61.809998,
61.6, 61.37, 61.149998, 60.92, 60.69, 60.46, 60.219997, 59.98, 59.739998, 59.489998,
59.25, 58.989998, 58.739998, 58.48, 58.21, 57.94, 57.57, 57.3, 57.02, 56.739998,
56.449997, 56.16, 55.86, 55.449997, 55.149998, 54.84, 54.53, 54.21, 53.89, 53.559998,
53.12, 52.78, 52.44, 52.1, 51.739998, 51.39, 51.02, 50.559998, 50.18, 49.8,
49.42, 49.03, 48.629997, 48.23, 47.71, 47.3, 46.879997, 46.45, 46.02, 45.57,
45.129997, 44.67, 44.1, 43.64, 43.16, 42.68, 42.19, 41.69, 41.19, 40.68,
40.16, 39.53, 38.989998, 38.45, 37.899998, 37.34, 36.78, 36.2, 35.62, 35.03,
34.32, 33.71, 33.09, 32.469997, 31.83, 31.189999, 30.539999, 29.88, 29.22, 28.539999,
27.859999, 27.05, 26.349998, 25.64, 24.93, 24.199999, 23.47, 22.73, 21.98, 21.22,
20.46, 19.689999, 18.91, 18.119999, 17.33, 16.529999, 15.73, 14.809999, 13.99, 13.17,
12.34, 11.509999, 10.67, 9.83, 8.99, 8.139999, 7.29, 6.43, 5.58, 4.72,
3.86, 2.99, 2.1299999, 1.27, 0.39999998, -0.45999998, -1.3299999, -2.19, -3.05, -3.9099998,
-4.77, -5.62, -6.47, -7.3199997, -8.17, -9.01, -9.84, -10.679999, -11.5, -12.33,
-13.03, -13.84, -14.65, -15.45, -16.24, -17.029999, -17.81, -18.58, -19.34, -20.1,
-20.85, -21.59, -22.32, -23.05, -23.769999, -24.359999, -25.07, -25.76, -26.439999, -27.119999,
-27.789999, -28.449999, -29.099998, -29.74, -30.369999, -31.0, -31.51, -32.11, -32.71, -33.309998,
-33.89, -34.469997, -35.03, -35.59, -36.14, -36.69, -37.12, -37.649998, -38.17, -38.68,
-39.18, -39.68, -40.17, -40.649998, -41.129997, -41.489998, -41.95, -42.399998, -42.85, -43.29,
-43.73, -44.149998, -44.57, -44.879997, -45.289997, -45.69, -46.09, -46.48, -46.86, -47.239998,
-47.62, -47.879997, -48.25, -48.6, -48.96, -49.3, -49.649998, -49.98, -50.2, -50.53,
-50.85, -51.17, -51.48, -51.789997, -52.1, -52.399998, -52.579998, -52.87, -53.16, -53.44,
-53.719997, -54.0, -54.27, -54.539997, -54.699997, -54.96, -55.219997, -55.469997, -55.719997, -55.969997,
-56.21, -56.449997, -56.69, -56.92, -57.149998, -57.379997, -57.6, -57.82, -58.039997, -58.26,
-58.469997, -58.68, -58.89, -59.09, -59.3, -59.5, -59.69, -59.89, -60.079998, -60.27,
-60.46, -60.64, -60.82, -61.01, -61.18, -60.949997, -61.12, -61.3, -61.469997, -61.629997,
-61.8, -61.96, -62.129997, -62.289997, -62.44, -62.6, -62.66, -62.809998, -62.96, -63.109997,
-63.26, -63.399998, -63.55, -63.69, -63.829998, -63.969997, -64.01, -64.15, -64.28, -64.42,
-64.549995, -64.68, -64.81, -64.939995, -65.06, -65.189995, -65.31, -65.33, -65.46, -65.58,
-65.7, -65.81, -65.93, -66.04, -66.159996, -66.27, -66.38, -66.49, -66.6, -66.71,
-66.82, -66.93, -67.03, -67.14, -67.24, -67.34, -67.439995, -67.54, -67.64, -67.74,
-67.84, -67.93, -68.03, -68.119995, -68.22, -68.31, -68.4, -68.5, -68.59, -68.67,
-68.76, -68.85, -68.939995, -69.03, -68.83, -68.92, -69.0, -69.08, -69.17, -69.25,
-69.33, -69.409996, -69.49, -69.57, -69.65, -69.72, -69.799995, -69.88, -69.95, -70.03,
-70.1, -70.08, -70.15, -70.22, -70.29, -70.369995, -70.439995, -70.51, -70.58, -70.64),
(72.06, 71.95, 71.84, 71.729996, 71.61, 71.5, 71.28, 71.159996, 71.04, 70.92,
70.799995, 70.68, 70.27, 70.15, 70.02, 69.89, 69.77, 69.63, 69.5, 69.369995,
69.229996, 69.1, 68.96, 68.82, 68.68, 68.54, 68.39, 68.25, 68.1, 67.95,
67.799995, 67.439995, 67.29, 67.13, 66.979996, 66.81, 66.65, 66.49, 66.32, 66.159996,
65.99, 65.81, 65.64, 65.46, 65.28, 65.0, 64.82, 64.64, 64.45, 64.26,
64.07, 63.77, 63.579998, 63.379997, 63.17, 62.969997, 62.76, 62.449997, 62.239998, 62.02,
61.809998, 61.579998, 61.359997, 61.129997, 60.69, 60.46, 60.219997, 59.98, 59.739998, 59.489998,
59.25, 58.989998, 58.739998, 58.48, 58.21, 57.94, 57.67, 57.3, 57.02, 56.739998,
56.449997, 56.16, 55.86, 55.559998, 55.149998, 54.84, 54.53, 54.21, 53.89, 53.559998,
53.23, 52.78, 52.44, 52.1, 51.739998, 51.39, 51.02, 50.66, 50.28, 49.8,
49.42, 49.03, 48.629997, 48.23, 47.82, 47.41, 46.879997, 46.45, 46.02, 45.57,
45.129997, 44.67, 44.21, 43.75, 43.16, 42.68, 42.19, 41.69, 41.19, 40.68,
40.16, 39.629997, 39.09, 38.45, 37.899998, 37.34, 36.78, 36.2, 35.62, 35.03,
34.43, 33.82, 32.98, 32.36, 31.72, 31.08, 30.429998, 29.769999, 29.109999, 28.43,
27.75, 27.05, 26.349998, 25.64, 24.93, 24.199999, 23.47, 22.73, 21.98, 21.22,
20.46, 19.689999, 18.91, 18.119999, 17.33, 16.529999, 15.73, 14.92, 14.099999, 13.28,
12.45, 11.62, 10.78, 9.83, 8.99, 8.139999, 7.29, 6.43, 5.58, 4.72,
3.86, 2.99, 2.1299999, 1.27, 0.39999998, -0.45999998, -1.3299999, -2.19, -3.05, -3.9099998,
-4.77, -5.62, -6.47, -7.3199997, -8.17, -8.9, -9.73, -10.57, -11.389999, -12.219999,
-13.03, -13.84, -14.65, -15.45, -16.24, -17.029999, -17.81, -18.58, -19.34, -20.1,
-20.85, -21.59, -22.32, -23.05, -23.769999, -24.47, -25.18, -25.869999, -26.55, -27.23,
-27.9, -28.56, -29.21, -29.849998, -30.26, -30.89, -31.51, -32.11, -32.71, -33.309998,
-33.89, -34.469997, -35.03, -35.59, -36.04, -36.59, -37.12, -37.649998, -38.17, -38.68,
-39.18, -39.68, -40.17, -40.54, -41.02, -41.489998, -41.95, -42.399998, -42.85, -43.29,
-43.73, -44.04, -44.46, -44.879997, -45.289997, -45.69, -46.09, -46.48, -46.86, -47.14,
-47.52, -47.879997, -48.25, -48.6, -48.96, -49.3, -49.649998, -49.87, -50.2, -50.53,
-50.85, -51.17, -51.48, -51.789997, -51.989998, -52.289997, -52.579998, -52.87, -53.16, -53.44,
-53.719997, -54.0, -54.17, -54.44, -54.699997, -54.96, -55.219997, -55.469997, -55.719997, -55.969997,
-56.21, -56.449997, -56.69, -56.92, -57.149998, -57.379997, -57.6, -57.61, -57.829998, -58.05,
-58.26, -58.469997, -58.68, -58.879997, -59.09, -59.289997, -59.379997, -59.579998, -59.77, -59.96,
-60.149998, -60.329998, -60.51, -60.699997, -60.77, -60.949997, -61.12, -61.3, -61.469997, -61.629997,
-61.8, -61.96, -62.129997, -62.19, -62.34, -62.5, -62.66, -62.809998, -62.96, -63.109997,
-63.26, -63.399998, -63.55, -63.69, -63.829998, -63.969997, -64.11, -64.25, -64.38, -64.52,
-64.65, -64.78, -64.909996, -64.84, -64.96, -65.09, -65.21, -65.33, -65.46, -65.58,
-65.7, -65.81, -65.93, -66.04, -66.159996, -66.27, -66.38, -66.49, -66.6, -66.71,
-66.82, -66.93, -67.03, -67.14, -67.24, -67.34, -67.439995, -67.54, -67.64, -67.74,
-67.84, -67.93, -68.03, -68.119995, -68.22, -68.31, -68.4, -68.5, -68.59, -68.67,
-68.479996, -68.57, -68.659996, -68.75, -68.83, -68.92, -69.0, -69.08, -69.17, -69.25,
-69.33, -69.409996, -69.49, -69.57, -69.65, -69.72, -69.7, -69.78, -69.85, -69.93,
-70.0, -70.08, -70.15, -70.22, -70.29, -70.369995, -70.439995, -70.51, -70.58, -70.64),
(74.59, 74.49, 74.38, 74.299995, 74.189995, 74.08, 73.97, 73.86, 73.75, 73.68,
73.56, 73.45, 73.33, 73.21, 73.1, 72.979996, 72.85, 72.729996, 72.61, 72.479996,
72.36, 72.25, 72.119995, 71.99, 71.86, 71.72, 71.59, 71.46, 71.32, 71.18,
71.04, 70.9, 70.77, 70.63, 70.479996, 70.33, 70.18, 70.03, 69.869995, 69.72,
69.56, 69.4, 69.24, 69.07, 68.909996, 68.75, 68.58, 68.409996, 68.24, 68.06,
67.88, 67.7, 67.52, 67.33, 67.15, 66.96, 66.77, 66.57, 66.38, 66.18,
65.979996, 65.77, 65.57, 65.36, 65.14, 64.93, 64.71, 64.49, 64.27, 64.04,
63.789997, 63.559998, 63.32, 63.079998, 62.84, 62.6, 62.35, 62.1, 61.84, 61.579998,
61.32, 61.05, 60.78, 60.5, 60.219997, 59.94, 59.649998, 59.359997, 59.07, 58.77,
58.46, 58.149998, 57.84, 57.52, 57.199997, 56.879997, 56.55, 56.18, 55.84, 55.5,
55.149998, 54.789997, 54.43, 54.07, 53.699997, 53.32, 52.94, 52.55, 52.16, 51.76,
51.35, 50.94, 50.5, 50.079998, 49.649998, 49.21, 48.77, 48.309998, 47.86, 47.39,
46.89, 46.41, 45.93, 45.43, 44.93, 44.43, 43.91, 43.39, 42.84, 42.3,
41.76, 41.2, 40.64, 40.07, 39.489998, 38.91, 38.309998, 37.71, 37.07, 36.45,
35.82, 35.19, 34.54, 33.89, 33.23, 32.559998, 31.88, 31.199999, 30.5, 29.769999,
29.06, 28.34, 27.619999, 26.88, 26.14, 25.39, 24.63, 23.869999, 23.1, 22.32,
21.539999, 20.75, 19.949999, 19.15, 18.34, 17.52, 16.699999, 15.839999, 15.009999, 14.17,
13.34, 12.5, 11.65, 10.8, 9.95, 9.099999, 8.25, 7.39, 6.5299997, 5.67,
4.8199997, 3.9599998, 3.1, 2.24, 1.39, 0.53, -0.32, -1.17, -2.02, -2.86,
-3.6999998, -4.54, -5.38, -6.17, -6.99, -7.81, -8.62, -9.429999, -10.24, -11.03,
-11.82, -12.61, -13.389999, -14.16, -14.92, -15.679999, -16.43, -17.17, -17.9, -18.63,
-19.35, -20.029999, -20.73, -21.42, -22.109999, -22.779999, -23.449999, -24.109999, -24.769999, -25.41,
-26.05, -26.67, -27.26, -27.869999, -28.48, -29.07, -29.66, -30.23, -30.8, -31.369999,
-31.92, -32.469997, -32.98, -33.51, -34.04, -34.55, -35.059998, -35.559998, -36.05, -36.54,
-37.02, -37.46, -37.92, -38.379997, -38.829998, -39.27, -39.71, -40.14, -40.57, -40.96,
-41.379997, -41.78, -42.18, -42.57, -42.96, -43.34, -43.719997, -44.09, -44.46, -44.82,
-45.17, -45.52, -45.87, -46.21, -46.539997, -46.84, -47.17, -47.489998, -47.8, -48.11,
-48.42, -48.719997, -49.02, -49.3, -49.59, -49.879997, -50.16, -50.44, -50.71, -50.98,
-51.25, -51.5, -51.76, -52.02, -52.27, -52.52, -52.76, -53.01, -53.239998, -53.48,
-53.71, -53.94, -54.17, -54.39, -54.61, -54.829998, -55.05, -55.239998, -55.449997, -55.649998,
-55.86, -56.059998, -56.26, -56.449997, -56.649998, -56.84, -57.03, -57.219997, -57.399998, -57.579998,
-57.76, -57.94, -58.12, -58.289997, -58.46, -58.629997, -58.8, -58.969997, -59.129997, -59.289997,
-59.449997, -59.609997, -59.77, -59.92, -60.07, -60.219997, -60.37, -60.52, -60.67, -60.809998,
-60.949997, -61.09, -61.23, -61.379997, -61.52, -61.649998, -61.789997, -61.92, -62.05, -62.18,
-62.309998, -62.43, -62.559998, -62.68, -62.809998, -62.93, -63.05, -63.17, -63.28, -63.399998,
-63.51, -63.629997, -63.739998, -63.85, -63.96, -64.09, -64.2, -64.31, -64.42, -64.52,
-64.63, -64.729996, -64.83, -64.93, -65.03, -65.13, -65.229996, -65.33, -65.43, -65.53,
-65.619995, -65.72, -65.81, -65.9, -65.99, -66.09, -66.17, -66.26, -66.35, -66.439995,
-66.53, -66.61, -66.72, -66.799995, -66.88, -66.97, -67.049995, -67.13, -67.21, -67.29,
-67.369995, -67.45, -67.52, -67.6, -67.68, -67.75, -67.83, -67.9, -67.979996, -68.049995),
(74.59, 74.49, 74.38, 74.299995, 74.189995, 74.08, 73.97, 73.86, 73.75, 73.7,
73.58, 73.47, 73.35, 73.229996, 73.119995, 73.0, 72.869995, 72.75, 72.63, 72.5,
72.38, 72.25, 72.119995, 71.99, 71.86, 71.72, 71.59, 71.49, 71.35, 71.21,
71.07, 70.93, 70.78, 70.64, 70.49, 70.34, 70.189995, 70.04, 69.88, 69.729996,
69.57, 69.409996, 69.25, 69.08, 68.92, 68.75, 68.58, 68.409996, 68.24, 68.06,
67.88, 67.7, 67.52, 67.33, 67.15, 66.96, 66.77, 66.57, 66.38, 66.18,
65.979996, 65.77, 65.57, 65.36, 65.14, 64.93, 64.71, 64.49, 64.27, 64.04,
63.809998, 63.57, 63.329998, 63.09, 62.85, 62.609997, 62.359997, 62.109997, 61.84, 61.579998,
61.32, 61.05, 60.78, 60.51, 60.219997, 59.94, 59.649998, 59.359997, 59.07, 58.77,
58.469997, 58.149998, 57.84, 57.52, 57.199997, 56.879997, 56.55, 56.21, 55.86, 55.52,
55.17, 54.809998, 54.449997, 54.09, 53.719997, 53.32, 52.94, 52.55, 52.16, 51.76,
51.35, 50.94, 50.52, 50.079998, 49.649998, 49.21, 48.77, 48.309998, 47.86, 47.39,
46.92, 46.41, 45.93, 45.43, 44.93, 44.43, 43.91, 43.39, 42.86, 42.32,
41.76, 41.2, 40.64, 40.07, 39.489998, 38.91, 38.309998, 37.71, 37.1, 36.48,
35.82, 35.19, 34.54, 33.89, 33.23, 32.559998, 31.88, 31.199999, 30.5, 29.8,
29.09, 28.369999, 27.58, 26.84, 26.099998, 25.35, 24.59, 23.83, 23.06, 22.279999,
21.5, 20.71, 19.91, 19.109999, 18.3, 17.48, 16.66, 15.839999, 15.009999, 14.17,
13.34, 12.5, 11.65, 10.8, 9.95, 9.099999, 8.25, 7.39, 6.5299997, 5.67,
4.8199997, 3.9599998, 3.1, 2.24, 1.39, 0.53, -0.32, -1.17, -2.02, -2.86,
-3.6999998, -4.54, -5.38, -6.21, -7.0299997, -7.85, -8.66, -9.469999, -10.28, -11.07,
-11.86, -12.65, -13.429999, -14.2, -14.96, -15.719999, -16.47, -17.21, -17.869999, -18.6,
-19.32, -20.029999, -20.73, -21.42, -22.109999, -22.779999, -23.449999, -24.109999, -24.769999, -25.41,
-26.019999, -26.64, -27.26, -27.869999, -28.48, -29.07, -29.66, -30.23, -30.8, -31.369999,
-31.9, -32.45, -32.98, -33.51, -34.04, -34.55, -35.059998, -35.559998, -36.05, -36.51,
-36.989998, -37.46, -37.92, -38.379997, -38.829998, -39.27, -39.71, -40.14, -40.55, -40.96,
-41.379997, -41.78, -42.18, -42.57, -42.96, -43.34, -43.7, -44.07, -44.44, -44.8,
-45.149998, -45.5, -45.85, -46.19, -46.51, -46.84, -47.17, -47.489998, -47.8, -48.11,
-48.42, -48.71, -49.01, -49.3, -49.59, -49.879997, -50.16, -50.44, -50.71, -50.969997,
-51.239998, -51.5, -51.76, -52.02, -52.27, -52.52, -52.76, -53.0, -53.23, -53.469997,
-53.699997, -53.93, -54.16, -54.379997, -54.6, -54.809998, -55.03, -55.239998, -55.449997, -55.649998,
-55.86, -56.059998, -56.26, -56.449997, -56.649998, -56.84, -57.03, -57.219997, -57.399998, -57.579998,
-57.76, -57.94, -58.12, -58.289997, -58.46, -58.629997, -58.8, -58.969997, -59.129997, -59.289997,
-59.449997, -59.609997, -59.77, -59.92, -60.07, -60.219997, -60.37, -60.52, -60.67, -60.809998,
-60.949997, -61.09, -61.23, -61.37, -61.51, -61.64, -61.78, -61.91, -62.039997, -62.17,
-62.3, -62.42, -62.55, -62.67, -62.8, -62.92, -63.039997, -63.16, -63.27, -63.39,
-63.5, -63.62, -63.73, -63.84, -63.949997, -64.06, -64.17, -64.28, -64.39, -64.49,
-64.6, -64.7, -64.799995, -64.939995, -65.04, -65.14, -65.24, -65.34, -65.43, -65.53,
-65.619995, -65.72, -65.81, -65.9, -65.99, -66.09, -66.17, -66.26, -66.35, -66.439995,
-66.53, -66.61, -66.7, -66.78, -66.86, -66.95, -67.03, -67.11, -67.189995, -67.27,
-67.35, -67.43, -67.5, -67.58, -67.659996, -67.729996, -67.81, -67.88, -67.96, -68.03)),
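# --- per-ping fields: each 4-element tuple below holds one value per synthetic ping ---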
synth_soundspeed=(1488.6, 1488.6, 1488.6, 1488.6),
synth_counter=(61986, 61987, 61988, 61989),
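# synth_traveltime: 400 per-beam travel times in seconds for each ping
# (two-way travel time is assumed here, per the usual Kongsberg range/angle convention)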
synth_traveltime=((0.3374375, 0.33574128, 0.33404624, 0.3323523, 0.3306599, 0.32896882, 0.32727906, 0.3255905, 0.32390353, 0.322218,
0.32053387, 0.3188512, 0.3171696, 0.31548983, 0.3138117, 0.31213507, 0.31045982, 0.30878627, 0.30711445, 0.30544433,
0.30200797, 0.2998462, 0.29679245, 0.29647696, 0.2950641, 0.29270256, 0.2923256, 0.29122898, 0.28977475, 0.288152,
0.28717324, 0.285622, 0.28515747, 0.28226322, 0.28172052, 0.28025725, 0.2776043, 0.2771847, 0.27503136, 0.27308482,
0.27217513, 0.27123868, 0.2689912, 0.26770753, 0.26570505, 0.26344052, 0.2616543, 0.26036218, 0.2553513, 0.2543611,
0.2529014, 0.25154543, 0.2504533, 0.24887377, 0.24781282, 0.24646112, 0.24496694, 0.24279521, 0.24143311, 0.23992637,
0.23836696, 0.23702249, 0.23514406, 0.23415889, 0.23316592, 0.23129629, 0.22966407, 0.22826019, 0.22680067, 0.22541586,
0.22421229, 0.22279815, 0.22108059, 0.21954548, 0.21816167, 0.2167328, 0.21525824, 0.21379735, 0.21222386, 0.21090919,
0.20954707, 0.20804231, 0.2063668, 0.2046565, 0.2033373, 0.20201279, 0.20065914, 0.19947606, 0.19785869, 0.19623053,
0.19484422, 0.19340433, 0.19197422, 0.19056596, 0.18937707, 0.18817508, 0.18674783, 0.18526357, 0.18383032, 0.18259765,
0.18136238, 0.18006901, 0.1786776, 0.1773137, 0.17586833, 0.17465161, 0.17351812, 0.17230795, 0.171026, 0.16971181,
0.16850959, 0.16689701, 0.16589576, 0.16480514, 0.16371034, 0.16239388, 0.16132355, 0.16022831, 0.15894939, 0.15757099,
0.15638211, 0.15529375, 0.15418589, 0.15309717, 0.15202093, 0.15103377, 0.15010498, 0.14910506, 0.14789501, 0.14690709,
0.14592192, 0.1449036, 0.14390156, 0.14297749, 0.14209363, 0.14124912, 0.14040565, 0.1394752, 0.13854763, 0.13776311,
0.13695164, 0.13607863, 0.13524538, 0.13447393, 0.13383554, 0.13311, 0.1324143, 0.131847, 0.13116807, 0.13039087,
0.12989727, 0.1293344, 0.12869512, 0.1278975, 0.12756506, 0.12713467, 0.12667976, 0.12615696, 0.12586565, 0.12567085,
0.12508342, 0.12458022, 0.12434319, 0.124130465, 0.12391781, 0.12371096, 0.12351953, 0.12337861, 0.123037465, 0.12298218,
0.122884125, 0.12287177, 0.122825414, 0.12281928, 0.12284699, 0.12284642, 0.12295518, 0.123063765, 0.12316184, 0.12333491,
0.12351629, 0.123924024, 0.123896964, 0.12392259, 0.12437285, 0.124518655, 0.12557365, 0.12587348, 0.12610409, 0.1262859,
0.12672538, 0.1272102, 0.12762138, 0.1284419, 0.12902081, 0.12952158, 0.130026, 0.13065875, 0.13121751, 0.13189772,
0.13257839, 0.13332321, 0.13372011, 0.13479127, 0.13551272, 0.13639, 0.13716088, 0.1380022, 0.13874415, 0.13950342,
0.14047438, 0.14139827, 0.1423899, 0.1432405, 0.14417371, 0.14520165, 0.1461937, 0.1472412, 0.14816545, 0.148998,
0.15011981, 0.15118724, 0.15220977, 0.15327466, 0.1544236, 0.1557034, 0.15672776, 0.15780595, 0.15882792, 0.1598993,
0.16106391, 0.16218315, 0.16336899, 0.16450976, 0.16554788, 0.16673224, 0.16802587, 0.16914153, 0.17030972, 0.17162412,
0.17295085, 0.17418154, 0.17548361, 0.17694345, 0.17809199, 0.17917247, 0.18048562, 0.18174766, 0.18293624, 0.1842345,
0.18559851, 0.18693331, 0.18802407, 0.18925586, 0.19083007, 0.19226876, 0.19335645, 0.19458514, 0.19650607, 0.19746383,
0.1983271, 0.19937204, 0.20100424, 0.2025753, 0.20409648, 0.20539345, 0.20676452, 0.20794113, 0.20893277, 0.21023737,
0.2117768, 0.21306436, 0.21439092, 0.21591753, 0.21724063, 0.21829474, 0.21930324, 0.22076441, 0.22204877, 0.22347717,
0.22520763, 0.22634006, 0.22736493, 0.22926562, 0.23047931, 0.23142922, 0.23233043, 0.23409557, 0.23543914, 0.23652168,
0.23774871, 0.23916756, 0.2404476, 0.2417661, 0.24313623, 0.2444644, 0.24582466, 0.24723598, 0.24860941, 0.25008437,
0.2515735, 0.2530708, 0.25392833, 0.2553322, 0.2558639, 0.25716078, 0.25775516, 0.2582176, 0.25939447, 0.26023754,
0.26045263, 0.2619916, 0.2629584, 0.2640043, 0.26487383, 0.26567894, 0.26693115, 0.2679422, 0.26938418, 0.27002198,
0.27070826, 0.27169305, 0.27310315, 0.27501243, 0.2752617, 0.27595088, 0.27737764, 0.27816954, 0.27857998, 0.28057557,
0.27989239, 0.2819125, 0.28271177, 0.28314772, 0.2835152, 0.285096, 0.2860017, 0.28680596, 0.28817615, 0.2893704,
0.2891026, 0.2903716, 0.2913212, 0.29234722, 0.29328594, 0.2937249, 0.29417732, 0.2947401, 0.29634175, 0.29782552,
0.29846802, 0.2992299, 0.29954284, 0.30072692, 0.30164543, 0.30294213, 0.30340365, 0.30444562, 0.30606294, 0.30626845,
0.30657163, 0.30782205, 0.30859825, 0.3093672, 0.31012902, 0.311427, 0.3120387, 0.3121292, 0.31230107, 0.3135856,
0.31425026, 0.31519422, 0.31555274, 0.3161917, 0.31767243, 0.3179527, 0.31766587, 0.31814748, 0.31916523, 0.32009357,
0.32061866, 0.32161832, 0.32132223, 0.32230845, 0.32313886, 0.3237364, 0.32347977, 0.32488364, 0.32545, 0.32647768,
0.32574812, 0.32751325, 0.32767314, 0.32827035, 0.32886252, 0.3294498, 0.33003208, 0.33060968, 0.33199447, 0.33165485),
(0.33286086, 0.33118767, 0.3295156, 0.32784468, 0.3261752, 0.32450712, 0.32284027, 0.32117453, 0.31951043, 0.3178478,
0.3161865, 0.31452668, 0.31286797, 0.311211, 0.30955562, 0.30790174, 0.30624908, 0.30459827, 0.30294913, 0.30130166,
0.2999808, 0.29793584, 0.29666272, 0.29535195, 0.2943339, 0.29275528, 0.29143497, 0.2899108, 0.28851902, 0.2867685,
0.2856211, 0.28363413, 0.28132424, 0.28047538, 0.27930307, 0.2771033, 0.2764726, 0.27512047, 0.27451742, 0.27225524,
0.27039626, 0.26918897, 0.2675723, 0.2660115, 0.26485437, 0.26268727, 0.26087478, 0.26004234, 0.25931886, 0.25688004,
0.25484818, 0.253472, 0.25120234, 0.25006732, 0.24886847, 0.24708949, 0.2454348, 0.24369726, 0.24219495, 0.24093822,
0.23985489, 0.23791327, 0.23633018, 0.23400958, 0.23240674, 0.23072962, 0.22933055, 0.22818145, 0.22649285, 0.22497757,
0.22371121, 0.22243716, 0.22068694, 0.21918961, 0.21807982, 0.21700086, 0.21538106, 0.21351945, 0.21215576, 0.21066187,
0.20932892, 0.20784307, 0.20650421, 0.20469497, 0.20325057, 0.20193627, 0.20043588, 0.19924983, 0.19785585, 0.19658189,
0.19473614, 0.19315888, 0.19194259, 0.19060436, 0.18939073, 0.18796088, 0.18665922, 0.18549667, 0.18407935, 0.18257701,
0.18121134, 0.17986487, 0.17854963, 0.17734906, 0.1760046, 0.17456003, 0.17329924, 0.17212135, 0.17092489, 0.1695952,
0.16841523, 0.16718346, 0.16603881, 0.16471009, 0.16344406, 0.16222867, 0.16105902, 0.15992494, 0.15885988, 0.15761916,
0.15648209, 0.1553135, 0.15418808, 0.15326373, 0.15211077, 0.1510404, 0.15001374, 0.14898524, 0.147978, 0.14696974,
0.14592198, 0.14477202, 0.14371444, 0.14277057, 0.1418359, 0.14100888, 0.14011908, 0.13928261, 0.13847078, 0.13760248,
0.13688685, 0.13611749, 0.13532026, 0.13455074, 0.13384242, 0.13301031, 0.13235469, 0.13160288, 0.13094611, 0.13053145,
0.12998609, 0.12953024, 0.12866259, 0.12812932, 0.1273899, 0.12707789, 0.12672006, 0.12615287, 0.12593159, 0.12552254,
0.12515098, 0.12455235, 0.124462426, 0.12410692, 0.12390766, 0.12380411, 0.12415181, 0.12331057, 0.123262145, 0.12300099,
0.12289409, 0.12286724, 0.122894794, 0.12303805, 0.12300654, 0.12304886, 0.12302006, 0.123087294, 0.12320298, 0.1233815,
0.123541676, 0.12367764, 0.12384253, 0.12401555, 0.1243041, 0.12460236, 0.12529752, 0.12564437, 0.12599713, 0.12631042,
0.12690003, 0.12732169, 0.12765713, 0.12829152, 0.1290989, 0.1293522, 0.13003391, 0.13069198, 0.13124296, 0.132006,
0.1328138, 0.13337041, 0.13408667, 0.13489902, 0.13566206, 0.13648161, 0.13730575, 0.13813399, 0.13886712, 0.13979977,
0.14072353, 0.14165695, 0.14273554, 0.14358151, 0.14443316, 0.1453344, 0.14611326, 0.14711714, 0.14800332, 0.14911148,
0.15023136, 0.15131749, 0.15243182, 0.15351419, 0.15458275, 0.15579124, 0.15670927, 0.15762487, 0.15876997, 0.15995735,
0.16113818, 0.16234262, 0.16355492, 0.16478373, 0.16561183, 0.16648972, 0.1676648, 0.16888827, 0.1701298, 0.17149624,
0.17296453, 0.17439163, 0.17552282, 0.17653602, 0.17783983, 0.17905387, 0.18054618, 0.18193889, 0.18304598, 0.1843297,
0.18543431, 0.18653731, 0.18780568, 0.18940958, 0.19066106, 0.19198756, 0.19352391, 0.19473048, 0.19582511, 0.19699496,
0.19838217, 0.19996342, 0.20133598, 0.20271938, 0.20417832, 0.20544527, 0.20648396, 0.20767626, 0.20919274, 0.21054257,
0.21186842, 0.21331093, 0.21476352, 0.21587414, 0.21688952, 0.21828754, 0.21981798, 0.22106262, 0.2224259, 0.22363804,
0.22496949, 0.22688825, 0.22808611, 0.22915138, 0.23015428, 0.23155305, 0.2327705, 0.23400426, 0.23518828, 0.23629488,
0.23767921, 0.2383312, 0.24041149, 0.24118999, 0.24218851, 0.24310917, 0.24399212, 0.24582842, 0.24807505, 0.2484584,
0.24898927, 0.25063944, 0.25164995, 0.25314948, 0.25465676, 0.25569138, 0.25659078, 0.25763106, 0.25903884, 0.26015314,
0.26133516, 0.2621599, 0.263621, 0.26418424, 0.26473442, 0.26658112, 0.26729834, 0.26825556, 0.2697009, 0.27106598,
0.271804, 0.27319574, 0.2737906, 0.27445823, 0.27570713, 0.27685735, 0.27828977, 0.27902433, 0.28054592, 0.28082117,
0.28124255, 0.28264135, 0.28352916, 0.2847527, 0.28541255, 0.28600365, 0.28707704, 0.2873831, 0.28953376, 0.2895397,
0.2904794, 0.29118988, 0.29204822, 0.29322278, 0.29393393, 0.29434648, 0.29478297, 0.29620054, 0.2968893, 0.2982344,
0.29894388, 0.29977655, 0.30002904, 0.3015654, 0.3024459, 0.30329716, 0.30414006, 0.30520344, 0.30537754, 0.30581886,
0.30731544, 0.30831861, 0.30879223, 0.30950722, 0.31046945, 0.31083757, 0.31188375, 0.3122523, 0.3132099, 0.31331334,
0.31421906, 0.31518498, 0.31494266, 0.3163285, 0.3166934, 0.31735632, 0.31796598, 0.3180701, 0.318577, 0.31913015,
0.32052407, 0.3210829, 0.32181957, 0.32245904, 0.32229233, 0.32335287, 0.32447138, 0.324823, 0.32538158, 0.32593533,
0.32648453, 0.32771406, 0.3290931, 0.33047274, 0.33185285, 0.33323348, 0.33461434, 0.3359959, 0.33737814, 0.33876082),
(0.34952378, 0.34781742, 0.34611216, 0.34440723, 0.34270442, 0.34100282, 0.3393024, 0.33760324, 0.3359047, 0.33420813,
0.33251286, 0.33081892, 0.3291264, 0.3274345, 0.32574475, 0.32405633, 0.3223694, 0.32068288, 0.31899884, 0.31731635,
0.3156354, 0.313956, 0.31227767, 0.3106015, 0.30892685, 0.307254, 0.30558184, 0.30391225, 0.30224448, 0.29974726,
0.29883817, 0.29686078, 0.2946734, 0.29348192, 0.2915586, 0.28951976, 0.28922865, 0.28747454, 0.28632924, 0.2845475,
0.28342858, 0.28202116, 0.27990416, 0.277935, 0.2756999, 0.27437663, 0.27302432, 0.27073893, 0.268861, 0.26634005,
0.2654823, 0.26414636, 0.26252753, 0.26140076, 0.25980127, 0.25760058, 0.25607803, 0.25479853, 0.252879, 0.25150388,
0.25019184, 0.24862282, 0.24697128, 0.24561174, 0.24321835, 0.24167174, 0.24029641, 0.23918897, 0.23772328, 0.23623858,
0.23429714, 0.23265418, 0.231168, 0.22961941, 0.22816032, 0.22642048, 0.22506748, 0.22351176, 0.2215603, 0.220271,
0.21917507, 0.21746637, 0.2157991, 0.21451057, 0.21293528, 0.21140668, 0.20996046, 0.20854498, 0.20684811, 0.2054097,
0.20414828, 0.20292741, 0.20135075, 0.1997913, 0.19832559, 0.19690043, 0.1953709, 0.19400004, 0.19251965, 0.19114597,
0.1897365, 0.18842469, 0.18712027, 0.18567996, 0.18443596, 0.18313166, 0.1817566, 0.1804667, 0.17888054, 0.17765112,
0.17643544, 0.17505316, 0.17374359, 0.17244327, 0.17131376, 0.1701405, 0.16892482, 0.16777545, 0.16658403, 0.16517414,
0.16395454, 0.16280822, 0.1616519, 0.16037945, 0.1592842, 0.15799911, 0.15679604, 0.15571615, 0.15467037, 0.1535193,
0.15235943, 0.15126604, 0.15028423, 0.14929827, 0.14825268, 0.14716478, 0.1462178, 0.14522602, 0.14412707, 0.14310741,
0.14226688, 0.1414078, 0.14066544, 0.1397508, 0.13878988, 0.13807268, 0.13731098, 0.13646573, 0.1356466, 0.13486522,
0.13407151, 0.13334863, 0.1325879, 0.13172221, 0.13125128, 0.13062342, 0.12999359, 0.12934342, 0.1290545, 0.1279493,
0.12765542, 0.12718399, 0.12663431, 0.1263254, 0.12608296, 0.12574545, 0.12525022, 0.12495604, 0.12453335, 0.12437017,
0.12435173, 0.1240722, 0.123518676, 0.123335734, 0.123181015, 0.12300213, 0.12292728, 0.122833595, 0.12278443, 0.12277298,
0.12277819, 0.12302151, 0.12301352, 0.12299361, 0.123048276, 0.12309367, 0.12336921, 0.12347535, 0.12371521, 0.123915195,
0.12411547, 0.124577165, 0.1247994, 0.1252104, 0.12554155, 0.12611936, 0.12653475, 0.12691125, 0.1274926, 0.12792705,
0.12843879, 0.12905855, 0.12937091, 0.13014194, 0.13071409, 0.13135426, 0.13166998, 0.132797, 0.13341077, 0.13425606,
0.13504706, 0.13564044, 0.13637829, 0.13732405, 0.13792647, 0.1388735, 0.13970393, 0.14064917, 0.14149219, 0.14235827,
0.1432345, 0.14422348, 0.14519358, 0.14616433, 0.14716151, 0.14805378, 0.14910705, 0.15014632, 0.15115243, 0.1522071,
0.15324861, 0.15438694, 0.15577088, 0.15671782, 0.15761985, 0.15865226, 0.15985563, 0.16099177, 0.1621253, 0.16326444,
0.16432644, 0.16538331, 0.16649805, 0.16757344, 0.16860327, 0.17009363, 0.17155463, 0.17283446, 0.17410983, 0.17515329,
0.17640325, 0.17761569, 0.17869896, 0.1798961, 0.181278, 0.18254937, 0.18375994, 0.18507482, 0.18640873, 0.18777947,
0.188932, 0.19001274, 0.19133976, 0.19267933, 0.1939582, 0.19520302, 0.1962091, 0.19714569, 0.19869547, 0.20016551,
0.2013677, 0.20286436, 0.20432717, 0.20540822, 0.20678699, 0.20805156, 0.20920618, 0.21042117, 0.21183781, 0.21327128,
0.21459864, 0.21576135, 0.21699542, 0.2187274, 0.21964803, 0.22075085, 0.2222718, 0.2236746, 0.22488222, 0.22607544,
0.22693227, 0.22774242, 0.22965862, 0.2307208, 0.23218565, 0.23339052, 0.23464046, 0.2361007, 0.23707837, 0.2382492,
0.23963168, 0.24079719, 0.24167283, 0.24224456, 0.24431519, 0.24544038, 0.24663107, 0.24787924, 0.249109, 0.2503338,
0.251632, 0.2536441, 0.25414267, 0.25511307, 0.2559753, 0.25707358, 0.2581656, 0.25917283, 0.26096886, 0.2618075,
0.26266012, 0.2645718, 0.2656489, 0.26627463, 0.26813418, 0.26902574, 0.26977584, 0.27101105, 0.2716428, 0.2722922,
0.2735653, 0.27446496, 0.27495456, 0.2763124, 0.27714124, 0.2781502, 0.27939692, 0.2801243, 0.28079376, 0.2819911,
0.28290904, 0.28399506, 0.28463173, 0.2856267, 0.28621188, 0.2874185, 0.28742227, 0.28836733, 0.29048777, 0.29098138,
0.2921293, 0.29305393, 0.29347047, 0.29369828, 0.29475656, 0.2955472, 0.29599845, 0.29703245, 0.29757124, 0.2982986,
0.29873192, 0.2997446, 0.3003785, 0.30158925, 0.3014942, 0.30311292, 0.3031039, 0.30466112, 0.30508977, 0.30610496,
0.30641335, 0.30722526, 0.3080106, 0.30864128, 0.30968332, 0.30971646, 0.31116155, 0.31207982, 0.31233025, 0.31333387,
0.31464726, 0.3142663, 0.31579366, 0.3159604, 0.31729323, 0.31720212, 0.31851956, 0.31892884, 0.31895152, 0.31991968,
0.32106316, 0.3208131, 0.32166645, 0.321932, 0.32254016, 0.3235073, 0.32439402, 0.32444543, 0.32538262, 0.32605052),
(0.34790522, 0.3462068, 0.34450945, 0.34281242, 0.34111747, 0.33942378, 0.33773124, 0.33604, 0.3343493, 0.3326606,
0.33097318, 0.3292871, 0.32760245, 0.32591838, 0.32423645, 0.3225559, 0.32087678, 0.31919804, 0.3175218, 0.31584713,
0.314174, 0.31250238, 0.3108318, 0.30916342, 0.30749652, 0.30583143, 0.30416703, 0.3025052, 0.30084518, 0.2974168,
0.2966839, 0.29483196, 0.29315454, 0.2916964, 0.289605, 0.28871137, 0.28654718, 0.28579998, 0.28347248, 0.28265527,
0.28090742, 0.2792868, 0.2775144, 0.2759834, 0.27364492, 0.27323818, 0.27147254, 0.2700461, 0.26860043, 0.2663896,
0.26417336, 0.26269966, 0.261743, 0.26079604, 0.25810105, 0.25728133, 0.25588825, 0.25416476, 0.25278136, 0.2510677,
0.24945892, 0.24811378, 0.24655314, 0.24508342, 0.24363345, 0.24182093, 0.24007909, 0.23856342, 0.23708029, 0.2340498,
0.23341027, 0.23215985, 0.23077641, 0.22946927, 0.22818644, 0.22638775, 0.22480561, 0.22329323, 0.22176018, 0.22005954,
0.21859173, 0.21718536, 0.21583132, 0.21417794, 0.2126156, 0.21108992, 0.2096315, 0.20834614, 0.20726106, 0.20538875,
0.2037896, 0.20255573, 0.20115304, 0.19960026, 0.19807142, 0.19659342, 0.19513562, 0.19392072, 0.19270298, 0.19110811,
0.18972525, 0.18835935, 0.18682402, 0.18544489, 0.18420038, 0.18283129, 0.1816057, 0.18034834, 0.17896956, 0.17769016,
0.17638947, 0.17503333, 0.17385443, 0.17260012, 0.17134963, 0.17001294, 0.16872884, 0.16753931, 0.16642272, 0.16512223,
0.16390495, 0.16266356, 0.16149503, 0.16024853, 0.15912336, 0.15790711, 0.15687737, 0.15578374, 0.15468983, 0.15347764,
0.15235794, 0.15135242, 0.15031648, 0.14932536, 0.14828695, 0.14717919, 0.1462355, 0.14534271, 0.14428245, 0.143215,
0.14222541, 0.14145051, 0.14061408, 0.13974057, 0.1389407, 0.13803357, 0.13716908, 0.13643545, 0.13568282, 0.13494469,
0.13414712, 0.13335785, 0.13255288, 0.13200292, 0.13142821, 0.13078408, 0.13001919, 0.12949517, 0.12884684, 0.12820776,
0.12757012, 0.12714896, 0.1268058, 0.12619527, 0.12593627, 0.12549984, 0.1252003, 0.12501144, 0.12469904, 0.12421357,
0.12403646, 0.12380109, 0.123570606, 0.12343775, 0.12323134, 0.12317958, 0.122890346, 0.1228781, 0.1228293, 0.12278651,
0.12285889, 0.12287098, 0.123152904, 0.12308871, 0.12306893, 0.123269565, 0.12336985, 0.12360632, 0.123662814, 0.123908624,
0.12421874, 0.12442542, 0.12493986, 0.12515582, 0.12567656, 0.12613358, 0.1264891, 0.12704371, 0.12744805, 0.12804587,
0.1283652, 0.12895599, 0.12960143, 0.13037103, 0.13078845, 0.13157575, 0.13215967, 0.13263683, 0.13345061, 0.13429774,
0.13493152, 0.13569126, 0.136438, 0.13726783, 0.13806362, 0.13895361, 0.13974044, 0.14061067, 0.14151202, 0.14237846,
0.14329684, 0.14423604, 0.14525178, 0.14623672, 0.14713715, 0.14813264, 0.14911932, 0.1501618, 0.1512373, 0.15227002,
0.15330744, 0.1543346, 0.15540637, 0.15649626, 0.15772445, 0.15874219, 0.15994392, 0.16102287, 0.16222943, 0.16337381,
0.16456024, 0.16566522, 0.16677713, 0.1678776, 0.16897833, 0.17028074, 0.17146754, 0.17276302, 0.17400728, 0.17517337,
0.17639211, 0.17762078, 0.17887408, 0.18004537, 0.181355, 0.1826774, 0.1838994, 0.18499501, 0.18615243, 0.18758196,
0.18872015, 0.18998979, 0.19114098, 0.19244903, 0.19387932, 0.19503474, 0.19626087, 0.1976598, 0.19909632, 0.20048262,
0.20147245, 0.2026596, 0.2041687, 0.20549457, 0.20668124, 0.20807917, 0.20942885, 0.21058725, 0.21181576, 0.21304144,
0.21408005, 0.21528511, 0.21718842, 0.21844403, 0.21968675, 0.22123079, 0.22253843, 0.22377332, 0.22483815, 0.22589852,
0.22745974, 0.22868374, 0.22987889, 0.2307443, 0.23191217, 0.23325554, 0.23447575, 0.23608, 0.23735261, 0.23855974,
0.24003546, 0.2410615, 0.24203207, 0.24357837, 0.2444672, 0.2453507, 0.24692322, 0.24797383, 0.24902591, 0.2503122,
0.25176135, 0.2533386, 0.2543625, 0.25525907, 0.25662854, 0.25836396, 0.25882873, 0.25957465, 0.26083076, 0.26215127,
0.2637028, 0.26445383, 0.26544997, 0.26656035, 0.26752347, 0.2694063, 0.26993862, 0.27046898, 0.27257794, 0.27264857,
0.27278677, 0.27444905, 0.27519545, 0.27629617, 0.27729246, 0.27827746, 0.27960426, 0.280282, 0.28090495, 0.28144053,
0.28317782, 0.28368717, 0.28537542, 0.2859166, 0.28674975, 0.28806946, 0.28874835, 0.28950787, 0.2898384, 0.29115772,
0.29196408, 0.29268453, 0.2935402, 0.29374206, 0.29455963, 0.29580715, 0.2968124, 0.29739895, 0.2983788, 0.29979277,
0.3000085, 0.30042776, 0.30176097, 0.30326927, 0.30288717, 0.30483282, 0.30488205, 0.3062204, 0.30644295, 0.30721697,
0.30727562, 0.30838144, 0.3092293, 0.3098389, 0.31087533, 0.31170174, 0.31185383, 0.3121063, 0.31304207, 0.31416106,
0.31470588, 0.31535697, 0.3165851, 0.31657994, 0.3168897, 0.31790352, 0.31857693, 0.31922302, 0.31991863, 0.32053146,
0.32159615, 0.321619, 0.32229674, 0.32238117, 0.32338658, 0.32437083, 0.32429138, 0.3256261, 0.3269364, 0.32686067)),
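# synth_qualityfactor: 400 per-beam integer quality estimates per ping
# (assumed to follow the Kongsberg convention, where smaller values indicate better detections)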
synth_qualityfactor=((35, 22, 32, 9, 34, 46, 87, 99, 22, 12, 12, 24, 43, 13, 84, 9, 20, 62, 24, 65,
30, 79, 37, 77, 20, 30, 4, 20, 33, 9, 31, 15, 8, 14, 15, 24, 23, 20, 24, 34,
9, 33, 15, 7, 19, 8, 17, 31, 57, 4, 15, 13, 12, 17, 18, 7, 13, 9, 10, 7,
4, 4, 7, 3, 5, 7, 4, 4, 8, 9, 9, 5, 6, 4, 3, 4, 4, 5, 5, 4,
5, 4, 7, 4, 4, 4, 4, 3, 7, 7, 4, 5, 7, 7, 4, 3, 3, 2, 12, 7,
4, 5, 4, 8, 4, 3, 3, 3, 3, 3, 5, 13, 7, 3, 4, 3, 4, 8, 7, 2,
2, 2, 4, 3, 2, 3, 1, 3, 5, 5, 3, 1, 0, 1, 0, 3, 3, 5, 3, 2,
3, 2, 1, 2, 3, 10, 3, 8, 4, 7, 2, 8, 3, 9, 7, 8, 9, 8, 8, 13,
8, 7, 6, 6, 6, 6, 5, 8, 5, 5, 5, 5, 5, 5, 5, 5, 10, 5, 5, 7,
7, 7, 5, 6, 7, 8, 17, 8, 6, 7, 8, 7, 11, 10, 9, 9, 10, 10, 3, 3,
7, 3, 8, 4, 1, 1, 1, 3, 5, 6, 2, 2, 4, 4, 3, 3, 3, 4, 5, 7,
4, 4, 5, 3, 4, 6, 7, 3, 1, 4, 4, 4, 7, 3, 2, 3, 4, 3, 4, 3,
2, 1, 4, 6, 6, 4, 4, 6, 6, 5, 5, 3, 3, 3, 2, 3, 6, 17, 9, 3,
2, 4, 5, 4, 5, 5, 2, 1, 3, 5, 2, 5, 5, 4, 3, 3, 6, 7, 9, 10,
10, 3, 8, 4, 5, 7, 10, 3, 3, 5, 6, 3, 6, 5, 3, 4, 5, 6, 11, 12,
6, 7, 9, 6, 11, 3, 5, 4, 16, 16, 5, 17, 16, 6, 16, 12, 14, 13, 14, 8,
6, 4, 9, 13, 11, 14, 3, 10, 5, 8, 18, 16, 4, 2, 4, 12, 5, 6, 12, 19,
15, 13, 11, 4, 9, 5, 3, 3, 24, 3, 8, 6, 4, 23, 25, 16, 23, 26, 10, 6,
5, 28, 28, 29, 29, 6, 18, 4, 4, 28, 27, 13, 29, 31, 12, 6, 9, 20, 22, 4,
38, 22, 16, 2, 38, 38, 10, 37, 37, 8, 29, 13, 35, 36, 37, 38, 39, 40, 9, 4),
(17, 16, 54, 9, 17, 47, 65, 8, 67, 15, 17, 18, 57, 60, 26, 12, 40, 55, 51, 67,
14, 37, 23, 60, 18, 43, 30, 29, 6, 29, 15, 27, 17, 9, 110, 21, 14, 26, 24, 22,
26, 13, 32, 8, 11, 13, 8, 9, 18, 17, 17, 33, 9, 6, 20, 5, 33, 20, 23, 6,
8, 4, 7, 8, 3, 9, 7, 4, 3, 3, 3, 6, 6, 2, 3, 4, 8, 7, 6, 4,
4, 6, 5, 6, 4, 5, 10, 10, 11, 7, 3, 2, 3, 3, 3, 2, 2, 3, 4, 2,
3, 4, 2, 4, 5, 6, 5, 5, 4, 6, 5, 4, 4, 3, 2, 2, 3, 3, 2, 2,
1, 3, 6, 5, 2, 1, 2, 3, 4, 4, 4, 4, 3, 1, 2, 1, 1, 1, 2, 2,
4, 8, 3, 3, 8, 3, 5, 7, 9, 9, 9, 9, 3, 9, 7, 8, 9, 7, 7, 7,
10, 6, 6, 6, 5, 6, 14, 7, 5, 7, 4, 5, 7, 13, 7, 9, 5, 5, 5, 7,
5, 5, 5, 6, 6, 8, 8, 9, 6, 9, 8, 9, 7, 4, 7, 8, 6, 8, 8, 4,
8, 2, 1, 3, 2, 1, 3, 5, 3, 1, 1, 3, 4, 3, 3, 2, 3, 3, 3, 2,
1, 2, 3, 3, 4, 6, 5, 3, 2, 2, 4, 3, 3, 3, 2, 2, 1, 2, 4, 5,
4, 4, 3, 5, 6, 4, 6, 4, 2, 8, 5, 6, 6, 8, 5, 2, 4, 5, 4, 2,
4, 2, 3, 3, 6, 6, 3, 4, 4, 5, 6, 4, 3, 2, 3, 4, 5, 4, 3, 3,
8, 10, 4, 6, 3, 3, 3, 3, 5, 5, 16, 9, 11, 4, 4, 5, 17, 10, 14, 4,
7, 1, 9, 15, 7, 4, 3, 5, 4, 7, 4, 7, 12, 6, 14, 4, 5, 7, 9, 5,
5, 16, 6, 11, 6, 9, 12, 7, 13, 6, 2, 11, 12, 6, 3, 14, 14, 10, 8, 16,
15, 12, 18, 7, 10, 20, 22, 12, 9, 16, 16, 3, 12, 5, 18, 17, 17, 3, 9, 12,
6, 4, 16, 16, 3, 3, 26, 18, 8, 2, 21, 2, 14, 1, 4, 25, 25, 6, 8, 10,
32, 9, 36, 3, 167, 21, 9, 41, 41, 41, 41, 4, 50, 28, 41, 6, 3, 5, 13, 39),
(60, 120, 66, 52, 17, 49, 26, 14, 15, 23, 16, 7, 27, 29, 13, 35, 39, 42, 19, 40,
15, 40, 176, 13, 30, 44, 27, 16, 58, 18, 7, 26, 9, 15, 67, 8, 7, 47, 40, 36,
35, 21, 41, 49, 14, 11, 12, 34, 36, 31, 25, 30, 16, 4, 16, 25, 23, 34, 10, 6,
4, 11, 5, 7, 15, 6, 10, 10, 13, 12, 8, 6, 4, 5, 7, 15, 7, 9, 8, 5,
7, 8, 6, 6, 8, 5, 4, 4, 4, 3, 3, 4, 3, 3, 2, 2, 3, 3, 3, 1,
3, 4, 4, 4, 3, 3, 2, 5, 7, 4, 8, 6, 3, 3, 3, 3, 4, 5, 3, 4,
5, 3, 3, 2, 2, 7, 3, 4, 4, 4, 4, 2, 2, 1, 4, 7, 4, 4, 3, 3,
2, 2, 2, 3, 2, 2, 5, 7, 2, 4, 2, 4, 4, 10, 7, 2, 2, 9, 8, 10,
9, 9, 6, 6, 7, 10, 6, 8, 6, 6, 10, 10, 6, 5, 5, 5, 5, 5, 5, 5,
5, 7, 7, 5, 5, 6, 8, 5, 6, 6, 8, 8, 7, 7, 7, 9, 8, 8, 10, 8,
4, 8, 7, 4, 2, 2, 8, 7, 9, 6, 9, 4, 5, 9, 6, 3, 4, 9, 3, 2,
4, 4, 1, 2, 2, 4, 5, 2, 2, 2, 2, 3, 4, 2, 3, 4, 3, 2, 2, 2,
1, 4, 5, 2, 3, 7, 3, 2, 5, 5, 8, 3, 2, 3, 3, 2, 5, 6, 5, 7,
2, 2, 5, 4, 3, 8, 7, 8, 6, 6, 7, 19, 6, 6, 4, 3, 3, 3, 3, 6,
4, 5, 8, 7, 6, 6, 3, 3, 4, 3, 4, 21, 7, 8, 2, 2, 2, 3, 2, 7,
11, 5, 2, 12, 5, 5, 8, 8, 10, 13, 42, 13, 15, 9, 19, 10, 8, 12, 12, 2,
7, 8, 4, 15, 14, 4, 8, 3, 2, 3, 8, 3, 4, 7, 9, 5, 14, 6, 2, 8,
8, 3, 5, 4, 5, 11, 9, 16, 14, 4, 11, 15, 6, 4, 11, 11, 10, 3, 8, 7,
9, 8, 29, 5, 87, 5, 10, 3, 3, 9, 13, 43, 6, 3, 23, 47, 25, 8, 5, 26,
2, 14, 7, 4, 8, 4, 3, 4, 16, 27, 54, 39, 7, 5, 13, 30, 1, 2, 32, 10),
(25, 17, 82, 25, 20, 34, 28, 11, 40, 32, 40, 17, 7, 6, 22, 10, 17, 8, 37, 16,
9, 18, 18, 17, 7, 7, 9, 11, 34, 21, 151, 125, 104, 8, 61, 61, 56, 12, 24, 28,
10, 24, 8, 20, 9, 4, 16, 17, 13, 39, 20, 13, 6, 25, 13, 10, 37, 17, 9, 11,
7, 10, 10, 7, 15, 13, 23, 7, 10, 22, 9, 10, 6, 4, 7, 8, 5, 7, 7, 6,
5, 4, 7, 5, 4, 3, 8, 8, 7, 8, 3, 4, 5, 5, 5, 7, 3, 3, 8, 6,
4, 3, 5, 3, 2, 2, 3, 5, 4, 4, 3, 3, 5, 3, 4, 3, 2, 3, 3, 2,
1, 2, 2, 2, 2, 3, 2, 3, 4, 3, 1, 3, 2, 2, 2, 3, 4, 5, 3, 2,
4, 4, 5, 3, 2, 4, 5, 4, 3, 3, 8, 3, 9, 3, 8, 3, 8, 5, 10, 10,
10, 6, 9, 7, 7, 6, 6, 6, 8, 7, 6, 6, 5, 6, 6, 8, 5, 5, 5, 5,
5, 5, 8, 5, 5, 6, 6, 6, 6, 6, 7, 7, 8, 6, 7, 7, 7, 9, 8, 8,
8, 9, 6, 9, 8, 3, 3, 9, 4, 8, 3, 5, 3, 2, 1, 3, 3, 3, 1, 2,
2, 2, 4, 5, 4, 6, 3, 4, 4, 2, 0, 2, 3, 1, 2, 2, 4, 4, 1, 2,
2, 3, 2, 2, 3, 4, 3, 2, 4, 4, 3, 2, 2, 3, 5, 5, 6, 4, 7, 5,
2, 2, 4, 5, 4, 7, 8, 11, 7, 5, 5, 8, 2, 7, 6, 3, 3, 2, 3, 3,
2, 11, 5, 4, 2, 3, 4, 10, 6, 7, 2, 5, 6, 5, 5, 4, 8, 10, 5, 6,
7, 6, 8, 6, 4, 8, 5, 10, 14, 7, 11, 6, 3, 3, 12, 7, 4, 11, 8, 6,
31, 6, 11, 11, 32, 8, 3, 7, 24, 3, 4, 12, 4, 13, 13, 13, 18, 6, 9, 6,
13, 33, 3, 4, 13, 2, 8, 8, 7, 65, 13, 31, 7, 10, 8, 14, 77, 5, 17, 5,
3, 2, 20, 4, 7, 15, 17, 4, 3, 20, 6, 5, 21, 15, 3, 8, 3, 33, 50, 5,
20, 21, 16, 10, 3, 21, 5, 22, 4, 23, 5, 3, 10, 13, 5, 28, 7, 29, 6, 30)),
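# synth_tiltangle: 400 per-beam TX tilt angles in degrees per ping; note the three
# constant plateaus within each ping, one per transmit sector (see synth_ntx=3 below)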
synth_tiltangle=((-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61,
-0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, -0.61, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22,
0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84,
0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84, 0.84),
(-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41,
-0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, -0.41, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996,
0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 0.59999996, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05,
1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05),
(-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23, -1.23,
-1.23, -1.23, -1.23, -1.23, -1.23, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19,
0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999,
1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999, 1.4399999),
(-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995,
-0.98999995, -0.98999995, -0.98999995, -0.98999995, -0.98999995, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 0.64,
0.64, 0.64, 0.64, 0.64, 0.64, 0.64, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68,
1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68, 1.68)),
synth_ntx=(3, 3, 3, 3),
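# attitude series (11 samples each): heave in metres, roll/pitch/yaw in degrees
# (units assumed from the magnitudes and the usual motion-sensor conventions)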
synth_heave=(0.009999999776482582, 0.009999999776482582, 0.009999999776482582, 0.009999999776482582,
0.009999999776482582, -0.009999999776482582, -0.009999999776482582, -0.009999999776482582,
-0.009999999776482582, -0.009999999776482582, -0.009999999776482582),
synth_roll=(0.5899999737739563, 0.47999998927116394, 0.3799999952316284, 0.26999998092651367,
0.1599999964237213, -4.230000019073486, -4.289999961853027, -4.349999904632568,
-4.409999847412109, -4.46999979019165, -4.519999980926514),
synth_pitch=(-0.4899999797344208, -0.5, -0.5, -0.5099999904632568, -0.5199999809265137,
-0.4099999964237213, -0.3999999761581421, -0.38999998569488525, -0.3799999952316284,
-0.3700000047683716, -0.35999998450279236),
synth_yaw=(307.8299865722656, 307.8500061035156, 307.8599853515625, 307.8800048828125, 307.889984130859,
308.5799865722656, 308.5899963378906, 308.6000061035156, 308.6099853515625, 308.6199951171875,
308.6300048828125),
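# per-ping navigation: latitude/longitude in decimal degrees; synth_altitude appears
# to be in metres, but its sign/reference convention is not stated here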
synth_altitude=(-23.99238128, -23.991950434852843, -23.97421651, -23.97251081602561),
synth_latitude=(47.78895106, 47.788951346363866, 47.788956832232806, 47.78895705141592),
synth_longitude=(-122.4772233, -122.4772239100667, -122.47723500815962, -122.47723549195827),
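# installation parameters: synth_xyztime is apparently a POSIX timestamp; offsets and
# lever arms in metres, mount angles in degrees (assumed units; the _0/_1/_2 entries
# look like per-sector values)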
synth_xyztime=1495563079, synth_waterline=-0.640,
synth_tx_mountroll=0, synth_tx_mountpitch=0, synth_tx_mountyaw=0,
synth_rx_mountroll=0, synth_rx_mountpitch=0, synth_rx_mountyaw=0,
synth_tx_x=0.00, synth_tx_y=0.00, synth_tx_z=0.00,
synth_tx_x_0=0.00, synth_tx_x_1=0.00, synth_tx_x_2=0.00, synth_tx_y_0=-0.0554, synth_tx_y_1=0.0131,
synth_tx_y_2=0.0554, synth_tx_z_0=-0.012, synth_tx_z_1=-0.006, synth_tx_z_2=-0.012,
synth_rx_x=-0.100, synth_rx_y=-0.304, synth_rx_z=-0.016,
synth_rx_x_0=0.011, synth_rx_x_1=0.011, synth_rx_x_2=0.011, synth_rx_y_0=0.00, synth_rx_y_1=0.00,
synth_rx_y_2=0.00, synth_rx_z_0=-0.006, synth_rx_z_1=-0.006, synth_rx_z_2=-0.006,
serialnum='40111', secondary_serialnum='0',
profile=None,
secs=('40111',),
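# xyz88_*: decoded XYZ 88 datagram arrays, 400 beams per ping. Quick sanity check
# (assuming depth ~ one-way range at nadir): 0.1228 s / 2 * 1488.6 m/s ~ 91.4 m,
# roughly matching the ~90.5 m nadir depths below once reference-point offsets
# (waterline, heave) of order 1 m are allowed for.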
xyz88_depth=((92.06035 , 91.95807 , 91.88622 , 91.773445, 91.695786, 91.57608 ,
91.52661 , 91.81559 , 91.7159 , 91.65144 , 91.54755 , 91.4417 ,
91.40188 , 92.308655, 92.223595, 92.167305, 92.038475, 91.97768 ,
91.90828 , 91.8051 , 91.15648 , 90.849365, 90.301445, 90.68221 ,
90.69038 , 90.34231 , 90.72844 , 90.82467 , 90.8089 , 90.76446 ,
90.91836 , 91.21524 , 91.55413 , 91.04521 , 91.31912 , 91.35388 ,
90.89974 , 91.26508 , 91.336784, 91.12317 , 91.31172 , 91.51731 ,
91.18421 , 91.26309 , 91.3571 , 91.05201 , 90.90386 , 90.9502 ,
90.766205, 90.925064, 90.8804 , 90.92283 , 91.026245, 90.94698 ,
91.106026, 91.124695, 91.08556 , 90.78457 , 90.80491 , 90.763306,
90.69388 , 90.75102 , 90.543335, 90.72136 , 90.88873 , 90.67829 ,
90.57639 , 90.583374, 90.5331 , 90.55636 , 90.62423 , 90.626045,
90.49224 , 90.42357 , 90.45429 , 90.435 , 90.63666 , 90.58172 ,
90.513275, 90.52375 , 90.54794 , 90.50012 , 90.36588 , 90.435295,
90.45224 , 90.48081 , 90.46099 , 90.55087 , 90.409454, 90.29325 ,
90.467606, 90.43368 , 90.36895 , 90.32243 , 90.408 , 90.45636 ,
90.40018 , 90.484314, 90.41933 , 90.456474, 90.48098 , 90.482216,
90.41856 , 90.372604, 90.44945 , 90.46438 , 90.52351 , 90.54598 ,
90.51913 , 90.47074 , 90.47021 , 90.247925, 90.513504, 90.5574 ,
90.6147 , 90.532425, 90.584625, 90.622246, 90.52737 , 90.40024 ,
90.360985, 90.499146, 90.49667 , 90.48808 , 90.48308 , 90.52616 ,
90.59036 , 90.615234, 90.48266 , 90.49061 , 90.60594 , 90.57632 ,
90.54895 , 90.552704, 90.59347 , 90.633 , 90.66157 , 90.62524 ,
90.571915, 90.6113 , 90.61266 , 90.66376 , 90.62608 , 90.6179 ,
90.68069 , 90.687454, 90.68652 , 90.762115, 90.755104, 90.65876 ,
90.74021 , 90.76615 , 90.71836 , 90.542496, 90.68163 , 90.736305,
90.74869 , 90.7445 , 90.840614, 90.99138 , 90.84078 , 90.727036,
90.79305 , 90.85668 , 90.8976 , 90.92781 , 90.94899 , 90.988686,
90.85856 , 90.919304, 90.929436, 90.98322 , 90.99065 , 91.00754 ,
91.02907 , 91.00918 , 91.04935 , 91.06845 , 91.059906, 91.08624 ,
91.098045, 91.257545, 91.07654 , 90.91437 , 91.04012 , 90.927574,
91.45697 , 91.414474, 91.30404 , 91.13882 , 91.18989 , 91.20801 ,
91.15739 , 91.37682 , 91.40767 , 91.36745 , 91.30865 , 91.33095 ,
91.28041 , 91.30064 , 91.29955 , 91.32726 , 91.11231 , 91.32834 ,
91.29297 , 91.43916 , 91.40538 , 91.41529 , 91.355804, 91.27971 ,
91.33037 , 91.337715, 91.3803 , 91.32011 , 91.30292 , 91.32562 ,
91.42844 , 91.45833 , 91.39307 , 91.26412 , 91.29312 , 91.28383 ,
91.24967 , 91.22302 , 91.2388 , 91.31284 , 91.36669 , 91.312675,
91.220894, 91.15824 , 91.12612 , 91.06577 , 91.04068 , 90.98903 ,
90.86418 , 90.977005, 90.97647 , 90.89353 , 90.82274 , 90.83039 ,
90.830734, 90.775635, 90.76201 , 90.994774, 90.875046, 90.739334,
90.7087 , 90.65639 , 90.570206, 90.50629 , 90.47941 , 90.63971 ,
90.45104 , 90.37565 , 90.43266 , 90.42596 , 90.244675, 90.15863 ,
90.57105 , 90.33566 , 90.06787 , 89.872345, 89.928375, 89.96624 ,
89.94741 , 89.861565, 90.05491 , 89.92046 , 89.69613 , 89.59733 ,
89.61083 , 89.48251 , 89.406395, 89.40382 , 89.539955, 89.34886 ,
89.13139 , 89.111595, 88.989204, 88.93797 , 88.998116, 88.830734,
88.61378 , 88.723915, 88.58792 , 88.343544, 88.09974 , 88.14611 ,
88.055374, 87.86159 , 87.74058 , 87.65648 , 87.53995 , 87.45678 ,
87.33113 , 87.23661 , 87.17364 , 87.067535, 86.99469 , 86.921875,
86.87371 , 86.82135 , 86.60108 , 86.47631 , 86.16548 , 87.210785,
86.91795 , 86.51953 , 86.384636, 86.164085, 85.73328 , 85.733765,
85.511856, 85.31103 , 85.13922 , 84.88287 , 85.069664, 84.902306,
84.83716 , 84.54372 , 84.29329 , 84.132706, 84.036095, 84.1181 ,
83.75122 , 83.51622 , 83.757385, 83.515656, 83.22127 , 83.299 ,
82.70851 , 82.81579 , 82.592735, 82.261284, 81.97401 , 81.936714,
81.76866 , 81.93191 , 81.827385, 81.73244 , 81.25381 , 81.20854 ,
81.036476, 80.91647 , 80.73524 , 80.47964 , 80.195526, 79.97084 ,
79.95673 , 79.93937 , 79.694435, 79.47791 , 79.21095 , 79.10164 ,
78.954666, 78.90364 , 78.63229 , 78.509735, 78.502235, 78.19612 ,
77.91423 , 77.87622 , 77.67835 , 77.51287 , 77.30734 , 77.26881 ,
77.05818 , 76.714066, 76.42623 , 76.412964, 76.209656, 76.070854,
75.8236 , 75.606926, 76.671394, 76.40274 , 76.035126, 75.81483 ,
75.68311 , 75.56681 , 75.38909 , 75.28472 , 74.91301 , 74.802704,
74.65428 , 74.52566 , 74.16202 , 74.10487 , 73.96223 , 73.85205 ,
73.454796, 73.857155, 73.6232 , 73.450325, 73.31188 , 73.095825,
72.916275, 72.771484, 72.76588 , 72.45702),
(90.76799 , 90.699684, 90.59168 , 90.48029 , 90.439064, 90.3572 ,
90.60728 , 90.551346, 90.45774 , 90.35882 , 90.2911 , 90.185196,
91.12573 , 91.04257 , 90.95898 , 90.8718 , 90.77804 , 90.71785 ,
90.61606 , 90.539474, 90.55688 , 90.31801 , 90.375145, 90.41661 ,
90.54392 , 90.46314 , 90.52548 , 90.456795, 90.48739 , 90.3703 ,
90.46814 , 90.94431 , 90.62145 , 90.82607 , 90.89088 , 90.647224,
90.91293 , 90.93586 , 91.23371 , 90.91306 , 90.75001 , 90.860985,
90.7694 , 90.74872 , 90.830635, 90.85425 , 90.694885, 90.92879 ,
91.19714 , 90.797264, 90.563126, 90.89111 , 90.550575, 90.66674 ,
90.75463 , 90.623825, 90.532036, 90.66801 , 90.64047 , 90.69855 ,
90.81529 , 90.62576 , 90.5632 , 90.207985, 90.64664 , 90.50894 ,
90.5252 , 90.631805, 90.48893 , 90.43077 , 90.46948 , 90.5471 ,
90.37574 , 90.343346, 90.486176, 90.63409 , 90.52667 , 90.52872 ,
90.54883 , 90.48134 , 90.51672 , 90.453384, 90.487755, 90.28327 ,
90.47674 , 90.48725 , 90.422844, 90.51001 , 90.469315, 90.51552 ,
90.26333 , 90.37717 , 90.41161 , 90.3975 , 90.470665, 90.38876 ,
90.41193 , 90.47366 , 90.43568 , 90.49537 , 90.454025, 90.410446,
90.40083 , 90.437096, 90.40294 , 90.2868 , 90.45612 , 90.494995,
90.49503 , 90.454254, 90.46458 , 90.46353 , 90.49371 , 90.39176 ,
90.50826 , 90.48029 , 90.47535 , 90.47531 , 90.51442 , 90.45853 ,
90.448875, 90.416954, 90.405594, 90.6139 , 90.563065, 90.55612 ,
90.560684, 90.555405, 90.5579 , 90.55316 , 90.51673 , 90.407394,
90.562035, 90.549995, 90.54608 , 90.60099 , 90.59555 , 90.61613 ,
90.63474 , 90.61552 , 90.67672 , 90.70908 , 90.69517 , 90.688194,
90.7037 , 90.63726 , 90.66306 , 90.61077 , 90.610664, 90.772255,
90.81744 , 90.91824 , 90.70933 , 90.7257 , 90.56981 , 90.70797 ,
90.789215, 90.70954 , 90.858154, 90.85591 , 90.86387 , 90.682495,
90.85793 , 90.846794, 90.89673 , 91.00234 , 91.42256 , 90.94348 ,
91.028984, 90.93831 , 90.94021 , 90.982735, 91.04439 , 91.17163 ,
91.14848 , 91.15956 , 91.097 , 91.08553 , 91.089455, 91.119255,
91.11483 , 91.073654, 91.03352 , 90.976135, 90.98549 , 91.01371 ,
91.28279 , 91.27707 , 91.26153 , 91.189514, 91.3075 , 91.27929 ,
91.1737 , 91.26005 , 91.452576, 91.23704 , 91.302605, 91.34197 ,
91.28531 , 91.35515 , 91.44755 , 91.345146, 91.33945 , 91.38603 ,
91.37782 , 91.40176 , 91.39979 , 91.39724 , 91.32833 , 91.361786,
91.37723 , 91.38641 , 91.480354, 91.413124, 91.55323 , 91.497734,
91.35773 , 91.36033 , 91.2716 , 91.29981 , 91.33824 , 91.33923 ,
91.35894 , 91.34029 , 91.4248 , 91.45698 , 91.3257 , 91.17713 ,
91.156166, 91.15482 , 91.13092 , 91.117386, 91.10602 , 91.257195,
91.01805 , 90.80598 , 90.756744, 90.73394 , 90.68608 , 90.72127 ,
90.79573 , 91.019585, 90.919395, 90.74472 , 90.721695, 90.63733 ,
90.69642 , 90.70836 , 90.561165, 90.68271 , 90.52982 , 90.39877 ,
90.31636 , 90.39842 , 90.30141 , 90.26272 , 90.270004, 90.3864 ,
90.225044, 90.0885 , 90.03745 , 90.08253 , 90.01977 , 89.97255 ,
90.15885 , 90.06228 , 89.87726 , 89.749825, 89.72873 , 89.66966 ,
89.59129 , 89.52754 , 89.730225, 89.55623 , 89.35894 , 89.286705,
89.280495, 89.14859 , 89.078995, 88.93956 , 88.86052 , 88.97336 ,
88.79322 , 88.60602 , 88.388725, 88.31561 , 88.190605, 88.578995,
88.43394 , 88.25276 , 88.19386 , 87.85771 , 88.01282 , 87.73686 ,
87.50916 , 87.27425 , 87.346695, 87.40081 , 87.5908 , 87.19848 ,
86.85513 , 86.86794 , 86.68015 , 86.5981 , 86.88377 , 86.6903 ,
86.47367 , 86.27071 , 86.18382 , 86.05304 , 85.90994 , 85.67452 ,
85.61211 , 85.58503 , 85.31088 , 85.3589 , 85.069954, 84.885574,
84.84923 , 84.751205, 84.51449 , 84.44518 , 84.15734 , 83.88921 ,
83.79489 , 83.666046, 83.58684 , 83.32456 , 83.32521 , 82.95278 ,
82.6214 , 82.57423 , 82.374985, 82.9309 , 82.696594, 82.40665 ,
82.285866, 81.9734 , 82.08807 , 81.68611 , 81.51548 , 81.31129 ,
81.11197 , 81.02959 , 80.81508 , 80.51678 , 80.25799 , 80.193596,
79.99877 , 79.90879 , 79.7146 , 79.51562 , 79.1962 , 79.17775 ,
79.01903 , 78.85216 , 78.683205, 78.568695, 78.25564 , 78.01019 ,
77.96847 , 77.86508 , 77.622345, 77.442635, 77.28628 , 77.04939 ,
76.945816, 76.637115, 76.50665 , 76.23733 , 77.12329 , 76.99095 ,
76.634674, 76.56528 , 76.355705, 76.1433 , 75.99037 , 75.679504,
75.46275 , 75.25679 , 75.248405, 75.04045 , 74.90954 , 74.717705,
74.37548 , 74.31669 , 74.57175 , 74.3464 , 74.20926 , 74.02851 ,
73.847664, 73.780525, 73.74633 , 73.74628 , 73.743744, 73.66323 ,
73.65715 , 73.64867 , 73.60185 , 73.62824),
(92.339935, 92.322266, 92.328224, 92.22129 , 92.30532 , 92.20998 ,
92.20413 , 92.196175, 92.182625, 91.99864 , 92.0118 , 91.981705,
91.97824 , 91.97947 , 91.935974, 91.91858 , 91.94193 , 91.92594 ,
91.90251 , 91.903725, 91.87108 , 91.798096, 91.784256, 91.77543 ,
91.75851 , 91.76525 , 91.73821 , 91.70663 , 91.694374, 91.430214,
91.637695, 91.517265, 91.28923 , 91.39731 , 91.305855, 91.170334,
91.574875, 91.51844 , 91.68243 , 91.60425 , 91.76375 , 91.82473 ,
91.65652 , 91.55454 , 91.32555 , 91.387474, 91.46497 , 91.22743 ,
91.118576, 90.80923 , 91.05724 , 91.13589 , 91.11162 , 91.27635 ,
91.24103 , 91.01473 , 91.0186 , 91.132614, 90.97975 , 91.04484 ,
91.12385 , 91.12933 , 91.068115, 91.1347 , 90.83643 , 90.81577 ,
90.877785, 91.033325, 91.04858 , 91.07109 , 90.95782 , 90.895966,
90.912155, 90.89144 , 90.90009 , 90.78678 , 90.84379 , 90.80735 ,
90.62205 , 90.70013 , 90.85232 , 90.75824 , 90.67085 , 90.75713 ,
90.710106, 90.67348 , 90.680504, 90.69564 , 90.57216 , 90.57354 ,
90.66245 , 90.75777 , 90.68726 , 90.626816, 90.59941 , 90.57577 ,
90.51432 , 90.59387 , 90.54671 , 90.538185, 90.51944 , 90.5536 ,
90.57787 , 90.52256 , 90.56973 , 90.59297 , 90.56179 , 90.580124,
90.430984, 90.46884 , 90.51945 , 90.46759 , 90.482285, 90.46043 ,
90.528435, 90.5738 , 90.580055, 90.640076, 90.64692 , 90.55135 ,
90.5796 , 90.6063 , 90.611084, 90.56248 , 90.59759 , 90.55712 ,
90.524704, 90.5435 , 90.60372 , 90.5734 , 90.52104 , 90.514046,
90.55673 , 90.59034 , 90.58229 , 90.5304 , 90.569534, 90.56199 ,
90.50856 , 90.464066, 90.52476 , 90.55803 , 90.664764, 90.64223 ,
90.57922 , 90.66778 , 90.71366 , 90.686386, 90.67269 , 90.686745,
90.66004 , 90.666725, 90.62826 , 90.51183 , 90.648155, 90.661514,
90.65974 , 90.62551 , 90.83101 , 90.44326 , 90.61156 , 90.639465,
90.59474 , 90.70388 , 90.84568 , 90.90167 , 90.82485 , 90.885185,
90.822716, 90.93222 , 91.12553 , 91.11021 , 90.87452 , 90.89129 ,
90.90866 , 90.888054, 90.92441 , 90.92761 , 90.943405, 90.966805,
90.98222 , 91.15343 , 91.118515, 91.055214, 91.02614 , 90.96989 ,
91.0645 , 91.01385 , 91.04147 , 91.02144 , 90.980606, 91.11184 ,
91.04737 , 91.11434 , 91.09558 , 91.23381 , 91.2389 , 91.19597 ,
91.27843 , 91.24501 , 91.24553 , 91.30214 , 91.12787 , 91.26025 ,
91.239716, 91.245056, 91.01371 , 91.32769 , 91.2733 , 91.3571 ,
91.390015, 91.3011 , 91.27436 , 91.37431 , 91.22913 , 91.3055 ,
91.286545, 91.33157 , 91.290825, 91.26603 , 91.22722 , 91.259766,
91.29276 , 91.288506, 91.283165, 91.21455 , 91.22721 , 91.23385 ,
91.20379 , 91.185265, 91.16436 , 91.18287 , 91.375534, 91.269554,
91.12509 , 91.06354 , 91.06241 , 91.039986, 91.00824 , 90.968666,
90.885155, 90.84155 , 90.789665, 90.69835 , 90.584015, 90.71341 ,
90.81229 , 90.81586 , 90.80015 , 90.71469 , 90.675804, 90.63881 ,
90.5206 , 90.464066, 90.48231 , 90.453285, 90.38126 , 90.36569 ,
90.34755 , 90.35218 , 90.25718 , 90.11602 , 90.083145, 90.06067 ,
90.01825 , 90.00809 , 89.81909 , 89.608025, 89.68572 , 89.713394,
89.61133 , 89.65326 , 89.66421 , 89.54129 , 89.51829 , 89.43354 ,
89.31858 , 89.21713 , 89.21449 , 89.20785 , 89.147385, 89.05625 ,
88.96352 , 89.0615 , 88.84715 , 88.70179 , 88.733864, 88.68902 ,
88.60694 , 88.485176, 88.25449 , 87.99938 , 88.15909 , 88.01017 ,
88.00418 , 87.895386, 87.797806, 87.84923 , 87.663956, 87.572624,
87.52363 , 87.4145 , 87.19557 , 86.88994 , 87.086914, 86.96978 ,
86.86788 , 86.78021 , 86.70726 , 86.62763 , 86.56758 , 86.74693 ,
86.40191 , 86.242165, 86.04191 , 85.916664, 85.784225, 85.619 ,
85.73769 , 85.53689 , 85.3364 , 85.47828 , 85.34444 , 85.092545,
85.232216, 85.05706 , 84.830956, 84.760994, 84.49129 , 84.258766,
84.2156 , 84.05372 , 83.76075 , 83.70232 , 83.51019 , 83.40005 ,
83.3252 , 83.12656 , 82.90322 , 82.83502 , 82.68195 , 82.607574,
82.3671 , 82.26068 , 82.00021 , 81.950584, 81.55112 , 81.42023 ,
81.65302 , 81.3855 , 81.33902 , 81.19015 , 80.929405, 80.615654,
80.53312 , 80.303474, 80.045975, 79.95131 , 79.71385 , 79.5632 ,
79.29453 , 79.21584 , 79.034645, 79.0033 , 78.62548 , 78.69894 ,
78.34152 , 78.39352 , 78.15288 , 78.06105 , 77.8162 , 77.66907 ,
77.549446, 77.38209 , 77.32287 , 76.96743 , 77.05054 , 76.95519 ,
76.6866 , 76.60885 , 76.60382 , 76.21552 , 76.18537 , 75.928276,
75.96425 , 75.60656 , 75.62784 , 75.4247 , 75.12972 , 75.06333 ,
75.04375 , 74.67582 , 74.61657 , 74.37483 , 74.22055 , 74.18366 ,
74.08825 , 73.83243 , 73.7463 , 73.63608),
(92.02544 , 91.993835, 92.011116, 91.97145 , 91.890945, 91.88841 ,
91.88356 , 91.86322 , 91.848595, 91.59571 , 91.60843 , 91.57048 ,
91.5758 , 91.574104, 91.52298 , 91.5119 , 91.5372 , 91.51016 ,
91.48616 , 91.49448 , 91.45488 , 91.45282 , 91.44794 , 91.428375,
91.41055 , 91.42348 , 91.388054, 91.25345 , 91.24602 , 90.69503 ,
90.95363 , 90.86626 , 90.85947 , 90.88236 , 90.73388 , 90.95244 ,
90.77044 , 91.025795, 90.807785, 91.03213 , 90.98665 , 90.97533 ,
90.908676, 90.946465, 90.67809 , 91.068855, 91.00808 , 91.05022 ,
91.0858 , 90.88084 , 90.664795, 90.69797 , 90.89663 , 91.12245 ,
90.69989 , 90.95743 , 91.00739 , 90.95974 , 90.99817 , 90.93962 ,
90.90883 , 90.994064, 90.96457 , 90.98862 , 91.0408 , 90.92008 ,
90.84303 , 90.84206 , 90.848175, 90.271416, 90.608665, 90.726425,
90.77846 , 90.85507 , 90.93323 , 90.79582 , 90.757484, 90.739845,
90.748276, 90.65674 , 90.65121 , 90.6833 , 90.72623 , 90.635056,
90.6143 , 90.577415, 90.57761 , 90.647766, 90.79329 , 90.60183 ,
90.51719 , 90.62802 , 90.63425 , 90.57563 , 90.51822 , 90.46891 ,
90.43921 , 90.53054 , 90.62772 , 90.51487 , 90.50862 , 90.5167 ,
90.427124, 90.40234 , 90.448425, 90.47277 , 90.51874 , 90.5524 ,
90.50724 , 90.51949 , 90.52607 , 90.48709 , 90.5377 , 90.57171 ,
90.57595 , 90.53393 , 90.50255 , 90.53965 , 90.585915, 90.54923 ,
90.53678 , 90.553246, 90.55081 , 90.51576 , 90.532875, 90.51309 ,
90.58023 , 90.59121 , 90.599045, 90.53245 , 90.52635 , 90.571304,
90.58161 , 90.61211 , 90.608475, 90.5444 , 90.58551 , 90.63973 ,
90.581505, 90.5086 , 90.504524, 90.591835, 90.63792 , 90.64173 ,
90.68373 , 90.64793 , 90.62553 , 90.6718 , 90.702324, 90.723595,
90.695145, 90.657364, 90.63514 , 90.73557 , 90.79908 , 90.800674,
90.70571 , 90.757324, 90.70759 , 90.65074 , 90.573135, 90.63558 ,
90.73752 , 90.62934 , 90.75623 , 90.74082 , 90.804344, 90.92873 ,
90.945786, 90.81971 , 90.8963 , 90.91217 , 90.9142 , 90.9678 ,
90.94695 , 91.02019 , 90.89752 , 90.96081 , 90.9767 , 90.97669 ,
91.041695, 91.04195 , 91.2216 , 91.12451 , 91.04092 , 91.0992 ,
91.064125, 91.10935 , 91.00174 , 91.01525 , 91.055115, 90.999535,
91.14831 , 91.06075 , 91.17852 , 91.227974, 91.18878 , 91.271614,
91.227066, 91.307846, 91.171585, 91.2082 , 91.2662 , 91.395676,
91.26537 , 91.371445, 91.322586, 91.18805 , 91.31594 , 91.401245,
91.33035 , 91.330154, 91.30897 , 91.33139 , 91.31072 , 91.352196,
91.30424 , 91.300224, 91.297035, 91.272675, 91.28969 , 91.29162 ,
91.324104, 91.328285, 91.26252 , 91.25737 , 91.22881 , 91.23719 ,
91.24871 , 91.21593 , 91.21447 , 91.16678 , 91.15248 , 91.131004,
91.17571 , 91.105064, 91.0915 , 91.032005, 91.04547 , 91.050964,
91.03684 , 90.976265, 90.92144 , 90.84226 , 90.764465, 90.79191 ,
90.744736, 90.75651 , 90.756226, 90.702545, 90.64719 , 90.61822 ,
90.585655, 90.510765, 90.49643 , 90.49278 , 90.46256 , 90.342514,
90.23745 , 90.27135 , 90.17029 , 90.12138 , 90.00405 , 89.967674,
90.01573 , 89.90634 , 89.81819 , 89.816536, 89.83915 , 89.829735,
89.631966, 89.557465, 89.58909 , 89.551834, 89.445145, 89.41758 ,
89.38539 , 89.25898 , 89.17636 , 89.10506 , 88.92527 , 88.829956,
89.009705, 88.91542 , 88.83157 , 88.860344, 88.80828 , 88.72012 ,
88.58152 , 88.4101 , 88.45015 , 88.3517 , 88.234764, 88.010155,
87.89395 , 87.86337 , 87.75718 , 87.81051 , 87.73383 , 87.654915,
87.63792 , 87.47771 , 87.292114, 87.33459 , 87.10768 , 86.90445 ,
86.93663 , 86.77891 , 86.643654, 86.58507 , 86.5765 , 86.60649 ,
86.44003 , 86.25458 , 86.2249 , 86.309494, 85.96625 , 85.71317 ,
85.65353 , 85.610565, 85.6418 , 85.40645 , 85.24648 , 85.14928 ,
85.00052 , 85.14198 , 84.84654 , 84.55347 , 84.748604, 84.33241 ,
83.936386, 84.01145 , 83.799324, 83.691086, 83.549194, 83.4314 ,
83.37993 , 83.16611 , 82.92861 , 82.66585 , 82.75583 , 82.51021 ,
82.57725 , 82.33577 , 82.14563 , 82.130615, 81.921394, 81.736115,
81.45851 , 81.42854 , 81.2799 , 81.074394, 80.93803 , 80.61687 ,
80.46829 , 80.43173 , 80.32678 , 80.107475, 79.99081 , 80.02226 ,
79.69663 , 79.456406, 79.45876 , 79.37041 , 78.90856 , 79.07421 ,
78.7296 , 78.72988 , 78.46358 , 78.30841 , 77.99881 , 77.92456 ,
77.81878 , 77.64457 , 77.58277 , 77.43197 , 77.1765 , 76.91593 ,
76.82219 , 76.770836, 76.570724, 76.440346, 76.40973 , 76.110405,
75.88642 , 75.7994 , 75.666985, 75.5265 , 75.38955 , 75.239075,
75.19305 , 74.895226, 74.79635 , 74.511986, 74.446625, 74.41249 ,
74.08843 , 74.13188 , 74.1291 , 73.84215)),
xyz88_acrosstrack=((-232.62158 , -231.30997 , -229.98645 , -228.6795 ,
-227.35896 , -226.05576 , -224.72432 , -223.25398 ,
-221.94582 , -220.62047 , -219.31186 , -218.00221 ,
-216.66704 , -214.92795 , -213.61061 , -212.28104 ,
-210.98543 , -209.65836 , -208.33536 , -207.02545 ,
-204.52931 , -202.91203 , -200.67534 , -200.2468 ,
-199.0931 , -197.32823 , -196.84328 , -195.90344 ,
-194.72227 , -193.41537 , -192.54117 , -191.12758 ,
-190.58348 , -188.44798 , -187.86824 , -186.64693 ,
-184.67987 , -184.15242 , -182.33142 , -180.8244 ,
-179.97351 , -179.09216 , -177.39139 , -176.27791 ,
-174.55582 , -172.82053 , -171.4015 , -170.292 ,
-166.17287 , -165.24649 , -164.0354 , -162.86182 ,
-161.87819 , -160.57874 , -159.58362 , -158.41821 ,
-157.1598 , -155.4729 , -154.29068 , -153.01816 ,
-151.71431 , -150.51762 , -149.016 , -148.05183 ,
-147.08453 , -145.58443 , -144.22128 , -142.98877 ,
-141.73863 , -140.50407 , -139.39734 , -138.14195 ,
-136.70546 , -135.38692 , -134.13087 , -132.86458 ,
-131.39888 , -130.11946 , -128.74385 , -127.54324 ,
-126.28558 , -124.94505 , -123.50876 , -121.8826 ,
-120.65209 , -119.39639 , -118.14999 , -116.973724 ,
-115.56676 , -114.122345 , -112.671 , -111.32168 ,
-110.00546 , -108.69169 , -107.46873 , -106.26015 ,
-104.917206 , -103.3863 , -102.025185 , -100.76935 ,
-99.51263 , -98.20902 , -96.86311 , -95.51827 ,
-93.963036 , -92.69178 , -91.456985 , -90.16704 ,
-88.83731 , -87.492386 , -86.20518 , -84.69755 ,
-83.32244 , -82.07486 , -80.7944 , -79.4139 ,
-78.14662 , -76.85292 , -75.49061 , -74.041336 ,
-72.69091 , -71.22072 , -69.887276 , -68.57123 ,
-67.247475 , -65.95285 , -64.680885 , -63.359734 ,
-61.975876 , -60.657017 , -59.157017 , -57.811516 ,
-56.460857 , -55.147106 , -53.80591 , -52.494434 ,
-51.18493 , -49.830715 , -48.484756 , -47.15947 ,
-45.83875 , -44.27867 , -42.929043 , -41.590256 ,
-40.298176 , -38.93553 , -37.6098 , -36.305416 ,
-34.941498 , -33.561275 , -32.259224 , -30.91018 ,
-29.546665 , -28.144218 , -26.830492 , -25.48297 ,
-24.152693 , -22.60731 , -21.282543 , -19.956234 ,
-18.556664 , -17.193779 , -15.838551 , -14.487753 ,
-13.159468 , -11.796595 , -10.436882 , -9.066825 ,
-7.701584 , -6.357174 , -4.9941387 , -3.6186657 ,
-2.2579048 , -0.8978571 , 0.47828665, 1.8386322 ,
3.2174845 , 4.599013 , 5.964562 , 7.3355737 ,
8.709359 , 10.08831 , 11.43276 , 12.779663 ,
14.194792 , 15.540902 , 17.01311 , 18.39095 ,
19.746851 , 21.092619 , 22.278051 , 23.66912 ,
25.032242 , 26.486448 , 27.878534 , 29.244947 ,
30.618883 , 31.994158 , 33.35903 , 34.742126 ,
36.13417 , 37.538567 , 38.80981 , 40.30284 ,
41.683167 , 42.911804 , 44.308098 , 45.69388 ,
47.0232 , 48.380962 , 49.80319 , 51.20409 ,
52.609303 , 53.96087 , 55.333706 , 56.75272 ,
57.969753 , 59.358524 , 60.703594 , 61.999763 ,
63.425835 , 64.80872 , 66.15428 , 67.52411 ,
68.925125 , 70.389404 , 71.555374 , 72.90957 ,
74.23022 , 75.56781 , 76.94872 , 78.298744 ,
79.667015 , 80.99905 , 82.28814 , 83.46859 ,
84.87751 , 86.171455 , 87.49968 , 88.88907 ,
90.283325 , 91.62838 , 92.99412 , 94.27514 ,
95.5677 , 96.79838 , 98.15546 , 99.46979 ,
100.73511 , 102.08342 , 103.450905 , 104.62229 ,
105.8525 , 107.11335 , 108.58482 , 109.975845 ,
111.16416 , 112.39845 , 113.89274 , 114.98288 ,
116.00317 , 117.1339 , 118.611885 , 120.04076 ,
121.45769 , 122.71399 , 123.83041 , 125.00324 ,
126.06899 , 127.32279 , 128.70308 , 129.95427 ,
131.19754 , 132.56558 , 133.65294 , 134.71867 ,
135.75714 , 137.06529 , 138.27763 , 139.567 ,
141.04463 , 142.14029 , 143.16924 , 144.75752 ,
145.8937 , 146.86356 , 147.78764 , 149.28075 ,
150.48865 , 151.52957 , 152.64905 , 153.9082 ,
155.06442 , 156.23175 , 157.46373 , 158.63998 ,
159.82349 , 161.0686 , 162.26343 , 163.54105 ,
164.81703 , 166.09743 , 166.9283 , 168.1611 ,
168.76283 , 169.30722 , 169.95233 , 170.53963 ,
171.58693 , 172.3963 , 172.78865 , 174.06036 ,
174.96706 , 175.92662 , 176.7251 , 177.50974 ,
178.44875 , 179.35751 , 180.56718 , 181.22627 ,
181.90334 , 182.78116 , 183.97707 , 185.49408 ,
185.86246 , 186.52768 , 187.57565 , 188.32538 ,
188.78792 , 190.36876 , 190.0739 , 191.6611 ,
192.40192 , 192.895 , 193.31293 , 194.60023 ,
195.39854 , 195.9762 , 197.11958 , 198.11649 ,
198.09872 , 199.13301 , 199.96245 , 200.83089 ,
201.6532 , 202.1054 , 202.5788 , 203.11575 ,
204.39618 , 205.58301 , 206.18622 , 206.87462 ,
207.22519 , 208.20573 , 208.9892 , 210.03513 ,
210.50417 , 211.3737 , 212.6544 , 212.92934 ,
213.2718 , 214.26942 , 214.95288 , 215.61818 ,
216.2915 , 217.32849 , 217.8841 , 218.07657 ,
218.31253 , 219.32535 , 219.91737 , 220.70541 ,
221.07133 , 221.64581 , 222.44064 , 222.75261 ,
222.65382 , 223.10591 , 223.94681 , 224.70963 ,
225.17964 , 225.99545 , 225.88785 , 226.69408 ,
227.39075 , 227.8988 , 227.81749 , 228.92896 ,
229.41988 , 230.25523 , 229.81483 , 231.05933 ,
231.25836 , 231.77765 , 232.28165 , 232.80597 ,
233.31442 , 233.80792 , 234.88411 , 234.71631),
(-229.49791 , -228.19116 , -226.90044 , -225.61136 ,
-224.29446 , -222.99823 , -221.5617 , -220.25046 ,
-218.95293 , -217.66031 , -216.35495 , -215.06592 ,
-213.33284 , -212.03262 , -210.73524 , -209.4375 ,
-208.14037 , -206.83142 , -205.54333 , -204.2423 ,
-203.16335 , -201.60794 , -200.54787 , -199.46309 ,
-198.57639 , -197.32693 , -196.22124 , -195.00842 ,
-193.85652 , -192.47893 , -191.49287 , -189.63513 ,
-187.89091 , -187.09517 , -186.09724 , -184.40123 ,
-183.74661 , -182.61646 , -181.96791 , -180.25266 ,
-178.79338 , -177.73262 , -176.4308 , -175.1396 ,
-174.13043 , -172.30437 , -170.86902 , -170.0452 ,
-169.29248 , -167.45639 , -165.86717 , -164.52316 ,
-162.78871 , -161.76248 , -160.69345 , -159.25195 ,
-157.88974 , -156.32458 , -155.0521 , -153.9379 ,
-152.93584 , -151.37337 , -150.04189 , -148.24608 ,
-146.58287 , -145.20523 , -143.97151 , -142.89893 ,
-141.50607 , -140.20857 , -139.06274 , -137.88367 ,
-136.44266 , -135.1332 , -134.04602 , -132.97908 ,
-131.59515 , -129.9152 , -128.66692 , -127.35833 ,
-126.11876 , -124.80859 , -123.55544 , -122.04066 ,
-120.563065 , -119.330086 , -117.98046 , -116.803 ,
-115.52688 , -114.28812 , -112.74206 , -111.14285 ,
-109.949684 , -108.67592 , -107.43885 , -106.11895 ,
-104.83003 , -103.63497 , -102.26705 , -100.72356 ,
-99.39556 , -98.07922 , -96.759636 , -95.50305 ,
-94.157875 , -92.78022 , -91.30528 , -90.03211 ,
-88.7648 , -87.395004 , -86.11964 , -84.78781 ,
-83.50839 , -82.16271 , -80.62651 , -79.296684 ,
-77.98224 , -76.688354 , -75.41167 , -74.02574 ,
-72.69913 , -71.348305 , -70.00777 , -68.61003 ,
-67.25227 , -65.92097 , -64.606995 , -63.29357 ,
-61.975903 , -60.6482 , -59.295124 , -57.896408 ,
-56.178234 , -54.85725 , -53.507763 , -52.195087 ,
-50.870525 , -49.5518 , -48.24753 , -46.894375 ,
-45.606678 , -44.25364 , -42.915108 , -41.575947 ,
-40.261497 , -38.867878 , -37.553875 , -36.199646 ,
-34.862064 , -33.558105 , -32.242054 , -30.917496 ,
-29.500338 , -28.13754 , -26.75521 , -25.432854 ,
-24.121428 , -22.741076 , -21.4283 , -20.067785 ,
-18.70134 , -17.326012 , -15.989767 , -14.447754 ,
-13.122668 , -11.769483 , -10.430127 , -9.026256 ,
-7.6793623, -6.3056045, -4.9589334, -3.5829494,
-2.223406 , -0.8463392, 0.5318618, 1.8947171,
3.2718642, 4.6356425, 6.002341 , 7.3741493,
8.746894 , 10.103333 , 11.463251 , 12.847746 ,
14.223967 , 15.414743 , 16.839336 , 18.22123 ,
19.575766 , 20.961195 , 22.34793 , 23.728905 ,
25.078077 , 26.493914 , 27.934477 , 29.245134 ,
30.659613 , 32.041233 , 33.404423 , 34.828205 ,
36.23778 , 37.59043 , 38.972977 , 40.372932 ,
41.766678 , 43.15288 , 44.567257 , 45.94986 ,
47.277042 , 48.696327 , 50.104065 , 51.510353 ,
52.94999 , 54.30122 , 55.293827 , 56.66421 ,
57.971714 , 59.34231 , 60.670902 , 62.1006 ,
63.50721 , 64.89884 , 66.28525 , 67.66504 ,
68.875046 , 70.30908 , 71.58528 , 72.86399 ,
74.241425 , 75.63242 , 77.02142 , 78.413086 ,
79.79495 , 80.9972 , 82.18315 , 83.38337 ,
84.71718 , 86.061104 , 87.4435 , 88.85918 ,
90.32701 , 91.59998 , 92.8752 , 94.09085 ,
95.44957 , 96.767456 , 98.22179 , 99.60801 ,
100.84867 , 102.01516 , 103.23901 , 104.436485 ,
105.74837 , 107.236824 , 108.524765 , 109.833824 ,
111.29697 , 112.34738 , 113.516594 , 114.72859 ,
116.07185 , 117.518364 , 118.84439 , 120.1627 ,
121.36983 , 122.60781 , 123.695656 , 124.87728 ,
126.27529 , 127.541435 , 128.79237 , 130.138 ,
131.30388 , 132.41685 , 133.45378 , 134.74625 ,
136.10825 , 137.29749 , 138.54477 , 139.69989 ,
140.91527 , 142.52264 , 143.67912 , 144.72287 ,
145.7252 , 146.9797 , 148.10536 , 148.93813 ,
150.04326 , 151.09995 , 152.3205 , 153.07158 ,
154.75804 , 155.57697 , 156.55301 , 157.46423 ,
158.1717 , 159.69447 , 161.48428 , 162.01881 ,
162.64888 , 164.02678 , 164.97438 , 166.27107 ,
167.3813 , 168.34183 , 169.20175 , 170.16977 ,
171.38425 , 172.37311 , 173.42223 , 174.21973 ,
175.45564 , 175.93298 , 176.51877 , 178.01416 ,
178.74098 , 179.61096 , 180.81169 , 181.97394 ,
182.6865 , 183.85608 , 184.47238 , 185.13757 ,
186.19539 , 187.1871 , 188.38185 , 189.0928 ,
190.32379 , 190.70866 , 191.19475 , 192.34346 ,
193.14412 , 193.89261 , 194.52391 , 195.12233 ,
196.03604 , 196.41254 , 198.0906 , 198.26134 ,
199.084 , 199.73592 , 200.50308 , 201.47467 ,
202.12827 , 202.57617 , 203.02654 , 204.18085 ,
204.80513 , 205.90738 , 206.5461 , 207.28368 ,
207.60616 , 208.83101 , 209.58844 , 210.32721 ,
211.0571 , 211.9402 , 212.19344 , 212.63206 ,
213.82576 , 214.65425 , 215.11519 , 215.74289 ,
216.55608 , 216.9299 , 217.79114 , 218.18944 ,
218.98691 , 219.16196 , 219.5632 , 220.36789 ,
220.3019 , 221.41278 , 221.77107 , 222.36336 ,
222.89293 , 223.08022 , 223.55002 , 224.05188 ,
225.14162 , 225.64743 , 226.26595 , 226.82831 ,
226.81079 , 227.65683 , 228.44324 , 228.79062 ,
229.27527 , 229.7648 , 230.25043 , 231.22841 ,
232.31165 , 233.38396 , 234.45695 , 235.55438 ,
236.62805 , 237.70259 , 238.78908 , 239.85281),
(-2.42026947e+02, -2.40680084e+02, -2.39324097e+02, -2.38011124e+02,
-2.36635864e+02, -2.35356094e+02, -2.34005066e+02, -2.32652588e+02,
-2.31301819e+02, -2.30021957e+02, -2.28665497e+02, -2.27324173e+02,
-2.25972153e+02, -2.24615295e+02, -2.23279678e+02, -2.21933334e+02,
-2.20572281e+02, -2.19220001e+02, -2.17878387e+02, -2.16524200e+02,
-2.15184418e+02, -2.13862015e+02, -2.12514038e+02, -2.11159790e+02,
-2.09815720e+02, -2.08459152e+02, -2.07116653e+02, -2.05774689e+02,
-2.04426514e+02, -2.02518585e+02, -2.01685059e+02, -2.00127502e+02,
-1.98451248e+02, -1.97424484e+02, -1.95895584e+02, -1.94293457e+02,
-1.93864426e+02, -1.92452820e+02, -1.91429657e+02, -1.90004715e+02,
-1.89006424e+02, -1.87816620e+02, -1.86149612e+02, -1.84572433e+02,
-1.82836456e+02, -1.81708572e+02, -1.80546936e+02, -1.78770203e+02,
-1.77258041e+02, -1.75317947e+02, -1.74472504e+02, -1.73313705e+02,
-1.71965591e+02, -1.70934402e+02, -1.69608597e+02, -1.67877533e+02,
-1.66593140e+02, -1.65446838e+02, -1.63906021e+02, -1.62700882e+02,
-1.61541229e+02, -1.60201843e+02, -1.58829666e+02, -1.57628754e+02,
-1.55748886e+02, -1.54432800e+02, -1.53212463e+02, -1.52166672e+02,
-1.50885864e+02, -1.49585617e+02, -1.47970596e+02, -1.46575668e+02,
-1.45267960e+02, -1.43927643e+02, -1.42641602e+02, -1.41181015e+02,
-1.39950378e+02, -1.38597717e+02, -1.36990555e+02, -1.35791229e+02,
-1.34706284e+02, -1.33242523e+02, -1.31804977e+02, -1.30584213e+02,
-1.29192932e+02, -1.27832176e+02, -1.26512932e+02, -1.25204460e+02,
-1.23738731e+02, -1.22414032e+02, -1.21184685e+02, -1.19980530e+02,
-1.18561485e+02, -1.17152000e+02, -1.15796890e+02, -1.14473602e+02,
-1.13073601e+02, -1.11699997e+02, -1.10322311e+02, -1.09011124e+02,
-1.07663017e+02, -1.06360313e+02, -1.05068428e+02, -1.03703018e+02,
-1.02433975e+02, -1.01116631e+02, -9.97721405e+01, -9.84599533e+01,
-9.69977875e+01, -9.57120285e+01, -9.44149551e+01, -9.30405579e+01,
-9.16691437e+01, -9.03291779e+01, -8.90708160e+01, -8.77813568e+01,
-8.64745560e+01, -8.51716919e+01, -8.38710861e+01, -8.24247208e+01,
-8.10434494e+01, -7.97324829e+01, -7.84250412e+01, -7.70325851e+01,
-7.57307510e+01, -7.43211517e+01, -7.29421997e+01, -7.16371765e+01,
-7.03025742e+01, -6.89409561e+01, -6.75812759e+01, -6.62261963e+01,
-6.49255676e+01, -6.36163635e+01, -6.22685013e+01, -6.09096718e+01,
-5.95799980e+01, -5.82420464e+01, -5.68057404e+01, -5.54442863e+01,
-5.41395187e+01, -5.28248940e+01, -5.15351830e+01, -5.01880531e+01,
-4.88159523e+01, -4.75146484e+01, -4.61959190e+01, -4.48514328e+01,
-4.34907837e+01, -4.21015053e+01, -4.07415657e+01, -3.93982811e+01,
-3.80502357e+01, -3.66440926e+01, -3.53528252e+01, -3.40122566e+01,
-3.26609917e+01, -3.13061409e+01, -3.00254555e+01, -2.85575047e+01,
-2.72680454e+01, -2.59275780e+01, -2.45651913e+01, -2.32482185e+01,
-2.19285355e+01, -2.05869808e+01, -1.92155361e+01, -1.78201656e+01,
-1.64532642e+01, -1.51070089e+01, -1.37925367e+01, -1.24340782e+01,
-1.10445881e+01, -9.68760586e+00, -8.33431053e+00, -6.98122215e+00,
-5.64143372e+00, -4.27921867e+00, -2.91905975e+00, -1.55971444e+00,
-2.16057450e-01, 1.15277898e+00, 2.51463461e+00, 3.86889529e+00,
5.22230101e+00, 6.58459520e+00, 7.94595098e+00, 9.29845047e+00,
1.06630421e+01, 1.20107365e+01, 1.33656244e+01, 1.47479982e+01,
1.61056499e+01, 1.74167614e+01, 1.87636223e+01, 2.01622143e+01,
2.15159645e+01, 2.28667622e+01, 2.42645893e+01, 2.56032467e+01,
2.69665070e+01, 2.83585663e+01, 2.96657085e+01, 3.10724831e+01,
3.24179344e+01, 3.37904282e+01, 3.50612602e+01, 3.65407104e+01,
3.78760452e+01, 3.92840843e+01, 4.06682892e+01, 4.19295273e+01,
4.32769890e+01, 4.46821442e+01, 4.59758949e+01, 4.73677635e+01,
4.87228966e+01, 5.01092148e+01, 5.14655800e+01, 5.27963524e+01,
5.41448441e+01, 5.55138130e+01, 5.68300629e+01, 5.81847115e+01,
5.95523720e+01, 6.08553352e+01, 6.22305145e+01, 6.35782318e+01,
6.49142151e+01, 6.62769699e+01, 6.76081619e+01, 6.89890976e+01,
7.04253845e+01, 7.17051010e+01, 7.29569855e+01, 7.42493057e+01,
7.56361847e+01, 7.69724503e+01, 7.83050842e+01, 7.96373215e+01,
8.09198303e+01, 8.21400604e+01, 8.34176025e+01, 8.46899948e+01,
8.59239120e+01, 8.73857346e+01, 8.88393250e+01, 9.01857147e+01,
9.15343399e+01, 9.27053146e+01, 9.40334320e+01, 9.53073654e+01,
9.65221939e+01, 9.77862625e+01, 9.91614227e+01, 1.00458382e+02,
1.01725456e+02, 1.03035706e+02, 1.04361702e+02, 1.05696739e+02,
1.06899506e+02, 1.08062614e+02, 1.09363976e+02, 1.10666710e+02,
1.11919243e+02, 1.13103477e+02, 1.14202881e+02, 1.15250481e+02,
1.16641304e+02, 1.17990685e+02, 1.19186195e+02, 1.20534439e+02,
1.21873940e+02, 1.22958237e+02, 1.24235802e+02, 1.25448769e+02,
1.26574089e+02, 1.27746902e+02, 1.29026413e+02, 1.30317383e+02,
1.31550201e+02, 1.32650360e+02, 1.33811478e+02, 1.35287064e+02,
1.36242569e+02, 1.37306061e+02, 1.38626999e+02, 1.39887466e+02,
1.40996948e+02, 1.42115494e+02, 1.43004990e+02, 1.43863510e+02,
1.45428101e+02, 1.46436844e+02, 1.47707047e+02, 1.48809174e+02,
1.49938293e+02, 1.51162704e+02, 1.52106476e+02, 1.53154266e+02,
1.54360489e+02, 1.55415695e+02, 1.56281616e+02, 1.56935806e+02,
1.58580948e+02, 1.59590408e+02, 1.60649185e+02, 1.61746445e+02,
1.62820465e+02, 1.63889664e+02, 1.65008041e+02, 1.66593750e+02,
1.67188553e+02, 1.68079208e+02, 1.68898804e+02, 1.69874557e+02,
1.70847015e+02, 1.71763809e+02, 1.73190384e+02, 1.73982025e+02,
1.74785645e+02, 1.76286865e+02, 1.77237579e+02, 1.77872467e+02,
1.79334122e+02, 1.80147766e+02, 1.80868134e+02, 1.81908463e+02,
1.82549545e+02, 1.83186478e+02, 1.84243683e+02, 1.85049713e+02,
1.85580338e+02, 1.86711685e+02, 1.87467636e+02, 1.88334290e+02,
1.89376755e+02, 1.90047638e+02, 1.90685928e+02, 1.91684311e+02,
1.92490585e+02, 1.93397568e+02, 1.94012466e+02, 1.94857834e+02,
1.95439850e+02, 1.96424942e+02, 1.96594009e+02, 1.97405670e+02,
1.99010590e+02, 1.99515015e+02, 2.00451904e+02, 2.01250946e+02,
2.01688370e+02, 2.01995605e+02, 2.02867630e+02, 2.03590485e+02,
2.04050919e+02, 2.04908112e+02, 2.05428711e+02, 2.06064728e+02,
2.06512085e+02, 2.07345490e+02, 2.07919205e+02, 2.08890259e+02,
2.08957474e+02, 2.10210663e+02, 2.10336929e+02, 2.11543640e+02,
2.11973267e+02, 2.12808258e+02, 2.13141098e+02, 2.13834702e+02,
2.14496811e+02, 2.15053802e+02, 2.15897385e+02, 2.16050385e+02,
2.17152390e+02, 2.17907700e+02, 2.18198990e+02, 2.19014053e+02,
2.20048462e+02, 2.19884583e+02, 2.21089813e+02, 2.21308807e+02,
2.22340057e+02, 2.22390656e+02, 2.23413971e+02, 2.23802597e+02,
2.23919510e+02, 2.24700241e+02, 2.25599640e+02, 2.25526596e+02,
2.26211945e+02, 2.26498596e+02, 2.27018692e+02, 2.27784332e+02,
2.28508209e+02, 2.28631042e+02, 2.29388199e+02, 2.29943176e+02),
(-2.40890091e+02, -2.39554581e+02, -2.38200150e+02, -2.36876007e+02,
-2.35597824e+02, -2.34251541e+02, -2.32903778e+02, -2.31564270e+02,
-2.30219894e+02, -2.28976044e+02, -2.27623672e+02, -2.26291702e+02,
-2.24939941e+02, -2.23592667e+02, -2.22266342e+02, -2.20925781e+02,
-2.19563293e+02, -2.18226273e+02, -2.16891068e+02, -2.15540024e+02,
-2.14207123e+02, -2.12862671e+02, -2.11512192e+02, -2.10173126e+02,
-2.08835403e+02, -2.07482086e+02, -2.06151505e+02, -2.04859238e+02,
-2.03515167e+02, -2.00979675e+02, -2.00264084e+02, -1.98798035e+02,
-1.97429092e+02, -1.96228378e+02, -1.94590652e+02, -1.93757217e+02,
-1.92065735e+02, -1.91333969e+02, -1.89526840e+02, -1.88746979e+02,
-1.87332245e+02, -1.85997223e+02, -1.84566818e+02, -1.83282440e+02,
-1.81480286e+02, -1.80946976e+02, -1.79511780e+02, -1.78306473e+02,
-1.77079880e+02, -1.75344116e+02, -1.73605225e+02, -1.72350204e+02,
-1.71445618e+02, -1.70530334e+02, -1.68490845e+02, -1.67661041e+02,
-1.66458191e+02, -1.65027313e+02, -1.63834335e+02, -1.62410599e+02,
-1.61060181e+02, -1.59865936e+02, -1.58552902e+02, -1.57281982e+02,
-1.56006531e+02, -1.54520416e+02, -1.53066528e+02, -1.51761642e+02,
-1.50470932e+02, -1.48196548e+02, -1.47432083e+02, -1.46270172e+02,
-1.45031860e+02, -1.43839264e+02, -1.42663620e+02, -1.41166443e+02,
-1.39795105e+02, -1.38468338e+02, -1.37104477e+02, -1.35651627e+02,
-1.34340378e+02, -1.33061371e+02, -1.31815506e+02, -1.30388565e+02,
-1.28990326e+02, -1.27632080e+02, -1.26306274e+02, -1.25076614e+02,
-1.23975365e+02, -1.22393089e+02, -1.20980789e+02, -1.19752647e+02,
-1.18436813e+02, -1.17031799e+02, -1.15640427e+02, -1.14287178e+02,
-1.12929489e+02, -1.11694778e+02, -1.10450584e+02, -1.09013046e+02,
-1.07680214e+02, -1.06347145e+02, -1.04927574e+02, -1.03595734e+02,
-1.02327225e+02, -1.00944412e+02, -9.96791000e+01, -9.83852005e+01,
-9.70357361e+01, -9.57228546e+01, -9.43803177e+01, -9.30200043e+01,
-9.17488098e+01, -9.04011307e+01, -8.90793991e+01, -8.77057114e+01,
-8.63645325e+01, -8.50422058e+01, -8.37802200e+01, -8.23890991e+01,
-8.10555344e+01, -7.96498642e+01, -7.83364868e+01, -7.69572754e+01,
-7.56419373e+01, -7.42658081e+01, -7.29678192e+01, -7.16561127e+01,
-7.03307266e+01, -6.89424438e+01, -6.75710068e+01, -6.62551041e+01,
-6.49306870e+01, -6.36191483e+01, -6.22741585e+01, -6.09069633e+01,
-5.95785828e+01, -5.82801018e+01, -5.69053154e+01, -5.55234337e+01,
-5.41126823e+01, -5.28289528e+01, -5.15046387e+01, -5.01726265e+01,
-4.88570137e+01, -4.74895210e+01, -4.61367264e+01, -4.48298683e+01,
-4.34906998e+01, -4.21617851e+01, -4.08000183e+01, -3.94369164e+01,
-3.79664726e+01, -3.66479263e+01, -3.53263626e+01, -3.39800186e+01,
-3.25891266e+01, -3.12688751e+01, -2.99084415e+01, -2.85365467e+01,
-2.71764832e+01, -2.58469524e+01, -2.45245075e+01, -2.31510124e+01,
-2.18384686e+01, -2.04737034e+01, -1.91343670e+01, -1.78128357e+01,
-1.64642868e+01, -1.50778217e+01, -1.37477427e+01, -1.24015865e+01,
-1.10378494e+01, -9.68412876e+00, -8.32634640e+00, -6.97964621e+00,
-5.62676477e+00, -4.26760769e+00, -2.90696096e+00, -1.54678810e+00,
-2.02943429e-01, 1.15767193e+00, 2.52903128e+00, 3.88978767e+00,
5.23443127e+00, 6.60559988e+00, 7.95719433e+00, 9.31980038e+00,
1.06695957e+01, 1.20212650e+01, 1.33872900e+01, 1.47400150e+01,
1.61343937e+01, 1.74826756e+01, 1.88577976e+01, 2.02382736e+01,
2.15818291e+01, 2.29692822e+01, 2.43297710e+01, 2.57056828e+01,
2.70246181e+01, 2.84049721e+01, 2.97928066e+01, 3.12015247e+01,
3.25103035e+01, 3.39217110e+01, 3.52664070e+01, 3.65701027e+01,
3.78505936e+01, 3.92598000e+01, 4.05918121e+01, 4.19563980e+01,
4.33071251e+01, 4.46748466e+01, 4.60417480e+01, 4.74063263e+01,
4.87467995e+01, 5.01065559e+01, 5.14839592e+01, 5.28142319e+01,
5.41296082e+01, 5.54788170e+01, 5.68606453e+01, 5.82213631e+01,
5.95501938e+01, 6.08956070e+01, 6.22434006e+01, 6.35925713e+01,
6.49585648e+01, 6.63130264e+01, 6.76127167e+01, 6.89449768e+01,
7.02710419e+01, 7.16144028e+01, 7.30165405e+01, 7.43025208e+01,
7.56873703e+01, 7.70014572e+01, 7.83639832e+01, 7.96502838e+01,
8.09939117e+01, 8.22864075e+01, 8.35637283e+01, 8.48500137e+01,
8.61214142e+01, 8.74879761e+01, 8.88000031e+01, 9.01542282e+01,
9.14547729e+01, 9.27218552e+01, 9.40333557e+01, 9.53159561e+01,
9.66228256e+01, 9.78796844e+01, 9.92094498e+01, 1.00534103e+02,
1.01775078e+02, 1.02960999e+02, 1.04189430e+02, 1.05556839e+02,
1.06750931e+02, 1.08019432e+02, 1.09221710e+02, 1.10505737e+02,
1.11829559e+02, 1.13008308e+02, 1.14235542e+02, 1.15553963e+02,
1.16880997e+02, 1.18180313e+02, 1.19250664e+02, 1.20399513e+02,
1.21766243e+02, 1.23012405e+02, 1.24174568e+02, 1.25467834e+02,
1.26711372e+02, 1.27850220e+02, 1.29015305e+02, 1.30163742e+02,
1.31218887e+02, 1.32359497e+02, 1.33934570e+02, 1.35113892e+02,
1.36268906e+02, 1.37608841e+02, 1.38795700e+02, 1.39936081e+02,
1.40956024e+02, 1.41989410e+02, 1.43322693e+02, 1.44447021e+02,
1.45554443e+02, 1.46438492e+02, 1.47518127e+02, 1.48693100e+02,
1.49801758e+02, 1.51148422e+02, 1.52281601e+02, 1.53353073e+02,
1.54622528e+02, 1.55585480e+02, 1.56513138e+02, 1.57799667e+02,
1.58678467e+02, 1.59530762e+02, 1.60838364e+02, 1.61806915e+02,
1.62764755e+02, 1.63874146e+02, 1.65091537e+02, 1.66391571e+02,
1.67332001e+02, 1.68174179e+02, 1.69326126e+02, 1.70726761e+02,
1.71284882e+02, 1.72028976e+02, 1.73097260e+02, 1.74211105e+02,
1.75470840e+02, 1.76203720e+02, 1.77100235e+02, 1.78058975e+02,
1.78922226e+02, 1.80398239e+02, 1.80972778e+02, 1.81538651e+02,
1.83172455e+02, 1.83421814e+02, 1.83715744e+02, 1.85034424e+02,
1.85737137e+02, 1.86681885e+02, 1.87551208e+02, 1.88401733e+02,
1.89498596e+02, 1.90135941e+02, 1.90742767e+02, 1.91291107e+02,
1.92653458e+02, 1.93168976e+02, 1.94499512e+02, 1.95036926e+02,
1.95788605e+02, 1.96849899e+02, 1.97481476e+02, 1.98166824e+02,
1.98545639e+02, 1.99615875e+02, 2.00321091e+02, 2.00979782e+02,
2.01717743e+02, 2.02007187e+02, 2.02713501e+02, 2.03723953e+02,
2.04565414e+02, 2.05115555e+02, 2.05939789e+02, 2.07050751e+02,
2.07349640e+02, 2.07774246e+02, 2.08830215e+02, 2.10058441e+02,
2.09929962e+02, 2.11402420e+02, 2.11569839e+02, 2.12629303e+02,
2.12903336e+02, 2.13571136e+02, 2.13730606e+02, 2.14629227e+02,
2.15337662e+02, 2.15880402e+02, 2.16718140e+02, 2.17417404e+02,
2.17627701e+02, 2.17918213e+02, 2.18686020e+02, 2.19584457e+02,
2.20081604e+02, 2.20635086e+02, 2.21608109e+02, 2.21707031e+02,
2.22026260e+02, 2.22849289e+02, 2.23420944e+02, 2.23975754e+02,
2.24565445e+02, 2.25094543e+02, 2.25941193e+02, 2.26057922e+02,
2.26614822e+02, 2.26774307e+02, 2.27579437e+02, 2.28359680e+02,
2.28403168e+02, 2.29428238e+02, 2.30448837e+02, 2.30482071e+02)),
xyz88_alongtrack=((-3.4341142 , -3.420134 , -3.4064367 , -3.3923776 , -3.3786461 ,
-3.3645449 , -3.351086 , -3.3406847 , -3.3268292 , -3.3132641 ,
-3.2993538 , -3.285411 , -3.272095 , -3.2673 , -3.2535994 ,
-3.2401679 , -3.226115 , -3.2126648 , -3.1991496 , -3.1853182 ,
-3.1530151 , -3.133586 , -3.1050909 , -3.1060703 , -3.0952425 ,
-3.0738997 , -3.0744498 , -3.0668483 , -3.0554807 , -3.042557 ,
-3.0363784 , -3.0270576 , -3.0265014 , -2.9995985 , -2.9978554 ,
-2.986884 , -2.9623342 , -2.9623554 , -2.9463341 , -2.9293861 ,
-2.924039 , -2.9186556 , -2.8983135 , -2.8890698 , -2.874427 ,
-2.854197 , -2.839065 , -2.8294857 , -2.78911 , -2.7828393 ,
-2.771149 , -2.7610343 , -2.7535224 , -2.7405941 , -2.7337966 ,
-2.7234998 , -2.7115622 , -2.6920753 , -2.6817079 , -2.6696715 ,
-2.6569793 , -2.647072 , -2.63068 , -2.6246283 , -2.6184206 ,
-2.6020677 , -2.588518 , -2.5777202 , -2.565964 , -2.5554373 ,
-2.5467098 , -2.5357418 , -2.5212414 , -2.5087504 , -2.4982927 ,
-2.4870417 , -2.477368 , -2.465551 , -2.45274 , -2.4426322 ,
-2.4322782 , -2.4201796 , -2.40601 , -2.39335 , -2.3832784 ,
-2.3732102 , -2.3625271 , -2.3541355 , -2.3403473 , -2.3266785 ,
-2.3174686 , -2.3059475 , -2.2942684 , -2.2829356 , -2.2744348 ,
-2.265522 , -2.2539537 , -2.2431602 , -2.2314298 , -2.2221925 ,
-2.2128057 , -2.2027435 , -2.1913815 , -2.180371 , -2.1698074 ,
-2.1604757 , -2.1521957 , -2.1429794 , -2.1327355 , -2.122101 ,
-2.1127417 , -2.0982218 , -2.0927794 , -2.0846784 , -2.0766473 ,
-2.0656796 , -2.0578237 , -2.04963 , -2.03883 , -0.34185302,
-0.34496605, -0.34934685, -0.3525476 , -0.35563388, -0.3587202 ,
-0.36197007, -0.36524594, -0.36837184, -0.37071913, -0.3736608 ,
-0.3775382 , -0.38023272, -0.38290423, -0.38562244, -0.38854894,
-0.3913591 , -0.39405623, -0.3964399 , -0.3986658 , -0.40129486,
-0.40365994, -0.40664464, -0.40873381, -0.4109026 , -0.41330856,
-0.4154755 , -0.41748488, -0.41979715, -0.42170793, -0.4231194 ,
-0.42526954, -0.42713314, -0.42857406, -0.42934686, -0.4315364 ,
-0.43326852, -0.4347006 , -0.43618485, -0.43787804, -0.43977138,
-0.44015783, -0.44062367, -0.44189614, -0.4430774 , -0.44405803,
-0.44492954, -0.44567725, -0.44644138, -0.4462955 , -0.44699663,
-0.44737768, -0.4478939 , -0.44810182, -0.4482765 , -0.44839418,
-0.4482304 , -0.44827715, -0.44813967, -0.44779012, -0.4475297 ,
-0.44711775, -0.44734615, -0.44584632, -0.4443593 , -0.44416752,
-0.4427677 , -0.44436726, -0.44313622, -0.44150934, -0.4395432 ,
-0.43872708, -0.43749186, -0.4358667 , -0.43542552, -0.43403193,
-0.43223837, -0.4302709 , -0.42864323, -0.4265904 , -0.42480665,
-0.42283425, -0.42092514, -0.41789728, -0.416698 , -0.41430518,
-0.41305274, -0.41052634, -0.40818647, -0.40552106, -0.40265933,
-0.40029612, -0.39768454, -0.39519858, -0.39220968, -0.38935834,
-0.38657755, -0.3846073 , -0.38183612, -0.37858635, -0.37504283,
-0.3720415 , -0.3688764 , -0.36561212, -0.36229026, -0.35909116,
-0.3560292 , -0.3535312 , -0.34992886, -0.34615177, 0.9327747 ,
0.9457139 , 0.95843416, 0.9715502 , 0.98438776, 0.99677664,
1.0086974 , 1.0227591 , 1.0356303 , 1.0489902 , 1.0632321 ,
1.0776258 , 1.0915332 , 1.105831 , 1.1196829 , 1.1332442 ,
1.1462287 , 1.1608013 , 1.1749841 , 1.1886905 , 1.2034223 ,
1.2184955 , 1.2316297 , 1.2452023 , 1.2592907 , 1.2759203 ,
1.2916869 , 1.305141 , 1.3192412 , 1.3365759 , 1.3490934 ,
1.3608676 , 1.3740125 , 1.3913199 , 1.4081212 , 1.4248521 ,
1.4397576 , 1.4530084 , 1.4670321 , 1.4798586 , 1.4949672 ,
1.5116189 , 1.5268314 , 1.5419824 , 1.5586681 , 1.5718782 ,
1.5850751 , 1.5980088 , 1.6141641 , 1.6292719 , 1.6453232 ,
1.6636587 , 1.6775088 , 1.6906247 , 1.7104388 , 1.7249027 ,
1.7374468 , 1.7494551 , 1.7683302 , 1.783817 , 1.7973671 ,
1.8118517 , 1.8281054 , 1.8431324 , 1.8582848 , 1.8743794 ,
1.8897412 , 1.9051749 , 1.9215206 , 1.9371886 , 1.9539692 ,
1.9707144 , 1.9875603 , 1.9989024 , 2.0153415 , 2.0239282 ,
2.028806 , 2.0379179 , 2.0465288 , 2.0606384 , 2.071832 ,
2.0780282 , 2.0948684 , 2.1074324 , 2.1206737 , 2.1317272 ,
2.1428323 , 2.154875 , 2.167442 , 2.1837835 , 2.193393 ,
2.2031493 , 2.2153733 , 2.2317152 , 2.2518988 , 2.2579153 ,
2.2675717 , 2.281012 , 2.2918565 , 2.2989936 , 2.3201687 ,
2.317977 , 2.3391883 , 2.349953 , 2.3577092 , 2.3643186 ,
2.3819861 , 2.3934293 , 2.4007812 , 2.416749 , 2.4307017 ,
2.4320717 , 2.4463978 , 2.4583652 , 2.4707034 , 2.4826367 ,
2.4897573 , 2.4972827 , 2.5054855 , 2.5231926 , 2.5396364 ,
2.5488675 , 2.559188 , 2.5650363 , 2.579026 , 2.5904417 ,
2.6051438 , 2.612699 , 2.6252537 , 2.643088 , 2.6481194 ,
2.6540053 , 2.6680543 , 2.6783786 , 2.6883307 , 2.698569 ,
2.7132158 , 2.7218535 , 2.725976 , 2.7304826 , 2.7447262 ,
2.7538846 , 2.7655287 , 2.7717228 , 2.7807224 , 2.787311 ,
2.7928395 , 2.7930343 , 2.8003395 , 2.8127356 , 2.8239741 ,
2.8313723 , 2.843324 , 2.8434606 , 2.855321 , 2.8658166 ,
2.8735688 , 2.8740702 , 2.8901873 , 2.8976362 , 2.9099596 ,
2.905566 , 2.9213147 , 2.9252193 , 2.933381 , 2.9411721 ,
2.949623 , 2.9576855 , 2.9653773 , 2.980684 , 2.9797814),
(-2.5296342 , -2.5204477 , -2.510908 , -2.5013425 , -2.4924164 ,
-2.4830773 , -2.4768658 , -2.4678226 , -2.45843 , -2.4490137 ,
-2.4398842 , -2.4304159 , -2.4303641 , -2.4211125 , -2.4118779 ,
-2.4026043 , -2.39326 , -2.3842425 , -2.374873 , -2.3657236 ,
-2.3590941 , -2.3464284 , -2.3403988 , -2.3340342 , -2.3299425 ,
-2.3211021 , -2.3148725 , -2.3064184 , -2.2995348 , -2.2894833 ,
-2.2844563 , -2.2784903 , -2.2637155 , -2.2611887 , -2.2557328 ,
-2.24223 , -2.2413392 , -2.23458 , -2.2341332 , -2.2196176 ,
-2.2085981 , -2.203367 , -2.1942024 , -2.185973 , -2.1807384 ,
-2.1697738 , -2.1590073 , -2.1567974 , -2.1554554 , -2.1393263 ,
-2.1267426 , -2.1225593 , -2.1078153 , -2.1030023 , -2.0975945 ,
-2.0872457 , -2.0778692 , -2.0701027 , -2.062097 , -2.0561185 ,
-2.051557 , -2.0398386 , -2.0311005 , -2.0159519 , -2.0115545 ,
-2.0016599 , -1.9945676 , -1.9895811 , -1.9795785 , -1.9712225 ,
-1.9649968 , -1.9590901 , -1.9485128 , -1.9404866 , -1.9360045 ,
-1.9317261 , -1.9223715 , -1.9127463 , -1.9058589 , -1.8975201 ,
-1.890925 , -1.8826761 , -1.8760394 , -1.8648524 , -1.8590841 ,
-1.8523346 , -1.8439835 , -1.8386006 , -1.8310211 , -1.8248185 ,
-1.813026 , -1.8058181 , -1.7998081 , -1.7927473 , -1.787078 ,
-1.7789302 , -1.7723851 , -1.7668903 , -1.7591796 , -1.7519104 ,
-1.7444488 , -1.7370543 , -1.7301438 , -1.7242212 , -1.7169296 ,
-1.7083946 , -1.7033383 , -1.6975528 , -1.6913056 , -1.6840385 ,
-1.6779888 , -1.6715573 , -1.6658645 , -1.6580609 , -1.6524823 ,
-1.6459084 , -1.6397858 , -1.6338834 , -1.6286706 , 0.42960384,
0.42081293, 0.4119724 , 0.40328807, 0.3942537 , 0.3856998 ,
0.37740278, 0.3693173 , 0.36134616, 0.35345936, 0.34562823,
0.33776766, 0.32976323, 0.32014346, 0.31287897, 0.3055896 ,
0.29864824, 0.29175887, 0.28504542, 0.27854133, 0.2719177 ,
0.26579985, 0.25950533, 0.25340033, 0.24745007, 0.24177958,
0.23586306, 0.23052108, 0.22510584, 0.21997283, 0.2153067 ,
0.21064407, 0.20619014, 0.20123747, 0.19692113, 0.19251035,
0.18884872, 0.18532938, 0.1815907 , 0.17854334, 0.17535667,
0.17237215, 0.16927981, 0.16701925, 0.16432387, 0.16231556,
0.16056024, 0.15955718, 0.1571873 , 0.15603733, 0.15477544,
0.1539029 , 0.15329698, 0.15294707, 0.15292287, 0.15285623,
0.15306455, 0.15336455, 0.15396446, 0.15480661, 0.1559111 ,
0.15717275, 0.15856844, 0.1601802 , 0.16200984, 0.16415095,
0.1662055 , 0.16923568, 0.17196332, 0.1748236 , 0.17787184,
0.18140593, 0.18491459, 0.18843052, 0.19259158, 0.19716193,
0.20100309, 0.20571576, 0.21047443, 0.21524215, 0.22055207,
0.22601297, 0.23123601, 0.23684607, 0.24274613, 0.24874365,
0.25490016, 0.26132998, 0.26777703, 0.27407658, 0.28103286,
0.28807855, 0.29526663, 0.3028074 , 0.30997464, 0.31537196,
0.32288024, 0.33016565, 0.3379537 , 0.34562692, 0.354017 ,
0.36239704, 0.37081546, 0.37931794, 0.3879072 , 0.39549688,
0.40462217, 0.4129201 , 0.42135173, 0.43046737, 1.3655072 ,
1.3817369 , 1.3982302 , 1.4147899 , 1.4301261 , 1.4434702 ,
1.4572818 , 1.4736766 , 1.4904693 , 1.5077864 , 1.5260442 ,
1.5452833 , 1.5627409 , 1.5789938 , 1.5942984 , 1.6122221 ,
1.6294963 , 1.6492637 , 1.6680617 , 1.6844387 , 1.700825 ,
1.7171581 , 1.7333101 , 1.7513151 , 1.772408 , 1.7902514 ,
1.8086666 , 1.829503 , 1.8448267 , 1.8611717 , 1.8782914 ,
1.8976177 , 1.9187694 , 1.9380023 , 1.957246 , 1.9754248 ,
1.9935209 , 2.0092914 , 2.0266464 , 2.0475106 , 2.0664093 ,
2.0851183 , 2.1053567 , 2.123346 , 2.1400077 , 2.1555421 ,
2.1752217 , 2.1961215 , 2.214276 , 2.233462 , 2.251209 ,
2.27001 , 2.2951543 , 2.313072 , 2.3292804 , 2.3448722 ,
2.364583 , 2.382284 , 2.3956556 , 2.41308 , 2.4297707 ,
2.4491642 , 2.461033 , 2.4880188 , 2.5010831 , 2.5167181 ,
2.5313592 , 2.5427556 , 2.5673177 , 2.5962312 , 2.6049473 ,
2.6152413 , 2.637638 , 2.65313 , 2.674319 , 2.69234 ,
2.7081387 , 2.7223406 , 2.7383444 , 2.7583666 , 2.7747402 ,
2.7921488 , 2.8055062 , 2.8260071 , 2.8339355 , 2.843893 ,
2.8686767 , 2.8810503 , 2.8957267 , 2.9157877 , 2.935306 ,
2.9475012 , 2.9671664 , 2.9778666 , 2.9893866 , 3.007291 ,
3.0241585 , 3.0443966 , 3.056784 , 3.0775683 , 3.0846698 ,
3.0934389 , 3.1129785 , 3.1269016 , 3.1386325 , 3.149753 ,
3.1604378 , 3.1761959 , 3.1831887 , 3.2115598 , 3.2152567 ,
3.229638 , 3.2411919 , 3.2547245 , 3.2715294 , 3.2831879 ,
3.2915173 , 3.2998211 , 3.3197937 , 3.3309734 , 3.3501456 ,
3.3616056 , 3.3747969 , 3.381139 , 3.40233 , 3.4158156 ,
3.429014 , 3.4420805 , 3.457673 , 3.462892 , 3.4711576 ,
3.4919822 , 3.5066552 , 3.515342 , 3.5267642 , 3.541361 ,
3.548554 , 3.5638616 , 3.5717168 , 3.5860205 , 3.5898879 ,
3.594236 , 3.6086707 , 3.608592 , 3.628207 , 3.6351144 ,
3.6461325 , 3.6558797 , 3.6601412 , 3.669064 , 3.6785247 ,
3.6976619 , 3.7072208 , 3.7185144 , 3.7290342 , 3.7298775 ,
3.7449558 , 3.7581415 , 3.7650244 , 3.7738168 , 3.783065 ,
3.7922537 , 3.8097312 , 3.8289597 , 3.8478837 , 3.8668382 ,
3.886518 , 3.905518 , 3.9245515 , 3.9439483 , 3.9626787),
(-6.1701527 , -6.143218 , -6.116473 , -6.0889077 , -6.062544 ,
-6.0362325 , -6.009482 , -5.9826803 , -5.9558525 , -5.927859 ,
-5.9013577 , -5.87452 , -5.8478966 , -5.8212624 , -5.7943892 ,
-5.7677264 , -5.741422 , -5.714701 , -5.6881 , -5.6616607 ,
-5.6349983 , -5.60806 , -5.581574 , -5.555062 , -5.52865 ,
-5.5023956 , -5.4759064 , -5.4493704 , -5.423055 , -5.381912 ,
-5.3691163 , -5.3370986 , -5.3010874 , -5.2830343 , -5.252121 ,
-5.219112 , -5.217454 , -5.1894526 , -5.1725254 , -5.1439705 ,
-5.1275163 , -5.105812 , -5.071236 , -5.039516 , -5.002689 ,
-4.982311 , -4.9615874 , -4.92395 , -4.893519 , -4.851656 ,
-4.839939 , -4.819476 , -4.7937284 , -4.7772145 , -4.7517805 ,
-4.7155094 , -4.691606 , -4.6722345 , -4.640914 , -4.619679 ,
-4.599586 , -4.574918 , -4.54851 , -4.527577 , -4.4876966 ,
-4.4631896 , -4.441972 , -4.425681 , -4.402615 , -4.379389 ,
-4.3480334 , -4.321665 , -4.2983665 , -4.2738585 , -4.2509317 ,
-4.22266 , -4.2017484 , -4.1769733 , -4.1449356 , -4.125175 ,
-4.1089053 , -4.0813413 , -4.0544395 , -4.0347457 , -4.0095754 ,
-3.9852183 , -3.9625072 , -3.9402182 , -3.9125867 , -3.8899186 ,
-3.8707 , -3.8521287 , -3.8266852 , -3.8017006 , -3.778392 ,
-3.7557847 , -3.7312255 , -3.7100916 , -3.6864069 , -3.6647143 ,
-3.6423092 , -3.6218588 , -3.601488 , -3.5783722 , -3.5590808 ,
-3.538626 , -3.5167084 , -3.496485 , -3.4704309 , -3.4512982 ,
-3.4323993 , -3.4102163 , -3.3896844 , -3.36897 , -3.3516233 ,
-3.3334568 , -3.3143098 , -3.2965896 , -3.277859 , -3.2548542 ,
-3.235797 , -3.217885 , -3.1996887 , -3.1792495 , -3.1621997 ,
-0.25061432, -0.25341326, -0.25625178, -0.25930303, -0.2619753 ,
-0.26451287, -0.26720542, -0.26997048, -0.2726771 , -0.27524158,
-0.2776008 , -0.28025678, -0.2826892 , -0.2850645 , -0.28730047,
-0.2898375 , -0.29223442, -0.2948517 , -0.29698354, -0.2989442 ,
-0.30136922, -0.30359975, -0.30552405, -0.30747923, -0.3095366 ,
-0.31133592, -0.31319463, -0.314828 , -0.31617475, -0.31832188,
-0.31999028, -0.32155633, -0.32294336, -0.32512572, -0.32514602,
-0.3270774 , -0.32845816, -0.32952017, -0.33107036, -0.33268598,
-0.3339299 , -0.33462906, -0.33579773, -0.33641762, -0.3376072 ,
-0.3390288 , -0.33962536, -0.33934534, -0.33992824, -0.34044504,
-0.3407534 , -0.34120375, -0.34146827, -0.3417109 , -0.3419133 ,
-0.34201765, -0.34263235, -0.34241366, -0.34202212, -0.3416901 ,
-0.34118736, -0.34117806, -0.3405634 , -0.34016913, -0.3395369 ,
-0.33875757, -0.33854306, -0.3375409 , -0.33699748, -0.33604687,
-0.33559027, -0.33459798, -0.33336276, -0.33250383, -0.33119103,
-0.32992545, -0.32878718, -0.32678166, -0.3258018 , -0.3242357 ,
-0.32268274, -0.3202639 , -0.31968316, -0.31777254, -0.31626207,
-0.3145265 , -0.31242126, -0.31039542, -0.30874702, -0.30622005,
-0.30439734, -0.30219388, -0.3001563 , -0.29776624, -0.2954389 ,
-0.2929788 , -0.29073805, -0.2885509 , -0.28609788, -0.28357497,
-0.2808612 , -0.27832037, -0.27576703, -0.2730402 , -0.27027318,
-0.26752153, -0.26480654, -0.26271072, -0.25958645, -0.25631547,
-0.25329557, 2.3198926 , 2.341152 , 2.362467 , 2.3839064 ,
2.404224 , 2.424166 , 2.445166 , 2.4658287 , 2.485771 ,
2.5125527 , 2.5390441 , 2.5628045 , 2.5865796 , 2.6066172 ,
2.6301265 , 2.652843 , 2.673816 , 2.6964712 , 2.7220752 ,
2.7459188 , 2.768937 , 2.7934527 , 2.8183856 , 2.8438435 ,
2.8659923 , 2.887092 , 2.9119623 , 2.9370801 , 2.9611688 ,
2.984307 , 3.0043626 , 3.0233266 , 3.0516076 , 3.0787613 ,
3.1018977 , 3.1293552 , 3.1564965 , 3.1775303 , 3.2033513 ,
3.2274868 , 3.2496977 , 3.2730763 , 3.2994616 , 3.3261418 ,
3.3513246 , 3.373616 , 3.3972485 , 3.4288392 , 3.447505 ,
3.4689748 , 3.49711 , 3.523548 , 3.5466187 , 3.5697057 ,
3.5873063 , 3.604161 , 3.6387062 , 3.659551 , 3.6869226 ,
3.7101405 , 3.7340686 , 3.760947 , 3.780482 , 3.8028438 ,
3.828947 , 3.8514876 , 3.869367 , 3.8822057 , 3.919422 ,
3.9411228 , 3.9640384 , 3.9879239 , 4.0114107 , 4.034805 ,
4.0594287 , 4.0955853 , 4.1073523 , 4.126584 , 4.1440926 ,
4.165459 , 4.1867647 , 4.206727 , 4.2393537 , 4.2564306 ,
4.2738166 , 4.30833 , 4.3293743 , 4.342854 , 4.3765693 ,
4.3944545 , 4.41007 , 4.4335513 , 4.447276 , 4.4610634 ,
4.485117 , 4.5030637 , 4.5143094 , 4.5401063 , 4.5568995 ,
4.5765147 , 4.600311 , 4.615208 , 4.6293116 , 4.6521916 ,
4.6704245 , 4.691224 , 4.704844 , 4.724179 , 4.7370353 ,
4.759802 , 4.7627687 , 4.7813535 , 4.8192854 , 4.830439 ,
4.8522 , 4.8705378 , 4.8802066 , 4.8867393 , 4.9069834 ,
4.9234624 , 4.93376 , 4.953696 , 4.9654837 , 4.9801598 ,
4.9902077 , 5.009688 , 5.022896 , 5.0457306 , 5.046714 ,
5.076403 , 5.0788445 , 5.1074343 , 5.1172495 , 5.13688 ,
5.1444306 , 5.1606708 , 5.1762147 , 5.1892204 , 5.20917 ,
5.2123938 , 5.2386675 , 5.2565207 , 5.26317 , 5.2824903 ,
5.3071165 , 5.3028507 , 5.3315544 , 5.3365602 , 5.3611994 ,
5.3621287 , 5.386591 , 5.3957353 , 5.3983316 , 5.4169664 ,
5.438482 , 5.4365215 , 5.4529085 , 5.4596562 , 5.472051 ,
5.490402 , 5.507735 , 5.510587 , 5.5287447 , 5.542047),
(-5.063785 , -5.0421233 , -5.0208287 , -4.9989486 , -4.97786 ,
-4.956472 , -4.9350367 , -4.913545 , -4.8920655 , -4.8689556 ,
-4.847747 , -4.8261867 , -4.8049126 , -4.7836375 , -4.762031 ,
-4.7407637 , -4.719666 , -4.698271 , -4.6769605 , -4.6558695 ,
-4.634413 , -4.6133165 , -4.592102 , -4.570886 , -4.549732 ,
-4.528789 , -4.5075383 , -4.4854956 , -4.4644666 , -4.417211 ,
-4.409773 , -4.3857727 , -4.3644495 , -4.3461847 , -4.318716 ,
-4.3089914 , -4.2802525 , -4.2726736 , -4.241684 , -4.2329626 ,
-4.2105703 , -4.189924 , -4.167034 , -4.14794 , -4.116444 ,
-4.114101 , -4.091335 , -4.0736275 , -4.0555234 , -4.026123 ,
-3.9965417 , -3.978063 , -3.9674134 , -3.9570477 , -3.9199286 ,
-3.9113975 , -3.8941224 , -3.8719723 , -3.85474 , -3.8325944 ,
-3.8120136 , -3.7955968 , -3.775664 , -3.757232 , -3.7392094 ,
-3.7154036 , -3.6928024 , -3.673637 , -3.6548316 , -3.6123824 ,
-3.6066508 , -3.5916548 , -3.574536 , -3.5585268 , -3.5428355 ,
-3.5190377 , -3.4987247 , -3.479442 , -3.4601119 , -3.4379168 ,
-3.4192133 , -3.401653 , -3.3848014 , -3.3632038 , -3.343255 ,
-3.3236592 , -3.3052075 , -3.2893484 , -3.2766275 , -3.2515383 ,
-3.2306972 , -3.2158413 , -3.1980383 , -3.1779666 , -3.1581767 ,
-3.1391163 , -3.1204214 , -3.105599 , -3.0908444 , -3.069848 ,
-3.0522344 , -3.0349674 , -3.0148876 , -2.9972184 , -2.981757 ,
-2.964527 , -2.9493032 , -2.9335883 , -2.9158018 , -2.899654 ,
-2.8831363 , -2.8656514 , -2.8510733 , -2.8353653 , -2.8195233 ,
-2.8022857 , -2.7857637 , -2.7709243 , -2.7570996 , -2.7402747 ,
-2.724731 , -2.709084 , -2.6942518 , -2.6781745 , -2.6640043 ,
0.66846293, 0.6593954 , 0.6502432 , 0.6410959 , 0.63149226,
0.62222916, 0.6135538 , 0.60487074, 0.59643096, 0.58782566,
0.57907134, 0.5709216 , 0.5631169 , 0.5547157 , 0.54637086,
0.53817296, 0.5310856 , 0.5238058 , 0.5165165 , 0.5095688 ,
0.5022796 , 0.49526024, 0.48883134, 0.48235533, 0.47606492,
0.4696316 , 0.46333265, 0.45679197, 0.4515032 , 0.44625813,
0.4408848 , 0.4351901 , 0.43046013, 0.42541286, 0.42049214,
0.41572955, 0.4117749 , 0.40818846, 0.4038652 , 0.40080574,
0.3972714 , 0.39431587, 0.3918493 , 0.38911384, 0.38595125,
0.38391796, 0.38182884, 0.37987727, 0.37837186, 0.3767807 ,
0.375809 , 0.37424806, 0.37367344, 0.3731295 , 0.37274712,
0.372862 , 0.3729327 , 0.37397683, 0.37410036, 0.37449732,
0.37572205, 0.3767725 , 0.37838638, 0.37958467, 0.38149962,
0.3837622 , 0.3858393 , 0.3890288 , 0.39140847, 0.3948692 ,
0.3982883 , 0.40148556, 0.40547407, 0.40911385, 0.41347596,
0.41705775, 0.42167273, 0.42659083, 0.4320353 , 0.43638995,
0.4421468 , 0.44730002, 0.45218083, 0.458007 , 0.4644077 ,
0.4701402 , 0.47641975, 0.48274007, 0.48944283, 0.4961784 ,
0.5032472 , 0.51009107, 0.5173177 , 0.5247971 , 0.5321115 ,
0.5395914 , 0.5473632 , 0.55554235, 0.56366634, 0.5715689 ,
0.57984746, 0.5882185 , 0.5968071 , 0.60563415, 0.6144194 ,
0.62302417, 0.6318701 , 0.6408516 , 0.65004987, 0.6598824 ,
0.66881514, 2.8181574 , 2.8419414 , 2.8680222 , 2.8927715 ,
2.9185703 , 2.9429321 , 2.9673157 , 2.9917567 , 3.0161512 ,
3.0441792 , 3.0702722 , 3.0982633 , 3.1251836 , 3.1508787 ,
3.177682 , 3.2044206 , 3.2318149 , 3.2577786 , 3.2862692 ,
3.3149629 , 3.3416681 , 3.3662148 , 3.3920488 , 3.422806 ,
3.4482467 , 3.4761229 , 3.501862 , 3.5305355 , 3.5612159 ,
3.5869417 , 3.6141565 , 3.6445618 , 3.675562 , 3.7056994 ,
3.7286189 , 3.7547908 , 3.787306 , 3.8163857 , 3.8428853 ,
3.873411 , 3.9028132 , 3.9288948 , 3.9561367 , 3.9831827 ,
4.007046 , 4.033872 , 4.0739064 , 4.1018786 , 4.129444 ,
4.1626406 , 4.1914463 , 4.2188764 , 4.2429733 , 4.2672076 ,
4.3007555 , 4.3280263 , 4.354803 , 4.3751864 , 4.4014235 ,
4.430808 , 4.4579988 , 4.4925094 , 4.5206885 , 4.547345 ,
4.579568 , 4.6029897 , 4.6253886 , 4.658669 , 4.679645 ,
4.700163 , 4.7340894 , 4.7580333 , 4.781907 , 4.8102665 ,
4.841808 , 4.875794 , 4.899225 , 4.9200273 , 4.9499316 ,
4.9870896 , 4.9996395 , 5.017656 , 5.045376 , 5.0744343 ,
5.1078043 , 5.1258082 , 5.148601 , 5.173428 , 5.1954827 ,
5.23539 , 5.2490773 , 5.262582 , 5.3070807 , 5.311601 ,
5.317446 , 5.3530416 , 5.37079 , 5.395554 , 5.4181705 ,
5.440431 , 5.469647 , 5.4857945 , 5.501048 , 5.5146422 ,
5.55177 , 5.564615 , 5.6008434 , 5.6143517 , 5.6338997 ,
5.662582 , 5.678856 , 5.6967115 , 5.705904 , 5.734875 ,
5.753488 , 5.770643 , 5.790242 , 5.796955 , 5.8156877 ,
5.84315 , 5.865778 , 5.8800635 , 5.902226 , 5.932761 ,
5.939875 , 5.95074 , 5.979732 , 6.0131927 , 6.008232 ,
6.0492396 , 6.0528154 , 6.081999 , 6.0887785 , 6.106753 ,
6.110283 , 6.1348915 , 6.1541767 , 6.1687145 , 6.1917253 ,
6.210685 , 6.215859 , 6.2232566 , 6.2443123 , 6.269108 ,
6.2824316 , 6.297495 , 6.3244576 , 6.326535 , 6.3349195 ,
6.3576317 , 6.373265 , 6.3884234 , 6.4045753 , 6.419024 ,
6.4425483 , 6.445232 , 6.4605875 , 6.4645147 , 6.4868937 ,
6.508643 , 6.509296 , 6.5380497 , 6.5666127 , 6.5670757))):
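        # note: the large array defaults above are the 'real data built into this class'
        # (per-beam depth/acrosstrack/alongtrack and supporting records) referenced by the
        # construct_* docstrings below; they are consumed as-is to synthesize test datasets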
if profile is None:
profile = {'profile_1495599960': '[[0.0, 1489.2000732421875], [0.32, 1489.2000732421875], '
'[0.5, 1488.7000732421875], [0.55, 1488.300048828125], '
'[0.61, 1487.9000244140625], [0.65, 1488.2000732421875], '
'[0.67, 1488.0], [0.79, 1487.9000244140625], '
'[0.88, 1487.9000244140625], [1.01, 1488.2000732421875], '
'[1.04, 1488.0999755859375], [1.62, 1488.0999755859375], '
'[2.0300000000000002, 1488.300048828125], [2.43, 1488.9000244140625], '
'[2.84, 1488.5], [3.25, 1487.7000732421875], '
'[3.67, 1487.2000732421875], [4.45, 1486.800048828125], '
'[4.8500000000000005, 1486.800048828125], [5.26, 1486.5999755859375], '
'[6.09, 1485.7000732421875], [6.9, 1485.0999755859375], '
'[7.71, 1484.800048828125], [8.51, 1484.0], [8.91, 1483.800048828125], '
'[10.13, 1483.7000732421875], [11.8, 1483.0999755859375], '
'[12.620000000000001, 1482.9000244140625], '
'[16.79, 1482.9000244140625], [20.18, 1481.9000244140625], '
'[23.93, 1481.300048828125], [34.79, 1480.800048828125], '
'[51.15, 1480.800048828125], [56.13, 1481.0], [60.67, 1481.5], '
'[74.2, 1481.9000244140625], [12000.0, 1675.800048828125]]'}
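        # each profile entry maps 'profile_<timestamp>' to a stringified list of
        # [depth (m), sound speed (m/s)] points describing the sound velocity cast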
self.client = None
self.synth_ra_time = synth_ra_time
self.synth_att_time = synth_att_time
self.synth_nav_time = synth_nav_time
self.synth_beampointingangle = synth_beampointingangle
self.synth_delay = synth_delay
self.synth_frequency = synth_frequency
self.synth_txsector_beam = synth_txsector_beam
self.synth_qualityfactor = synth_qualityfactor
self.synth_soundspeed = synth_soundspeed
self.synth_counter = synth_counter
self.synth_tiltangle = synth_tiltangle
self.synth_traveltime = synth_traveltime
self.synth_ntx = synth_ntx
self.synth_heave = synth_heave
self.synth_roll = synth_roll
self.synth_pitch = synth_pitch
self.synth_yaw = synth_yaw
self.synth_altitude = synth_altitude
self.synth_latitude = synth_latitude
self.synth_longitude = synth_longitude
self.xyz88_depth = xyz88_depth
self.xyz88_alongtrack = xyz88_alongtrack
self.xyz88_acrosstrack = xyz88_acrosstrack
self.secs = secs
self.serialnum = serialnum
self.secondary_serialnum = secondary_serialnum
self.profile = profile
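        # xyzrph: timestamped installation parameters, one {timestamp: value} dict per
        # entry, covering mounting angles/offsets (tx_*/rx_*) and uncertainty terms (*_error)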
self.xyzrph = {'beam_opening_angle': {str(synth_xyztime): 1.0},
'heading_patch_error': {str(synth_xyztime): 0.5},
'heading_sensor_error': {str(synth_xyztime): 0.02},
'heave_error': {str(synth_xyztime): 0.05},
'horizontal_positioning_error': {str(synth_xyztime): 1.5},
'imu_h': {str(synth_xyztime): 0.0},
'latency': {str(synth_xyztime): 0.0},
'imu_p': {str(synth_xyztime): 0.0},
'imu_r': {str(synth_xyztime): 0.0},
'imu_x': {str(synth_xyztime): 0.0},
'imu_y': {str(synth_xyztime): 0.0},
'imu_z': {str(synth_xyztime): 0.0},
'latency_patch_error': {str(synth_xyztime): 0.0},
'pitch_patch_error': {str(synth_xyztime): 0.1},
'pitch_sensor_error': {str(synth_xyztime): 0.0005},
'roll_patch_error': {str(synth_xyztime): 0.1},
'roll_sensor_error': {str(synth_xyztime): 0.0005},
'tx_to_antenna_x': {str(synth_xyztime): 0.0},
'tx_to_antenna_y': {str(synth_xyztime): 0.0},
'tx_to_antenna_z': {str(synth_xyztime): 0.0},
'separation_model_error': {str(synth_xyztime): 0.0},
'surface_sv_error': {str(synth_xyztime): 0.5},
'timing_latency_error': {str(synth_xyztime): 0.001},
'vertical_positioning_error': {str(synth_xyztime): 1.0},
'vessel_speed_error': {str(synth_xyztime): 0.1},
'waterline_error': {str(synth_xyztime): 0.02},
'x_offset_error': {str(synth_xyztime): 0.2},
'y_offset_error': {str(synth_xyztime): 0.2},
'z_offset_error': {str(synth_xyztime): 0.2},
'tx_r': {str(synth_xyztime): synth_tx_mountroll},
'tx_p': {str(synth_xyztime): synth_tx_mountpitch},
'tx_h': {str(synth_xyztime): synth_tx_mountyaw},
'rx_r': {str(synth_xyztime): synth_rx_mountroll},
'rx_p': {str(synth_xyztime): synth_rx_mountpitch},
'rx_h': {str(synth_xyztime): synth_rx_mountyaw},
'tx_x': {str(synth_xyztime): synth_tx_x},
'tx_y': {str(synth_xyztime): synth_tx_y},
'tx_z': {str(synth_xyztime): synth_tx_z},
'tx_x_0': {str(synth_xyztime): synth_tx_x_0},
'tx_x_1': {str(synth_xyztime): synth_tx_x_1},
'tx_x_2': {str(synth_xyztime): synth_tx_x_2},
'tx_y_0': {str(synth_xyztime): synth_tx_y_0},
'tx_y_1': {str(synth_xyztime): synth_tx_y_1},
'tx_y_2': {str(synth_xyztime): synth_tx_y_2},
'tx_z_0': {str(synth_xyztime): synth_tx_z_0},
'tx_z_1': {str(synth_xyztime): synth_tx_z_1},
'tx_z_2': {str(synth_xyztime): synth_tx_z_2},
'rx_x': {str(synth_xyztime): synth_rx_x},
'rx_y': {str(synth_xyztime): synth_rx_y},
'rx_z': {str(synth_xyztime): synth_rx_z},
'rx_x_0': {str(synth_xyztime): synth_rx_x_0},
'rx_x_1': {str(synth_xyztime): synth_rx_x_1},
'rx_x_2': {str(synth_xyztime): synth_rx_x_2},
'rx_y_0': {str(synth_xyztime): synth_rx_y_0},
'rx_y_1': {str(synth_xyztime): synth_rx_y_1},
'rx_y_2': {str(synth_xyztime): synth_rx_y_2},
'rx_z_0': {str(synth_xyztime): synth_rx_z_0},
'rx_z_1': {str(synth_xyztime): synth_rx_z_1},
'rx_z_2': {str(synth_xyztime): synth_rx_z_2},
'waterline': {str(synth_xyztime): synth_waterline}
}
        self.raw_ping = self.construct_raw_ping()
        self.raw_att = self.construct_rawattitude()

    def construct_raw_ping(self):
"""
Take the provided real data built into this class and generate new raw_ping data.
Returns
-------
dataset: list of xarray DataSet objects that represent the raw_ping data you would get when running
xarray_conversion normally.
"""
sec_vals = self.secs
dataset = []
for cnt, sec in enumerate(sec_vals):
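            # build one Dataset per entry in secs; sec becomes the system_identifier attribute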
num_beams = int(len(self.synth_beampointingangle[cnt]))
bm_vals = [i for i in range(num_beams)]
tme_vals = self.synth_ra_time
counter_vals = self.synth_counter
tme_coord = xr.DataArray(np.array(tme_vals), dims=['time'], coords=np.array([tme_vals]))
beam_coord = xr.DataArray(np.array(bm_vals), dims=['beam'], coords=np.array([bm_vals]))
ntx = xr.DataArray(np.array(self.synth_ntx), dims=['time'], coords={'time': tme_coord})
soundspeed = xr.DataArray(np.array(self.synth_soundspeed), dims=['time'], coords={'time': tme_coord})
counter = xr.DataArray(np.array(counter_vals), dims=['time'], coords={'time': tme_coord})
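            # beam-wise variables may arrive as 1d arrays (a single ping); promote them
            # to 2d (time, beam) so the DataArray dims line up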
tiltangle_data = np.array(list(self.synth_tiltangle))
if tiltangle_data.ndim == 1:
tiltangle_data = np.expand_dims(tiltangle_data, axis=1)
tiltangle = xr.DataArray(tiltangle_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
twtt_data = np.array(list(self.synth_traveltime))
if twtt_data.ndim == 1:
twtt_data = np.expand_dims(twtt_data, axis=1)
twoway_travel_time = xr.DataArray(twtt_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
delay_data = np.array(list(self.synth_delay))
if delay_data.ndim == 1:
delay_data = np.expand_dims(delay_data, axis=1)
delay = xr.DataArray(delay_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
frequency_data = np.array(list(self.synth_frequency))
if frequency_data.ndim == 1:
frequency_data = np.expand_dims(frequency_data, axis=1)
frequency = xr.DataArray(frequency_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
txsector_data = np.array(list(self.synth_txsector_beam))
if txsector_data.ndim == 1:
txsector_data = np.expand_dims(txsector_data, axis=1)
txsector = xr.DataArray(txsector_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
bpa_data = np.array(list(self.synth_beampointingangle))
if bpa_data.ndim == 1:
bpa_data = np.expand_dims(bpa_data, axis=1)
beam_pointing_angle = xr.DataArray(bpa_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
qf_data = np.array(list(self.synth_qualityfactor))
if qf_data.ndim == 1:
qf_data = np.expand_dims(qf_data, axis=1)
quality_factor = xr.DataArray(qf_data, dims=['time', 'beam'],
coords={'time': tme_coord, 'beam': bm_vals})
altitude = xr.DataArray(np.array(self.synth_altitude), dims=['time'], coords={'time': tme_coord})
latitude = xr.DataArray(np.array(self.synth_latitude), dims=['time'], coords={'time': tme_coord})
longitude = xr.DataArray(np.array(self.synth_longitude), dims=['time'], coords={'time': tme_coord})
dataset.append(xr.Dataset({'ntx': (['time'], ntx.data), # use the underlying numpy array to avoid problems in xarray 0.19.0
'counter': (['time'], counter.data),
'soundspeed': (['time'], soundspeed.data),
'tiltangle': (['time', 'beam'], tiltangle.data),
'txsector_beam': (['time', 'beam'], txsector.data),
'delay': (['time', 'beam'], delay.data),
'frequency': (['time', 'beam'], frequency.data),
'traveltime': (['time', 'beam'], twoway_travel_time.data),
'beampointingangle': (['time', 'beam'], beam_pointing_angle.data),
'qualityfactor': (['time', 'beam'], quality_factor.data),
'altitude': (['time'], altitude.data),
'latitude': (['time'], latitude.data),
'longitude': (['time'], longitude.data)},
coords={'time': tme_coord.data, 'beam': beam_coord.data},
attrs={list(self.profile.keys())[0]: self.profile[list(self.profile.keys())[0]],
'system_serial_number': [self.serialnum],
'secondary_system_serial_number': [self.secondary_serialnum],
'system_identifier': sec,
'min_lon': float(np.min(longitude)),
'min_lat': float(np.min(latitude)),
'max_lon': float(np.max(longitude)),
'max_lat': float(np.max(latitude))}).chunk())
        return dataset

    def construct_rawattitude(self):
"""
Take the provided real data built into this class and generate new xarray attitude data.
Returns
-------
dataset: xarray DataSet object that represents the attitude data you would get when running
xarray_conversion normally.
"""
tme_vals = list(self.synth_att_time)
tme_coord = xr.DataArray(np.array(tme_vals), dims=['time'], coords=np.array([tme_vals]))
heading = xr.DataArray(np.array(self.synth_yaw), dims=['time'], coords={'time': tme_coord})
heave = xr.DataArray(np.array(self.synth_heave), dims=['time'], coords={'time': tme_coord})
pitch = xr.DataArray(np.array(self.synth_pitch), dims=['time'], coords={'time': tme_coord})
roll = xr.DataArray(np.array(self.synth_roll), dims=['time'], coords={'time': tme_coord})
return xr.Dataset({'heading': (['time'], heading.data), 'heave': (['time'], heave.data), 'pitch': (['time'], pitch.data),
'roll': (['time'], roll.data)}, coords={'time': tme_coord.data}).chunk()


def load_dataset(dset=None, skip_dask=True):
"""
Returns the 'real' dataset constructed using one of the synth data classes. If None, uses SyntheticFqpr with some
dummy values. Otherwise, expects one of RealFqpr, RealDualheadFqpr, SyntheticFqpr, etc. Builds the
xarray_conversion BatchRead class using the dataset data.
Parameters
----------
dset: optional, if None will use SyntheticFqpr with zeroed values, otherwise one of RealFqpr, RealDualheadFqpr,
SyntheticFqpr, etc classes.
skip_dask
Returns
-------
kongs_dat: xarray_conversion BatchRead object
"""
if dset is None:
dset = SyntheticFqpr(synth_time=0, synth_heave=0, synth_roll=0, synth_pitch=0, synth_yaw=0,
synth_tx_mountroll=0, synth_tx_mountpitch=0, synth_tx_mountyaw=0, synth_rx_mountroll=0,
synth_rx_mountpitch=0, synth_rx_mountyaw=0, secs=('999_0_290000', '999_0_300000'))
kongs_dat = xarray_conversion.BatchRead('', skip_dask=skip_dask)
kongs_dat.logger = logging.getLogger()
kongs_dat.logger.setLevel(logging.INFO)
kongs_dat.xyzrph = dset.xyzrph
kongs_dat.raw_ping = dset.raw_ping
kongs_dat.raw_att = dset.raw_att
return kongs_dat
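
# Example usage (a sketch; assumes the Real*/Synthetic* classes can be
# constructed as shown -- RealFqpr's constructor arguments are not shown here):
#   dat = load_dataset()            # zeroed SyntheticFqpr, dask skipped
#   dat = load_dataset(RealFqpr())  # hypothetical: real recorded values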
| 108.338426
| 182
| 0.477224
|
fd458ff72623db3649c7915a35f804c0aaafff28
| 359
|
py
|
Python
|
flask_worker/task.py
|
GrayMissing/flask-worker
|
d35fa7528c536de88dab93ae431b7860b59b53fc
|
[
"Apache-2.0"
] | 2
|
2017-03-16T06:31:39.000Z
|
2017-03-16T09:22:35.000Z
|
flask_worker/task.py
|
GrayMissing/flask-worker
|
d35fa7528c536de88dab93ae431b7860b59b53fc
|
[
"Apache-2.0"
] | null | null | null |
flask_worker/task.py
|
GrayMissing/flask-worker
|
d35fa7528c536de88dab93ae431b7860b59b53fc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
from .message import Message
class Task(object):
def __init__(self, func, worker):
self.func = func
self.worker = worker
def start(self, *args, **kwargs):
self.func(*args, **kwargs)
def delay(self, *args, **kwargs):
self.worker.default_chan.push(Message(self, args, kwargs))
return True
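
# Example usage (a sketch; assumes a worker object exposing a default_chan
# message channel, as Task.delay relies on above):
#
#   def send_mail(address):
#       print('mailing', address)
#
#   task = Task(send_mail, worker)
#   task.start('a@example.com')   # runs the function synchronously
#   task.delay('a@example.com')   # queued as a Message on worker.default_chan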
| 21.117647
| 66
| 0.615599
|
b2ac550678d972146bdb0deef242dcedda81f586
| 8,153
|
py
|
Python
|
ssguan/ignitor/auth/service.py
|
samuelbaizg/ssguan
|
97def0609d61e40472554464470758b5fb9eca35
|
[
"Apache-2.0"
] | 1
|
2015-07-14T14:24:05.000Z
|
2015-07-14T14:24:05.000Z
|
ssguan/ignitor/auth/service.py
|
samuelbaizg/ssguan
|
97def0609d61e40472554464470758b5fb9eca35
|
[
"Apache-2.0"
] | null | null | null |
ssguan/ignitor/auth/service.py
|
samuelbaizg/ssguan
|
97def0609d61e40472554464470758b5fb9eca35
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2015 www.suishouguan.com
#
# Licensed under the Private License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/samuelbaizg/ssguan/blob/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ssguan.ignitor.auth.error import OldPasswordError, LoginFailedError, \
CredentialExpiredError
from ssguan.ignitor.auth.model import User, RoleOperation, Token, Role, \
UserRole, encrypt_password
from ssguan.ignitor.base import context
from ssguan.ignitor.base.error import ProgramError, RequiredError
from ssguan.ignitor.orm.error import LinkedError
from ssguan.ignitor.orm.validator import LengthValidator
from ssguan.ignitor.utility import kind, crypt
def signup(u_name, u_email, u_account, u_password):
u_name = crypt.rsa_decrypt(u_name, context.get_token().rsa_key)
u_email = crypt.rsa_decrypt(u_email, context.get_token().rsa_key)
u_account = crypt.rsa_decrypt(u_account, context.get_token().rsa_key)
u_password = crypt.rsa_decrypt(u_password, context.get_token().rsa_key)
    # create_user expects (u_account, u_password, disable_flag, ...); carry the
    # decrypted name and email through u_attributes so the call matches that signature.
    create_user(u_account, u_password, False,
                u_attributes={'u_name': u_name, 'u_email': u_email})
return True
def update_user_password(old_password, new_password):
if context.get_token().is_anonymous():
raise CredentialExpiredError()
old_password = crypt.rsa_decrypt(old_password, context.get_token().rsa_key)
new_password = crypt.rsa_decrypt(new_password, context.get_token().rsa_key)
user = User.get_by_key(context.get_user_id())
if user.u_password != encrypt_password(old_password):
raise OldPasswordError()
else:
if __validate_password(new_password):
user.u_password = encrypt_password(new_password)
user.update(context.get_user_id())
return True
def create_user(u_account, u_password, disable_flag, u_attributes=None, u_preferences=None):
if __validate_password(u_password):
user1 = User()
user1.u_account = u_account
user1.u_password = encrypt_password(u_password)
user1.disable_flag = disable_flag
user1.u_attributes = u_attributes
user1.u_preferences = u_preferences
user1.create(context.get_user_id())
return user1
def disable_user(user_id, disable_flag):
user = User.get_by_key(user_id)
user.disable_flag = disable_flag
return user.update(context.get_user_id())
def get_user(user_id=None, u_account=None):
user = None
if user_id != None and user_id != User.NULL_USER_ID:
user = User.get_by_key(user_id)
elif u_account != None:
query = User.all()
query.filter("u_account =", u_account)
if query.count() > 0:
user = query.get()
return user
def login(login_name, login_password, is_anonymous=False):
"""
Return Token
"""
if is_anonymous:
usermodel = get_user(User.ID_ANONYMOUS)
else:
if kind.str_is_empty(login_name):
raise RequiredError("login_name")
        if kind.str_is_empty(login_password):
            raise RequiredError("login_password")
login_name = crypt.rsa_decrypt(
login_name, context.get_token().rsa_key)
login_password = crypt.rsa_decrypt(
login_password, context.get_token().rsa_key)
query = User.all()
query.filter("u_account =", login_name)
password = encrypt_password(login_password)
query.filter("u_password =", password)
if query.count() > 0:
usermodel = query.fetch(1)[0]
else:
raise LoginFailedError()
role_codes = fetch_userroles(usermodel.key())
operation_codes = fetch_useroperations(usermodel.key())
token = Token(usermodel.key(), usermodel.u_account, role_codes, operation_codes,
anonymous=usermodel.is_anonymous())
if usermodel.u_preferences is not None:
for (key, value) in usermodel.u_preferences.items():
setattr(token, key, value)
return token
def logout():
token = login(None, None, is_anonymous=True)
return token
def get_role(role_id=None, role_name=None):
if role_id != None:
role = Role.get_by_key(role_id)
else:
query = Role.all()
query.filter("role_name =", role_name)
role = query.get()
return role
def create_role(role_code, role_name, enable_flag=True, reserve_flag=False):
role = Role(role_code=role_code, role_name=role_name, enable_flag=enable_flag, reserve_flag=reserve_flag)
role.create(context.get_user_id())
return role
def delete_role(role_id):
query = UserRole.all()
query.filter("role_id =", role_id)
if query.count() > 0:
raise LinkedError("role", "user")
role = Role.get_by_key(role_id)
role.delete_roleoperations()
return role.delete(context.get_user_id())
def fetch_roles(enable_flag=None, reserve_flag=None):
query = Role.all()
if enable_flag != None:
query.filter("enable_flag =", enable_flag)
if reserve_flag != None:
query.filter("reserve_flag =", reserve_flag)
return query.fetch()
def create_userrole(user_id, role_id):
userrole = UserRole(user_id=user_id, role_id=role_id)
return userrole.create(context.get_user_id())
def delete_userroles(userrole_id=None, user_id=None, role_id=None):
if userrole_id is None and user_id is None and role_id is None:
raise ProgramError(
"one of userrole_id, user_id, role_id can't be None at least")
if userrole_id is not None:
userrole = UserRole.get_by_key(userrole_id)
return userrole.delete(context.get_user_id())
query = UserRole.all()
if user_id is not None:
query.filter("user_id =", user_id)
if role_id is not None:
query.filter("role_id =", role_id)
return query.delete(context.get_user_id())
def fetch_userroles(user_id):
"""
Return the set of role_code that the user owns.
"""
codes = set()
query = UserRole.all()
query.filter("user_id =", user_id)
userroles = query.fetch()
if len(userroles) > 0:
query = Role.all()
query.filter("_id in ", [
userrole.role_id for userrole in userroles])
roles = query.fetch()
for role in roles:
codes.add(role.role_code)
return codes
def fetch_useroperations(user_id, role_id=None):
"""
Return the set of opeation_code that the user owns in the role.
"""
operation_codes = []
if user_id == User.ID_ROOT:
for role in fetch_roles():
if role_id is None or role_id == role.key():
codes = role.fetch_roleoperations()
operation_codes.extend(codes)
return set(operation_codes)
useroperations = []
if role_id is None:
query = UserRole.all()
query.filter("user_id =", user_id)
userroles = query.fetch()
if len(userroles) == 0:
return set()
else:
query = RoleOperation.all()
query.filter("role_id in", [
userrole.role_id for userrole in userroles])
else:
query = RoleOperation.all()
query.filter("role_id =", role_id)
roleoperations = query.fetch()
for roleoperation in roleoperations:
useroperations.append(roleoperation.operation_code)
return set(useroperations)
def __validate_password(password):
validator = LengthValidator(minlength=6, maxlength=20)
return validator.validate(password, "password")
| 38.098131
| 110
| 0.6562
|
bb09f432a4c73afcec57760f45fa39fb783a3b84
| 6,850
|
py
|
Python
|
tests/flow/test_value_comparisons.py
|
filipecosta90/RedisGraph
|
96c00296b3dab510a101c24ad70cfd3f715f3805
|
[
"Ruby",
"ISC",
"MIT"
] | null | null | null |
tests/flow/test_value_comparisons.py
|
filipecosta90/RedisGraph
|
96c00296b3dab510a101c24ad70cfd3f715f3805
|
[
"Ruby",
"ISC",
"MIT"
] | null | null | null |
tests/flow/test_value_comparisons.py
|
filipecosta90/RedisGraph
|
96c00296b3dab510a101c24ad70cfd3f715f3805
|
[
"Ruby",
"ISC",
"MIT"
] | null | null | null |
from RLTest import Env
from redisgraph import Graph, Node, Edge
from base import FlowTestsBase
redis_graph = None
dis_redis = None
redis_con = None
values = ["str1", "str2", False, True, 5, 10.5]
class testValueComparison(FlowTestsBase):
def __init__(self):
super(testValueComparison, self).__init__()
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph("G", redis_con)
self.populate_graph()
    def populate_graph(self):
global redis_graph
for v in values:
node = Node(label="value", properties={"val": v})
redis_graph.add_node(node)
# Add an additional node with no properties
redis_graph.add_node(Node(label="value"))
redis_graph.commit()
# Verify the ordering of values that can and cannot be directly compared
def test_orderability(self):
query = """MATCH (v:value) RETURN v.val ORDER BY v.val"""
actual_result = redis_graph.query(query)
expected = [['str1'],
['str2'],
[False],
[True],
[5],
[10.5],
[None]]
self.env.assertEquals(actual_result.result_set, expected)
# Expect the results to appear in reverse when using descending order
query = """MATCH (v:value) RETURN v.val ORDER BY v.val DESC"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set, expected[::-1])
# From the Cypher specification:
# "In a mixed set, any numeric value is always considered to be higher than any string value"
def test_mixed_type_min(self):
query = """MATCH (v:value) RETURN MIN(v.val)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], 'str1')
def test_mixed_type_max(self):
query = """MATCH (v:value) RETURN MAX(v.val)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], 10.5)
# Verify that disjoint types pass <> filters
def test_disjoint_comparisons(self):
# Compare all node pairs under a Cartesian product
query = """MATCH (v:value), (w:value) WHERE ID(v) <> ID(w) AND v.val = w.val RETURN v"""
actual_result = redis_graph.query(query)
# No nodes have the same property, so there should be 0 equal results
expected_result_count = 0
self.env.assertEquals(
len(actual_result.result_set), expected_result_count)
query = """MATCH (v:value), (w:value) WHERE ID(v) <> ID(w) AND v.val <> w.val RETURN v"""
actual_result = redis_graph.query(query)
# Every comparison should produce an inequal result
node_count = len(redis_graph.nodes)
        # The node with no 'val' property (a null value) takes no part in the comparison.
expected_result_count = (node_count - 1) * (node_count - 2)
self.env.assertEquals(
len(actual_result.result_set), expected_result_count)
# Verify that AND conditions on true, false, and NULL values evaluate appropriately
def test_AND_truth_tables(self):
# Test two non-NULL values
query = """RETURN true AND true, true AND false, false AND true, false AND false"""
actual_result = redis_graph.query(query)
expected_val = [True, False, False, False] # Truth table for AND
self.env.assertEquals(actual_result.result_set[0], expected_val)
# false AND null == false
query = """RETURN false AND NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], False)
# true AND null == null
query = """RETURN true AND NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], None)
# Test two NULL values
query = """RETURN NULL AND NULL"""
actual_result = redis_graph.query(query)
# AND comparisons with two NULL values evaluate to NULL
self.env.assertEquals(actual_result.result_set[0][0], None)
# Verify that OR conditions on true, false, and NULL values evaluate appropriately
def test_OR_truth_tables(self):
# Test two non-NULL values
query = """RETURN true OR true, true OR false, false OR true, false OR false"""
actual_result = redis_graph.query(query)
expected_val = [True, True, True, False] # Truth table for OR
self.env.assertEquals(actual_result.result_set[0], expected_val)
# false OR null == null
query = """RETURN false OR NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], None)
# true OR null == true
query = """RETURN true OR NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], True)
# null OR null == null
query = """RETURN NULL OR NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], None)
# Verify that XOR conditions on true, false, and NULL values evaluate appropriately
def test_XOR_truth_tables(self):
# Test two non-NULL values
query = """RETURN true XOR true, true XOR false, false XOR true, false XOR false"""
actual_result = redis_graph.query(query)
expected_val = [False, True, True, False] # Truth table for XOR
self.env.assertEquals(actual_result.result_set[0], expected_val)
# Test one NULL value
query = """RETURN true XOR null, false XOR null"""
actual_result = redis_graph.query(query)
# XOR comparisons with one NULL value always evaluate to null
expected_val = [None, None]
self.env.assertEquals(actual_result.result_set[0], expected_val)
# Test two NULL values
query = """RETURN NULL XOR NULL"""
actual_result = redis_graph.query(query)
# XOR comparisons with two NULL values evaluate to NULL
self.env.assertEquals(actual_result.result_set[0][0], None)
# Verify that NOT conditions on true, false, and NULL values evaluate appropriately
def test_NOT_truth_tables(self):
# Test non-NULL values
query = """RETURN NOT true, NOT false"""
actual_result = redis_graph.query(query)
expected_val = [False, True] # Truth table (single-valued) for NOT
self.env.assertEquals(actual_result.result_set[0], expected_val)
# NOT null == null
query = """RETURN NOT NULL"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.result_set[0][0], None)
| 42.546584
| 97
| 0.646423
|
16f0615488dab37019cb2874be31171266439f34
| 3,236
|
py
|
Python
|
profiles_project/settings.py
|
JackHarris1973/profiles-rest-api
|
9868e612d7d01d733524618d47c69128ec3f29e4
|
[
"MIT"
] | null | null | null |
profiles_project/settings.py
|
JackHarris1973/profiles-rest-api
|
9868e612d7d01d733524618d47c69128ec3f29e4
|
[
"MIT"
] | null | null | null |
profiles_project/settings.py
|
JackHarris1973/profiles-rest-api
|
9868e612d7d01d733524618d47c69128ec3f29e4
|
[
"MIT"
] | null | null | null |
"""
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'z)5t)@surh)---(i6ts7#8alu_*ko-#a5!sh$oi4#=qdl65td4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
| 25.68254
| 91
| 0.698702
|
4ff1d43c0dece812b3eaa2f892cb9a31c3edb5ef
| 3,080
|
py
|
Python
|
app/app/settings.py
|
cherylliao/Back
|
c06dc609eb7bc38b881f9f9400e2b9e47a64e46d
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
cherylliao/Back
|
c06dc609eb7bc38b881f9f9400e2b9e47a64e46d
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
cherylliao/Back
|
c06dc609eb7bc38b881f9f9400e2b9e47a64e46d
|
[
"MIT"
] | null | null | null |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.11.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=*un1t8(yx5_atno30_1o3u)r@2^*z(^+8hcmx)n3$g$7g@8km'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| 25.454545
| 91
| 0.694481
|
09360e074db5425dc4eca08ff799449da7477981
| 17,985
|
py
|
Python
|
plugins/modules/panos_facts.py
|
bvaradinov-c/pan-os-ansible
|
554960c9d9e81076020fa1cd8ca8892307fcadc7
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/panos_facts.py
|
bvaradinov-c/pan-os-ansible
|
554960c9d9e81076020fa1cd8ca8892307fcadc7
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/panos_facts.py
|
bvaradinov-c/pan-os-ansible
|
554960c9d9e81076020fa1cd8ca8892307fcadc7
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Tomi Raittinen <tomi.raittinen@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: panos_facts
short_description: Collects facts from PAN-OS devices
description:
- Collects fact information from Palo Alto Networks firewalls and Panorama.
author:
- Tomi Raittinen (@traittinen)
- Garfield Lee Freeman (@shinmog)
- Michael Richardson (@mrichardson03)
notes:
    - Check mode is not supported.
requirements:
- pan-python
version_added: 2.8
extends_documentation_fragment:
- paloaltonetworks.panos.fragments.transitional_provider
options:
host:
description:
- B(Removed)
- Use I(provider) instead.
gather_subset:
description:
- Scopes what information is gathered from the device.
Possible values for this argument include all, system, session,
interfaces, ha, routing, vr, vsys and config. You can specify a
list of values to include a larger subset. Values can also be used
with an initial ! to specify that a specific subset should not be
collected. Panorama only supports the system, ha, and config
subsets.
required: false
default: ['!config']
'''
EXAMPLES = '''
# Gather facts
- name: Get facts
panos_facts:
provider: '{{ provider }}'
gather_subset: ['config']
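
# Gather everything except the device configuration
- name: Get facts without config
  panos_facts:
    provider: '{{ provider }}'
    gather_subset: ['all', '!config']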
'''
RETURN = '''
ansible_net_hostname:
description: Hostname of the local node.
returned: When C(system) is specified in C(gather_subset).
type: str
ansible_net_serial:
description: Serial number of the local node.
returned: When C(system) is specified in C(gather_subset).
type: str
ansible_net_model:
description: Device model of the local node.
returned: When C(system) is specified in C(gather_subset).
type: str
ansible_net_version:
description: PanOS version of the local node.
returned: When C(system) is specified in C(gather_subset).
type: str
ansible_net_uptime:
description: Uptime of the local node.
returned: When C(system) is specified in C(gather_subset).
type: str
sample: 469 days, 19:30:16
ansible_net_full_commit_required:
description: Specifies whether full commit is required to apply changes.
returned: When C(system) is specified in C(gather_subset).
type: bool
ansible_net_uncommitted_changes:
description: Specifies if commit is required to apply changes.
returned: When C(system) is specified in C(gather_subset).
type: bool
ansible_net_multivsys:
description: Specifies whether multivsys mode is enabled on local node.
returned: When C(system) is specified in C(gather_subset).
type: str
sample: on
ansible_net_session_usage:
description: Current number of active sessions on local node
returned: When C(session) is specified in C(gather_subset).
type: int
ansible_net_session_max:
description: Maximum number of sessions on local node.
returned: When C(session) is specified in C(gather_subset).
type: int
ansible_net_pps:
description: Current packets/s throughput.
returned: When C(session) is specified in C(gather_subset).
type: int
ansible_net_kbps:
description: Current kb/s throughput.
returned: When C(session) is specified in C(gather_subset).
type: int
ansible_net_ha_enabled:
description: Specifies whether HA is enabled or not.
returned: When C(ha) is specified in C(gather_subset).
type: bool
ansible_net_ha_localmode:
description: Specifies the HA mode on local node.
returned: When C(ha) is specified in C(gather_subset).
type: str
sample: Active-Passive
ansible_net_ha_localstate:
description: Specifies the HA state on local node.
returned: When C(ha) is specified in C(gather_subset).
type: str
sample: active
ansible_net_config:
    description: Device configuration in XML format.
returned: When C(config) is specified in C(gather_subset).
type: str
ansible_net_interfaces:
description: Network interface information.
returned: When C(interface) is specified in C(gather_subset).
type: complex
contains:
name:
description: Interface name.
type: str
sample: ae1.23
comment:
description: Interface description/comment.
type: str
ip:
description: List of interface IP addresses in CIDR format.
type: list
sample: 192.0.2.1/24
ipv6:
description: List of interface IPv6 addresses in CIDR format.
type: list
sample: 2001:db8::0000:1/64
tag:
description: VLAN tag for the subinterface.
type: int
sample: 23
ansible_net_virtual_routers:
description: Virtual Router information.
returned: When C(vr) is specified in C(gather_subset).
type: complex
contains:
vr_name:
description: Name of the virtual router.
type: str
vr_routerid:
description: BGP router ID.
type: str
sample: 192.0.2.1
vr_asn:
description: BGP autonomous system number.
type: int
sample: 65001
vr_iflist:
description: List interfaces in the VR.
type: list
sample:
- ae2.12
- ae2.14
ansible_net_virtual_systems:
description: Virtual System information.
returned: When C(vsys) is specified in C(gather_subset).
type: complex
contains:
vsys_description:
description: VSYS description/name.
type: str
vsys_id:
description: VSYS ID.
type: int
vsys_name:
description: VSYS name.
            type: str
sample: vsys1
vsys_currentsessions:
description: Number of active sessions on VSYS.
type: int
        vsys_maxsessions:
description: Number of configured maximum sessions on VSYS. 0 for unlimited.
type: int
vsys_vrlist:
description: List of virtual routers attached to the VSYS.
type: list
vsys_iflist:
description: List of interfaces attached to the VSYS.
type: list
vsys_zonelist:
description: List of security zones attached to the VSYS.
type: list
ansible_net_routing_table:
description: Routing Table information.
returned: When C(routing) is specified in C(gather_subset).
type: complex
contains:
age:
description: Age of the route entry in the routing table.
type: str
destination:
description: IP prefix of the destination.
type: str
flags:
description: Flags for the route entry in the routing table.
type: str
interface:
description: Egress interface the router will use to reach the next hop.
type: str
metric:
description: Metric for the route.
type: str
nexthop:
description: Address of the device at the next hop toward the destination network.
type: str
route_table:
description: Unicast or multicast route table.
type: str
virtual_router:
description: Virtual router the route belongs to.
type: str
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.paloaltonetworks.panos.plugins.module_utils.panos import get_connection
from ansible.module_utils.six import iteritems
try:
from pandevice.device import Vsys
from pandevice.errors import PanDeviceError
from pandevice.firewall import Firewall
from pandevice.network import AggregateInterface
from pandevice.network import EthernetInterface
from pandevice.network import Layer3Subinterface
from pandevice.network import Layer2Subinterface
from pandevice.network import IPv6Address
from pandevice.network import VlanInterface
from pandevice.network import LoopbackInterface
from pandevice.network import TunnelInterface
from pandevice.network import VirtualRouter
from pandevice.network import Bgp
from pandevice.network import Zone
except ImportError:
pass
class Factbase(object):
def __init__(self, module, parent):
self.module = module
self.parent = parent
self.facts = dict()
class System(Factbase):
def populate_facts(self):
xapi = self.parent
root = xapi.op('show system info').find('./result/system')
self.facts.update({
'hostname': root.findtext('hostname'),
'model': root.findtext('model'),
'serial': root.findtext('serial'),
'version': root.findtext('sw-version'),
'uptime': root.findtext('uptime'),
'multivsys': root.findtext('multi-vsys')
})
# Check uncommitted changes
result = xapi.op('check pending-changes').find('./result').text
if result == "yes":
uncommitted_changes = True
else:
uncommitted_changes = False
# Check if full commit is required
if uncommitted_changes:
result = xapi.op('check full-commit-required').find('./result').text
if result == "yes":
full_commit_required = True
else:
full_commit_required = False
else:
full_commit_required = False
self.facts.update({
'uncommitted_changes': uncommitted_changes,
'full_commit_required': full_commit_required
})
class Session(Factbase):
def populate_facts(self):
root = self.parent.op('show session info')
self.facts.update({
'session_usage': root.find('./result/num-active').text,
'session_max': root.find('./result/num-max').text,
'pps': root.find('./result/pps').text,
'kbps': root.find('./result/kbps').text
})
class Routing(Factbase):
def populate_facts(self):
entries = self.parent.op('show routing route').findall('./result/entry')
routing_table = [
{route.tag.replace('-', '_'): route.text for route in entry}
for entry in entries
]
self.facts.update({
'routing_table': routing_table
})
class Interfaces(Factbase):
def populate_facts(self):
interfaces = []
cls_types = (AggregateInterface, EthernetInterface, VlanInterface, LoopbackInterface, TunnelInterface)
for cls_type in cls_types:
listing = cls_type.refreshall(self.parent, add=False)
for elm in listing:
iface_info = {
'name': elm.name,
'comment': elm.comment,
'ip': getattr(elm, 'ip', []),
'ipv6': [],
}
for child in elm.children:
if isinstance(child, IPv6Address):
iface_info['ipv6'].append(child.uid)
                elif isinstance(child, (Layer3Subinterface, Layer2Subinterface)):
child_info = {
'name': child.name,
'comment': child.comment,
'tag': child.tag,
'ip': getattr(child, 'ip', []),
'ipv6': [],
}
for sub_child in child.children:
                            if isinstance(sub_child, IPv6Address):
child_info['ipv6'].append(sub_child.name)
interfaces.append(child_info)
interfaces.append(iface_info)
newlist = sorted(interfaces, key=lambda k: k['name'])
self.facts.update({
'interfaces': newlist
})
class Ha(Factbase):
def populate_facts(self):
root = self.parent.op('show high-availability all')
if root.find('./result/enabled').text == 'yes':
ha_enabled = True
ha_localmode = root.find('./result/group/local-info/mode').text
ha_localstate = root.find('./result/group/local-info/state').text
else:
ha_enabled = False
ha_localmode = "standalone"
ha_localstate = "active"
self.facts.update({
'ha_enabled': ha_enabled,
'ha_localmode': ha_localmode,
'ha_localstate': ha_localstate
})
class Vr(Factbase):
def populate_facts(self):
listing = VirtualRouter.refreshall(self.parent, add=False)
virtual_routers = []
for vr in listing:
info = {
'vr_name': vr.name,
'vr_iflist': vr.interface or [],
'vr_asn': None,
'vr_routerid': None,
}
for child in vr.children:
if isinstance(child, Bgp):
info['vr_asn'] = child.local_as
info['vr_routerid'] = child.router_id
virtual_routers.append(info)
self.facts.update({
'virtual_routers': virtual_routers
})
class VsysFacts(Factbase):
def populate_facts(self):
# Get session usage XML
session_root = self.parent.op('show session meter')
# Loop through all VSYS
virtual_systems = []
vsys_list = Vsys.refreshall(self.parent, name_only=True)
for vsys in vsys_list:
for var in ('display_name', 'interface', 'virtual_routers'):
vsys.refresh_variable(var)
zones = [x.name for x in Zone.refreshall(vsys, name_only=True)]
vsys_id = vsys.name[4:]
vsys_sessions = session_root.find(".//entry/[vsys='" + vsys_id + "']")
vsys_currentsessions = vsys_sessions.find('.//current').text
vsys_maxsessions = vsys_sessions.find('.//maximum').text
virtual_systems.append({
'vsys_id': vsys_id,
'vsys_name': vsys.name,
'vsys_description': vsys.display_name,
'vsys_iflist': vsys.interface,
'vsys_vrlist': vsys.virtual_routers,
'vsys_zonelist': zones,
'vsys_maxsessions': vsys_maxsessions,
'vsys_currentsessions': vsys_currentsessions,
})
self.facts.update({
'virtual-systems': virtual_systems
})
class Config(Factbase):
def populate_facts(self):
self.parent.xapi.show()
config = self.parent.xapi.xml_result().encode('utf-8')
self.facts.update({
'config': config
})
FIREWALL_SUBSETS = dict(
system=System,
session=Session,
interfaces=Interfaces,
ha=Ha,
vr=Vr,
vsys=VsysFacts,
config=Config,
routing=Routing,
)
PANORAMA_SUBSETS = dict(
system=System,
ha=Ha,
config=Config,
)
def main():
helper = get_connection(
with_classic_provider_spec=True,
argument_spec=dict(
gather_subset=dict(default=['!config'], type='list'),
# TODO(gfreeman) - remove in a later version.
host=dict(),
),
)
module = AnsibleModule(
argument_spec=helper.argument_spec,
supports_check_mode=False,
required_one_of=helper.required_one_of,
)
# TODO(gfreeman) - remove in a later version.
if module.params['host'] is not None:
module.fail_json(msg='Param "host" is removed; use "provider" instead')
parent = helper.get_pandevice_parent(module)
gather_subset = module.params['gather_subset']
runable_subsets = set()
exclude_subsets = set()
valid_subsets = None
if isinstance(parent, Firewall):
valid_subsets = frozenset(FIREWALL_SUBSETS)
else:
valid_subsets = frozenset(PANORAMA_SUBSETS)
for subset in gather_subset:
if subset == 'all':
runable_subsets.update(valid_subsets)
continue
if subset.startswith('!'):
subset = subset[1:]
if subset == 'all':
exclude_subsets.update(valid_subsets)
continue
exclude = True
else:
exclude = False
if subset not in valid_subsets:
module.fail_json(msg='Subset must be one of [%s], got %s' %
(', '.join(valid_subsets), subset))
if exclude:
exclude_subsets.add(subset)
else:
runable_subsets.add(subset)
if not runable_subsets:
runable_subsets.update(valid_subsets)
runable_subsets.difference_update(exclude_subsets)
runable_subsets.add('system')
facts = dict()
facts['gather_subset'] = list(runable_subsets)
# Create instance classes, e.g. System, Session etc.
instances = list()
for key in runable_subsets:
if isinstance(parent, Firewall):
instances.append(FIREWALL_SUBSETS[key](module, parent))
else:
instances.append(PANORAMA_SUBSETS[key](module, parent))
# Populate facts for instances
for inst in instances:
inst.populate_facts()
facts.update(inst.facts)
ansible_facts = dict()
for key, value in iteritems(facts):
key = 'ansible_net_%s' % key
ansible_facts[key] = value
module.exit_json(ansible_facts=ansible_facts)
if __name__ == '__main__':
main()
| 31.831858
| 110
| 0.61012
|
057024a514fef642fdf17bcf97a7597f247c9355
| 942
|
py
|
Python
|
setup.py
|
mathcube7/swiftex
|
384d586d9a2f24f6b6d4d888b41adfc8683df13f
|
[
"MIT"
] | null | null | null |
setup.py
|
mathcube7/swiftex
|
384d586d9a2f24f6b6d4d888b41adfc8683df13f
|
[
"MIT"
] | null | null | null |
setup.py
|
mathcube7/swiftex
|
384d586d9a2f24f6b6d4d888b41adfc8683df13f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import exists
from setuptools import setup, find_packages
author = 'mathcube'
email = 'mathcube7@gmail.com'
description = 'Easy and fast math typesetting with Python'
name = 'swiftex'
year = '2021'
url = 'https://github.com/mathcube7/swiftex'
version = '0.0.4'
setup(
name=name,
author=author,
author_email=email,
url=url,
version=version,
packages=find_packages(),
package_dir={name: name},
include_package_data=True,
license='MIT',
description=description,
long_description=open('README.md').read() if exists('README.md') else '',
long_description_content_type="text/markdown",
install_requires=['sphinx', 'IPython',
],
python_requires=">=3.6",
classifiers=['Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
platforms=['ALL'],
)
| 26.914286
| 77
| 0.641189
|
a7390e842da2b260bba5d7e81648e01a710a9d51
| 9,548
|
py
|
Python
|
chunkflow/chunk/image/adjust_grey.py
|
fcollman/chunkflow
|
5834d406d4fc3579bae37edfb50a11dc67f4a1a3
|
[
"Apache-2.0"
] | null | null | null |
chunkflow/chunk/image/adjust_grey.py
|
fcollman/chunkflow
|
5834d406d4fc3579bae37edfb50a11dc67f4a1a3
|
[
"Apache-2.0"
] | null | null | null |
chunkflow/chunk/image/adjust_grey.py
|
fcollman/chunkflow
|
5834d406d4fc3579bae37edfb50a11dc67f4a1a3
|
[
"Apache-2.0"
] | null | null | null |
# Shang Mu, January 2019
# No stability consideration is accounted for in implementing this.
# TODO: Need unit tests
# TODO: use the out=None convention of numpy for in-place edits vs
# new copy of array
import numpy as np
from chunkflow.chunk import Chunk
def window_level(img, half_window, level):
r'''
Pixel values equal to level will become 0, level±window/2 will become ±1.
    Mathematically equivalent to rescale(img, level+np.array([-1,1])*half_window, [-1,1]).
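    For example, with half_window=0.5 and level=0.5, input values 0.0, 0.5 and 1.0
    map to -1, 0 and 1 respectively (the operation is in-place on float arrays).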
'''
if half_window <= 0:
raise ValueError('invalid value.')
#img = np.copy(img)
img -= level
img *= 1. / half_window
return img
def rescale(img, old_range, new_range=[-1, 1]):
r'''
Linearly remap pixel values within old_range to the new_range.
For example, from values between [0,1] to values between [-1,1].
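    For instance, rescale(np.array([0., 0.5, 1.]), [0, 1], [-1, 1]) yields
    [-1., 0., 1.] (float arrays are modified in place).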
'''
if np.array_equal(old_range, new_range): # is this even fast in python?
return img
img -= old_range[0]
img *= (new_range[1] - new_range[0]) / (old_range[1] - old_range[0])
img += new_range[0]
return img
def get_voxels_for_stats(img, min_max_invalid=[True, True], debug=False):
min_invalid, max_invalid = min_max_invalid
#TODO clip_percentile = [None,None]
mask = True
if min_invalid:
mi = np.min(img)
mask = img != mi
if max_invalid:
ma = np.max(img)
mask = np.logical_and(mask, img != ma)
#if quantile
if mask is True:
stat_img = img
else:
stat_img = img[mask]
if debug and min_invalid and max_invalid:
print('rawminmax =', mi, ma)
return stat_img
def normalize(img,
method,
target_scale=[-1, 1],
min_max_invalid=[True, True],
invalid_values=[],
clip_percentile=[None, None],
do_clipping=False,
make_copy=True,
debug=False):
    r'''
    Normalize voxel values, either by mean/standard deviation ('meanstd') or by
    linearly mapping the observed min/max onto target_scale ('fill').

    *Assuming floating point voxel values.*
    '''
stat_img = get_voxels_for_stats(img,
min_max_invalid=min_max_invalid,
debug=debug)
if make_copy:
img = np.copy(img)
if stat_img.size == 0:
return img
if method == 1 or method == 'meanstd':
sd = np.std(stat_img)
if sd > 0:
img -= np.mean(stat_img)
img /= sd
if debug:
print('sd=', sd, 'mean=', np.mean(stat_img))
if do_clipping:
img = np.clip(img, -2, 2, img) # 2*std
elif method == 2 or method == 'fill':
mi = np.min(stat_img)
ma = np.max(stat_img)
if debug:
print('minmax =', mi, ma)
img = rescale(img, [mi, ma], new_range=target_scale)
if do_clipping:
img = np.clip(img, *target_scale, img)
return img
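
# Example usage (mirroring the __main__ block at the bottom of this module;
# float dtype is required for the in-place arithmetic):
#   imgs = normalize(imgs.astype(np.float32), 'fill', target_scale=[-1, 1],
#                    min_max_invalid=[True, True], do_clipping=True)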
def adjust_gamma(img, gamma, auto_rescale=False):
r'''
Floating point images only.
Assuming pixel values in the range of [0,1]; values out of the range will be clipped.
    Otherwise, setting auto_rescale=True rescales the image to [0, 1] before
    applying the gamma adjustment (an image with uniform values is left unchanged).
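    For example, adjust_gamma(img, 0.5) brightens midtones (0.25 -> 0.5), while
    adjust_gamma(img, 2.0) darkens them (0.5 -> 0.25).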
'''
#img = np.copy(img)
if auto_rescale:
mi = np.min(img)
ma = np.max(img)
if mi != ma:
img -= mi
img /= ma - mi
img = np.clip(img, 0, 1, img)
img **= gamma
return img
def grey_augment(img,
max_level_change=0.15,
max_window_change=0.15,
max_log2gamma_change=1.0,
level_prob=1.0,
window_prob=0.8,
gamma_prob=0.3,
value_range=[-1, 1],
make_copy=True):
r"""
Performs grey value (histogram) augmentation on img.
Gamma adjustment is applied after adjusting window/level.
value_range: (Expected) range of pixel values of the input image and this is also
what the output image will conform to. The window/level of the pre-augmentation
input image are inferred from this. Values outside the range in the output will be clipped.
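
    A rank-order sanity check for this augmentation is sketched in
    test1_grey_augment() at the bottom of this module.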
"""
if make_copy:
img = np.copy(img)
change_level = np.random.rand() < level_prob
change_window = np.random.rand() < window_prob
change_gamma = np.random.rand() < gamma_prob
level = (value_range[0] + value_range[1]) / 2
half_window = (value_range[1] - value_range[0]) / 2
log2gamma = 0
if change_level:
level += 2 * (np.random.rand() - 0.5) * max_level_change
if change_window:
half_window += 2 * (np.random.rand() - 0.5) * max_window_change / 2
if change_gamma:
log2gamma += 2 * (np.random.rand() - 0.5) * max_log2gamma_change
if change_level or change_window or change_gamma:
if change_gamma:
target_range = [0, 1]
# gamma adjustment only makes sense in [0,1] range in the conventional sense
else:
target_range = value_range
        # level/window adjustment using rescale()
img = rescale(img, level + np.array([-1, 1]) * half_window,
target_range)
img = np.clip(img, *target_range, img)
if change_gamma: # perhaps gamma should always happen before window/level on the raw full range of values?
img = adjust_gamma(img, 2**log2gamma)
# rescale back to original/desired pixel value representation
img = rescale(img, [0, 1], value_range)
return img
def normalize_section_shang(image: Chunk, nominalmin: float,
nominalmax: float, clipvalues: bool):
"""
Parameters
------------
image:
image volume.
nominalmin:
min threshold
nominalmax:
max threshold
clipvalues:
clip values or not.
"""
assert nominalmin < nominalmax
assert image.ndim == 3
global_offset = image.global_offset
originaltype = image.dtype
arr = image.astype(np.float32)
# number of bits per voxel
nbits = np.dtype(originaltype).itemsize * 8
default_nominalmax = float(2**nbits - 1)
nominalmin = nominalmin if nominalmin is not None else 0.0
nominalmax = nominalmax if nominalmax is not None else default_nominalmax
normalization = 'fill'
# stack/chunk-wise normalization first if necessary (for blank slices within a valid stack)
#arr = normalize(arr, normalization, target_scale = [-1,1], min_max_invalid = [True]*2, make_copy=False)
# slice-wise normalization
# Note in chunkflow the first dim is z/slice
for ii in range(arr.shape[0]):
normalize(arr[ii, :, :],
normalization,
target_scale=[nominalmin, nominalmax],
min_max_invalid=[True, True],
do_clipping=clipvalues,
make_copy=False)
# cast to original data type if necessary
#arr = np.round(arr)
#arr = arr.astype(originaltype)
return Chunk(arr, global_offset=global_offset)
def test1_grey_augment():
from numpy.random import randint
x = np.random.rand(*randint(200, size=randint(5)))
print(x.shape)
y = grey_augment(x, value_range=[0, 1])
ind = np.where(np.logical_and(y > 0, y < 1))
return np.array_equal(np.argsort(x[ind]), np.argsort(y[ind]))
if __name__ == "__main__":
import sys
import glob
import tifffile
import ntpath
import os
if len(sys.argv) > 1:
if sys.argv[1] == 'normalize':
pattern, out = sys.argv[2:4]
#all_tiffs = glob.glob("/omniData/TracerTasks/pinky/cell_bodies/BasilData2/RawData/*.tif")
all_tiffs = sorted(glob.glob(pattern))
#print(all_tiffs)
for image_file in all_tiffs:
imgs = tifffile.imread(image_file).astype(np.float32)
print(image_file)
#print(imgs.shape)
if len(imgs.shape) == 2:
imgs = imgs[..., None]
if 0:
normalization = 'meanstd'
old_range = (-2, 2)
elif 1:
normalization = 'fill'
old_range = (-1, 1)
if 1: # stack-wise normalization
imgs = normalize(imgs,
normalization,
target_scale=[-1, 1],
min_max_invalid=[True] * 2,
make_copy=False,
debug=True,
do_clipping=True)
else: # slice-wise normalization
for ii in range(imgs.shape[2]):
normalize(imgs[..., ii],
normalization,
target_scale=[-1, 1],
min_max_invalid=[True, True],
make_copy=False,
debug=True,
do_clipping=True)
imgs = rescale(imgs, old_range=old_range,
new_range=[0, 255]).astype(np.uint8)
fname = out + '/' + ntpath.basename(image_file) + '_norm.tif'
if os.path.exists(fname):
raise IOError('file already exist')
else:
tifffile.imwrite(fname, data=imgs)
| 32.040268
| 115
| 0.556137
|
35793eef09fcdb65906ac466037ca753adc9ee32
| 4,841
|
py
|
Python
|
python/pySimE/space/exp/OrbitalTransferOpt/Poly6th_symbolic.py
|
ProkopHapala/SimpleSimulationEngine
|
240f9b7e85b3a6eda7a27dc15fe3f7b8c08774c5
|
[
"MIT"
] | 26
|
2016-12-04T04:45:12.000Z
|
2022-03-24T09:39:28.000Z
|
python/pySimE/space/exp/OrbitalTransferOpt/Poly6th_symbolic.py
|
Aki78/FlightAI
|
9c5480f2392c9c89b9fee4902db0c4cde5323a6c
|
[
"MIT"
] | null | null | null |
python/pySimE/space/exp/OrbitalTransferOpt/Poly6th_symbolic.py
|
Aki78/FlightAI
|
9c5480f2392c9c89b9fee4902db0c4cde5323a6c
|
[
"MIT"
] | 2
|
2019-02-09T12:31:06.000Z
|
2019-04-28T02:24:50.000Z
|
#!/usr/bin/env python
from sympy import *
from sympy.matrices import Matrix
print
print " ==== Solution of 6th order polygon coeficients ==== "
p0, p1, v0, v1, a0,a1,t = symbols('p0 p1 v0 v1 a0 a1 t')
" 1 t t2 t3 t4 t5 "
A = Matrix([
[ 1, 0, 0, 0, 0, 0 ], # p0
[ 1, 1, 1, 1, 1, 1 ], # p1
[ 0, 1, 0, 0, 0, 0 ], # v0
[ 0, 1, 2, 3, 4, 5 ], # v1
[ 0, 0, 2, 0, 0, 0 ], # a0
[ 0, 0, 2, 6, 12, 20 ]]) # a1
b = Matrix(6,1,[p0,p1,v0,v1,a0,a1])
CP = A.LUsolve(b)
print "Coefficients "
print CP
print
print " ==== Make A4 A5 zero ==== "
A = Matrix([
[ 1.5,-1.0 ],
[ -0.5, 0.5 ]])
b = Matrix(2,1,[
-(15*p0 - 15*p1 + 8*v0 + 7*v1),
-(-6*p0 + 6*p1 - 3*v0 - 3*v1)])
c = A.LUsolve(b)
print " Coefficients: "
print c
print
print " ==== Make Derivative of accelaration zero === "
A3 = -1.5*a0 + 0.5*a1 - 10*p0 + 10*p1 - 6*v0 - 4*v1
A4 = 1.5*a0 - a1 + 15*p0 - 15*p1 + 8*v0 + 7*v1
A5 = -0.5*a0 + 0.5*a1 - 6*p0 + 6*p1 - 3*v0 - 3*v1
#Eq1 = 6*A3
#Eq2 = 6*A3 + 24*A4 + 60*A5
Eq1 = 6*CP[3]
Eq2 = 6*CP[3] + 24*CP[4] + 60*CP[5]
aa = solve( [Eq1,Eq2],[a0,a1] )
print aa
print
print " ==== Polynom itselfs === "
P = CP[0] + CP[1]*t + CP[2]*t**2 + CP[3]*t**3 + CP[4]*t**4 + CP[5]*t**5
dP  = CP[1] + 2*CP[2]*t + 3*CP[3]*t**2 + 4*CP[4]*t**3 + 5*CP[5]*t**4
ddP = 2*CP[2] + 6*CP[3]*t + 12*CP[4]*t**2 + 20*CP[5]*t**3
print "P = ",P
print
print "dP = ",dP
print
print "ddP = ",ddP
print
print " ==== Variational derivatives of gravitational acclearation === "
print " :: F = 1/r^2 ; r = P(t) "
print " :: dF(t)/dCi = -2/(P(t)^3) * dP/dCi "
dP_da0 = diff( P, a0, 1)
print "dP/da0 = ", dP_da0
dP_da1 = diff( P, a1, 1)
print "dP/da1 = ", dP_da1
print
print " ==== Variational derivatives of kinematic acclearation in radial coordinates === "
# http://en.wikipedia.org/wiki/Polar_coordinate_system Vector calculus
# X =
# dX = dR*uR + R*dPhi*uPhi
# ddX = ( ddR + R*dPhi**2 )*uR + R*ddPhi + 2* dR * dPhi
print " F_Phi = R * ddPhi + 2 * dR * dPhi "
print " F_R = R * dPhi**2 + ddR "
ap0, ap1, ar0, ar1 = symbols('ap0, ap1, ar0, ar1')
Phi = Function('Phi' )(ap0,ap1)
dPhi = Function('dPhi' )(ap0,ap1)
ddPhi = Function('ddPhi')(ap0,ap1)
R = Function('R' )(ar0,ar1)
dR = Function('dR' )(ar0,ar1)
ddR = Function('ddR')(ar0,ar1)
F_Phi = R * ddPhi + 2 * dR * dPhi
F_R = R * dPhi**2 + ddR
print
print " F_Phi = ", F_Phi
print
print " d F_Phi / ap0 = "
#pprint ( diff( F_Phi, ap0) )
print diff( F_Phi, ap0)
print " d F_Phi / ar0 = "
#pprint ( diff( F_Phi, ar0))
print diff( F_Phi, ar0)
print
print " F_Phi = ", F_Phi
print
print " d F_R / ap0 = "
#pprint ( diff( F_R, ap0) )
print diff( F_R, ap0)
print " d F_R / ar0 = "
#pprint ( diff( F_R, ar0) )
print diff( F_R, ar0)
diff_P_a0 = diff( P, a0)
diff_P_a1 = diff( P, a1)
diff_dP_a0 = diff( dP, a0)
diff_dP_a1 = diff( dP, a1)
diff_ddP_a0 = diff( ddP, a0)
diff_ddP_a1 = diff( ddP, a1)
print
print " diff_P_a0 = ",diff_P_a0
print
print " diff_P_a1 = ",diff_P_a1
print
print " diff_dP_a0 = ",diff_dP_a0
print
print " diff_dP_a1 = ",diff_dP_a1
print
print " diff_ddP_a0 = ",diff_ddP_a0
print
print " diff_ddP_a1 = ",diff_ddP_a1
'''
print
print " ==== FULL EXAPNSION of kinematic variational derivatives === "
h0,h1,dh0,dh1,ddh0,ddh1 = symbols('h0 h1 dh0 dh1 ddh0 ddh1')
r0,r1,dr0,dr1,ddr0,ddr1 = symbols('r0 r1 dr0 dr1 ddr0 ddr1')
listP = [ p0 ,p1 ,v0 ,v1 ,a0 ,a1 ]
listPhi = [ h0 ,h1 ,dh0 ,dh1 ,ddh0 ,ddh1 ]
listR = [ r0 ,r1 ,dr0 ,dr1 ,ddr0 ,ddr1 ]
PhiP = P.subs( listP[0], listPhi[0] )
for i in range(1,len(listPhi)):
PhiP = PhiP.subs( listP[i], listPhi[i] )
RP = P.subs( listP[0], listR[0] )
for i in range(1,len(listR)):
RP = RP.subs( listP[i], listR[i] )
dPhiP = diff( PhiP, t, 1)
ddPhiP = diff( dPhiP, t, 1)
dRP = diff( RP, t, 1)
ddRP = diff( dRP, t, 1)
F_Phi = RP * ddPhiP + 2*dRP * dPhiP
F_R = RP * dPhiP**2 + ddRP
print
print " ===== Derivatives of F_Phi "
dFPhi_ddh0 = diff( F_Phi, ddh0, 1)
dFPhi_ddh1 = diff( F_Phi, ddh1, 1)
dFPhi_ddr0 = diff( F_Phi, ddr0, 1)
dFPhi_ddr1 = diff( F_Phi, ddr1, 1)
print
print " dFPhi_ddh0 = ",dFPhi_ddh0
print
print " dFPhi_ddh1 = ",dFPhi_ddh1
print
print " dFPhi_ddr0 = ",dFPhi_ddr0
print
print " dFPhi_ddr1 = ",dFPhi_ddr1
print
print " ===== Derivatives of F_R "
dFR_ddh0 = diff( F_R , ddh0, 1)
dFR_ddh1 = diff( F_R , ddh1, 1)
dFR_ddr0 = diff( F_R , ddr0, 1)
dFR_ddr1 = diff( F_R , ddr1, 1)
print
print " dFR_ddh0 = ",dFR_ddh0
print
print " dFR_ddh1 = ",dFR_ddh1
print
print " dFR_ddr0 = ",dFR_ddr0
print
print " dFR_ddr1 = ",dFR_ddr1
'''
| 23.5
| 90
| 0.545755
|
ddc835c6da3483b1e93f1fbf9227a4fb76b33ab3
| 756
|
py
|
Python
|
manage.py
|
sheillanjoroge/Blogs-Que
|
0887f3d87897898c8140fe30b1b0dca7d7340ee1
|
[
"Unlicense"
] | 1
|
2020-12-14T09:18:57.000Z
|
2020-12-14T09:18:57.000Z
|
manage.py
|
sheillanjoroge/Blogs-Que
|
0887f3d87897898c8140fe30b1b0dca7d7340ee1
|
[
"Unlicense"
] | null | null | null |
manage.py
|
sheillanjoroge/Blogs-Que
|
0887f3d87897898c8140fe30b1b0dca7d7340ee1
|
[
"Unlicense"
] | null | null | null |
from app import create_app,db
from flask_script import Manager,Server
from app.models import User,Post
from flask_migrate import Migrate, MigrateCommand
# Creating app instance
app = create_app('production')
# app = create_app('production')
manager = Manager(app)
manager.add_command('server',Server)
migrate = Migrate(app,db)
manager.add_command('db',MigrateCommand)
@manager.command
def test():
"""Run the unit tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
@manager.shell
def make_shell_context():
return dict(app = app,db = db,User = User, Post = Post )
if __name__ == '__main__':
manager.run()
| 22.909091
| 60
| 0.723545
|
d80cb7a47cf34e3b4f1dfcdca3427661e174e9ca
| 28,732
|
py
|
Python
|
python-package/lightgbm/engine.py
|
AIshb/LightGBM
|
486f6de4847667c8b8622009f93ddeab02c24466
|
[
"MIT"
] | null | null | null |
python-package/lightgbm/engine.py
|
AIshb/LightGBM
|
486f6de4847667c8b8622009f93ddeab02c24466
|
[
"MIT"
] | null | null | null |
python-package/lightgbm/engine.py
|
AIshb/LightGBM
|
486f6de4847667c8b8622009f93ddeab02c24466
|
[
"MIT"
] | 1
|
2020-07-23T15:27:08.000Z
|
2020-07-23T15:27:08.000Z
|
# coding: utf-8
"""Library with training routines of LightGBM."""
from __future__ import absolute_import
import collections
import copy
import warnings
from operator import attrgetter
import numpy as np
from . import callback
from .basic import Booster, Dataset, LightGBMError, _ConfigAliases, _InnerPredictor
from .compat import (SKLEARN_INSTALLED, _LGBMGroupKFold, _LGBMStratifiedKFold,
string_type, integer_types, range_, zip_)
def train(params, train_set, num_boost_round=100,
valid_sets=None, valid_names=None,
fobj=None, feval=None, init_model=None,
feature_name='auto', categorical_feature='auto',
early_stopping_rounds=None, evals_result=None,
verbose_eval=True, learning_rates=None,
keep_training_booster=False, callbacks=None):
"""Perform the training with given parameters.
Parameters
----------
params : dict
Parameters for training.
train_set : Dataset
Data to be trained on.
num_boost_round : int, optional (default=100)
Number of boosting iterations.
valid_sets : list of Datasets or None, optional (default=None)
List of data to be evaluated on during training.
valid_names : list of strings or None, optional (default=None)
Names of ``valid_sets``.
fobj : callable or None, optional (default=None)
Customized objective function.
Should accept two parameters: preds, train_data,
and return (grad, hess).
preds : list or numpy 1-D array
The predicted values.
train_data : Dataset
The training dataset.
grad : list or numpy 1-D array
The value of the first order derivative (gradient) for each sample point.
hess : list or numpy 1-D array
The value of the second order derivative (Hessian) for each sample point.
            For the binary task, preds is the raw margin.
            For the multi-class task, preds is grouped by class_id first, then by row_id.
            To get the i-th row's prediction for the j-th class, access
            score[j * num_data + i], and grad and hess should be grouped the same way.
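
        .. rubric:: Example

        A minimal squared-error objective (a sketch; ``l2_objective`` is a
        hypothetical name and assumes raw-margin preds with labels taken from
        ``train_data.get_label()``)::

            def l2_objective(preds, train_data):
                grad = preds - train_data.get_label()
                hess = np.ones_like(preds)
                return grad, hess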
feval : callable or None, optional (default=None)
Customized evaluation function.
Should accept two parameters: preds, train_data,
and return (eval_name, eval_result, is_higher_better) or list of such tuples.
preds : list or numpy 1-D array
The predicted values.
train_data : Dataset
The training dataset.
eval_name : string
The name of evaluation function (without whitespaces).
eval_result : float
The eval result.
is_higher_better : bool
Is eval result higher better, e.g. AUC is ``is_higher_better``.
            For the binary task, preds is the probability of the positive class
            (or the raw margin when ``fobj`` is specified).
            For the multi-class task, preds is grouped by class_id first, then by row_id.
            To get the i-th row's prediction for the j-th class, access preds[j * num_data + i].
To ignore the default metric corresponding to the used objective,
set the ``metric`` parameter to the string ``"None"`` in ``params``.
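
        .. rubric:: Example

        A minimal RMSE metric (a sketch; ``rmse_metric`` is a hypothetical
        name, and ``False`` marks the metric as lower-is-better)::

            def rmse_metric(preds, train_data):
                rmse = np.sqrt(np.mean((preds - train_data.get_label()) ** 2))
                return 'rmse', rmse, False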
init_model : string, Booster or None, optional (default=None)
Filename of LightGBM model or Booster instance used for continue training.
feature_name : list of strings or 'auto', optional (default="auto")
Feature names.
If 'auto' and data is pandas DataFrame, data columns names are used.
categorical_feature : list of strings or int, or 'auto', optional (default="auto")
Categorical features.
If list of int, interpreted as indices.
If list of strings, interpreted as feature names (need to specify ``feature_name`` as well).
If 'auto' and data is pandas DataFrame, pandas unordered categorical columns are used.
All values in categorical features should be less than int32 max value (2147483647).
Large values could be memory consuming. Consider using consecutive integers starting from zero.
All negative values in categorical features will be treated as missing values.
The output cannot be monotonically constrained with respect to a categorical feature.
early_stopping_rounds : int or None, optional (default=None)
Activates early stopping. The model will train until the validation score stops improving.
Validation score needs to improve at least every ``early_stopping_rounds`` round(s)
to continue training.
Requires at least one validation data and one metric.
            If there's more than one, all of them will be checked, but the training data is always ignored.
To check only the first metric, set the ``first_metric_only`` parameter to ``True`` in ``params``.
The index of iteration that has the best performance will be saved in the ``best_iteration`` field
if early stopping logic is enabled by setting ``early_stopping_rounds``.
    evals_result : dict or None, optional (default=None)
This dictionary used to store all evaluation results of all the items in ``valid_sets``.
.. rubric:: Example
With a ``valid_sets`` = [valid_set, train_set],
``valid_names`` = ['eval', 'train']
and a ``params`` = {'metric': 'logloss'}
returns {'train': {'logloss': ['0.48253', '0.35953', ...]},
'eval': {'logloss': ['0.480385', '0.357756', ...]}}.
verbose_eval : bool or int, optional (default=True)
Requires at least one validation data.
If True, the eval metric on the valid set is printed at each boosting stage.
If int, the eval metric on the valid set is printed at every ``verbose_eval`` boosting stage.
The last boosting stage or the boosting stage found by using ``early_stopping_rounds`` is also printed.
.. rubric:: Example
With ``verbose_eval`` = 4 and at least one item in ``valid_sets``,
an evaluation metric is printed every 4 (instead of 1) boosting stages.
learning_rates : list, callable or None, optional (default=None)
List of learning rates for each boosting round
or a customized function that calculates ``learning_rate``
in terms of current number of round (e.g. yields learning rate decay).
keep_training_booster : bool, optional (default=False)
Whether the returned Booster will be used to keep training.
If False, the returned value will be converted into _InnerPredictor before returning.
You can still use _InnerPredictor as ``init_model`` for future continue training.
callbacks : list of callables or None, optional (default=None)
List of callback functions that are applied at each iteration.
See Callbacks in Python API for more information.
Returns
-------
booster : Booster
The trained Booster model.
"""
# create predictor first
params = copy.deepcopy(params)
if fobj is not None:
for obj_alias in _ConfigAliases.get("objective"):
params.pop(obj_alias, None)
params['objective'] = 'none'
for alias in _ConfigAliases.get("num_iterations"):
if alias in params:
num_boost_round = params.pop(alias)
warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
params["num_iterations"] = num_boost_round
for alias in _ConfigAliases.get("early_stopping_round"):
if alias in params:
early_stopping_rounds = params.pop(alias)
warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
params["early_stopping_round"] = early_stopping_rounds
first_metric_only = params.get('first_metric_only', False)
if num_boost_round <= 0:
raise ValueError("num_boost_round should be greater than zero.")
if isinstance(init_model, string_type):
predictor = _InnerPredictor(model_file=init_model, pred_parameter=params)
elif isinstance(init_model, Booster):
predictor = init_model._to_predictor(dict(init_model.params, **params))
else:
predictor = None
init_iteration = predictor.num_total_iteration if predictor is not None else 0
# check dataset
if not isinstance(train_set, Dataset):
raise TypeError("Training only accepts Dataset object")
train_set._update_params(params) \
._set_predictor(predictor) \
.set_feature_name(feature_name) \
.set_categorical_feature(categorical_feature)
is_valid_contain_train = False
train_data_name = "training"
reduced_valid_sets = []
name_valid_sets = []
if valid_sets is not None:
if isinstance(valid_sets, Dataset):
valid_sets = [valid_sets]
if isinstance(valid_names, string_type):
valid_names = [valid_names]
for i, valid_data in enumerate(valid_sets):
            # reduce cost: evaluate the training data in place instead of duplicating it
if valid_data is train_set:
is_valid_contain_train = True
if valid_names is not None:
train_data_name = valid_names[i]
continue
if not isinstance(valid_data, Dataset):
raise TypeError("Training only accepts Dataset object")
reduced_valid_sets.append(valid_data._update_params(params).set_reference(train_set))
if valid_names is not None and len(valid_names) > i:
name_valid_sets.append(valid_names[i])
else:
name_valid_sets.append('valid_' + str(i))
# process callbacks
if callbacks is None:
callbacks = set()
else:
for i, cb in enumerate(callbacks):
cb.__dict__.setdefault('order', i - len(callbacks))
callbacks = set(callbacks)
    # Most of the legacy advanced options become callbacks
if verbose_eval is True:
callbacks.add(callback.print_evaluation())
elif isinstance(verbose_eval, integer_types):
callbacks.add(callback.print_evaluation(verbose_eval))
if early_stopping_rounds is not None and early_stopping_rounds > 0:
callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=bool(verbose_eval)))
if learning_rates is not None:
callbacks.add(callback.reset_parameter(learning_rate=learning_rates))
if evals_result is not None:
callbacks.add(callback.record_evaluation(evals_result))
callbacks_before_iter = {cb for cb in callbacks if getattr(cb, 'before_iteration', False)}
callbacks_after_iter = callbacks - callbacks_before_iter
callbacks_before_iter = sorted(callbacks_before_iter, key=attrgetter('order'))
callbacks_after_iter = sorted(callbacks_after_iter, key=attrgetter('order'))
# construct booster
try:
booster = Booster(params=params, train_set=train_set)
if is_valid_contain_train:
booster.set_train_data_name(train_data_name)
for valid_set, name_valid_set in zip_(reduced_valid_sets, name_valid_sets):
booster.add_valid(valid_set, name_valid_set)
finally:
train_set._reverse_update_params()
for valid_set in reduced_valid_sets:
valid_set._reverse_update_params()
booster.best_iteration = 0
# start training
for i in range_(init_iteration, init_iteration + num_boost_round):
for cb in callbacks_before_iter:
cb(callback.CallbackEnv(model=booster,
params=params,
iteration=i,
begin_iteration=init_iteration,
end_iteration=init_iteration + num_boost_round,
evaluation_result_list=None))
booster.update(fobj=fobj)
evaluation_result_list = []
# check evaluation result.
if valid_sets is not None:
if is_valid_contain_train:
evaluation_result_list.extend(booster.eval_train(feval))
evaluation_result_list.extend(booster.eval_valid(feval))
try:
for cb in callbacks_after_iter:
cb(callback.CallbackEnv(model=booster,
params=params,
iteration=i,
begin_iteration=init_iteration,
end_iteration=init_iteration + num_boost_round,
evaluation_result_list=evaluation_result_list))
except callback.EarlyStopException as earlyStopException:
booster.best_iteration = earlyStopException.best_iteration + 1
evaluation_result_list = earlyStopException.best_score
break
booster.best_score = collections.defaultdict(collections.OrderedDict)
for dataset_name, eval_name, score, _ in evaluation_result_list:
booster.best_score[dataset_name][eval_name] = score
if not keep_training_booster:
booster.model_from_string(booster.model_to_string(), False).free_dataset()
return booster
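# Illustrative usage sketch (an editorial addition, not part of LightGBM
# itself); all data names below are assumptions.
def _example_train_usage(X_train, y_train, X_val, y_val):
    """Sketch of a typical train() call; never invoked by library code."""
    train_data = Dataset(X_train, label=y_train)
    valid_data = Dataset(X_val, label=y_val, reference=train_data)
    evals_result = {}
    booster = train(
        {'objective': 'binary', 'metric': 'binary_logloss'},
        train_data,
        num_boost_round=100,
        valid_sets=[valid_data],
        valid_names=['eval'],
        early_stopping_rounds=10,
        evals_result=evals_result,  # per-iteration metrics are recorded here
    )
    return booster, evals_result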
class _CVBooster(object):
"""Auxiliary data struct to hold all boosters of CV."""
def __init__(self):
self.boosters = []
self.best_iteration = -1
def append(self, booster):
"""Add a booster to _CVBooster."""
self.boosters.append(booster)
def __getattr__(self, name):
"""Redirect methods call of _CVBooster."""
def handler_function(*args, **kwargs):
"""Call methods with each booster, and concatenate their results."""
ret = []
for booster in self.boosters:
ret.append(getattr(booster, name)(*args, **kwargs))
return ret
return handler_function
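# Illustrative sketch (an editorial addition, not part of LightGBM itself):
# how the __getattr__ redirection above fans one method call out to every
# fold's booster. `fold_a`, `fold_b`, and `X_test` are assumed inputs.
def _example_cvbooster_fanout(fold_a, fold_b, X_test):
    cvb = _CVBooster()
    cvb.append(fold_a)
    cvb.append(fold_b)
    # The attribute lookup builds a handler that calls .predict() on each
    # stored booster and returns the per-fold results as a list.
    return cvb.predict(X_test)  # [fold_a.predict(X_test), fold_b.predict(X_test)]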
def _make_n_folds(full_data, folds, nfold, params, seed, fpreproc=None, stratified=True,
shuffle=True, eval_train_metric=False):
"""Make a n-fold list of Booster from random indices."""
full_data = full_data.construct()
num_data = full_data.num_data()
if folds is not None:
if not hasattr(folds, '__iter__') and not hasattr(folds, 'split'):
raise AttributeError("folds should be a generator or iterator of (train_idx, test_idx) tuples "
"or scikit-learn splitter object with split method")
if hasattr(folds, 'split'):
group_info = full_data.get_group()
if group_info is not None:
group_info = np.array(group_info, dtype=np.int32, copy=False)
flatted_group = np.repeat(range_(len(group_info)), repeats=group_info)
else:
flatted_group = np.zeros(num_data, dtype=np.int32)
folds = folds.split(X=np.zeros(num_data), y=full_data.get_label(), groups=flatted_group)
else:
if any(params.get(obj_alias, "") in {"lambdarank", "rank_xendcg", "xendcg",
"xe_ndcg", "xe_ndcg_mart", "xendcg_mart"}
for obj_alias in _ConfigAliases.get("objective")):
if not SKLEARN_INSTALLED:
raise LightGBMError('Scikit-learn is required for ranking cv.')
# ranking task, split according to groups
group_info = np.array(full_data.get_group(), dtype=np.int32, copy=False)
flatted_group = np.repeat(range_(len(group_info)), repeats=group_info)
group_kfold = _LGBMGroupKFold(n_splits=nfold)
folds = group_kfold.split(X=np.zeros(num_data), groups=flatted_group)
elif stratified:
if not SKLEARN_INSTALLED:
raise LightGBMError('Scikit-learn is required for stratified cv.')
skf = _LGBMStratifiedKFold(n_splits=nfold, shuffle=shuffle, random_state=seed)
folds = skf.split(X=np.zeros(num_data), y=full_data.get_label())
else:
if shuffle:
randidx = np.random.RandomState(seed).permutation(num_data)
else:
randidx = np.arange(num_data)
kstep = int(num_data / nfold)
test_id = [randidx[i: i + kstep] for i in range_(0, num_data, kstep)]
train_id = [np.concatenate([test_id[i] for i in range_(nfold) if k != i]) for k in range_(nfold)]
folds = zip_(train_id, test_id)
ret = _CVBooster()
for train_idx, test_idx in folds:
train_set = full_data.subset(sorted(train_idx))
valid_set = full_data.subset(sorted(test_idx))
# run preprocessing on the data set if needed
if fpreproc is not None:
train_set, valid_set, tparam = fpreproc(train_set, valid_set, params.copy())
else:
tparam = params
cvbooster = Booster(tparam, train_set)
if eval_train_metric:
cvbooster.add_valid(train_set, 'train')
cvbooster.add_valid(valid_set, 'valid')
ret.append(cvbooster)
return ret
def _agg_cv_result(raw_results, eval_train_metric=False):
"""Aggregate cross-validation results."""
cvmap = collections.OrderedDict()
metric_type = {}
for one_result in raw_results:
for one_line in one_result:
if eval_train_metric:
key = "{} {}".format(one_line[0], one_line[1])
else:
key = one_line[1]
metric_type[key] = one_line[3]
cvmap.setdefault(key, [])
cvmap[key].append(one_line[2])
return [('cv_agg', k, np.mean(v), metric_type[k], np.std(v)) for k, v in cvmap.items()]
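# Illustrative sketch (an editorial addition, not part of LightGBM itself):
# the shape of data flowing through _agg_cv_result -- per-fold evaluation
# tuples in, one ('cv_agg', metric, mean, is_higher_better, std) tuple out.
def _example_agg_cv_result():
    raw = [
        [('valid', 'auc', 0.90, True)],  # fold 1
        [('valid', 'auc', 0.92, True)],  # fold 2
    ]
    return _agg_cv_result(raw)  # -> [('cv_agg', 'auc', 0.91, True, 0.01)]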
def cv(params, train_set, num_boost_round=100,
folds=None, nfold=5, stratified=True, shuffle=True,
metrics=None, fobj=None, feval=None, init_model=None,
feature_name='auto', categorical_feature='auto',
early_stopping_rounds=None, fpreproc=None,
verbose_eval=None, show_stdv=True, seed=0,
callbacks=None, eval_train_metric=False):
"""Perform the cross-validation with given paramaters.
Parameters
----------
params : dict
Parameters for Booster.
train_set : Dataset
Data to be trained on.
num_boost_round : int, optional (default=100)
Number of boosting iterations.
folds : generator or iterator of (train_idx, test_idx) tuples, scikit-learn splitter object or None, optional (default=None)
If generator or iterator, it should yield the train and test indices for each fold.
If object, it should be one of the scikit-learn splitter classes
(https://scikit-learn.org/stable/modules/classes.html#splitter-classes)
and have ``split`` method.
This argument has highest priority over other data split arguments.
nfold : int, optional (default=5)
Number of folds in CV.
stratified : bool, optional (default=True)
Whether to perform stratified sampling.
shuffle : bool, optional (default=True)
Whether to shuffle before splitting data.
metrics : string, list of strings or None, optional (default=None)
Evaluation metrics to be monitored while CV.
If not None, the metric in ``params`` will be overridden.
fobj : callable or None, optional (default=None)
Customized objective function.
Should accept two parameters: preds, train_data,
and return (grad, hess).
preds : list or numpy 1-D array
The predicted values.
train_data : Dataset
The training dataset.
grad : list or numpy 1-D array
The value of the first order derivative (gradient) for each sample point.
hess : list or numpy 1-D array
The value of the second order derivative (Hessian) for each sample point.
For binary task, the preds is margin.
For multi-class task, the preds is group by class_id first, then group by row_id.
If you want to get i-th row preds in j-th class, the access way is score[j * num_data + i]
and you should group grad and hess in this way as well.
feval : callable or None, optional (default=None)
Customized evaluation function.
Should accept two parameters: preds, train_data,
and return (eval_name, eval_result, is_higher_better) or list of such tuples.
preds : list or numpy 1-D array
The predicted values.
train_data : Dataset
The training dataset.
eval_name : string
The name of evaluation function (without whitespaces).
eval_result : float
The eval result.
is_higher_better : bool
Is eval result higher better, e.g. AUC is ``is_higher_better``.
For binary task, the preds is probability of positive class (or margin in case of specified ``fobj``).
For multi-class task, the preds is group by class_id first, then group by row_id.
If you want to get i-th row preds in j-th class, the access way is preds[j * num_data + i].
To ignore the default metric corresponding to the used objective,
set ``metrics`` to the string ``"None"``.
init_model : string, Booster or None, optional (default=None)
Filename of LightGBM model or Booster instance used for continue training.
feature_name : list of strings or 'auto', optional (default="auto")
Feature names.
If 'auto' and data is pandas DataFrame, data columns names are used.
categorical_feature : list of strings or int, or 'auto', optional (default="auto")
Categorical features.
If list of int, interpreted as indices.
If list of strings, interpreted as feature names (need to specify ``feature_name`` as well).
If 'auto' and data is pandas DataFrame, pandas unordered categorical columns are used.
All values in categorical features should be less than int32 max value (2147483647).
Large values could be memory consuming. Consider using consecutive integers starting from zero.
All negative values in categorical features will be treated as missing values.
The output cannot be monotonically constrained with respect to a categorical feature.
early_stopping_rounds : int or None, optional (default=None)
Activates early stopping.
CV score needs to improve at least every ``early_stopping_rounds`` round(s)
to continue.
Requires at least one metric. If there's more than one, will check all of them.
To check only the first metric, set the ``first_metric_only`` parameter to ``True`` in ``params``.
Last entry in evaluation history is the one from the best iteration.
fpreproc : callable or None, optional (default=None)
Preprocessing function that takes (dtrain, dtest, params)
and returns transformed versions of those.
verbose_eval : bool, int, or None, optional (default=None)
Whether to display the progress.
If None, progress will be displayed when np.ndarray is returned.
If True, progress will be displayed at every boosting stage.
If int, progress will be displayed at every given ``verbose_eval`` boosting stage.
show_stdv : bool, optional (default=True)
Whether to display the standard deviation in progress.
Results are not affected by this parameter, and always contain std.
seed : int, optional (default=0)
Seed used to generate the folds (passed to numpy.random.seed).
callbacks : list of callables or None, optional (default=None)
List of callback functions that are applied at each iteration.
See Callbacks in Python API for more information.
eval_train_metric : bool, optional (default=False)
Whether to display the train metric in progress.
The score of the metric is calculated again after each training step, so there is some impact on performance.
Returns
-------
eval_hist : dict
Evaluation history.
The dictionary has the following format:
{'metric1-mean': [values], 'metric1-stdv': [values],
'metric2-mean': [values], 'metric2-stdv': [values],
...}.
"""
if not isinstance(train_set, Dataset):
raise TypeError("Training only accepts Dataset object")
params = copy.deepcopy(params)
if fobj is not None:
for obj_alias in _ConfigAliases.get("objective"):
params.pop(obj_alias, None)
params['objective'] = 'none'
for alias in _ConfigAliases.get("num_iterations"):
if alias in params:
warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
num_boost_round = params.pop(alias)
params["num_iterations"] = num_boost_round
for alias in _ConfigAliases.get("early_stopping_round"):
if alias in params:
warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))
early_stopping_rounds = params.pop(alias)
params["early_stopping_round"] = early_stopping_rounds
first_metric_only = params.get('first_metric_only', False)
if num_boost_round <= 0:
raise ValueError("num_boost_round should be greater than zero.")
if isinstance(init_model, string_type):
predictor = _InnerPredictor(model_file=init_model, pred_parameter=params)
elif isinstance(init_model, Booster):
predictor = init_model._to_predictor(dict(init_model.params, **params))
else:
predictor = None
if metrics is not None:
for metric_alias in _ConfigAliases.get("metric"):
params.pop(metric_alias, None)
params['metric'] = metrics
train_set._update_params(params) \
._set_predictor(predictor) \
.set_feature_name(feature_name) \
.set_categorical_feature(categorical_feature)
results = collections.defaultdict(list)
cvfolds = _make_n_folds(train_set, folds=folds, nfold=nfold,
params=params, seed=seed, fpreproc=fpreproc,
stratified=stratified, shuffle=shuffle,
eval_train_metric=eval_train_metric)
# setup callbacks
if callbacks is None:
callbacks = set()
else:
for i, cb in enumerate(callbacks):
cb.__dict__.setdefault('order', i - len(callbacks))
callbacks = set(callbacks)
if early_stopping_rounds is not None and early_stopping_rounds > 0:
callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=False))
if verbose_eval is True:
callbacks.add(callback.print_evaluation(show_stdv=show_stdv))
elif isinstance(verbose_eval, integer_types):
callbacks.add(callback.print_evaluation(verbose_eval, show_stdv=show_stdv))
callbacks_before_iter = {cb for cb in callbacks if getattr(cb, 'before_iteration', False)}
callbacks_after_iter = callbacks - callbacks_before_iter
callbacks_before_iter = sorted(callbacks_before_iter, key=attrgetter('order'))
callbacks_after_iter = sorted(callbacks_after_iter, key=attrgetter('order'))
for i in range_(num_boost_round):
for cb in callbacks_before_iter:
cb(callback.CallbackEnv(model=cvfolds,
params=params,
iteration=i,
begin_iteration=0,
end_iteration=num_boost_round,
evaluation_result_list=None))
cvfolds.update(fobj=fobj)
res = _agg_cv_result(cvfolds.eval_valid(feval), eval_train_metric)
for _, key, mean, _, std in res:
results[key + '-mean'].append(mean)
results[key + '-stdv'].append(std)
try:
for cb in callbacks_after_iter:
cb(callback.CallbackEnv(model=cvfolds,
params=params,
iteration=i,
begin_iteration=0,
end_iteration=num_boost_round,
evaluation_result_list=res))
except callback.EarlyStopException as earlyStopException:
cvfolds.best_iteration = earlyStopException.best_iteration + 1
for k in results:
results[k] = results[k][:cvfolds.best_iteration]
break
return dict(results)
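# Illustrative usage sketch (an editorial addition, not part of LightGBM
# itself); `X` and `y` are assumed numpy arrays.
def _example_cv_usage(X, y):
    train_data = Dataset(X, label=y)
    eval_hist = cv(
        {'objective': 'binary', 'metric': 'auc'},
        train_data,
        num_boost_round=50,
        nfold=5,
        stratified=True,
        seed=0,
    )
    # eval_hist looks like {'auc-mean': [...], 'auc-stdv': [...]}
    return eval_hist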
| 48.698305
| 128
| 0.650738
|
54085bdc3d96ab30a7bd22998d80f2c64bb93967
| 2,343
|
py
|
Python
|
rel1-beta/bluecat_app/bin/dnsalert.py
|
mheidir/BlueCatSG-SplunkApp-UnOfficial
|
bd914b8650d191e48c18acda5bdd70aeabb99207
|
[
"Apache-2.0"
] | 1
|
2018-06-26T14:57:54.000Z
|
2018-06-26T14:57:54.000Z
|
rel1-beta/bluecat_app/bin/dnsalert.py
|
mheidir/BlueCatSG-SplunkApp-UnOfficial
|
bd914b8650d191e48c18acda5bdd70aeabb99207
|
[
"Apache-2.0"
] | null | null | null |
rel1-beta/bluecat_app/bin/dnsalert.py
|
mheidir/BlueCatSG-SplunkApp-UnOfficial
|
bd914b8650d191e48c18acda5bdd70aeabb99207
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import sys, os, datetime
sys.path.append("/opt/splunk/etc/apps/framework/contrib/splunk-sdk-python/")
from time import sleep
import splunklib.client as client
import splunklib.results as results
from bluecat import api
from bluecat import api_exception
from bluecat import entity
from bluecat.wrappers import generic_getters
import json
import base64
def block_tunnel(malware):
# Talk to BlueCat Address Manager
api_url = 'http://10.0.1.251/Services/API?wsdl'
bam = api(api_url)
bam.login('apiuser', 'bluecat')
#conf = bam.get_configuration('ACME Corp')
tg = bam._soap_client.service.getEntityByName(0, "dnstunnel-malware", "TagGroup")
    if tg.id == 0:
tg = bam._soap_client.service.addTagGroup('dnstunnel-malware', "")
try:
bam._soap_client.service.addTag(tg.id, "**." + malware, "")
e = bam._soap_client.service.getEntities(tg.id, "Tag", 0, 1000)
blacklistItems = ''
for entity in e.item:
blacklistItems += '\n' + entity.name
        bam._soap_client.service.uploadResponsePolicyItems(1651093, base64.b64encode(blacklistItems.encode()).decode())  # b64encode needs bytes, not str
bam._soap_client.service.deployServerConfig(140183, "services=DNS|forceDNSFullDeployment=true")
return 0
    except Exception:
        return 1
def log(msg):
    # Append a timestamped line to the Splunk app's alert log.
    logpath = os.path.join(os.environ["SPLUNK_HOME"], "var", "log", "splunk", "bluecat_dns_alert.log")
    with open(logpath, "a") as f:
        f.write(datetime.datetime.now().isoformat() + ',' + msg + '\n')
HOST = "localhost"
PORT = 8089
USERNAME = "admin"
PASSWORD = "changeme"
exec_mode = {"earliest_time" : "-30s", "latest_time" : "now", "count" : 0}
entropy_search = "search named rate limit | eval length=len(rateName) | search length > 40 | stats count by rateDomain, rateClientIP | search count > 8"
# Create a Service instance and log in
service = client.connect(
host=HOST,
port=PORT,
username=USERNAME,
password=PASSWORD)
search_results = service.jobs.oneshot(entropy_search, **exec_mode)
for item in results.ResultsReader(search_results):
    logstr = item['rateDomain'] + ',' + item['rateClientIP'] + ',' + item['count']
    log(logstr + ',DETECTED')
    if block_tunnel(item['rateDomain']) == 0:  # block_tunnel returns 0 on success
        logstr += ",BLOCKED"
    log(logstr)
| 34.455882
| 152
| 0.66752
|
70419232c1e6951b01e4100134862ee0dd3a0e93
| 4,307
|
py
|
Python
|
apps/glossy-test/test_tools/analysis/build_setting.py
|
dlobba/lwb-cc2538
|
4a41de11ab43eb75574e521e70b22bb38714bc57
|
[
"BSD-3-Clause"
] | 1
|
2020-04-12T12:04:11.000Z
|
2020-04-12T12:04:11.000Z
|
apps/glossy-test/test_tools/analysis/build_setting.py
|
dlobba/lwb-cc2538
|
4a41de11ab43eb75574e521e70b22bb38714bc57
|
[
"BSD-3-Clause"
] | null | null | null |
apps/glossy-test/test_tools/analysis/build_setting.py
|
dlobba/lwb-cc2538
|
4a41de11ab43eb75574e521e70b22bb38714bc57
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python3
import re
from collections import OrderedDict
# -----------------------------------------------------------------------------
# SETTINGS
# -----------------------------------------------------------------------------
SIM_SETTINGS = "settings.csv"
# fields
SIM_NTX = "ntx"
SIM_INITIATOR = "initiator"
SIM_TXPOWER = "txpower"
SIM_FRAMESIZE = "frame_size"
SIM_SLOT_DURATION_MS = "slot_ms"
SIM_PERIOD_DURATION_MS = "period_ms"
SIM_GUARD_TIME_MS = "guard_ms"
SIM_CHANNEL = "channel"
# temporary field
SIM_PAYLOAD_LEN = "payload_len"
# HEADERS
SETTINGS_HEADER = [SIM_CHANNEL, SIM_TXPOWER, SIM_NTX, SIM_FRAMESIZE, SIM_INITIATOR]
SETTINGS_ABBREV = ["ch", "txpower", "ntx", "frame", "init"]
SETTINGS_FULL = [
SIM_PERIOD_DURATION_MS, SIM_SLOT_DURATION_MS, SIM_GUARD_TIME_MS,\
SIM_CHANNEL, SIM_TXPOWER,\
SIM_FRAMESIZE, SIM_NTX, SIM_INITIATOR]
FILTER_RULES = {
    r"(?i)cc2538_rf_channel:\s+(\d+)": SIM_CHANNEL,
    r"(?i)cc2538_rf_tx_power:\s+(-?\d+)": SIM_TXPOWER,
    r"(?i)initiator_id:\s+(\d+)": SIM_INITIATOR,
    r"(?i)payload_data_len:\s+(\d+)": SIM_PAYLOAD_LEN,
    r"(?i)glossy_n_tx:\s+(\d+)": SIM_NTX,
    r"(?i)glossy_period:\s+(.*)": SIM_PERIOD_DURATION_MS,
    r"(?i)glossy_slot:\s+(.*)": SIM_SLOT_DURATION_MS,
    r"(?i)glossy_guard:\s+(.*)": SIM_GUARD_TIME_MS
}
def match_filter(log, rules):
    """
    Match a log line against each rule in rules.

    Return (rule_value, match_object) for the first rule whose regex
    matches the line, or (None, None) if no rule matches.
    """
    for rule in rules:
        match = re.match(rule, log)
        if match:
            return rules[rule], match
    return None, None
def get_pkt_size(payload):
# header (4B) + seqno (4B) + payload + crc(2B)
return 4 + 4 + payload + 2
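# Worked example (an editorial addition): an 8-byte payload yields a frame
# of 4 + 4 + 8 + 2 = 18 bytes on the air.
assert get_pkt_size(8) == 18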
def parse_build_setting_lines(lines):
RTIMER_SYMBOLIC = r"rtimer_second"
RTIMER_SECOND = 32768
PRECISION = 1000 # ms
# assume millisecond granularity
    # if this is not sufficient (IT SHOULD BE!) throw an error and inform
# the user that this code should be changed to the new precision
# by changing the value of RTIMER_VALUE
settings = {}
for line in lines:
rule, match = match_filter(line, FILTER_RULES)
if rule == SIM_CHANNEL:
settings[SIM_CHANNEL] = int(match.group(1))
elif rule == SIM_TXPOWER:
settings[SIM_TXPOWER] = int(match.group(1))
elif rule == SIM_INITIATOR:
settings[SIM_INITIATOR] = int(match.group(1))
elif rule == SIM_PAYLOAD_LEN:
settings[SIM_FRAMESIZE] = get_pkt_size(int(match.group(1)))
elif rule == SIM_NTX:
settings[SIM_NTX] = int(match.group(1))
elif rule == SIM_PERIOD_DURATION_MS:
duration = match.group(1)
duration = int(eval(duration) / RTIMER_SECOND * PRECISION)
settings[SIM_PERIOD_DURATION_MS] = duration
elif rule == SIM_SLOT_DURATION_MS:
duration = match.group(1)
duration = int(eval(duration) / RTIMER_SECOND * PRECISION)
settings[SIM_SLOT_DURATION_MS] = duration
elif rule == SIM_GUARD_TIME_MS:
duration = match.group(1)
duration = int(eval(duration) / RTIMER_SECOND * PRECISION)
settings[SIM_GUARD_TIME_MS] = duration
return settings
def parse_build_setting(filesettings):
with open(filesettings, "r") as fh:
return parse_build_setting_lines(fh)
def get_settings_row(settings):
values = [settings[h] for h in SETTINGS_HEADER]
return values
def get_radio_channel(settings):
return settings[SIM_CHANNEL]
def get_sim_name(settings):
values = [str(settings[v]).lower() for v in SETTINGS_HEADER]
values = list(map(lambda x: re.sub("-", "m", x), values))
values = ["%s%s" % (k, v) for k,v in zip(SETTINGS_HEADER, values)]
return str.join("_", values)
def get_sim_name_abbrev(settings):
values = [str(settings[v]).lower() for v in SETTINGS_HEADER]
values = list(map(lambda x: re.sub("-", "m", x), values))
values = ["%s%s" % (k, str(v)) for k,v in zip(SETTINGS_ABBREV, values)]
return str.join("_", values)
def get_settings_summary(settings):
summary=OrderedDict([(k, settings[k]) for k in SETTINGS_FULL])
return summary
if __name__ == "__main__":
pass
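    # Illustrative sketch (an editorial addition): feeding
    # parse_build_setting_lines a few hand-written config lines modeled on
    # the FILTER_RULES patterns above.
    sample_lines = [
        "CC2538_RF_CHANNEL: 26",
        "CC2538_RF_TX_POWER: -3",
        "INITIATOR_ID: 1",
        "PAYLOAD_DATA_LEN: 8",
        "GLOSSY_N_TX: 2",
    ]
    # -> {'channel': 26, 'txpower': -3, 'initiator': 1, 'frame_size': 18, 'ntx': 2}
    print(parse_build_setting_lines(sample_lines))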
| 32.141791
| 83
| 0.625493
|
133181f908afe6e5d507a28152f19fdd1b7cba2c
| 9,542
|
py
|
Python
|
ambassador/ambassador/config/resourcefetcher.py
|
lolletsoc/ambassador
|
7afb14b997290f10d5c3627a85432ab31ccffd0c
|
[
"Apache-2.0"
] | null | null | null |
ambassador/ambassador/config/resourcefetcher.py
|
lolletsoc/ambassador
|
7afb14b997290f10d5c3627a85432ab31ccffd0c
|
[
"Apache-2.0"
] | null | null | null |
ambassador/ambassador/config/resourcefetcher.py
|
lolletsoc/ambassador
|
7afb14b997290f10d5c3627a85432ab31ccffd0c
|
[
"Apache-2.0"
] | null | null | null |
from typing import List, Optional, Tuple, TYPE_CHECKING
# from typing import cast as typecast
import json
import logging
import os
import yaml
# from collections import namedtuple
from .acresource import ACResource
# from ..utils import RichStatus
if TYPE_CHECKING:
from .config import Config
# StringOrList is either a string or a list of strings.
# StringOrList = Union[str, List[str]]
# class YAMLElement (dict):
# def __init__(self, obj: dict, serialization: str, rkey: Optional[str]=None,
# filename: Optional[str]=None, filepath: Optional[str]=None, ocount=1):
#
# if filename and not rkey:
# rkey = filename
#
# super().__init__(obj=obj, serialization=serialization, rkey=rkey,
# filename=filename, filepath=filepath, ocount=ocount)
# Some thoughts:
# - loading a bunch of Ambassador resources is different from loading a bunch of K8s
# services, because we should assume that if we're being fed a bunch of Ambassador
# resources, we'll get a full set. The whole 'secret loader' thing needs to have the
# concept of a TLSSecret resource that can be force-fed to us, or that can be fetched
# through the loader if needed.
# - If you're running a debug-loop Ambassador, you should just have a flat (or
# recursive, I don't care) directory full of Ambassador YAML, including TLSSecrets
# and Endpoints and whatnot, as needed. All of it will get read by
# load_from_filesystem and end up in the elements array.
# - If you're running expecting to be fed by kubewatch, at present kubewatch will
# send over K8s Service records, and anything annotated in there will end up in
# elements. This may include TLSSecrets or Endpoints. Any TLSSecret mentioned that
# isn't already in elements will need to be fetched.
# - Ambassador resources do not have namespaces. They have the ambassador_id. That's
# it. The ambassador_id is completely orthogonal to the namespace. No element with
# the wrong ambassador_id will end up in elements. It would be nice if they were
# never sent by kubewatch, but, well, y'know.
# - TLSSecret resources are not TLSContexts. TLSSecrets only have a name, a private
# half, and a public half. They do _not_ have other TLSContext information.
# - Endpoint resources probably have just a name, a service name, and an endpoint
# address.
class ResourceFetcher:
def __init__(self, logger: logging.Logger, aconf: 'Config') -> None:
self.aconf = aconf
self.logger = logger
self.elements: List[ACResource] = []
self.filename: Optional[str] = None
self.ocount: int = 1
self.saved: List[Tuple[Optional[str], int]] = []
@property
def location(self):
return "%s.%d" % (self.filename or "anonymous YAML", self.ocount)
def push_location(self, filename: Optional[str], ocount: int) -> None:
self.saved.append((self.filename, self.ocount))
self.filename = filename
self.ocount = ocount
def pop_location(self) -> None:
self.filename, self.ocount = self.saved.pop()
def load_from_filesystem(self, config_dir_path, recurse: bool=False, k8s: bool=False):
inputs: List[Tuple[str, str]] = []
if os.path.isdir(config_dir_path):
dirs = [ config_dir_path ]
while dirs:
dirpath = dirs.pop(0)
for filename in os.listdir(dirpath):
filepath = os.path.join(dirpath, filename)
if recurse and os.path.isdir(filepath):
# self.logger.debug("%s: RECURSE" % filepath)
dirs.append(filepath)
continue
if not os.path.isfile(filepath):
# self.logger.debug("%s: SKIP non-file" % filepath)
continue
if not filename.lower().endswith('.yaml'):
# self.logger.debug("%s: SKIP non-YAML" % filepath)
continue
# self.logger.debug("%s: SAVE configuration file" % filepath)
inputs.append((filepath, filename))
else:
# this allows a file to be passed into the ambassador cli
# rather than just a directory
inputs.append((config_dir_path, os.path.basename(config_dir_path)))
for filepath, filename in inputs:
self.logger.info("reading %s (%s)" % (filename, filepath))
try:
                with open(filepath, "r") as yaml_file:
                    serialization = yaml_file.read()
                self.parse_yaml(serialization, k8s=k8s, filename=filename)
except IOError as e:
self.aconf.post_error("could not read YAML from %s: %s" % (filepath, e))
def parse_yaml(self, serialization: str, k8s=False, rkey: Optional[str]=None,
filename: Optional[str]=None) -> None:
# self.logger.debug("%s: parsing %d byte%s of YAML:\n%s" %
# (self.location, len(serialization), "" if (len(serialization) == 1) else "s",
# serialization))
try:
objects = list(yaml.safe_load_all(serialization))
self.push_location(filename, 1)
for obj in objects:
if k8s:
self.extract_k8s(obj)
else:
# if not obj:
# self.logger.debug("%s: empty object from %s" % (self.location, serialization))
self.process_object(obj, rkey=rkey)
self.ocount += 1
self.pop_location()
except yaml.error.YAMLError as e:
self.aconf.post_error("%s: could not parse YAML: %s" % (self.location, e))
def extract_k8s(self, obj: dict) -> None:
self.logger.debug("extract_k8s obj %s" % json.dumps(obj, indent=4, sort_keys=True))
kind = obj.get('kind', None)
if kind != "Service":
self.logger.debug("%s: ignoring K8s %s object" % (self.location, kind))
return
metadata = obj.get('metadata', None)
if not metadata:
self.logger.debug("%s: ignoring unannotated K8s %s" % (self.location, kind))
return
# Use metadata to build a unique resource identifier
resource_name = metadata.get('name')
# This should never happen as the name field is required in metadata for Service
if not resource_name:
self.logger.debug("%s: ignoring unnamed K8s %s" % (self.location, kind))
return
resource_namespace = metadata.get('namespace', 'default')
# This resource identifier is useful for log output since filenames can be duplicated (multiple subdirectories)
resource_identifier = '{name}.{namespace}'.format(namespace=resource_namespace, name=resource_name)
annotations = metadata.get('annotations', None)
if annotations:
annotations = annotations.get('getambassador.io/config', None)
# self.logger.debug("annotations %s" % annotations)
if not annotations:
# self.logger.debug("%s: ignoring K8s %s without Ambassador annotation" % (self.location, kind))
return
if self.filename and (not self.filename.endswith(":annotation")):
self.filename += ":annotation"
self.parse_yaml(annotations, filename=self.filename, rkey=resource_identifier)
def process_object(self, obj: dict, rkey: Optional[str]=None) -> None:
if not isinstance(obj, dict):
# Bug!!
if not obj:
self.aconf.post_error("%s is empty" % self.location)
else:
self.aconf.post_error("%s is not a dictionary? %s" %
                                      (self.location, json.dumps(obj, indent=4, sort_keys=True)))
return
if not self.aconf.good_ambassador_id(obj):
self.logger.debug("%s SKIP for ambassador_id mismatch" % self.location)
return
if 'kind' not in obj:
# Bug!!
self.aconf.post_error("%s is missing 'kind'?? %s" %
(self.location, json.dumps(obj, indent=4, sort_keys=True)))
return
self.logger.debug("%s PROCESS %s initial rkey %s" % (self.location, obj['kind'], rkey))
# Is this a pragma object?
if obj['kind'] == 'Pragma':
# Why did I think this was a good idea? [ :) ]
new_source = obj.get('source', None)
if new_source:
# We don't save the old self.filename here, so this change will last until
# the next input source (or the next Pragma).
self.filename = new_source
# Don't count Pragma objects, since the user generally doesn't write them.
self.ocount -= 1
return
if not rkey:
rkey = self.filename
rkey = "%s.%d" % (rkey, self.ocount)
self.logger.debug("%s PROCESS %s updated rkey to %s" % (self.location, obj['kind'], rkey))
# Fine. Fine fine fine.
serialization = yaml.safe_dump(obj, default_flow_style=False)
r = ACResource.from_dict(rkey, rkey, serialization, obj)
self.elements.append(r)
self.logger.debug("%s PROCESS %s save %s" % (self.location, obj['kind'], rkey))
    def sorted(self, key=lambda x: x.rkey):  # returns a sorted list of elements
return sorted(self.elements, key=key)
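# Illustrative sketch (an editorial addition, not part of Ambassador itself):
# feeding the fetcher a hand-written resource; the logger, Config instance,
# and YAML body are assumptions.
def _example_fetch_inline_yaml(logger, aconf):
    fetcher = ResourceFetcher(logger, aconf)
    fetcher.parse_yaml(
        "---\n"
        "apiVersion: ambassador/v1\n"
        "kind: Module\n"
        "name: ambassador\n"
        "config: {}\n",
        k8s=False,
        filename="inline.yaml",
    )
    return fetcher.sorted()  # ACResource elements ordered by rkey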
| 39.593361
| 119
| 0.60459
|
601c20fc70b2c4c025895b6a31c983530e66d1e3
| 6,329
|
py
|
Python
|
akivymd/uix/onboarding.py
|
JUDE122-MAX/akivymd
|
b2daddd2f58889859514286606f46a4af6f03828
|
[
"MIT"
] | 51
|
2020-06-18T15:14:02.000Z
|
2022-03-07T05:46:11.000Z
|
akivymd/uix/onboarding.py
|
JUDE122-MAX/akivymd
|
b2daddd2f58889859514286606f46a4af6f03828
|
[
"MIT"
] | 16
|
2020-07-19T08:13:45.000Z
|
2021-06-05T20:55:12.000Z
|
akivymd/uix/onboarding.py
|
JUDE122-MAX/akivymd
|
b2daddd2f58889859514286606f46a4af6f03828
|
[
"MIT"
] | 15
|
2020-07-18T10:34:10.000Z
|
2021-07-03T09:07:37.000Z
|
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.event import EventDispatcher
from kivy.lang import Builder
from kivy.metrics import dp
from kivy.properties import (
BooleanProperty,
ListProperty,
NumericProperty,
StringProperty,
)
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.carousel import Carousel
from kivy.uix.widget import Widget
from kivymd.theming import ThemableBehavior
Builder.load_string(
"""
<ItemCircles>:
size_hint_x: None
canvas.before:
Color:
rgba: root._circles_color
Line:
circle: [ self.pos[0]+self.width/2, self.pos[1]+self.height/2, self.width/2]
width: dp(1)
<AKOnboardingItem>:
<AKOnboarding>:
orientation: 'vertical'
MyCarousel:
min_move:root.min_move
anim_type: root.anim_type
anim_move_duration: root.anim_move_duration
id: carousel
FloatLayout:
id: rounded_box
size_hint_y: None
height: circles_box.y+ circles_box.height*2
canvas.before:
Color:
rgba: root.bottom_bar_color if root.bottom_bar_color else app.theme_cls.bg_dark
a: 1 if root.show_bottom_bar else 0
RoundedRectangle:
pos: self.pos
size: self.size
radius: root.bottom_bar_radius
Widget:
id: ghost_circle
size_hint: None, None
canvas.before:
Color:
rgba: root.circles_color if root.circles_color else root.theme_cls.primary_color
Ellipse:
pos: self.pos
size: self.size
BoxLayout:
id: circles_box
pos: rounded_box.width/2-self.width/2 , rounded_box.height/2-self.height/2
size_hint: None,None
size: self.minimum_width , root.circles_size
spacing: root.circles_size/2
MDFlatButton:
text: 'Skip'
on_release: root._on_finish_dispatch()
disabled: False if root.skip_button else True
opacity: 1 if root.skip_button else 0
text_color: root.circles_color if root.circles_color else root.theme_cls.primary_color
pos_hint: {'right': .95, 'center_y': .5}
"""
)
class ItemCircles(ThemableBehavior, Widget):
_circles_color = ListProperty(None)
def __init__(self, **kwargs):
super().__init__(**kwargs)
class MyCarousel(ThemableBehavior, Carousel):
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_once(lambda x: self._add_circles())
Window.bind(on_resize=self._on_resize)
def _add_circles(self):
self.total_circles = len(self.slides) - 1
if self.parent.circles_color:
circle_color = self.parent.circles_color
else:
circle_color = self.theme_cls.primary_color
for _ in range(self.total_circles + 1):
self.parent.ids.circles_box.add_widget(
ItemCircles(
width=self.parent.circles_size, _circles_color=circle_color
)
)
self._current_circle = self.total_circles
Clock.schedule_once(lambda x: self._set_current_circle(animation=False))
def on_size(self, *args):
Clock.schedule_once(lambda x: self._set_current_circle(animation=False))
return super().on_size(*args)
def reset(self):
self._current_circle = self.total_circles
self._set_current_circle()
self.load_slide(self.slides[0])
def _set_current_circle(self, mode=None, animation=True):
if mode == "next":
if self._current_circle > 0:
self._current_circle -= 1
else:
self.parent._on_finish_dispatch()
elif mode == "previous":
if self._current_circle < self.total_circles:
self._current_circle += 1
if animation:
width = self.parent.ids.ghost_circle.width
anim = Animation(
pos=self.parent.ids.circles_box.children[
self._current_circle
].pos,
t=self.anim_type,
duration=self.anim_move_duration,
)
anim.start(self.parent.ids.ghost_circle)
else:
self.parent.ids.ghost_circle.pos = (
self.parent.ids.circles_box.children[self._current_circle].pos
)
def on_touch_up(self, touch):
if abs(self._offset) > self.width * self.min_move:
if self._offset > 0: # previous screen
self._set_current_circle("previous")
elif self._offset < 0: # next screen
self._set_current_circle("next")
return super().on_touch_up(touch)
def _on_resize(self, *args):
Clock.schedule_once(lambda x: self._set_current_circle(animation=False))
class AKOnboardingItem(BoxLayout):
pass
class AKOnboarding(ThemableBehavior, BoxLayout, EventDispatcher):
circles_size = NumericProperty(dp(20))
skip_button = BooleanProperty(True)
min_move = NumericProperty(0.05)
anim_type = StringProperty("out_quad")
anim_move_duration = NumericProperty(0.2)
bottom_bar_radius = ListProperty([dp(20), dp(20), 0, 0])
show_bottom_bar = BooleanProperty(True)
bottom_bar_color = ListProperty(None)
circles_color = ListProperty(None)
def __init__(self, **kwargs):
super(AKOnboarding, self).__init__(**kwargs)
self.register_event_type("on_finish")
Clock.schedule_once(lambda x: self._update())
def add_widget(self, widget, index=0, canvas=None):
if issubclass(widget.__class__, AKOnboardingItem):
self.ids.carousel.add_widget(widget)
else:
super().add_widget(widget, index=index, canvas=canvas)
def _on_finish_dispatch(self):
self.dispatch("on_finish")
def on_finish(self, *args):
pass
def reset(self):
return self.ids.carousel.reset()
def on_size(self, *args):
self.ids.carousel.size = self.size
def _update(self):
self.ids.ghost_circle.size = [self.circles_size, self.circles_size]
| 31.02451
| 100
| 0.625217
|
1377ef1d2b3d26f3cb21622018b84fcb43f3a66b
| 20,018
|
py
|
Python
|
poppy/manager/default/ssl_certificate.py
|
LukeRepko/poppy
|
0620ff595f8fcaa445b684de99ad130a3c398461
|
[
"Apache-2.0"
] | 3
|
2017-07-05T20:09:59.000Z
|
2018-11-27T22:02:57.000Z
|
poppy/manager/default/ssl_certificate.py
|
LukeRepko/poppy
|
0620ff595f8fcaa445b684de99ad130a3c398461
|
[
"Apache-2.0"
] | 24
|
2017-04-18T15:14:04.000Z
|
2019-03-20T19:09:07.000Z
|
poppy/manager/default/ssl_certificate.py
|
LukeRepko/poppy
|
0620ff595f8fcaa445b684de99ad130a3c398461
|
[
"Apache-2.0"
] | 8
|
2017-04-03T13:24:27.000Z
|
2021-11-08T20:28:10.000Z
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo_context import context as context_utils
from oslo_log import log
from poppy.common import errors
from poppy.common import util
from poppy.distributed_task.taskflow.flow import create_ssl_certificate
from poppy.distributed_task.taskflow.flow import delete_ssl_certificate
from poppy.distributed_task.taskflow.flow import recreate_ssl_certificate
from poppy.manager import base
from poppy.model.helpers import domain
from poppy.model import ssl_certificate
from poppy.transport.validators import helpers as validators
LOG = log.getLogger(__name__)
class DefaultSSLCertificateController(base.SSLCertificateController):
def __init__(self, manager):
super(DefaultSSLCertificateController, self).__init__(manager)
self.distributed_task_controller = (
self._driver.distributed_task.services_controller
)
self.storage = self._driver.storage.certificates_controller
self.service_storage = self._driver.storage.services_controller
self.flavor_controller = self._driver.storage.flavors_controller
def create_ssl_certificate(
self, project_id, cert_obj, https_upgrade=False):
if (not validators.is_valid_domain_name(cert_obj.domain_name)) or \
(validators.is_root_domain(
domain.Domain(cert_obj.domain_name).to_dict())) or \
(not validators.is_valid_tld(cert_obj.domain_name)):
            # an HTTP domain object is created here, but whether the domain
            # is HTTP or HTTPS does not matter for this validation
raise ValueError('%s must be a valid non-root domain' %
cert_obj.domain_name)
try:
flavor = self.flavor_controller.get(cert_obj.flavor_id)
# raise a lookup error if the flavor is not found
except LookupError as e:
raise e
try:
self.storage.create_certificate(
project_id,
cert_obj
)
# ValueError will be raised if the cert_info has already existed
except ValueError as e:
raise e
providers = [p.provider_id for p in flavor.providers]
kwargs = {
'providers_list_json': json.dumps(providers),
'project_id': project_id,
'cert_obj_json': json.dumps(cert_obj.to_dict()),
'context_dict': context_utils.get_current().to_dict()
}
if https_upgrade is True:
kwargs['https_upgrade'] = True
self.distributed_task_controller.submit_task(
create_ssl_certificate.create_ssl_certificate,
**kwargs)
return kwargs
def delete_ssl_certificate(self, project_id, domain_name, cert_type):
cert_obj = self.storage.get_certs_by_domain(
domain_name, cert_type=cert_type)
try:
flavor = self.flavor_controller.get(cert_obj.flavor_id)
# raise a lookup error if the flavor is not found
except LookupError as e:
raise e
providers = [p.provider_id for p in flavor.providers]
kwargs = {
'project_id': project_id,
'domain_name': domain_name,
'cert_type': cert_type,
'cert_obj_json': json.dumps(cert_obj.to_dict()),
'providers_list_json': json.dumps(providers),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
delete_ssl_certificate.delete_ssl_certificate,
**kwargs)
return kwargs
def get_certs_info_by_domain(self, domain_name, project_id):
return self.storage.get_certs_by_domain(
domain_name=domain_name,
project_id=project_id)
def get_san_retry_list(self):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
res = akamai_driver.mod_san_queue.traverse_queue()
# For other providers san_retry_list implementation goes here
else:
# if not using akamai driver just return an empty list
return []
res = [json.loads(r) for r in res]
return [
{"domain_name": r['domain_name'],
"project_id": r['project_id'],
"flavor_id": r['flavor_id'],
"cert_type": r['cert_type'],
"validate_service": r.get('validate_service', True)}
for r in res
]
def update_san_retry_list(self, queue_data_list):
for r in queue_data_list:
service_obj = self.service_storage\
.get_service_details_by_domain_name(r['domain_name'])
if service_obj is None and r.get('validate_service', True):
raise LookupError(u'Domain {0} does not exist on any service, '
'are you sure you want to proceed request, '
'{1}? You can set validate_service to False '
'to retry this san-retry request forcefully'.
format(r['domain_name'], r))
cert_for_domain = None
try:
cert_for_domain = self.storage.get_certs_by_domain(
r['domain_name'])
except ValueError:
LOG.info("No matching certificates found for "
"the domain {}".format(r['domain_name']))
if cert_for_domain:
if cert_for_domain.get_cert_status() == "deployed":
raise ValueError(u'Cert on {0} already exists'.
format(r['domain_name']))
new_queue_data = [
json.dumps({'flavor_id': r['flavor_id'],
'domain_name': r['domain_name'],
'project_id': r['project_id'],
'cert_type': r['cert_type'],
'validate_service': r.get('validate_service', True)})
for r in queue_data_list
]
res, deleted = [], []
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
orig = [json.loads(r) for r in
akamai_driver.mod_san_queue.traverse_queue()]
res = [json.loads(r) for r in
akamai_driver.mod_san_queue.put_queue_data(new_queue_data)]
deleted = tuple(x for x in orig if x not in res)
# other provider's retry-list implementation goes here
return res, deleted
def rerun_san_retry_list(self):
run_list = []
ignore_list = []
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
retry_list = []
while len(akamai_driver.mod_san_queue.mod_san_queue_backend) > 0:
res = akamai_driver.mod_san_queue.dequeue_mod_san_request()
retry_list.append(json.loads(res.decode('utf-8')))
retry_list = util.remove_duplicates(retry_list)
# double check in POST. This check should really be first done in
# PUT
for r in retry_list:
err_state = False
service_obj = self.service_storage\
.get_service_details_by_domain_name(r['domain_name'])
if service_obj is None and r.get('validate_service', True):
err_state = True
LOG.error(
u'Domain {0} does not exist on any service, are you '
'sure you want to proceed request, {1}? You can set '
'validate_service to False to retry this san-retry '
'request forcefully'.format(r['domain_name'], r)
)
elif (
service_obj is not None and
service_obj.operator_status.lower() == 'disabled'
):
err_state = True
LOG.error(
                    u'The service for domain {0} is disabled. '
                    'No certificates will be created for '
                    'service {1} while it remains in {2} '
                    'operator_status.'.format(
r['domain_name'],
service_obj.service_id,
service_obj.operator_status
)
)
try:
cert_for_domain = self.storage.get_certs_by_domain(
r['domain_name'])
if cert_for_domain.get_cert_status() == "deployed":
err_state = True
LOG.error(
u'Certificate on {0} has already been provisioned '
'successfully.'.format(r['domain_name']))
except ValueError:
LOG.info("No matching certificates found for "
"the domain {}".format(r['domain_name']))
if err_state is False:
run_list.append(r)
else:
ignore_list.append(r)
if not r.get('validate_service', True):
# validation is False, send ignored retry_list
# object back to queue
akamai_driver.mod_san_queue.enqueue_mod_san_request(
json.dumps(r)
)
                    LOG.warning(
"{0} was skipped because it failed validation.".format(
r['domain_name']
)
)
for cert_obj_dict in run_list:
try:
cert_obj = ssl_certificate.SSLCertificate(
cert_obj_dict['flavor_id'],
cert_obj_dict['domain_name'],
cert_obj_dict['cert_type'],
project_id=cert_obj_dict['project_id']
)
try:
cert_for_domain = (
self.storage.get_certs_by_domain(
cert_obj.domain_name,
project_id=cert_obj.project_id,
flavor_id=cert_obj.flavor_id,
cert_type=cert_obj.cert_type))
# If this cert has been deployed through manual
# process we ignore the rerun process for this entry
if cert_for_domain.get_cert_status() == 'deployed':
run_list.remove(cert_obj_dict)
ignore_list.append(cert_obj_dict)
continue
except ValueError:
LOG.info("No matching certificates found for "
"the domain {}".format(cert_obj.domain_name))
# rerun the san process
try:
flavor = self.flavor_controller.get(cert_obj.flavor_id)
# raise a lookup error if the flavor is not found
except LookupError as e:
raise e
providers = [p.provider_id for p in flavor.providers]
kwargs = {
'project_id': cert_obj.project_id,
'domain_name': cert_obj.domain_name,
'cert_type': cert_obj.cert_type,
'providers_list_json': json.dumps(providers),
'cert_obj_json': json.dumps(cert_obj.to_dict()),
'enqueue': False,
'context_dict': context_utils.RequestContext(
tenant=cert_obj.project_id
).to_dict()
}
self.distributed_task_controller.submit_task(
recreate_ssl_certificate.recreate_ssl_certificate,
**kwargs)
except Exception as e:
# When exception happens we log it and re-queue this
# request
LOG.exception(e)
run_list.remove(cert_obj_dict)
ignore_list.append(cert_obj_dict)
akamai_driver.mod_san_queue.enqueue_mod_san_request(
json.dumps(cert_obj_dict)
)
# For other providers post san_retry_list implementation goes here
else:
# if not using akamai driver just return summary of run list and
# ignore list
pass
return run_list, ignore_list
def get_san_cert_configuration(self, san_cert_name):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
if san_cert_name not in akamai_driver.san_cert_cnames:
raise ValueError(
"%s is not a valid san cert, valid san certs are: %s" %
(san_cert_name, akamai_driver.san_cert_cnames))
res = akamai_driver.cert_info_storage.get_cert_config(
san_cert_name
)
else:
# if not using akamai driver just return an empty list
res = {}
return res
def update_san_cert_configuration(self, san_cert_name, new_cert_config):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
if san_cert_name not in akamai_driver.san_cert_cnames:
raise ValueError(
"%s is not a valid san cert, valid san certs are: %s" %
(san_cert_name, akamai_driver.san_cert_cnames))
# given the spsId, determine the most recent jobId
# and persist the jobId
if new_cert_config.get('spsId') is not None:
resp = akamai_driver.sps_api_client.get(
akamai_driver.akamai_sps_api_base_url.format(
spsId=new_cert_config['spsId']
),
)
if resp.status_code != 200:
raise RuntimeError(
'SPS GET Request failed. Exception: {0}'.format(
resp.text
)
)
else:
resp_json = resp.json()
new_cert_config['jobId'] = (
resp_json['requestList'][0]['jobId']
)
res = akamai_driver.cert_info_storage.update_cert_config(
san_cert_name, new_cert_config)
else:
# if not using akamai driver just return an empty list
res = {}
return res
def get_sni_cert_configuration(self, cert_name):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
self._validate_sni_cert_name(akamai_driver, cert_name)
res = akamai_driver.cert_info_storage.get_sni_cert_info(cert_name)
else:
# if not using akamai driver just return an empty list
res = {}
return res
def update_sni_cert_configuration(self, cert_name, new_cert_config):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
self._validate_sni_cert_name(akamai_driver, cert_name)
res = akamai_driver.cert_info_storage.update_sni_cert_config(
cert_name,
new_cert_config
)
else:
# if not using akamai driver just return an empty list
res = {}
return res
def get_san_cert_hostname_limit(self):
if 'akamai' in self._driver.providers:
akamai_driver = self._driver.providers['akamai'].obj
res = akamai_driver.cert_info_storage.get_san_cert_hostname_limit()
res = {'san_cert_hostname_limit': res}
else:
# if not using akamai driver just return an empty list
res = {'san_cert_hostname_limit': 0}
return res
@staticmethod
def _validate_sni_cert_name(provider_driver, cert_name):
if cert_name not in provider_driver.sni_cert_cnames:
raise ValueError(
"{0} is not a valid sni cert, "
"valid sni certs are: {1}".format(
cert_name, provider_driver.sni_cert_cnames))
def set_san_cert_hostname_limit(self, request_json):
if 'akamai' in self._driver.providers:
try:
new_limit = request_json['san_cert_hostname_limit']
except Exception as exc:
LOG.error("Error attempting to update san settings {0}".format(
exc
))
raise ValueError('Unknown setting!')
akamai_driver = self._driver.providers['akamai'].obj
res = akamai_driver.cert_info_storage.set_san_cert_hostname_limit(
new_limit
)
else:
# if not using akamai driver just return an empty list
res = 0
return res
def get_certs_by_status(self, status):
certs_by_status = self.storage.get_certs_by_status(status)
return certs_by_status
def update_certificate_status(self, domain_name, certificate_updates):
certificate_old = self.storage.get_certs_by_domain(domain_name)
try:
if (
certificate_updates.get("op") == "replace" and
certificate_updates.get("path") == "status" and
certificate_updates.get("value") is not None
):
if (
certificate_old.get_cert_status() !=
certificate_updates.get("value")
):
new_cert_details = certificate_old.cert_details
# update the certificate for the first provider akamai
# this logic changes when multiple certificate providers
# are supported
first_provider = list(new_cert_details.keys())[0]
first_provider_cert_details = (
list(new_cert_details.values())[0]
)
first_provider_cert_details["extra_info"][
"status"] = certificate_updates.get("value")
new_cert_details[first_provider] = json.dumps(
first_provider_cert_details
)
self.storage.update_certificate(
certificate_old.domain_name,
certificate_old.cert_type,
certificate_old.flavor_id,
new_cert_details
)
except Exception as e:
LOG.error(
"Something went wrong during certificate update: {0}".format(
e
)
)
raise errors.CertificateStatusUpdateError(e)
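# Illustrative sketch (an editorial addition, not part of Poppy itself):
# the patch document shape that update_certificate_status() above expects;
# the controller instance and domain name are assumptions.
def _example_update_certificate_status(controller):
    controller.update_certificate_status(
        'www.example.com',
        {'op': 'replace', 'path': 'status', 'value': 'deployed'},
    )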
| 41.704167
| 79
| 0.550055
|
878024ab69f27846b3b55ffe2d3fc64fc136e295
| 133
|
py
|
Python
|
pybry/__init__.py
|
pavlo-seimskyi/PyBRY
|
af86805a8077916f72f3fe980943d4cd741e61f0
|
[
"MIT"
] | null | null | null |
pybry/__init__.py
|
pavlo-seimskyi/PyBRY
|
af86805a8077916f72f3fe980943d4cd741e61f0
|
[
"MIT"
] | null | null | null |
pybry/__init__.py
|
pavlo-seimskyi/PyBRY
|
af86805a8077916f72f3fe980943d4cd741e61f0
|
[
"MIT"
] | null | null | null |
from .lbryd_api import LbrydApi
from .lbrycrd_api import LbrycrdApi
from .LBRYException import LBRYException
__version__ = '1.6.4'
| 19
| 40
| 0.81203
|
adb24d256d58f9218131a48f76f096cd72fb64ef
| 850
|
py
|
Python
|
orders/forms.py
|
OSAMAMOHAMED1234/E-Commerce_django
|
c9dc14e47e0a369ad91e21a461d2c28ad47543e7
|
[
"MIT"
] | 3
|
2018-05-02T20:37:11.000Z
|
2020-10-15T17:19:26.000Z
|
orders/forms.py
|
OSAMAMOHAMED1234/E-Commerce_django
|
c9dc14e47e0a369ad91e21a461d2c28ad47543e7
|
[
"MIT"
] | 1
|
2019-06-10T21:35:13.000Z
|
2019-06-10T21:35:13.000Z
|
orders/forms.py
|
OSAMAMOHAMED1234/E-Commerce_django
|
c9dc14e47e0a369ad91e21a461d2c28ad47543e7
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth import get_user_model
from .models import Checkout
from products.models import Product
User = get_user_model()
class UpdateCheckoutForm(forms.ModelForm):
product_id = forms.IntegerField(widget=forms.HiddenInput)
class Meta:
model = Checkout
fields = [
'product_id',
'quantity',
]
    def clean_quantity(self):
        quantity = self.cleaned_data['quantity']
        product_id = self.cleaned_data['product_id']
        product = Product.objects.filter(id=product_id).first()
        if product is None:
            raise forms.ValidationError('Product does not exist!')
        if quantity > product.quantity:
            raise forms.ValidationError(
                'Quantity exceeds the available quantity ({})!'.format(product.quantity))
        if quantity < 1:
            raise forms.ValidationError('Quantity cannot be less than 1')
        return quantity
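# Illustrative sketch (an editorial addition, not part of the original app):
# validating a POST payload with the form above; the field values are
# assumptions.
def _example_validate_checkout():
    form = UpdateCheckoutForm(data={'product_id': 1, 'quantity': 2})
    if form.is_valid():
        return form.cleaned_data['quantity']
    return form.errors  # e.g. a quantity above stock produces a field error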
| 29.310345
| 108
| 0.663529
|
ed30bca65fbc33392e7e50fa5d33b8bb2331da42
| 2,971
|
py
|
Python
|
python/curagridder/test_ws.py
|
HLSUD/NUFFT
|
6dead03fcdfea058ac3c0322723ee73f2695c74a
|
[
"MIT"
] | 1
|
2021-07-31T12:52:55.000Z
|
2021-07-31T12:52:55.000Z
|
python/curagridder/test_ws.py
|
HLSUD/NUFFT
|
6dead03fcdfea058ac3c0322723ee73f2695c74a
|
[
"MIT"
] | 1
|
2021-04-24T16:07:07.000Z
|
2021-04-24T16:07:07.000Z
|
python/curagridder/test_ws.py
|
HLSUD/NUFFT
|
6dead03fcdfea058ac3c0322723ee73f2695c74a
|
[
"MIT"
] | null | null | null |
from curagridder import ms2dirty, dirty2ms
import numpy as np
import time
# import pytest
from numpy.testing import assert_, assert_allclose, assert_array_almost_equal
def _l2error(a, b):
return np.sqrt(np.sum(np.abs(a-b)**2)/np.sum(np.abs(a)**2))
def explicit_gridder(uvw, freq, ms, nxdirty, nydirty, xpixsize, ypixsize):
speedoflight = 299792458.
x, y = np.meshgrid(*[-ss/2 + np.arange(ss) for ss in [nxdirty, nydirty]],
indexing='ij')
x *= xpixsize
y *= ypixsize
res = np.zeros((nxdirty, nydirty))
eps = x**2+y**2
nm1 = -eps/(np.sqrt(1.-eps)+1.)
n = nm1+1
for row in range(ms.shape[0]):
for chan in range(ms.shape[1]):
phase = (freq[chan]/speedoflight *
(x*uvw[row, 0] + y*uvw[row, 1] + uvw[row, 2]*nm1))
res += (ms[row, chan]*np.exp(2j*np.pi*phase)).real
return res/n
def test_against_wdft(nrow, nchan, nxdirty, nydirty, fov, epsilon):
print("\n\nTesting imaging with {} rows and {} "
"frequency channels".format(nrow, nchan))
print("Dirty image has {}x{} pixels, "
"FOV={} degrees".format(nxdirty, nydirty, fov))
print("Requested accuracy: {}".format(epsilon))
xpixsize = fov*np.pi/180/nxdirty
ypixsize = fov*np.pi/180/nydirty
speedoflight = 299792458.
np.random.seed(42)
f0 = 1e9
freq = f0 + np.arange(nchan)*(f0/nchan)
uvw = (np.random.rand(nrow, 3)-0.5)/(f0/speedoflight)
ms = np.random.rand(nrow, nchan)-0.5 + 1j*(np.random.rand(nrow, nchan)-0.5)
dirty = np.random.rand(nxdirty, nydirty)-0.5 + 0j
dirty2 = np.zeros((nxdirty,nydirty),dtype=np.complex128)
print("begin")
start = time.time()
dirty2 = ms2dirty(uvw,freq, ms, None, dirty2, fov, epsilon,2)
end = time.time()
print("The elapsed time {} (sec)".format(end-start))
print("Execution finished")
dirty2 = np.reshape(dirty2,[nxdirty,nydirty])
ms2 = np.zeros((nrow,1),dtype=np.complex128)
ms2 = dirty2ms(uvw,freq, ms2, None, dirty, fov, epsilon,2)
# ms2 = np.reshape(ms2,[nrow,1])
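    # Adjointness check: dirty2 = A^H(ms) and ms2 = A(dirty), where A is the
    # degridding (image -> visibilities) operator. If ms2dirty really is the
    # adjoint of dirty2ms, then <ms, A dirty> == <A^H ms, dirty>, so the two
    # inner products below must agree to near machine precision.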
print("\nadjointness testing....")
print(np.vdot(ms, ms2).real)
print(np.vdot(dirty2, dirty).real)
assert_allclose(np.vdot(ms, ms2).real, np.vdot(dirty2, dirty).real, rtol=1e-12)
if nrow<1e4:
print("Vertification begin")
truth = explicit_gridder(uvw, freq, ms, nxdirty, nydirty, xpixsize, ypixsize)
print("L2 error between explicit transform and CURIG:",
_l2error(truth, dirty2.real))
# the first test is executed twice to warm up the GPU
# for i in range(10):
test_against_wdft(1000, 1, 512, 512, 2, 1e-12)
test_against_wdft(1000, 1, 512, 512, 2, 1e-12)
test_against_wdft(10000, 1, 512, 512, 60, 1e-12)
# test_against_wdft(10000, 1, 1024, 1024, 2, 1e-12)
# test_against_wdft(100000000, 1, 1024, 1024, 2, 1e-12)
# test_against_wdft(100000000, 1, 2048, 2048, 2, 1e-12)
# test_against_wdft(100000000, 1, 4096, 4096, 2, 1e-12)
| 33.761364
| 85
| 0.636486
|
3974d2b9ba2d874b0b837ddcb54099e0808f3d40
| 1,425
|
py
|
Python
|
containers/python/rpc_test_container.py
|
EyalSel/clipper
|
3c5a1cc6ce59e0ccd778f526a50808d0e7b2576f
|
[
"Apache-2.0"
] | 32
|
2019-09-11T16:49:58.000Z
|
2022-01-26T15:40:40.000Z
|
containers/python/rpc_test_container.py
|
EyalSel/clipper
|
3c5a1cc6ce59e0ccd778f526a50808d0e7b2576f
|
[
"Apache-2.0"
] | 7
|
2018-06-20T22:06:09.000Z
|
2019-01-07T04:01:22.000Z
|
containers/python/rpc_test_container.py
|
EyalSel/clipper
|
3c5a1cc6ce59e0ccd778f526a50808d0e7b2576f
|
[
"Apache-2.0"
] | 9
|
2019-09-03T14:05:26.000Z
|
2021-12-22T07:17:27.000Z
|
import rpc
import os
import sys
import numpy as np
import json
class RPCTestContainer(rpc.ModelContainerBase):
def __init__(self, rpc_service):
self.rpc_service = rpc_service
def predict_doubles(self, inputs):
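        # Each "doubles" input carries a Clipper timestamp as its first
        # element; the reply is the JSON-encoded list of RPC events received
        # at or after that time, with the heartbeat sent just before Clipper
        # came online prepended. Note that, as written, the loop always
        # resolves against inputs[0], so every output is identical.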
outputs = []
for input_item in inputs:
input_item = inputs[0]
clipper_time = input_item[0]
event_history = self.rpc_service.get_event_history()
recent_events = []
for i in range(0, len(event_history)):
curr_event = event_history[i]
if curr_event[0] >= clipper_time:
if i > 0 and len(recent_events) == 0:
# Capture the heartbeat message
# sent before Clipper came online
recent_events.append(event_history[i - 1][1])
recent_events.append(event_history[i][1])
outputs.append(json.dumps(recent_events))
return outputs
if __name__ == "__main__":
ip = "127.0.0.1"
port = 7000
input_type = "doubles"
model_version = 1
rpc_service = rpc.RPCService(collect_metrics=False, read_config=False)
rpc_service.model_name = "rpctest_py"
    rpc_service.model_version = model_version
    rpc_service.host = ip
    rpc_service.port = port
    rpc_service.input_type = input_type
model = RPCTestContainer(rpc_service)
rpc_service.start(model)
| 30.978261
| 74
| 0.61193
|
72801996b90f7c15a7bbd56251dc8790958fb2c5
| 81,567
|
py
|
Python
|
nxt_editor/dockwidgets/property_editor.py
|
nxt-dev/nxt_editor
|
7497c280f8c27a06d3cfc2be1de5d9eb80eca913
|
[
"MIT"
] | 131
|
2020-12-03T08:01:26.000Z
|
2022-03-07T03:41:37.000Z
|
nxt_editor/dockwidgets/property_editor.py
|
zklins/nxt_editor
|
57386a561c5e1d216a8998ef63a6bd9b02c62b7c
|
[
"MIT"
] | 127
|
2020-12-07T21:43:02.000Z
|
2022-02-17T22:31:14.000Z
|
nxt_editor/dockwidgets/property_editor.py
|
zklins/nxt_editor
|
57386a561c5e1d216a8998ef63a6bd9b02c62b7c
|
[
"MIT"
] | 17
|
2020-12-08T08:06:44.000Z
|
2021-11-18T05:40:11.000Z
|
# Built-in
import textwrap
import sys
import logging
from functools import partial
# External
from Qt import QtWidgets
from Qt import QtGui
from Qt import QtCore
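# Some bindings exposed through the Qt.py shim do not provide
# QStringListModel on QtCore; fall back to PySide2's QtCore, which does.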
try:
QtCore.QStringListModel
except AttributeError:
del QtCore
from PySide2 import QtCore
# Internal
from nxt_editor import user_dir
from nxt_editor.dockwidgets.dock_widget_base import DockWidgetBase
from nxt_editor.pixmap_button import PixmapButton
from nxt_editor.label_edit import LabelEdit
from nxt_editor import colors, LOGGER_NAME
from nxt_editor.decorator_widgets import OpinionDots
from nxt import DATA_STATE, NODE_ERRORS, nxt_path
from nxt.nxt_node import INTERNAL_ATTRS, META_ATTRS
from nxt import tokens
# Fixme: Should this be a pref?
HISTORICAL_MAX_CHARS = 50
logger = logging.getLogger(LOGGER_NAME)
class PropertyEditor(DockWidgetBase):
PREF_KEY = user_dir.USER_PREF.ATTR_SORTING
def __init__(self, graph_model=None, title='Property Editor', parent=None,
minimum_width=300, minimum_height=350):
super(PropertyEditor, self).__init__(title=title,
parent=parent,
minimum_width=minimum_width,
minimum_height=minimum_height)
self.setObjectName('Property Editor')
# local attributes
self.main_window = parent
self.authoring_actions = parent.node_actions
self._actions = parent.property_manager_actions
self.comment_actions = parent.node_comment_actions
self.stage_model = graph_model
        self._resolved = True
        self.node_path = ''
self.node_instance = ''
self.node_inst_source = ('', '')
self.inst_layer_colors = []
self.node_name = ''
self.node_pos = (0.0, 0.0)
self.node_comment = ''
self.node_comment_source = ('', '')
self.comment_layer_colors = []
self.node_execute = ''
self.node_exec_source = ('', '')
self.exec_layer_colors = []
self.node_child_order = []
self.co_layer_colors = []
self.node_attr_names = []
self.node_enabled = True
self.enabled_layer_colors = []
self.selection = []
self.user_sort_pref = user_dir.user_prefs.get(self.PREF_KEY)
# main layout
self.main = QtWidgets.QWidget(parent=self)
self.setWidget(self.main)
self.layout = QtWidgets.QVBoxLayout()
self.layout.setContentsMargins(0, 0, 0, 0)
self.layout.setSpacing(0)
self.main.setLayout(self.layout)
self.background_frame = QtWidgets.QFrame(self)
self.background_frame.setStyleSheet('background-color: #3E3E3E; border-radius: 0px;')
self.layout.addWidget(self.background_frame)
# ACTIONS
self.addActions(self._actions.actions())
# Inst path
self.localize_inst_path_action = self._actions.localize_inst_path_action
self.localize_inst_path_action.triggered.connect(
self.localize_inst_path)
self.revert_inst_path_action = self._actions.revert_inst_path_action
self.revert_inst_path_action.triggered.connect(self.revert_inst_path)
# Exec path
self.localize_exec_path_action = self._actions.localize_exec_path_action
self.localize_exec_path_action.triggered.connect(
self.localize_exec_path)
self.revert_exec_path_action = self._actions.revert_exec_path_action
self.revert_exec_path_action.triggered.connect(self.revert_exec_path)
# Attrs
self.add_attr_action = self._actions.add_attr_action
self.add_attr_action.triggered.connect(self.add_attr)
self.remove_attr_action = self._actions.remove_attr_action
self.remove_attr_action.triggered.connect(self.remove_selected_attrs)
# Copy Actions
self.copy_raw_action = self._actions.copy_raw_action
copy_raw = partial(self.copy_selected_attrs, DATA_STATE.RAW)
self.copy_raw_action.triggered.connect(copy_raw)
self.copy_resolved_action = self._actions.copy_resolved_action
copy_resolved = partial(self.copy_selected_attrs, DATA_STATE.RESOLVED)
self.copy_resolved_action.triggered.connect(copy_resolved)
self.copy_cached_action = self._actions.copy_cached_action
copy_cached = partial(self.copy_selected_attrs, DATA_STATE.CACHED)
self.copy_cached_action.triggered.connect(copy_cached)
# Localize/Revert
self.localize_attr_action = self._actions.localize_attr_action
self.localize_attr_action.triggered.connect(self.localize_attrs)
self.revert_attr_action = self._actions.revert_attr_action
self.revert_attr_action.triggered.connect(self.revert_attrs)
############
# properties
############
self.properties_frame = QtWidgets.QFrame(self)
self.properties_frame.setStyleSheet('background-color: #3E3E3E; border-radius: 0px;')
self.layout.addWidget(self.properties_frame)
self.properties_layout = QtWidgets.QVBoxLayout()
self.properties_layout.setContentsMargins(4, 0, 4, 0)
self.properties_layout.setSpacing(0)
self.properties_frame.setLayout(self.properties_layout)
# name
self.name_layout = QtWidgets.QHBoxLayout()
self.name_layout.setContentsMargins(0, 0, 0, 0)
self.properties_layout.addLayout(self.name_layout)
self.name_label = LabelEdit(parent=self)
self.name_label.setFont(QtGui.QFont("Roboto", 14))
self.name_label.nameChangeRequested.connect(self.edit_name)
self.name_layout.addWidget(self.name_label, 0, QtCore.Qt.AlignLeft)
self.name_edit_button = PixmapButton(pixmap=':icons/icons/pencil.png',
pixmap_hover=':icons/icons/pencil_hover.png',
pixmap_pressed=':icons/icons/pencil.png',
size=16,
parent=self)
self.name_edit_button.pressed.connect(self.name_label.edit_text)
self.name_layout.addWidget(self.name_edit_button, 0, QtCore.Qt.AlignLeft)
self.name_layout.addStretch()
# details
self.details_layout = QtWidgets.QGridLayout()
self.details_layout.setContentsMargins(20, 4, 0, 4)
self.details_layout.setSpacing(2)
self.properties_layout.addLayout(self.details_layout)
# path
self.path_label = QtWidgets.QLabel('Path', parent=self)
self.details_layout.addWidget(self.path_label, 0, 0)
self.path_field = QtWidgets.QLineEdit(parent=self)
self.path_field.setAlignment(QtCore.Qt.AlignVCenter)
self.path_field.setStyleSheet('border-radius: 11px; border: 1px solid transparent; background-color: #323232')
self.path_field.setFont(QtGui.QFont("Roboto Mono", 8))
self.path_field.setAlignment(QtCore.Qt.AlignVCenter)
self.path_field.setReadOnly(True)
self.details_layout.addWidget(self.path_field, 0, 1)
# instance
self.instance_label = QtWidgets.QLabel('Instance', parent=self)
self.details_layout.addWidget(self.instance_label, 1, 0)
self.instance_layout = QtWidgets.QGridLayout()
self.details_layout.addLayout(self.instance_layout, 1, 1)
self.instance_field = LineEdit(parent=self)
self.instance_field.focus_changed.connect(self.focus_instance_field)
self.instance_field.setFont(QtGui.QFont("Roboto Mono", 8))
self.instance_field.setAlignment(QtCore.Qt.AlignVCenter)
self.instance_field.editingFinished.connect(self.edit_instance)
self.instance_layout.addWidget(self.instance_field, 0, 0)
self.instance_field_model = QtCore.QStringListModel()
self.instance_field_completer = QtWidgets.QCompleter()
self.instance_field_completer.popup().setStyleSheet('border: 1px solid transparent; background-color: #323232; color: white')
self.instance_field_completer.setModel(self.instance_field_model)
self.instance_field.setCompleter(self.instance_field_completer)
self.instance_field.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.instance_field.customContextMenuRequested.connect(self.instance_context_menu)
self.locate_instance_button = PixmapButton(pixmap=':icons/icons/locate_off.png',
pixmap_hover=':icons/icons/locate_on_hover.png',
pixmap_pressed=':icons/icons/locate_on_pressed.png',
size=16,
parent=self.properties_frame)
self.locate_instance_button.setToolTip('Locate Instance')
self.locate_instance_button.setStyleSheet('QToolTip {color: white; border: 1px solid #3E3E3E}')
self.locate_instance_button.setFixedWidth(17)
self.locate_instance_button.setFixedHeight(16)
self.locate_instance_button.clicked.connect(self.view_instance_node)
self.instance_layout.addWidget(self.locate_instance_button, 0, 1)
self.instance_opinions = OpinionDots(self, 'Instance Opinions')
self.instance_layout.addWidget(self.instance_opinions, 0, 2)
self.remove_instance_button = PixmapButton(pixmap=':icons/icons/delete.png',
pixmap_hover=':icons/icons/delete_hover.png',
pixmap_pressed=':icons/icons/delete_pressed.png',
size=12,
parent=self.properties_frame)
self.remove_instance_button.setToolTip('Revert Instance')
self.remove_instance_button.setStyleSheet('QToolTip {color: white; border: 1px solid #3E3E3E}')
self.remove_instance_button.set_action(self.revert_inst_path_action)
self.instance_layout.addWidget(self.remove_instance_button, 0, 3)
# execute in
self.execute_label = QtWidgets.QLabel('Exec Input', parent=self)
self.details_layout.addWidget(self.execute_label, 2, 0)
self.execute_layout = QtWidgets.QGridLayout()
self.details_layout.addLayout(self.execute_layout, 2, 1)
self.execute_field = LineEdit(parent=self)
self.execute_field.setStyleSheet(line_edit_style_factory('white'))
self.execute_field.setFont(QtGui.QFont("Roboto Mono", 8))
self.execute_field.setAlignment(QtCore.Qt.AlignVCenter)
self.execute_field.editingFinished.connect(self.edit_exec_source)
self.execute_field.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.execute_field.customContextMenuRequested.connect(self.exec_context_menu)
self.execute_layout.addWidget(self.execute_field, 0, 0)
self.execute_field_model = QtCore.QStringListModel()
self.execute_field_completer = QtWidgets.QCompleter()
self.execute_field_completer.popup().setStyleSheet('border: 1px solid transparent; background-color: #323232; color: white')
self.execute_field_completer.setModel(self.execute_field_model)
self.execute_field.setCompleter(self.execute_field_completer)
self.execute_opinions = OpinionDots(self, 'Execute Opinions')
self.execute_layout.addWidget(self.execute_opinions, 0, 1)
self.remove_exec_source_button = PixmapButton(pixmap=':icons/icons/delete.png',
pixmap_hover=':icons/icons/delete_hover.png',
pixmap_pressed=':icons/icons/delete_pressed.png',
size=12,
parent=self.properties_frame)
self.remove_exec_source_button.setToolTip('Revert Execute Source')
self.remove_exec_source_button.setStyleSheet('QToolTip {color: white; border: 1px solid #3E3E3E}')
self.remove_exec_source_button.set_action(self.revert_exec_path_action)
self.execute_layout.addWidget(self.remove_exec_source_button, 0, 2)
# execute_order
self.child_order_label = QtWidgets.QLabel('Child Order',
parent=self)
self.details_layout.addWidget(self.child_order_label, 3, 0)
self.child_order_layout = QtWidgets.QGridLayout()
self.details_layout.addLayout(self.child_order_layout, 3, 1)
self.child_order_field = LineEdit(parent=self)
self.child_order_field.setStyleSheet('border-radius: 11px; border: 1px solid transparent; background-color: #232323')
self.child_order_field.setFont(QtGui.QFont("Roboto Mono", 8))
self.child_order_field.setAlignment(QtCore.Qt.AlignVCenter)
self.child_order_field.accept.connect(self.accept_edit_child_order)
self.child_order_field.cancel.connect(self.cancel_edit_child_order)
self.child_order_layout.addWidget(self.child_order_field, 0, 0)
self.child_order_field_model = QtCore.QStringListModel()
self.child_order_field_completer = QtWidgets.QCompleter()
self.child_order_field_completer.popup().setStyleSheet('border: 1px solid transparent; background-color: #323232; color: white')
self.child_order_field_completer.setModel(self.child_order_field_model)
self.child_order_field.setCompleter(self.child_order_field_completer)
self.child_order_opinions = OpinionDots(self, 'Child Order Opinions')
self.child_order_layout.addWidget(self.child_order_opinions, 0, 1)
self.revert_child_order_button = PixmapButton(
pixmap=':icons/icons/delete.png',
pixmap_hover=':icons/icons/delete_hover.png',
pixmap_pressed=':icons/icons/delete_pressed.png',
size=12,
parent=self.properties_frame)
self.revert_child_order_button.setToolTip('Revert Child Order')
self.revert_child_order_button.setStyleSheet(
'QToolTip {color: white; border: 1px solid #3E3E3E}')
self.revert_child_order_button.clicked.connect(self.revert_child_order)
self.child_order_layout.addWidget(self.revert_child_order_button, 0, 2)
# position
self.position_label = QtWidgets.QLabel('Position', parent=self)
self.position_label.setMaximumWidth(80)
self.details_layout.addWidget(self.position_label, 4, 0)
self.position_layout = QtWidgets.QHBoxLayout()
self.details_layout.addLayout(self.position_layout, 4, 1)
self.positionX_field = NodePositionSpinbox(parent=self)
self.positionX_field.setFixedWidth(80)
self.positionX_field.setAlignment(QtCore.Qt.AlignRight)
self.positionX_field.setSingleStep(1)
self.positionX_field.setMaximum(10000)
self.positionX_field.setMinimum(-10000)
self.positionX_field.stepChanged.connect(self.edit_position)
self.positionX_field.editingFinished.connect(self.edit_position)
self.position_layout.addWidget(self.positionX_field, 0, QtCore.Qt.AlignLeft)
self.positionY_field = NodePositionSpinbox(parent=self)
self.positionY_field.setFixedWidth(80)
self.positionY_field.setAlignment(QtCore.Qt.AlignRight)
self.positionY_field.setSingleStep(1)
self.positionY_field.setMaximum(10000)
self.positionY_field.setMinimum(-10000)
self.positionY_field.stepChanged.connect(self.edit_position)
self.positionY_field.editingFinished.connect(self.edit_position)
self.position_layout.addWidget(self.positionY_field, 0, QtCore.Qt.AlignLeft)
self.enabled_checkbox_label = QtWidgets.QLabel('Enabled: ',
parent=self)
self.position_layout.addWidget(self.enabled_checkbox_label, 0,
QtCore.Qt.AlignLeft)
self.enabled_checkbox = QtWidgets.QCheckBox()
self.enabled_checkbox.stateChanged.connect(self.toggle_node_enabled)
self.position_layout.addWidget(self.enabled_checkbox, 0,
QtCore.Qt.AlignLeft)
self.enabled_opinions = OpinionDots(self, 'Enabled Opinions')
self.position_layout.addWidget(self.enabled_opinions, 0,
QtCore.Qt.AlignLeft)
icn = ':icons/icons/'
self.revert_enabled = PixmapButton(pixmap=icn+'delete.png',
pixmap_hover=icn+'delete_hover.png',
pixmap_pressed=icn+'delete_pressed.png',
size=12,
parent=self.properties_frame)
self.revert_enabled.setToolTip('Revert Enabled State')
self.revert_enabled.setStyleSheet('QToolTip {color: white; '
                                          'border: 1px solid #3E3E3E'
'}')
self.revert_enabled.clicked.connect(self.revert_node_enabled)
self.position_layout.addWidget(self.revert_enabled, 0,
QtCore.Qt.AlignLeft)
self.position_layout.addStretch()
# comment
self.comment_label = QtWidgets.QLabel('Comment')
self.details_layout.addWidget(self.comment_label, 5, 0)
self.comment_layout = QtWidgets.QGridLayout()
self.details_layout.addLayout(self.comment_layout, 5, 1)
self.comment_field = TextEdit(self, 'Node Comment')
self.comment_field.addActions(self.comment_actions.actions())
self.comment_field.setFixedHeight(44)
self.comment_field.setTabChangesFocus(False)
self.comment_field.accept.connect(self.accept_edit_comment)
self.comment_field.cancel.connect(self.cancel_edit_comment)
self.comment_layout.addWidget(self.comment_field, 0, 0)
self.comment_opinions = OpinionDots(self, 'Comment Opinions', vertical=True)
self.comment_layout.addWidget(self.comment_opinions, 0, 1)
self.remove_comment_button = PixmapButton(pixmap=':icons/icons/delete.png',
pixmap_hover=':icons/icons/delete_hover.png',
pixmap_pressed=':icons/icons/delete_pressed.png',
size=12,
parent=self.properties_frame)
self.remove_comment_button.setToolTip('Revert Comment')
self.remove_comment_button.setStyleSheet('QToolTip {color: white; border: 1px solid #3E3E3E}')
self.remove_comment_button.clicked.connect(self.remove_comment)
self.comment_layout.addWidget(self.remove_comment_button, 0, 2)
# Comment
self.accept_comment_action = self.comment_actions.accept_comment_action
self.accept_comment_action.triggered.connect(self.accept_edit_comment)
self.cancel_comment_action = self.comment_actions.cancel_comment_action
self.cancel_comment_action.triggered.connect(self.cancel_edit_comment)
##################
# attributes table
##################
style = '''
QTableView {
outline: none;
border-radius: 11px;
border: 1px solid transparent;
font-family: "Roboto Mono";
font-size: 12px
}
QTableView::item {
padding: 3px;
}
QTableView::item:selected:hover {
color: #148CD2;
}
QTableView:item:selected {
background-color: #113343;
color: white;
}
QHeaderView {
border-radius: 8px;
border: 0px solid transparent;
}
QHeaderView::section::horizontal::first {
border-top-left-radius: 6px;
}
QHeaderView::section::horizontal::last {
border-top-right-radius: 6px;
}
QToolTip {
font-family: Roboto Mono;
color: white;
border: 1px solid #3E3E3E
}
'''
self.attributes_widget = QtWidgets.QWidget(self)
self.attributes_widget.setStyleSheet('background-color: #232323')
self.properties_layout.addWidget(self.attributes_widget, 1)
self.attributes_layout = QtWidgets.QVBoxLayout()
self.attributes_layout.setContentsMargins(0, 0, 0, 0)
self.attributes_layout.setSpacing(0)
self.attributes_widget.setLayout(self.attributes_layout)
self.table_view = AttrsTableView(self)
self.table_view.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
self.table_view.setEditTriggers(QtWidgets.QAbstractItemView.DoubleClicked)
self.table_view.setSortingEnabled(True)
self.table_view.setStyleSheet(style)
self.table_view.verticalHeader().setMinimumSectionSize(12)
self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.table_view.customContextMenuRequested.connect(self.custom_context_menu)
self.attributes_layout.addWidget(self.table_view, 1)
self.attributes_layout.addStretch()
# headers
horizontal_header = self.table_view.horizontalHeader()
horizontal_header.setSectionsMovable(True)
horizontal_header.setStretchLastSection(True)
header_dict = {COLUMNS.name: 'Name',
COLUMNS.value: 'Value',
COLUMNS.nxt_type: 'Type',
COLUMNS.source: 'Source',
COLUMNS.locality: 'Locality',
COLUMNS.comment: 'Comment'}
self.header_names = COLUMNS.column_dict_to_list(header_dict)
self.model = PropertyModel(graph_model=self.stage_model,
node_path=self.node_path,
view=self.table_view,
headers=self.header_names,
parent=self)
self.proxy_model = QtCore.QSortFilterProxyModel()
self.proxy_model.setSourceModel(self.model)
self.table_view.setModel(self.proxy_model)
self.table_view.selectionModel().selectionChanged.connect(self.set_selection)
# add remove row
self.property_options_layout = QtWidgets.QHBoxLayout()
self.property_options_layout.setContentsMargins(10, 4, 10, 10)
self.property_options_layout.setSpacing(8)
self.properties_layout.addLayout(self.property_options_layout)
self.property_options_layout.addStretch(10)
# Add attr button
self.add_attr_button = PixmapButton(pixmap=':icons/icons/plus.png',
pixmap_hover=':icons/icons/plus_hover.png',
pixmap_pressed=':icons/icons/plus_hover.png',
size=10,
parent=self.properties_frame)
self.add_attr_button.setFixedWidth(10)
self.add_attr_button.set_action(self.add_attr_action)
self.property_options_layout.addWidget(self.add_attr_button)
# Remove attr button
self.remove_attr_button = PixmapButton(pixmap=':icons/icons/minus.png',
pixmap_hover=':icons/icons/minus_hover.png',
pixmap_pressed=':icons/icons/minus_hover.png',
size=10,
parent=self.properties_frame)
self.remove_attr_button.setFixedWidth(10)
self.remove_attr_button.set_action(self.remove_attr_action)
self.property_options_layout.addWidget(self.remove_attr_button)
if not self.main_window.in_startup:
self.update_styles()
self.display_properties()
@property
def view(self):
return self.main_window.view
@property
def resolved(self):
return self.stage_model.data_state if self.stage_model else True
def update_resolved(self):
self.model.set_represented_node(node_path=self.node_path)
def set_selection(self):
indexes = [self.proxy_model.mapToSource(p) for p in self.table_view.selectedIndexes()]
self.model.selected_indexes = indexes
def set_stage_model(self, stage_model):
super(PropertyEditor, self).set_stage_model(stage_model=stage_model)
if self.stage_model:
self.model.stage_model = self.stage_model
self.set_represented_node()
def set_stage_model_connections(self, model, connect):
self.model_signal_connections = [
(model.node_focus_changed, self.set_represented_node),
(model.nodes_changed, self.handle_nodes_changed),
(model.attrs_changed, self.handle_attrs_changed),
(model.data_state_changed, self.update_resolved),
(model.node_moved, self.handle_node_moved),
(model.comp_layer_changed, self.set_represented_node),
(model.target_layer_changed, self.set_represented_node),
]
super(PropertyEditor, self).set_stage_model_connections(model,
connect)
def on_stage_model_destroyed(self):
super(PropertyEditor, self).on_stage_model_destroyed()
self.properties_frame.hide()
def handle_nodes_changed(self, nodes):
if self.node_path in nodes:
self.set_represented_node()
def handle_attrs_changed(self, attr_paths):
for path in attr_paths:
if self.node_path == nxt_path.node_path_from_attr_path(path):
self.set_represented_node()
return
def handle_node_moved(self, node_path):
if node_path == self.node_path:
self.node_pos = self.stage_model.get_node_pos(node_path)
self.update_properties()
def set_represented_node(self):
self.node_path = self.stage_model.node_focus
if not self.node_path:
self.clear()
self.properties_frame.hide()
return
self.properties_frame.show()
if self.user_sort_pref:
order_str = self.user_sort_pref['order']
if order_str == 'AscendingOrder':
order = QtCore.Qt.AscendingOrder
else:
order = QtCore.Qt.DescendingOrder
col = self.user_sort_pref['column']
if self.model.rowCount(self):
self.model.horizontal_header.blockSignals(True)
self.model.horizontal_header.setSortIndicator(col, order)
self.model.horizontal_header.blockSignals(False)
self.node_name = nxt_path.node_name_from_node_path(self.node_path)
if not self.node_name:
self.clear()
return
disp_layer = self.stage_model.comp_layer
# set instance completer options
# Todo: node_path completer logic needed!
top_nodes = self.stage_model.get_node_sibling_paths(self.node_path)
if top_nodes:
self.instance_field_model.setStringList(top_nodes)
node_path = self.node_path
# set execute completer options
sibling_node_paths = self.stage_model.get_node_sibling_paths(node_path)
if sibling_node_paths:
self.execute_field_model.setStringList(sibling_node_paths)
# set node data
if self.stage_model.data_state != DATA_STATE.RAW:
expand = True
else:
expand = False
self.node_instance = self.stage_model.get_node_instance_path(node_path,
disp_layer,
expand)
self.node_inst_source = self.stage_model.get_node_attr_source(node_path,
INTERNAL_ATTRS.INSTANCE_PATH,
disp_layer)
inst_layers = self.stage_model.get_layers_with_opinion(self.node_path,
INTERNAL_ATTRS.INSTANCE_PATH)
self.inst_layer_colors = self.stage_model.get_layer_colors(inst_layers)
self.node_pos = self.stage_model.get_node_pos(self.node_path)
self.node_comment = self.stage_model.get_node_comment(self.node_path,
disp_layer)
self.node_comment_source = self.stage_model.get_node_attr_source(node_path,
INTERNAL_ATTRS.COMMENT,
disp_layer)
comment_layers = self.stage_model.get_layers_with_opinion(self.node_path,
INTERNAL_ATTRS.COMMENT)
self.comment_layer_colors = self.stage_model.get_layer_colors(comment_layers)
self.node_execute = self.stage_model.get_node_exec_in(node_path,
disp_layer)
self.node_exec_source = self.stage_model.get_node_attr_source(node_path,
INTERNAL_ATTRS.EXECUTE_IN,
disp_layer)
exec_layers = self.stage_model.get_layers_with_opinion(self.node_path,
INTERNAL_ATTRS.EXECUTE_IN)
self.exec_layer_colors = self.stage_model.get_layer_colors(exec_layers)
self.node_enabled = self.stage_model.get_node_enabled(self.node_path)
enabled_layers = self.stage_model.get_layers_with_opinion(self.node_path,
INTERNAL_ATTRS.ENABLED)
self.enabled_layer_colors = self.stage_model.get_layer_colors(enabled_layers)
self.node_child_order = self.stage_model.get_node_child_order(node_path,
disp_layer)
co_layers = self.stage_model.get_layers_with_opinion(self.node_path,
INTERNAL_ATTRS.CHILD_ORDER)
self.co_layer_colors = self.stage_model.get_layer_colors(co_layers)
# update general
self.update_name()
self.update_properties()
self.update_styles()
self.display_properties()
# update attribute model
self.model.set_represented_node(node_path=self.node_path)
def view_instance_node(self):
instance_path = self.instance_field.text()
if instance_path:
self.stage_model.select_and_frame(instance_path)
def focus_instance_field(self, in_focus):
"""Ensures the path is not expanded when the instance field gains
focus. Listens to the custom focus signal.
:param in_focus: bool
:return: None
"""
expand = not in_focus
if self.instance_field.has_focus:
return
if in_focus:
layer = self.stage_model.target_layer
else:
layer = self.stage_model.comp_layer
path = self.stage_model.get_node_instance_path(self.node_path, layer,
expand=expand)
if not path:
layer = self.stage_model.comp_layer
comp_path = self.stage_model.get_node_instance_path(self.node_path,
layer,
expand=expand)
if comp_path != path:
path = comp_path
self.instance_field.setText(path)
def update_properties(self):
self.name_label.setText(self.node_name)
self.path_field.setText(self.node_path)
self.instance_field.setText(self.node_instance)
self.instance_opinions.layer_colors = self.inst_layer_colors
self.positionX_field.setValue(self.node_pos[0])
self.positionY_field.setValue(self.node_pos[1])
self.comment_field.setText(self.node_comment)
self.comment_opinions.layer_colors = self.comment_layer_colors
self.execute_field.setText(self.node_execute)
self.execute_opinions.layer_colors = self.exec_layer_colors
old_signal_state = self.enabled_checkbox.blockSignals(True)
if self.node_enabled:
check_box_state = QtCore.Qt.Checked
else:
check_box_state = QtCore.Qt.Unchecked
self.enabled_checkbox.setCheckState(check_box_state)
self.enabled_checkbox.blockSignals(old_signal_state)
self.enabled_opinions.layer_colors = self.enabled_layer_colors
self.child_order_field.setText(' '.join(self.node_child_order))
self.child_order_opinions.layer_colors = self.co_layer_colors
def update_styles(self):
if not self.stage_model or not self.node_path:
return
# get colors
tgt_layer_color = self.stage_model.get_layer_color(
self.stage_model.target_layer) or 'transparent'
# style position widgets
top_layer_color = self.stage_model.get_layer_color(
self.stage_model.top_layer) or 'transparent'
pos_style = '''
QAbstractSpinBox {
background-color: #232323;
border: 1px solid transparent;
color: #F0F0F0;
padding-top: 2px;
padding-bottom: 2px;
padding-left: 0px;
padding-right: 0px;
border-radius: 11px;
}
QAbstractSpinBox:hover {
border: 1px solid %s;
}
QAbstractSpinBox:focus {
border: 2px solid %s;
}
QAbstractSpinBox:up-button {
border-left: 0px solid #3E3E3E;
padding-right: 6px;
padding-top: 3px;
}
QAbstractSpinBox:down-button {
border-left: 0px solid #3E3E3E;
padding-right: 6px;
padding-bottom: 3px;
}
''' % (top_layer_color, top_layer_color)
self.positionX_field.setStyleSheet(pos_style)
self.positionY_field.setStyleSheet(pos_style)
errors = self.stage_model.get_node_error(self.node_path,
self.stage_model.comp_layer)
# other fields
ec = self.stage_model.get_layer_color(self.node_exec_source[0])
e_s = line_edit_style_factory(ec, tgt_layer_color)
self.execute_field.setStyleSheet(e_s)
co_s = line_edit_style_factory(tgt_layer_color=tgt_layer_color)
self.child_order_field.setStyleSheet(co_s)
cc = self.stage_model.get_layer_color(self.node_comment_source[0])
c_s = line_edit_style_factory(cc, tgt_layer_color)
self.comment_field.setStyleSheet(c_s)
# instance field
inst_color = self.stage_model.get_layer_color(self.node_inst_source[0])
inst_style = line_edit_style_factory(inst_color, tgt_layer_color)
self.instance_field.setStyleSheet(inst_style)
for error in errors:
if error == NODE_ERRORS.INSTANCE:
error_style = line_edit_style_factory(inst_color,
tgt_layer_color,
colors.ERROR.getRgb())
self.instance_field.setStyleSheet(error_style)
elif error == NODE_ERRORS.EXEC:
error_style = line_edit_style_factory(ec,
tgt_layer_color,
colors.ERROR.getRgb())
self.execute_field.setStyleSheet(error_style)
elif error == NODE_ERRORS.ORPHANS:
error_style = line_edit_style_factory('white',
tgt_layer_color,
colors.ERROR.getRgb())
self.child_order_field.setStyleSheet(error_style)
def display_properties(self):
# display properties if the node path is valid
if not self.node_path or not self.stage_model:
self.properties_frame.hide()
elif self.properties_frame.isHidden():
self.properties_frame.show()
# display properties if the node is a root node
if self.stage_model:
is_world = self.node_path == nxt_path.WORLD
is_top = self.stage_model.is_top_node(self.node_path)
self.name_label.setEnabled(not is_world)
self.name_edit_button.setVisible(not is_world)
self.instance_label.setVisible(not is_world)
self.instance_field.setVisible(not is_world)
self.locate_instance_button.setVisible(not is_world)
self.remove_instance_button.setVisible(not is_world)
self.instance_opinions.setVisible(not is_world)
self.execute_field.setVisible(is_top)
self.execute_label.setVisible(is_top)
self.remove_exec_source_button.setVisible(is_top)
self.execute_opinions.setVisible(not is_world)
self.child_order_label.setVisible(not is_world)
self.child_order_field.setVisible(not is_world)
self.revert_child_order_button.setVisible(not is_world)
self.child_order_opinions.setVisible(not is_world)
self.position_label.setVisible(is_top)
self.positionX_field.setVisible(is_top)
self.positionY_field.setVisible(is_top)
self.enabled_checkbox.setVisible(not is_world)
self.enabled_checkbox_label.setVisible(not is_world)
self.revert_enabled.setVisible(not is_world)
self.enabled_opinions.setVisible(not is_world)
def edit_name(self, new_name):
self.stage_model.set_node_name(self.node_path, new_name, self.stage_model.target_layer)
self.node_name = nxt_path.node_name_from_node_path(self.node_path)
self.update_name()
def update_name(self):
self.name_label.setText(self.node_name)
def edit_instance(self):
comp_layer = self.stage_model.comp_layer
target_layer = self.stage_model.target_layer
if self.stage_model.node_exists(self.node_path, target_layer):
lookup_layer = target_layer
else:
lookup_layer = comp_layer
cur_inst_path = self.stage_model.get_node_instance_path(self.node_path,
lookup_layer,
expand=False)
instance_path = str(self.instance_field.text())
if (not cur_inst_path and not instance_path
or cur_inst_path == instance_path):
            # I want to use .isModified() but the completer doesn't count as
# a modification, I think it internally uses setText - Lucas 2020
return
if instance_path is not None:
self.stage_model.set_node_instance(self.node_path, instance_path,
target_layer)
elif cur_inst_path is not None:
self.stage_model.revert_node_instance(self.node_path, target_layer)
self.update_styles()
cur_inst_path = self.stage_model.get_node_instance_path(self.node_path,
comp_layer,
expand=False)
self.instance_field.clearFocus()
self.instance_field.setText(cur_inst_path)
def edit_exec_source(self):
comp_layer = self.stage_model.comp_layer
target_layer = self.stage_model.target_layer
cur_exec_path = self.stage_model.get_node_exec_in(self.node_path,
comp_layer)
source = str(self.execute_field.text())
if not cur_exec_path and not source or cur_exec_path == source:
            # I want to use .isModified() but the completer doesn't count as
# a modification, I think it internally uses setText - Lucas 2020
return
self.stage_model.set_node_exec_in(self.node_path, source, target_layer)
real_exec_path = self.stage_model.get_node_exec_in(self.node_path,
comp_layer)
self.execute_field.setText(real_exec_path)
self.update_styles()
def remove_exec_source(self):
self.stage_model.set_node_exec_in(self.node_path, None,
self.stage_model.target_layer)
def accept_edit_child_order(self):
child_order = self.child_order_field.text().split()
if self.stage_model:
self.stage_model.set_node_child_order(self.node_path, child_order)
self.node_child_order = self.stage_model.get_node_child_order(self.node_path, self.stage_model.comp_layer)
self.update_properties()
def cancel_edit_child_order(self):
self.child_order_field.clearFocus()
self.update_properties()
def revert_child_order(self):
if self.stage_model:
self.stage_model.revert_child_order(node_path=self.node_path)
def toggle_node_enabled(self):
if self.stage_model:
            button_state = self.enabled_checkbox.checkState() == QtCore.Qt.Checked
self.stage_model.set_node_enabled(self.node_path, button_state)
return
def revert_node_enabled(self):
if self.stage_model:
self.stage_model.revert_node_enabled(self.node_path)
def edit_position(self):
x = self.positionX_field.value()
y = self.positionY_field.value()
if not self.node_path or not self.stage_model.node_exists(self.node_path, self.stage_model.comp_layer):
self.clear()
return
self.node_pos = (x, y)
self.stage_model.set_nodes_pos({self.node_path: self.node_pos})
def accept_edit_comment(self):
comment = self.comment_field.toPlainText()
if self.stage_model:
if (not comment and not self.node_comment
or comment == self.node_comment):
return
self.stage_model.set_node_comment(self.node_path, comment, self.stage_model.target_layer)
self.node_comment = self.stage_model.get_node_comment(self.node_path, self.stage_model.comp_layer)
self.update_properties()
def cancel_edit_comment(self):
self.comment_field.blockSignals(True)
self.comment_field.clearFocus()
self.comment_field.blockSignals(False)
self.update_properties()
def remove_comment(self):
if self.stage_model:
self.stage_model.set_node_comment(node_path=self.node_path,
comment=None,
layer=self.stage_model.target_layer)
def add_attr(self):
if self.node_path:
self.stage_model.add_node_attr(node_path=self.node_path,
layer=self.stage_model.target_layer)
def get_selected_attr_names(self):
selection = self.table_view.selectedIndexes()
attr_names = set()
for index in selection:
proxy_index = self.proxy_model.mapToSource(index)
target_row = proxy_index.row()
attr_names.add(self.model.get_data()[target_row][0])
return attr_names
def remove_selected_attrs(self):
for attr_name in self.get_selected_attr_names():
self.stage_model.delete_node_attr(node_path=self.node_path,
attr_name=attr_name)
def copy_selected_attrs(self, data_state):
attr_names = self.get_selected_attr_names()
self.stage_model.copy_attrs_val(self.node_path, attr_names, data_state)
def clear(self):
# clear data
self.node_path = str()
self.node_instance = str()
self.node_name = str()
self.node_pos = (float(), float())
self.node_comment = ''
self.node_attr_names = list()
# update general
self.update_name()
self.update_properties()
self.display_properties()
# update attribute model
self.model.set_represented_node(node_path=self.node_path)
def localize_inst_path(self):
self.instance_field.blockSignals(True)
self.stage_model.localize_node_instance(self.node_path)
self.instance_field.blockSignals(False)
def revert_inst_path(self):
self.instance_field.blockSignals(True)
comp_layer = self.stage_model.comp_layer
cur_inst_path = self.stage_model.get_node_instance_path(self.node_path,
comp_layer,
expand=False)
if cur_inst_path is not None:
self.stage_model.revert_node_instance(self.node_path)
self.instance_field.blockSignals(False)
def localize_exec_path(self):
self.execute_field.blockSignals(True)
self.stage_model.localize_node_in_exec_source(self.node_path)
self.execute_field.blockSignals(False)
def revert_exec_path(self):
self.execute_field.blockSignals(True)
comp_layer = self.stage_model.comp_layer
cur_inst_path = self.stage_model.get_node_exec_in(self.node_path,
comp_layer)
if cur_inst_path:
self.stage_model.set_node_exec_in(self.node_path, None)
self.execute_field.blockSignals(False)
def localize_attrs(self):
data = self.model._data
path = self.node_path
attr_names = list()
for index in self.model.selected_indexes:
attr_names.append(data[index.row()][0])
self.stage_model.localize_node_attrs(path, attr_names)
def revert_attrs(self):
data = self.model._data
path = self.node_path
attr_names = list()
for index in self.model.selected_indexes:
attr_names.append(data[index.row()][0])
self.stage_model.revert_node_attrs(path, attr_names)
def custom_context_menu(self, pos):
index = self.table_view.indexAt(pos)
        if not index.isValid():
return
index = self.proxy_model.mapToSource(index)
name = index.sibling(index.row(), COLUMNS.name).data()
if index.column() != COLUMNS.source:
try:
locality = self.model._data[index.row()][COLUMNS.locality]
except IndexError:
locality = None
menu = QtWidgets.QMenu(self)
menu.addAction(self.add_attr_action)
if index.row() != -1:
menu.addAction(self.remove_attr_action)
menu.addSeparator()
menu.addAction(self.localize_attr_action)
path = self.node_path
hist_func = self.stage_model.get_historical_opinions
if locality == LOCALITIES.local and hist_func(path, name):
menu.addAction(self.revert_attr_action)
if index.column() == COLUMNS.value:
menu.addSeparator()
menu.addAction(self.copy_raw_action)
menu.addAction(self.copy_resolved_action)
menu.addAction(self.copy_cached_action)
else:
menu = HistoricalContextMenu(self, self.node_path, name,
self.stage_model)
menu.popup(QtGui.QCursor.pos())
@staticmethod
def reset_action_enabled(actions):
for action in actions:
action.setEnabled(True)
def instance_context_menu(self):
self.instance_field.clearFocus()
l_inst = self.instance_field
menu = QtWidgets.QLineEdit.createStandardContextMenu(l_inst)
menu.addAction(self.localize_inst_path_action)
menu.addAction(self._actions.revert_inst_path_action)
if not l_inst.text():
self.localize_inst_path_action.setEnabled(False)
link_to = HistoricalContextMenu.LINKS.SOURCE
historical_menu = HistoricalContextMenu(self, self.node_path,
INTERNAL_ATTRS.INSTANCE_PATH,
self.stage_model,
truncate_left=True,
link_to=link_to)
menu.addMenu(historical_menu)
menu.popup(QtGui.QCursor.pos())
menu.aboutToHide.connect(partial(self.reset_action_enabled,
menu.actions()))
def exec_context_menu(self):
self.execute_field.clearFocus()
l_exec = self.execute_field
menu = QtWidgets.QLineEdit.createStandardContextMenu(l_exec)
menu.addAction(self.localize_exec_path_action)
menu.addAction(self.revert_exec_path_action)
layer, src_path = self.stage_model.get_node_attr_source(self.node_path,
INTERNAL_ATTRS.EXECUTE_IN,
self.stage_model.comp_layer)
tgt_path = self.stage_model.target_layer.real_path
if src_path == self.node_path and layer == tgt_path:
self.localize_exec_path_action.setEnabled(False)
self.revert_exec_path_action.setEnabled(True)
else:
self.localize_exec_path_action.setEnabled(True)
self.revert_exec_path_action.setEnabled(False)
link_to = HistoricalContextMenu.LINKS.SOURCE
historical_menu = HistoricalContextMenu(self, self.node_path,
INTERNAL_ATTRS.EXECUTE_IN,
self.stage_model,
truncate_left=True,
link_to=link_to)
menu.addMenu(historical_menu)
menu.popup(QtGui.QCursor.pos())
menu.aboutToHide.connect(partial(self.reset_action_enabled,
menu.actions()))
class PropertyModel(QtCore.QAbstractTableModel):
"""Property Editor model"""
def __init__(self, parent=None, graph_model=None, node_path=None, view=None, headers=None):
"""Initialize the data structure and get header labels.
Data Structure
##############
attributes:
<attributeName>:
cached_value: <value>
dirty: <bool>
type: <value>
value: <value>
runtime: <bool>
:param graph_model:
:param parent:
"""
super(PropertyModel, self).__init__()
# incoming data
self.parent = parent
self.stage_model = graph_model
self.node_path = node_path
self.view = view
self.headers = headers
# local attributes
self.node_attr_names = []
self.node_attr_draw_details = {}
self.attr_data = []
self.attr_data_resolved = []
self.attr_data_cached = []
self.selected_indexes = []
self.horizontal_header = self.view.horizontalHeader()
self.state = None
self.horizontal_header.sortIndicatorChanged.connect(self.save_state)
self.horizontal_header.sectionResized.connect(self.save_state)
# set default data
self._data = [[]]
@property
def resolved(self):
return self.stage_model.data_state
def set_represented_node(self, node_path=None):
"""Sends node data for selected node to the model.
:param node_path:
:return:
"""
comp_layer = self.stage_model.comp_layer
stage_model = self.stage_model
self.node_path = node_path
if not self.node_path or not stage_model.node_exists(node_path,
comp_layer):
self.clear()
return
# get attribute model data
local_attrs = stage_model.get_node_local_attr_names(node_path,
comp_layer)
local_attrs = sorted(local_attrs)
parent_attrs = stage_model.get_node_inherited_attr_names(node_path,
comp_layer)
parent_attrs = sorted(parent_attrs)
inst_attrs = stage_model.get_node_instanced_attr_names(node_path,
comp_layer)
inst_attrs = sorted(inst_attrs)
self.node_attr_names = []
for attr_list in (local_attrs, parent_attrs, inst_attrs):
for attr in attr_list:
if attr not in self.node_attr_names:
self.node_attr_names += [attr]
cached_attrs = stage_model.get_cached_attr_names(node_path)
if stage_model.data_state == DATA_STATE.CACHED:
for attr_name in cached_attrs:
if attr_name not in self.node_attr_names:
self.node_attr_names += [attr_name]
self.attr_data = []
self.attr_data_resolved = []
self.attr_data_cached = []
for attr_name in self.node_attr_names:
# get cached data
cached = DATA_STATE.CACHED
attr_cached = stage_model.get_node_attr_value(self.node_path,
attr_name,
data_state=cached,
as_string=True)
self.attr_data_cached += [attr_cached]
# get resolved data
resolved = DATA_STATE.RESOLVED
resolved_val = stage_model.get_node_attr_value(node_path,
attr_name,
comp_layer,
data_state=resolved)
self.attr_data_resolved += [resolved_val]
# Get locality
if attr_name in local_attrs:
locality = LOCALITIES.local
elif attr_name in parent_attrs:
locality = LOCALITIES.inherited
elif attr_name in inst_attrs:
locality = LOCALITIES.instanced
elif attr_name in cached_attrs:
locality = LOCALITIES.code
# get raw data
raw = DATA_STATE.RAW
attr_value = stage_model.get_node_attr_value(node_path, attr_name,
comp_layer,
data_state=raw,
as_string=True)
type_layer = comp_layer
if (stage_model.data_state == DATA_STATE.CACHED and
stage_model.current_rt_layer):
type_layer = stage_model.current_rt_layer.cache_layer
attr_type = stage_model.get_node_attr_type(node_path, attr_name,
type_layer)
if locality == LOCALITIES.code:
attr_source = node_path
else:
attr_source = stage_model.get_node_attr_source_path(node_path,
attr_name,
comp_layer)
attr_comment = stage_model.get_node_attr_comment(node_path,
attr_name,
comp_layer)
# add data row
row_dict = {COLUMNS.name: attr_name,
COLUMNS.value: attr_value,
COLUMNS.nxt_type: attr_type,
COLUMNS.source: attr_source,
COLUMNS.locality: locality,
COLUMNS.comment: attr_comment}
row_data = COLUMNS.column_dict_to_list(row_dict)
self.attr_data += [row_data]
# get draw details for this attr
color = stage_model.get_node_attr_color(node_path, attr_name,
comp_layer)
self.node_attr_draw_details[attr_name] = {'color': color}
# set model data
self.horizontal_header.sortIndicatorChanged.disconnect(self.save_state)
self.horizontal_header.sectionResized.disconnect(self.save_state)
self.beginResetModel()
self._data = self.attr_data if self.attr_data else [[]]
self.endResetModel()
self.horizontal_header.sortIndicatorChanged.connect(self.save_state)
self.horizontal_header.sectionResized.connect(self.save_state)
# header settings
# TODO Why on earth is the model touching the view like this?
if self.attr_data:
self.horizontal_header.setSectionResizeMode(COLUMNS.name,
QtWidgets.QHeaderView.Interactive)
self.horizontal_header.setSectionResizeMode(COLUMNS.value,
QtWidgets.QHeaderView.Interactive)
self.horizontal_header.setSectionResizeMode(COLUMNS.nxt_type,
QtWidgets.QHeaderView.Fixed)
self.horizontal_header.setSectionResizeMode(COLUMNS.source,
QtWidgets.QHeaderView.Interactive)
self.horizontal_header.setSectionResizeMode(COLUMNS.locality,
QtWidgets.QHeaderView.Interactive)
self.horizontal_header.setSectionResizeMode(COLUMNS.comment,
QtWidgets.QHeaderView.Interactive)
self.horizontal_header.setSortIndicatorShown(True)
# column widths
if self.state:
if sys.version_info[0] > 2 and isinstance(self.state, str):
self.state = bytes(self.state, 'utf-8')
try:
self.horizontal_header.restoreState(self.state)
except TypeError:
logger.error('Corrupted property editor pref!')
self.state = ''
self.view.resizeColumnToContents(COLUMNS.nxt_type)
def get_data(self):
return self._data
def clear(self):
self.beginResetModel()
self._data = [[]]
self.endResetModel()
def save_state(self):
self.state = self.horizontal_header.saveState()
col = self.horizontal_header.sortIndicatorSection()
order = self.horizontal_header.sortIndicatorOrder().name
state = {'column': int(col),
'order': str(order)}
if state == self.parent.user_sort_pref:
return
user_dir.user_prefs[self.parent.PREF_KEY] = state
self.parent.user_sort_pref = state
def setData(self, index, value, role=None):
        if not index.isValid():
return False
row = index.row()
column = index.column()
if value == self.attr_data[row][column] and column != COLUMNS.source:
return False
attr_name = str(self._data[row][COLUMNS.name])
# set attr name
if role == QtCore.Qt.EditRole and column == COLUMNS.name:
self.stage_model.rename_node_attr(node_path=self.node_path,
attr_name=attr_name,
new_attr_name=value,
layer=self.stage_model.target_layer)
return True
# set attr value
elif role == QtCore.Qt.EditRole and column == COLUMNS.value:
if value == self.attr_data[row][column]:
return False
self.stage_model.set_node_attr_value(node_path=self.node_path,
attr_name=attr_name,
value=value,
layer=self.stage_model.target_layer)
return True
# set attr comment
elif role == QtCore.Qt.EditRole and column == COLUMNS.comment:
self.stage_model.node_setattr_comment(self.node_path, attr_name,
value,
self.stage_model.target_layer)
return True
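        # Not a real checkbox: the source column's button delegate reuses
        # CheckStateRole as a "clicked" signal, passing the source node path
        # as the value so the model can select and frame that node.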
elif role == QtCore.Qt.CheckStateRole and column == COLUMNS.source:
self.stage_model.select_and_frame(value)
return True
return False
def data(self, index, role=None):
        if not index.isValid():
return None
row = index.row()
column = index.column()
cached_state = DATA_STATE.CACHED
resolved_state = DATA_STATE.RESOLVED
if role is None:
return self._data[row][column]
if role == QtCore.Qt.BackgroundRole and column == COLUMNS.value:
try:
unresolved_data = self._data[row][column]
except IndexError:
unresolved_data = None
try:
cached_data = self.attr_data_cached[row]
except IndexError:
cached_data = None
in_cached_state = self.stage_model.data_state == cached_state
if in_cached_state and not cached_data:
color = colors.UNCACHED_RED
return QtGui.QBrush(color, QtCore.Qt.BDiagPattern)
if role == QtCore.Qt.DisplayRole:
state = self.stage_model.data_state
if column == COLUMNS.value and state is resolved_state:
try:
return self.attr_data_resolved[row]
except IndexError:
return None
elif column == COLUMNS.value and state is cached_state:
try:
unresolved_data = self._data[row][column]
cached_data = self.attr_data_cached[row]
invalid_cache = unresolved_data and not cached_data
if invalid_cache:
return unresolved_data
else:
return cached_data
except IndexError:
return None
else:
return self._data[row][column]
if role == QtCore.Qt.ToolTipRole:
if column == COLUMNS.value:
value = ' value : ' + (',\n ' + (' ' * 10)).join(
textwrap.wrap(self._data[row][COLUMNS.value], 100))
resolved_state = 'resolved : ' + (',\n ' + (' ' * 10)).join(
textwrap.wrap(str(self.attr_data_resolved[row]), 100))
cached_state = ' cached : ' + (',\n ' + (' ' * 10)).join(
textwrap.wrap(str(self.attr_data_cached[row]), 100))
return '\n'.join([value, resolved_state, cached_state])
elif column == COLUMNS.source:
path = self.node_path
name = index.sibling(index.row(), COLUMNS.name).data()
historicals = self.stage_model.get_historical_opinions(path,
name)
lines = []
for historical in historicals:
_, source = historical.get(META_ATTRS.SOURCE)
val = historical.get(META_ATTRS.VALUE)
if len(val) > 50:
val = val[:50] + '...'
text = source + '.' + name + '\t' + val
lines += [text]
if not historicals:
lines = ['No Historical Opinions']
return '\n'.join(lines)
if role == QtCore.Qt.ForegroundRole:
attr_name = self._data[row][COLUMNS.name]
locality_idx = self._data[row][COLUMNS.locality]
color = self.node_attr_draw_details[attr_name]['color']
if locality_idx in (LOCALITIES.local, LOCALITIES.code):
return QtGui.QColor(color).lighter(150)
else:
return QtGui.QColor(color).darker(150)
if role == QtCore.Qt.FontRole:
if self._data[row][COLUMNS.locality] == LOCALITIES.instanced:
font = QtGui.QFont()
font.setItalic(True)
return font
if role == QtCore.Qt.DecorationRole and column == COLUMNS.nxt_type:
attr_type = self._data[row][column]
color = colors.ATTR_COLORS.get(attr_type, QtCore.Qt.gray)
icon = QtGui.QPixmap(QtCore.QSize(10, 10))
icon.fill(QtCore.Qt.transparent)
painter = QtGui.QPainter(icon)
painter.setRenderHint(QtGui.QPainter.Antialiasing)
painter.setBrush(color)
painter.setPen(QtCore.Qt.NoPen)
painter.drawEllipse(QtCore.QPointF(7, 5), 3, 3)
del painter
return icon
if role == QtCore.Qt.EditRole:
return self._data[row][column]
def flags(self, index):
column = index.column()
if column in (COLUMNS.name, COLUMNS.value, COLUMNS.comment):
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | \
QtCore.Qt.ItemIsEditable
elif column == COLUMNS.nxt_type:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
elif column == COLUMNS.source:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
else:
return QtCore.Qt.NoItemFlags
def headerData(self, section, orientation, role):
if orientation == QtCore.Qt.Horizontal:
if role == QtCore.Qt.DisplayRole:
return self.headers[section]
def rowCount(self, parent):
return len(self._data)
def columnCount(self, parent):
return len(self._data[0])
class AttrsTableView(QtWidgets.QTableView):
def __init__(self, parent=None):
super(AttrsTableView, self).__init__(parent=parent)
self.node_path_delegate = NodePathBtnDelegate(self)
self.setItemDelegateForColumn(COLUMNS.source, self.node_path_delegate)
self.mouse_pressed = False
self.drag_start_pos = None
self.setDragEnabled(True)
self.setMouseTracking(True)
self.installEventFilter(self)
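    # Attribute rows can be dragged out as text: once the drag passes the
    # application's start-drag distance, the attr name is wrapped into an
    # nxt token string and placed on the mime data, ready to be dropped
    # into a code editor.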
def mousePressEvent(self, event):
super(AttrsTableView, self).mousePressEvent(event)
self.mouse_pressed = self.indexAt(event.pos())
self.startDrag(event)
def mouseReleaseEvent(self, event):
super(AttrsTableView, self).mouseReleaseEvent(event)
self.mouse_pressed = False
self.drag_start_pos = None
def mouseMoveEvent(self, event):
super(AttrsTableView, self).mouseMoveEvent(event)
if not self.drag_start_pos or not self.mouse_pressed:
return
start_drag_dist = QtWidgets.QApplication.startDragDistance()
drag_delta = (event.pos() - self.drag_start_pos).manhattanLength()
if drag_delta >= start_drag_dist:
drag = QtGui.QDrag(self)
mime_data = QtCore.QMimeData()
attr_name = self.get_attr_name()
if attr_name is None:
return
token = tokens.make_token_str(attr_name)
mime_data.setText(token)
drag.setMimeData(mime_data)
drag.exec_()
self.drag_start_pos = None
def dragEnterEvent(self, event):
event.setDropAction(QtCore.Qt.LinkAction)
event.accept()
def startDrag(self, event):
self.drag_start_pos = None
if not self.mouse_pressed:
return
self.drag_start_pos = event.pos()
def get_attr_name(self):
if not self.mouse_pressed:
return
idx = self.model().index(self.mouse_pressed.row(), COLUMNS.name)
return self.model().data(idx)
class NodePathBtnDelegate(QtWidgets.QStyledItemDelegate):
def __init__(self, parent):
self.parent = parent
super(NodePathBtnDelegate, self).__init__()
def paint(self, painter, option, index):
self.initStyleOption(option, index)
inner_rect = QtCore.QRect().united(option.rect)
inner_rect = inner_rect.marginsRemoved(QtCore.QMargins(1, 1, 1, 1))
painter.setPen(QtCore.Qt.NoPen)
painter.setBrush(option.backgroundBrush)
attr_name = index.sibling(index.row(), COLUMNS.name).data()
model = index.model().sourceModel()
color = model.node_attr_draw_details[attr_name]['color']
color = QtGui.QColor(color)
painter.setPen(color)
if option.state & QtWidgets.QStyle.State_MouseOver:
            if self.parent.mouse_pressed and self.parent.mouse_pressed.column() == index.column():
color.setAlpha(110)
else:
color.setAlpha(80)
else:
color.setAlpha(50)
painter.fillRect(inner_rect, color)
color.setAlpha(255)
painter.drawText(inner_rect, QtCore.Qt.AlignCenter, index.data())
def editorEvent(self, event, model, option, index):
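        # Treat a left-button release on the source cell as a click and
        # forward it to the model via setData with CheckStateRole (see
        # PropertyModel.setData), which selects and frames the source node.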
if event.button() != QtCore.Qt.MouseButton.LeftButton:
return False
if event.type() != QtCore.QEvent.Type.MouseButtonRelease:
return False
model.setData(index, index.data(), role=QtCore.Qt.CheckStateRole)
return True
class HistoricalContextMenu(QtWidgets.QMenu):
class LINKS(object):
SOURCE = 'source'
VALUE = 'value'
def __init__(self, parent, node_path, attr_name, stage_model,
title='Historical Values', label_fmt=None,
link_to=LINKS.SOURCE, truncate_left=False):
self.colors = []
self.mouse_pos = QtCore.QPoint(0, 0)
self.mouse_pressed = False
super(HistoricalContextMenu, self).__init__(title=title, parent=parent)
self.add_historcal_value_actions(node_path, attr_name, stage_model,
truncate_left, link_to, label_fmt)
def mouseMoveEvent(self, event):
super(HistoricalContextMenu, self).mouseMoveEvent(event)
self.mouse_pos = event.pos()
def mousePressEvent(self, event):
super(HistoricalContextMenu, self).mousePressEvent(event)
self.mouse_pressed = True
def mouseReleaseEvent(self, event):
super(HistoricalContextMenu, self).mouseReleaseEvent(event)
self.mouse_pressed = False
def paintEvent(self, event):
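        # Custom painting: each action is drawn as a row tinted with its
        # layer color, brightened while hovered and again while pressed;
        # all labels share the centering offset of the widest entry.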
painter = QtGui.QPainter()
fm = QtGui.QFontMetrics(self.font())
painter.begin(self)
y = 0
y_txt = fm.height() * .5
step = self.rect().height() / len(self.actions())
painter.setRenderHint(QtGui.QPainter.Antialiasing)
data = []
x_offset = self.rect().width()
option = QtWidgets.QStyleOptionButton()
option.initFrom(self)
for action in self.actions():
color = getattr(action, 'color', '#232323')
rect = QtCore.QRect(0, y, self.rect().width(), step)
x = fm.boundingRect(action.text()).width()
x_pos = (rect.width() - x) * .5
if x_pos < x_offset:
x_offset = x_pos
item_data = {'rect': rect, 'color': color, 'text': action.text(),
'y': y}
data += [item_data]
y += step
for item_data in data:
color = QtGui.QColor(item_data['color'])
rect = item_data['rect']
if rect.contains(self.mouse_pos):
if self.mouse_pressed:
mult = 110
else:
mult = 80
color.setAlpha(mult)
else:
color.setAlpha(50)
painter.fillRect(rect, color)
color = QtCore.Qt.white
painter.setPen(color)
painter.drawText(x_offset, rect.height() - y_txt +
item_data['y'], item_data['text'])
painter.end()
    def add_historical_value_actions(self, node_path, attr_name,
                                     stage_model, truncate_left=False,
                                     link_to=LINKS.SOURCE, label_fmt=None):
"""Adds menu actions representing each historical value for the given
node_path and attr_name. If truncate_left is True the any characters
over the HISTORICAL_MAX_CHARS limit will be removed from the left of the
string. Otherwise they will be removed from the right.
Default text_mode will result in: "/node.attr 123"
:param node_path: String of node path
:param attr_name: String of attr name
:param stage_model: StageModel
:param link_to: LINKS.SOURCE or LINKS.VALUE tells the stage model
what data to try and select and focus to
:param label_fmt: String ready for formatting
i.e.: '{source}.{attr_name} {value}'
Valid format keys are:
source
attr_name
value
:param truncate_left: bool
:return: None
"""
if attr_name != INTERNAL_ATTRS.INSTANCE_PATH:
historicals = stage_model.get_historical_opinions(node_path,
attr_name)
else:
historicals = stage_model.get_instance_trace(node_path)
if not historicals:
action = self.addAction('No Historical Opinions')
action.setEnabled(False)
if attr_name in INTERNAL_ATTRS.SAVED:
attr_name = INTERNAL_ATTRS.as_save_key(attr_name)
for historical in historicals:
layer, source = historical.get(META_ATTRS.SOURCE)
color = stage_model.get_layer_color(layer)
val = historical.get(META_ATTRS.VALUE) or ''
char_count = len(val)
if link_to == self.LINKS.SOURCE:
link = source
else:
link = val
if char_count > HISTORICAL_MAX_CHARS:
if truncate_left:
val = '...' + val[char_count - HISTORICAL_MAX_CHARS:]
else:
val = val[:HISTORICAL_MAX_CHARS] + '...'
if label_fmt is None:
pref_key = user_dir.USER_PREF.HISTORICAL_LABEL_FORMAT
default_fmt = '{source}.{attr_name} {value}'
label_fmt = user_dir.user_prefs.get(pref_key, default_fmt)
text = label_fmt.format(source=source, attr_name=attr_name,
value=val)
func = partial(stage_model.select_and_frame, link)
action = self.addAction(text, func)
action.color = color
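# A minimal, illustrative sketch of the label_fmt contract documented in
# add_historical_value_actions above (all values here are hypothetical):
#   fmt = '{source}.{attr_name} {value}'
#   fmt.format(source='/char/arm', attr_name='scale', value='1.0')
#   # -> '/char/arm.scale 1.0'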
class NodePositionSpinbox(QtWidgets.QSpinBox):
stepChanged = QtCore.Signal()
def stepBy(self, step):
value = self.value()
        super(NodePositionSpinbox, self).stepBy(step)
if self.value() != value:
self.stepChanged.emit()
class LineEdit(QtWidgets.QLineEdit):
accept = QtCore.Signal()
cancel = QtCore.Signal()
focus_changed = QtCore.Signal(bool)
def __init__(self, parent=None):
        # Track focus ourselves; hasFocus() can report the parent widget's
        # focus rather than this line edit's.
self.has_focus = False
super(LineEdit, self).__init__(parent)
def keyPressEvent(self, event):
# accept edit
if event.key() in (QtCore.Qt.Key_Enter, QtCore.Qt.Key_Return):
self.accept.emit()
self.clearFocus()
# cancel edit
elif event.key() == QtCore.Qt.Key_Escape:
self.cancel.emit()
# pass
else:
return QtWidgets.QLineEdit.keyPressEvent(self, event)
def focusInEvent(self, event):
super(LineEdit, self).focusInEvent(event)
self.focus_changed.emit(True)
self.has_focus = True
def focusOutEvent(self, event):
super(LineEdit, self).focusOutEvent(event)
self.has_focus = False
self.focus_changed.emit(False)
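# Typical wiring for this widget (handler names are hypothetical):
#   edit = LineEdit()
#   edit.accept.connect(commit_edit)  # Enter/Return pressed
#   edit.cancel.connect(revert_edit)  # Escape pressed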
class TextEdit(QtWidgets.QTextEdit):
accept = QtCore.Signal()
cancel = QtCore.Signal()
def __init__(self, parent, name):
super(TextEdit, self).__init__(parent=parent)
self.layer_colors = []
self.setObjectName(name)
def focusOutEvent(self, event):
super(TextEdit, self).focusOutEvent(event)
self.accept.emit()
class OverlayWidget(QtWidgets.QWidget):
def __init__(self, parent=None):
super(OverlayWidget, self).__init__(parent)
self._parent = parent
self.ext_color = QtGui.QColor(62, 62, 62, 190)
self.base_color = QtGui.QColor(62, 62, 62, 0)
self.main_color = self.base_color
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
self.setAttribute(QtCore.Qt.WA_TransparentForMouseEvents)
self.data_state = ''
def paintEvent(self, event):
painter = QtGui.QPainter()
painter.begin(self)
painter.setRenderHint(QtGui.QPainter.Antialiasing)
# actual_display_state
self.data_state = DATA_STATE.RAW
if self.data_state == DATA_STATE.RAW:
color = QtGui.QColor(100, 0, 0, 200)
painter.fillRect(self.parent().rect(),
QtGui.QBrush(color, QtCore.Qt.BDiagPattern))
painter.end()
def update(self):
self.setGeometry(self.parent().rect())
super(OverlayWidget, self).update()
class LOCALITIES:
code = '0.Code'
local = '1.Local'
inherited = '2.Parent'
instanced = '3.Inst'
class COLUMNS:
name = 0
value = 1
nxt_type = 2
source = 3
locality = 4
comment = 5
@classmethod
def column_dict_to_list(cls, columns_dict):
"""Helper method for sorting row data correctly. This allows for
easy changes to row data in the future.
"""
columns = ['', '', '', '', '', '']
for k, v in columns_dict.items():
columns[k] = v
return columns
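# Illustrative call (values are hypothetical); unset columns come back as
# empty strings in positional order:
#   COLUMNS.column_dict_to_list({COLUMNS.name: 'tx', COLUMNS.value: 5})
#   # -> ['tx', 5, '', '', '', '']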
def line_edit_style_factory(txt_color='white', tgt_layer_color='white',
bg_color='#232323'):
"""Generates a string of a qss style sheet for a line edit. Colors can be
supplied as strings of color name or hex value. If a color arg receives
a tuple we assume it is either an rgb or rgba tuple.
:param txt_color: Color the text of the line edit should be.
:param tgt_layer_color: The color of the current target layer.
    :param bg_color: The color that will fill the background of the line edit.
:return: string of qss
"""
def handle_rgb(color_tuple):
"""Assumes the tuple is rgba or rgb (len 4 or 3)"""
val = ','.join([str(i) for i in color_tuple])
if len(color_tuple) == 4:
rgb = 'rgba({})'.format(val)
else:
rgb = 'rgb({})'.format(val)
return rgb
    if isinstance(txt_color, tuple):
        txt_color = handle_rgb(txt_color)
    if isinstance(tgt_layer_color, tuple):
        tgt_layer_color = handle_rgb(tgt_layer_color)
    if isinstance(bg_color, tuple):
        bg_color = handle_rgb(bg_color)
style = '''
QTextEdit,
QLineEdit {
border-radius: 11px;
border: 1px solid transparent;
background-color: %s;
color: %s
}
QTextEdit:hover,
QLineEdit:hover {
border: 1px solid %s
}
QTextEdit:focus,
QLineEdit:focus {
border: 2px solid %s
}
''' % (bg_color, txt_color, tgt_layer_color,
tgt_layer_color)
return style
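# Example call (colors are illustrative): tuples are converted to rgb()/rgba()
# notation before substitution into the style sheet.
#   qss = line_edit_style_factory(txt_color='white',
#                                 tgt_layer_color='#3399cc',
#                                 bg_color=(35, 35, 35, 255))
#   line_edit.setStyleSheet(qss)  # 'line_edit' is a hypothetical widget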
| 44.596501
| 136
| 0.596467
|
94b2252d0d890e7c8feb5c93edb487cb4d2dda81
| 30,536
|
py
|
Python
|
modules/eden/sync.py
|
unimauro/eden
|
b739d334e6828d0db14b3790f2f5e2666fc83576
|
[
"MIT"
] | null | null | null |
modules/eden/sync.py
|
unimauro/eden
|
b739d334e6828d0db14b3790f2f5e2666fc83576
|
[
"MIT"
] | null | null | null |
modules/eden/sync.py
|
unimauro/eden
|
b739d334e6828d0db14b3790f2f5e2666fc83576
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""" Sahana Eden Synchronization
@author: Dominic König <dominic[at]aidiq.com>
@copyright: 2009-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["SyncDataModel",
"sync_rheader",
"sync_now",
"sync_job_reset"]
from gluon import *
from gluon.storage import Storage
from gluon.dal import Row
from ..s3 import *
# =============================================================================
class SyncDataModel(S3Model):
names = ["sync_config",
"sync_status",
"sync_repository",
"sync_task",
"sync_job",
"sync_conflict",
"sync_log"]
def model(self):
T = current.T
db = current.db
request = current.request
s3 = current.response.s3
UNKNOWN_OPT = current.messages.UNKNOWN_OPT
NONE = current.messages.NONE
define_table = self.define_table
configure = self.configure
add_component = self.add_component
set_method = self.set_method
scheduler_task_id = s3.scheduler_task_id
s3_datetime_represent = lambda dt: \
S3DateTime.datetime_represent(dt, utc=True)
# -------------------------------------------------------------------------
# Configuration
# -------------------------------------------------------------------------
tablename = "sync_config"
table = define_table(tablename,
Field("proxy",
label=T("Proxy Server URL"),
requires=IS_EMPTY_OR(IS_URL(mode="generic"))),
*s3.meta_fields())
# Field configuration
table.uuid.readable = True
table.uuid.label = "UUID"
table.uuid.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("UUID"),
T("Unique identifier which THIS repository identifies itself with when sending synchronization requests.")))
table.proxy.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Proxy Server URL"),
T("URL of the default proxy server to connect to remote repositories (if required). If only some of the repositories require the use of a proxy server, you can configure this in the respective repository configurations.")))
# CRUD Strings
s3.crud_strings[tablename] = Storage(
title_display = T("Synchronization Settings"),
title_update = T("Edit Synchronization Settings"),
msg_record_modified = T("Synchronization settings updated"))
# Resource Configuration
configure(tablename,
insertable=False,
deletable=False,
update_next=URL(c="sync", f="config", args=["1", "update"]))
# -------------------------------------------------------------------------
# Status
# -------------------------------------------------------------------------
tablename = "sync_status"
table = define_table(tablename,
Field("running", "boolean",
default=False,
readable=False,
writable=False),
Field("manual", "boolean",
default=False,
readable=False,
writable=False),
Field("timestmp", "datetime",
readable=False,
writable=False))
# -------------------------------------------------------------------------
# Repository
# -------------------------------------------------------------------------
tablename = "sync_repository"
table = define_table(tablename,
Field("name",
length=64,
notnull=True),
Field("url",
label="URL",
requires = IS_EMPTY_OR(
IS_NOT_IN_DB(db,
"sync_repository.url"))),
Field("username"),
Field("password", "password"),
Field("proxy",
label=T("Proxy Server URL"),
requires=IS_EMPTY_OR(IS_URL(mode="generic"))),
Field("last_status",
readable=False,
writable=False,
label=T("Last status")),
Field("accept_push", "boolean",
default=False,
label=T("Accept Push")),
*s3.meta_fields())
# Field configuration
table.uuid.label = "UUID"
table.uuid.readable = True
table.uuid.writable = True
table.name.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Repository Name"),
T("Name of the repository (for you own reference)")))
table.url.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Repository Base URL"),
T("Base URL of the remote Sahana Eden instance including application path, e.g. http://www.example.org/eden")))
table.proxy.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Proxy Server URL"),
T("URL of the proxy server to connect to the repository (leave empty for default proxy)")))
table.username.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Username"),
T("Username to use for authentication at the remote site.")))
table.password.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Password"),
T("Password to use for authentication at the remote site.")))
table.uuid.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Repository UUID"),
T("Identifier which the repository identifies itself with when sending synchronization requests.")))
table.accept_push.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Accept Push"),
T("Accept unsolicited data transmissions from the repository.")))
# CRUD Strings
ADD_REPOSITORY = T("Add Repository")
s3.crud_strings[tablename] = Storage(
title_create = ADD_REPOSITORY,
title_display = T("Repository Configuration"),
title_list = T("Repositories"),
title_update = T("Edit Repository Configuration"),
title_search = T("Search for Repository"),
subtitle_create = T("Add Repository"),
subtitle_list = T("Currently Configured Repositories"),
label_list_button = T("List Repositories"),
label_create_button = ADD_REPOSITORY,
msg_record_created = T("Repository configured"),
msg_record_modified = T("Repository configuration updated"),
msg_record_deleted = T("Repository configuration deleted"),
msg_list_empty = T("No repositories configured"))
# Resource Configuration
configure(tablename,
list_fields=["name",
"uuid",
"accept_push",
(T("Last Synchronization"), "last_sync_time")
],
onaccept=self.sync_repository_onaccept,
ondelete=self.sync_repository_ondelete,
create_next=URL(c="sync", f="repository", args=["[id]",
"task"]),
update_next=URL(c="sync", f="repository", args=["[id]"]))
table.virtualfields.append(SyncRepositoryVirtualFields())
set_method(tablename, method="now", action=sync_now)
# Reusable Fields
repository_id = S3ReusableField("repository_id", table,
requires = IS_ONE_OF(db,
"sync_repository.id",
"%(name)s"),
represent = self.sync_repository_represent,
label = T("Repository"))
# Components
add_component("sync_task",
sync_repository="repository_id")
add_component("sync_log",
sync_repository="repository_id")
add_component("sync_conflict",
sync_repository="repository_id")
add_component(S3Task.TASK_TABLENAME,
sync_repository=dict(name="job",
joinby="repository_id",
link="sync_job",
key="scheduler_task_id",
actuate="replace"))
# -------------------------------------------------------------------------
# Task
# -------------------------------------------------------------------------
# Synchronization mode
sync_mode = {
1: T("pull"), # pull only
2: T("push"), # push only
3: T("pull and push"), # pull & push
4: T("none") # do not synchronize this resource
}
# Strategy (allowed import methods)
sync_strategy = S3ImportItem.METHOD
sync_strategy_represent = lambda opt: opt and \
", ".join([o for o in sync_strategy.values()
if o in opt]) or NONE
# Update method
sync_update_method = {
1: T("update"), # update the existing record
2: T("replace"), # replace the existing record
}
# Update/conflict resolution policy
sync_policies = S3ImportItem.POLICY
sync_policy = {
sync_policies.OTHER: T("always update"),
sync_policies.NEWER: T("update if newer"),
sync_policies.MASTER: T("update if master"),
sync_policies.THIS: T("never update")
}
sync_policy_represent = lambda opt: \
opt and sync_policy.get(opt, UNKNOWN_OPT) or NONE
tablename = "sync_task"
table = define_table(tablename,
Field("resource_name",
notnull=True),
repository_id(),
Field("last_sync", "datetime",
readable=True,
writable=False,
update="",
label=T("Last synchronized on")),
Field("mode", "integer",
requires = IS_IN_SET(sync_mode,
zero=None),
default = 3,
label = T("Mode"),
represent = lambda opt: \
sync_mode.get(opt, NONE)),
Field("strategy", "list:string",
requires = IS_IN_SET(sync_strategy.values(),
multiple=True,
zero=None),
default = sync_strategy.values(),
label = T("Strategy"),
represent = sync_strategy_represent,
widget = CheckboxesWidgetS3.widget),
Field("update_method", "integer",
# hide while not implemented
readable=False,
writable=False,
requires = IS_IN_SET(sync_update_method,
zero=None),
default = 1,
label = T("Update Method"),
represent = lambda opt: \
sync_update_method.get(opt,
NONE)),
Field("update_policy",
requires = IS_IN_SET(sync_policies,
zero=None),
default = sync_policies.NEWER,
label = T("Update Policy"),
represent = sync_policy_represent),
Field("conflict_policy",
requires = IS_IN_SET(sync_policies,
zero=None),
default = sync_policies.NEWER,
label = T("Conflict Policy"),
represent = sync_policy_represent),
*s3.meta_fields())
# Field configuration
table.resource_name.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Resource Name"),
T("Table name of the resource to synchronize")))
table.mode.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Synchronization mode"),
T("How data shall be transferred")))
table.strategy.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Strategy"),
T("Which methods to apply when importing data to the local repository")))
table.update_method.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Update Method"),
T("How local records shall be updated")))
table.update_policy.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Update Policy"),
T("Under which conditions local records shall be updated")))
table.conflict_policy.comment = DIV(_class="tooltip",
_title="%s|%s" % (
T("Conflict policy"),
T("Under which condition a local record shall be updated if it also has been modified locally since the last synchronization")))
# CRUD Strings
ADD_TASK = T("Add Resource")
s3.crud_strings[tablename] = Storage(
title_create = ADD_TASK,
title_display = T("Resource Configuration"),
title_list = T("Resources"),
title_update = T("Edit Resource Configuration"),
title_search = T("Search for Resource"),
subtitle_create = ADD_TASK,
subtitle_list = T("Currently Configured Resources"),
label_list_button = T("List Resources"),
label_create_button = ADD_TASK,
msg_record_created = T("Resource configured"),
msg_record_modified = T("Resource configuration updated"),
msg_record_deleted = T("Resource configuration deleted"),
msg_list_empty = T("No resources configured yet"))
# Resource Configuration
configure(tablename,
create_onvalidation=self.sync_task_onvalidation)
# -------------------------------------------------------------------------
# Job
# -------------------------------------------------------------------------
tablename = "sync_job"
table = define_table(tablename,
repository_id(),
scheduler_task_id())
# CRUD Strings
ADD_JOB = T("Add Job")
s3.crud_strings[tablename] = Storage(
title_create = ADD_JOB,
title_display = T("Synchronization Job"),
title_list = T("Synchronization Schedule"),
title_update = T("Edit Job"),
title_search = T("Search for Job"),
subtitle_create = ADD_JOB,
subtitle_list = T("Currently Configured Jobs"),
label_list_button = T("List Jobs"),
label_create_button = ADD_JOB,
msg_record_created = T("Job added"),
msg_record_modified = T("Job updated updated"),
msg_record_deleted = T("Job deleted"),
msg_list_empty = T("No jobs configured yet"),
msg_no_match = T("No jobs configured"))
# Resource Configuration
set_method(tablename,
component_name="job",
method="reset",
action=sync_job_reset)
# -------------------------------------------------------------------------
# Conflicts
# -------------------------------------------------------------------------
# @todo: implement table
tablename = "sync_conflict"
table = define_table(tablename,
repository_id(),
Field("dummy"))
# Field configuration?
# CRUD Strings?
# Resource Configuration
configure(tablename,
insertable=False,
editable=False)
# Reusable Fields?
# Components?
# -------------------------------------------------------------------------
# Log
# -------------------------------------------------------------------------
tablename = "sync_log"
table = define_table(tablename,
Field("timestmp", "datetime",
represent=s3_datetime_represent,
label=T("Date/Time")),
repository_id(),
Field("resource_name"),
# Synchronization mode: PULL/PUSH, IN/OUT
Field("mode"),
Field("action"),
Field("result"),
Field("remote", "boolean",
default=False,
label=T("Remote Error"),
                                   represent=lambda opt: opt and T("yes") or T("no")),
Field("message", "text"),
*s3.meta_fields())
# CRUD Strings
s3.crud_strings[tablename] = Storage(
title_display = T("Log Entry"),
title_list = T("Synchronization Log"),
subtitle_list = T("Synchronization Log"),
label_list_button = T("List All Entries"),
msg_record_deleted = T("Log Entry Deleted"),
msg_list_empty = T("No entries found"),
msg_no_match = T("No entries found"))
# Resource Configuration
configure(tablename,
editable=False,
insertable=False,
deletable=True,
orderby=~table.timestmp)
# ---------------------------------------------------------------------
# Return global names to s3db
#
return Storage()
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults if module is disabled """
return Storage()
# -------------------------------------------------------------------------
@staticmethod
def sync_repository_represent(rid):
""" Repository representation """
db = current.db
s3db = current.s3db
NONE = current.messages.NONE
rtable = s3db.sync_repository
repository = db(rtable.id == rid).select(rtable.name,
limitby=(0, 1)).first()
if repository:
return repository.name
else:
return NONE
# -------------------------------------------------------------------------
@staticmethod
def sync_repository_ondelete(row):
"""
Cleanup after repository deletion
@todo: use standard delete cascade
"""
db = current.db
s3db = current.s3db
        # Delete all resource configurations (sync_task records) for this repository
        ttable = s3db.sync_task
        db(ttable.repository_id == row.id).update(deleted=True)
# Delete all jobs for this repository
# @todo: remove scheduler_task entry as well
jtable = s3db.sync_job
db(jtable.repository_id == row.id).update(deleted=True)
# Delete all pending conflicts of this repository
ctable = s3db.sync_conflict
db(ctable.repository_id == row.id).delete()
# Delete all log entries for this repository
ltable = s3db.sync_log
db(ltable.repository_id == row.id).delete()
return
# -------------------------------------------------------------------------
@staticmethod
def sync_repository_onaccept(form):
"""
Send registration request to the peer
"""
T = current.T
db = current.db
s3db = current.s3db
sync = current.manager.sync
response = current.response
try:
repository_id = form.vars.id
except:
return
if repository_id:
rtable = s3db.sync_repository
query = rtable.id == repository_id
repository = db(query).select(limitby=(0, 1)).first()
if repository and repository.url:
success = sync.request_registration(repository)
if not success:
response.warning = T("Could not auto-register at the repository, please register manually.")
else:
response.confirmation = T("Successfully registered at the repository.")
return
# -------------------------------------------------------------------------
@staticmethod
def sync_task_onvalidation(form):
"""
Task record validation
"""
        T = current.T
        db = current.db
s3db = current.s3db
request = current.request
repository_id = form.vars.repository_id or \
request.post_vars.repository_id
resource_name = form.vars.resource_name
if repository_id and resource_name:
ttable = s3db.sync_task
query = (ttable.repository_id == repository_id) & \
(ttable.resource_name == resource_name) & \
(ttable.deleted != True)
row = db(query).select(ttable.id, limitby=(0, 1)).first()
if row:
form.errors.resource_name = \
T("This resource is already configured for this repository")
# =============================================================================
class SyncRepositoryVirtualFields:
""" Repository virtual fields """
def last_sync_time(self):
""" Last synchronization date/time for this repository """
T = current.T
db = current.db
s3db = current.s3db
s3_datetime_represent = lambda dt: \
S3DateTime.datetime_represent(dt, utc=True)
table = s3db.sync_task
query = table.repository_id == self.sync_repository.id
task = db(query).select(orderby=~table.last_sync,
limitby=(0,1)).first()
if task:
return s3_datetime_represent(task.last_sync)
else:
return T("never")
# -----------------------------------------------------------------------------
def sync_rheader(r, tabs=[]):
"""
Synchronization resource headers
"""
T = current.T
if r.representation == "html":
if r.tablename == "sync_repository":
repository = r.record
if r.component and r.component_name=="log" and not r.component_id:
purge_log = A(T("Remove all log entries"),
_href=r.url(method="delete"))
else:
purge_log = ""
if repository:
if repository.url:
tabs.append((T("Manual Synchronization"), "now"))
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(
TR(TH("%s: " % T("Name")),
repository.name,
TH(""),
purge_log),
TR(TH("URL: "),
repository.url,
TH(""),
""),
), rheader_tabs)
return rheader
return None
# -------------------------------------------------------------------------
def sync_job_reset(r, **attr):
"""
RESTful method to reset a job status from FAILED to QUEUED,
for "Reset" action button
"""
    T = current.T
    session = current.session
if r.interactive:
if r.component and r.component.alias == "job":
job_id = r.component_id
if job_id:
S3Task.reset(job_id)
session.confirmation = T("Job reactivated")
r.component_id = None
redirect(r.url(method=""))
# -----------------------------------------------------------------------------
def sync_now(r, **attr):
"""
Manual synchronization of a repository
"""
T = current.T
manager = current.manager
response = current.response
session = current.session
auth = current.auth
rheader = attr.get("rheader", None)
if rheader:
rheader = rheader(r)
output = dict(title=T("Manual Synchronization"), rheader=rheader)
s3task = current.s3task
sync = S3Sync()
if r.interactive:
if r.http in ("GET", "POST"):
repository = r.record
if not repository:
r.error(404, manager.ERROR.BAD_RECORD)
form = FORM(TABLE(
TR(TD(T("Click 'Start' to synchronize with this repository now:"))),
TR(TD(INPUT(_type="submit", _value=T("Start"))))))
if form.accepts(r.post_vars, session):
task_id = s3task.async("sync_synchronize",
args = [repository.id],
vars = dict(user_id=auth.user.id,
manual=True))
if task_id is False:
response.error = T("Could not initiate manual synchronization.")
elif task_id is None:
response.flash = T("Manual synchronization completed.")
else:
sync.set_status(manual=True)
response.flash = T("Manual synchronization started in the background.")
else:
r.error(405, manager.ERROR.BAD_METHOD)
else:
r.error(501, manager.ERROR.BAD_FORMAT)
status = sync.get_status()
if status.running:
output.update(form=T("Synchronization currently active - refresh page to update status."))
elif not status.manual:
output.update(form=form)
else:
output.update(form=T("Manual synchronization scheduled - refresh page to update status."))
response.view = "update.html"
return output
# END =========================================================================
| 42.88764
| 259
| 0.442789
|
547588c9f5a63bbe8cc0f6d233c6c83960dfbfaf
| 756
|
py
|
Python
|
DataWrangling/insert.py
|
aguijarro/DataSciencePython
|
cb8725947da647b28e40ec9f2149606268f07577
|
[
"MIT"
] | null | null | null |
DataWrangling/insert.py
|
aguijarro/DataSciencePython
|
cb8725947da647b28e40ec9f2149606268f07577
|
[
"MIT"
] | null | null | null |
DataWrangling/insert.py
|
aguijarro/DataSciencePython
|
cb8725947da647b28e40ec9f2149606268f07577
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Add a single line of code to the insert_autos function that will insert the
automobile data into the 'autos' collection. The data variable that is
returned from the process_file function is a list of dictionaries, as in the
example in the previous video.
"""
from autos import process_file
def insert_autos(infile, db):
data = process_file(infile)
# Add your code here. Insert the data in one command.
    # pymongo's insert_many() inserts the whole list in a single command
    db.autos.insert_many(data)
if __name__ == "__main__":
# Code here is for local use on your own computer.
from pymongo import MongoClient
client = MongoClient("mongodb://localhost:27017")
db = client.examples
insert_autos('autos-small.csv', db)
print (db.autos.find_one())
| 29.076923
| 76
| 0.718254
|
276ee023892a08d37a18226b741630234c7ba565
| 8,576
|
py
|
Python
|
tools/wave/network/api/results_api_handler.py
|
cta-wave/dpctf-test-runner
|
9c6a856dd73d97f7efaa3a4b97e0f5978f916dd2
|
[
"BSD-3-Clause"
] | 1
|
2021-06-07T12:19:31.000Z
|
2021-06-07T12:19:31.000Z
|
tools/wave/network/api/results_api_handler.py
|
cta-wave/dpctf-test-runner
|
9c6a856dd73d97f7efaa3a4b97e0f5978f916dd2
|
[
"BSD-3-Clause"
] | 21
|
2021-03-31T19:48:22.000Z
|
2022-03-12T00:24:53.000Z
|
tools/wave/network/api/results_api_handler.py
|
cta-wave/dpctf-test-runner
|
9c6a856dd73d97f7efaa3a4b97e0f5978f916dd2
|
[
"BSD-3-Clause"
] | 1
|
2020-07-17T13:06:00.000Z
|
2020-07-17T13:06:00.000Z
|
from __future__ import absolute_import
from __future__ import unicode_literals
import json
from .api_handler import ApiHandler
from ...data.exceptions.duplicate_exception import DuplicateException
from ...data.exceptions.invalid_data_exception import InvalidDataException
class ResultsApiHandler(ApiHandler):
def __init__(self, results_manager, session_manager, web_root):
super(ResultsApiHandler, self).__init__(web_root)
self._results_manager = results_manager
self._sessions_manager = session_manager
def create_result(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
data = None
body = request.body.decode("utf-8")
if body != "":
data = json.loads(body)
self._results_manager.create_result(token, data)
except Exception:
self.handle_exception("Failed to create result")
response.status = 500
def read_results(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
session = self._sessions_manager.read_session(token)
if session is None:
response.status = 404
return
results = self._results_manager.read_results(token)
self.send_json(response=response, data=results)
except Exception:
self.handle_exception("Failed to read results")
response.status = 500
def read_results_compact(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
results = self._results_manager.read_flattened_results(token)
self.send_json(response=response, data=results)
except Exception:
self.handle_exception("Failed to read compact results")
response.status = 500
def read_results_api_wpt_report_url(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
api = uri_parts[3]
uri = self._results_manager.read_results_wpt_report_uri(token, api)
self.send_json({"uri": uri}, response)
except Exception:
self.handle_exception("Failed to read results report url")
response.status = 500
def read_results_api_wpt_multi_report_uri(self, request, response):
try:
uri_parts = self.parse_uri(request)
api = uri_parts[3]
query = self.parse_query_parameters(request)
tokens = query["tokens"].split(",")
uri = self._results_manager.read_results_wpt_multi_report_uri(
tokens,
api
)
self.send_json({"uri": uri}, response)
except Exception:
self.handle_exception("Failed to read results multi report url")
response.status = 500
def download_results_api_json(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
api = uri_parts[3]
blob = self._results_manager.export_results_api_json(token, api)
if blob is None:
response.status = 404
return
file_path = self._results_manager.get_json_path(token, api)
file_name = "{}-{}-{}".format(
token.split("-")[0],
api,
file_path.split("/")[-1]
)
self.send_zip(blob, file_name, response)
except Exception:
self.handle_exception("Failed to download api json")
response.status = 500
def import_results_api_json(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
api = uri_parts[3]
blob = request.body
self._results_manager.import_results_api_json(token, api, blob)
response.status = 200
except Exception:
self.handle_exception("Failed to upload api json")
response.status = 500
def download_results_all_api_jsons(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
blob = self._results_manager.export_results_all_api_jsons(token)
file_name = token.split("-")[0] + "_results_json.zip"
self.send_zip(blob, file_name, response)
except Exception:
self.handle_exception("Failed to download all api jsons")
response.status = 500
def download_results(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
blob = self._results_manager.export_results(token)
if blob is None:
response.status = 404
return
file_name = token + ".zip"
self.send_zip(blob, file_name, response)
except Exception:
self.handle_exception("Failed to download results")
response.status = 500
def download_results_overview(self, request, response):
try:
uri_parts = self.parse_uri(request)
token = uri_parts[2]
blob = self._results_manager.export_results_overview(token)
if blob is None:
response.status = 404
return
file_name = token.split("-")[0] + "_results_html.zip"
self.send_zip(blob, file_name, response)
except Exception:
self.handle_exception("Failed to download results overview")
response.status = 500
def import_results(self, request, response):
try:
blob = request.body
token = self._results_manager.import_results(blob)
self.send_json({"token": token}, response)
except DuplicateException:
self.handle_exception("Failed to import results")
self.send_json({"error": "Session already exists!"}, response, 400)
return
except InvalidDataException:
self.handle_exception("Failed to import results")
self.send_json({"error": "Invalid input data!"}, response, 400)
return
except Exception:
self.handle_exception("Failed to import results")
response.status = 500
def handle_request(self, request, response):
method = request.method
uri_parts = self.parse_uri(request)
# /api/results/<token>
if len(uri_parts) == 3:
if method == "POST":
if uri_parts[2] == "import":
self.import_results(request, response)
return
self.create_result(request, response)
return
if method == "GET":
self.read_results(request, response)
return
# /api/results/<token>/<function>
if len(uri_parts) == 4:
function = uri_parts[3]
if method == "GET":
if function == "compact":
self.read_results_compact(request, response)
return
if function == "reporturl":
return self.read_results_api_wpt_multi_report_uri(request,
response)
if function == "json":
self.download_results_all_api_jsons(request, response)
return
if function == "export":
self.download_results(request, response)
return
if function == "overview":
self.download_results_overview(request, response)
return
# /api/results/<token>/<api>/<function>
if len(uri_parts) == 5:
function = uri_parts[4]
if method == "GET":
if function == "reporturl":
self.read_results_api_wpt_report_url(request, response)
return
if function == "json":
self.download_results_api_json(request, response)
return
if method == "POST":
if function == "json":
self.import_results_api_json(request, response)
return
response.status = 404
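# Dispatch summary for handle_request above (<token> and <api> are
# placeholders):
#   POST /api/results/import              -> import_results
#   POST /api/results/<token>             -> create_result
#   GET  /api/results/<token>             -> read_results
#   GET  /api/results/<token>/compact     -> read_results_compact
#   GET  /api/results/<token>/export      -> download_results
#   GET  /api/results/<token>/<api>/json  -> download_results_api_json
#   POST /api/results/<token>/<api>/json  -> import_results_api_json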
| 36.806867
| 79
| 0.570079
|
983e401fc8efc6f7bcac566339b839313dd35731
| 1,233
|
py
|
Python
|
src/party3rd/network.py
|
yaroslavNikolaev/A.R.M.O.R.
|
443b50ad39b7ada7562db62c36824c7c4edb842b
|
[
"MIT"
] | 1
|
2020-03-29T20:53:28.000Z
|
2020-03-29T20:53:28.000Z
|
src/party3rd/network.py
|
yaroslavNikolaev/A.R.M.O.R.
|
443b50ad39b7ada7562db62c36824c7c4edb842b
|
[
"MIT"
] | null | null | null |
src/party3rd/network.py
|
yaroslavNikolaev/A.R.M.O.R.
|
443b50ad39b7ada7562db62c36824c7c4edb842b
|
[
"MIT"
] | null | null | null |
from utils.collectors import GitHubVersionCollector
from utils.configuration import Configuration
class NginxVersionCollector(GitHubVersionCollector):
owner = "nginx"
repo = "nginx"
@staticmethod
def get_application_name() -> str:
return "nginx"
def __init__(self, config: Configuration):
super().__init__(config, self.owner, self.repo)
class CalicoVersionCollector(GitHubVersionCollector):
owner = "projectcalico"
repo = "calico"
@staticmethod
def get_application_name() -> str:
return "calico"
def __init__(self, config: Configuration):
super().__init__(config, self.owner, self.repo)
class HaproxyVersionCollector(GitHubVersionCollector):
owner = "haproxy"
repo = "haproxy"
@staticmethod
def get_application_name() -> str:
return "haproxy"
def __init__(self, config: Configuration):
super().__init__(config, self.owner, self.repo)
class IstioVersionCollector(GitHubVersionCollector):
owner = "istio"
repo = "istio"
@staticmethod
def get_application_name() -> str:
return "istio"
def __init__(self, config: Configuration):
super().__init__(config, self.owner, self.repo)
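# New collectors follow the same pattern (the repo below is only an example):
#   class EnvoyVersionCollector(GitHubVersionCollector):
#       owner = "envoyproxy"
#       repo = "envoy"
#       # ...plus get_application_name() and __init__ exactly as above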
| 24.176471
| 55
| 0.690187
|
a3375b08a97f7e5614a265191c300d9fcded5f00
| 6,038
|
py
|
Python
|
automated_testing/plotting_functions.py
|
yT0n1/D3MEAP-Projekt
|
138a10da2eba7b57d79a7b051f687652c1e05f86
|
[
"BSD-Source-Code"
] | null | null | null |
automated_testing/plotting_functions.py
|
yT0n1/D3MEAP-Projekt
|
138a10da2eba7b57d79a7b051f687652c1e05f86
|
[
"BSD-Source-Code"
] | null | null | null |
automated_testing/plotting_functions.py
|
yT0n1/D3MEAP-Projekt
|
138a10da2eba7b57d79a7b051f687652c1e05f86
|
[
"BSD-Source-Code"
] | null | null | null |
import math
import matplotlib.pyplot as plt
import numpy as np
"""
Plotting Functions:
This file includes all plotting variants used in the automated testing
"""
def plot_data(df, min_nodes, max_nodes, problems, squeezed):
problem_hardness = []
for problem in problems:
problem_hardness.append(sum(problem.param_fragment_size))
avg_problem_hardness = np.mean(problem_hardness)
y_axises = ['time', 'deviation', 'space']
for y_axis in y_axises:
fig, ax = plt.subplots()
ax.set(xlabel='Node Count', ylabel=y_axis, title=f'Average {y_axis} per node count')
if y_axis == 'space':
            for node_count in range(min_nodes, max_nodes + 1):
                df = df.append({'algo': 'Total Replication',
                                'space': (avg_problem_hardness * node_count),
                                'nodes': node_count, 'time': 0, 'deviation': 0,
                                'total_replication': 0}, ignore_index=True)
plot_group = df.groupby(['algo', 'nodes'], as_index=False)[y_axis].mean().groupby('algo')
for name, group in plot_group:
group.plot(x='nodes', y=y_axis, label=name, ax=ax)
plt.xticks([i for i in range(min_nodes, max_nodes + 1)])
plt.show()
df = df[df.algo != "Total Replication"]
deviation = ((df.groupby('algo').mean() / df.groupby('algo').mean().loc['Complete']) - 1) * 100
deviation = deviation.drop(columns=['nodes'])
if squeezed:
deviation = deviation.drop(columns=['deviation'])
fig, ax = plt.subplots()
deviation.plot.bar(ax=ax)
ax.set(xlabel='Split Strategies', ylabel='%',
title='%-Deviation from optimum Complete Split Strategy')
plt.show()
def plot_heu_vs_opt(df, min_nodes, max_nodes, problems, squeezes):
problem_hardness = []
for problem in problems:
problem_hardness.append(sum(problem.param_fragment_size))
avg_problem_hardness = np.mean(problem_hardness)
y_axises = ['time', 'deviation', 'space']
for y_axis in y_axises:
fig, ax = plt.subplots()
ax.set(xlabel='Node Count', ylabel=y_axis, title=f'Average {y_axis} per node count')
if y_axis == 'space':
for node_count in range(min_nodes, max_nodes + 1):
df = df.append(
{'algo': 'Total Replication', 'space': (avg_problem_hardness * node_count),
'nodes': node_count, 'time': 0, 'deviation': 0, 'total_replication': 0},
ignore_index=True)
plot_group = df.groupby(['algo', 'nodes'], as_index=False)[y_axis].mean().groupby('algo')
for name, group in plot_group:
group.plot(x='nodes', y=y_axis, label=name, ax=ax)
plt.xticks([i for i in range(min_nodes, max_nodes + 1)])
plt.show()
df = df[df.algo != "Total Replication"]
deviation = ((df.groupby('algo').mean() / df.groupby('algo').mean().loc['Complete']) - 1) * 100
deviation = deviation.drop(index=['Complete'])
deviation = deviation.drop(columns=['nodes', 'total_replication'])
if not squeezes:
        deviation = deviation.drop(columns=['deviation'])
fig, ax = plt.subplots()
deviation.plot.bar(ax=ax)
ax.set(xlabel='Split Strategies', ylabel='%',
title='%-Difference from optimum complete split')
plt.show()
def plot_data_pareto(df):
plot_group = df.groupby(['algo', 'epsilon'], as_index=False).mean()
for algo in plot_group['algo'].unique():
fig, axs = plt.subplots(1, 3, figsize=(10, 3))
axs[0].set(xlabel='Space', ylabel='Deviation', title=f'Space / Deviation for {algo}')
color = plot_group[plot_group.algo == algo]['epsilon'].apply(lambda x: math.log(x, 10))
plot_group[plot_group.algo == algo].plot.scatter(x='space',
y='deviation',
ax=axs[0],
colormap='cool',
c=color)
axs[1].set(xlabel='Space', ylabel='Deviation', title=f'Time / Deviation for {algo}')
plot_group[plot_group.algo == algo].plot.scatter(x='time',
y='deviation',
ax=axs[1],
colormap='cool',
c=color)
axs[2].set(xlabel='Space', ylabel='Deviation', title=f'Space / Time for {algo}')
plot_group[plot_group.algo == algo].plot.scatter(x='time',
y='space',
ax=axs[2],
colormap='cool',
c=color)
plt.tight_layout()
plt.show()
def plot_data_timeout(df):
plot_group = df.groupby(['algo', 'timeout'], as_index=False).mean()
for algo in plot_group['algo'].unique():
fig, axs = plt.subplots(1, 3, figsize=(10, 3))
axs[0].set(xlabel='Timeout', ylabel='Space', title='Space / Timeout Relation')
plot_group[plot_group.algo == algo].plot.scatter(x='timeout',
y='space',
ax=axs[0])
axs[1].set(xlabel='Timeout', ylabel='Time', title='Time / Timeout Relation')
plot_group[plot_group.algo == algo].plot.scatter(x='timeout',
y='time',
ax=axs[1])
axs[2].set(xlabel='Timeout', ylabel='Deviation', title='Deviation / Timeout Relation')
plot_group[plot_group.algo == algo].plot.scatter(x='timeout',
y='deviation',
ax=axs[2])
plt.tight_layout()
plt.show()
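# Minimal usage sketch (assumed input shapes, not taken from the real test
# harness): each DataFrame must carry the columns referenced above, e.g.
# 'algo', 'nodes', 'time', 'deviation', 'space' and 'total_replication', and
# each problem object only needs a 'param_fragment_size' sequence.
#   import pandas as pd
#   df = pd.DataFrame([{'algo': 'Complete', 'nodes': 2, 'time': 1.0,
#                       'deviation': 0.0, 'space': 10.0,
#                       'total_replication': 0}])
#   plot_data(df, min_nodes=2, max_nodes=2, problems=problems, squeezed=False)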
| 51.169492
| 190
| 0.525836
|
79440c1684e8cd9185325689d9bdc263f9f75510
| 5,304
|
py
|
Python
|
jtop/core/tegrastats.py
|
alx/jetson_stats
|
a55d5d67bf268bb47fe75a9a0b15598e99bfe9ea
|
[
"MIT"
] | null | null | null |
jtop/core/tegrastats.py
|
alx/jetson_stats
|
a55d5d67bf268bb47fe75a9a0b15598e99bfe9ea
|
[
"MIT"
] | null | null | null |
jtop/core/tegrastats.py
|
alx/jetson_stats
|
a55d5d67bf268bb47fe75a9a0b15598e99bfe9ea
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
# Copyright (C) 2019, Raffaello Bonghi <raffaello@rnext.it>
# All rights reserved
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Logging
import logging
# Launch command
import subprocess as sp
# Threading
from threading import Thread
# Tegrastats parser
from .tegra_parse import VALS, MTS, RAM, SWAP, IRAM, CPUS, TEMPS, VOLTS
# Create logger for jplotlib
logger = logging.getLogger(__name__)
class Tegrastats(Thread):
"""
- Subprocess read:
https://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python/4896288#4896288
- Property
https://www.programiz.com/python-programming/property
"""
class TegrastatsException(Exception):
pass
def __init__(self, path, interval):
Thread.__init__(self)
# Set interval tegrastats
self.interval = interval
# Initialize jetson stats
self._stats = {}
# Start process tegrastats
self.path = path
# Define Tegrastats process
self.p = None
# Initialize observer
self._observers = set()
def run(self):
try:
while self.p.poll() is None:
out = self.p.stdout
if out is not None:
# Read line process output
line = out.readline()
# Decode line in UTF-8
tegrastats_data = line.decode("utf-8")
# Decode and store
self._stats = self._decode(tegrastats_data)
                    # Notify all observers
for observer in self._observers:
observer.update(self._stats)
except SystemExit:
logger.error("System exit", exc_info=True)
except AttributeError:
logger.error("Attribute error", exc_info=True)
@property
def stats(self):
# Return dictionary parsed
return self._stats
def attach(self, observer):
self._observers.add(observer)
def detach(self, observer):
self._observers.discard(observer)
def open(self, callback=None):
try:
# Launch subprocess or raise and exception
self.p = sp.Popen([self.path, '--interval', str(self.interval)], stdout=sp.PIPE)
            # Run the reader thread as a daemon
self.daemon = True
self.start()
# Wait first value not empty
while not self._stats:
pass
# If callback is defined after each decode will be send the updates by function
if callback is not None:
self.attach(callback)
return True
except OSError:
logger.error("Tegrastats not in list!")
raise Tegrastats.TegrastatsException("Tegrastats is not available on this hardware")
def close(self):
if self.p is not None:
self.p.kill()
return True
else:
return False
def __enter__(self):
self.open()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def _decode(self, text):
# Find and parse all single values
stats = VALS(text)
# Parse if exist MTS
mts = MTS(text)
if mts:
stats['MTS'] = mts
# Parse RAM
stats['RAM'] = RAM(text)
# If exists parse SWAP
swap = SWAP(text)
if swap:
stats['SWAP'] = swap
# If exists parse IRAM
iram = IRAM(text)
if iram:
stats['IRAM'] = iram
# Parse CPU status
stats['CPU'] = CPUS(text)
# Parse temperatures
stats['TEMP'] = TEMPS(text)
# Parse voltages
stats['VOLT'] = VOLTS(text)
return stats
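# Illustrative usage (the binary path is an assumption; adjust per device):
#   with Tegrastats('/usr/bin/tegrastats', 500) as tegra:
#       print(tegra.stats.get('RAM'))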
# EOF
| 34.219355
| 115
| 0.625
|
47471bf5dca6e5e15b2c531b34a263f4083063a8
| 435
|
py
|
Python
|
S1/TP5/ex7.py
|
HerbeMalveillante/ecole
|
bebbc73cd678c58c9cd40389ea1cf229a0200308
|
[
"MIT"
] | null | null | null |
S1/TP5/ex7.py
|
HerbeMalveillante/ecole
|
bebbc73cd678c58c9cd40389ea1cf229a0200308
|
[
"MIT"
] | null | null | null |
S1/TP5/ex7.py
|
HerbeMalveillante/ecole
|
bebbc73cd678c58c9cd40389ea1cf229a0200308
|
[
"MIT"
] | null | null | null |
import random
def cree(n):
return [random.randint(0, 20) for i in range(n)]
def apparitions(x, lis):
return [indice for indice, item in enumerate(lis) if item == x]
entiers = int(input("Saisir le nombre d'entiers de la liste : "))
liste = cree(entiers)
print(f"Liste : {liste}")
entierRecherche = int(input("Saisir l'entier recherché : "))
print(f"Liste des indices d'apparition : {apparitions(entierRecherche, liste)}")
| 25.588235
| 80
| 0.698851
|
2004322087704bee1d0827c88ebc9164c562bb3d
| 688
|
py
|
Python
|
nightcrawler/apps/services/management/commands/script.py
|
alchucam/nightcrawler
|
e2c29c6b6b35b5090b3e5e9e11e477a5ad07fd46
|
[
"Apache-2.0"
] | null | null | null |
nightcrawler/apps/services/management/commands/script.py
|
alchucam/nightcrawler
|
e2c29c6b6b35b5090b3e5e9e11e477a5ad07fd46
|
[
"Apache-2.0"
] | 2
|
2020-06-05T18:19:00.000Z
|
2021-06-10T20:20:54.000Z
|
nightcrawler/apps/services/management/commands/script.py
|
alchucam/nightcrawler
|
e2c29c6b6b35b5090b3e5e9e11e477a5ad07fd46
|
[
"Apache-2.0"
] | 1
|
2018-12-04T06:30:09.000Z
|
2018-12-04T06:30:09.000Z
|
from django.core.management.base import BaseCommand, CommandError
from nightcrawler.apps.services.updater import *
class Command(BaseCommand):
    help = 'update models for display and analysis'
def handle(self, *args, **options):
print("Start periodic tasks")
nytimesUpdater().save_to_models()
yonhapUpdater().save_to_models()
ecnsUpdater().save_to_models()
japantimesUpdater().save_to_models()
nytimesUpdater().save_to_analysis()
yonhapUpdater().save_to_analysis()
ecnsUpdater().save_to_analysis()
japantimesUpdater().save_to_analysis()
print("Finish periodic tasks")
# Intended to run on a periodic schedule (e.g. via cron).
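# Invoked via manage.py, matching this module's file name (standard Django
# management-command convention):
#   python manage.py script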
| 34.4
| 65
| 0.703488
|
fc8cc1440b9028bcd0cd317e094f1e3ca99a128a
| 71,494
|
py
|
Python
|
modules/java/generator/gen_java.py
|
ev3dev/opencv
|
781edd9001a85f259f2c10d6c2b70204eb221e70
|
[
"BSD-3-Clause"
] | 20
|
2016-12-14T06:11:12.000Z
|
2018-10-24T20:09:47.000Z
|
modules/java/generator/gen_java.py
|
ev3dev/opencv
|
781edd9001a85f259f2c10d6c2b70204eb221e70
|
[
"BSD-3-Clause"
] | 28
|
2016-10-16T19:42:37.000Z
|
2018-09-14T21:29:48.000Z
|
modules/java/generator/gen_java.py
|
ev3dev/opencv
|
781edd9001a85f259f2c10d6c2b70204eb221e70
|
[
"BSD-3-Clause"
] | 29
|
2016-11-24T21:49:36.000Z
|
2019-07-12T20:36:58.000Z
|
#!/usr/bin/env python
import sys, re, os.path
import logging
from pprint import pformat
from string import Template
if sys.version_info[0] >= 3:
from io import StringIO
else:
from cStringIO import StringIO
class_ignore_list = (
#core
"FileNode", "FileStorage", "KDTree", "KeyPoint", "DMatch",
#features2d
"SimpleBlobDetector"
)
const_ignore_list = (
"CV_CAP_OPENNI",
"CV_CAP_PROP_OPENNI_",
"CV_CAP_INTELPERC",
"CV_CAP_PROP_INTELPERC_"
"WINDOW_AUTOSIZE",
"CV_WND_PROP_",
"CV_WINDOW_",
"CV_EVENT_",
"CV_GUI_",
"CV_PUSH_BUTTON",
"CV_CHECKBOX",
"CV_RADIOBOX",
#attention!
#the following constants are added to this list using code automatic generation
#TODO: should be checked
"CV_CAP_ANY",
"CV_CAP_MIL",
"CV_CAP_VFW",
"CV_CAP_V4L",
"CV_CAP_V4L2",
"CV_CAP_FIREWARE",
"CV_CAP_FIREWIRE",
"CV_CAP_IEEE1394",
"CV_CAP_DC1394",
"CV_CAP_CMU1394",
"CV_CAP_STEREO",
"CV_CAP_TYZX",
"CV_TYZX_LEFT",
"CV_TYZX_RIGHT",
"CV_TYZX_COLOR",
"CV_TYZX_Z",
"CV_CAP_QT",
"CV_CAP_UNICAP",
"CV_CAP_DSHOW",
"CV_CAP_PVAPI",
"CV_CAP_PROP_DC1394_OFF",
"CV_CAP_PROP_DC1394_MODE_MANUAL",
"CV_CAP_PROP_DC1394_MODE_AUTO",
"CV_CAP_PROP_DC1394_MODE_ONE_PUSH_AUTO",
"CV_CAP_PROP_POS_MSEC",
"CV_CAP_PROP_POS_FRAMES",
"CV_CAP_PROP_POS_AVI_RATIO",
"CV_CAP_PROP_FPS",
"CV_CAP_PROP_FOURCC",
"CV_CAP_PROP_FRAME_COUNT",
"CV_CAP_PROP_FORMAT",
"CV_CAP_PROP_MODE",
"CV_CAP_PROP_BRIGHTNESS",
"CV_CAP_PROP_CONTRAST",
"CV_CAP_PROP_SATURATION",
"CV_CAP_PROP_HUE",
"CV_CAP_PROP_GAIN",
"CV_CAP_PROP_EXPOSURE",
"CV_CAP_PROP_CONVERT_RGB",
"CV_CAP_PROP_WHITE_BALANCE_BLUE_U",
"CV_CAP_PROP_RECTIFICATION",
"CV_CAP_PROP_MONOCHROME",
"CV_CAP_PROP_SHARPNESS",
"CV_CAP_PROP_AUTO_EXPOSURE",
"CV_CAP_PROP_GAMMA",
"CV_CAP_PROP_TEMPERATURE",
"CV_CAP_PROP_TRIGGER",
"CV_CAP_PROP_TRIGGER_DELAY",
"CV_CAP_PROP_WHITE_BALANCE_RED_V",
"CV_CAP_PROP_MAX_DC1394",
"CV_CAP_GSTREAMER_QUEUE_LENGTH",
"CV_CAP_PROP_PVAPI_MULTICASTIP",
"CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING",
"EVENT_.*",
"CV_L?(BGRA?|RGBA?|GRAY|XYZ|YCrCb|Luv|Lab|HLS|YUV|HSV)\d*2L?(BGRA?|RGBA?|GRAY|XYZ|YCrCb|Luv|Lab|HLS|YUV|HSV).*",
"CV_COLORCVT_MAX",
"CV_.*Bayer.*",
"CV_YUV420(i|sp|p)2.+",
"CV_TM_.+",
"CV_FLOODFILL_.+",
"CV_ADAPTIVE_THRESH_.+",
"WINDOW_.+",
"WND_PROP_.+",
)
const_private_list = (
"CV_MOP_.+",
"CV_INTER_.+",
"CV_THRESH_.+",
"CV_INPAINT_.+",
"CV_RETR_.+",
"CV_CHAIN_APPROX_.+",
"OPPONENTEXTRACTOR",
"GRIDDETECTOR",
"PYRAMIDDETECTOR",
"DYNAMICDETECTOR",
)
# { Module : { public : [[name, val],...], private : [[]...] } }
missing_consts = \
{
'Core' :
{
'private' :
(
('CV_8U', 0 ), ('CV_8S', 1 ),
('CV_16U', 2 ), ('CV_16S', 3 ),
('CV_32S', 4 ),
('CV_32F', 5 ), ('CV_64F', 6 ),
('CV_USRTYPE1', 7 ),
), # private
'public' :
(
('SVD_MODIFY_A', 1), ('SVD_NO_UV', 2), ('SVD_FULL_UV', 4),
('FILLED', -1),
('REDUCE_SUM', 0), ('REDUCE_AVG', 1), ('REDUCE_MAX', 2), ('REDUCE_MIN', 3),
) #public
}, # Core
"Imgproc":
{
'private' :
(
('IPL_BORDER_CONSTANT', 0 ),
('IPL_BORDER_REPLICATE', 1 ),
('IPL_BORDER_REFLECT', 2 ),
('IPL_BORDER_WRAP', 3 ),
('IPL_BORDER_REFLECT_101', 4 ),
('IPL_BORDER_TRANSPARENT', 5 ),
), # private
'public' :
(
('LINE_AA', 16), ('LINE_8', 8), ('LINE_4', 4),
) #public
}, # Imgproc
"Calib3d":
{
'public' :
(
('CALIB_USE_INTRINSIC_GUESS', '1'),
('CALIB_RECOMPUTE_EXTRINSIC', '2'),
('CALIB_CHECK_COND', '4'),
('CALIB_FIX_SKEW', '8'),
('CALIB_FIX_K1', '16'),
('CALIB_FIX_K2', '32'),
('CALIB_FIX_K3', '64'),
('CALIB_FIX_K4', '128'),
('CALIB_FIX_INTRINSIC', '256')
)
}, # Calib3d
"Video":
{
'private' :
(
('CV_LKFLOW_INITIAL_GUESSES', 4 ),
('CV_LKFLOW_GET_MIN_EIGENVALS', 8 ),
) # private
}, # Video
}
# c_type : { java/jni correspondence }
type_dict = {
# "simple" : { j_type : "?", jn_type : "?", jni_type : "?", suffix : "?" },
"" : { "j_type" : "", "jn_type" : "long", "jni_type" : "jlong" }, # c-tor ret_type
"void" : { "j_type" : "void", "jn_type" : "void", "jni_type" : "void" },
"env" : { "j_type" : "", "jn_type" : "", "jni_type" : "JNIEnv*"},
"cls" : { "j_type" : "", "jn_type" : "", "jni_type" : "jclass"},
"bool" : { "j_type" : "boolean", "jn_type" : "boolean", "jni_type" : "jboolean", "suffix" : "Z" },
"char" : { "j_type" : "char", "jn_type" : "char", "jni_type" : "jchar", "suffix" : "C" },
"int" : { "j_type" : "int", "jn_type" : "int", "jni_type" : "jint", "suffix" : "I" },
"long" : { "j_type" : "int", "jn_type" : "int", "jni_type" : "jint", "suffix" : "I" },
"float" : { "j_type" : "float", "jn_type" : "float", "jni_type" : "jfloat", "suffix" : "F" },
"double" : { "j_type" : "double", "jn_type" : "double", "jni_type" : "jdouble", "suffix" : "D" },
"size_t" : { "j_type" : "long", "jn_type" : "long", "jni_type" : "jlong", "suffix" : "J" },
"__int64" : { "j_type" : "long", "jn_type" : "long", "jni_type" : "jlong", "suffix" : "J" },
"int64" : { "j_type" : "long", "jn_type" : "long", "jni_type" : "jlong", "suffix" : "J" },
"double[]": { "j_type" : "double[]", "jn_type" : "double[]", "jni_type" : "jdoubleArray", "suffix" : "_3D" },
# "complex" : { j_type : "?", jn_args : (("", ""),), jn_name : "", jni_var : "", jni_name : "", "suffix" : "?" },
"vector_Point" : { "j_type" : "MatOfPoint", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point> %(n)s", "suffix" : "J" },
"vector_Point2f" : { "j_type" : "MatOfPoint2f", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point2f> %(n)s", "suffix" : "J" },
#"vector_Point2d" : { "j_type" : "MatOfPoint2d", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point2d> %(n)s", "suffix" : "J" },
"vector_Point3i" : { "j_type" : "MatOfPoint3", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point3i> %(n)s", "suffix" : "J" },
"vector_Point3f" : { "j_type" : "MatOfPoint3f", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point3f> %(n)s", "suffix" : "J" },
#"vector_Point3d" : { "j_type" : "MatOfPoint3d", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Point3d> %(n)s", "suffix" : "J" },
"vector_KeyPoint" : { "j_type" : "MatOfKeyPoint", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<KeyPoint> %(n)s", "suffix" : "J" },
"vector_DMatch" : { "j_type" : "MatOfDMatch", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<DMatch> %(n)s", "suffix" : "J" },
"vector_Rect" : { "j_type" : "MatOfRect", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Rect> %(n)s", "suffix" : "J" },
"vector_uchar" : { "j_type" : "MatOfByte", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<uchar> %(n)s", "suffix" : "J" },
"vector_char" : { "j_type" : "MatOfByte", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<char> %(n)s", "suffix" : "J" },
"vector_int" : { "j_type" : "MatOfInt", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<int> %(n)s", "suffix" : "J" },
"vector_float" : { "j_type" : "MatOfFloat", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<float> %(n)s", "suffix" : "J" },
"vector_double" : { "j_type" : "MatOfDouble", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<double> %(n)s", "suffix" : "J" },
"vector_Vec4i" : { "j_type" : "MatOfInt4", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Vec4i> %(n)s", "suffix" : "J" },
"vector_Vec4f" : { "j_type" : "MatOfFloat4", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Vec4f> %(n)s", "suffix" : "J" },
"vector_Vec6f" : { "j_type" : "MatOfFloat6", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Vec6f> %(n)s", "suffix" : "J" },
"vector_Mat" : { "j_type" : "List<Mat>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector<Mat> %(n)s", "suffix" : "J" },
"vector_vector_KeyPoint": { "j_type" : "List<MatOfKeyPoint>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<KeyPoint> > %(n)s" },
"vector_vector_DMatch" : { "j_type" : "List<MatOfDMatch>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<DMatch> > %(n)s" },
"vector_vector_char" : { "j_type" : "List<MatOfByte>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<char> > %(n)s" },
"vector_vector_Point" : { "j_type" : "List<MatOfPoint>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<Point> > %(n)s" },
"vector_vector_Point2f" : { "j_type" : "List<MatOfPoint2f>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<Point2f> > %(n)s" },
"vector_vector_Point3f" : { "j_type" : "List<MatOfPoint3f>", "jn_type" : "long", "jni_type" : "jlong", "jni_var" : "std::vector< std::vector<Point3f> > %(n)s" },
"Mat" : { "j_type" : "Mat", "jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
"jni_var" : "Mat& %(n)s = *((Mat*)%(n)s_nativeObj)",
"jni_type" : "jlong", #"jni_name" : "*%(n)s",
"suffix" : "J" },
"Point" : { "j_type" : "Point", "jn_args" : (("double", ".x"), ("double", ".y")),
"jni_var" : "Point %(n)s((int)%(n)s_x, (int)%(n)s_y)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"Point2f" : { "j_type" : "Point", "jn_args" : (("double", ".x"), ("double", ".y")),
"jni_var" : "Point2f %(n)s((float)%(n)s_x, (float)%(n)s_y)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"Point2d" : { "j_type" : "Point", "jn_args" : (("double", ".x"), ("double", ".y")),
"jni_var" : "Point2d %(n)s(%(n)s_x, %(n)s_y)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"Point3i" : { "j_type" : "Point3", "jn_args" : (("double", ".x"), ("double", ".y"), ("double", ".z")),
"jni_var" : "Point3i %(n)s((int)%(n)s_x, (int)%(n)s_y, (int)%(n)s_z)", "jni_type" : "jdoubleArray",
"suffix" : "DDD"},
"Point3f" : { "j_type" : "Point3", "jn_args" : (("double", ".x"), ("double", ".y"), ("double", ".z")),
"jni_var" : "Point3f %(n)s((float)%(n)s_x, (float)%(n)s_y, (float)%(n)s_z)", "jni_type" : "jdoubleArray",
"suffix" : "DDD"},
"Point3d" : { "j_type" : "Point3", "jn_args" : (("double", ".x"), ("double", ".y"), ("double", ".z")),
"jni_var" : "Point3d %(n)s(%(n)s_x, %(n)s_y, %(n)s_z)", "jni_type" : "jdoubleArray",
"suffix" : "DDD"},
"KeyPoint": { "j_type" : "KeyPoint", "jn_args" : (("float", ".x"), ("float", ".y"), ("float", ".size"),
("float", ".angle"), ("float", ".response"), ("int", ".octave"), ("int", ".class_id")),
"jni_var" : "KeyPoint %(n)s(%(n)s_x, %(n)s_y, %(n)s_size, %(n)s_angle, %(n)s_response, %(n)s_octave, %(n)s_class_id)",
"jni_type" : "jdoubleArray",
"suffix" : "FFFFFII"},
"DMatch" : { "j_type" : "DMatch", "jn_args" : ( ('int', 'queryIdx'), ('int', 'trainIdx'),
('int', 'imgIdx'), ('float', 'distance'), ),
"jni_var" : "DMatch %(n)s(%(n)s_queryIdx, %(n)s_trainIdx, %(n)s_imgIdx, %(n)s_distance)",
"jni_type" : "jdoubleArray",
"suffix" : "IIIF"},
"Rect" : { "j_type" : "Rect", "jn_args" : (("int", ".x"), ("int", ".y"), ("int", ".width"), ("int", ".height")),
"jni_var" : "Rect %(n)s(%(n)s_x, %(n)s_y, %(n)s_width, %(n)s_height)", "jni_type" : "jdoubleArray",
"suffix" : "IIII"},
"Size" : { "j_type" : "Size", "jn_args" : (("double", ".width"), ("double", ".height")),
"jni_var" : "Size %(n)s((int)%(n)s_width, (int)%(n)s_height)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"Size2f" : { "j_type" : "Size", "jn_args" : (("double", ".width"), ("double", ".height")),
"jni_var" : "Size2f %(n)s((float)%(n)s_width, (float)%(n)s_height)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"RotatedRect": { "j_type" : "RotatedRect", "jn_args" : (("double", ".center.x"), ("double", ".center.y"), ("double", ".size.width"), ("double", ".size.height"), ("double", ".angle")),
"jni_var" : "RotatedRect %(n)s(cv::Point2f(%(n)s_center_x, %(n)s_center_y), cv::Size2f(%(n)s_size_width, %(n)s_size_height), %(n)s_angle)",
"jni_type" : "jdoubleArray", "suffix" : "DDDDD"},
"Scalar" : { "j_type" : "Scalar", "jn_args" : (("double", ".val[0]"), ("double", ".val[1]"), ("double", ".val[2]"), ("double", ".val[3]")),
"jni_var" : "Scalar %(n)s(%(n)s_val0, %(n)s_val1, %(n)s_val2, %(n)s_val3)", "jni_type" : "jdoubleArray",
"suffix" : "DDDD"},
"Range" : { "j_type" : "Range", "jn_args" : (("int", ".start"), ("int", ".end")),
"jni_var" : "Range %(n)s(%(n)s_start, %(n)s_end)", "jni_type" : "jdoubleArray",
"suffix" : "II"},
"CvSlice" : { "j_type" : "Range", "jn_args" : (("int", ".start"), ("int", ".end")),
"jni_var" : "Range %(n)s(%(n)s_start, %(n)s_end)", "jni_type" : "jdoubleArray",
"suffix" : "II"},
"String" : { "j_type" : "String", "jn_type" : "String",
"jni_type" : "jstring", "jni_name" : "n_%(n)s",
"jni_var" : 'const char* utf_%(n)s = env->GetStringUTFChars(%(n)s, 0); String n_%(n)s( utf_%(n)s ? utf_%(n)s : "" ); env->ReleaseStringUTFChars(%(n)s, utf_%(n)s)',
"suffix" : "Ljava_lang_String_2"},
"c_string": { "j_type" : "String", "jn_type" : "String",
"jni_type" : "jstring", "jni_name" : "n_%(n)s.c_str()",
"jni_var" : 'const char* utf_%(n)s = env->GetStringUTFChars(%(n)s, 0); String n_%(n)s( utf_%(n)s ? utf_%(n)s : "" ); env->ReleaseStringUTFChars(%(n)s, utf_%(n)s)',
"suffix" : "Ljava_lang_String_2"},
"TermCriteria": { "j_type" : "TermCriteria", "jn_args" : (("int", ".type"), ("int", ".maxCount"), ("double", ".epsilon")),
"jni_var" : "TermCriteria %(n)s(%(n)s_type, %(n)s_maxCount, %(n)s_epsilon)", "jni_type" : "jdoubleArray",
"suffix" : "IID"},
"CvTermCriteria": { "j_type" : "TermCriteria", "jn_args" : (("int", ".type"), ("int", ".maxCount"), ("double", ".epsilon")),
"jni_var" : "TermCriteria %(n)s(%(n)s_type, %(n)s_maxCount, %(n)s_epsilon)", "jni_type" : "jdoubleArray",
"suffix" : "IID"},
"Vec2d" : { "j_type" : "double[]", "jn_args" : (("double", ".val[0]"), ("double", ".val[1]")),
"jn_type" : "double[]",
"jni_var" : "Vec2d %(n)s(%(n)s_val0, %(n)s_val1)", "jni_type" : "jdoubleArray",
"suffix" : "DD"},
"Vec3d" : { "j_type" : "double[]", "jn_args" : (("double", ".val[0]"), ("double", ".val[1]"), ("double", ".val[2]")),
"jn_type" : "double[]",
"jni_var" : "Vec3d %(n)s(%(n)s_val0, %(n)s_val1, %(n)s_val2)", "jni_type" : "jdoubleArray",
"suffix" : "DDD"},
"Moments" : {
"j_type" : "Moments",
"jn_args" : (("double", ".m00"), ("double", ".m10"), ("double", ".m01"), ("double", ".m20"), ("double", ".m11"),
("double", ".m02"), ("double", ".m30"), ("double", ".m21"), ("double", ".m12"), ("double", ".m03")),
"jni_var" : "Moments %(n)s(%(n)s_m00, %(n)s_m10, %(n)s_m01, %(n)s_m20, %(n)s_m11, %(n)s_m02, %(n)s_m30, %(n)s_m21, %(n)s_m12, %(n)s_m03)",
"jni_type" : "jdoubleArray",
"suffix" : "DDDDDDDDDD"},
}
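# Illustrative sketch (editor's note, not consumed by the generator itself): the
# entries above are used as %-templates, with %(n)s replaced by the argument name.
# For a hypothetical argument named "pts" of ctype "vector_Point":
#
#   type_dict["vector_Point"]["jni_var"] % {"n": "pts"}
#   # -> 'std::vector<Point> pts'
#
# and the matching Java-side type is type_dict["vector_Point"]["j_type"] ("MatOfPoint").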
# { class : { func : {j_code, jn_code, cpp_code} } }
ManualFuncs = {
'Core' :
{
'minMaxLoc' : {
'j_code' : """
// manual port
public static class MinMaxLocResult {
public double minVal;
public double maxVal;
public Point minLoc;
public Point maxLoc;
public MinMaxLocResult() {
minVal=0; maxVal=0;
minLoc=new Point();
maxLoc=new Point();
}
}
// C++: minMaxLoc(Mat src, double* minVal, double* maxVal=0, Point* minLoc=0, Point* maxLoc=0, InputArray mask=noArray())
//javadoc: minMaxLoc(src, mask)
public static MinMaxLocResult minMaxLoc(Mat src, Mat mask) {
MinMaxLocResult res = new MinMaxLocResult();
long maskNativeObj=0;
if (mask != null) {
maskNativeObj=mask.nativeObj;
}
double resarr[] = n_minMaxLocManual(src.nativeObj, maskNativeObj);
res.minVal=resarr[0];
res.maxVal=resarr[1];
res.minLoc.x=resarr[2];
res.minLoc.y=resarr[3];
res.maxLoc.x=resarr[4];
res.maxLoc.y=resarr[5];
return res;
}
//javadoc: minMaxLoc(src)
public static MinMaxLocResult minMaxLoc(Mat src) {
return minMaxLoc(src, null);
}
""",
'jn_code' :
""" private static native double[] n_minMaxLocManual(long src_nativeObj, long mask_nativeObj);\n""",
'cpp_code' :
"""
// C++: minMaxLoc(Mat src, double* minVal, double* maxVal=0, Point* minLoc=0, Point* maxLoc=0, InputArray mask=noArray())
JNIEXPORT jdoubleArray JNICALL Java_org_opencv_core_Core_n_1minMaxLocManual (JNIEnv*, jclass, jlong, jlong);
JNIEXPORT jdoubleArray JNICALL Java_org_opencv_core_Core_n_1minMaxLocManual
(JNIEnv* env, jclass, jlong src_nativeObj, jlong mask_nativeObj)
{
try {
LOGD("Core::n_1minMaxLoc()");
jdoubleArray result;
result = env->NewDoubleArray(6);
if (result == NULL) {
return NULL; /* out of memory error thrown */
}
Mat& src = *((Mat*)src_nativeObj);
double minVal, maxVal;
Point minLoc, maxLoc;
if (mask_nativeObj != 0) {
Mat& mask = *((Mat*)mask_nativeObj);
minMaxLoc(src, &minVal, &maxVal, &minLoc, &maxLoc, mask);
} else {
minMaxLoc(src, &minVal, &maxVal, &minLoc, &maxLoc);
}
jdouble fill[6];
fill[0]=minVal;
fill[1]=maxVal;
fill[2]=minLoc.x;
fill[3]=minLoc.y;
fill[4]=maxLoc.x;
fill[5]=maxLoc.y;
env->SetDoubleArrayRegion(result, 0, 6, fill);
return result;
} catch(const cv::Exception& e) {
LOGD("Core::n_1minMaxLoc() catched cv::Exception: %s", e.what());
jclass je = env->FindClass("org/opencv/core/CvException");
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
return NULL;
} catch (...) {
LOGD("Core::n_1minMaxLoc() catched unknown exception (...)");
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, "Unknown exception in JNI code {core::minMaxLoc()}");
return NULL;
}
}
""",
}, # minMaxLoc
## "checkRange" : #TBD
## {'j_code' : '/* TBD: checkRange() */', 'jn_code' : '', 'cpp_code' : '' },
"checkHardwareSupport" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"setUseOptimized" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"useOptimized" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
}, # Core
'Imgproc' :
{
'getTextSize' :
{
'j_code' :
"""
// C++: Size getTextSize(const String& text, int fontFace, double fontScale, int thickness, int* baseLine);
//javadoc:getTextSize(text, fontFace, fontScale, thickness, baseLine)
public static Size getTextSize(String text, int fontFace, double fontScale, int thickness, int[] baseLine) {
if(baseLine != null && baseLine.length != 1)
throw new java.lang.IllegalArgumentException("'baseLine' must be 'int[1]' or 'null'.");
Size retVal = new Size(n_getTextSize(text, fontFace, fontScale, thickness, baseLine));
return retVal;
}
""",
'jn_code' :
""" private static native double[] n_getTextSize(String text, int fontFace, double fontScale, int thickness, int[] baseLine);\n""",
'cpp_code' :
"""
// C++: Size getTextSize(const String& text, int fontFace, double fontScale, int thickness, int* baseLine);
JNIEXPORT jdoubleArray JNICALL Java_org_opencv_imgproc_Imgproc_n_1getTextSize (JNIEnv*, jclass, jstring, jint, jdouble, jint, jintArray);
JNIEXPORT jdoubleArray JNICALL Java_org_opencv_imgproc_Imgproc_n_1getTextSize
(JNIEnv* env, jclass, jstring text, jint fontFace, jdouble fontScale, jint thickness, jintArray baseLine)
{
try {
LOGD("Core::n_1getTextSize()");
jdoubleArray result;
result = env->NewDoubleArray(2);
if (result == NULL) {
return NULL; /* out of memory error thrown */
}
const char* utf_text = env->GetStringUTFChars(text, 0);
String n_text( utf_text ? utf_text : "" );
env->ReleaseStringUTFChars(text, utf_text);
int _baseLine;
int* pbaseLine = 0;
if (baseLine != NULL)
pbaseLine = &_baseLine;
cv::Size rsize = cv::getTextSize(n_text, (int)fontFace, (double)fontScale, (int)thickness, pbaseLine);
jdouble fill[2];
fill[0]=rsize.width;
fill[1]=rsize.height;
env->SetDoubleArrayRegion(result, 0, 2, fill);
if (baseLine != NULL) {
jint jbaseLine = (jint)(*pbaseLine);
env->SetIntArrayRegion(baseLine, 0, 1, &jbaseLine);
}
return result;
} catch(const cv::Exception& e) {
LOGD("Imgproc::n_1getTextSize() catched cv::Exception: %s", e.what());
jclass je = env->FindClass("org/opencv/core/CvException");
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
return NULL;
} catch (...) {
LOGD("Imgproc::n_1getTextSize() catched unknown exception (...)");
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, "Unknown exception in JNI code {core::getTextSize()}");
return NULL;
}
}
""",
}, # getTextSize
}, # Imgproc
'Highgui' :
{
"namedWindow" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"destroyWindow" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"destroyAllWindows" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"startWindowThread" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"setWindowProperty" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"getWindowProperty" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"getTrackbarPos" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"setTrackbarPos" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"imshow" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"waitKey" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"moveWindow" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
"resizeWindow" : {'j_code' : '', 'jn_code' : '', 'cpp_code' : '' },
}, # Highgui
}
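# How ManualFuncs is consumed (sketch): add_func() skips auto-porting any function
# listed here, and gen_class() later splices the three blocks verbatim, e.g.
#
#   ManualFuncs["Core"]["minMaxLoc"]["j_code"]   -> written into Core.java
#   ManualFuncs["Core"]["minMaxLoc"]["jn_code"]  -> native-method declaration section
#   ManualFuncs["Core"]["minMaxLoc"]["cpp_code"] -> JNI body in the module .cpp
#
# An entry with three empty strings (e.g. "imshow" under 'Highgui') suppresses
# generation of that function entirely.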
# { class : { func : { arg_name : {"ctype" : ctype, "attrib" : [attrib]} } } }
func_arg_fix = {
'' : {
'randu' : { 'low' : {"ctype" : 'double'},
'high' : {"ctype" : 'double'} },
'randn' : { 'mean' : {"ctype" : 'double'},
'stddev' : {"ctype" : 'double'} },
'inRange' : { 'lowerb' : {"ctype" : 'Scalar'},
'upperb' : {"ctype" : 'Scalar'} },
'goodFeaturesToTrack' : { 'corners' : {"ctype" : 'vector_Point'} },
'findFundamentalMat' : { 'points1' : {"ctype" : 'vector_Point2f'},
'points2' : {"ctype" : 'vector_Point2f'} },
'cornerSubPix' : { 'corners' : {"ctype" : 'vector_Point2f'} },
'minEnclosingCircle' : { 'points' : {"ctype" : 'vector_Point2f'} },
'findHomography' : { 'srcPoints' : {"ctype" : 'vector_Point2f'},
'dstPoints' : {"ctype" : 'vector_Point2f'} },
'solvePnP' : { 'objectPoints' : {"ctype" : 'vector_Point3f'},
'imagePoints' : {"ctype" : 'vector_Point2f'},
'distCoeffs' : {"ctype" : 'vector_double' } },
'solvePnPRansac' : { 'objectPoints' : {"ctype" : 'vector_Point3f'},
'imagePoints' : {"ctype" : 'vector_Point2f'},
'distCoeffs' : {"ctype" : 'vector_double' } },
'calcOpticalFlowPyrLK' : { 'prevPts' : {"ctype" : 'vector_Point2f'},
'nextPts' : {"ctype" : 'vector_Point2f'},
'status' : {"ctype" : 'vector_uchar'},
'err' : {"ctype" : 'vector_float'} },
'fitEllipse' : { 'points' : {"ctype" : 'vector_Point2f'} },
'fillPoly' : { 'pts' : {"ctype" : 'vector_vector_Point'} },
'polylines' : { 'pts' : {"ctype" : 'vector_vector_Point'} },
'fillConvexPoly' : { 'points' : {"ctype" : 'vector_Point'} },
'boundingRect' : { 'points' : {"ctype" : 'vector_Point'} },
'approxPolyDP' : { 'curve' : {"ctype" : 'vector_Point2f'},
'approxCurve' : {"ctype" : 'vector_Point2f'} },
'arcLength' : { 'curve' : {"ctype" : 'vector_Point2f'} },
'pointPolygonTest' : { 'contour' : {"ctype" : 'vector_Point2f'} },
'minAreaRect' : { 'points' : {"ctype" : 'vector_Point2f'} },
'getAffineTransform' : { 'src' : {"ctype" : 'vector_Point2f'},
'dst' : {"ctype" : 'vector_Point2f'} },
'hconcat' : { 'src' : {"ctype" : 'vector_Mat'} },
'vconcat' : { 'src' : {"ctype" : 'vector_Mat'} },
'undistortPoints' : { 'src' : {"ctype" : 'vector_Point2f'},
'dst' : {"ctype" : 'vector_Point2f'} },
'checkRange' : {'pos' : {"ctype" : '*'} },
'meanStdDev' : { 'mean' : {"ctype" : 'vector_double'},
'stddev' : {"ctype" : 'vector_double'} },
'drawContours' : {'contours' : {"ctype" : 'vector_vector_Point'} },
'findContours' : {'contours' : {"ctype" : 'vector_vector_Point'} },
'convexityDefects' : { 'contour' : {"ctype" : 'vector_Point'},
'convexhull' : {"ctype" : 'vector_int'},
'convexityDefects' : {"ctype" : 'vector_Vec4i'} },
'isContourConvex' : { 'contour' : {"ctype" : 'vector_Point'} },
'convexHull' : { 'points' : {"ctype" : 'vector_Point'},
'hull' : {"ctype" : 'vector_int'},
'returnPoints' : {"ctype" : ''} },
'projectPoints' : { 'objectPoints' : {"ctype" : 'vector_Point3f'},
'imagePoints' : {"ctype" : 'vector_Point2f'},
'distCoeffs' : {"ctype" : 'vector_double' } },
'initCameraMatrix2D' : { 'objectPoints' : {"ctype" : 'vector_vector_Point3f'},
'imagePoints' : {"ctype" : 'vector_vector_Point2f'} },
'findChessboardCorners' : { 'corners' : {"ctype" : 'vector_Point2f'} },
'drawChessboardCorners' : { 'corners' : {"ctype" : 'vector_Point2f'} },
'mixChannels' : { 'dst' : {"attrib" : []} },
}, # '', i.e. no class
} # func_arg_fix
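# How func_arg_fix is applied (sketch): FuncInfo.__init__() looks up
# func_arg_fix[classname][jname][arg_name] and overrides the parsed ctype/attrib
# before ArgInfo is built. E.g. for a hypothetical parsed argument of fillPoly:
#
#   ['InputArrayOfArrays', 'pts', '', []]
#   # becomes ['vector_vector_Point', 'pts', '', []] via the 'fillPoly' entry above,
#   # so the Java wrapper takes List<MatOfPoint> instead of a raw Mat.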
def getLibVersion(version_hpp_path):
version_file = open(version_hpp_path, "rt").read()
major = re.search(r"^\W*#\W*define\W+CV_VERSION_MAJOR\W+(\d+)\W*$", version_file, re.MULTILINE).group(1)
minor = re.search(r"^\W*#\W*define\W+CV_VERSION_MINOR\W+(\d+)\W*$", version_file, re.MULTILINE).group(1)
revision = re.search(r"^\W*#\W*define\W+CV_VERSION_REVISION\W+(\d+)\W*$", version_file, re.MULTILINE).group(1)
status = re.search(r"^\W*#\W*define\W+CV_VERSION_STATUS\W+\"(.*?)\"\W*$", version_file, re.MULTILINE).group(1)
return (major, minor, revision, status)
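# Example (editor's sketch; the header contents below are hypothetical): given a
# version.hpp containing
#   #define CV_VERSION_MAJOR    2
#   #define CV_VERSION_MINOR    4
#   #define CV_VERSION_REVISION 9
#   #define CV_VERSION_STATUS   ""
# getLibVersion() returns ('2', '4', '9', '') -- note the components are strings,
# which is why libVersionBlock() joins them with '.'.join(...) below.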
def libVersionBlock():
(major, minor, revision, status) = getLibVersion(
(os.path.dirname(__file__) or '.') + '/../../core/include/opencv2/core/version.hpp')
version_str = '.'.join( (major, minor, revision) ) + status
version_suffix = ''.join( (major, minor, revision) )
return """
// these constants are wrapped inside functions to prevent inlining
private static String getVersion() { return "%(v)s"; }
private static String getNativeLibraryName() { return "opencv_java%(vs)s"; }
private static int getVersionMajor() { return %(ma)s; }
private static int getVersionMinor() { return %(mi)s; }
private static int getVersionRevision() { return %(re)s; }
private static String getVersionStatus() { return "%(st)s"; }
public static final String VERSION = getVersion();
public static final String NATIVE_LIBRARY_NAME = getNativeLibraryName();
public static final int VERSION_MAJOR = getVersionMajor();
public static final int VERSION_MINOR = getVersionMinor();
public static final int VERSION_REVISION = getVersionRevision();
public static final String VERSION_STATUS = getVersionStatus();
""" % { 'v' : version_str, 'vs' : version_suffix, 'ma' : major, 'mi' : minor, 're' : revision, 'st': status }
T_JAVA_START_INHERITED = """
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
// C++: class $name
//javadoc: $name
public class $jname extends $base {
protected $jname(long addr) { super(addr); }
"""
T_JAVA_START_ORPHAN = """
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
// C++: class $name
//javadoc: $name
public class $jname {
protected final long nativeObj;
protected $jname(long addr) { nativeObj = addr; }
"""
T_JAVA_START_MODULE = """
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
public class $jname {
"""
T_CPP_MODULE = """
//
// This file is auto-generated, please don't edit!
//
#define LOG_TAG "org.opencv.$m"
#include "common.h"
#include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_$M
#include <string>
#include "opencv2/$m.hpp"
$includes
using namespace cv;
/// throw java exception
static void throwJavaException(JNIEnv *env, const std::exception *e, const char *method) {
std::string what = "unknown exception";
jclass je = 0;
if(e) {
std::string exception_type = "std::exception";
if(dynamic_cast<const cv::Exception*>(e)) {
exception_type = "cv::Exception";
je = env->FindClass("org/opencv/core/CvException");
}
what = exception_type + ": " + e->what();
}
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, what.c_str());
LOGE("%s caught %s", method, what.c_str());
(void)method; // avoid "unused" warning
}
extern "C" {
$code
} // extern "C"
#endif // HAVE_OPENCV_$M
"""
class GeneralInfo():
def __init__(self, name, namespaces):
self.namespace, self.classpath, self.classname, self.name = self.parseName(name, namespaces)
def parseName(self, name, namespaces):
'''
input: full name and available namespaces
returns: (namespace, classpath, classname, name)
'''
name = name[name.find(" ")+1:].strip() # remove struct/class/const prefix
spaceName = ""
localName = name # <classes>.<name>
for namespace in sorted(namespaces, key=len, reverse=True):
if name.startswith(namespace + "."):
spaceName = namespace
localName = name.replace(namespace + ".", "")
break
pieces = localName.split(".")
if len(pieces) > 2: # <class>.<class>.<class>.<name>
return spaceName, ".".join(pieces[:-1]), pieces[-2], pieces[-1]
elif len(pieces) == 2: # <class>.<name>
return spaceName, pieces[0], pieces[0], pieces[1]
elif len(pieces) == 1: # <name>
return spaceName, "", "", pieces[0]
else:
return spaceName, "", "", "" # error?! -- keep the 4-tuple arity consistent with the other branches
def fullName(self, isCPP=False):
result = ".".join([self.fullClass(), self.name])
return result if not isCPP else result.replace(".", "::")
def fullClass(self, isCPP=False):
result = ".".join([f for f in [self.namespace] + self.classpath.split(".") if len(f)>0])
return result if not isCPP else result.replace(".", "::")
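# parseName() behavior sketch (hypothetical inputs):
#   parseName("class cv.ml.SVM", ["cv", "cv.ml"])  -> ("cv.ml", "", "", "SVM")
#   parseName("class cv.Algorithm.Params", ["cv"]) -> ("cv", "Algorithm", "Algorithm", "Params")
# Namespaces are tried longest-first, so "cv.ml" wins over "cv".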
class ConstInfo(GeneralInfo):
def __init__(self, decl, addedManually=False, namespaces=[]):
GeneralInfo.__init__(self, decl[0], namespaces)
self.cname = self.name.replace(".", "::")
self.value = decl[1]
self.addedManually = addedManually
def __repr__(self):
return Template("CONST $name=$value$manual").substitute(name=self.name,
value=self.value,
manual="(manual)" if self.addedManually else "")
def isIgnored(self):
for c in const_ignore_list:
if re.match(c, self.name):
return True
return False
class ClassPropInfo():
def __init__(self, decl): # [f_ctype, f_name, '', '/RW']
self.ctype = decl[0]
self.name = decl[1]
self.rw = "/RW" in decl[3]
def __repr__(self):
return Template("PROP $ctype $name").substitute(ctype=self.ctype, name=self.name)
class ClassInfo(GeneralInfo):
def __init__(self, decl, namespaces=[]): # [ 'class/struct cname', ': base', [modlist] ]
GeneralInfo.__init__(self, decl[0], namespaces)
self.cname = self.name.replace(".", "::")
self.methods = []
self.methods_suffixes = {}
self.consts = [] # using a list to preserve the occurrence order
self.private_consts = []
self.imports = set()
self.props= []
self.jname = self.name
self.smart = None # True if class stores Ptr<T>* instead of T* in nativeObj field
self.j_code = None # java code stream
self.jn_code = None # jni code stream
self.cpp_code = None # cpp code stream
for m in decl[2]:
if m.startswith("="):
self.jname = m[1:]
self.base = ''
if decl[1]:
#self.base = re.sub(r"\b"+self.jname+r"\b", "", decl[1].replace(":", "")).strip()
self.base = re.sub(r"^.*:", "", decl[1].split(",")[0]).strip().replace(self.jname, "")
def __repr__(self):
return Template("CLASS $namespace::$classpath.$name : $base").substitute(**self.__dict__)
def getAllImports(self, module):
return ["import %s;" % c for c in sorted(self.imports) if not c.startswith('org.opencv.'+module)]
def addImports(self, ctype):
if ctype.startswith('vector_vector'):
self.imports.add("org.opencv.core.Mat")
self.imports.add("org.opencv.utils.Converters")
self.imports.add("java.util.List")
self.imports.add("java.util.ArrayList")
self.addImports(ctype.replace('vector_vector', 'vector'))
elif ctype.startswith('Feature2D'):
self.imports.add("org.opencv.features2d.Feature2D")
elif ctype.startswith('vector'):
self.imports.add("org.opencv.core.Mat")
self.imports.add('java.util.ArrayList')
if type_dict[ctype]['j_type'].startswith('MatOf'):
self.imports.add("org.opencv.core." + type_dict[ctype]['j_type'])
else:
self.imports.add("java.util.List")
self.imports.add("org.opencv.utils.Converters")
self.addImports(ctype.replace('vector_', ''))
else:
j_type = ''
if ctype in type_dict:
j_type = type_dict[ctype]['j_type']
elif ctype in ("Algorithm"):
j_type = ctype
if j_type in ( "CvType", "Mat", "Point", "Point3", "Range", "Rect", "RotatedRect", "Scalar", "Size", "TermCriteria", "Algorithm" ):
self.imports.add("org.opencv.core." + j_type)
if j_type == 'String':
self.imports.add("java.lang.String")
def getAllMethods(self):
result = []
result.extend([fi for fi in sorted(self.methods) if fi.isconstructor])
result.extend([fi for fi in sorted(self.methods) if not fi.isconstructor])
return result
def addMethod(self, fi):
self.methods.append(fi)
def getConst(self, name):
for cand in self.consts + self.private_consts:
if cand.name == name:
return cand
return None
def addConst(self, constinfo):
# choose right list (public or private)
consts = self.consts
for c in const_private_list:
if re.match(c, constinfo.name):
consts = self.private_consts
break
consts.append(constinfo)
def initCodeStreams(self, Module):
self.j_code = StringIO()
self.jn_code = StringIO()
self.cpp_code = StringIO()
if self.name != Module:
self.j_code.write(T_JAVA_START_INHERITED if self.base else T_JAVA_START_ORPHAN)
else:
self.j_code.write(T_JAVA_START_MODULE)
# misc handling
if self.name == 'Core':
self.imports.add("java.lang.String")
self.j_code.write(libVersionBlock())
def cleanupCodeStreams(self):
self.j_code.close()
self.jn_code.close()
self.cpp_code.close()
def generateJavaCode(self, m, M):
return Template(self.j_code.getvalue() + "\n\n" + \
self.jn_code.getvalue() + "\n}\n").substitute(\
module = m,
name = self.name,
jname = self.jname,
imports = "\n".join(self.getAllImports(M)),
base = self.base)
def generateCppCode(self):
return self.cpp_code.getvalue()
class ArgInfo():
def __init__(self, arg_tuple): # [ ctype, name, def val, [mod], argno ]
self.pointer = False
ctype = arg_tuple[0]
if ctype.endswith("*"):
ctype = ctype[:-1]
self.pointer = True
if ctype == 'vector_Point2d':
ctype = 'vector_Point2f'
elif ctype == 'vector_Point3d':
ctype = 'vector_Point3f'
self.ctype = ctype
self.name = arg_tuple[1]
self.defval = arg_tuple[2]
self.out = ""
if "/O" in arg_tuple[3]:
self.out = "O"
if "/IO" in arg_tuple[3]:
self.out = "IO"
def __repr__(self):
return Template("ARG $ctype$p $name=$defval").substitute(ctype=self.ctype,
p=" *" if self.pointer else "",
name=self.name,
defval=self.defval)
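# ArgInfo sketch for a hypothetical hdr_parser tuple describing "Mat& dst /O":
#   a = ArgInfo(["Mat", "dst", "", ["/O"], ""])
#   # a.ctype == "Mat", a.out == "O", a.pointer == False, a.defval == ""
# A trailing '*' on the ctype would instead set a.pointer = True and strip the '*'.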
class FuncInfo(GeneralInfo):
def __init__(self, decl, namespaces=[]): # [ funcname, return_ctype, [modifiers], [args] ]
GeneralInfo.__init__(self, decl[0], namespaces)
self.cname = self.name.replace(".", "::")
self.jname = self.name
self.isconstructor = self.name == self.classname
if "[" in self.name:
self.jname = "getelem"
for m in decl[2]:
if m.startswith("="):
self.jname = m[1:]
self.static = ["","static"][ "/S" in decl[2] ]
self.ctype = re.sub(r"^CvTermCriteria", "TermCriteria", decl[1] or "")
self.args = []
func_fix_map = func_arg_fix.get(self.classname, {}).get(self.jname, {})
for a in decl[3]:
arg = a[:]
arg_fix_map = func_fix_map.get(arg[1], {})
arg[0] = arg_fix_map.get('ctype', arg[0]) #fixing arg type
arg[3] = arg_fix_map.get('attrib', arg[3]) #fixing arg attrib
self.args.append(ArgInfo(arg))
def __repr__(self):
return Template("FUNC <$ctype $namespace.$classpath.$name $args>").substitute(**self.__dict__)
def __lt__(self, other):
return self.__repr__() < other.__repr__()
class JavaWrapperGenerator(object):
def __init__(self):
self.clear()
def clear(self):
self.namespaces = set(["cv"])
self.classes = { "Mat" : ClassInfo([ 'class Mat', '', [], [] ], self.namespaces) }
self.module = ""
self.Module = ""
self.ported_func_list = []
self.skipped_func_list = []
self.def_args_hist = {} # { def_args_cnt : funcs_cnt }
def add_class(self, decl):
classinfo = ClassInfo(decl, namespaces=self.namespaces)
if classinfo.name in class_ignore_list:
logging.info('ignored: %s', classinfo)
return
name = classinfo.name
if self.isWrapped(name):
logging.warning('duplicated: %s', classinfo)
return
self.classes[name] = classinfo
if name in type_dict:
logging.warning('duplicated: %s', classinfo)
return
type_dict[name] = \
{ "j_type" : classinfo.jname,
"jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
"jni_name" : "(*("+classinfo.fullName(isCPP=True)+"*)%(n)s_nativeObj)", "jni_type" : "jlong",
"suffix" : "J" }
type_dict[name+'*'] = \
{ "j_type" : classinfo.jname,
"jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
"jni_name" : "("+classinfo.fullName(isCPP=True)+"*)%(n)s_nativeObj", "jni_type" : "jlong",
"suffix" : "J" }
# missing_consts { Module : { public : [[name, val],...], private : [[]...] } }
if name in missing_consts:
if 'private' in missing_consts[name]:
for (n, val) in missing_consts[name]['private']:
classinfo.private_consts.append( ConstInfo([n, val], addedManually=True) )
if 'public' in missing_consts[name]:
for (n, val) in missing_consts[name]['public']:
classinfo.consts.append( ConstInfo([n, val], addedManually=True) )
# class props
for p in decl[3]:
if True: #"vector" not in p[0]:
classinfo.props.append( ClassPropInfo(p) )
else:
logging.warning("Skipped property: [%s]" % name, p)
if classinfo.base:
classinfo.addImports(classinfo.base)
type_dict["Ptr_"+name] = \
{ "j_type" : classinfo.jname,
"jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
"jni_name" : "Ptr<"+classinfo.fullName(isCPP=True)+">(("+classinfo.fullName(isCPP=True)+"*)%(n)s_nativeObj)", "jni_type" : "jlong",
"suffix" : "J" }
logging.info('ok: class %s, name: %s, base: %s', classinfo, name, classinfo.base)
def add_const(self, decl): # [ "const cname", val, [], [] ]
constinfo = ConstInfo(decl, namespaces=self.namespaces)
if constinfo.isIgnored():
logging.info('ignored: %s', constinfo)
elif not self.isWrapped(constinfo.classname):
logging.info('class not found: %s', constinfo)
else:
ci = self.getClass(constinfo.classname)
duplicate = ci.getConst(constinfo.name)
if duplicate:
if duplicate.addedManually:
logging.info('manual: %s', constinfo)
else:
logging.warning('duplicated: %s', constinfo)
else:
ci.addConst(constinfo)
logging.info('ok: %s', constinfo)
def add_func(self, decl):
fi = FuncInfo(decl, namespaces=self.namespaces)
classname = fi.classname or self.Module
if classname in class_ignore_list:
logging.info('ignored: %s', fi)
elif classname in ManualFuncs and fi.jname in ManualFuncs[classname]:
logging.info('manual: %s', fi)
elif not self.isWrapped(classname):
logging.warning('not found: %s', fi)
else:
self.getClass(classname).addMethod(fi)
logging.info('ok: %s', fi)
# calc args with def val
cnt = len([a for a in fi.args if a.defval])
self.def_args_hist[cnt] = self.def_args_hist.get(cnt, 0) + 1
def save(self, path, buf):
f = open(path, "wt")
f.write(buf)
f.close()
def gen(self, srcfiles, module, output_path, common_headers):
self.clear()
self.module = module
self.Module = module.capitalize()
# TODO: support UMat versions of declarations (implement UMat-wrapper for Java)
parser = hdr_parser.CppHeaderParser(generate_umat_decls=False)
self.add_class( ['class ' + self.Module, '', [], []] ) # [ 'class/struct cname', ':bases', [modlist] [props] ]
# scan the headers and build more descriptive maps of classes, consts, functions
includes = []
for hdr in common_headers:
logging.info("\n===== Common header : %s =====", hdr)
includes.append('#include "' + hdr + '"')
for hdr in srcfiles:
decls = parser.parse(hdr)
self.namespaces = parser.namespaces
logging.info("\n\n===== Header: %s =====", hdr)
logging.info("Namespaces: %s", parser.namespaces)
if decls:
includes.append('#include "' + hdr + '"')
else:
logging.info("Ignore header: %s", hdr)
for decl in decls:
logging.info("\n--- Incoming ---\n%s", pformat(decl, 4))
name = decl[0]
if name.startswith("struct") or name.startswith("class"):
self.add_class(decl)
elif name.startswith("const"):
self.add_const(decl)
else: # function
self.add_func(decl)
logging.info("\n\n===== Generating... =====")
moduleCppCode = StringIO()
for ci in self.classes.values():
if ci.name == "Mat":
continue
ci.initCodeStreams(self.Module)
self.gen_class(ci)
classJavaCode = ci.generateJavaCode(self.module, self.Module)
self.save("%s/%s+%s.java" % (output_path, module, ci.jname), classJavaCode)
moduleCppCode.write(ci.generateCppCode())
ci.cleanupCodeStreams()
self.save(output_path+"/"+module+".cpp", Template(T_CPP_MODULE).substitute(m = module, M = module.upper(), code = moduleCppCode.getvalue(), includes = "\n".join(includes)))
self.save(output_path+"/"+module+".txt", self.makeReport())
def makeReport(self):
'''
Returns string with generator report
'''
report = StringIO()
total_count = len(self.ported_func_list)+ len(self.skipped_func_list)
report.write("PORTED FUNCs LIST (%i of %i):\n\n" % (len(self.ported_func_list), total_count))
report.write("\n".join(self.ported_func_list))
report.write("\n\nSKIPPED FUNCs LIST (%i of %i):\n\n" % (len(self.skipped_func_list), total_count))
report.write("".join(self.skipped_func_list))
for i in self.def_args_hist.keys():
report.write("\n%i def args - %i funcs" % (i, self.def_args_hist[i]))
return report.getvalue()
def fullTypeName(self, t):
if self.isWrapped(t):
return self.getClass(t).fullName(isCPP=True)
else:
return t
def gen_func(self, ci, fi, prop_name=''):
logging.info("%s", fi)
j_code = ci.j_code
jn_code = ci.jn_code
cpp_code = ci.cpp_code
# c_decl
# e.g: void add(Mat src1, Mat src2, Mat dst, Mat mask = Mat(), int dtype = -1)
if prop_name:
c_decl = "%s %s::%s" % (fi.ctype, fi.classname, prop_name)
else:
decl_args = []
for a in fi.args:
s = a.ctype or ' _hidden_ '
if a.pointer:
s += "*"
elif a.out:
s += "&"
s += " " + a.name
if a.defval:
s += " = "+a.defval
decl_args.append(s)
c_decl = "%s %s %s(%s)" % ( fi.static, fi.ctype, fi.cname, ", ".join(decl_args) )
# java comment
j_code.write( "\n //\n // C++: %s\n //\n\n" % c_decl )
# check if we 'know' all the types
if fi.ctype not in type_dict: # unsupported ret type
msg = "// Return type '%s' is not supported, skipping the function\n\n" % fi.ctype
self.skipped_func_list.append(c_decl + "\n" + msg)
j_code.write( " "*4 + msg )
logging.warning("SKIP:" + c_decl.strip() + "\t due to RET type" + fi.ctype)
return
for a in fi.args:
if a.ctype not in type_dict:
if not a.defval and a.ctype.endswith("*"):
a.defval = 0
if a.defval:
a.ctype = ''
continue
msg = "// Unknown type '%s' (%s), skipping the function\n\n" % (a.ctype, a.out or "I")
self.skipped_func_list.append(c_decl + "\n" + msg)
j_code.write( " "*4 + msg )
logging.warning("SKIP:" + c_decl.strip() + "\t due to ARG type" + a.ctype + "/" + (a.out or "I"))
return
self.ported_func_list.append(c_decl)
# jn & cpp comment
jn_code.write( "\n // C++: %s\n" % c_decl )
cpp_code.write( "\n//\n// %s\n//\n" % c_decl )
# java args
args = fi.args[:] # copy
j_signatures=[]
suffix_counter = int(ci.methods_suffixes.get(fi.jname, -1))
while True:
suffix_counter += 1
ci.methods_suffixes[fi.jname] = suffix_counter
# java native method args
jn_args = []
# jni (cpp) function args
jni_args = [ArgInfo([ "env", "env", "", [], "" ]), ArgInfo([ "cls", "", "", [], "" ])]
j_prologue = []
j_epilogue = []
c_prologue = []
c_epilogue = []
if type_dict[fi.ctype]["jni_type"] == "jdoubleArray":
fields = type_dict[fi.ctype]["jn_args"]
c_epilogue.append( \
("jdoubleArray _da_retval_ = env->NewDoubleArray(%(cnt)i); " +
"jdouble _tmp_retval_[%(cnt)i] = {%(args)s}; " +
"env->SetDoubleArrayRegion(_da_retval_, 0, %(cnt)i, _tmp_retval_);") %
{ "cnt" : len(fields), "args" : ", ".join(["(jdouble)_retval_" + f[1] for f in fields]) } )
if fi.classname and fi.ctype and not fi.static: # non-static class method except c-tor
# adding 'self'
jn_args.append ( ArgInfo([ "__int64", "nativeObj", "", [], "" ]) )
jni_args.append( ArgInfo([ "__int64", "self", "", [], "" ]) )
ci.addImports(fi.ctype)
for a in args:
if not a.ctype: # hidden
continue
ci.addImports(a.ctype)
if "vector" in a.ctype: # pass as Mat
jn_args.append ( ArgInfo([ "__int64", "%s_mat.nativeObj" % a.name, "", [], "" ]) )
jni_args.append ( ArgInfo([ "__int64", "%s_mat_nativeObj" % a.name, "", [], "" ]) )
c_prologue.append( type_dict[a.ctype]["jni_var"] % {"n" : a.name} + ";" )
c_prologue.append( "Mat& %(n)s_mat = *((Mat*)%(n)s_mat_nativeObj)" % {"n" : a.name} + ";" )
if "I" in a.out or not a.out:
if a.ctype.startswith("vector_vector_"):
j_prologue.append( "List<Mat> %(n)s_tmplm = new ArrayList<Mat>((%(n)s != null) ? %(n)s.size() : 0);" % {"n" : a.name } )
j_prologue.append( "Mat %(n)s_mat = Converters.%(t)s_to_Mat(%(n)s, %(n)s_tmplm);" % {"n" : a.name, "t" : a.ctype} )
else:
if not type_dict[a.ctype]["j_type"].startswith("MatOf"):
j_prologue.append( "Mat %(n)s_mat = Converters.%(t)s_to_Mat(%(n)s);" % {"n" : a.name, "t" : a.ctype} )
else:
j_prologue.append( "Mat %s_mat = %s;" % (a.name, a.name) )
c_prologue.append( "Mat_to_%(t)s( %(n)s_mat, %(n)s );" % {"n" : a.name, "t" : a.ctype} )
else:
if not type_dict[a.ctype]["j_type"].startswith("MatOf"):
j_prologue.append( "Mat %s_mat = new Mat();" % a.name )
else:
j_prologue.append( "Mat %s_mat = %s;" % (a.name, a.name) )
if "O" in a.out:
if not type_dict[a.ctype]["j_type"].startswith("MatOf"):
j_epilogue.append("Converters.Mat_to_%(t)s(%(n)s_mat, %(n)s);" % {"t" : a.ctype, "n" : a.name})
j_epilogue.append( "%s_mat.release();" % a.name )
c_epilogue.append( "%(t)s_to_Mat( %(n)s, %(n)s_mat );" % {"n" : a.name, "t" : a.ctype} )
else:
fields = type_dict[a.ctype].get("jn_args", ((a.ctype, ""),))
if "I" in a.out or not a.out or self.isWrapped(a.ctype): # input arg, pass by primitive fields
for f in fields:
jn_args.append ( ArgInfo([ f[0], a.name + f[1], "", [], "" ]) )
jni_args.append( ArgInfo([ f[0], a.name + f[1].replace(".","_").replace("[","").replace("]",""), "", [], "" ]) )
if a.out and not self.isWrapped(a.ctype): # out arg, pass as double[]
jn_args.append ( ArgInfo([ "double[]", "%s_out" % a.name, "", [], "" ]) )
jni_args.append ( ArgInfo([ "double[]", "%s_out" % a.name, "", [], "" ]) )
j_prologue.append( "double[] %s_out = new double[%i];" % (a.name, len(fields)) )
c_epilogue.append( \
"jdouble tmp_%(n)s[%(cnt)i] = {%(args)s}; env->SetDoubleArrayRegion(%(n)s_out, 0, %(cnt)i, tmp_%(n)s);" %
{ "n" : a.name, "cnt" : len(fields), "args" : ", ".join(["(jdouble)" + a.name + f[1] for f in fields]) } )
if a.ctype in ('bool', 'int', 'long', 'float', 'double'):
j_epilogue.append('if(%(n)s!=null) %(n)s[0] = (%(t)s)%(n)s_out[0];' % {'n':a.name,'t':a.ctype})
else:
set_vals = []
i = 0
for f in fields:
set_vals.append( "%(n)s%(f)s = %(t)s%(n)s_out[%(i)i]" %
{"n" : a.name, "t": ("("+type_dict[f[0]]["j_type"]+")", "")[f[0]=="double"], "f" : f[1], "i" : i}
)
i += 1
j_epilogue.append( "if("+a.name+"!=null){ " + "; ".join(set_vals) + "; } ")
# calculate java method signature to check for uniqueness
j_args = []
for a in args:
if not a.ctype: #hidden
continue
jt = type_dict[a.ctype]["j_type"]
if a.out and a.ctype in ('bool', 'int', 'long', 'float', 'double'):
jt += '[]'
j_args.append( jt + ' ' + a.name )
j_signature = type_dict[fi.ctype]["j_type"] + " " + \
fi.jname + "(" + ", ".join(j_args) + ")"
logging.info("java: " + j_signature)
if j_signature in j_signatures:
if args:
args.pop()
continue
else:
break
# java part:
# private java NATIVE method decl
# e.g.
# private static native void add_0(long src1, long src2, long dst, long mask, int dtype);
jn_code.write( Template(\
" private static native $type $name($args);\n").substitute(\
type = type_dict[fi.ctype].get("jn_type", "double[]"), \
name = fi.jname + '_' + str(suffix_counter), \
args = ", ".join(["%s %s" % (type_dict[a.ctype]["jn_type"], a.name.replace(".","_").replace("[","").replace("]","")) for a in jn_args])
) );
# java part:
#java doc comment
f_name = fi.name
if fi.classname:
f_name = fi.classname + "::" + fi.name
java_doc = "//javadoc: " + f_name + "(%s)" % ", ".join([a.name for a in args if a.ctype])
j_code.write(" "*4 + java_doc + "\n")
# public java wrapper method impl (calling native one above)
# e.g.
# public static void add( Mat src1, Mat src2, Mat dst, Mat mask, int dtype )
# { add_0( src1.nativeObj, src2.nativeObj, dst.nativeObj, mask.nativeObj, dtype ); }
ret_type = fi.ctype
if fi.ctype.endswith('*'):
ret_type = ret_type[:-1]
ret_val = type_dict[ret_type]["j_type"] + " retVal = "
tail = ""
ret = "return retVal;"
if ret_type.startswith('vector'):
tail = ")"
j_type = type_dict[ret_type]["j_type"]
if j_type.startswith('MatOf'):
ret_val += j_type + ".fromNativeAddr("
else:
ret_val = "Mat retValMat = new Mat("
j_prologue.append( j_type + ' retVal = new Array' + j_type+'();')
j_epilogue.append('Converters.Mat_to_' + ret_type + '(retValMat, retVal);')
elif ret_type.startswith("Ptr_"):
ret_val = type_dict[fi.ctype]["j_type"] + " retVal = new " + type_dict[ret_type]["j_type"] + "("
tail = ")"
elif ret_type == "void":
ret_val = ""
ret = "return;"
elif ret_type == "": # c-tor
if fi.classname and ci.base:
ret_val = "super( "
tail = " )"
else:
ret_val = "nativeObj = "
ret = "return;"
elif self.isWrapped(ret_type): # wrapped class
ret_val = type_dict[ret_type]["j_type"] + " retVal = new " + self.getClass(ret_type).jname + "("
tail = ")"
elif "jn_type" not in type_dict[ret_type]:
ret_val = type_dict[fi.ctype]["j_type"] + " retVal = new " + type_dict[ret_type]["j_type"] + "("
tail = ")"
static = "static"
if fi.classname:
static = fi.static
j_code.write( Template(\
""" public $static $j_type $j_name($j_args)
{
$prologue
$ret_val$jn_name($jn_args_call)$tail;
$epilogue
$ret
}
"""
).substitute(\
ret = ret, \
ret_val = ret_val, \
tail = tail, \
prologue = "\n ".join(j_prologue), \
epilogue = "\n ".join(j_epilogue), \
static=static, \
j_type=type_dict[fi.ctype]["j_type"], \
j_name=fi.jname, \
j_args=", ".join(j_args), \
jn_name=fi.jname + '_' + str(suffix_counter), \
jn_args_call=", ".join( [a.name for a in jn_args] ),\
)
)
# cpp part:
# jni_func(..) { _retval_ = cv_func(..); return _retval_; }
ret = "return _retval_;"
default = "return 0;"
if fi.ctype == "void":
ret = "return;"
default = "return;"
elif not fi.ctype: # c-tor
ret = "return (jlong) _retval_;"
elif fi.ctype.startswith('vector'): # c-tor
ret = "return (jlong) _retval_;"
elif fi.ctype == "String":
ret = "return env->NewStringUTF(_retval_.c_str());"
default = 'return env->NewStringUTF("");'
elif self.isWrapped(fi.ctype): # wrapped class:
ret = "return (jlong) new %s(_retval_);" % self.fullTypeName(fi.ctype)
elif fi.ctype.startswith('Ptr_'):
c_prologue.append("typedef Ptr<%s> %s;" % (self.fullTypeName(fi.ctype[4:]), fi.ctype))
ret = "return (jlong)(new %(ctype)s(_retval_));" % { 'ctype':fi.ctype }
elif self.isWrapped(ret_type): # pointer to wrapped class:
ret = "return (jlong) _retval_;"
elif type_dict[fi.ctype]["jni_type"] == "jdoubleArray":
ret = "return _da_retval_;"
# hack: replacing func call with property set/get
name = fi.name
if prop_name:
if args:
name = prop_name + " = "
else:
name = prop_name + ";//"
cvname = fi.fullName(isCPP=True)
retval = self.fullTypeName(fi.ctype) + " _retval_ = "
if fi.ctype == "void":
retval = ""
elif fi.ctype == "String":
retval = "cv::" + retval
elif fi.ctype.startswith('vector'):
retval = type_dict[fi.ctype]['jni_var'] % {"n" : '_ret_val_vector_'} + " = "
c_epilogue.append("Mat* _retval_ = new Mat();")
c_epilogue.append(fi.ctype+"_to_Mat(_ret_val_vector_, *_retval_);")
if len(fi.classname)>0:
if not fi.ctype: # c-tor
retval = fi.fullClass(isCPP=True) + "* _retval_ = "
cvname = "new " + fi.fullClass(isCPP=True)
elif fi.static:
cvname = fi.fullName(isCPP=True)
else:
cvname = ("me->" if not self.isSmartClass(ci) else "(*me)->") + name
c_prologue.append(\
"%(cls)s* me = (%(cls)s*) self; //TODO: check for NULL" \
% { "cls" : self.smartWrap(ci, fi.fullClass(isCPP=True))} \
)
cvargs = []
for a in args:
if a.pointer:
jni_name = "&%(n)s"
else:
jni_name = "%(n)s"
if not a.out and not "jni_var" in type_dict[a.ctype]:
# explicit cast to C type to avoid ambiguous call error on platforms (mingw)
# where jni types are different from native types (e.g. jint is not the same as int)
jni_name = "(%s)%s" % (a.ctype, jni_name)
if not a.ctype: # hidden
jni_name = a.defval
cvargs.append( type_dict[a.ctype].get("jni_name", jni_name) % {"n" : a.name})
if "vector" not in a.ctype :
if ("I" in a.out or not a.out or self.isWrapped(a.ctype)) and "jni_var" in type_dict[a.ctype]: # complex type
c_prologue.append(type_dict[a.ctype]["jni_var"] % {"n" : a.name} + ";")
if a.out and "I" not in a.out and not self.isWrapped(a.ctype) and a.ctype:
c_prologue.append("%s %s;" % (a.ctype, a.name))
rtype = type_dict[fi.ctype].get("jni_type", "jdoubleArray")
clazz = ci.jname
cpp_code.write ( Template( \
"""
${namespace}
JNIEXPORT $rtype JNICALL Java_org_opencv_${module}_${clazz}_$fname ($argst);
JNIEXPORT $rtype JNICALL Java_org_opencv_${module}_${clazz}_$fname
($args)
{
static const char method_name[] = "$module::$fname()";
try {
LOGD("%s", method_name);
$prologue
$retval$cvname( $cvargs );
$epilogue$ret
} catch(const std::exception &e) {
throwJavaException(env, &e, method_name);
} catch (...) {
throwJavaException(env, 0, method_name);
}
$default
}
""" ).substitute( \
rtype = rtype, \
module = self.module.replace('_', '_1'), \
clazz = clazz.replace('_', '_1'), \
fname = (fi.jname + '_' + str(suffix_counter)).replace('_', '_1'), \
args = ", ".join(["%s %s" % (type_dict[a.ctype].get("jni_type"), a.name) for a in jni_args]), \
argst = ", ".join([type_dict[a.ctype].get("jni_type") for a in jni_args]), \
prologue = "\n ".join(c_prologue), \
epilogue = " ".join(c_epilogue) + ("\n " if c_epilogue else ""), \
ret = ret, \
cvname = cvname, \
cvargs = ", ".join(cvargs), \
default = default, \
retval = retval, \
namespace = ('using namespace ' + ci.namespace.replace('.', '::') + ';') if ci.namespace else ''
) )
# remember this method signature so duplicate overloads are not emitted
j_signatures.append(j_signature)
# processing args with default values
if not args or not args[-1].defval:
break
while args and args[-1].defval:
# 'smart' overloads filtering
a = args.pop()
if a.name in ('mask', 'dtype', 'ddepth', 'lineType', 'borderType', 'borderMode', 'criteria'):
break
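# Overload expansion sketch (hypothetical signature): for
#   void foo(Mat src, Mat dst, Mat mask = Mat(), int dtype = -1)
# the loop above first emits foo(src, dst, mask, dtype), then pops trailing
# defaulted args -- stopping after a "semantic" name such as 'dtype' is removed --
# and emits foo(src, dst, mask), and finally foo(src, dst).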
def gen_class(self, ci):
logging.info("%s", ci)
# constants
if ci.private_consts:
logging.info("%s", ci.private_consts)
ci.j_code.write("""
private static final int
%s;\n\n""" % (",\n"+" "*12).join(["%s = %s" % (c.name, c.value) for c in ci.private_consts])
)
if ci.consts:
logging.info("%s", ci.consts)
ci.j_code.write("""
public static final int
%s;\n\n""" % (",\n"+" "*12).join(["%s = %s" % (c.name, c.value) for c in ci.consts])
)
# methods
for fi in ci.getAllMethods():
self.gen_func(ci, fi)
# props
for pi in ci.props:
# getter
getter_name = ci.fullName() + ".get_" + pi.name
fi = FuncInfo( [getter_name, pi.ctype, [], []], self.namespaces ) # [ funcname, return_ctype, [modifiers], [args] ]
self.gen_func(ci, fi, pi.name)
if pi.rw:
#setter
setter_name = ci.fullName() + ".set_" + pi.name
fi = FuncInfo( [ setter_name, "void", [], [ [pi.ctype, pi.name, "", [], ""] ] ], self.namespaces)
self.gen_func(ci, fi, pi.name)
# manual ports
if ci.name in ManualFuncs:
for func in ManualFuncs[ci.name].keys():
ci.j_code.write ( ManualFuncs[ci.name][func]["j_code"] )
ci.jn_code.write( ManualFuncs[ci.name][func]["jn_code"] )
ci.cpp_code.write( ManualFuncs[ci.name][func]["cpp_code"] )
if ci.name != self.Module:
# finalize()
ci.j_code.write(
"""
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
""" )
ci.jn_code.write(
"""
// native support for java finalize()
private static native void delete(long nativeObj);
""" )
# native support for java finalize()
ci.cpp_code.write( \
"""
//
// native support for java finalize()
// static void %(cls)s::delete( __int64 self )
//
JNIEXPORT void JNICALL Java_org_opencv_%(module)s_%(j_cls)s_delete(JNIEnv*, jclass, jlong);
JNIEXPORT void JNICALL Java_org_opencv_%(module)s_%(j_cls)s_delete
(JNIEnv*, jclass, jlong self)
{
delete (%(cls)s*) self;
}
""" % {"module" : module.replace('_', '_1'), "cls" : self.smartWrap(ci, ci.fullName(isCPP=True)), "j_cls" : ci.jname.replace('_', '_1')}
)
def getClass(self, classname):
return self.classes[classname or self.Module]
def isWrapped(self, classname):
name = classname or self.Module
return name in self.classes
def isSmartClass(self, ci):
'''
Check if class stores Ptr<T>* instead of T* in nativeObj field
'''
if ci.smart != None:
return ci.smart
# if parents are smart (we hope) then children are!
# if not we believe the class is smart if it has "create" method
ci.smart = False
if ci.base:
ci.smart = True
else:
for fi in ci.methods:
if fi.name == "create":
ci.smart = True
break
return ci.smart
def smartWrap(self, ci, fullname):
'''
Wraps fullname with Ptr<> if needed
'''
if self.isSmartClass(ci):
return "Ptr<" + fullname + ">"
return fullname
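# smartWrap sketch (class name is illustrative): for a "smart" class, whose
# nativeObj holds a Ptr<T>*, generated JNI casts go through Ptr<>, e.g.
#   smartWrap(ci, "cv::FeatureDetector") -> "Ptr<cv::FeatureDetector>"
# while a plain class gets its name back unchanged.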
if __name__ == "__main__":
if len(sys.argv) < 4:
print("Usage:\n", \
os.path.basename(sys.argv[0]), \
"<full path to hdr_parser.py> <module name> <C++ header> [<C++ header>...]")
print("Current args are: ", ", ".join(["'"+a+"'" for a in sys.argv]))
exit(0)
dstdir = "."
hdr_parser_path = os.path.abspath(sys.argv[1])
if hdr_parser_path.endswith(".py"):
hdr_parser_path = os.path.dirname(hdr_parser_path)
sys.path.append(hdr_parser_path)
import hdr_parser
module = sys.argv[2]
srcfiles = sys.argv[3:]
common_headers = []
if '--common' in srcfiles:
pos = srcfiles.index('--common')
common_headers = srcfiles[pos+1:]
srcfiles = srcfiles[:pos]
logging.basicConfig(filename='%s/%s.log' % (dstdir, module), format=None, filemode='w', level=logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.WARNING)
logging.getLogger().addHandler(handler)
#print("Generating module '" + module + "' from headers:\n\t" + "\n\t".join(srcfiles))
generator = JavaWrapperGenerator()
generator.gen(srcfiles, module, dstdir, common_headers)
| 44.213976 | 185 | 0.515204 |
bde74b17dfc4e7b2155a799540cbc8ef7baf83a7 | 58094 | py | Python | botenv/lib/python3.9/site-packages/redis/connection.py | 0xtuytuy/unit-crypto-ski-week-poap-bot | 9bab0a6013a29db9ce76311d4f6fa1d0922ac5c1 | ["MIT"] | null | null | null | botenv/lib/python3.9/site-packages/redis/connection.py | 0xtuytuy/unit-crypto-ski-week-poap-bot | 9bab0a6013a29db9ce76311d4f6fa1d0922ac5c1 | ["MIT"] | null | null | null | botenv/lib/python3.9/site-packages/redis/connection.py | 0xtuytuy/unit-crypto-ski-week-poap-bot | 9bab0a6013a29db9ce76311d4f6fa1d0922ac5c1 | ["MIT"] | null | null | null |
import copy
import errno
import io
import os
import socket
import threading
import weakref
from itertools import chain
from queue import Empty, Full, LifoQueue
from time import time
from urllib.parse import parse_qs, unquote, urlparse
from packaging.version import Version
from redis.backoff import NoBackoff
from redis.exceptions import (
AuthenticationError,
AuthenticationWrongNumberOfArgsError,
BusyLoadingError,
ChildDeadlockedError,
ConnectionError,
DataError,
ExecAbortError,
InvalidResponse,
ModuleError,
NoPermissionError,
NoScriptError,
ReadOnlyError,
RedisError,
ResponseError,
TimeoutError,
)
from redis.retry import Retry
from redis.utils import CRYPTOGRAPHY_AVAILABLE, HIREDIS_AVAILABLE, str_if_bytes
try:
import ssl
ssl_available = True
except ImportError:
ssl_available = False
NONBLOCKING_EXCEPTION_ERROR_NUMBERS = {
BlockingIOError: errno.EWOULDBLOCK,
}
if ssl_available:
if hasattr(ssl, "SSLWantReadError"):
NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantReadError] = 2
NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantWriteError] = 2
else:
NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLError] = 2
NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys())
if HIREDIS_AVAILABLE:
import hiredis
hiredis_version = Version(hiredis.__version__)
HIREDIS_SUPPORTS_CALLABLE_ERRORS = hiredis_version >= Version("0.1.3")
HIREDIS_SUPPORTS_BYTE_BUFFER = hiredis_version >= Version("0.1.4")
HIREDIS_SUPPORTS_ENCODING_ERRORS = hiredis_version >= Version("1.0.0")
HIREDIS_USE_BYTE_BUFFER = True
# only use byte buffer if hiredis supports it
if not HIREDIS_SUPPORTS_BYTE_BUFFER:
HIREDIS_USE_BYTE_BUFFER = False
SYM_STAR = b"*"
SYM_DOLLAR = b"$"
SYM_CRLF = b"\r\n"
SYM_EMPTY = b""
SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
SENTINEL = object()
MODULE_LOAD_ERROR = "Error loading the extension. Please check the server logs."
NO_SUCH_MODULE_ERROR = "Error unloading module: no such module with that name"
MODULE_UNLOAD_NOT_POSSIBLE_ERROR = "Error unloading module: operation not possible."
MODULE_EXPORTS_DATA_TYPES_ERROR = (
"Error unloading module: the module "
"exports one or more module-side data "
"types, can't unload"
)
class Encoder:
"Encode strings to bytes-like and decode bytes-like to strings"
def __init__(self, encoding, encoding_errors, decode_responses):
self.encoding = encoding
self.encoding_errors = encoding_errors
self.decode_responses = decode_responses
def encode(self, value):
"Return a bytestring or bytes-like representation of the value"
if isinstance(value, (bytes, memoryview)):
return value
elif isinstance(value, bool):
# special case bool since it is a subclass of int
raise DataError(
"Invalid input of type: 'bool'. Convert to a "
"bytes, string, int or float first."
)
elif isinstance(value, (int, float)):
value = repr(value).encode()
elif not isinstance(value, str):
# a value we don't know how to deal with. throw an error
typename = type(value).__name__
raise DataError(
f"Invalid input of type: '{typename}'. "
f"Convert to a bytes, string, int or float first."
)
if isinstance(value, str):
value = value.encode(self.encoding, self.encoding_errors)
return value
def decode(self, value, force=False):
"Return a unicode string from the bytes-like representation"
if self.decode_responses or force:
if isinstance(value, memoryview):
value = value.tobytes()
if isinstance(value, bytes):
value = value.decode(self.encoding, self.encoding_errors)
return value
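# Minimal usage sketch of Encoder (standalone; normally constructed by Connection):
#   enc = Encoder(encoding="utf-8", encoding_errors="strict", decode_responses=True)
#   enc.encode("ping")   # -> b'ping'
#   enc.encode(3.14)     # -> b'3.14' (numbers go through repr())
#   enc.encode(True)     # -> raises DataError (bool is rejected explicitly)
#   enc.decode(b"pong")  # -> 'pong' (decoded because decode_responses=True)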
class BaseParser:
EXCEPTION_CLASSES = {
"ERR": {
"max number of clients reached": ConnectionError,
"Client sent AUTH, but no password is set": AuthenticationError,
"invalid password": AuthenticationError,
# some Redis server versions report invalid command syntax
# in lowercase
"wrong number of arguments "
"for 'auth' command": AuthenticationWrongNumberOfArgsError,
# some Redis server versions report invalid command syntax
# in uppercase
"wrong number of arguments "
"for 'AUTH' command": AuthenticationWrongNumberOfArgsError,
MODULE_LOAD_ERROR: ModuleError,
MODULE_EXPORTS_DATA_TYPES_ERROR: ModuleError,
NO_SUCH_MODULE_ERROR: ModuleError,
MODULE_UNLOAD_NOT_POSSIBLE_ERROR: ModuleError,
},
"EXECABORT": ExecAbortError,
"LOADING": BusyLoadingError,
"NOSCRIPT": NoScriptError,
"READONLY": ReadOnlyError,
"NOAUTH": AuthenticationError,
"NOPERM": NoPermissionError,
}
def parse_error(self, response):
"Parse an error response"
error_code = response.split(" ")[0]
if error_code in self.EXCEPTION_CLASSES:
response = response[len(error_code) + 1 :]
exception_class = self.EXCEPTION_CLASSES[error_code]
if isinstance(exception_class, dict):
exception_class = exception_class.get(response, ResponseError)
return exception_class(response)
return ResponseError(response)
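# parse_error() dispatch sketch (error payloads shortened for illustration):
#   BaseParser().parse_error("NOSCRIPT No matching script") -> NoScriptError(...)
#   BaseParser().parse_error("ERR invalid password")        -> AuthenticationError(...)
#   BaseParser().parse_error("ERR something unexpected")    -> ResponseError(...)
# i.e. the first token selects the class; for "ERR" the remainder is matched
# against the nested dict, falling back to ResponseError.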
class SocketBuffer:
def __init__(self, socket, socket_read_size, socket_timeout):
self._sock = socket
self.socket_read_size = socket_read_size
self.socket_timeout = socket_timeout
self._buffer = io.BytesIO()
# number of bytes written to the buffer from the socket
self.bytes_written = 0
# number of bytes read from the buffer
self.bytes_read = 0
@property
def length(self):
return self.bytes_written - self.bytes_read
def _read_from_socket(self, length=None, timeout=SENTINEL, raise_on_timeout=True):
sock = self._sock
socket_read_size = self.socket_read_size
buf = self._buffer
buf.seek(self.bytes_written)
marker = 0
custom_timeout = timeout is not SENTINEL
try:
if custom_timeout:
sock.settimeout(timeout)
while True:
data = self._sock.recv(socket_read_size)
# an empty string indicates the server shut down the socket
if isinstance(data, bytes) and len(data) == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
buf.write(data)
data_length = len(data)
self.bytes_written += data_length
marker += data_length
if length is not None and length > marker:
continue
return True
except socket.timeout:
if raise_on_timeout:
raise TimeoutError("Timeout reading from socket")
return False
except NONBLOCKING_EXCEPTIONS as ex:
# if we're in nonblocking mode and the recv raises a
# blocking error, simply return False indicating that
# there's no data to be read. otherwise raise the
# original exception.
allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
if not raise_on_timeout and ex.errno == allowed:
return False
raise ConnectionError(f"Error while reading from socket: {ex.args}")
finally:
if custom_timeout:
sock.settimeout(self.socket_timeout)
def can_read(self, timeout):
return bool(self.length) or self._read_from_socket(
timeout=timeout, raise_on_timeout=False
)
def read(self, length):
length = length + 2 # make sure to read the \r\n terminator
# make sure we've read enough data from the socket
if length > self.length:
self._read_from_socket(length - self.length)
self._buffer.seek(self.bytes_read)
data = self._buffer.read(length)
self.bytes_read += len(data)
# purge the buffer when we've consumed it all so it doesn't
# grow forever
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def readline(self):
buf = self._buffer
buf.seek(self.bytes_read)
data = buf.readline()
while not data.endswith(SYM_CRLF):
# there's more data in the socket that we need
self._read_from_socket()
buf.seek(self.bytes_read)
data = buf.readline()
self.bytes_read += len(data)
# purge the buffer when we've consumed it all so it doesn't
# grow forever
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def purge(self):
self._buffer.seek(0)
self._buffer.truncate()
self.bytes_written = 0
self.bytes_read = 0
def close(self):
try:
self.purge()
self._buffer.close()
except Exception:
# issue #633 suggests the purge/close somehow raised a
# BadFileDescriptor error. Perhaps the client ran out of
# memory or something else? It's probably OK to ignore
# any error being raised from purge/close since we're
# removing the reference to the instance below.
pass
self._buffer = None
self._sock = None
class PythonParser(BaseParser):
"Plain Python parsing class"
def __init__(self, socket_read_size):
self.socket_read_size = socket_read_size
self.encoder = None
self._sock = None
self._buffer = None
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection):
"Called when the socket connects"
self._sock = connection._sock
self._buffer = SocketBuffer(
self._sock, self.socket_read_size, connection.socket_timeout
)
self.encoder = connection.encoder
def on_disconnect(self):
"Called when the socket disconnects"
self._sock = None
if self._buffer is not None:
self._buffer.close()
self._buffer = None
self.encoder = None
def can_read(self, timeout):
return self._buffer and self._buffer.can_read(timeout)
def read_response(self, disable_decoding=False):
raw = self._buffer.readline()
if not raw:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
byte, response = raw[:1], raw[1:]
if byte not in (b"-", b"+", b":", b"$", b"*"):
raise InvalidResponse(f"Protocol Error: {raw!r}")
# server returned an error
if byte == b"-":
response = response.decode("utf-8", errors="replace")
error = self.parse_error(response)
# if the error is a ConnectionError, raise immediately so the user
# is notified
if isinstance(error, ConnectionError):
raise error
# otherwise, we're dealing with a ResponseError that might belong
# inside a pipeline response. the connection's read_response()
# and/or the pipeline's execute() will raise this error if
# necessary, so just return the exception instance here.
return error
# single value
elif byte == b"+":
pass
# int value
elif byte == b":":
response = int(response)
# bulk response
elif byte == b"$":
length = int(response)
if length == -1:
return None
response = self._buffer.read(length)
# multi-bulk response
elif byte == b"*":
length = int(response)
if length == -1:
return None
response = [
self.read_response(disable_decoding=disable_decoding)
for i in range(length)
]
if isinstance(response, bytes) and disable_decoding is False:
response = self.encoder.decode(response)
return response
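# Illustration (editor's addition, not in the original source): the leading
# byte inspected above selects the RESP reply type, e.g.
#     +OK\r\n             -> b"OK"         (simple string)
#     :42\r\n             -> 42            (integer)
#     $3\r\nfoo\r\n       -> b"foo"        (bulk string)
#     *2\r\n:1\r\n:2\r\n  -> [1, 2]        (multi-bulk/array, read recursively)
#     -ERR boom\r\n       -> ResponseError (error reply, returned rather than raised)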
class HiredisParser(BaseParser):
"Parser class for connections using Hiredis"
def __init__(self, socket_read_size):
if not HIREDIS_AVAILABLE:
raise RedisError("Hiredis is not installed")
self.socket_read_size = socket_read_size
if HIREDIS_USE_BYTE_BUFFER:
self._buffer = bytearray(socket_read_size)
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection, **kwargs):
self._sock = connection._sock
self._socket_timeout = connection.socket_timeout
kwargs = {
"protocolError": InvalidResponse,
"replyError": self.parse_error,
}
# hiredis < 0.1.3 doesn't support functions that create exceptions
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
kwargs["replyError"] = ResponseError
if connection.encoder.decode_responses:
kwargs["encoding"] = connection.encoder.encoding
if HIREDIS_SUPPORTS_ENCODING_ERRORS:
kwargs["errors"] = connection.encoder.encoding_errors
self._reader = hiredis.Reader(**kwargs)
self._next_response = False
def on_disconnect(self):
self._sock = None
self._reader = None
self._next_response = False
def can_read(self, timeout):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
if self._next_response is False:
self._next_response = self._reader.gets()
if self._next_response is False:
return self.read_from_socket(timeout=timeout, raise_on_timeout=False)
return True
def read_from_socket(self, timeout=SENTINEL, raise_on_timeout=True):
sock = self._sock
custom_timeout = timeout is not SENTINEL
try:
if custom_timeout:
sock.settimeout(timeout)
if HIREDIS_USE_BYTE_BUFFER:
bufflen = self._sock.recv_into(self._buffer)
if bufflen == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
self._reader.feed(self._buffer, 0, bufflen)
else:
buffer = self._sock.recv(self.socket_read_size)
# an empty string indicates the server shutdown the socket
if not isinstance(buffer, bytes) or len(buffer) == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
self._reader.feed(buffer)
# data was read from the socket and added to the buffer.
# return True to indicate that data was read.
return True
except socket.timeout:
if raise_on_timeout:
raise TimeoutError("Timeout reading from socket")
return False
except NONBLOCKING_EXCEPTIONS as ex:
# if we're in nonblocking mode and the recv raises a
# blocking error, simply return False indicating that
# there's no data to be read. otherwise raise the
# original exception.
allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
if not raise_on_timeout and ex.errno == allowed:
return False
raise ConnectionError(f"Error while reading from socket: {ex.args}")
finally:
if custom_timeout:
sock.settimeout(self._socket_timeout)
def read_response(self, disable_decoding=False):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
# _next_response might be cached from a can_read() call
if self._next_response is not False:
response = self._next_response
self._next_response = False
return response
response = self._reader.gets()
while response is False:
self.read_from_socket()
response = self._reader.gets()
# if an older version of hiredis is installed, we need to attempt
# to convert ResponseErrors to their appropriate types.
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
if isinstance(response, ResponseError):
response = self.parse_error(response.args[0])
elif (
isinstance(response, list)
and response
and isinstance(response[0], ResponseError)
):
response[0] = self.parse_error(response[0].args[0])
# if the response is a ConnectionError or the response is a list and
# the first item is a ConnectionError, raise it as something bad
# happened
if isinstance(response, ConnectionError):
raise response
elif (
isinstance(response, list)
and response
and isinstance(response[0], ConnectionError)
):
raise response[0]
return response
if HIREDIS_AVAILABLE:
DefaultParser = HiredisParser
else:
DefaultParser = PythonParser
class Connection:
"Manages TCP communication to and from a Redis server"
def __init__(
self,
host="localhost",
port=6379,
db=0,
password=None,
socket_timeout=None,
socket_connect_timeout=None,
socket_keepalive=False,
socket_keepalive_options=None,
socket_type=0,
retry_on_timeout=False,
        retry_on_error=None,
encoding="utf-8",
encoding_errors="strict",
decode_responses=False,
parser_class=DefaultParser,
socket_read_size=65536,
health_check_interval=0,
client_name=None,
username=None,
retry=None,
redis_connect_func=None,
):
"""
Initialize a new Connection.
To specify a retry policy for specific errors, first set
        `retry_on_error` to a list of the error(s) to retry on, then set
`retry` to a valid `Retry` object.
To retry on TimeoutError, `retry_on_timeout` can also be set to `True`.
"""
self.pid = os.getpid()
self.host = host
self.port = int(port)
self.db = db
self.username = username
self.client_name = client_name
self.password = password
self.socket_timeout = socket_timeout
self.socket_connect_timeout = socket_connect_timeout or socket_timeout
self.socket_keepalive = socket_keepalive
self.socket_keepalive_options = socket_keepalive_options or {}
self.socket_type = socket_type
        self.retry_on_timeout = retry_on_timeout
        # copy to avoid mutating a shared default or the caller's list below
        retry_on_error = list(retry_on_error) if retry_on_error else []
        if retry_on_timeout:
            # Add TimeoutError to the errors list to retry on
            retry_on_error.append(TimeoutError)
self.retry_on_error = retry_on_error
if retry_on_error:
if retry is None:
self.retry = Retry(NoBackoff(), 1)
else:
# deep-copy the Retry object as it is mutable
self.retry = copy.deepcopy(retry)
# Update the retry's supported errors with the specified errors
self.retry.update_supported_erros(retry_on_error)
else:
self.retry = Retry(NoBackoff(), 0)
self.health_check_interval = health_check_interval
self.next_health_check = 0
self.redis_connect_func = redis_connect_func
self.encoder = Encoder(encoding, encoding_errors, decode_responses)
self._sock = None
self._socket_read_size = socket_read_size
self.set_parser(parser_class)
self._connect_callbacks = []
self._buffer_cutoff = 6000
def __repr__(self):
repr_args = ",".join([f"{k}={v}" for k, v in self.repr_pieces()])
return f"{self.__class__.__name__}<{repr_args}>"
def repr_pieces(self):
pieces = [("host", self.host), ("port", self.port), ("db", self.db)]
if self.client_name:
pieces.append(("client_name", self.client_name))
return pieces
def __del__(self):
try:
self.disconnect()
except Exception:
pass
def register_connect_callback(self, callback):
self._connect_callbacks.append(weakref.WeakMethod(callback))
def clear_connect_callbacks(self):
self._connect_callbacks = []
def set_parser(self, parser_class):
"""
        Creates a new instance of parser_class with socket size
        _socket_read_size and assigns it as this connection's parser.
:param parser_class: The required parser class
"""
self._parser = parser_class(socket_read_size=self._socket_read_size)
def connect(self):
"Connects to the Redis server if not already connected"
if self._sock:
return
try:
sock = self._connect()
except socket.timeout:
raise TimeoutError("Timeout connecting to server")
except OSError as e:
raise ConnectionError(self._error_message(e))
self._sock = sock
try:
if self.redis_connect_func is None:
# Use the default on_connect function
self.on_connect()
else:
# Use the passed function redis_connect_func
self.redis_connect_func(self)
except RedisError:
# clean up after any error in on_connect
self.disconnect()
raise
# run any user callbacks. right now the only internal callback
# is for pubsub channel/pattern resubscription
for ref in self._connect_callbacks:
callback = ref()
if callback:
callback(self)
def _connect(self):
"Create a TCP socket connection"
# we want to mimic what socket.create_connection does to support
# ipv4/ipv6, but we want to set options prior to calling
# socket.connect()
err = None
for res in socket.getaddrinfo(
self.host, self.port, self.socket_type, socket.SOCK_STREAM
):
family, socktype, proto, canonname, socket_address = res
sock = None
try:
sock = socket.socket(family, socktype, proto)
# TCP_NODELAY
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# TCP_KEEPALIVE
if self.socket_keepalive:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
for k, v in self.socket_keepalive_options.items():
sock.setsockopt(socket.IPPROTO_TCP, k, v)
# set the socket_connect_timeout before we connect
sock.settimeout(self.socket_connect_timeout)
# connect
sock.connect(socket_address)
# set the socket_timeout now that we're connected
sock.settimeout(self.socket_timeout)
return sock
except OSError as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
raise OSError("socket.getaddrinfo returned an empty list")
def _error_message(self, exception):
# args for socket.error can either be (errno, "message")
# or just "message"
if len(exception.args) == 1:
return f"Error connecting to {self.host}:{self.port}. {exception.args[0]}."
else:
return (
f"Error {exception.args[0]} connecting to "
f"{self.host}:{self.port}. {exception.args[1]}."
)
def on_connect(self):
"Initialize the connection, authenticate and select a database"
self._parser.on_connect(self)
# if username and/or password are set, authenticate
if self.username or self.password:
if self.username:
auth_args = (self.username, self.password or "")
else:
auth_args = (self.password,)
# avoid checking health here -- PING will fail if we try
# to check the health prior to the AUTH
self.send_command("AUTH", *auth_args, check_health=False)
try:
auth_response = self.read_response()
except AuthenticationWrongNumberOfArgsError:
# a username and password were specified but the Redis
# server seems to be < 6.0.0 which expects a single password
# arg. retry auth with just the password.
# https://github.com/andymccurdy/redis-py/issues/1274
self.send_command("AUTH", self.password, check_health=False)
auth_response = self.read_response()
if str_if_bytes(auth_response) != "OK":
raise AuthenticationError("Invalid Username or Password")
# if a client_name is given, set it
if self.client_name:
self.send_command("CLIENT", "SETNAME", self.client_name)
if str_if_bytes(self.read_response()) != "OK":
raise ConnectionError("Error setting client name")
# if a database is specified, switch to it
if self.db:
self.send_command("SELECT", self.db)
if str_if_bytes(self.read_response()) != "OK":
raise ConnectionError("Invalid Database")
def disconnect(self):
"Disconnects from the Redis server"
self._parser.on_disconnect()
if self._sock is None:
return
if os.getpid() == self.pid:
try:
self._sock.shutdown(socket.SHUT_RDWR)
except OSError:
pass
try:
self._sock.close()
except OSError:
pass
self._sock = None
def _send_ping(self):
"""Send PING, expect PONG in return"""
self.send_command("PING", check_health=False)
if str_if_bytes(self.read_response()) != "PONG":
raise ConnectionError("Bad response from PING health check")
def _ping_failed(self, error):
"""Function to call when PING fails"""
self.disconnect()
def check_health(self):
"""Check the health of the connection with a PING/PONG"""
if self.health_check_interval and time() > self.next_health_check:
self.retry.call_with_retry(self._send_ping, self._ping_failed)
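    # Illustration (editor's addition, not part of the original source):
    #     conn = Connection(health_check_interval=30)
    # causes any command sent more than 30 seconds after the last response
    # to be preceded by a PING/PONG round trip on this connection.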
def send_packed_command(self, command, check_health=True):
"""Send an already packed command to the Redis server"""
if not self._sock:
self.connect()
# guard against health check recursion
if check_health:
self.check_health()
try:
if isinstance(command, str):
command = [command]
for item in command:
self._sock.sendall(item)
except socket.timeout:
self.disconnect()
raise TimeoutError("Timeout writing to socket")
except OSError as e:
self.disconnect()
if len(e.args) == 1:
errno, errmsg = "UNKNOWN", e.args[0]
else:
errno = e.args[0]
errmsg = e.args[1]
raise ConnectionError(f"Error {errno} while writing to socket. {errmsg}.")
except BaseException:
self.disconnect()
raise
def send_command(self, *args, **kwargs):
"""Pack and send a command to the Redis server"""
self.send_packed_command(
self.pack_command(*args), check_health=kwargs.get("check_health", True)
)
def can_read(self, timeout=0):
"""Poll the socket to see if there's data that can be read."""
sock = self._sock
if not sock:
self.connect()
return self._parser.can_read(timeout)
def read_response(self, disable_decoding=False):
"""Read the response from a previously sent command"""
try:
response = self._parser.read_response(disable_decoding=disable_decoding)
except socket.timeout:
self.disconnect()
raise TimeoutError(f"Timeout reading from {self.host}:{self.port}")
except OSError as e:
self.disconnect()
raise ConnectionError(
f"Error while reading from {self.host}:{self.port}" f" : {e.args}"
)
except BaseException:
self.disconnect()
raise
if self.health_check_interval:
self.next_health_check = time() + self.health_check_interval
if isinstance(response, ResponseError):
raise response
return response
def pack_command(self, *args):
"""Pack a series of arguments into the Redis protocol"""
output = []
# the client might have included 1 or more literal arguments in
# the command name, e.g., 'CONFIG GET'. The Redis server expects these
# arguments to be sent separately, so split the first argument
# manually. These arguments should be bytestrings so that they are
# not encoded.
if isinstance(args[0], str):
args = tuple(args[0].encode().split()) + args[1:]
elif b" " in args[0]:
args = tuple(args[0].split()) + args[1:]
buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))
buffer_cutoff = self._buffer_cutoff
for arg in map(self.encoder.encode, args):
# to avoid large string mallocs, chunk the command into the
# output list if we're sending large values or memoryviews
arg_length = len(arg)
if (
len(buff) > buffer_cutoff
or arg_length > buffer_cutoff
or isinstance(arg, memoryview)
):
buff = SYM_EMPTY.join(
(buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF)
)
output.append(buff)
output.append(arg)
buff = SYM_CRLF
else:
buff = SYM_EMPTY.join(
(
buff,
SYM_DOLLAR,
str(arg_length).encode(),
SYM_CRLF,
arg,
SYM_CRLF,
)
)
output.append(buff)
return output
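    # Illustration (editor's addition, not in the original source): for a
    # command tuple ("SET", "foo", "bar"), pack_command yields the RESP bytes
    #     *3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n
    # i.e. an array header followed by one length-prefixed bulk string per
    # argument -- the inverse of what read_response() parses.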
def pack_commands(self, commands):
"""Pack multiple commands into the Redis protocol"""
output = []
pieces = []
buffer_length = 0
buffer_cutoff = self._buffer_cutoff
for cmd in commands:
for chunk in self.pack_command(*cmd):
chunklen = len(chunk)
if (
buffer_length > buffer_cutoff
or chunklen > buffer_cutoff
or isinstance(chunk, memoryview)
):
output.append(SYM_EMPTY.join(pieces))
buffer_length = 0
pieces = []
if chunklen > buffer_cutoff or isinstance(chunk, memoryview):
output.append(chunk)
else:
pieces.append(chunk)
buffer_length += chunklen
if pieces:
output.append(SYM_EMPTY.join(pieces))
return output
class SSLConnection(Connection):
"""Manages SSL connections to and from the Redis server(s).
This class extends the Connection class, adding SSL functionality, and making
use of ssl.SSLContext (https://docs.python.org/3/library/ssl.html#ssl.SSLContext)
""" # noqa
def __init__(
self,
ssl_keyfile=None,
ssl_certfile=None,
ssl_cert_reqs="required",
ssl_ca_certs=None,
ssl_check_hostname=False,
ssl_ca_path=None,
ssl_password=None,
ssl_validate_ocsp=False,
**kwargs,
):
"""Constructor
Args:
ssl_keyfile: Path to an ssl private key. Defaults to None.
ssl_certfile: Path to an ssl certificate. Defaults to None.
ssl_cert_reqs: The string value for the SSLContext.verify_mode (none, optional, required). Defaults to "required".
ssl_ca_certs: The path to a file of concatenated CA certificates in PEM format. Defaults to None.
ssl_check_hostname: If set, match the hostname during the SSL handshake. Defaults to False.
ssl_ca_path: The path to a directory containing several CA certificates in PEM format. Defaults to None.
ssl_password: Password for unlocking an encrypted private key. Defaults to None.
Raises:
RedisError
""" # noqa
if not ssl_available:
raise RedisError("Python wasn't built with SSL support")
super().__init__(**kwargs)
self.keyfile = ssl_keyfile
self.certfile = ssl_certfile
if ssl_cert_reqs is None:
ssl_cert_reqs = ssl.CERT_NONE
elif isinstance(ssl_cert_reqs, str):
CERT_REQS = {
"none": ssl.CERT_NONE,
"optional": ssl.CERT_OPTIONAL,
"required": ssl.CERT_REQUIRED,
}
if ssl_cert_reqs not in CERT_REQS:
raise RedisError(
f"Invalid SSL Certificate Requirements Flag: {ssl_cert_reqs}"
)
ssl_cert_reqs = CERT_REQS[ssl_cert_reqs]
self.cert_reqs = ssl_cert_reqs
self.ca_certs = ssl_ca_certs
self.ca_path = ssl_ca_path
self.check_hostname = ssl_check_hostname
self.certificate_password = ssl_password
self.ssl_validate_ocsp = ssl_validate_ocsp
def _connect(self):
"Wrap the socket with SSL support"
sock = super()._connect()
context = ssl.create_default_context()
context.check_hostname = self.check_hostname
context.verify_mode = self.cert_reqs
if self.certfile or self.keyfile:
context.load_cert_chain(
certfile=self.certfile,
keyfile=self.keyfile,
password=self.certificate_password,
)
if self.ca_certs is not None or self.ca_path is not None:
context.load_verify_locations(cafile=self.ca_certs, capath=self.ca_path)
sslsock = context.wrap_socket(sock, server_hostname=self.host)
if self.ssl_validate_ocsp is True and CRYPTOGRAPHY_AVAILABLE is False:
raise RedisError("cryptography is not installed.")
elif self.ssl_validate_ocsp is True and CRYPTOGRAPHY_AVAILABLE:
from .ocsp import OCSPVerifier
o = OCSPVerifier(sslsock, self.host, self.port, self.ca_certs)
if o.is_valid():
return sslsock
else:
raise ConnectionError("ocsp validation error")
return sslsock
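# Illustration (editor's sketch, not in the original source; the host and CA
# path are hypothetical): a verified TLS connection against a custom CA bundle
# could be built as
#     conn = SSLConnection(host="redis.example.com", port=6380,
#                          ssl_cert_reqs="required",
#                          ssl_ca_certs="/etc/ssl/certs/my-ca.pem")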
class UnixDomainSocketConnection(Connection):
def __init__(
self,
path="",
db=0,
username=None,
password=None,
socket_timeout=None,
encoding="utf-8",
encoding_errors="strict",
decode_responses=False,
retry_on_timeout=False,
        retry_on_error=None,
parser_class=DefaultParser,
socket_read_size=65536,
health_check_interval=0,
client_name=None,
retry=None,
redis_connect_func=None,
):
"""
Initialize a new UnixDomainSocketConnection.
To specify a retry policy for specific errors, first set
        `retry_on_error` to a list of the error(s) to retry on, then set
`retry` to a valid `Retry` object.
To retry on TimeoutError, `retry_on_timeout` can also be set to `True`.
"""
self.pid = os.getpid()
self.path = path
self.db = db
self.username = username
self.client_name = client_name
self.password = password
self.socket_timeout = socket_timeout
        self.retry_on_timeout = retry_on_timeout
        # copy to avoid mutating a shared default or the caller's list below
        retry_on_error = list(retry_on_error) if retry_on_error else []
        if retry_on_timeout:
            # Add TimeoutError to the errors list to retry on
            retry_on_error.append(TimeoutError)
self.retry_on_error = retry_on_error
if self.retry_on_error:
if retry is None:
self.retry = Retry(NoBackoff(), 1)
else:
# deep-copy the Retry object as it is mutable
self.retry = copy.deepcopy(retry)
# Update the retry's supported errors with the specified errors
self.retry.update_supported_erros(retry_on_error)
else:
self.retry = Retry(NoBackoff(), 0)
self.health_check_interval = health_check_interval
self.next_health_check = 0
self.redis_connect_func = redis_connect_func
self.encoder = Encoder(encoding, encoding_errors, decode_responses)
self._sock = None
self._socket_read_size = socket_read_size
self.set_parser(parser_class)
self._connect_callbacks = []
self._buffer_cutoff = 6000
def repr_pieces(self):
pieces = [
("path", self.path),
("db", self.db),
]
if self.client_name:
pieces.append(("client_name", self.client_name))
return pieces
def _connect(self):
"Create a Unix domain socket connection"
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.socket_timeout)
sock.connect(self.path)
return sock
def _error_message(self, exception):
# args for socket.error can either be (errno, "message")
# or just "message"
if len(exception.args) == 1:
return f"Error connecting to unix socket: {self.path}. {exception.args[0]}."
else:
return (
f"Error {exception.args[0]} connecting to unix socket: "
f"{self.path}. {exception.args[1]}."
)
FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO")
def to_bool(value):
if value is None or value == "":
return None
if isinstance(value, str) and value.upper() in FALSE_STRINGS:
return False
return bool(value)
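# Illustration (editor's addition, not in the original source):
#     to_bool("0") -> False, to_bool("yes") -> True, to_bool("") -> None
# It is used below to coerce boolean-ish querystring flags.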
URL_QUERY_ARGUMENT_PARSERS = {
"db": int,
"socket_timeout": float,
"socket_connect_timeout": float,
"socket_keepalive": to_bool,
"retry_on_timeout": to_bool,
"retry_on_error": list,
"max_connections": int,
"health_check_interval": int,
"ssl_check_hostname": to_bool,
}
def parse_url(url):
url = urlparse(url)
kwargs = {}
for name, value in parse_qs(url.query).items():
if value and len(value) > 0:
value = unquote(value[0])
parser = URL_QUERY_ARGUMENT_PARSERS.get(name)
if parser:
try:
kwargs[name] = parser(value)
except (TypeError, ValueError):
raise ValueError(f"Invalid value for `{name}` in connection URL.")
else:
kwargs[name] = value
if url.username:
kwargs["username"] = unquote(url.username)
if url.password:
kwargs["password"] = unquote(url.password)
# We only support redis://, rediss:// and unix:// schemes.
if url.scheme == "unix":
if url.path:
kwargs["path"] = unquote(url.path)
kwargs["connection_class"] = UnixDomainSocketConnection
elif url.scheme in ("redis", "rediss"):
if url.hostname:
kwargs["host"] = unquote(url.hostname)
if url.port:
kwargs["port"] = int(url.port)
# If there's a path argument, use it as the db argument if a
# querystring value wasn't specified
if url.path and "db" not in kwargs:
try:
kwargs["db"] = int(unquote(url.path).replace("/", ""))
except (AttributeError, ValueError):
pass
if url.scheme == "rediss":
kwargs["connection_class"] = SSLConnection
else:
raise ValueError(
"Redis URL must specify one of the following "
"schemes (redis://, rediss://, unix://)"
)
return kwargs
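# Illustration (editor's addition, not in the original source; the URL is
# hypothetical): parse_url maps a connection URL onto constructor kwargs, e.g.
#     parse_url("redis://user:secret@example.com:6380/2?socket_timeout=5")
#     # -> {"username": "user", "password": "secret", "host": "example.com",
#     #     "port": 6380, "db": 2, "socket_timeout": 5.0}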
class ConnectionPool:
"""
    Create a connection pool. If ``max_connections`` is set, then this
object raises :py:class:`~redis.ConnectionError` when the pool's
limit is reached.
By default, TCP connections are created unless ``connection_class``
is specified. Use :py:class:`~redis.UnixDomainSocketConnection` for
unix sockets.
Any additional keyword arguments are passed to the constructor of
``connection_class``.
"""
@classmethod
def from_url(cls, url, **kwargs):
"""
Return a connection pool configured from the given URL.
For example::
redis://[[username]:[password]]@localhost:6379/0
rediss://[[username]:[password]]@localhost:6379/0
unix://[[username]:[password]]@/path/to/socket.sock?db=0
Three URL schemes are supported:
        - ``redis://`` creates a TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/redis>
        - ``rediss://`` creates an SSL-wrapped TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/rediss>
        - ``unix://`` creates a Unix Domain Socket connection.
The username, password, hostname, path and all querystring values
are passed through urllib.parse.unquote in order to replace any
percent-encoded values with their corresponding characters.
There are several ways to specify a database number. The first value
found will be used:
1. A ``db`` querystring option, e.g. redis://localhost?db=0
2. If using the redis:// or rediss:// schemes, the path argument
of the url, e.g. redis://localhost/0
3. A ``db`` keyword argument to this function.
If none of these options are specified, the default db=0 is used.
All querystring options are cast to their appropriate Python types.
Boolean arguments can be specified with string values "True"/"False"
or "Yes"/"No". Values that cannot be properly cast cause a
``ValueError`` to be raised. Once parsed, the querystring arguments
and keyword arguments are passed to the ``ConnectionPool``'s
class initializer. In the case of conflicting arguments, querystring
arguments always win.
"""
url_options = parse_url(url)
if "connection_class" in kwargs:
url_options["connection_class"] = kwargs["connection_class"]
kwargs.update(url_options)
return cls(**kwargs)
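    # Illustration (editor's addition, not part of the original source):
    #     pool = ConnectionPool.from_url("redis://localhost:6379/0")
    #     conn = pool.get_connection("PING")
    #     ...
    #     pool.release(conn)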
def __init__(
self, connection_class=Connection, max_connections=None, **connection_kwargs
):
max_connections = max_connections or 2 ** 31
if not isinstance(max_connections, int) or max_connections < 0:
raise ValueError('"max_connections" must be a positive integer')
self.connection_class = connection_class
self.connection_kwargs = connection_kwargs
self.max_connections = max_connections
# a lock to protect the critical section in _checkpid().
# this lock is acquired when the process id changes, such as
# after a fork. during this time, multiple threads in the child
# process could attempt to acquire this lock. the first thread
# to acquire the lock will reset the data structures and lock
# object of this pool. subsequent threads acquiring this lock
# will notice the first thread already did the work and simply
# release the lock.
self._fork_lock = threading.Lock()
self.reset()
def __repr__(self):
return (
f"{type(self).__name__}"
f"<{repr(self.connection_class(**self.connection_kwargs))}>"
)
def reset(self):
self._lock = threading.Lock()
self._created_connections = 0
self._available_connections = []
self._in_use_connections = set()
# this must be the last operation in this method. while reset() is
# called when holding _fork_lock, other threads in this process
# can call _checkpid() which compares self.pid and os.getpid() without
# holding any lock (for performance reasons). keeping this assignment
# as the last operation ensures that those other threads will also
# notice a pid difference and block waiting for the first thread to
# release _fork_lock. when each of these threads eventually acquire
# _fork_lock, they will notice that another thread already called
# reset() and they will immediately release _fork_lock and continue on.
self.pid = os.getpid()
def _checkpid(self):
# _checkpid() attempts to keep ConnectionPool fork-safe on modern
# systems. this is called by all ConnectionPool methods that
# manipulate the pool's state such as get_connection() and release().
#
# _checkpid() determines whether the process has forked by comparing
# the current process id to the process id saved on the ConnectionPool
# instance. if these values are the same, _checkpid() simply returns.
#
# when the process ids differ, _checkpid() assumes that the process
# has forked and that we're now running in the child process. the child
# process cannot use the parent's file descriptors (e.g., sockets).
# therefore, when _checkpid() sees the process id change, it calls
# reset() in order to reinitialize the child's ConnectionPool. this
# will cause the child to make all new connection objects.
#
# _checkpid() is protected by self._fork_lock to ensure that multiple
# threads in the child process do not call reset() multiple times.
#
# there is an extremely small chance this could fail in the following
# scenario:
# 1. process A calls _checkpid() for the first time and acquires
# self._fork_lock.
# 2. while holding self._fork_lock, process A forks (the fork()
# could happen in a different thread owned by process A)
# 3. process B (the forked child process) inherits the
# ConnectionPool's state from the parent. that state includes
# a locked _fork_lock. process B will not be notified when
# process A releases the _fork_lock and will thus never be
# able to acquire the _fork_lock.
#
# to mitigate this possible deadlock, _checkpid() will only wait 5
# seconds to acquire _fork_lock. if _fork_lock cannot be acquired in
# that time it is assumed that the child is deadlocked and a
# redis.ChildDeadlockedError error is raised.
if self.pid != os.getpid():
acquired = self._fork_lock.acquire(timeout=5)
if not acquired:
raise ChildDeadlockedError
# reset() the instance for the new process if another thread
# hasn't already done so
try:
if self.pid != os.getpid():
self.reset()
finally:
self._fork_lock.release()
def get_connection(self, command_name, *keys, **options):
"Get a connection from the pool"
self._checkpid()
with self._lock:
try:
connection = self._available_connections.pop()
except IndexError:
connection = self.make_connection()
self._in_use_connections.add(connection)
try:
# ensure this connection is connected to Redis
connection.connect()
# connections that the pool provides should be ready to send
# a command. if not, the connection was either returned to the
# pool before all data has been read or the socket has been
# closed. either way, reconnect and verify everything is good.
try:
if connection.can_read():
raise ConnectionError("Connection has data")
except ConnectionError:
connection.disconnect()
connection.connect()
if connection.can_read():
raise ConnectionError("Connection not ready")
except BaseException:
# release the connection back to the pool so that we don't
# leak it
self.release(connection)
raise
return connection
def get_encoder(self):
"Return an encoder based on encoding settings"
kwargs = self.connection_kwargs
return Encoder(
encoding=kwargs.get("encoding", "utf-8"),
encoding_errors=kwargs.get("encoding_errors", "strict"),
decode_responses=kwargs.get("decode_responses", False),
)
def make_connection(self):
"Create a new connection"
if self._created_connections >= self.max_connections:
raise ConnectionError("Too many connections")
self._created_connections += 1
return self.connection_class(**self.connection_kwargs)
def release(self, connection):
"Releases the connection back to the pool"
self._checkpid()
with self._lock:
try:
self._in_use_connections.remove(connection)
except KeyError:
# Gracefully fail when a connection is returned to this pool
# that the pool doesn't actually own
pass
if self.owns_connection(connection):
self._available_connections.append(connection)
else:
# pool doesn't own this connection. do not add it back
# to the pool and decrement the count so that another
# connection can take its place if needed
self._created_connections -= 1
connection.disconnect()
return
def owns_connection(self, connection):
return connection.pid == self.pid
def disconnect(self, inuse_connections=True):
"""
Disconnects connections in the pool
        If ``inuse_connections`` is True, disconnect connections that are
        currently in use, potentially by other threads. Otherwise only disconnect
connections that are idle in the pool.
"""
self._checkpid()
with self._lock:
if inuse_connections:
connections = chain(
self._available_connections, self._in_use_connections
)
else:
connections = self._available_connections
for connection in connections:
connection.disconnect()
class BlockingConnectionPool(ConnectionPool):
"""
Thread-safe blocking connection pool::
>>> from redis.client import Redis
>>> client = Redis(connection_pool=BlockingConnectionPool())
    It performs the same function as the default
    :py:class:`~redis.ConnectionPool` implementation, in that
    it maintains a pool of reusable connections that can be shared by
    multiple redis clients (safely across threads if required).
    The difference is that, in the event that a client tries to get a
    connection from the pool when all of the connections are in use, rather than
raising a :py:class:`~redis.ConnectionError` (as the default
:py:class:`~redis.ConnectionPool` implementation does), it
makes the client wait ("blocks") for a specified number of seconds until
a connection becomes available.
Use ``max_connections`` to increase / decrease the pool size::
>>> pool = BlockingConnectionPool(max_connections=10)
Use ``timeout`` to tell it either how many seconds to wait for a connection
to become available, or to block forever:
>>> # Block forever.
>>> pool = BlockingConnectionPool(timeout=None)
>>> # Raise a ``ConnectionError`` after five seconds if a connection is
>>> # not available.
>>> pool = BlockingConnectionPool(timeout=5)
"""
def __init__(
self,
max_connections=50,
timeout=20,
connection_class=Connection,
queue_class=LifoQueue,
**connection_kwargs,
):
self.queue_class = queue_class
self.timeout = timeout
super().__init__(
connection_class=connection_class,
max_connections=max_connections,
**connection_kwargs,
)
def reset(self):
# Create and fill up a thread safe queue with ``None`` values.
self.pool = self.queue_class(self.max_connections)
while True:
try:
self.pool.put_nowait(None)
except Full:
break
# Keep a list of actual connection instances so that we can
# disconnect them later.
self._connections = []
# this must be the last operation in this method. while reset() is
# called when holding _fork_lock, other threads in this process
# can call _checkpid() which compares self.pid and os.getpid() without
# holding any lock (for performance reasons). keeping this assignment
# as the last operation ensures that those other threads will also
# notice a pid difference and block waiting for the first thread to
# release _fork_lock. when each of these threads eventually acquire
# _fork_lock, they will notice that another thread already called
# reset() and they will immediately release _fork_lock and continue on.
self.pid = os.getpid()
def make_connection(self):
"Make a fresh connection."
connection = self.connection_class(**self.connection_kwargs)
self._connections.append(connection)
return connection
def get_connection(self, command_name, *keys, **options):
"""
Get a connection, blocking for ``self.timeout`` until a connection
is available from the pool.
        If the connection returned is ``None`` then a new connection is created.
Because we use a last-in first-out queue, the existing connections
(having been returned to the pool after the initial ``None`` values
were added) will be returned before ``None`` values. This means we only
create new connections when we need to, i.e.: the actual number of
connections will only increase in response to demand.
"""
# Make sure we haven't changed process.
self._checkpid()
# Try and get a connection from the pool. If one isn't available within
# self.timeout then raise a ``ConnectionError``.
connection = None
try:
connection = self.pool.get(block=True, timeout=self.timeout)
except Empty:
            # Note that this is not caught by the redis client and will be
            # raised unless handled by application code.
raise ConnectionError("No connection available.")
# If the ``connection`` is actually ``None`` then that's a cue to make
# a new connection to add to the pool.
if connection is None:
connection = self.make_connection()
try:
# ensure this connection is connected to Redis
connection.connect()
# connections that the pool provides should be ready to send
# a command. if not, the connection was either returned to the
# pool before all data has been read or the socket has been
# closed. either way, reconnect and verify everything is good.
try:
if connection.can_read():
raise ConnectionError("Connection has data")
except ConnectionError:
connection.disconnect()
connection.connect()
if connection.can_read():
raise ConnectionError("Connection not ready")
except BaseException:
# release the connection back to the pool so that we don't leak it
self.release(connection)
raise
return connection
def release(self, connection):
"Releases the connection back to the pool."
# Make sure we haven't changed process.
self._checkpid()
if not self.owns_connection(connection):
# pool doesn't own this connection. do not add it back
# to the pool. instead add a None value which is a placeholder
# that will cause the pool to recreate the connection if
# its needed.
connection.disconnect()
self.pool.put_nowait(None)
return
# Put the connection back into the pool.
try:
self.pool.put_nowait(connection)
except Full:
# perhaps the pool has been reset() after a fork? regardless,
# we don't want this connection
pass
def disconnect(self):
"Disconnects all connections in the pool."
self._checkpid()
for connection in self._connections:
connection.disconnect()
| 37.431701
| 126
| 0.609633
|
321082411933c2a997df250a29e221806c1a5efd
| 1,006
|
py
|
Python
|
Edge_Detection_Examples/EdgeSlider.py
|
user2745/BeeFanningDetector
|
52d562c3c2a40179386ae3e04b02631e92c5747b
|
[
"MIT"
] | 3
|
2020-01-01T17:42:24.000Z
|
2020-05-28T15:30:33.000Z
|
Edge_Detection_Examples/EdgeSlider.py
|
user2745/BeeFanningDetector
|
52d562c3c2a40179386ae3e04b02631e92c5747b
|
[
"MIT"
] | 2
|
2020-01-21T06:18:10.000Z
|
2020-04-20T22:51:15.000Z
|
Edge_Detection_Examples/EdgeSlider.py
|
user2745/BeeFanningDetector
|
52d562c3c2a40179386ae3e04b02631e92c5747b
|
[
"MIT"
] | 1
|
2020-01-20T05:57:40.000Z
|
2020-01-20T05:57:40.000Z
|
from __future__ import print_function
import cv2 as cv
import argparse
import sys
max_lowThreshold = 100
window_name = sys.argv[2]  # the value passed to --input doubles as the window title
title_trackbar = 'Min Threshold:'
ratio = 2
kernel_size = 3
def CannyThreshold(val):
    # Trackbar callback: re-run Canny edge detection with the current lower
    # threshold (the upper threshold is tied to it via `ratio`).
    low_threshold = val
    img_blur = cv.blur(src_gray, (3, 3))
    detected_edges = cv.Canny(img_blur, low_threshold, low_threshold * ratio)
    # Mask the colour source with the edge map (kept for reference; the raw
    # edge map is what gets displayed).
    mask = detected_edges != 0
    dst = src * (mask[:, :, None].astype(src.dtype))
    cv.imshow(window_name, detected_edges)
parser = argparse.ArgumentParser(description='Code for Canny Edge Detector tutorial.')
parser.add_argument('--input', help='Path to input image.', default=sys.argv[2])
args = parser.parse_args()
src = cv.imread(cv.samples.findFile(args.input))
if src is None:
    print('Could not open or find the image:', args.input)
    sys.exit(1)
src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY)
cv.namedWindow(window_name)
cv.createTrackbar(title_trackbar, window_name , 0, max_lowThreshold, CannyThreshold)
CannyThreshold(0)
cv.waitKey()
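# Editor's usage sketch (assumed invocation, not part of the original script):
#     python EdgeSlider.py --input frame.png
# Because window_name is read from sys.argv[2], the image path must be given
# as the value of --input (the second CLI token); it then doubles as the
# window title while the trackbar varies the lower Canny threshold.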
| 35.928571
| 86
| 0.747515
|
be7f075fea00a4570d50fd30f1685139b70a8bb6
| 123
|
py
|
Python
|
configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py
|
evgps/mmdetection_trashcan
|
aaf4237c2c0d473425cdc7b741d3009177b79751
|
[
"Apache-2.0"
] | 426
|
2020-10-16T08:09:27.000Z
|
2022-03-30T03:36:04.000Z
|
configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py
|
evgps/mmdetection_trashcan
|
aaf4237c2c0d473425cdc7b741d3009177b79751
|
[
"Apache-2.0"
] | 170
|
2020-09-08T12:29:06.000Z
|
2022-03-31T18:28:09.000Z
|
configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py
|
evgps/mmdetection_trashcan
|
aaf4237c2c0d473425cdc7b741d3009177b79751
|
[
"Apache-2.0"
] | 61
|
2021-07-30T07:51:41.000Z
|
2022-03-30T14:40:02.000Z
|
_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py'
model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101))
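# Editor's note (not in the original config): this file only overrides the
# backbone of the inherited VFNet R-50 FPN multi-scale 2x schedule, swapping
# in torchvision's pretrained ResNet-101 weights; every other setting comes
# from the _base_ config.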
| 41
| 76
| 0.780488
|
c66402e8e11b9b0cedd0f70dd4582e22bf6ab570
| 8,335
|
py
|
Python
|
sdk/lusid_asyncio/models/paged_resource_list_of_compliance_rule_result.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
sdk/lusid_asyncio/models/paged_resource_list_of_compliance_rule_result.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
sdk/lusid_asyncio/models/paged_resource_list_of_compliance_rule_result.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3923
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid_asyncio.configuration import Configuration
class PagedResourceListOfComplianceRuleResult(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'next_page': 'str',
'previous_page': 'str',
'values': 'list[ComplianceRuleResult]',
'href': 'str',
'links': 'list[Link]'
}
attribute_map = {
'next_page': 'nextPage',
'previous_page': 'previousPage',
'values': 'values',
'href': 'href',
'links': 'links'
}
required_map = {
'next_page': 'optional',
'previous_page': 'optional',
'values': 'required',
'href': 'optional',
'links': 'optional'
}
def __init__(self, next_page=None, previous_page=None, values=None, href=None, links=None, local_vars_configuration=None): # noqa: E501
"""PagedResourceListOfComplianceRuleResult - a model defined in OpenAPI"
:param next_page: The next page of results.
:type next_page: str
:param previous_page: The previous page of results.
:type previous_page: str
:param values: The resources to list. (required)
:type values: list[lusid_asyncio.ComplianceRuleResult]
:param href: The URI of the resource list.
:type href: str
:param links: Collection of links.
:type links: list[lusid_asyncio.Link]
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._next_page = None
self._previous_page = None
self._values = None
self._href = None
self._links = None
self.discriminator = None
self.next_page = next_page
self.previous_page = previous_page
self.values = values
self.href = href
self.links = links
@property
def next_page(self):
"""Gets the next_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
The next page of results. # noqa: E501
:return: The next_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:rtype: str
"""
return self._next_page
@next_page.setter
def next_page(self, next_page):
"""Sets the next_page of this PagedResourceListOfComplianceRuleResult.
The next page of results. # noqa: E501
:param next_page: The next_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:type next_page: str
"""
self._next_page = next_page
@property
def previous_page(self):
"""Gets the previous_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
The previous page of results. # noqa: E501
:return: The previous_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:rtype: str
"""
return self._previous_page
@previous_page.setter
def previous_page(self, previous_page):
"""Sets the previous_page of this PagedResourceListOfComplianceRuleResult.
The previous page of results. # noqa: E501
:param previous_page: The previous_page of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:type previous_page: str
"""
self._previous_page = previous_page
@property
def values(self):
"""Gets the values of this PagedResourceListOfComplianceRuleResult. # noqa: E501
The resources to list. # noqa: E501
:return: The values of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:rtype: list[lusid_asyncio.ComplianceRuleResult]
"""
return self._values
@values.setter
def values(self, values):
"""Sets the values of this PagedResourceListOfComplianceRuleResult.
The resources to list. # noqa: E501
:param values: The values of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:type values: list[lusid_asyncio.ComplianceRuleResult]
"""
if self.local_vars_configuration.client_side_validation and values is None: # noqa: E501
raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501
self._values = values
@property
def href(self):
"""Gets the href of this PagedResourceListOfComplianceRuleResult. # noqa: E501
The URI of the resource list. # noqa: E501
:return: The href of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:rtype: str
"""
return self._href
@href.setter
def href(self, href):
"""Sets the href of this PagedResourceListOfComplianceRuleResult.
The URI of the resource list. # noqa: E501
:param href: The href of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:type href: str
"""
self._href = href
@property
def links(self):
"""Gets the links of this PagedResourceListOfComplianceRuleResult. # noqa: E501
Collection of links. # noqa: E501
:return: The links of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:rtype: list[lusid_asyncio.Link]
"""
return self._links
@links.setter
def links(self, links):
"""Sets the links of this PagedResourceListOfComplianceRuleResult.
Collection of links. # noqa: E501
:param links: The links of this PagedResourceListOfComplianceRuleResult. # noqa: E501
:type links: list[lusid_asyncio.Link]
"""
self._links = links
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PagedResourceListOfComplianceRuleResult):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, PagedResourceListOfComplianceRuleResult):
return True
return self.to_dict() != other.to_dict()
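# Editor's usage sketch (hypothetical values, not part of the generated file):
#     page = PagedResourceListOfComplianceRuleResult(values=[])
#     page.to_dict()
#     # -> {'next_page': None, 'previous_page': None, 'values': [],
#     #     'href': None, 'links': None}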
| 31.217228
| 140
| 0.616197
|